code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE CPP #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
#ifndef NO_OVERLAP
{-# LANGUAGE OverlappingInstances #-}
#endif
module Database.Persist.Sql.Class
( RawSql (..)
, PersistFieldSql (..)
) where
import Control.Applicative ((<$>), (<*>))
import Database.Persist
import Data.Monoid ((<>))
import Database.Persist.Sql.Types
import Control.Arrow ((&&&))
import Data.Text (Text, intercalate, pack)
import Data.Maybe (fromMaybe)
import Data.Fixed
import Data.Proxy (Proxy)
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import qualified Data.Map as M
import qualified Data.IntMap as IM
import qualified Data.Set as S
import Data.Time (UTCTime, TimeOfDay, Day)
import Data.Int
import Data.Word
import Data.ByteString (ByteString)
import Text.Blaze.Html (Html)
import Data.Bits (bitSize)
import qualified Data.Vector as V
-- | Class for data types that may be retrived from a 'rawSql'
-- query.
class RawSql a where
    -- | Number of columns that this data type needs and the list
    -- of substitutions for @SELECT@ placeholders @??@.
    -- The first argument escapes a 'DBName' into a properly quoted
    -- identifier for the backend in use.
    rawSqlCols :: (DBName -> Text) -> a -> (Int, [Text])
    -- | A string telling the user why the column count is what
    -- it is.
    rawSqlColCountReason :: a -> String
    -- | Transform a row of the result into the data type.
    -- Returns 'Left' with a message when the row does not have the
    -- expected shape.
    rawSqlProcessRow :: [PersistValue] -> Either Text a
-- | A 'Single' consumes exactly one column of the result row.
instance PersistField a => RawSql (Single a) where
    rawSqlCols _ _ = (1, [])
    rawSqlColCountReason _ = "one column for a 'Single' data type"
    rawSqlProcessRow [pv] = Single <$> fromPersistValue pv
    rawSqlProcessRow _ = Left $ pack "RawSql (Single a): wrong number of columns."
-- | A 'Key' consumes as many columns as the (possibly composite)
-- primary key has fields.
instance (PersistEntity a, PersistEntityBackend a ~ SqlBackend) => RawSql (Key a) where
    rawSqlCols _ key = (length $ keyToValues key, [])
    rawSqlColCountReason key = "The primary key is composed of "
                            ++ (show $ length $ keyToValues key)
                            ++ " columns"
    rawSqlProcessRow = keyFromValues
-- | An 'Entity' consumes the key columns followed by all field
-- columns, in the order given by the entity definition.
instance (PersistEntity record, PersistEntityBackend record ~ SqlBackend)
         => RawSql (Entity record) where
    rawSqlCols escape ent = (length sqlFields, [intercalate ", " sqlFields])
        where
          -- Fully qualified, escaped column names: "table"."column".
          sqlFields = map (((name <> ".") <>) . escape)
              $ map fieldDB
              -- Hacky for a composite key because
              -- it selects the same field multiple times
              $ entityKeyFields entDef ++ entityFields entDef
          name = escape (entityDB entDef)
          entDef = entityDef (Nothing :: Maybe record)
    rawSqlColCountReason a =
        -- The escape function is never used when only counting columns,
        -- hence the 'error' placeholder is safe here.
        case fst (rawSqlCols (error "RawSql") a) of
          1 -> "one column for an 'Entity' data type without fields"
          n -> show n ++ " columns for an 'Entity' data type"
    -- Split the row into the key prefix and the remaining field values.
    rawSqlProcessRow row = case splitAt nKeyFields row of
        (rowKey, rowVal) -> Entity <$> keyFromValues rowKey
                                   <*> fromPersistValues rowVal
      where
        nKeyFields = length $ entityKeyFields entDef
        entDef = entityDef (Nothing :: Maybe record)
-- | Since 1.0.1.
--
-- A row consisting entirely of NULLs (as produced by an OUTER JOIN
-- with no match) decodes to 'Nothing'; otherwise the inner parser runs.
instance RawSql a => RawSql (Maybe a) where
    rawSqlCols e = rawSqlCols e . extractMaybe
    rawSqlColCountReason = rawSqlColCountReason . extractMaybe
    rawSqlProcessRow cols
        | all isNull cols = return Nothing
        | otherwise =
            case rawSqlProcessRow cols of
              Right v  -> Right (Just v)
              Left msg -> Left $ "RawSql (Maybe a): not all columns were Null " <>
                                 "but the inner parser has failed. Its message " <>
                                 "was \"" <> msg <> "\". Did you apply Maybe " <>
                                 "to a tuple, perhaps? The main use case for " <>
                                 "Maybe is to allow OUTER JOINs to be written, " <>
                                 "in which case 'Maybe (Entity v)' is used."
        where isNull PersistNull = True
              isNull _ = False
-- | Pairs consume the columns of the first component followed by the
-- columns of the second.  All larger tuples are reduced to this case.
instance (RawSql a, RawSql b) => RawSql (a, b) where
    rawSqlCols e x = rawSqlCols e (fst x) # rawSqlCols e (snd x)
        where (cnta, lsta) # (cntb, lstb) = (cnta + cntb, lsta ++ lstb)
    rawSqlColCountReason x = rawSqlColCountReason (fst x) ++ ", " ++
                             rawSqlColCountReason (snd x)
    rawSqlProcessRow =
        let x = getType processRow
            -- 'getType' is never evaluated; it only exists so the type
            -- checker can connect the result type of 'processRow' with
            -- the value whose column count is queried below.
            getType :: (z -> Either y x) -> x
            getType = error "RawSql.getType"
            colCountFst = fst $ rawSqlCols (error "RawSql.getType2") (fst x)
            processRow row =
                let (rowFst, rowSnd) = splitAt colCountFst row
                in (,) <$> rawSqlProcessRow rowFst
                       <*> rawSqlProcessRow rowSnd
        in colCountFst `seq` processRow
           -- Avoids recalculating 'colCountFst'.
-- Larger tuples are handled by reassociating them into nested pairs
-- (via the @fromN@/@toN@ helpers below), so the @(a, b)@ instance does
-- all of the actual splitting work.
instance (RawSql a, RawSql b, RawSql c) => RawSql (a, b, c) where
    rawSqlCols e = rawSqlCols e . from3
    rawSqlColCountReason = rawSqlColCountReason . from3
    rawSqlProcessRow = fmap to3 . rawSqlProcessRow

from3 :: (a,b,c) -> ((a,b),c)
from3 (a,b,c) = ((a,b),c)

to3 :: ((a,b),c) -> (a,b,c)
to3 ((a,b),c) = (a,b,c)

instance (RawSql a, RawSql b, RawSql c, RawSql d) => RawSql (a, b, c, d) where
    rawSqlCols e = rawSqlCols e . from4
    rawSqlColCountReason = rawSqlColCountReason . from4
    rawSqlProcessRow = fmap to4 . rawSqlProcessRow

from4 :: (a,b,c,d) -> ((a,b),(c,d))
from4 (a,b,c,d) = ((a,b),(c,d))

to4 :: ((a,b),(c,d)) -> (a,b,c,d)
to4 ((a,b),(c,d)) = (a,b,c,d)

instance (RawSql a, RawSql b, RawSql c,
          RawSql d, RawSql e)
       => RawSql (a, b, c, d, e) where
    rawSqlCols e = rawSqlCols e . from5
    rawSqlColCountReason = rawSqlColCountReason . from5
    rawSqlProcessRow = fmap to5 . rawSqlProcessRow

from5 :: (a,b,c,d,e) -> ((a,b),(c,d),e)
from5 (a,b,c,d,e) = ((a,b),(c,d),e)

to5 :: ((a,b),(c,d),e) -> (a,b,c,d,e)
to5 ((a,b),(c,d),e) = (a,b,c,d,e)

instance (RawSql a, RawSql b, RawSql c,
          RawSql d, RawSql e, RawSql f)
       => RawSql (a, b, c, d, e, f) where
    rawSqlCols e = rawSqlCols e . from6
    rawSqlColCountReason = rawSqlColCountReason . from6
    rawSqlProcessRow = fmap to6 . rawSqlProcessRow

from6 :: (a,b,c,d,e,f) -> ((a,b),(c,d),(e,f))
from6 (a,b,c,d,e,f) = ((a,b),(c,d),(e,f))

to6 :: ((a,b),(c,d),(e,f)) -> (a,b,c,d,e,f)
to6 ((a,b),(c,d),(e,f)) = (a,b,c,d,e,f)

instance (RawSql a, RawSql b, RawSql c,
          RawSql d, RawSql e, RawSql f,
          RawSql g)
       => RawSql (a, b, c, d, e, f, g) where
    rawSqlCols e = rawSqlCols e . from7
    rawSqlColCountReason = rawSqlColCountReason . from7
    rawSqlProcessRow = fmap to7 . rawSqlProcessRow

from7 :: (a,b,c,d,e,f,g) -> ((a,b),(c,d),(e,f),g)
from7 (a,b,c,d,e,f,g) = ((a,b),(c,d),(e,f),g)

to7 :: ((a,b),(c,d),(e,f),g) -> (a,b,c,d,e,f,g)
to7 ((a,b),(c,d),(e,f),g) = (a,b,c,d,e,f,g)

instance (RawSql a, RawSql b, RawSql c,
          RawSql d, RawSql e, RawSql f,
          RawSql g, RawSql h)
       => RawSql (a, b, c, d, e, f, g, h) where
    rawSqlCols e = rawSqlCols e . from8
    rawSqlColCountReason = rawSqlColCountReason . from8
    rawSqlProcessRow = fmap to8 . rawSqlProcessRow

from8 :: (a,b,c,d,e,f,g,h) -> ((a,b),(c,d),(e,f),(g,h))
from8 (a,b,c,d,e,f,g,h) = ((a,b),(c,d),(e,f),(g,h))

to8 :: ((a,b),(c,d),(e,f),(g,h)) -> (a,b,c,d,e,f,g,h)
to8 ((a,b),(c,d),(e,f),(g,h)) = (a,b,c,d,e,f,g,h)
-- | Extract the value from a 'Just'.  Applying this to 'Nothing' is a
-- bug; it is used only internally by the @RawSql (Maybe a)@ machinery,
-- where the argument is never inspected for its value.
extractMaybe :: Maybe a -> a
extractMaybe (Just x) = x
extractMaybe Nothing  = error "Database.Persist.GenericSql.extractMaybe"
-- | Types that know which SQL column type ('SqlType') stores them.
class PersistField a => PersistFieldSql a where
    sqlType :: Proxy a -> SqlType

#ifndef NO_OVERLAP
-- | 'String' is stored as a string column.  This instance overlaps the
-- generic @[a]@ instance below, hence the @NO_OVERLAP@ escape hatch.
instance PersistFieldSql String where
    sqlType _ = SqlString
#endif
instance PersistFieldSql ByteString where
    sqlType _ = SqlBlob
instance PersistFieldSql T.Text where
    sqlType _ = SqlString
instance PersistFieldSql TL.Text where
    sqlType _ = SqlString
instance PersistFieldSql Html where
    sqlType _ = SqlString
instance PersistFieldSql Int where
    -- Pick the column width from the platform word size.
    -- NOTE(review): 'bitSize' is deprecated in newer base releases in
    -- favour of 'finiteBitSize'; kept as-is here for compatibility.
    sqlType _
        | bitSize (0 :: Int) <= 32 = SqlInt32
        | otherwise = SqlInt64
instance PersistFieldSql Int8 where
    sqlType _ = SqlInt32
instance PersistFieldSql Int16 where
    sqlType _ = SqlInt32
instance PersistFieldSql Int32 where
    sqlType _ = SqlInt32
instance PersistFieldSql Int64 where
    sqlType _ = SqlInt64
-- Unsigned types are stored in the next larger *signed* SQL column.
-- NOTE(review): 'Word' and 'Word64' map to 'SqlInt64', so values above
-- 2^63-1 would not round-trip -- confirm against backend behaviour.
instance PersistFieldSql Word where
    sqlType _ = SqlInt64
instance PersistFieldSql Word8 where
    sqlType _ = SqlInt32
instance PersistFieldSql Word16 where
    sqlType _ = SqlInt32
instance PersistFieldSql Word32 where
    sqlType _ = SqlInt64
instance PersistFieldSql Word64 where
    sqlType _ = SqlInt64
instance PersistFieldSql Double where
    sqlType _ = SqlReal
instance PersistFieldSql Bool where
    sqlType _ = SqlBool
instance PersistFieldSql Day where
    sqlType _ = SqlDay
instance PersistFieldSql TimeOfDay where
    sqlType _ = SqlTime
instance PersistFieldSql UTCTime where
    sqlType _ = SqlDayTime
-- Collections and maps are serialised into a single string column.
instance PersistFieldSql a => PersistFieldSql [a] where
    sqlType _ = SqlString
instance PersistFieldSql a => PersistFieldSql (V.Vector a) where
    sqlType _ = SqlString
instance (Ord a, PersistFieldSql a) => PersistFieldSql (S.Set a) where
    sqlType _ = SqlString
instance (PersistFieldSql a, PersistFieldSql b) => PersistFieldSql (a,b) where
    sqlType _ = SqlString
instance PersistFieldSql v => PersistFieldSql (IM.IntMap v) where
    sqlType _ = SqlString
instance PersistFieldSql v => PersistFieldSql (M.Map T.Text v) where
    sqlType _ = SqlString
instance PersistFieldSql PersistValue where
    sqlType _ = SqlInt64 -- since PersistValue should only be used like this for keys, which in SQL are Int64
instance PersistFieldSql Checkmark where
    sqlType _ = SqlBool
instance (HasResolution a) => PersistFieldSql (Fixed a) where
    sqlType a =
        SqlNumeric long prec
      where
        -- Decimal digits implied by the resolution of the phantom type.
        prec = round $ (log $ fromIntegral $ resolution n) / (log 10 :: Double) --  FIXME: May lead to problems with big numbers
        long = prec + 10 --  FIXME: Is this enough ?
        n = 0
        -- Never used at run time; only ties the type of @n@ to the
        -- 'Proxy' argument so 'resolution' picks the right instance.
        _mn = return n `asTypeOf` a

instance PersistFieldSql Rational where
    sqlType _ = SqlNumeric 32 20 -- need to make this field big enough to handle Rational to Number string conversion for ODBC

-- An embedded Entity, serialised into a string column.
instance (PersistField record, PersistEntity record) => PersistFieldSql (Entity record) where
    sqlType _ = SqlString
| nakaji-dayo/persistent | persistent/Database/Persist/Sql/Class.hs | mit | 10,685 | 0 | 17 | 2,740 | 3,731 | 2,108 | 1,623 | 226 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Database.Persist.Sql.Types.Internal
( HasPersistBackend (..)
, IsPersistBackend (..)
, SqlReadBackend (unSqlReadBackend)
, SqlWriteBackend (unSqlWriteBackend)
, readToUnknown
, readToWrite
, writeToUnknown
, LogFunc
, InsertSqlResult (..)
, Statement (..)
, SqlBackend (..)
, SqlBackendCanRead
, SqlBackendCanWrite
, SqlReadT
, SqlWriteT
, IsSqlBackend
) where
import Control.Monad.IO.Class (MonadIO (..))
import Control.Monad.Logger (LogSource, LogLevel)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Reader (ReaderT, runReaderT, ask)
import Data.Acquire (Acquire)
import Data.Conduit (Source)
import Data.Int (Int64)
import Data.IORef (IORef)
import Data.Map (Map)
import Data.Text (Text)
import Data.Typeable (Typeable)
import Database.Persist.Class
( HasPersistBackend (..)
, PersistQueryRead, PersistQueryWrite
, PersistStoreRead, PersistStoreWrite
, PersistUniqueRead, PersistUniqueWrite
)
import Database.Persist.Class.PersistStore (IsPersistBackend (..))
import Database.Persist.Types
import Language.Haskell.TH.Syntax (Loc)
import System.Log.FastLogger (LogStr)
-- | The type of a logging callback: location, source, level, message.
type LogFunc = Loc -> LogSource -> LogLevel -> LogStr -> IO ()

-- | How a backend reports the key(s) of freshly inserted rows:
-- a single statement, an insert followed by a "get last id" query,
-- or one statement returning many keys.
data InsertSqlResult = ISRSingle Text
                     | ISRInsertGet Text Text
                     | ISRManyKeys Text [PersistValue]

-- | A prepared statement together with its lifecycle operations.
data Statement = Statement
    { stmtFinalize :: IO ()  -- ^ Release the prepared statement.
    , stmtReset :: IO ()     -- ^ Reset it so that it can be executed again.
    , stmtExecute :: [PersistValue] -> IO Int64
      -- ^ Execute with the given parameters; yields an 'Int64' result
      -- (per the type -- presumably the affected row count; confirm
      -- against the individual backends).
    , stmtQuery :: forall m. MonadIO m
                => [PersistValue]
                -> Acquire (Source m [PersistValue])
      -- ^ Run as a query, streaming the result rows.
    }
-- | A raw SQL connection plus the per-backend SQL generation hooks.
data SqlBackend = SqlBackend
    { connPrepare :: Text -> IO Statement
      -- ^ Prepare a statement from SQL text.
    -- | table name, column names, id name, either 1 or 2 statements to run
    , connInsertSql :: EntityDef -> [PersistValue] -> InsertSqlResult
    , connInsertManySql :: Maybe (EntityDef -> [[PersistValue]] -> InsertSqlResult) -- ^ SQL for inserting many rows and returning their primary keys, for backends that support this functionality. If 'Nothing', rows will be inserted one-at-a-time using 'connInsertSql'.
    , connUpsertSql :: Maybe (EntityDef -> Text -> Text)
    , connStmtMap :: IORef (Map Text Statement)
      -- ^ Cache of prepared statements, keyed by their SQL text.
    , connClose :: IO ()
    , connMigrateSql
        :: [EntityDef]
        -> (Text -> IO Statement)
        -> EntityDef
        -> IO (Either [Text] [(Bool, Text)])
    , connBegin :: (Text -> IO Statement) -> IO ()
    , connCommit :: (Text -> IO Statement) -> IO ()
    , connRollback :: (Text -> IO Statement) -> IO ()
    , connEscapeName :: DBName -> Text
      -- ^ Quote/escape a database identifier for this backend.
    , connNoLimit :: Text
    , connRDBMS :: Text
    , connLimitOffset :: (Int,Int) -> Bool -> Text -> Text
    , connLogFunc :: LogFunc
    , connMaxParams :: Maybe Int
    -- ^ Some databases (probably only Sqlite) have a limit on how
    -- many question-mark parameters may be used in a statement
    --
    -- @since 2.6.1
    }
    deriving Typeable
-- | 'SqlBackend' is its own base backend.
instance HasPersistBackend SqlBackend where
    type BaseBackend SqlBackend = SqlBackend
    persistBackend = id
instance IsPersistBackend SqlBackend where
    mkPersistBackend = id

-- | An SQL backend which can only handle read queries
newtype SqlReadBackend = SqlReadBackend { unSqlReadBackend :: SqlBackend } deriving Typeable
instance HasPersistBackend SqlReadBackend where
    type BaseBackend SqlReadBackend = SqlBackend
    persistBackend = unSqlReadBackend
instance IsPersistBackend SqlReadBackend where
    mkPersistBackend = SqlReadBackend

-- | An SQL backend which can handle read or write queries
newtype SqlWriteBackend = SqlWriteBackend { unSqlWriteBackend :: SqlBackend } deriving Typeable
instance HasPersistBackend SqlWriteBackend where
    type BaseBackend SqlWriteBackend = SqlBackend
    persistBackend = unSqlWriteBackend
instance IsPersistBackend SqlWriteBackend where
    mkPersistBackend = SqlWriteBackend
-- | Run a write query against an untagged backend with unknown
-- capabilities by wrapping it in the write tag.
writeToUnknown :: Monad m => ReaderT SqlWriteBackend m a -> ReaderT SqlBackend m a
writeToUnknown ma = ask >>= lift . runReaderT ma . SqlWriteBackend
-- | Run a read query against a backend that has both read and write
-- capabilities by unwrapping the write tag and re-tagging as read.
readToWrite :: Monad m => ReaderT SqlReadBackend m a -> ReaderT SqlWriteBackend m a
readToWrite ma =
    ask >>= lift . runReaderT ma . SqlReadBackend . unSqlWriteBackend
-- | Run a read query against an untagged backend with unknown
-- capabilities by wrapping it in the read tag.
readToUnknown :: Monad m => ReaderT SqlReadBackend m a -> ReaderT SqlBackend m a
readToUnknown ma = ask >>= lift . runReaderT ma . SqlReadBackend
-- | A constraint synonym which witnesses that a backend is SQL and can run read queries.
type SqlBackendCanRead backend =
    ( IsSqlBackend backend
    , PersistQueryRead backend, PersistStoreRead backend, PersistUniqueRead backend
    )

-- | A constraint synonym which witnesses that a backend is SQL and can run read and write queries.
-- Write capability implies read capability.
type SqlBackendCanWrite backend =
    ( SqlBackendCanRead backend
    , PersistQueryWrite backend, PersistStoreWrite backend, PersistUniqueWrite backend
    )

-- | Like @SqlPersistT@ but compatible with any SQL backend which can handle read queries.
type SqlReadT m a = forall backend. (SqlBackendCanRead backend) => ReaderT backend m a

-- | Like @SqlPersistT@ but compatible with any SQL backend which can handle read and write queries.
type SqlWriteT m a = forall backend. (SqlBackendCanWrite backend) => ReaderT backend m a

-- | A backend which is a wrapper around @SqlBackend@.
type IsSqlBackend backend = (IsPersistBackend backend, BaseBackend backend ~ SqlBackend)
| psibi/persistent | persistent/Database/Persist/Sql/Types/Internal.hs | mit | 5,849 | 0 | 16 | 1,136 | 1,231 | 709 | 522 | 118 | 1 |
module Control.Stud_Aufg.DB where
-- $Id$
import Control.SQL
import Control.Types hiding ( ok )
import Control.Stud_Aufg.Typ
import qualified Control.Exception as CE
import Prelude hiding ( all )
-- | Insert a blank stud_aufg row (zero OKs, zero NOs) for the given
-- student and exercise, then read it back.
--
-- NOTE(review): the 'Either' result of 'try' is ignored, so a failed
-- INSERT (e.g. when the row already exists) is silently tolerated; the
-- subsequent irrefutable match @[ sauf ]@ then crashes with a
-- pattern-match failure if exactly one row cannot be fetched.
put_blank :: SNr -> ANr -> IO Stud_Aufg
put_blank snr anr = do
    conn <- myconnect
    let common = [ ( reed "SNr", toEx $ snr )
                 , ( reed "ANr", toEx $ anr )
                 , ( reed "OK", toEx $ Oks 0 )
                 , ( reed "NO", toEx $ Nos 0 )
                 ]
    try $ squery conn $ Query
        ( Insert (reed "stud_aufg") common )
        [ ]
    disconnect conn
    [ sauf ] <- get_snr_anr snr anr
    return sauf
-- | Local alias for 'CE.try' pinned to 'CE.SomeException', so call
-- sites need no type annotation.
try :: IO a -> IO (Either CE.SomeException a)
try = CE.try
-- | All submissions for this exercise.
get_anr :: ANr -> IO [ Stud_Aufg ]
get_anr anr =
    get_where [ equals ( reed "stud_aufg.ANr" ) ( toEx anr ) ]

-- | All submissions for this exercise by this student.
get_snr_anr :: SNr -> ANr -> IO [ Stud_Aufg ]
get_snr_anr snr anr =
    get_where [ equals ( reed "stud_aufg.ANr" ) ( toEx anr )
              , equals ( reed "stud_aufg.SNr" ) ( toEx snr )
              ]

-- | Fetch all stud_aufg rows matching the conjunction of the given
-- WHERE expressions, decoding each row into a 'Stud_Aufg' record.
get_where :: [ Expression ] -> IO [ Stud_Aufg ]
get_where wh = do
    conn <- myconnect
    stat <- squery conn $ Query
        ( Select $ map reed [ "SNr", "ANr", "Ok", "No"
                            , "Instant", "Result", "Input", "Report"
                            ] )
        [ From $ map reed [ "stud_aufg" ]
        , Where $ ands wh
        ]
    inh <- collectRows (\ state -> do
        s_snr <- getFieldValue state "SNr"
        s_anr <- getFieldValue state "ANr"
        s_ok <- getFieldValue state "Ok"
        s_no <- getFieldValue state "No"
        -- The remaining columns are nullable, hence the MB variants.
        s_instant <- getFieldValueMB state "Instant"
        s_result <- getFieldValueMB state "Result"
        s_input <- getFieldValueMB state "Input"
        s_report <- getFieldValueMB state "Report"
        return $ Stud_Aufg { snr = s_snr
                           , anr = s_anr
                           , ok = s_ok
                           , no = s_no
                           , instant = s_instant
                           , result = s_result
                           , input = s_input
                           , report = s_report
                           }
        ) stat
    disconnect conn
    return inh
| florianpilz/autotool | src/Control/Stud_Aufg/DB.hs | gpl-2.0 | 2,072 | 21 | 16 | 644 | 671 | 351 | 320 | 56 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fwarn-unused-imports #-}
-----------------------------------------------------------------------------
--
-- Module : IDE.Pane.Info
-- Copyright : (c) Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GNU-GPL
--
-- Maintainer : <maintainer at leksah.org>
-- Stability : provisional
-- Portability : portable
--
-- | The GUI stuff for infos
--
-------------------------------------------------------------------------------
module IDE.Pane.Info (
IDEInfo
, InfoState(..)
, showInfo
, setInfo
, setInfoStyle
, replayInfoHistory
, openDocu
) where
import Data.IORef
import Data.Typeable
import Data.Char (isAlphaNum)
import Network.URI (escapeURIString)
import IDE.Core.State
import IDE.SymbolNavigation
import IDE.Pane.SourceBuffer
import IDE.TextEditor (newDefaultBuffer, TextEditor(..), EditorView(..))
import IDE.Utils.GUIUtils (getDarkState, openBrowser, __)
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad.Reader.Class (MonadReader(..))
import Graphics.UI.Gtk
(widgetHide, widgetShowAll, menuShellAppend,
menuItemActivate, menuItemNewWithLabel, containerGetChildren, Menu,
scrolledWindowSetPolicy, castToWidget, ScrolledWindow)
import Graphics.UI.Gtk.General.Enums (PolicyType(..))
import System.Glib.Signals (on)
import Control.Monad (unless, void, when)
import Data.Foldable (forM_)
import qualified Data.Text as T (unpack, pack, null)
import Data.Monoid ((<>))
-- | An info pane description
--
-- The editor implementation is hidden behind an existential so the
-- pane works with any 'TextEditor' backend.
data IDEInfo = forall editor. TextEditor editor => IDEInfo {
    sw              ::   ScrolledWindow          -- ^ scrollable container widget
,   currentDescr    ::   IORef (Maybe Descr)     -- ^ currently displayed description, if any
,   descriptionView ::   EditorView editor       -- ^ read-only text view showing the description
} deriving Typeable

-- | Serialisable pane state: just the current description.
data InfoState              =   InfoState (Maybe Descr)
    deriving(Eq,Ord,Read,Show,Typeable)

instance Pane IDEInfo IDEM
    where
    primPaneName _  =   __ "Info"
    getAddedIndex _ =   0
    getTopWidget    =   castToWidget . sw
    paneId b        =   "*Info"
instance RecoverablePane IDEInfo InfoState IDEM where
    -- Persist only the current description.
    saveState p     =   do
        currentDescr' <- liftIO $ readIORef (currentDescr p)
        return (Just (InfoState currentDescr'))
    recoverState pp (InfoState descr) =   do
        nb      <-  getNotebook pp
        buildPane pp nb builder
    -- Build the pane widgets: a styled, read-only editor view inside a
    -- scrolled window, with hyperlink navigation and a context menu.
    builder pp nb windows =
        let idDescr = Nothing in do
        prefs <- readIDE prefs
        ideR <- ask
        descriptionBuffer <- newDefaultBuffer Nothing ""
        descriptionView   <- newView descriptionBuffer (textviewFont prefs)
        preferDark <- getDarkState
        setStyle preferDark descriptionBuffer $ case sourceStyle prefs of
            (False,_) -> Nothing
            (True,v) -> Just v
        sw <- getScrolledWindow descriptionView
        -- Clicking an identifier in the description triggers a lookup.
        createHyperLinkSupport descriptionView sw (\_ _ iter -> do
            (beg, en) <- getIdentifierUnderCursorFromIter (iter, iter)
            return (beg, en)) (\_ shift' slice ->
                unless (T.null slice) $ do
                    -- liftIO$ print ("slice",slice)
                    triggerEventIDE (SelectInfo slice shift')
                    return ()
            )
        liftIO $ scrolledWindowSetPolicy sw PolicyAutomatic PolicyAutomatic
        --openType
        currentDescr' <- liftIO $ newIORef idDescr
        cids1 <- onPopulatePopup descriptionView $ \ menu -> do
            ideR <- ask
            liftIO $ populatePopupMenu ideR currentDescr' menu
        let info = IDEInfo sw currentDescr' descriptionView
        -- ids5 <- sv `onLookupInfo` selectInfo descriptionView -- obsolete by hyperlinks
        cids2 <- descriptionView `afterFocusIn` makeActive info
        return (Just info, cids1 ++ cids2)
-- | Fetch the info pane, creating it if necessary.
getInfo :: IDEM IDEInfo
getInfo = forceGetPane (Right "*Info")

-- | Bring the info pane to the front (without grabbing focus).
showInfo :: IDEAction
showInfo = getInfo >>= \pane -> displayPane pane False
-- | Jump to the definition of the currently displayed identifier;
-- reports a message when nothing is displayed.
gotoSource :: IDEAction
gotoSource = do
    mbInfo <- getInfoCont
    maybe noDefinition (void . goToDefinition) mbInfo
  where
    noDefinition = ideMessage Normal "gotoSource:noDefinition" >> return ()
-- | Select the module of the currently displayed identifier in the
-- module browser; a no-op when nothing is displayed.
gotoModule' :: IDEAction
gotoModule' = do
    mbInfo <- getInfoCont
    maybe (return ()) (void . triggerEventIDE . SelectIdent) mbInfo
-- | Show the given identifier description in the info pane and record
-- the selection change in the GUI history.
setInfo :: Descr -> IDEAction
setInfo identifierDescr = do
    info <- getInfo
    setInfo' info
    displayPane info False
  where
    setInfo' (info@IDEInfo{descriptionView = v}) = do
        oldDescr <- liftIO $ readIORef (currentDescr info)
        liftIO $ writeIORef (currentDescr info) (Just identifierDescr)
        tb <- getBuffer v
        setText tb (T.pack $ show (Present identifierDescr) ++ "\n") -- EOL for text iters to work
        recordInfoHistory (Just identifierDescr) oldDescr
-- | Re-apply the source style preferences (style file plus dark
-- variant) to the info pane's buffer; a no-op when the pane is absent.
setInfoStyle :: IDEAction
setInfoStyle = getPane >>= setInfoStyle'
  where
    setInfoStyle' Nothing = return ()
    setInfoStyle' (Just IDEInfo{..}) = do
        prefs      <- readIDE prefs
        preferDark <- getDarkState
        buffer     <- getBuffer descriptionView
        setStyle preferDark buffer $ case sourceStyle prefs of
            (False,_) -> Nothing
            (True,v) -> Just v
-- | The description currently shown in the info pane, or 'Nothing'
-- when the pane does not exist or shows nothing.
getInfoCont :: IDEM (Maybe Descr)
getInfoCont = do
    mbPane <- getPane
    maybe (return Nothing) (liftIO . readIORef . currentDescr) mbPane
-- * GUI History

-- | Push the selection change onto the GUI history, remembering both
-- the new and the previous description.
recordInfoHistory :: Maybe Descr -> Maybe Descr -> IDEAction
recordInfoHistory descr oldDescr =
    void $ triggerEventIDE $ RecordHistory
        (InfoElementSelected descr, InfoElementSelected oldDescr)

-- | Replay a history entry: re-display the recorded selection, if any.
replayInfoHistory :: Maybe Descr -> IDEAction
replayInfoHistory = maybe (return ()) setInfo
-- | Open the documentation search page for the currently displayed
-- identifier in the browser; a no-op when nothing is displayed.
openDocu :: IDEAction
openDocu = do
    mbDescr <- getInfoCont
    forM_ mbDescr $ \descr -> do
        prefs' <- readIDE prefs
        openBrowser $ docuSearchURL prefs'
            <> T.pack (escapeURIString isAlphaNum (T.unpack $ dscName descr))
-- | Extend the description view's context menu with the info-pane
-- actions (goto definition, select module, open documentation).
--
-- NOTE(review): the @currentDescr'@ parameter is not referenced in
-- this body -- confirm whether it can be dropped at the call site.
populatePopupMenu :: IDERef -> IORef (Maybe Descr) -> Menu -> IO ()
populatePopupMenu ideR currentDescr' menu = do
    items <- containerGetChildren menu
    item0 <- menuItemNewWithLabel (__ "Goto Definition")
    item0 `on` menuItemActivate $ reflectIDE gotoSource ideR
    item1 <- menuItemNewWithLabel (__ "Select Module")
    item1 `on` menuItemActivate $ reflectIDE gotoModule' ideR
    item2 <- menuItemNewWithLabel (__ "Open Documentation")
    item2 `on` menuItemActivate $ reflectIDE openDocu ideR
    menuShellAppend menu item0
    menuShellAppend menu item1
    menuShellAppend menu item2
    widgetShowAll menu
    -- Hide the last two of the menu items that were present before our
    -- entries were appended.
    mapM_ widgetHide $ take 2 (reverse items)
    return ()
| juhp/leksah | src/IDE/Pane/Info.hs | gpl-2.0 | 7,163 | 0 | 18 | 1,901 | 1,806 | 917 | 889 | 157 | 3 |
module Sortier.Netz.Check where
import Sortier.Netz.Type
import Sortier.Netz.Rechnung
import Sortier.Netz.Example
import Sortier.Netz.Bild
import Autolib.Util.Bild
import qualified Autolib.Util.Wort ( alle )
import Data.List ( tails)
import Data.Typeable
import Autolib.Reporter
import Autolib.ToDoc
import qualified Challenger as C
import Inter.Types
import Autolib.Size
-- | Check that the net sorts every input of width @soll@.  By the 0-1
-- principle it suffices to test the sequences produced by 'testing';
-- a failing input is rejected together with the net's computation
-- trace (user-facing messages are in German).
check :: Int -> Netz -> Reporter ()
check soll n = do
    let verify xs = do
          let xss = rechnung n xs
          when ( not $ is_increasing $ last xss )
               $ reject $ vcat
               [ text "Diese Eingabe wird nicht korrekt geordnet:"
               , nest 4 $ toDoc xs
               , text "Das Netz berechnet die Ausgabe:"
               , nest 4 $ toDoc $ last xss
               , text "Die Rechung des Netzes ist:"
               , nest 4 $ toDoc $ toBild ( n , xss )
               ]
    mapM_ verify $ testing soll
    inform $ text "Das Netz hat alle möglichen Eingaben korrekt geordnet."
    return ()
-- | By the 0-1 principle:
-- a net sorts all 0-1 sequences <=> it sorts all sequences whatsoever.
-- So we generate 0-1 sequences (because there are fewer of them), but
-- to keep the students from getting needlessly upset we convert each
-- one into a sequence of pairwise distinct numbers (see 'umrech').
testing :: Int -> [ State ]
testing soll = do
    w <- Autolib.Util.Wort.alle [ 0, 1 ] soll
    return $ umrech w
-- | Convert a 0-1 sequence into a number sequence with the same rises
-- and falls but pairwise distinct entries: the zeros become an
-- ascending run of small numbers, the ones an ascending run of large
-- numbers.
--
-- NOTE(review): only the digits 0 and 1 are matched; any other element
-- crashes the inner helper with a pattern-match failure.  All callers
-- in this module ('testing') only pass 0-1 sequences.
umrech :: [ Int ] -> [ Int ]
umrech w = um w 1 (1 + length w - sum w ) where
    um [] _ _ = []
    um (0 : xs) low high = low : um xs (succ low) high
    um (1 : xs) low high = high : um xs low (succ high)
-- | True iff the list is sorted in non-decreasing order.
-- Empty and singleton lists are trivially sorted.
is_increasing :: Ord a => [a] -> Bool
is_increasing xs = and $ zipWith (<=) xs (drop 1 xs)
-- | Marker type for the sorting-network exercise.
data Sortier = Sortier deriving ( Eq, Ord, Show, Read, Typeable )

instance C.Verify Sortier Int where
    -- The instance configuration is the net width; it must be positive.
    verify p i =
        assert ( i > 0)
        $ text "die Breite soll eine positive Zahl sein"

instance OrderScore Sortier where
    scoringOrder _ = Increasing

instance C.Partial Sortier Int Netz where
    describe p i = vcat
        [ text "Finden Sie ein Sortiernetz für"
          <+> toDoc i <+> text "Eingänge"
        , text "mit weniger als" <+> toDoc ( size $ bubble i )
          <+> text "Komparatoren."
        ]
    -- Bubble sort network as the (deliberately oversized) starting point.
    initial p i = bubble i
    -- Partial check: the submitted net must have the requested width.
    partial p i b = do
        inform $ text "Ihr Netz ist:" <+> toDoc ( toBild b )
        let ist = high b - low b + 1
        when ( i /= ist ) $ reject $ vcat
            [ text "Das Netz soll Breite" <+> toDoc i <+> text "haben"
            , text "es hat aber" <+> toDoc ist
            ]
    -- Total check: the net must sort, using strictly fewer comparators
    -- than bubble sort.
    total p i b = do
        check i b
        when ( size b >= size ( bubble i ) ) $ reject $ vcat
            [ text "Das sind zuviele Komparatoren."
            ]

make :: Make
make = direct Sortier (5 :: Int)
| florianpilz/autotool | src/Sortier/Netz/Check.hs | gpl-2.0 | 2,825 | 32 | 20 | 779 | 726 | 408 | 318 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.SES.VerifyEmailIdentity
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Verifies an email address. This action causes a confirmation email
-- message to be sent to the specified address.
--
-- This action is throttled at one request per second.
--
-- /See:/ <http://docs.aws.amazon.com/ses/latest/APIReference/API_VerifyEmailIdentity.html AWS API Reference> for VerifyEmailIdentity.
module Network.AWS.SES.VerifyEmailIdentity
(
-- * Creating a Request
verifyEmailIdentity
, VerifyEmailIdentity
-- * Request Lenses
, veiEmailAddress
-- * Destructuring the Response
, verifyEmailIdentityResponse
, VerifyEmailIdentityResponse
-- * Response Lenses
, veirsResponseStatus
) where
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
import Network.AWS.SES.Types
import Network.AWS.SES.Types.Product
-- | Represents a request instructing the service to begin email address
-- verification.
--
-- /See:/ 'verifyEmailIdentity' smart constructor.
--
-- (Generated binding; the module header says it is derived from the
-- AWS service description -- prefer regenerating over hand edits.)
newtype VerifyEmailIdentity = VerifyEmailIdentity'
    { _veiEmailAddress :: Text
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'VerifyEmailIdentity' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'veiEmailAddress'
verifyEmailIdentity
    :: Text -- ^ 'veiEmailAddress'
    -> VerifyEmailIdentity
verifyEmailIdentity pEmailAddress_ =
    VerifyEmailIdentity'
    { _veiEmailAddress = pEmailAddress_
    }

-- | The email address to be verified.
veiEmailAddress :: Lens' VerifyEmailIdentity Text
veiEmailAddress = lens _veiEmailAddress (\ s a -> s{_veiEmailAddress = a});

instance AWSRequest VerifyEmailIdentity where
        type Rs VerifyEmailIdentity =
             VerifyEmailIdentityResponse
        request = postQuery sES
        -- Only the HTTP status is retained from the response body.
        response
          = receiveXMLWrapper "VerifyEmailIdentityResult"
              (\ s h x ->
                 VerifyEmailIdentityResponse' <$> (pure (fromEnum s)))

instance ToHeaders VerifyEmailIdentity where
        toHeaders = const mempty

instance ToPath VerifyEmailIdentity where
        toPath = const "/"

instance ToQuery VerifyEmailIdentity where
        toQuery VerifyEmailIdentity'{..}
          = mconcat
              ["Action" =: ("VerifyEmailIdentity" :: ByteString),
               "Version" =: ("2010-12-01" :: ByteString),
               "EmailAddress" =: _veiEmailAddress]

-- | An empty element. Receiving this element indicates that the request
-- completed successfully.
--
-- /See:/ 'verifyEmailIdentityResponse' smart constructor.
newtype VerifyEmailIdentityResponse = VerifyEmailIdentityResponse'
    { _veirsResponseStatus :: Int
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'VerifyEmailIdentityResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'veirsResponseStatus'
verifyEmailIdentityResponse
    :: Int -- ^ 'veirsResponseStatus'
    -> VerifyEmailIdentityResponse
verifyEmailIdentityResponse pResponseStatus_ =
    VerifyEmailIdentityResponse'
    { _veirsResponseStatus = pResponseStatus_
    }

-- | The response status code.
veirsResponseStatus :: Lens' VerifyEmailIdentityResponse Int
veirsResponseStatus = lens _veirsResponseStatus (\ s a -> s{_veirsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-ses/gen/Network/AWS/SES/VerifyEmailIdentity.hs | mpl-2.0 | 4,062 | 0 | 13 | 797 | 471 | 290 | 181 | 61 | 1 |
{-# LANGUAGE TemplateHaskell #-}
--------------------------------------------------------------------------------
-- |
-- Module : Data.Comp.Multi.Derive.HTraversable
-- Copyright : (c) 2011 Patrick Bahr
-- License : BSD3
-- Maintainer : Patrick Bahr <paba@diku.dk>
-- Stability : experimental
-- Portability : non-portable (GHC Extensions)
--
-- Automatically derive instances of @HTraversable@.
--
--------------------------------------------------------------------------------
module Data.Comp.Multi.Derive.HTraversable
(
HTraversable,
makeHTraversable
) where
import Control.Applicative
import Control.Monad hiding (mapM, sequence)
import Data.Comp.Derive.Utils
import Data.Comp.Multi.HTraversable
import Data.Foldable hiding (any, or)
import Data.Maybe
import Data.Traversable
import Language.Haskell.TH
import Prelude hiding (foldl, foldr, mapM, sequence)
import qualified Prelude as P (foldl, foldr, mapM)
-- | @iter n f e@ builds the Template Haskell application
-- @f (f ... (f e))@ with @f@ applied @n@ times via 'appE'.  Used below
-- to nest 'traverse'\/'mapM' once per functor-argument depth.
-- (Adds the previously missing top-level type signature; all call
-- sites in this module use it at this type.)
iter :: Int -> ExpQ -> ExpQ -> ExpQ
iter 0 _ e = e
iter n f e = iter (n-1) f (f `appE` e)
{-| Derive an instance of 'HTraversable' for a type constructor of any
  higher-order kind taking at least two arguments. -}
makeHTraversable :: Name -> Q [Dec]
makeHTraversable fname = do
  -- Reify the data type; the last type argument is the functor
  -- parameter, the one before it the index.
  TyConI (DataD _cxt name args constrs _deriving) <- abstractNewtypeQ $ reify fname
  let args' = init args
      fArg = VarT . tyVarBndrName $ last args'
      argNames = map (VarT . tyVarBndrName) (init args')
      complType = foldl AppT (ConT name) argNames
      classType = AppT (ConT ''HTraversable) complType
  -- For each constructor, record at which depth (if any) each field
  -- mentions the functor argument.
  constrs' <- P.mapM (mkPatAndVars . isFarg fArg <=< normalConExp) constrs
  traverseDecl <- funD 'htraverse (map traverseClause constrs')
  mapMDecl <- funD 'hmapM (map mapMClause constrs')
  return [InstanceD [] classType [traverseDecl, mapMDecl]]
      where isFarg fArg (constr, args) = (constr, map (`containsType'` fArg) args)
            filterVar _ nonFarg [] x  = nonFarg x
            filterVar farg _ [depth] x = farg depth x
            filterVar _ _ _ _ = error "functor variable occurring twice in argument type"
            filterVars args varNs farg nonFarg = zipWith (filterVar farg nonFarg) args varNs
            mkCPat constr varNs = ConP constr $ map mkPat varNs
            mkPat = VarP
            -- Produce, per constructor: its expression, a match
            -- pattern, a combinator discriminating functorial vs plain
            -- fields, whether any functorial field exists, all field
            -- variables, and the functorial ones with their depths.
            mkPatAndVars (constr, args) =
              do varNs <- newNames (length args) "x"
                 return (conE constr, mkCPat constr varNs,
                         \f g -> filterVars args varNs (\ d x -> f d (varE x)) (g . varE),
                         any (not . null) args, map varE varNs, catMaybes $ filterVars args varNs (curry Just) (const Nothing))
            traverseClause (con, pat,vars',hasFargs,_,_) =
              do fn <- newName "f"
                 let f = varE fn
                     fp = if hasFargs then VarP fn else WildP
                     -- Nest 'traverse' once per depth; plain fields are
                     -- lifted with 'pure'.
                     vars = vars' (\d x -> iter d [|traverse|] f `appE` x) (\x -> [|pure $x|])
                 body <- P.foldl (\ x y -> [|$x <*> $y|]) [|pure $con|] vars
                 return $ Clause [fp, pat] (NormalB body) []
            -- Note: the monadic versions are not defined
            -- applicatively, as this results in a considerable
            -- performance penalty (by factor 2)!
            mapMClause (con, pat,_,hasFargs,allVars, fvars) =
              do fn <- newName "f"
                 let f = varE fn
                     fp = if hasFargs then VarP fn else WildP
                     conAp = P.foldl appE con allVars
                     conBind (d,x) y = [| $(iter d [|mapM|] f) $(varE x) >>= $(lamE [varP x] y)|]
                 body <- P.foldr conBind [|return $conAp|] fvars
                 return $ Clause [fp, pat] (NormalB body) []
| spacekitteh/compdata | src/Data/Comp/Multi/Derive/HTraversable.hs | bsd-3-clause | 3,698 | 4 | 18 | 1,058 | 1,042 | 570 | 472 | -1 | -1 |
{-# OPTIONS -Wall #-}
import Language.Paraiso.Annotation
-- | Minimal smoke-test executable: prints a greeting.
main :: IO ()
main = putStrLn "hi"
| nushio3/Paraiso | attic/TestAnnotation.hs | bsd-3-clause | 100 | 0 | 7 | 20 | 29 | 15 | 14 | 5 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DataKinds, PolyKinds #-}
module Main where
import Data.Kind
import Language.Haskell.TH
-- Identity type synonym with an implicit kind.
type Id1 a = a
-- Identity type synonym with an explicit kind parameter.
type Id2 k (a :: k) = a
-- Proxies whose kind annotations go through the synonyms above;
-- presumably a GHC testsuite case exercising 'reify' on such kinds
-- (NOTE(review): filename suggests ticket #11463 — confirm).
data Proxy1 (a :: Id1 k) = Proxy1
data Proxy2 (a :: Id2 * k) = Proxy2
-- Empty splice: closes the declaration group so the data declarations
-- above are visible to the 'reify' calls in the splices below.
$(return [])
main :: IO ()
main = do
  putStrLn $(reify ''Proxy1 >>= stringE . pprint)
  putStrLn $(reify ''Proxy2 >>= stringE . pprint)
| ghcjs/ghcjs | test/ghc/th/t11463.hs | mit | 394 | 0 | 13 | 88 | 147 | 81 | 66 | 14 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DeriveFunctor #-}
-- TODO:
-- better interface
-- have error messages in the right order
-- have a message for plain failures as well / remove failure in recoveries
-- Optimize profile info (no more Ints)
module Parser.Incremental (Process,
recoverWith, symbol, eof, lookNext, testNext, run,
mkProcess, profile, pushSyms, pushEof, evalL, evalR, feedZ,
Parser(Look, Enter, Yuck), countWidth, fullLog, LogEntry(..),
evalL'
) where
import Control.Arrow (first, second, (***))
import Control.Applicative (Alternative ((<|>), empty))
import Data.Tree (Tree (Node))
-- | A heterogeneous stack: a value of type @a@ on top of a rest of type
-- @b@.  Types the value stack produced by a parsing process.
data a :< b = (:<) {top :: a, _rest :: b}
infixr :<
-- | Parser specification
data Parser s a where
    -- | Produce a value without consuming input.
    Pure :: a -> Parser s a
    -- | Applicative combination.
    Appl :: Parser s (b -> a) -> Parser s b -> Parser s a
    -- | Monadic sequencing.
    Bind :: Parser s a -> (a -> Parser s b) -> Parser s b
    -- | Peek: first parser is used at end of input, the function
    -- receives the next symbol otherwise (nothing is consumed).
    Look :: Parser s a -> (s -> Parser s a) -> Parser s a
    -- | Consume one symbol, then continue.
    Shif :: Parser s a -> Parser s a
    -- | Failure.
    Empt :: Parser s a
    -- | Disjunction (both branches explored, best one wins).
    Disj :: Parser s a -> Parser s a -> Parser s a
    -- | Penalise this branch with one "dislike" (used for recovery).
    Yuck :: Parser s a -> Parser s a
    -- | Attach an error/log message to this branch.
    Enter :: String -> Parser s a -> Parser s a
-- | Parser process
data Steps s a where
    -- | Push a value onto the result stack.
    Val :: a -> Steps s r -> Steps s (a :< r)
    -- | Apply the top of the stack to the element below it.
    App :: Steps s ((b -> a) :< (b :< r)) -> Steps s (a :< r)
    -- | Finished process.
    Done :: Steps s ()
    -- | One symbol of input has been consumed.
    Shift :: Steps s a -> Steps s a
    -- | Pending shift; always hidden behind a 'Sus' by construction.
    Sh' :: Steps s a -> Steps s a
    -- | Suspension: continue with the first process at end of input,
    -- otherwise apply the function to the next symbol.
    Sus :: Steps s a -> (s -> Steps s a) -> Steps s a
    -- | Disjunction with the cached comparison of the two branches.
    Best :: Ordering -> Profile -> Steps s a -> Steps s a -> Steps s a
    -- | One accumulated dislike on this branch.
    Dislike :: Steps s a -> Steps s a
    -- | An attached error/log message.
    Log :: String -> Steps s a -> Steps s a
    -- | Failed process.
    Fail :: Steps s a
-- profile !! s = number of Dislikes found to do s Shifts
-- PSusp: comparison hit a suspension; PFail: the branch fails;
-- PRes n: the branch terminates with n dislikes.
data ProfileF a = PSusp | PFail | PRes a | !a :> ProfileF a
    deriving (Show, Functor)
type Profile = ProfileF Int
-- | Maximum dislike difference we accept at a given lookahead depth.
-- Looking further makes us more willing to discard small differences;
-- it is essential that the result drops below zero as the argument
-- grows, so that disliked branches can be pruned with finite lookahead.
dislikeThreshold :: Int -> Int
dislikeThreshold lookahead =
    if lookahead < 5
       then 0
       else -1 -- 5 tokens of lookahead and still no winner: pick arbitrarily.
-- | Compute the combination of two profiles, as well as which one is the best.
-- The 'Ordering' says which argument to keep: LT = left, GT = right,
-- EQ = undecided (suspension or genuine tie).
better :: Int -> Profile -> Profile -> (Ordering, Profile)
better _ PFail p = (GT, p) -- avoid failure
better _ p PFail = (LT, p)
better _ PSusp _ = (EQ, PSusp) -- could not decide before suspension => leave undecided.
better _ _ PSusp = (EQ, PSusp)
better _ (PRes x) (PRes y) = if x <= y then (LT, PRes x) else (GT, PRes y) -- two results, just pick the best.
-- A finished result against a still-shifting branch: keep the result if
-- it is perfect (0 dislikes) or clearly ahead; otherwise keep comparing.
better lk xs@(PRes x) (y:>ys) = if x == 0 || y-x > dislikeThreshold lk then (LT, xs) else min x y +> better (lk+1) xs ys
better lk (y:>ys) xs@(PRes x) = if x == 0 || y-x > dislikeThreshold lk then (GT, xs) else min x y +> better (lk+1) ys xs
better lk (x:>xs) (y:>ys)
    | x == 0 && y == 0 = recur -- never drop things with no error: this ensures to find a correct parse if it exists.
    | x - y > threshold = (GT, y:>ys)
    | y - x > threshold = (LT, x:>xs) -- if at any point something is too disliked, drop it.
    | otherwise = recur
    where threshold = dislikeThreshold lk
          recur = min x y +> better (lk + 1) xs ys
-- | Cons a dislike count onto the profile part of a comparison result.
-- The irrefutable pattern keeps the comparison lazy, so profiles stay
-- productive even when the decision is not yet forced.
(+>) :: Int -> (t, Profile) -> (t, Profile)
x +> ~(ordering, xs) = (ordering, x :> xs)
-- | Node labels for the debugging tree produced by 'rightLog'.
data LogEntry = LLog String | LEmpty | LDislike | LShift
              | LDone | LFail | LSusp | LS String
    deriving Show
-- | Render the (right-hand) process as a tree of log entries, for
-- debugging: only the structurally interesting nodes get a label.
rightLog :: Steps s r -> Tree LogEntry
rightLog step = case step of
    Val _ p      -> rightLog p
    App p        -> rightLog p
    Shift p      -> Node LShift [rightLog p]
    Done         -> Node LDone []
    Fail         -> Node LFail []
    Dislike p    -> Node LDislike [rightLog p]
    Log msg p    -> Node (LLog msg) [rightLog p]
    Sus _ _      -> Node LSusp []
    Best _ _ l r -> Node LEmpty [rightLog l, rightLog r]
    Sh' _        -> error "Sh' should be hidden by Sus"
-- | Summarise a process into its 'Profile': how many dislikes were
-- accumulated for each number of shifts.  'Best' nodes already cache
-- their combined profile, so they are returned directly.
profile :: Steps s r -> Profile
profile step = case step of
    Val _ p       -> profile p
    App p         -> profile p
    Shift p       -> 0 :> profile p
    Done          -> PRes 0 -- success with zero dislikes
    Fail          -> PFail
    Dislike p     -> fmap succ (profile p)
    Log _ p       -> profile p
    Sus _ _       -> PSusp
    Best _ pr _ _ -> pr
    Sh' _         -> error "Sh' should be hidden by Sus"
-- Compact one-character-per-node rendering of a process, for debugging.
instance Show (Steps s r) where
    show (Val _ p) = 'v' : show p
    show (App p) = '*' : show p
    show (Done) = "1"
    show (Shift p) = '>' : show p
    show (Sh' p) = '\'' : show p
    show (Dislike p) = '?' : show p
    show (Log msg p) = "[" ++ msg ++ "]" ++ show p
    show (Fail) = "0"
    show (Sus _ _) = "..."
    show (Best _ _ p q) = "(" ++ show p ++ ")" ++ show q
-- | Number of alternative branches still alive in the right-hand side
-- of the zipper (each 'Best' doubles, every leaf counts as one).
countWidth :: Zip s r -> Int
countWidth (Zip _ _ rhs) = go rhs
  where
    go :: Steps s r -> Int
    go (Best _ _ p q) = go p + go q
    go (Val _ p)      = go p
    go (App p)        = go p
    go Done           = 1
    go (Shift p)      = go p
    go (Sh' p)        = go p
    go (Dislike p)    = go p
    go (Log _ p)      = go p
    go Fail           = 1
    go (Sus _ _)      = 1
-- Compact rendering of the reverse-polish automaton, for debugging.
instance Show (RPolish i o) where
    show (RPush _ p) = show p ++ "^"
    show (RApp p) = show p ++ "@"
    show (RStop) = "!"
-- | Apply the function on top of the stack to the value below it.
-- The irrefutable patterns keep evaluation lazy until the result is
-- actually demanded.
apply :: forall t t1 a. ((t -> a) :< (t :< t1)) -> a :< t1
apply ~(f:< ~(a:<r)) = f a :< r
-- | Right-eval a fully defined process (ie. one that has no Sus)
-- Returns the result stack together with the collected log messages.
-- Calls 'error' on 'Fail', on a remaining suspension, or on an
-- ambiguous ('EQ') 'Best' node.
evalR' :: Steps s r -> (r, [String])
evalR' Done = ((), [])
evalR' (Val a r) = first (a :<) (evalR' r)
evalR' (App s) = first apply (evalR' s)
evalR' (Shift v) = evalR' v
evalR' (Dislike v) = evalR' v
evalR' (Log err v) = second (err:) (evalR' v)
evalR' (Fail) = error "evalR: No parse!"
evalR' (Sus _ _) = error "evalR: Not fully evaluated!"
evalR' (Sh' _) = error "evalR: Sh' should be hidden by Sus"
evalR' (Best choice _ p q) = case choice of
    LT -> evalR' p
    GT -> evalR' q
    EQ -> error $ "evalR: Ambiguous parse: " ++ show p ++ " ~~~ " ++ show q
-- The standard class instances are all thin wrappers over the
-- 'Parser' constructors; the interesting work happens in 'toP'/'toQ'.
instance Functor (Parser s) where
    fmap f = (pure f <*>)
instance Applicative (Parser s) where
    (<*>) = Appl
    pure = Pure
instance Alternative (Parser s) where
    (<|>) = Disj
    empty = Empt
instance Monad (Parser s) where
    (>>=) = Bind
    return = pure
    -- Pattern-match failure in do-notation simply fails the parse.
    fail _message = Empt
-- CPS translation used under 'Bind': the continuation receives an
-- accumulated "heap" @h@ alongside the produced value, so sequencing
-- does not force the result stack shape prematurely.
toQ :: Parser s a -> forall h r. ((h,a) -> Steps s r) -> h -> Steps s r
toQ (Look a f) = \k h -> Sus (toQ a k h) (\s -> toQ (f s) k h)
toQ (p `Appl` q) = \k -> toQ p $ toQ q $ \((h, b2a), b) -> k (h, b2a b)
toQ (Pure a) = \k h -> k (h, a)
toQ (Disj p q) = \k h -> iBest (toQ p k h) (toQ q k h)
toQ (Bind p a2q) = \k -> toQ p (\(h,a) -> toQ (a2q a) k h)
toQ Empt = \_k _h -> Fail
toQ (Yuck p) = \k h -> Dislike $ toQ p k h
toQ (Enter err p) = \k h -> Log err $ toQ p k h
toQ (Shif p) = \k h -> Sh' $ toQ p k h
-- Translate a parser specification into a process, prepending its
-- output onto the given future process.  'Bind' falls back to the CPS
-- translation 'toQ'.  The SCC pragmas are for cost-centre profiling.
toP :: Parser s a -> forall r. Steps s r -> Steps s (a :< r)
toP (Look a f) = {-# SCC "toP_Look" #-} \fut -> Sus (toP a fut) (\s -> toP (f s) fut)
toP (Appl f x) = {-# SCC "toP_Appl" #-} App . toP f . toP x
toP (Pure x) = {-# SCC "toP_Pure" #-} Val x
toP Empt = {-# SCC "toP_Empt" #-} const Fail
toP (Disj a b) = {-# SCC "toP_Disj" #-} \fut -> iBest (toP a fut) (toP b fut)
toP (Bind p a2q) = {-# SCC "toP_Bind" #-} \fut -> toQ p (\(_,a) -> toP (a2q a) fut) ()
toP (Yuck p) = {-# SCC "toP_Yuck" #-} Dislike . toP p
toP (Enter err p) = {-# SCC "toP_Enter" #-} Log err . toP p
toP (Shif p) = {-# SCC "toP_Shif" #-} Sh' . toP p
-- | Intelligent, caching best.
-- The comparison of the two branches is stored (lazily, note the
-- irrefutable pattern) inside the 'Best' node so it is computed at
-- most once and only when demanded.
iBest :: Steps s a -> Steps s a -> Steps s a
iBest p q = let ~(choice, pr) = better 0 (profile p) (profile q) in Best choice pr p q
-- | Accept (and consume) a single symbol satisfying the predicate;
-- fails at end of input.
symbol :: forall s. (s -> Bool) -> Parser s s
symbol ok = Look empty check
  where check s | ok s      = Shif (pure s)
                | otherwise = empty
-- | Succeed exactly when no input remains (consumes nothing).
eof :: forall s. Parser s ()
eof = Look (pure ()) (\_ -> empty)
-- | Push a chunk of symbols or eof in the process. This forces some suspensions.
-- @Just syms@ feeds symbols (possibly none); @Nothing@ signals eof.
feed :: Maybe [s] -> Steps s r -> Steps s r
feed (Just []) p = p -- nothing more left to feed
feed ss p = case p of
                  (Sus nil cons) -> case ss of
                      -- NOTE(review): this alternative looks unreachable —
                      -- the first equation already returns on @Just []@.
                      Just [] -> p -- no more info, stop feeding
                      Nothing -> feed Nothing nil -- finish
                      Just (s:_) -> feed ss (cons s)
                  (Shift p') -> Shift (feed ss p')
                  -- A pending shift consumes one symbol from the chunk.
                  (Sh' p') -> Shift (feed (fmap (drop 1) ss) p')
                  (Dislike p') -> Dislike (feed ss p')
                  (Log err p') -> Log err (feed ss p')
                  (Val x p') -> Val x (feed ss p')
                  (App p') -> App (feed ss p')
                  Done -> Done
                  Fail -> Fail
                  -- Feeding may change the outcome, so the comparison is redone.
                  Best _ _ p' q' -> iBest (feed ss p') (feed ss q')
-- TODO: it would be nice to be able to reuse the profile here.
-- | 'feed' lifted to zippers: only the unevaluated right part changes.
feedZ :: Maybe [s] -> Zip s r -> Zip s r
feedZ x = onRight (feed x)
-- Move the zipper to right, and simplify if something is pushed in
-- the left part.
-- Evaluation stops at suspensions, failures and undecided ('EQ')
-- disjunctions, so no speculation happens.
evalL :: forall s output. Zip s output -> Zip s output
evalL (Zip errs0 l0 r0) = help errs0 l0 r0
    where
      help :: [String] -> RPolish mid output -> Steps s mid -> Zip s output
      help errs l rhs = case rhs of
          (Val a r) -> help errs (simplify (RPush a l)) r
          (App r) -> help errs (RApp l) r
          (Shift p) -> help errs l p
          -- Messages are collected most-recent-first here; 'fullLog' reverses.
          (Log err p) -> help (err:errs) l p
          (Dislike p) -> help errs l p
          (Best choice _ p q) -> case choice of
              LT -> help errs l p
              GT -> help errs l q
              EQ -> reZip errs l rhs -- don't know where to go: don't speculate on evaluating either branch.
          _ -> reZip errs l rhs
      -- Force the left automaton before rebuilding, so work already done
      -- is not retained as thunks.
      reZip :: [String] -> RPolish mid output -> Steps s mid -> Zip s output
      reZip errs l r = l `seq` Zip errs l r
-- | Simplify the right-hand side in place (resolving decided 'Best'
-- nodes) without moving anything into the left automaton.
evalL' :: Zip s output -> Zip s output
evalL' (Zip errs0 l0 r0) = Zip errs0 l0 (simplRhs r0)
  where simplRhs :: Steps s a ->Steps s a
        simplRhs rhs = case rhs of
            (Val a r) -> Val a (simplRhs r)
            (App r) -> App (simplRhs r)
            (Shift p) -> Shift (simplRhs p)
            (Log err p) -> Log err $ simplRhs p
            (Dislike p) -> Dislike $ simplRhs p
            (Best choice _ p q) -> case choice of
                LT -> simplRhs p
                GT -> simplRhs q
                -- Still undecided: keep both, but simplify each side.
                EQ -> iBest (simplRhs p) (simplRhs q)
            x -> x
-- | Push some symbols into the process.
pushSyms :: forall s r. [s] -> Zip s r -> Zip s r
pushSyms syms = feedZ (Just syms)
-- | Push eof (signal the end of the input).
pushEof :: forall s r. Zip s r -> Zip s r
pushEof = feedZ Nothing
-- | Make a parser into a process.
mkProcess :: forall s a. Parser s a -> Process s a
mkProcess parser = Zip [] RStop polish
  where polish = toP parser Done
-- | Run a process (in case you do not need the incremental interface)
run :: Process s a -> [s] -> (a, [String])
run process input = evalR (pushEof (pushSyms input process))
-- | Succeed iff the predicate holds of the next symbol ('Nothing' at
-- end of input); nothing is consumed.
testNext :: (Maybe s -> Bool) -> Parser s ()
testNext p = Look (accept Nothing) (accept . Just)
  where accept s | p s       = pure ()
                 | otherwise = empty
-- | Peek at the next symbol ('Nothing' at end of input) without
-- consuming it.
lookNext :: Parser s (Maybe s)
lookNext = Look (pure Nothing) (\s -> pure (Just s))
-- | Parse the same thing as the argument, but will be used only as
-- backup. ie, it will be used only if disjuncted with a failing
-- parser.
recoverWith :: Parser s a -> Parser s a
recoverWith p = Enter "recoverWith" (Yuck p)
----------------------------------------------------
--------------------------------
-- The zipper for efficient evaluation:
-- Arbitrary expressions in Reverse Polish notation.
-- This can also be seen as an automaton that transforms a stack.
-- RPolish is indexed by the types in the stack consumed by the automaton (input),
-- and the stack produced (output)
data RPolish input output where
    -- | Push a constant onto the stack.
    RPush :: a -> RPolish (a :< rest) output -> RPolish rest output
    -- | Apply the function on top of the stack to the value below it.
    RApp :: RPolish (b :< rest) output -> RPolish ((a -> b) :< a :< rest) output
    -- | Identity automaton: the input stack is the output.
    RStop :: RPolish rest rest
-- Evaluate the output of an RP automaton, given an input stack
-- (the lazy patterns keep the application productive on lazily built
-- stacks).
evalRP :: RPolish input output -> input -> output
evalRP RStop acc = acc
evalRP (RPush v r) acc = evalRP r (v :< acc)
evalRP (RApp r) ~(f :< ~(a :< rest)) = evalRP r (f a :< rest)
-- Execute the automaton as far as possible: whenever a pushed value
-- meets a pushed function under an application, perform it eagerly.
simplify :: RPolish s output -> RPolish s output
simplify rp = case rp of
    RPush x (RPush f (RApp r)) -> simplify (RPush (f x) r)
    _                          -> rp
-- | Fully evaluate a zipper: run the right-hand process, feed the
-- resulting stack through the left automaton, and prepend the errors
-- already collected on the left.
evalR :: Zip token (a :< rest) -> (a, [String])
evalR (Zip errs l r) = ((top . evalRP l) *** (errs ++)) (evalR' r)
-- Gluing a Polish expression and an RP automaton.
-- This can also be seen as a zipper of Polish expressions.
-- The [String] field holds error messages collected so far (most
-- recent first).
data Zip s output where
    Zip :: [String] -> RPolish mid output -> Steps s mid -> Zip s output
-- note that the Stack produced by the Polish expression matches
-- the stack consumed by the RP automaton.
-- | The error messages collected so far (oldest first) together with a
-- tree rendering of the unevaluated right-hand side.
fullLog :: Zip s output -> ([String],Tree LogEntry)
fullLog (Zip messages _ rhs) = (reverse messages, rightLog rhs)
-- Debug rendering: left automaton, right process, collected errors.
instance Show (Zip s output) where
    show (Zip errs l r) = show l ++ "<>" ++ show r ++ ", errs = " ++ show errs
-- | Apply a (rank-2, stack-shape-preserving) process transformation to
-- the unevaluated right part of a zipper.
onRight :: (forall r. Steps s r -> Steps s r) -> Zip s a -> Zip s a
onRight f (Zip errs lhs rhs) = Zip errs lhs (f rhs)
type Process token result = Zip token (result :< ())
| noughtmare/yi | yi-core/src/Parser/Incremental.hs | gpl-2.0 | 13,925 | 6 | 15 | 4,340 | 5,654 | 2,899 | 2,755 | 259 | 12 |
{-# language ScopedTypeVariables #-}
-- | Contains configuration values for all robots.
module Sorts.Robots.Configuration where
import Physics.Chipmunk as CM
import Graphics.Qt
import Base
-- * physics
-- | robot mass per (square-)pixel
-- (pattern type signature binding, enabled by ScopedTypeVariables)
robotMaterialMass :: CpFloat = 2 -- tweakValue "robotMaterialMass" -- 78
-- | physical attributes of the robots
-- (Obviously, don't change the collisionType.)
-- Top-level type signature added: the binding previously relied on
-- inference (flagged by -Wmissing-signatures under -Wall).
robotShapeAttributes :: ShapeAttributes
robotShapeAttributes = ShapeAttributes{
    elasticity = 0.8,
    friction = 0.4,-- tweakValue "RobotFriction",
    CM.collisionType = RobotCT
  }
-- | size of one of the robots "face tiles"
-- (15x15 in "uber" units, converted to pixels per component)
robotBodySize :: Size CpFloat = fmap fromUber $ Size 15 15
| nikki-and-the-robots/nikki | src/Sorts/Robots/Configuration.hs | lgpl-3.0 | 658 | 0 | 6 | 116 | 93 | 57 | 36 | 11 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE CPP #-}
-- |
-- Module : Network.TLS.Record.State
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : experimental
-- Portability : unknown
--
module Network.TLS.Record.State
( CryptState(..)
, MacState(..)
, RecordState(..)
, newRecordState
, incrRecordState
, RecordM
, runRecordM
, getRecordVersion
, setRecordIV
, withCompression
, computeDigest
, makeDigest
, getBulk
, getMacSequence
) where
import Data.Word
import Control.Applicative
import Control.Monad.State
import Network.TLS.Compression
import Network.TLS.Cipher
import Network.TLS.ErrT
import Network.TLS.Struct
import Network.TLS.Wire
import Network.TLS.Packet
import Network.TLS.MAC
import Network.TLS.Util
import qualified Data.ByteString as B
-- | Cryptographic material for one direction of the record layer.
data CryptState = CryptState
    { cstKey :: !BulkState
    , cstIV :: !Bytes
    , cstMacSecret :: !Bytes
    } deriving (Show)
-- | Monotonic sequence number that is mixed into every record MAC.
newtype MacState = MacState
    { msSequence :: Word64
    } deriving (Show)
-- | Complete per-direction record state: negotiated cipher (if any),
-- compression, key material and MAC sequence.
data RecordState = RecordState
    { stCipher :: Maybe Cipher
    , stCompression :: Compression
    , stCryptState :: !CryptState
    , stMacState :: !MacState
    } deriving (Show)
-- | Record-layer computation: a state monad over 'RecordState' with a
-- read-only protocol 'Version' and 'TLSError' short-circuiting.
newtype RecordM a = RecordM { runRecordM :: Version
                                         -> RecordState
                                         -> Either TLSError (a, RecordState) }
-- Defined in terms of the Monad instance (pre-AMP style).
instance Applicative RecordM where
    pure = return
    (<*>) = ap
instance Monad RecordM where
    -- Yield a value without touching the state.
    return a = RecordM $ \_ st -> Right (a, st)
    -- Thread the version and state through m1, short-circuiting on
    -- 'Left'.  (Dropped a redundant 'do' that merely wrapped the
    -- single 'case' expression.)
    m1 >>= m2 = RecordM $ \ver st ->
        case runRecordM m1 ver st of
            Left err       -> Left err
            Right (a, st2) -> runRecordM (m2 a) ver st2
-- Map over the produced value, leaving state and errors untouched.
instance Functor RecordM where
    fmap f m = RecordM $ \ver st ->
                case runRecordM m ver st of
                    Left err -> Left err
                    Right (a, st2) -> Right (f a, st2)
-- | Read the protocol version the record layer currently operates at.
getRecordVersion :: RecordM Version
getRecordVersion = RecordM $ \ver st -> Right (ver, st)
-- Standard get/put over 'RecordState'; 'state' is only a class method
-- from mtl >= 2.1, hence the CPP guard.
instance MonadState RecordState RecordM where
    put x = RecordM $ \_ _ -> Right ((), x)
    get = RecordM $ \_ st -> Right (st, st)
#if MIN_VERSION_mtl(2,1,0)
    state f = RecordM $ \_ st -> Right (f st)
#endif
-- Errors are the 'Left' branch of the underlying 'Either'; 'catchError'
-- restarts the handler from the state at the point of entry.
instance MonadError TLSError RecordM where
    throwError e = RecordM $ \_ _ -> Left e
    catchError m f = RecordM $ \ver st ->
                        case runRecordM m ver st of
                            Left err -> runRecordM (f err) ver st
                            r -> r
-- | Initial record state: no cipher negotiated yet, null compression,
-- uninitialized key material, sequence number zero.
newRecordState :: RecordState
newRecordState = RecordState
    { stCipher = Nothing
    , stCompression = nullCompression
    , stCryptState = CryptState BulkStateUninitialized B.empty B.empty
    , stMacState = MacState 0
    }
-- | Bump the MAC sequence number (done once per processed record).
incrRecordState :: RecordState -> RecordState
incrRecordState st =
    let MacState seqNum = stMacState st
    in  st { stMacState = MacState (seqNum + 1) }
-- | Replace the IV stored in the crypt state.
setRecordIV :: Bytes -> RecordState -> RecordState
setRecordIV newIV st =
    let cst = stCryptState st
    in  st { stCryptState = cst { cstIV = newIV } }
-- | Run a function over the current compression state, store the
-- updated compression back, and return the function's second result.
withCompression :: (Compression -> (Compression, a)) -> RecordM a
withCompression f = do
    comp <- gets stCompression
    let (comp', result) = f comp
    modify (\st -> st { stCompression = comp' })
    return result
-- | Compute the MAC over a record (header + content + sequence number)
-- and return it together with the state whose sequence number has been
-- advanced.  SSLv3 (< TLS10) uses the legacy MAC and header encoding;
-- TLS versions use HMAC and the versioned header.
computeDigest :: Version -> RecordState -> Header -> Bytes -> (Bytes, RecordState)
computeDigest ver tstate hdr content = (digest, incrRecordState tstate)
  where digest = macF (cstMacSecret cst) msg
        cst = stCryptState tstate
        -- 'fromJust' is justified here only because MACing is done after
        -- a cipher has been negotiated.
        cipher = fromJust "cipher" $ stCipher tstate
        hashA = cipherHash cipher
        encodedSeq = encodeWord64 $ msSequence $ stMacState tstate
        (macF, msg)
            | ver < TLS10 = (macSSL hashA, B.concat [ encodedSeq, encodeHeaderNoVer hdr, content ])
            | otherwise = (hmac hashA, B.concat [ encodedSeq, encodeHeader hdr, content ])
-- | Compute the digest for a record inside 'RecordM', committing the
-- advanced sequence number back into the state.
makeDigest :: Header -> Bytes -> RecordM Bytes
makeDigest hdr content = do
    ver <- getRecordVersion
    st  <- get
    let (digest, st') = computeDigest ver st hdr content
    put st'
    return digest
-- | Bulk description of the negotiated cipher.  Partial: a cipher must
-- already have been set in the state.
getBulk :: RecordM Bulk
getBulk = gets (cipherBulk . fromJust "cipher" . stCipher)
-- | Current MAC sequence number.
getMacSequence :: RecordM Word64
getMacSequence = gets (msSequence . stMacState)
| lancelotsix/hs-tls | core/Network/TLS/Record/State.hs | bsd-3-clause | 4,389 | 0 | 15 | 1,248 | 1,267 | 687 | 580 | 120 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-|
Module : Stack.Types.Sig
Description : Signature Types
Copyright : (c) FPComplete.com, 2015
License : BSD3
Maintainer : Tim Dysinger <tim@fpcomplete.com>
Stability : experimental
Portability : POSIX
-}
module Stack.Types.Sig
(Signature(..), Fingerprint(..), SigException(..))
where
import Control.Exception (Exception)
import Data.Aeson (Value(..), ToJSON(..), FromJSON(..))
import Data.ByteString (ByteString)
import qualified Data.ByteString as SB
import Data.Char (isDigit, isAlpha, isSpace)
import Data.Monoid ((<>))
import Data.String (IsString(..))
import Data.Text (Text)
import qualified Data.Text as T
import Data.Typeable (Typeable)
import Stack.Types.PackageName
-- | A GPG signature.
-- Wrapped as raw bytes; rendering is truncated by the Show instance.
newtype Signature =
    Signature ByteString
    deriving (Ord,Eq)
-- Truncated rendering: at most the first 140 bytes are shown, with an
-- ellipsis when the signature is longer than that.
instance Show Signature where
    show (Signature s) = "Signature " ++ rendered
      where
        prefix = SB.take 140 s
        rendered
          | SB.length s > 140 = show prefix ++ "..."
          | otherwise         = show prefix
-- | The GPG fingerprint.
-- Stored as text with all whitespace removed (see the FromJSON parser).
newtype Fingerprint = Fingerprint
    { fingerprintSample :: Text
    } deriving (Eq,Ord,Show)
instance FromJSON Fingerprint where
    parseJSON j = do
        s <- parseJSON j
        -- Strip all whitespace so fingerprints may be given in the
        -- conventional grouped form ("AAAA BBBB ...").
        let withoutSpaces = T.filter (not . isSpace) s
        -- After the filter no character can satisfy 'isSpace', so the
        -- previous @|| isSpace c@ disjunct was dead code; accepting
        -- alphanumerics is sufficient.
        -- NOTE(review): the empty string is accepted here — confirm
        -- that is intentional.
        if T.null withoutSpaces ||
           T.all (\c -> isAlpha c || isDigit c) withoutSpaces
            then return (Fingerprint withoutSpaces)
            else fail ("Expected fingerprint, but got: " ++ T.unpack s)
-- Serialize the fingerprint as a bare JSON string.
instance ToJSON Fingerprint where
    toJSON (Fingerprint txt) = String txt
-- Allows fingerprint literals via OverloadedStrings (no validation).
instance IsString Fingerprint where
    fromString = Fingerprint . T.pack
-- Parse a package name from a JSON string, wrapped to avoid an orphan
-- instance on 'PackageName' itself.
instance FromJSON (Aeson PackageName) where
    parseJSON j = do
        s <- parseJSON j
        case parsePackageName s of
            Just name -> return (Aeson name)
            Nothing -> fail ("Invalid package name: " <> T.unpack s)
-- | Handy wrapper for orphan instances.
newtype Aeson a = Aeson
    { _unAeson :: a
    } deriving (Ord,Eq)
-- | Exceptions
-- (human-readable messages are provided by the manual Show instance)
data SigException
    = GPGFingerprintException String
    | GPGSignException String
    | GPGVerifyException String
    | SigInvalidSDistTarBall
    | SigNoProjectRootException
    | SigServiceException String
    deriving (Typeable)
instance Exception SigException
-- Human-readable rendering used when a SigException escapes.
instance Show SigException where
    show ex = case ex of
        GPGFingerprintException e -> "Error extracting a GPG fingerprint " <> e
        GPGSignException e        -> "Error signing with GPG " <> e
        GPGVerifyException e      -> "Error verifying with GPG " <> e
        SigNoProjectRootException -> "Missing Project Root"
        SigInvalidSDistTarBall    -> "Invalid sdist tarball"
        SigServiceException e     -> "Error with the Signature Service " <> e
| harendra-kumar/stack | src/Stack/Types/Sig.hs | bsd-3-clause | 3,021 | 0 | 15 | 798 | 709 | 389 | 320 | 70 | 0 |
{-
(c) The University of Glasgow 2011
The deriving code for the Generic class
(equivalent to the code in TcGenDeriv, for other classes)
-}
{-# LANGUAGE CPP, ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
module TcGenGenerics (canDoGenerics, canDoGenerics1,
GenericKind(..),
MetaTyCons, genGenericMetaTyCons,
gen_Generic_binds, get_gen1_constrained_tys) where
import DynFlags
import HsSyn
import Type
import Kind ( isKind )
import TcType
import TcGenDeriv
import DataCon
import TyCon
import FamInstEnv ( FamInst, FamFlavor(..), mkSingleCoAxiom )
import FamInst
import Module ( Module, moduleName, moduleNameString
, modulePackageKey, packageKeyString )
import IfaceEnv ( newGlobalBinder )
import Name hiding ( varName )
import RdrName
import BasicTypes
import TysWiredIn
import PrelNames
import InstEnv
import TcEnv
import MkId
import TcRnMonad
import HscTypes
import ErrUtils( Validity(..), andValid )
import BuildTyCl
import SrcLoc
import Bag
import VarSet (elemVarSet)
import Outputable
import FastString
import Util
import Control.Monad (mplus,forM)
#include "HsVersions.h"
{-
************************************************************************
* *
\subsection{Bindings for the new generic deriving mechanism}
* *
************************************************************************
For the generic representation we need to generate:
\begin{itemize}
\item A Generic instance
\item A Rep type instance
\item Many auxiliary datatypes and instances for them (for the meta-information)
\end{itemize}
-}
-- | Generate the method bindings of the Generic/Generic1 instance and
-- the @Rep@/@Rep1@ type-family instance for the given tycon.
gen_Generic_binds :: GenericKind -> TyCon -> MetaTyCons -> Module
                  -> TcM (LHsBinds RdrName, FamInst)
gen_Generic_binds gk tc metaTyCons mod = do
  repTyInsts <- tc_mkRepFamInsts gk tc metaTyCons mod
  return (mkBindsRep gk tc, repTyInsts)
-- | Create the empty metadata tycons (one datatype tycon, one per
-- constructor, one per selector) for a deriving-Generic tycon, plus
-- the DerivStuff for their declarations and class instances.
genGenericMetaTyCons :: TyCon -> Module -> TcM (MetaTyCons, BagDerivStuff)
genGenericMetaTyCons tc mod =
  do  loc <- getSrcSpanM
      let
        tc_name = tyConName tc
        tc_cons = tyConDataCons tc
        tc_arits = map dataConSourceArity tc_cons
        tc_occ = nameOccName tc_name
        d_occ = mkGenD tc_occ
        c_occ m = mkGenC tc_occ m
        s_occ m n = mkGenS tc_occ m n
        -- Abstract, empty tycons: they exist only to carry instances.
        mkTyCon name = ASSERT( isExternalName name )
                         buildAlgTyCon name [] [] Nothing [] distinctAbstractTyConRhs
                           NonRecursive
                           False -- Not promotable
                           False -- Not GADT syntax
                           NoParentTyCon
      d_name <- newGlobalBinder mod d_occ loc
      c_names <- forM (zip [0..] tc_cons) $ \(m,_) ->
                   newGlobalBinder mod (c_occ m) loc
      s_names <- forM (zip [0..] tc_arits) $ \(m,a) -> forM [0..a-1] $ \n ->
                   newGlobalBinder mod (s_occ m n) loc
      let metaDTyCon = mkTyCon d_name
          metaCTyCons = map mkTyCon c_names
          metaSTyCons = map (map mkTyCon) s_names
          metaDts = MetaTyCons metaDTyCon metaCTyCons metaSTyCons
      -- pprTrace "rep0" (ppr rep0_tycon) $
      (,) metaDts `fmap` metaTyConsToDerivStuff tc metaDts
-- both the tycon declarations and related instances
-- (a Datatype instance for the D tycon, a Constructor instance per C
-- tycon, a Selector instance per S tycon)
metaTyConsToDerivStuff :: TyCon -> MetaTyCons -> TcM BagDerivStuff
metaTyConsToDerivStuff tc metaDts =
  do  loc <- getSrcSpanM
      dflags <- getDynFlags
      dClas <- tcLookupClass datatypeClassName
      let new_dfun_name clas tycon = newDFunName clas [mkTyConApp tycon []] loc
      d_dfun_name <- new_dfun_name dClas tc
      cClas <- tcLookupClass constructorClassName
      c_dfun_names <- sequence [ new_dfun_name cClas tc | _ <- metaC metaDts ]
      sClas <- tcLookupClass selectorClassName
      s_dfun_names <- sequence (map sequence [ [ new_dfun_name sClas tc
                                               | _ <- x ]
                                             | x <- metaS metaDts ])
      fix_env <- getFixityEnv
      let
        (dBinds,cBinds,sBinds) = mkBindsMetaD fix_env tc
        -- Build a no-constraint local instance of @clas@ at the given
        -- (nullary) metadata tycon.
        mk_inst clas tc dfun_name
          = mkLocalInstance (mkDictFunId dfun_name [] [] clas tys)
                            OverlapFlag { overlapMode = (NoOverlap "")
                                        , isSafeOverlap = safeLanguageOn dflags }
                            [] clas tys
          where
            tys = [mkTyConTy tc]
        -- Datatype
        d_metaTycon = metaD metaDts
        d_inst = mk_inst dClas d_metaTycon d_dfun_name
        d_binds = InstBindings { ib_binds = dBinds
                               , ib_tyvars = []
                               , ib_pragmas = []
                               , ib_extensions = []
                               , ib_derived = True }
        d_mkInst = DerivInst (InstInfo { iSpec = d_inst, iBinds = d_binds })
        -- Constructor
        c_metaTycons = metaC metaDts
        c_insts = [ mk_inst cClas c ds
                  | (c, ds) <- myZip1 c_metaTycons c_dfun_names ]
        c_binds = [ InstBindings { ib_binds = c
                                 , ib_tyvars = []
                                 , ib_pragmas = []
                                 , ib_extensions = []
                                 , ib_derived = True }
                  | c <- cBinds ]
        c_mkInst = [ DerivInst (InstInfo { iSpec = is, iBinds = bs })
                   | (is,bs) <- myZip1 c_insts c_binds ]
        -- Selector
        s_metaTycons = metaS metaDts
        s_insts = map (map (\(s,ds) -> mk_inst sClas s ds))
                      (myZip2 s_metaTycons s_dfun_names)
        s_binds = [ [ InstBindings { ib_binds = s
                                   , ib_tyvars = []
                                   , ib_pragmas = []
                                   , ib_extensions = []
                                   , ib_derived = True }
                    | s <- ss ] | ss <- sBinds ]
        s_mkInst = map (map (\(is,bs) -> DerivInst (InstInfo { iSpec = is
                                                             , iBinds = bs})))
                       (myZip2 s_insts s_binds)
        -- Zips that assert the two sides have matching shapes.
        myZip1 :: [a] -> [b] -> [(a,b)]
        myZip1 l1 l2 = ASSERT(length l1 == length l2) zip l1 l2
        myZip2 :: [[a]] -> [[b]] -> [[(a,b)]]
        myZip2 l1 l2 =
          ASSERT(and (zipWith (>=) (map length l1) (map length l2)))
          [ zip x1 x2 | (x1,x2) <- zip l1 l2 ]
      return $ mapBag DerivTyCon (metaTyCons2TyCons metaDts)
               `unionBags` listToBag (d_mkInst : c_mkInst ++ concat s_mkInst)
{-
************************************************************************
* *
\subsection{Generating representation types}
* *
************************************************************************
-}
get_gen1_constrained_tys :: TyVar -> Type -> [Type]
-- called by TcDeriv.inferConstraints; generates a list of types, each of which
-- must be a Functor in order for the Generic1 instance to work.
-- Only compositions ('ata_comp') contribute such a requirement.
get_gen1_constrained_tys argVar
  = argTyFold argVar $ ArgTyAlg { ata_rec0 = const []
                                , ata_par1 = [], ata_rec1 = const []
                                , ata_comp = (:) }
{-
Note [Requirements for deriving Generic and Rep]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In the following, T, Tfun, and Targ are "meta-variables" ranging over type
expressions.
(Generic T) and (Rep T) are derivable for some type expression T if the
following constraints are satisfied.
(a) T = (D v1 ... vn) with free variables v1, v2, ..., vn where n >= 0 v1
... vn are distinct type variables. Cf #5939.
(b) D is a type constructor *value*. In other words, D is either a type
constructor or it is equivalent to the head of a data family instance (up to
alpha-renaming).
(c) D cannot have a "stupid context".
(d) The right-hand side of D cannot include unboxed types, existential types,
or universally quantified types.
(e) T :: *.
(Generic1 T) and (Rep1 T) are derivable for some type expression T if the
following constraints are satisfied.
(a),(b),(c),(d) As above.
(f) T must expect arguments, and its last parameter must have kind *.
We use `a' to denote the parameter of D that corresponds to the last
parameter of T.
(g) For any type-level application (Tfun Targ) in the right-hand side of D
where the head of Tfun is not a tuple constructor:
(b1) `a' must not occur in Tfun.
(b2) If `a' occurs in Targ, then Tfun :: * -> *.
-}
canDoGenerics :: TyCon -> [Type] -> Validity
-- canDoGenerics rep_tc tc_args determines if Generic/Rep can be derived for a
-- type expression (rep_tc tc_arg0 tc_arg1 ... tc_argn).
--
-- Check (b) from Note [Requirements for deriving Generic and Rep] is taken
-- care of because canDoGenerics is applied to rep tycons.
--
-- It returns Nothing if deriving is possible. It returns (Just reason) if not.
canDoGenerics tc tc_args
  = mergeErrors (
          -- Check (c) from Note [Requirements for deriving Generic and Rep].
              (if (not (null (tyConStupidTheta tc)))
                then (NotValid (tc_name <+> text "must not have a datatype context"))
                else IsValid) :
          -- Check (a) from Note [Requirements for deriving Generic and Rep].
          --
          -- Data family indices can be instantiated; the `tc_args` here are
          -- the representation tycon args
              (if (all isTyVarTy (filterOut isKind tc_args))
                then IsValid
                else NotValid (tc_name <+> text "must not be instantiated;" <+>
                     text "try deriving `" <> tc_name <+> tc_tys <>
                     text "' instead"))
          -- See comment below
            : (map bad_con (tyConDataCons tc)))
  where
    -- The tc can be a representation tycon. When we want to display it to the
    -- user (in an error message) we should print its parent
    (tc_name, tc_tys) = case tyConParent tc of
        FamInstTyCon _ ptc tys -> (ppr ptc, hsep (map ppr
                                    (tys ++ drop (length tys) tc_args)))
        _ -> (ppr tc, hsep (map ppr (tyConTyVars tc)))
    -- Check (d) from Note [Requirements for deriving Generic and Rep].
    --
    -- If any of the constructors has an unboxed type as argument,
    -- then we can't build the embedding-projection pair, because
    -- it relies on instantiating *polymorphic* sum and product types
    -- at the argument types of the constructors
    bad_con dc = if (any bad_arg_type (dataConOrigArgTys dc))
                  then (NotValid (ppr dc <+> text "must not have unlifted or polymorphic arguments"))
                  else (if (not (isVanillaDataCon dc))
                          then (NotValid (ppr dc <+> text "must be a vanilla data constructor"))
                          else IsValid)
    -- Nor can we do the job if it's an existential data constructor,
    -- Nor if the args are polymorphic types (I don't think)
    bad_arg_type ty = isUnLiftedType ty || not (isTauTy ty)
-- | Fold a list of validities into one, concatenating all error
-- documents with ", and"; 'IsValid' entries are simply dropped.
mergeErrors :: [Validity] -> Validity
mergeErrors = foldr combine IsValid
  where
    combine IsValid      acc = acc
    combine (NotValid s) acc = case acc of
        IsValid     -> NotValid s
        NotValid s' -> NotValid (s <> text ", and" $$ s')
-- A datatype used only inside of canDoGenerics1. It's the result of analysing
-- a type term.
data Check_for_CanDoGenerics1 = CCDG1
  { _ccdg1_hasParam :: Bool -- does the parameter of interest occur in
                            -- this type?
  , _ccdg1_errors :: Validity -- errors generated by this type
  }
{-
Note [degenerate use of FFoldType]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We use foldDataConArgs here only for its ability to treat tuples
specially. foldDataConArgs also tracks covariance (though it assumes all
higher-order type parameters are covariant) and has hooks for special handling
of functions and polytypes, but we do *not* use those.
The key issue is that Generic1 deriving currently offers no sophisticated
support for functions. For example, we cannot handle
data F a = F ((a -> Int) -> Int)
even though a is occurring covariantly.
In fact, our rule is harsh: a is simply not allowed to occur within the first
argument of (->). We treat (->) the same as any other non-tuple tycon.
Unfortunately, this means we have to track "the parameter occurs in this type"
explicitly, even though foldDataConArgs is also doing this internally.
-}
-- canDoGenerics1 rep_tc tc_args determines if a Generic1/Rep1 can be derived
-- for a type expression (rep_tc tc_arg0 tc_arg1 ... tc_argn).
--
-- Checks (a) through (d) from Note [Requirements for deriving Generic and Rep]
-- are taken care of by the call to canDoGenerics.
--
-- It returns Nothing if deriving is possible. It returns (Just reason) if not.
canDoGenerics1 :: TyCon -> [Type] -> Validity
canDoGenerics1 rep_tc tc_args =
  canDoGenerics rep_tc tc_args `andValid` additionalChecks
  where
    additionalChecks
        -- check (f) from Note [Requirements for deriving Generic and Rep]
      | null (tyConTyVars rep_tc) = NotValid $
          ptext (sLit "Data type") <+> quotes (ppr rep_tc)
          <+> ptext (sLit "must have some type parameters")
      | otherwise = mergeErrors $ concatMap check_con data_cons
    data_cons = tyConDataCons rep_tc
    check_con con = case check_vanilla con of
      j@(NotValid {}) -> [j]
      IsValid -> _ccdg1_errors `map` foldDataConArgs (ft_check con) con
    bad :: DataCon -> SDoc -> SDoc
    bad con msg = ptext (sLit "Constructor") <+> quotes (ppr con) <+> msg
    check_vanilla :: DataCon -> Validity
    check_vanilla con | isVanillaDataCon con = IsValid
                      | otherwise = NotValid (bad con existential)
    -- Combinators for Check_for_CanDoGenerics1: zero, failure, and a
    -- "plus" that ORs the has-param flags and merges the errors.
    bmzero = CCDG1 False IsValid
    bmbad con s = CCDG1 True $ NotValid $ bad con s
    bmplus (CCDG1 b1 m1) (CCDG1 b2 m2) = CCDG1 (b1 || b2) (m1 `andValid` m2)
    -- check (g) from Note [degenerate use of FFoldType]
    ft_check :: DataCon -> FFoldType Check_for_CanDoGenerics1
    ft_check con = FT
      { ft_triv = bmzero
      , ft_var = caseVar, ft_co_var = caseVar
      -- (component_0,component_1,...,component_n)
      , ft_tup = \_ components -> if any _ccdg1_hasParam (init components)
                                  then bmbad con wrong_arg
                                  else foldr bmplus bmzero components
      -- (dom -> rng), where the head of ty is not a tuple tycon
      , ft_fun = \dom rng -> -- cf #8516
          if _ccdg1_hasParam dom
          then bmbad con wrong_arg
          else bmplus dom rng
      -- (ty arg), where head of ty is neither (->) nor a tuple constructor and
      -- the parameter of interest does not occur in ty
      , ft_ty_app = \_ arg -> arg
      , ft_bad_app = bmbad con wrong_arg
      , ft_forall = \_ body -> body -- polytypes are handled elsewhere
      }
      where
        caseVar = CCDG1 True IsValid
    existential = text "must not have existential arguments"
    wrong_arg = text "applies a type to an argument involving the last parameter"
             $$ text "but the applied type is not of kind * -> *"
{-
************************************************************************
* *
\subsection{Generating the RHS of a generic default method}
* *
************************************************************************
-}
-- | Local unique supply used while generating fresh variable names
-- (@g1@, @g2@, ... — see 'mkGenericLocal'); just a plain Int.
type US = Int -- Local unique supply, just a plain Int
-- | One case alternative of the generated @from@/@to@ functions: the
-- left-hand-side pattern and the right-hand-side expression.
type Alt = (LPat RdrName, LHsExpr RdrName)
-- GenericKind serves to mark if a datatype derives Generic (Gen0) or
-- Generic1 (Gen1).
data GenericKind = Gen0 | Gen1
-- as above, but with a payload of the TyCon's name for "the" parameter
-- ("the" parameter is the last type variable of the data type; only the
-- Gen1 case needs it, since Generic does not single out a parameter)
data GenericKind_ = Gen0_ | Gen1_ TyVar
-- as above, but using a single datacon's name for "the" parameter
-- (obtained from GenericKind_ via 'gk2gkDC')
data GenericKind_DC = Gen0_DC | Gen1_DC TyVar
-- | Drop the "the parameter" payload, recovering the bare 'GenericKind'.
forgetArgVar :: GenericKind_DC -> GenericKind
forgetArgVar gk = case gk of
  Gen0_DC   -> Gen0
  Gen1_DC _ -> Gen1
-- | Switch from the TyCon-centric 'GenericKind_' to the datacon-centric
-- 'GenericKind_DC' for one particular constructor.
-- When working only within a single datacon, "the" parameter's name should
-- match that datacon's name for it.
gk2gkDC :: GenericKind_ -> DataCon -> GenericKind_DC
gk2gkDC Gen0_ _ = Gen0_DC
-- NOTE(review): 'last' is partial; this relies on the datacon having at
-- least one universal tyvar. The Gen1 call sites guard with ASSERTs on
-- the tyvar list being non-empty — confirm that invariant covers this.
gk2gkDC Gen1_{} d = Gen1_DC $ last $ dataConUnivTyVars d
-- Bindings for the Generic instance
-- | Build the method bindings of a derived Generic/Generic1 instance:
-- @from@/@to@ for 'Gen0', @from1@/@to1@ for 'Gen1'.
mkBindsRep :: GenericKind -> TyCon -> LHsBinds RdrName
mkBindsRep gk tycon =
    unitBag (mkRdrFunBind (L loc from01_RDR) from_matches)
  `unionBags`
    unitBag (mkRdrFunBind (L loc to01_RDR) to_matches)
  where
    from_matches = [mkSimpleHsAlt pat rhs | (pat,rhs) <- from_alts]
    to_matches   = [mkSimpleHsAlt pat rhs | (pat,rhs) <- to_alts ]
    loc          = srcLocSpan (getSrcLoc tycon)
    datacons     = tyConDataCons tycon
    -- Pick the method names matching the kind of Generic being derived.
    (from01_RDR, to01_RDR) = case gk of
                               Gen0 -> (from_RDR, to_RDR)
                               Gen1 -> (from1_RDR, to1_RDR)
    -- Recurse over the sum first
    from_alts, to_alts :: [Alt]
    (from_alts, to_alts) = mkSum gk_ (1 :: US) tycon datacons
      where gk_ = case gk of
              Gen0 -> Gen0_
              Gen1 -> ASSERT(length tyvars >= 1)
                      Gen1_ (last tyvars)
                where tyvars = tyConTyVars tycon
--------------------------------------------------------------------------------
-- The type synonym instance and synonym
--       type instance Rep (D a b) = Rep_D a b
--       type Rep_D a b = ...representation type for D ...
--------------------------------------------------------------------------------
-- | Create the @Rep@ (or @Rep1@) type-family instance for @tycon@.
tc_mkRepFamInsts :: GenericKind   -- Gen0 or Gen1
                 -> TyCon         -- The type to generate representation for
                 -> MetaTyCons    -- Metadata datatypes to refer to
                 -> Module        -- Used as the location of the new RepTy
                 -> TcM (FamInst) -- Generated representation0 coercion
tc_mkRepFamInsts gk tycon metaDts mod =
       -- Consider the example input tycon `D`, where data D a b = D_ a
       -- Also consider `R:DInt`, where { data family D x y :: * -> *
       --                               ; data instance D Int a b = D_ a }
    do { -- `rep` = GHC.Generics.Rep or GHC.Generics.Rep1 (type family)
         fam_tc <- case gk of
           Gen0 -> tcLookupTyCon repTyConName
           Gen1 -> tcLookupTyCon rep1TyConName
       ; let -- `tyvars` = [a,b]; for Gen1 the last tyvar is split off as
             -- "the" parameter and does not appear on the instance LHS
             (tyvars, gk_) = case gk of
               Gen0 -> (all_tyvars, Gen0_)
               Gen1 -> ASSERT(not $ null all_tyvars)
                       (init all_tyvars, Gen1_ $ last all_tyvars)
               where all_tyvars = tyConTyVars tycon
             tyvar_args = mkTyVarTys tyvars
             appT :: [Type]
             appT = case tyConFamInst_maybe tycon of
                      -- `appT` = D Int a b (data families case)
                      Just (famtycon, apps) ->
                        -- `fam` = D
                        -- `apps` = [Int, a, b]
                        let allApps = case gk of
                                        Gen0 -> apps
                                        Gen1 -> ASSERT(not $ null apps)
                                                init apps
                        in [mkTyConApp famtycon allApps]
                      -- `appT` = D a b (normal case)
                      Nothing -> [mkTyConApp tycon tyvar_args]
         -- `repTy` = D1 ... (C1 ... (S1 ... (Rec0 a))) :: * -> *
       ; repTy <- tc_mkRepTy gk_ tycon metaDts
         -- `rep_name` is a name we generate for the synonym
       ; rep_name <- let mkGen = case gk of Gen0 -> mkGenR; Gen1 -> mkGen1R
                     in newGlobalBinder mod (mkGen (nameOccName (tyConName tycon)))
                          (nameSrcSpan (tyConName tycon))
       ; let axiom = mkSingleCoAxiom Nominal rep_name tyvars fam_tc appT repTy
       ; newFamInst SynFamilyInst axiom }
--------------------------------------------------------------------------------
-- Type representation
--------------------------------------------------------------------------------
-- | See documentation of 'argTyFold'; that function uses the fields of this
-- type to interpret the structure of a type when that type is considered as an
-- argument to a constructor that is being represented with 'Rep1'.
data ArgTyAlg a = ArgTyAlg
  { ata_rec0 :: (Type -> a) -- ^ result for a type not involving "the" parameter
  , ata_par1 :: a, ata_rec1 :: (Type -> a)
    -- ^ 'ata_par1': result when the type /is/ "the" parameter;
    -- 'ata_rec1': result for @f argVar@ given @f@
  , ata_comp :: (Type -> a -> a)
    -- ^ result for a composition @f (g ...)@, given @f@ and the folded inner part
  }
-- | @argTyFold@ implements a generalised and safer variant of the @arg@
-- function from Figure 3 in <http://dreixel.net/research/pdf/gdmh.pdf>. @arg@
-- is conceptually equivalent to:
--
-- > arg t = case t of
-- >   _ | isTyVar t -> if (t == argVar) then Par1 else Par0 t
-- >   App f [t'] |
-- >     representable1 f &&
-- >     t' == argVar -> Rec1 f
-- >   App f [t'] |
-- >     representable1 f &&
-- >     t' has tyvars -> f :.: (arg t')
-- >   _ -> Rec0 t
--
-- where @argVar@ is the last type variable in the data type declaration we are
-- finding the representation for.
--
-- @argTyFold@ is more general than @arg@ because it uses 'ArgTyAlg' to
-- abstract out the concrete invocations of @Par0@, @Rec0@, @Par1@, @Rec1@, and
-- @:.:@.
--
-- @argTyFold@ is safer than @arg@ because @arg@ would lead to a GHC panic for
-- some data types. The problematic case is when @t@ is an application of a
-- non-representable type @f@ to @argVar@: @App f [argVar]@ is caught by the
-- @_@ pattern, and ends up represented as @Rec0 t@. This type occurs /free/ in
-- the RHS of the eventual @Rep1@ instance, which is therefore ill-formed. Some
-- representable1 checks have been relaxed, and others were moved to
-- @canDoGenerics1@.
argTyFold :: forall a. TyVar -> ArgTyAlg a -> Type -> a
argTyFold argVar (ArgTyAlg {ata_rec0 = mkRec0,
                            ata_par1 = mkPar1, ata_rec1 = mkRec1,
                            ata_comp = mkComp}) =
  -- mkRec0 is the default; use it if there is no interesting structure
  -- (e.g. occurrences of parameters or recursive occurrences)
  \t -> maybe (mkRec0 t) id $ go t where
  go :: Type -> -- type to fold through
        Maybe a -- the result (e.g. representation type), unless it's trivial
  go t = isParam `mplus` isApp where
    isParam = do -- handles parameters
      t' <- getTyVar_maybe t
      Just $ if t' == argVar then mkPar1 -- moreover, it is "the" parameter
             else mkRec0 t -- NB mkRec0 instead of the conventional mkPar0
    isApp = do -- handles applications
      (phi, beta) <- tcSplitAppTy_maybe t
      let interesting = argVar `elemVarSet` exactTyVarsOfType beta
      -- Does it have no interesting structure to represent?
      if not interesting then Nothing
        else -- Is the argument the parameter? Special case for mkRec1.
          if Just argVar == getTyVar_maybe beta then Just $ mkRec1 phi
            else mkComp phi `fmap` go beta -- It must be a composition.
-- | Build the right-hand side of the @Rep@/@Rep1@ instance: the nested
-- @D1 (C1 (S1 ...))@ representation type for @tycon@.
tc_mkRepTy :: -- Gen0_ or Gen1_, for Rep or Rep1
              GenericKind_
              -- The type to generate representation for
           -> TyCon
              -- Metadata datatypes to refer to
           -> MetaTyCons
              -- Generated representation0 type
           -> TcM Type
tc_mkRepTy gk_ tycon metaDts =
  do
    -- Look up all the representation tycons from GHC.Generics once.
    d1    <- tcLookupTyCon d1TyConName
    c1    <- tcLookupTyCon c1TyConName
    s1    <- tcLookupTyCon s1TyConName
    nS1   <- tcLookupTyCon noSelTyConName
    rec0  <- tcLookupTyCon rec0TyConName
    rec1  <- tcLookupTyCon rec1TyConName
    par1  <- tcLookupTyCon par1TyConName
    u1    <- tcLookupTyCon u1TyConName
    v1    <- tcLookupTyCon v1TyConName
    plus  <- tcLookupTyCon sumTyConName
    times <- tcLookupTyCon prodTyConName
    comp  <- tcLookupTyCon compTyConName
    let mkSum' a b = mkTyConApp plus  [a,b]
        mkProd a b = mkTyConApp times [a,b]
        mkComp a b = mkTyConApp comp  [a,b]
        mkRec0 a   = mkTyConApp rec0  [a]
        mkRec1 a   = mkTyConApp rec1  [a]
        mkPar1     = mkTyConTy par1
        mkD a      = mkTyConApp d1 [metaDTyCon, sumP (tyConDataCons a)]
        mkC i d a  = mkTyConApp c1 [d, prod i (dataConInstOrigArgTys a $ mkTyVarTys $ tyConTyVars tycon)
                                              (null (dataConFieldLabels a))]
        -- This field has no label
        mkS True  _ a = mkTyConApp s1 [mkTyConTy nS1, a]
        -- This field has a label
        mkS False d a = mkTyConApp s1 [d, a]
        -- Sums and products are done in the same way for both Rep and Rep1
        sumP [] = mkTyConTy v1
        sumP l  = ASSERT(length metaCTyCons == length l)
                  foldBal mkSum' [ mkC i d a
                                 | (d,(a,i)) <- zip metaCTyCons (zip l [0..])]
        -- The Bool is True if this constructor has labelled fields
        prod :: Int -> [Type] -> Bool -> Type
        prod i [] _ = ASSERT(length metaSTyCons > i)
                      ASSERT(length (metaSTyCons !! i) == 0)
                      mkTyConTy u1
        prod i l b  = ASSERT(length metaSTyCons > i)
                      ASSERT(length l == length (metaSTyCons !! i))
                      foldBal mkProd [ arg d t b
                                     | (d,t) <- zip (metaSTyCons !! i) l ]
        arg :: Type -> Type -> Bool -> Type
        arg d t b = mkS b d $ case gk_ of
            -- Here we previously used Par0 if t was a type variable, but we
            -- realized that we can't always guarantee that we are wrapping-up
            -- all type variables in Par0. So we decided to stop using Par0
            -- altogether, and use Rec0 all the time.
                      Gen0_        -> mkRec0 t
                      Gen1_ argVar -> argPar argVar t
          where
            -- Builds argument representation for Rep1 (more complicated due to
            -- the presence of composition).
            argPar argVar = argTyFold argVar $ ArgTyAlg
              {ata_rec0 = mkRec0, ata_par1 = mkPar1,
               ata_rec1 = mkRec1, ata_comp = mkComp}
        metaDTyCon  = mkTyConTy (metaD metaDts)
        metaCTyCons = map mkTyConTy (metaC metaDts)
        metaSTyCons = map (map mkTyConTy) (metaS metaDts)
    return (mkD tycon)
--------------------------------------------------------------------------------
-- Meta-information
--------------------------------------------------------------------------------
-- | The auxiliary datatypes generated to carry metadata (names, fixities,
-- selector labels, ...) for one derived Generic instance.
data MetaTyCons = MetaTyCons { -- One meta datatype per datatype
                               metaD :: TyCon
                               -- One meta datatype per constructor
                             , metaC :: [TyCon]
                               -- One meta datatype per selector per constructor
                             , metaS :: [[TyCon]] }
-- Pretty-print the datatype tycon, then all constructor tycons, then all
-- selector tycons, stacked vertically.
instance Outputable MetaTyCons where
  ppr (MetaTyCons d c s) = ppr d $$ vcat (map ppr c) $$ vcat (map ppr (concat s))
-- | Flatten every metadata tycon (datatype, constructors, selectors) into
-- one 'Bag'.
metaTyCons2TyCons :: MetaTyCons -> Bag TyCon
metaTyCons2TyCons (MetaTyCons d c s) =
  listToBag (concat [[d], c, concat s])
-- Bindings for Datatype, Constructor, and Selector instances
-- | Build the method bindings for the metadata instances: one set for the
-- Datatype instance, one per constructor, and one per selector.
mkBindsMetaD :: FixityEnv -> TyCon
             -> ( LHsBinds RdrName      -- Datatype instance
                , [LHsBinds RdrName]    -- Constructor instances
                , [[LHsBinds RdrName]]) -- Selector instances
mkBindsMetaD fix_env tycon = (dtBinds, allConBinds, allSelBinds)
  where
    mkBag l = foldr1 unionBags
                [ unitBag (mkRdrFunBind (L loc name) matches)
                    | (name, matches) <- l ]
    -- isNewtype is only emitted for newtypes (its default covers data).
    dtBinds = mkBag ( [ (datatypeName_RDR, dtName_matches)
                      , (moduleName_RDR, moduleName_matches)
                      , (packageName_RDR, pkgName_matches)]
                   ++ ifElseEmpty (isNewTyCon tycon)
                        [ (isNewtypeName_RDR, isNewtype_matches) ] )
    allConBinds = map conBinds datacons
    -- conFixity/conIsRecord are only emitted when non-default (infix
    -- constructors and record constructors respectively).
    conBinds c = mkBag ( [ (conName_RDR, conName_matches c)]
                      ++ ifElseEmpty (dataConIsInfix c)
                           [ (conFixity_RDR, conFixity_matches c) ]
                      ++ ifElseEmpty (length (dataConFieldLabels c) > 0)
                           [ (conIsRecord_RDR, conIsRecord_matches c) ]
                       )
    ifElseEmpty p x = if p then x else []
    fixity c = case lookupFixity fix_env (dataConName c) of
                 Fixity n InfixL -> buildFix n leftAssocDataCon_RDR
                 Fixity n InfixR -> buildFix n rightAssocDataCon_RDR
                 Fixity n InfixN -> buildFix n notAssocDataCon_RDR
    buildFix n assoc = nlHsApps infixDataCon_RDR [nlHsVar assoc
                                                 , nlHsIntLit (toInteger n)]
    allSelBinds = map (map selBinds) datasels
    selBinds s = mkBag [(selName_RDR, selName_matches s)]
    loc = srcLocSpan (getSrcLoc tycon)
    -- All the string-returning methods are a single wildcard alternative.
    mkStringLHS s = [mkSimpleHsAlt nlWildPat (nlHsLit (mkHsString s))]
    datacons = tyConDataCons tycon
    datasels = map dataConFieldLabels datacons
    -- For a data-family instance, report the user-visible family name.
    tyConName_user = case tyConFamInst_maybe tycon of
                       Just (ptycon, _) -> tyConName ptycon
                       Nothing          -> tyConName tycon
    dtName_matches     = mkStringLHS . occNameString . nameOccName
                       $ tyConName_user
    moduleName_matches = mkStringLHS . moduleNameString . moduleName
                       . nameModule . tyConName $ tycon
    pkgName_matches    = mkStringLHS . packageKeyString . modulePackageKey
                       . nameModule . tyConName $ tycon
    isNewtype_matches  = [mkSimpleHsAlt nlWildPat (nlHsVar true_RDR)]
    conName_matches c  = mkStringLHS . occNameString . nameOccName
                       . dataConName $ c
    conFixity_matches c = [mkSimpleHsAlt nlWildPat (fixity c)]
    conIsRecord_matches _ = [mkSimpleHsAlt nlWildPat (nlHsVar true_RDR)]
    selName_matches s  = mkStringLHS (occNameString (nameOccName s))
--------------------------------------------------------------------------------
-- Dealing with sums
--------------------------------------------------------------------------------
-- | Build the @from@/@to@ alternatives for the whole (balanced) sum of
-- constructors.  An empty datatype gets a single error-raising alternative
-- in each direction.
mkSum :: GenericKind_ -- Generic or Generic1?
      -> US           -- Base for generating unique names
      -> TyCon        -- The type constructor
      -> [DataCon]    -- The data constructors
      -> ([Alt],      -- Alternatives for the T->Trep "from" function
          [Alt])      -- Alternatives for the Trep->T "to" function
-- Datatype without any constructors
mkSum _ _ tycon [] = ([from_alt], [to_alt])
  where
    from_alt = (nlWildPat, mkM1_E (makeError errMsgFrom))
    to_alt   = (mkM1_P nlWildPat, makeError errMsgTo)
               -- These M1s are meta-information for the datatype
    makeError s = nlHsApp (nlHsVar error_RDR) (nlHsLit (mkHsString s))
    tyConStr   = occNameString (nameOccName (tyConName tycon))
    errMsgFrom = "No generic representation for empty datatype " ++ tyConStr
    errMsgTo   = "No values for empty datatype " ++ tyConStr
-- Datatype with at least one constructor
mkSum gk_ us _ datacons =
  -- switch the payload of gk_ to be datacon-centric instead of tycon-centric
  unzip [ mk1Sum (gk2gkDC gk_ d) us i (length datacons) d
            | (d,i) <- zip datacons [1..] ]
-- Build the sum for a particular constructor
-- | Produce the @from@ and @to@ alternatives for constructor number @i@ of
-- @n@ (1-based), using fresh variables starting at @us@.
mk1Sum :: GenericKind_DC -- Generic or Generic1?
       -> US       -- Base for generating unique names
       -> Int      -- The index of this constructor
       -> Int      -- Total number of constructors
       -> DataCon  -- The data constructor
       -> (Alt,    -- Alternative for the T->Trep "from" function
           Alt)    -- Alternative for the Trep->T "to" function
mk1Sum gk_ us i n datacon = (from_alt, to_alt)
  where
    gk = forgetArgVar gk_
    -- Existentials already excluded
    argTys = dataConOrigArgTys datacon
    n_args = dataConSourceArity datacon
    -- One fresh local variable per constructor argument, paired with its type.
    datacon_varTys = zip (map mkGenericLocal [us .. us+n_args-1]) argTys
    datacon_vars   = map fst datacon_varTys
    us'            = us + n_args
    datacon_rdr    = getRdrName datacon
    from_alt     = (nlConVarPat datacon_rdr datacon_vars, from_alt_rhs)
    from_alt_rhs = mkM1_E (genLR_E i n (mkProd_E gk_ us' datacon_varTys))
    to_alt = (mkM1_P (genLR_P i n (mkProd_P gk us' datacon_vars)), to_alt_rhs)
             -- These M1s are meta-information for the datatype
    to_alt_rhs = case gk_ of
      Gen0_DC        -> nlHsVarApps datacon_rdr datacon_vars
      -- For Generic1 each bound variable is unwrapped (unK1/unPar1/unRec1,
      -- fmap'd through compositions) before rebuilding the constructor.
      Gen1_DC argVar -> nlHsApps datacon_rdr $ map argTo datacon_varTys
        where
          argTo (var, ty) = converter ty `nlHsApp` nlHsVar var where
            converter = argTyFold argVar $ ArgTyAlg
              {ata_rec0 = const $ nlHsVar unK1_RDR,
               ata_par1 = nlHsVar unPar1_RDR,
               ata_rec1 = const $ nlHsVar unRec1_RDR,
               ata_comp = \_ cnv -> (nlHsVar fmap_RDR `nlHsApp` cnv)
                                    `nlHsCompose` nlHsVar unComp1_RDR}
-- | Wrap a pattern in the L1/R1 injections that place alternative number
-- @i@ (1-based) inside a balanced sum of @n@ alternatives.
genLR_P :: Int -> Int -> LPat RdrName -> LPat RdrName
genLR_P i n p
  | n == 0    = error "impossible"
  | n == 1    = p
  | i <= half = nlConPat l1DataCon_RDR [genLR_P i half p]
  | otherwise = nlConPat r1DataCon_RDR [genLR_P (i - half) (n - half) p]
  where
    half = n `div` 2
-- | Wrap an expression in the L1/R1 injections that place alternative number
-- @i@ (1-based) inside a balanced sum of @n@ alternatives.
genLR_E :: Int -> Int -> LHsExpr RdrName -> LHsExpr RdrName
genLR_E i n e
  | n == 0    = error "impossible"
  | n == 1    = e
  | i <= half = nlHsVar l1DataCon_RDR `nlHsApp` genLR_E i half e
  | otherwise = nlHsVar r1DataCon_RDR `nlHsApp` genLR_E (i - half) (n - half) e
  where
    half = n `div` 2
--------------------------------------------------------------------------------
-- Dealing with products
--------------------------------------------------------------------------------
-- Build a product expression
-- | Fold the wrapped constructor arguments into a balanced @:*:@ product;
-- a nullary constructor becomes @U1@.
mkProd_E :: GenericKind_DC   -- Generic or Generic1?
         -> US               -- Base for unique names
         -> [(RdrName, Type)] -- List of variables matched on the lhs and their types
         -> LHsExpr RdrName  -- Resulting product expression
mkProd_E _   _ []     = mkM1_E (nlHsVar u1DataCon_RDR)
mkProd_E gk_ _ varTys = mkM1_E (foldBal prod appVars)
     -- These M1s are meta-information for the constructor
  where
    appVars  = map (wrapArg_E gk_) varTys
    prod a b = prodDataCon_RDR `nlHsApps` [a,b]
-- | Wrap a single constructor argument for the @from@ direction: @K1@ for
-- Generic; for Generic1, whichever of @K1@/@Par1@/@Rec1@/@Comp1@ 'argTyFold'
-- chooses for the argument's type.
wrapArg_E :: GenericKind_DC -> (RdrName, Type) -> LHsExpr RdrName
wrapArg_E Gen0_DC          (var, _)  = mkM1_E (k1DataCon_RDR `nlHsVarApps` [var])
                           -- This M1 is meta-information for the selector
wrapArg_E (Gen1_DC argVar) (var, ty) = mkM1_E $ converter ty `nlHsApp` nlHsVar var
                           -- This M1 is meta-information for the selector
  where converter = argTyFold argVar $ ArgTyAlg
          {ata_rec0 = const $ nlHsVar k1DataCon_RDR,
           ata_par1 = nlHsVar par1DataCon_RDR,
           ata_rec1 = const $ nlHsVar rec1DataCon_RDR,
           ata_comp = \_ cnv -> nlHsVar comp1DataCon_RDR `nlHsCompose`
                                  (nlHsVar fmap_RDR `nlHsApp` cnv)}
-- Build a product pattern
-- | Fold the wrapped variable patterns into a balanced @:*:@ product pattern;
-- a nullary constructor matches @U1@.
mkProd_P :: GenericKind  -- Gen0 or Gen1
         -> US           -- Base for unique names
         -> [RdrName]    -- List of variables to match
         -> LPat RdrName -- Resulting product pattern
mkProd_P _  _ []   = mkM1_P (nlNullaryConPat u1DataCon_RDR)
mkProd_P gk _ vars = mkM1_P (foldBal prod appVars)
     -- These M1s are meta-information for the constructor
  where
    appVars  = map (wrapArg_P gk) vars
    prod a b = prodDataCon_RDR `nlConPat` [a,b]
-- | Wrap a single variable pattern for the @to@ direction.
-- NOTE(review): the Gen1 case binds through M1 directly rather than K1 —
-- presumably because the Generic1 unwrapping happens on the RHS (see
-- 'mk1Sum'); confirm against the Gen1 to-alternative.
wrapArg_P :: GenericKind -> RdrName -> LPat RdrName
wrapArg_P Gen0 v = mkM1_P (k1DataCon_RDR `nlConVarPat` [v])
                   -- This M1 is meta-information for the selector
wrapArg_P Gen1 v = m1DataCon_RDR `nlConVarPat` [v]
-- | Fresh local variable name @g\<u\>@ for the given unique-supply value.
mkGenericLocal :: US -> RdrName
mkGenericLocal u = mkVarUnqual (mkFastString ('g' : show u))
-- | Apply the @M1@ meta-information wrapper to an expression.
mkM1_E :: LHsExpr RdrName -> LHsExpr RdrName
mkM1_E = nlHsApp (nlHsVar m1DataCon_RDR)
-- | Wrap a pattern in the @M1@ meta-information constructor.
mkM1_P :: LPat RdrName -> LPat RdrName
mkM1_P p = nlConPat m1DataCon_RDR [p]
-- | Build the expression @f . g@ using the composition operator's 'RdrName'.
nlHsCompose :: LHsExpr RdrName -> LHsExpr RdrName -> LHsExpr RdrName
nlHsCompose f g = nlHsApps compose_RDR [f, g]
-- | Variant of foldr1 for producing balanced lists
-- The 'error' thunk is only forced when the list is empty (see 'foldBal'',
-- which returns its second argument only in the @[]@ case).
foldBal :: (a -> a -> a) -> [a] -> a
foldBal op = foldBal' op (error "foldBal: empty list")
-- | Combine a list with a binary operator, splitting it in half at every
-- step so the resulting application tree is balanced.  The second argument
-- is returned only for the empty list.
foldBal' :: (a -> a -> a) -> a -> [a] -> a
foldBal' _  emptyCase []  = emptyCase
foldBal' _  _         [z] = z
foldBal' op emptyCase zs  =
  let (lhs, rhs) = splitAt (length zs `div` 2) zs
  in  foldBal' op emptyCase lhs `op` foldBal' op emptyCase rhs
| urbanslug/ghc | compiler/typecheck/TcGenGenerics.hs | bsd-3-clause | 37,401 | 0 | 23 | 11,995 | 7,361 | 3,917 | 3,444 | 498 | 6 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Functor
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- Functors: uniform action over a parameterized type, generalizing the
-- 'Data.List.map' function on lists.
module Data.Functor
(
Functor(fmap),
(<$),
($>),
(<$>),
void,
) where
import GHC.Base ( Functor(..), flip )
-- $setup
-- Allow the use of Prelude in doctests.
-- >>> import Prelude
infixl 4 <$>
-- | An infix synonym for 'fmap'.
--
-- The name of this operator is an allusion to '$'.
-- Note the similarities between their types:
--
-- > ($) :: (a -> b) -> a -> b
-- > (<$>) :: Functor f => (a -> b) -> f a -> f b
--
-- Whereas '$' is function application, '<$>' is function
-- application lifted over a 'Functor'.
--
-- ==== __Examples__
--
-- Convert from a @'Maybe' 'Int'@ to a @'Maybe' 'String'@ using 'show':
--
-- >>> show <$> Nothing
-- Nothing
-- >>> show <$> Just 3
-- Just "3"
--
-- Convert from an @'Either' 'Int' 'Int'@ to an @'Either' 'Int'@
-- 'String' using 'show':
--
-- >>> show <$> Left 17
-- Left 17
-- >>> show <$> Right 17
-- Right "17"
--
-- Double each element of a list:
--
-- >>> (*2) <$> [1,2,3]
-- [2,4,6]
--
-- Apply 'even' to the second element of a pair:
--
-- >>> even <$> (2,2)
-- (2,True)
--
(<$>) :: Functor f => (a -> b) -> f a -> f b
f <$> x = fmap f x
infixl 4 $>
-- | Flipped version of '<$'.
--
-- @since 4.7.0.0
--
-- ==== __Examples__
--
-- Replace the contents of a @'Maybe' 'Int'@ with a constant 'String':
--
-- >>> Nothing $> "foo"
-- Nothing
-- >>> Just 90210 $> "foo"
-- Just "foo"
--
-- Replace the contents of an @'Either' 'Int' 'Int'@ with a constant
-- 'String', resulting in an @'Either' 'Int' 'String'@:
--
-- >>> Left 8675309 $> "foo"
-- Left 8675309
-- >>> Right 8675309 $> "foo"
-- Right "foo"
--
-- Replace each element of a list with a constant 'String':
--
-- >>> [1,2,3] $> "foo"
-- ["foo","foo","foo"]
--
-- Replace the second element of a pair with a constant 'String':
--
-- >>> (1,2) $> "foo"
-- (1,"foo")
--
($>) :: Functor f => f a -> b -> f b
x $> b = b <$ x
-- | @'void' value@ discards or ignores the result of evaluation, such
-- as the return value of an 'System.IO.IO' action.
--
-- ==== __Examples__
--
-- Replace the contents of a @'Maybe' 'Int'@ with unit:
--
-- >>> void Nothing
-- Nothing
-- >>> void (Just 3)
-- Just ()
--
-- Replace the contents of an @'Either' 'Int' 'Int'@ with unit,
-- resulting in an @'Either' 'Int' '()'@:
--
-- >>> void (Left 8675309)
-- Left 8675309
-- >>> void (Right 8675309)
-- Right ()
--
-- Replace every element of a list with unit:
--
-- >>> void [1,2,3]
-- [(),(),()]
--
-- Replace the second element of a pair with unit:
--
-- >>> void (1,2)
-- (1,())
--
-- Discard the result of an 'System.IO.IO' action:
--
-- >>> mapM print [1,2]
-- 1
-- 2
-- [(),()]
-- >>> void $ mapM print [1,2]
-- 1
-- 2
--
void :: Functor f => f a -> f ()
void = (() <$)
| tolysz/prepare-ghcjs | spec-lts8/base/Data/Functor.hs | bsd-3-clause | 3,230 | 0 | 8 | 688 | 307 | 231 | 76 | 21 | 1 |
import Test.Cabal.Prelude
-- Test PATH-munging
-- | Build the package, then run the produced @hello-world@ executable and
-- check its output contains "1111".
main = setupAndCabalTest $ do
    setup_build []
    runExe' "hello-world" []
        >>= assertOutputContains "1111"
| mydaum/cabal | cabal-testsuite/PackageTests/BuildTools/Internal/setup.test.hs | bsd-3-clause | 165 | 0 | 10 | 34 | 42 | 20 | 22 | 5 | 1 |
-- Minimal fixture module: defines a single string constant and nothing else.
module Test where
test = "test" -- the only binding in this module
| ezyang/ghc | testsuite/tests/ghci/scripts/ghci062/Test.hs | bsd-3-clause | 33 | 0 | 4 | 7 | 9 | 6 | 3 | 2 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Test
-- Copyright : (c) Simon Marlow 2002
-- License : BSD-style
--
-- Maintainer : libraries@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- This module illustrates & tests most of the features of Haddock.
-- Testing references from the description: 'T', 'f', 'g', 'Visible.visible'.
--
-----------------------------------------------------------------------------
-- This is plain comment, ignored by Haddock.
{-# LANGUAGE Rank2Types, GADTs #-}
module Test (
-- Section headings are introduced with '-- *':
-- * Type declarations
-- Subsection headings are introduced with '-- **' and so on.
-- ** Data types
T(..), T2, T3(..), T4(..), T5(..), T6(..),
N1(..), N2(..), N3(..), N4, N5(..), N6(..), N7(..),
-- ** Records
R(..), R1(..),
-- | test that we can export record selectors on their own:
p, q, u,
-- * Class declarations
C(a,b), D(..), E, F(..),
-- | Test that we can export a class method on its own:
a,
-- * Function types
f, g,
-- * Auxiliary stuff
-- $aux1
-- $aux2
-- $aux3
-- $aux4
-- $aux5
-- $aux6
-- $aux7
-- $aux8
-- $aux9
-- $aux10
-- $aux11
-- $aux12
-- | This is some inline documentation in the export list
--
-- > a code block using bird-tracks
-- > each line must begin with > (which isn't significant unless it
-- > is at the beginning of the line).
-- * A hidden module
module Hidden,
-- * A visible module
module Visible,
{-| nested-style doc comments -}
-- * Existential \/ Universal types
Ex(..),
-- * Type signatures with argument docs
k, l, m, o,
-- * A section
-- and without an intervening comma:
-- ** A subsection
{-|
> a literal line
$ a non /literal/ line $
-}
f',
withType, withoutType
) where
import Hidden
import Visible
import Data.Maybe
-- NOTE(review): unexported and unreferenced in this file; presumably exists
-- only to exercise the 'Data.Maybe' import.
bla = Nothing
-- NOTE(review): the declarations below are Haddock-markup fixtures (the
-- module header says this file "illustrates & tests most of the features of
-- Haddock").  Their deliberately varied doc-comment styles are part of the
-- test and are kept exactly as written.
-- | This comment applies to the /following/ declaration
-- and it continues until the next non-comment line
data T a b
 = A Int (Maybe Float) -- ^ This comment describes the 'A' constructor
 | -- | This comment describes the 'B' constructor
   B (T a b, T Int Float) -- ^
-- | An abstract data declaration
data T2 a b = T2 a b
-- | A data declaration with no documentation annotations on the constructors
data T3 a b = A1 a | B1 b
-- A data declaration with no documentation annotations at all
data T4 a b = A2 a | B2 b
-- A data declaration documentation on the constructors only
data T5 a b
 = A3 a -- ^ documents 'A3'
 | B3 b -- ^ documents 'B3'
-- | Testing alternative comment styles
data T6
 -- | This is the doc for 'A4'
 = A4
 | B4
 | -- ^ This is the doc for 'B4'
   -- | This is the doc for 'C4'
   C4
-- | A newtype
newtype N1 a = N1 a
-- | A newtype with a fieldname
newtype N2 a b = N2 {n :: a b}
-- | A newtype with a fieldname, documentation on the field
newtype N3 a b = N3 {n3 :: a b -- ^ this is the 'n3' field
                    }
-- | An abstract newtype - we show this one as data rather than newtype because
-- the difference isn\'t visible to the programmer for an abstract type.
newtype N4 a b = N4 a
newtype N5 a b = N5 {n5 :: a b -- ^ no docs on the datatype or the constructor
                    }
newtype N6 a b = N6 {n6 :: a b
                    }
 -- ^ docs on the constructor only
-- | docs on the newtype and the constructor
newtype N7 a b = N7 {n7 :: a b
                    }
 -- ^ The 'N7' constructor
-- NOTE(review): class/instance Haddock fixtures; comment placement is
-- intentional and left untouched.
class (D a) => C a where
   -- |this is a description of the 'a' method
   a :: IO a
   b :: [a]
   -- ^ this is a description of the 'b' method
   c :: a -- c is hidden in the export list
   c = undefined
-- ^ This comment applies to the /previous/ declaration (the 'C' class)
class D a where
   d :: T a b
   e :: (a,a)
-- ^ This is a class declaration with no separate docs for the methods
instance D Int where
   d = undefined
   e = undefined
-- instance with a qualified class name
instance Test.D Float where
   d = undefined
   e = undefined
class E a where
  ee :: a
-- ^ This is a class declaration with no methods (or no methods exported)
-- This is a class declaration with no documentation at all
class F a where
  ff :: a
-- NOTE(review): record-field Haddock fixtures; the mixed '-- ^' / '-- |'
-- field annotations are the thing under test.
-- | This is the documentation for the 'R' record, which has four fields,
-- 'p', 'q', 'r', and 's'.
data R =
  -- | This is the 'C1' record constructor, with the following fields:
  C1 { p :: Int -- ^ This comment applies to the 'p' field
     , q :: forall a . a->a -- ^ This comment applies to the 'q' field
     , -- | This comment applies to both 'r' and 's'
       r,s :: Int
     }
  | C2 { t :: T1 -> (T2 Int Int)-> (T3 Bool Bool) -> (T4 Float Float) -> T5 () (),
         u,v :: Int
       }
  -- ^ This is the 'C2' record constructor, also with some fields:
-- | Testing different record commenting styles
data R1
  -- | This is the 'C3' record constructor
  = C3 {
        -- | The 's1' record selector
        s1 :: Int
        -- | The 's2' record selector
        , s2 :: Int
        , s3 :: Int -- NOTE: In the original examples/Test.hs in Haddock, there is an extra "," here.
                    -- Since GHC doesn't allow that, I have removed it in this file.
        -- ^ The 's3' record selector
        }
-- These section headers are only used when there is no export list to
-- give the structure of the documentation:
-- * This is a section header (level 1)
-- ** This is a section header (level 2)
-- *** This is a section header (level 3)
{-|
In a comment string we can refer to identifiers in scope with
single quotes like this: 'T', and we can refer to modules by
using double quotes: "Foo". We can add emphasis /like this/.
* This is a bulleted list
- This is the next item (different kind of bullet)
(1) This is an ordered list
2. This is the next item (different kind of bullet)
[cat] a small, furry, domesticated mammal
[pineapple] a fruit grown in the tropics
@
This is a block of code, which can include other markup: 'R'
formatting
is
significant
@
> this is another block of code
We can also include URLs in documentation: <http://www.haskell.org/>.
-}
-- NOTE(review): 'f' here is only the signature; its binding ('f = undefined')
-- appears at the bottom of the file with the other compile-only stubs.
f :: C a => a -> Int
-- | we can export foreign declarations too
foreign import ccall g :: Int -> IO CInt
-- | this doc string has a parse error in it: \'
h :: Int
h = 42
-- $aux1 This is some documentation that is attached to a name ($aux1)
-- rather than a source declaration. The documentation may be
-- referred to in the export list using its name.
--
-- @ code block in named doc @
-- $aux2 This is some documentation that is attached to a name ($aux2)
-- $aux3
-- @ code block on its own in named doc @
-- $aux4
--
-- @ code block on its own in named doc (after newline) @
{- $aux5 a nested, named doc comment
with a paragraph,
@ and a code block @
-}
-- some tests for various arrangements of code blocks:
{- $aux6
>test
>test1
@ test2
test3
@
-}
{- $aux7
@
test1
test2
@
-}
{- $aux8
>test3
>test4
-}
{- $aux9
@
test1
test2
@
>test3
>test4
-}
{- $aux10
>test3
>test4
@
test1
test2
@
-}
-- This one is currently wrong (Haddock 0.4). The @...@ part is
-- interpreted as part of the bird-tracked code block.
{- $aux11
aux11:
>test3
>test4
@
test1
test2
@
-}
-- $aux12
-- > foo
--
-- > bar
--
-- | A data-type using existential\/universal types
data Ex a
  = forall b . C b => Ex1 b
  | forall b . Ex2 b
  | forall b . C a => Ex3 b -- NOTE: I have added "forall b" here make GHC accept this file
  | Ex4 (forall a . a -> a)
-- NOTE(review): per-argument Haddock fixtures; 'k', 'l' and 'm' are
-- signatures only, with bindings among the stubs at the end of the file.
-- | This is a function with documentation for each argument
k :: T () () -- ^ This argument has type 'T'
  -> (T2 Int Int) -- ^ This argument has type 'T2 Int Int'
  -> (T3 Bool Bool -> T4 Float Float) -- ^ This argument has type @T3 Bool Bool -> T4 Float Float@
  -> T5 () () -- ^ This argument has a very long description that should
              -- hopefully cause some wrapping to happen when it is finally
              -- rendered by Haddock in the generated HTML page.
  -> IO () -- ^ This is the result type
-- This function has arg docs but no docs for the function itself
l :: (Int, Int, Float) -- ^ takes a triple
  -> Int -- ^ returns an 'Int'
-- | This function has some arg docs
m :: R
  -> N1 () -- ^ one of the arguments
  -> IO Int -- ^ and the return value
-- | This function has some arg docs but not a return value doc
-- can't use the original name ('n') with GHC
newn :: R -- ^ one of the arguments, an 'R'
  -> N1 () -- ^ one of the arguments
  -> IO Int
newn = undefined
-- | A foreign import with argument docs
foreign import ccall unsafe
 o :: Float -- ^ The input float
   -> IO Float -- ^ The output float
-- | We should be able to escape this: \#\#\#
-- p :: Int
-- can't use the above original definition with GHC
newp :: Int
newp = undefined
-- | a function with a prime can be referred to as 'f''
-- but f' doesn't get link'd 'f\''
f' :: Int
-- | Comment on a definition without type signature
withoutType = undefined
-- | Comment on a definition with type signature
withType :: Int
withType = 1
-- Add some definitions here so that this file can be compiled with GHC
-- (stub bindings for the signatures documented above; never evaluated).
data T1
f = undefined
f' = undefined
type CInt = Int
k = undefined
l = undefined
m = undefined
| DavidAlphaFox/ghc | utils/haddock/html-test/src/Test.hs | bsd-3-clause | 9,158 | 74 | 10 | 2,263 | 1,219 | 796 | 423 | -1 | -1 |
-- From a blog post: http://www.jonmsterling.com/posts/2012-01-12-unifying-monoids-and-monads-with-polymorphic-kinds.html
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances, FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE TypeFamilies #-}
module Main where
import Control.Monad (Monad(..), join, ap, liftM)
import Data.Monoid (Monoid(..))
-- First we define the type class Monoidy:
-- | A monoid-like structure abstracted over the "arrow" @to@ it lives in,
-- so the same class covers value-level monoids (with @to = (->)@) and
-- monads (with @to = NT@, natural transformations).  The associated types
-- give the identity object and the composition (product) of the structure.
class Monoidy (to :: k0 -> k1 -> *) (m :: k1) where
  type MComp to m :: k1 -> k1 -> k0
  type MId to m :: k0
  munit :: MId to m `to` m
  mjoin :: MComp to m m m `to` m
-- We use functional dependencies to help the typechecker understand that
-- m and ~> uniquely determine comp (times) and id.
-- This kind of type class would not have been possible in previous
-- versions of GHC; with the new kind system, however, we can abstract
-- over kinds!2 Now, let’s create types for the additive and
-- multiplicative monoids over the natural numbers:
newtype Sum a = Sum a deriving Show
newtype Product a = Product a deriving Show
-- Additive monoid: identity 0, composition is pairing combined with (+).
instance Num a ⇒ Monoidy (→) (Sum a) where
  type MComp (→) (Sum a) = (,)
  type MId (→) (Sum a) = ()
  munit _ = Sum 0
  mjoin (Sum x, Sum y) = Sum $ x + y
-- Multiplicative monoid: identity 1, composition is pairing combined with (*).
instance Num a ⇒ Monoidy (→) (Product a) where
  type MComp (→) (Product a) = (,)
  type MId (→) (Product a) = ()
  munit _ = Product 1
  mjoin (Product x, Product y) = Product $ x * y
-- It will be slightly more complicated to make a monadic instance with
-- Monoidy. First, we need to define the identity functor, a type for
-- natural transformations, and a type for functor composition:
data Id α = Id { runId :: α } deriving Functor
-- A natural transformation (Λ f g α. (f α) → (g α)) may be encoded in Haskell as follows:
data NT f g = NT { runNT :: ∀ α. f α → g α }
-- Functor composition (Λ f g α. f (g α)) is encoded as follows:
data FC f g α = FC { runFC :: f (g α) }
-- Now, let us define some type T which should be a monad:
data Wrapper a = Wrapper { runWrapper :: a } deriving (Show, Functor)
-- A monad is a monoid in the category of endofunctors: unit/join are
-- natural transformations, with Id as identity and FC as composition.
instance Monoidy NT Wrapper where
  type MComp NT Wrapper = FC
  type MId NT Wrapper = Id
  munit = NT $ Wrapper . runId
  mjoin = NT $ runWrapper . runFC
-- With these defined, we can use them as follows:
test1 = do { print (mjoin (munit (), Sum 2))
-- Sum 2
; print (mjoin (Product 2, Product 3))
-- Product 6
; print (runNT mjoin $ FC $ Wrapper (Wrapper "hello, world"))
-- Wrapper {runWrapper = "hello, world" }
}
-- We can even provide a special binary operator for the appropriate monoids as follows:
(<+>) :: (Monoidy (→) m, MId (→) m ~ (), MComp (→) m ~ (,))
⇒ m → m → m
(<+>) = curry mjoin
test2 = print (Sum 1 <+> Sum 2 <+> Sum 4) -- Sum 7
-- Now, all the extra wrapping that Haskell requires for encoding this is
-- rather cumbersome in actual use. So, we can give traditional Monad and
-- Monoid instances for instances of Monoidy:
instance (MId (→) m ~ (), MComp (→) m ~ (,), Monoidy (→) m)
⇒ Monoid m where
mempty = munit ()
mappend = curry mjoin
instance Applicative Wrapper where
pure = return
(<*>) = ap
instance Monad Wrapper where
return x = runNT munit $ Id x
x >>= f = runNT mjoin $ FC (f `fmap` x)
-- And so the following works:
test3
= do { print (mappend mempty (Sum 2))
-- Sum 2
; print (mappend (Product 2) (Product 3))
-- Product 6
; print (join $ Wrapper $ Wrapper "hello")
-- Wrapper {runWrapper = "hello" }
; print (Wrapper "hello, world" >>= return)
-- Wrapper {runWrapper = "hello, world" }
}
main = test1 >> test2 >> test3
| urbanslug/ghc | testsuite/tests/polykinds/MonoidsTF.hs | bsd-3-clause | 3,963 | 2 | 12 | 965 | 1,025 | 568 | 457 | 62 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
module D where
import A
import C
-- Compiler driver test case: 'Fingerprint' and 'mkTyConApp' come from the
-- test-local modules A and C imported above, not from base.
-- NOTE(review): presumably exercises module-graph/recompilation behaviour;
-- the value itself is never inspected.
typeRepX :: Fingerprint
typeRepX = mkTyConApp Fingerprint
| urbanslug/ghc | testsuite/tests/driver/T7373/D.hs | bsd-3-clause | 129 | 0 | 5 | 21 | 24 | 15 | 9 | 6 | 1 |
{-|
Module : Control.Spear
Description : Thicker Arrows
Copyright : Matt Gambogi, Travis Whitaker 2015
License : MIT
Maintainer : m@gambogi.com
Stability : Provisional
Portability : Portable
Thicker Arrows.
-}
{-# LANGUAGE DefaultSignatures, TypeFamilies #-}
module Control.Spear where
import Control.Arrow
-- | A 'Spear' is an 'Arrow' together with a parameter space: a point in
-- the parameter space can be turned into a concrete arrow
-- ('applySpear'), and the parameters can be updated from observations
-- ('train').
class Arrow s => Spear (s :: * -> * -> *) where
  -- | The type of parameters configuring arrows of this family.
  type ParamSpace s :: *
  -- | The type of training observations consumed by 'train'.
  type Observation s :: *
  -- | Instantiate an arrow from a point in the parameter space.
  applySpear :: ParamSpace s -> s a b
  -- | Fold one observation into the parameters, yielding updated ones.
  train :: ParamSpace s -> Observation s -> ParamSpace s

-- | Operator alias for 'applySpear'.
(<^>) :: Spear s => ParamSpace s -> s a b
(<^>) = applySpear
| gambogi/spear | src/Control/Spear.hs | mit | 597 | 0 | 9 | 138 | 129 | 70 | 59 | 10 | 1 |
module Protocol where
import Bitcoin.Protocol
import qualified Data.Serialize as S
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Control.Applicative
import Test.QuickCheck
import Network.Socket
import Data.List
import Data.Word
--instance (Arbitrary w, Bits w) => Arbitrary (Set.T w a) where arbitrary = Set.Cons <$> arbitrary
-- | Only plain IPv4 addresses are generated (random port and host word).
instance Arbitrary SockAddr where
  arbitrary = do
    a <- SockAddrInet <$> (PortNum <$> arbitrary) <*> arbitrary
    elements [a]
-- | Service flag lists are kept duplicate-free via 'nub'.
instance Arbitrary Services where
  arbitrary = Services . nub <$> listOf (elements [NodeNetwork])
instance Arbitrary NetworkAddress where
  arbitrary = NetworkAddress <$> arbitrary <*> arbitrary
-- | VarInts are drawn from the full encodable range [0, maxBound :: Word64].
instance Arbitrary VarInt where
  arbitrary = VarInt <$> choose (0, fromIntegral (maxBound :: Word64))
instance Arbitrary T.Text where
  arbitrary = T.pack <$> arbitrary
instance Arbitrary VarString where
  arbitrary = VarString <$> arbitrary
instance Arbitrary Version where
  arbitrary = Version <$> arbitrary
-- | Timestamps come from non-negative machine words, so they fit the
-- wire encoding.
instance Arbitrary Timestamp where
  arbitrary = Timestamp . fromIntegral <$> (arbitrary :: Gen Word)
instance Arbitrary MsgVersion where
  arbitrary = MsgVersion <$> arbitrary
                         <*> arbitrary
                         <*> arbitrary
                         <*> arbitrary
                         <*> arbitrary
                         <*> arbitrary
                         <*> arbitrary
                         <*> arbitrary
                         <*> arbitrary

-- | Round-trip property: decoding an encoded value yields it back.
testCoding x = (S.decode . S.encode) x == Right x
| fhaust/bitcoin | test/Protocol.hs | mit | 1,584 | 0 | 14 | 448 | 356 | 194 | 162 | 39 | 1 |
{- Cameras module. Exports all modules in Cameras. -}
module Graphics.ThreeJs.Cameras
( -- * Modules
module Graphics.ThreeJs.Cameras.PerspectiveCamera
) where
import Graphics.ThreeJs.Cameras.PerspectiveCamera
| sanghakchun/three-js-haste | src/Graphics/ThreeJs/Cameras.hs | mit | 219 | 0 | 5 | 31 | 26 | 19 | 7 | 4 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE CPP #-}
-- | Debugging helper functions, for internal use only
module Dissent.Internal.Debug where
import Debug.Trace
-- | Trace-log a message alongside a value.  In builds compiled with
-- @-DDEBUG@ this is 'Debug.Trace.trace'; otherwise it compiles to a
-- no-op that returns its second argument untouched, so debug logging
-- has zero cost in release builds.
log :: String -> a -> a
#ifdef DEBUG
log = trace
#else
log _ ret = ret
#endif
| solatis/dissent | src/Dissent/Internal/Debug.hs | mit | 318 | 0 | 6 | 56 | 37 | 25 | 12 | 6 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module Partials.AdminBlock (AdminBlock(..)) where
import Control.Monad (forM_, when)
import Data.List (intercalate, nub, sort)
import Data.Monoid ((<>))
import Data.String.QQ
import qualified Data.Text as T (Text, pack, unlines,
unpack)
import qualified Data.Text.Lazy as TL (Text, toStrict, unpack)
import GHC.Exts (fromString)
import Text.Blaze.Html.Renderer.Text (renderHtml)
import Text.Blaze.Html5 ((!))
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import qualified Web.Spock as Sp
-- nanopage imports
import Internal.FileDB (FileDB (..), Page (..), Params,
categories, getPagesNoContent,
keywords, slug, tags, title)
import Internal.Partial
-- | Partial "AdminBlock"
data AdminBlock = AdminBlock
instance Partial_ AdminBlock where
partialRoutes_ _ = [route]
partialRender_ = _partial
partialName_ _ = "adminblock"
-- | Spock route backing the AJAX calls made by the 'javascript' snippet:
-- @GET admin/getpages?tag=...@ (or @keyword=@ / @category=@) answers
-- with an HTML fragment listing the matching pages.
route :: Sp.SpockM FileDB () () ()
route = Sp.get "admin/getpages" $ do
  params <- Sp.params
  renderAdminTags params
-- | Does the page carry the given attribute value?  The first tuple
-- component selects which page attribute ("tag", "keyword" or
-- "category") is consulted; any other selector never matches.
isPageMatch :: (T.Text, T.Text) -> Page -> Bool
isPageMatch (selector, wanted) page =
  case selector of
    "tag"      -> hasValue tags
    "keyword"  -> hasValue keywords
    "category" -> hasValue categories
    _          -> False
  where
    -- attribute lists are lazy Text; compare in strict Text space
    hasValue attr = wanted `elem` map TL.toStrict (attr page)
-- | Render pages as a Materialize "collection" list: one link per page,
-- pointing at the page's slug and labelled with its title.
renderPagesList :: [Page] -> H.Html
renderPagesList ps = H.ul ! A.class_ "collection" $
  forM_ ps $ \p -> H.a ! A.class_ "collection-item" ! (A.href . fromString) (TL.unpack $ slug p) $ H.toHtml (title p)
-- | Answer an admin AJAX query with the pages matching the first query
-- parameter (e.g. @tag=haskell@).
-- NOTE(review): 'head params' is only safe if 'Sp.text' aborts the
-- action when @params@ is empty (Spock actions normally short-circuit
-- after sending a response) — confirm; otherwise this is partial.
renderAdminTags :: [(T.Text, T.Text)] -> Sp.ActionCtxT () (Sp.WebStateM FileDB () ()) ()
renderAdminTags params = do
  pages <- Sp.runQuery (return . getPagesNoContent)
  when (null params) (Sp.text "")
  let param = head params
  let pages' = filter (isPageMatch param) pages
  let pagesList = renderPagesList pages'
  -- Emit: "Pages with <kind> <value>:" followed by the matching list.
  (Sp.html . TL.toStrict . renderHtml) $ do
    H.p $ do
      H.toHtml ("Pages with " <> fst param <> " ")
      H.b $ H.i $ H.toHtml $ snd param
      H.html ":"
    pagesList
-- | Render the admin block partial: inline CSS, the list of all pages,
-- and clickable chips for every tag, category and keyword.  Each chip
-- group is followed by an empty div ("#tag", "#category", "#keyword")
-- that the AJAX handler ('route') fills with the matching pages.
_partial :: AdminBlock -> FileDB -> Page -> Params -> H.Html
_partial _ db p _ = do
  let pages = getPagesNoContent db
  H.style $ H.text style
  H.div ! A.class_ (fromString "adminblock") $ do
    H.h2 $ H.toHtml ("Pages" :: TL.Text)
    renderPagesList pages
    H.h2 $ H.toHtml ("Tags" :: TL.Text)
    H.ul ! hClass $ allTags pages
    H.div ! A.id "tag" $ ""
    H.h2 $ H.toHtml ("Categories" :: TL.Text)
    H.ul ! hClass $ allCategories pages
    H.div ! A.id "category" $ ""
    H.h2 $ H.toHtml ("Keywords" :: TL.Text)
    H.ul ! hClass $ allKeywords pages
    H.div ! A.id "keyword" $ ""
  -- helper used by the onclick handlers of the chips above
  H.script ! A.type_ "text/javascript" $ H.text javascript
javascript :: T.Text
javascript = [s|
function getPages(query, target) {
$.ajax({
url: "admin/getpages?" + query,
success: function(result) { $(target).html(result); },
cache:true
}
);
}
|]
style :: T.Text
style = [s|
ul.hlist > a.btn {
margin-right: 2px;
margin-bottom: 2px;
}
.btn-small {
height: 24px;
line-height: 24px;
padding: 0 0.5rem;
border-radius: 9px;
}
|]
hClass = A.class_ (fromString "hlist")
liClass = A.class_ (fromString "chip")
-- | Remove duplicates, keeping the *last* occurrence of each element
-- (plain 'nub' keeps the first).  An element survives exactly when it
-- does not reappear later in the list.  Quadratic, like 'nub'; fine for
-- the small attribute lists this module handles.
unique :: Eq a => [a] -> [a]
unique [] = []
unique (x : rest)
  | x `elem` rest = unique rest
  | otherwise     = x : unique rest
-- | Materialize-styled button that triggers the AJAX lookup for one
-- attribute value; clicking it runs e.g.
-- @javascript:getPages("tag=haskell", "#tag")@ so the result replaces
-- the matching placeholder div.  The button label is the value itself.
mkButton :: TL.Text -> TL.Text -> H.Html
mkButton typ name = H.a ! A.class_ (fromString "waves-effect waves-light btn btn-small") !
    A.href (fromString js) $ H.toHtml name where
  js = "javascript:getPages(\"" ++ typ' ++ "=" ++ name' ++ "\", " ++ ("\"#" <> typ') ++ "\")"
  typ' = TL.unpack typ
  name' = TL.unpack name
-- | Render one 'mkButton' per distinct attribute value found across the
-- given pages.  @f@ projects the attribute list (tags, keywords or
-- categories) out of a page; the collected values are de-duplicated
-- (keeping last occurrences, see 'unique') and sorted before rendering.
listAll :: TL.Text -> (Page -> [TL.Text]) -> [Page] -> H.Html
listAll name f ps = forM_ ts (mkButton name) where
  -- concatMap replaces the previous 'concat $ map'; sorting after
  -- de-duplication yields the same ordered result as before.
  ts = sort (unique (concatMap f ps)) :: [TL.Text]

-- Button lists for each supported page attribute.
allTags = listAll "tag" tags
allKeywords = listAll "keyword" keywords
allCategories = listAll "category" categories
| mayeranalytics/nanoPage | src/Partials/AdminBlock.hs | mit | 4,446 | 0 | 17 | 1,289 | 1,457 | 768 | 689 | 88 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- This example shows how to run a CPU-intensive thread in the
-- background while keeping the UI responsive. All FLTK calls are
-- done on the main thread.
--
-- Press the "start background thread" button to start the
-- CPU-intensive thread. Then observe that clicking the "increment"
-- button remains responsive.
module Main where
import qualified Graphics.UI.FLTK.LowLevel.FL as FL
import Graphics.UI.FLTK.LowLevel.Fl_Types
import Graphics.UI.FLTK.LowLevel.FLTKHS
import Control.Concurrent
import Control.Concurrent.STM
import Control.Exception (evaluate)
import Control.Monad
import Data.IORef
import qualified Data.Text as T
-- | Build the demo UI: a button that launches the CPU-heavy worker, a
-- click counter that proves the UI stays responsive, and an output
-- widget showing primes found by the worker.  All FLTK calls happen on
-- this (main) thread; the worker communicates only through a 'TChan'
-- that 'tick' polls on a timer.
main :: IO ()
main = do
  -- Set up the window and widgets.
  w <- windowNew (Size (Width 260) (Height 110)) Nothing Nothing
  startButton <- buttonNew (Rectangle (Position (X 15) (Y 10)) (Size (Width 230) (Height 25))) (Just "start background thread")
  b <- outputNew (Rectangle (Position (X 135) (Y 40)) (Size (Width 110) (Height 25))) (Just "primes ") (Just FlNormalOutput)
  button <- buttonNew (Rectangle (Position (X 15) (Y 75)) (Size (Width 110) (Height 25))) (Just "increment")
  counter <- outputNew (Rectangle (Position (X 135) (Y 75)) (Size (Width 110) (Height 25))) Nothing (Just FlNormalOutput)
  clearVisibleFocus b
  clearVisibleFocus counter
  -- The communication channel between the CPU-heavy thread and the
  -- main thread.
  c <- newTChanIO
  -- When the start button is pressed, start the CPU-heavy thread.
  setCallback startButton $ \_ -> do
    void $ forkIO $ computationThread c
  -- Start the click counter at zero.
  counterRef <- newIORef (0 :: Integer)
  _ <- setValue counter (T.pack (show (0 :: Integer)))
  -- When the button is pressed, increment the counter and update the
  -- label.
  setCallback button $ \_ -> do
    modifyIORef counterRef (+1)
    x <- readIORef counterRef
    void $ setValue counter (T.pack (show x))
  -- Every so often, check for messages from our worker thread.
  FL.addTimeout 0.025 (tick b c)
  -- Start the UI.
  showWidget w
  _ <- FL.run
  return ()
-- Check for a message from our worker thread. If there is a message,
-- gobble all the messages up and set the label to the contents on the
-- most recent message.
-- | Timer callback: if the worker queued anything, drain the channel and
-- display only the most recent value in the output widget.  Re-arms
-- itself every 25 ms, so all widget updates stay on the main thread.
tick :: Ref Output -> TChan Integer -> IO ()
tick b c = do
  mx <- atomically $ tryReadTChan c
  case mx of
    Nothing -> return ()
    Just x -> inner x
  _ <- FL.repeatTimeout 0.025 (tick b c)
  return ()
  where inner x = do
          -- keep gobbling messages until the channel is empty;
          -- the last value seen wins and becomes the label
          mx <- atomically $ tryReadTChan c
          case mx of
            Nothing -> void $ setValue b (T.pack (show x))
            Just x' -> inner x'
-- Very slow prime-testing predicate.
-- | Deliberately naive primality test — trial division by every smaller
-- candidate — so the background thread has real CPU work to chew on.
isPrime :: Integer -> Bool
isPrime 1 = False
isPrime n = null [d | d <- [2 .. n - 1], n `mod` d == 0]
-- Write prime numbers to a channel forever.
-- | Worker thread: search the integers from one million upward for
-- primes and push each one onto the channel.  'evaluate' forces each
-- element on this thread before it is written, keeping the expensive
-- work off the UI thread.
computationThread :: TChan Integer -> IO ()
computationThread channel = do
  let primes = filter isPrime [1000000..]
  forM_ primes $ \p -> do
    _ <- evaluate p
    atomically $ writeTChan channel p
| deech/fltkhs-demos | src/Examples/threads.hs | mit | 3,036 | 0 | 17 | 633 | 935 | 468 | 467 | 55 | 3 |
import Test.HUnit (Assertion, (@=?), runTestTT, Test(..), Counts(..))
import System.Exit (ExitCode(..), exitWith)
import Sublist (Sublist(Equal, Sublist, Superlist, Unequal), sublist)
-- | Run the HUnit action and exit with code 1 if any test failed or
-- errored, so the process exit status reflects the suite result.
exitProperly :: IO Counts -> IO ()
exitProperly m = do
  counts <- m
  exitWith $ if failures counts /= 0 || errors counts /= 0 then ExitFailure 1 else ExitSuccess
-- | Wrap an assertion as a labelled HUnit test.
testCase :: String -> Assertion -> Test
testCase label assertion = TestLabel label (TestCase assertion)
-- | Entry point: run every sublist test and exit accordingly.
main :: IO ()
main = exitProperly $ runTestTT $ TestList
  [ TestList sublistTests ]
sublistTests :: [Test]
sublistTests =
[ testCase "empty equals empty" $ do
Equal @=? sublist "" ""
, testCase "empty is a sublist of anything" $ do
Sublist @=? sublist "" "asdf"
, testCase "anything is a superlist of empty" $ do
Superlist @=? sublist "asdf" ""
, testCase "1 is not 2" $ do
Unequal @=? sublist "1" "2"
, testCase "compare larger equal lists" $ do
let xs = replicate 1000 'x'
Equal @=? sublist xs xs
, testCase "sublist at start" $ do
Sublist @=? sublist "123" "12345"
, testCase "sublist in middle" $ do
Sublist @=? sublist "432" "54321"
, testCase "sublist at end" $ do
Sublist @=? sublist "345" "12345"
, testCase "partially matching sublist at start" $ do
Sublist @=? sublist "112" "1112"
, testCase "sublist early in huge list" $ do
Sublist @=? sublist [3, 4, 5] [1 .. 1000000 :: Int]
, testCase "huge sublist not in huge list" $ do
Unequal @=? sublist [10 .. 1000001] [1 .. 1000000 :: Int]
, testCase "superlist at start" $ do
Superlist @=? sublist "12345" "123"
, testCase "superlist in middle" $ do
Superlist @=? sublist "54321" "432"
, testCase "superlist at end" $ do
Superlist @=? sublist "12345" "345"
, testCase "partially matching superlist at start" $ do
Superlist @=? sublist "1112" "112"
, testCase "superlist early in huge list" $ do
Superlist @=? sublist [1 .. 1000000] [3, 4, 5 :: Int]
, testCase "recurring values sublist" $ do
Sublist @=? sublist "12123" "1231212321"
, testCase "recurring values unequal" $ do
Unequal @=? sublist "12123" "1231232321"
]
| dzeban/haskell-exercism | sublist/sublist_test.hs | mit | 2,152 | 0 | 12 | 474 | 679 | 342 | 337 | 51 | 2 |
{- |
Module : $Header$
Description : Abstract syntax for propositional logic extended with QBFs
Copyright : (c) Jonathan von Schroeder, DFKI GmbH 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : <jonathan.von_schroeder@dfki.de>
Stability : experimental
Portability : portable
Definition of abstract syntax for propositional logic extended with QBFs
Ref.
<http://en.wikipedia.org/wiki/Propositional_logic>
<http://www.voronkov.com/lics.cgi>
-}
module QBF.AS_BASIC_QBF
( FORMULA (..) -- datatype for Propositional Formulas
, BASICITEMS (..) -- Items of a Basic Spec
, BASICSPEC (..) -- Basic Spec
, SYMBITEMS (..) -- List of symbols
, SYMB (..) -- Symbols
, SYMBMAPITEMS (..) -- Symbol map
, SYMBORMAP (..) -- Symbol or symbol map
, PREDITEM (..) -- Predicates
, isPrimForm
, ID (..)
) where
import Common.Id as Id
import Common.Doc
import Common.DocUtils
import Common.Keywords
import Common.AS_Annotation as AS_Anno
import qualified Data.List as List
import Data.Maybe (isJust)
-- DrIFT command
{-! global: GetRange !-}
-- | predicates = propositions
data PREDITEM = PredItem [Id.Token] Id.Range
deriving Show
newtype BASICSPEC = BasicSpec [AS_Anno.Annoted BASICITEMS]
deriving Show
data BASICITEMS =
PredDecl PREDITEM
| AxiomItems [AS_Anno.Annoted FORMULA]
-- pos: dots
deriving Show
-- | Datatype for QBF formulas
data FORMULA =
FalseAtom Id.Range
-- pos: "False
| TrueAtom Id.Range
-- pos: "True"
| Predication Id.Token
-- pos: Propositional Identifiers
| Negation FORMULA Id.Range
-- pos: not
| Conjunction [FORMULA] Id.Range
-- pos: "/\"s
| Disjunction [FORMULA] Id.Range
-- pos: "\/"s
| Implication FORMULA FORMULA Id.Range
-- pos: "=>"
| Equivalence FORMULA FORMULA Id.Range
-- pos: "<=>"
| ForAll [Id.Token] FORMULA Id.Range
| Exists [Id.Token] FORMULA Id.Range
deriving (Show, Ord)
data ID = ID Id.Token (Maybe Id.Token)
-- | Equality on variable pairings: a fully-specified pairing is equal
-- up to swapping its two components, and a half-specified pairing
-- @ID t Nothing@ matches either component of the other side.
instance Eq ID where
  ID t1 (Just t2) == ID t3 (Just t4) =
    ((t1 == t3) && (t2 == t4))
    || ((t2 == t3) && (t1 == t4))
  ID t1 Nothing == ID t2 t3 = (t1 == t2) || (Just t1 == t3)
  ID _ (Just _) == ID _ Nothing = False
{- two QBFs are equivalent if bound variables
can be renamed such that the QBFs are equal -}
{- | Try to extend a set of variable pairings so that the two formulas
become syntactically equal under a renaming of bound variables.
@ts@ / @ts1@ list the variables currently bound in the first / second
formula; 'Nothing' means no consistent renaming exists.

Bug fix: the 'Exists' case used to recurse on the very same pair of
'Exists' formulas, looping forever (so @Exists ... == Exists ...@ in
'Eq' 'FORMULA' never terminated).  It now delegates to the 'ForAll'
case, mirroring how 'Disjunction' delegates to 'Conjunction', since
binding behaves identically for both quantifiers here. -}
qbfMakeEqual :: Maybe [ID] -> FORMULA -> [Id.Token]
             -> FORMULA -> [Id.Token] -> Maybe [ID]
qbfMakeEqual (Just ids) f ts f1 ts1 = if length ts /= length ts1 then
    Nothing
  else case (f, f1) of
    (Predication t, Predication t1)
      -- identical names always match
      | t == t1 -> Just ids
      -- both bound: record (or re-check) the pairing t <-> t1
      | t `elem` ts && t1 `elem` ts1 -> let tt1 = ID t (Just t1) in
        if tt1 `elem` ids then
          Just ids
        else
          if ID t Nothing `notElem` ids && ID t1 Nothing `notElem` ids then
            Just (tt1 : ids)
          else
            Nothing
      | otherwise -> Nothing
    (Negation f_ _, Negation f1_ _) -> qbfMakeEqual (Just ids) f_ ts f1_ ts1
    -- note: empty conjunction lists fall through to the catch-all below
    (Conjunction (f_ : fs) _, Conjunction (f1_ : fs1) _) ->
      if length fs /= length fs1 then Nothing else
        case r of
          Nothing -> Nothing
          _ -> qbfMakeEqual r
               (Conjunction fs nullRange) ts
               (Conjunction fs1 nullRange) ts1
      where
        r = qbfMakeEqual (Just ids) f_ ts f1_ ts1
    -- disjunction is handled exactly like conjunction
    (Disjunction fs r, Disjunction fs1 r1) -> qbfMakeEqual (Just ids)
      (Conjunction fs r) ts (Conjunction fs1 r1) ts1
    (Implication f_ f1_ _, Implication f2 f3 _) -> case r of
        Nothing -> Nothing
        _ -> qbfMakeEqual r f1_ ts f3 ts1
      where
        r = qbfMakeEqual (Just ids) f_ ts f2 ts1
    (Equivalence f_ f1_ r1, Equivalence f2 f3 _) -> qbfMakeEqual (Just ids)
      (Implication f_ f1_ r1) ts
      (Implication f2 f3 r1) ts1
    (ForAll ts_ f_ _, ForAll ts1_ f1_ _) -> case r of
        Nothing -> Nothing
        -- keep only the new pairings that do not leak across scopes
        (Just ids_) -> Just (ids ++ filter (\ (ID x my) ->
          let Just y = my in
          (x `elem` ts_ && y `notElem` ts1_) ||
          (x `elem` ts1_ && y `notElem` ts_)) d)
          where
            d = ids_ List.\\ ids
      where
        r = qbfMakeEqual (Just ids) f_ (ts ++ ts_) f1_ (ts1 ++ ts1_)
    -- delegate to ForAll (previously recursed on itself and diverged)
    (Exists ts_ f_ r, Exists ts1_ f1_ r1) -> qbfMakeEqual (Just ids)
      (ForAll ts_ f_ r) ts
      (ForAll ts1_ f1_ r1) ts1
    (_1, _2) -> Nothing
qbfMakeEqual Nothing _ _ _ _ = Nothing
-- ranges are always equal (see Common/Id.hs) - thus they can be ignored
-- | Syntactic equality; quantified formulas are compared modulo renaming
-- of bound variables (via 'qbfMakeEqual').  Ranges are always equal
-- (see Common/Id.hs), so derived equality on them can be ignored.
instance Eq FORMULA where
  FalseAtom _ == FalseAtom _ = True
  TrueAtom _ == TrueAtom _ = True
  Predication t == Predication t1 = t == t1
  Negation f _ == Negation f1 _ = f == f1
  Conjunction xs _ == Conjunction xs1 _ = xs == xs1
  Disjunction xs _ == Disjunction xs1 _ = xs == xs1
  Implication f f1 _ == Implication f2 f3 _ = (f == f2) && (f1 == f3)
  Equivalence f f1 _ == Equivalence f2 f3 _ = (f == f2) && (f1 == f3)
  ForAll ts f _ == ForAll ts1 f1 _ = isJust (qbfMakeEqual (Just []) f ts f1 ts1)
  Exists ts f _ == Exists ts1 f1 _ = isJust (qbfMakeEqual (Just []) f ts f1 ts1)
  _ == _ = False
data SYMBITEMS = SymbItems [SYMB] Id.Range
-- pos: SYMB_KIND, commas
deriving (Show, Eq)
newtype SYMB = SymbId Id.Token
-- pos: colon
deriving (Show, Eq)
data SYMBMAPITEMS = SymbMapItems [SYMBORMAP] Id.Range
-- pos: SYMB_KIND, commas
deriving (Show, Eq)
data SYMBORMAP = Symb SYMB
| SymbMap SYMB SYMB Id.Range
-- pos: "|->"
deriving (Show, Eq)
-- All about pretty printing we chose the easy way here :)
instance Pretty FORMULA where
pretty = printFormula
instance Pretty BASICSPEC where
pretty = printBasicSpec
instance Pretty SYMB where
pretty = printSymbol
instance Pretty SYMBITEMS where
pretty = printSymbItems
instance Pretty SYMBMAPITEMS where
pretty = printSymbMapItems
instance Pretty BASICITEMS where
pretty = printBasicItems
instance Pretty SYMBORMAP where
pretty = printSymbOrMap
instance Pretty PREDITEM where
pretty = printPredItem
-- | Primary formulas are the ones printed without surrounding
-- parentheses: atoms, propositional identifiers, and negations.
isPrimForm :: FORMULA -> Bool
isPrimForm (TrueAtom _) = True
isPrimForm (FalseAtom _) = True
isPrimForm (Predication _) = True
isPrimForm (Negation _ _) = True
isPrimForm _ = False
-- Pretty printing for formulas
-- | Pretty-print a formula.  Subformulas that satisfy the supplied
-- predicate are printed bare, all others parenthesised: operands of a
-- negation must be primary ('isPrimForm'), operands of the binary
-- connectives merely must not themselves be implications, equivalences
-- or quantified formulas.
printFormula :: FORMULA -> Doc
printFormula frm =
  let ppf p f = (if p f then id else parens) $ printFormula f
      isJunctForm f = case f of
        Implication _ _ _ -> False
        Equivalence _ _ _ -> False
        ForAll _ _ _ -> False
        Exists _ _ _ -> False
        _ -> True
  in case frm of
    FalseAtom _ -> text falseS
    TrueAtom _ -> text trueS
    Predication x -> pretty x
    Negation f _ -> notDoc <+> ppf isPrimForm f
    Conjunction xs _ -> sepByArbitrary andDoc $ map (ppf isPrimForm) xs
    Disjunction xs _ -> sepByArbitrary orDoc $ map (ppf isPrimForm) xs
    Implication x y _ -> ppf isJunctForm x <+> implies <+> ppf isJunctForm y
    Equivalence x y _ -> ppf isJunctForm x <+> equiv <+> ppf isJunctForm y
    ForAll xs y _ -> forallDoc <+> sepByArbitrary comma (map pretty xs)
                     <+> space
                     <+> ppf isJunctForm y
    Exists xs y _ -> exists <+> sepByArbitrary comma (map pretty xs)
                    <+> space
                    <+> ppf isJunctForm y
-- | Separate documents with an arbitrary separator followed by a space,
-- e.g. @sepByArbitrary comma@ renders "a, b, c".
sepByArbitrary :: Doc -> [Doc] -> Doc
sepByArbitrary d = fsep . prepPunctuate (d <> space)
-- Pretty printing for the remaining syntax categories; each simply
-- distributes 'pretty' over the constructor's components.
printPredItem :: PREDITEM -> Doc
printPredItem (PredItem xs _) = fsep $ map pretty xs
printBasicSpec :: BASICSPEC -> Doc
printBasicSpec (BasicSpec xs) = vcat $ map pretty xs
printBasicItems :: BASICITEMS -> Doc
printBasicItems (AxiomItems xs) = vcat $ map pretty xs
printBasicItems (PredDecl x) = pretty x
printSymbol :: SYMB -> Doc
printSymbol (SymbId sym) = pretty sym
printSymbItems :: SYMBITEMS -> Doc
printSymbItems (SymbItems xs _) = fsep $ map pretty xs
printSymbOrMap :: SYMBORMAP -> Doc
printSymbOrMap (Symb sym) = pretty sym
-- symbol maps render as "source |-> dest"
printSymbOrMap (SymbMap source dest _) =
  pretty source <+> mapsto <+> pretty dest
printSymbMapItems :: SYMBMAPITEMS -> Doc
printSymbMapItems (SymbMapItems xs _) = fsep $ map pretty xs
| nevrenato/Hets_Fork | QBF/AS_BASIC_QBF.der.hs | gpl-2.0 | 8,279 | 0 | 24 | 2,390 | 2,699 | 1,386 | 1,313 | 177 | 16 |
module Interp where
import Control.Monad.Reader
import AbsFun
import ErrM
data Strategy = CallByName -- evaluation strategy
              | CallByValue
-- NOTE(review): 'data Env = Map Ident Exp' declares a *constructor*
-- named Map with two fields — almost certainly 'type Env = Map Ident Exp'
-- (with Data.Map imported) was intended; confirm against the skeleton.
data Env = Map Ident Exp -- environment
data Value = VInt Int -- values are integers
           | VClos Exp Env -- or closures
type Eval = ReaderT Env Err -- evaluation monad: passes around state (Env)
                            -- and returns `Err a` (either result `a` or error)

-- | Interpret a whole program under the chosen strategy (stub).
interpret :: Strategy -> Program -> Eval Int
interpret _ (Prog defs) = undefined
-- Add global function defs to env
-- Evaluate 'main'

-- | Evaluate a single expression to a value (stub).
eval :: Exp -> Eval Value
eval = undefined
| izimbra/PLT2014 | Lab4/2015/Interp.hs | gpl-2.0 | 645 | 0 | 7 | 184 | 121 | 71 | 50 | 14 | 1 |
{-# LANGUAGE CPP, TupleSections #-}
{-
Copyright (C) 2006-2014 John MacFarlane <jgm@berkeley.edu>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Main
Copyright : Copyright (C) 2006-2014 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <jgm@berkeley@edu>
Stability : alpha
Portability : portable
Parses command-line options and calls the appropriate readers and
writers.
-}
module Main where
import Text.Pandoc
import Text.Pandoc.Builder (setMeta)
import Text.Pandoc.PDF (makePDF)
import Text.Pandoc.Walk (walk)
import Text.Pandoc.Readers.LaTeX (handleIncludes)
import Text.Pandoc.Shared ( tabFilter, readDataFileUTF8, readDataFile,
safeRead, headerShift, normalize, err, warn,
openURL )
import Text.Pandoc.MediaBag ( mediaDirectory, extractMediaBag, MediaBag )
import Text.Pandoc.XML ( toEntities )
import Text.Pandoc.SelfContained ( makeSelfContained )
import Text.Pandoc.Process (pipeProcess)
import Text.Highlighting.Kate ( languages, Style, tango, pygments,
espresso, zenburn, kate, haddock, monochrome )
import System.Environment ( getArgs, getProgName )
import System.Exit ( exitWith, ExitCode (..) )
import System.FilePath
import System.Console.GetOpt
import Data.Char ( toLower )
import Data.List ( intercalate, isPrefixOf, isSuffixOf, sort )
import System.Directory ( getAppUserDataDirectory, findExecutable,
doesFileExist, Permissions(..), getPermissions )
import System.IO ( stdout, stderr )
import System.IO.Error ( isDoesNotExistError )
import qualified Control.Exception as E
import Control.Exception.Extensible ( throwIO )
import qualified Text.Pandoc.UTF8 as UTF8
import Control.Monad (when, unless, (>=>))
import Data.Maybe (isJust, fromMaybe)
import Data.Foldable (foldrM)
import Network.URI (parseURI, isURI, URI(..))
import qualified Data.ByteString.Lazy as B
import qualified Data.ByteString as BS
import Data.Aeson (eitherDecode', encode)
import qualified Data.Map as M
import Data.Yaml (decode)
import qualified Data.Yaml as Yaml
import qualified Data.Text as T
import Control.Applicative ((<$>), (<|>))
import Text.Pandoc.Readers.Txt2Tags (getT2TMeta)
import Data.Monoid
type Transform = Pandoc -> Pandoc
-- | Notice printed by @--version@: a leading blank line followed by the
-- copyright and warranty text, newline-separated, with no trailing
-- newline.
copyrightMessage :: String
copyrightMessage =
  '\n' : intercalate "\n" noticeLines
  where
    noticeLines =
      [ "Copyright (C) 2006-2014 John MacFarlane"
      , "Web: http://johnmacfarlane.net/pandoc"
      , "This is free software; see the source for copying conditions."
      , "There is no warranty, not even for merchantability or fitness"
      , "for a particular purpose." ]
-- | Text for @--version@ listing the library versions pandoc was built
-- against (the VERSION_* identifiers are CPP macros injected at build
-- time) and the languages available for syntax highlighting.  The two
-- "Alert" entries are excluded — presumably internal pseudo-languages
-- of highlighting-kate rather than real ones.
compileInfo :: String
compileInfo =
  "\nCompiled with texmath " ++
  VERSION_texmath ++ ", highlighting-kate " ++ VERSION_highlighting_kate ++
  ".\nSyntax highlighting is supported for the following languages:\n " ++
  wrapWords 4 78
  [map toLower l | l <- languages, l /= "Alert" && l /= "Alert_indent"]
-- | Converts a list of strings into a single string with the items printed as
-- comma separated words in lines with a maximum line length.
-- | Lay out words as a comma-separated paragraph.  @indent@ spaces are
-- assumed to precede every continuation line and @c@ is the total line
-- width, so each line has @c - indent@ usable columns.  A word that no
-- longer fits (with its separating comma) ends the current line with
-- "," and starts a fresh, indented one.
wrapWords :: Int -> Int -> [String] -> String
wrapWords indent c = go width width
  where
    width = c - indent
    -- go lineWidth columnsLeft words
    go _ _ [] = ""
    go cols left (w : ws)
      | left == cols =
          -- first word on a line: emitted without a separator
          w ++ go cols (left - length w) ws
      | length w + 1 > left =
          -- comma + word would overflow: break the line and indent
          ",\n" ++ replicate indent ' ' ++ w ++ go cols (cols - length w) ws
      | otherwise =
          ", " ++ w ++ go cols (left - length w - 2) ws
-- | True when the named output format is textual (writable to stdout).
-- Extension modifiers like "+smart"/"-raw_html" are stripped before the
-- base name is checked against the known binary formats.
isTextFormat :: String -> Bool
isTextFormat fmt = base `notElem` binaryFormats
  where
    base = takeWhile (\ch -> ch /= '+' && ch /= '-') fmt
    binaryFormats = ["odt", "docx", "epub", "epub3"]
-- | Run an external JSON filter over the document: the filter receives
-- the JSON-encoded 'Pandoc' on stdin and must write a transformed JSON
-- document to stdout.  If the name is not a PATH executable, an
-- interpreter is picked from the file extension (.py/.hs/.pl/.rb/.php);
-- the filter's stderr is forwarded, and a non-zero exit aborts pandoc.
externalFilter :: FilePath -> [String] -> Pandoc -> IO Pandoc
externalFilter f args' d = do
  mbexe <- if '/' `elem` f
              -- don't check PATH if filter name has a path
              then return Nothing
              -- we catch isDoesNotExistError because this will
              -- be triggered if PATH not set:
              else E.catch (findExecutable f)
                   (\e -> if isDoesNotExistError e
                             then return Nothing
                             else throwIO e)
  (f', args'') <- case mbexe of
                    Just x -> return (x, args')
                    Nothing -> do
                      exists <- doesFileExist f
                      if exists
                         then do
                           -- a file with the execute bit runs as-is;
                           -- otherwise dispatch on its extension
                           isExecutable <- executable `fmap`
                                           getPermissions f
                           return $
                             case map toLower $ takeExtension f of
                               _ | isExecutable -> (f, args')
                               ".py" -> ("python", f:args')
                               ".hs" -> ("runhaskell", f:args')
                               ".pl" -> ("perl", f:args')
                               ".rb" -> ("ruby", f:args')
                               ".php" -> ("php", f:args')
                               _ -> (f, args')
                         else err 85 $ "Filter " ++ f ++ " not found"
  (exitcode, outbs, errbs) <- E.handle filterException $
                              pipeProcess Nothing f' args'' $ encode d
  when (not $ B.null errbs) $ B.hPutStr stderr errbs
  case exitcode of
    ExitSuccess -> return $ either error id $ eitherDecode' outbs
    ExitFailure _ -> err 83 $ "Error running filter " ++ f
  where filterException :: E.SomeException -> IO a
        filterException e = err 83 $ "Error running filter " ++ f ++ "\n" ++
                            show e
-- | Data structure for command line options.
data Opt = Opt
{ optTabStop :: Int -- ^ Number of spaces per tab
, optPreserveTabs :: Bool -- ^ Preserve tabs instead of converting to spaces
, optStandalone :: Bool -- ^ Include header, footer
, optReader :: String -- ^ Reader format
, optWriter :: String -- ^ Writer format
, optParseRaw :: Bool -- ^ Parse unconvertable HTML and TeX
, optTableOfContents :: Bool -- ^ Include table of contents
, optTransforms :: [Transform] -- ^ Doc transforms to apply
, optTemplate :: Maybe FilePath -- ^ Custom template
, optVariables :: [(String,String)] -- ^ Template variables to set
, optMetadata :: M.Map String MetaValue -- ^ Metadata fields to set
, optOutputFile :: String -- ^ Name of output file
, optNumberSections :: Bool -- ^ Number sections in LaTeX
, optNumberOffset :: [Int] -- ^ Starting number for sections
, optSectionDivs :: Bool -- ^ Put sections in div tags in HTML
, optIncremental :: Bool -- ^ Use incremental lists in Slidy/Slideous/S5
, optSelfContained :: Bool -- ^ Make HTML accessible offline
, optSmart :: Bool -- ^ Use smart typography
, optOldDashes :: Bool -- ^ Parse dashes like pandoc <=1.8.2.1
, optHtml5 :: Bool -- ^ Produce HTML5 in HTML
, optHtmlQTags :: Bool -- ^ Use <q> tags in HTML
, optHighlight :: Bool -- ^ Highlight source code
, optHighlightStyle :: Style -- ^ Style to use for highlighted code
, optChapters :: Bool -- ^ Use chapter for top-level sects
, optHTMLMathMethod :: HTMLMathMethod -- ^ Method to print HTML math
, optReferenceODT :: Maybe FilePath -- ^ Path of reference.odt
, optReferenceDocx :: Maybe FilePath -- ^ Path of reference.docx
, optEpubStylesheet :: Maybe String -- ^ EPUB stylesheet
, optEpubMetadata :: String -- ^ EPUB metadata
, optEpubFonts :: [FilePath] -- ^ EPUB fonts to embed
, optEpubChapterLevel :: Int -- ^ Header level at which to split chapters
, optTOCDepth :: Int -- ^ Number of levels to include in TOC
, optDumpArgs :: Bool -- ^ Output command-line arguments
, optIgnoreArgs :: Bool -- ^ Ignore command-line arguments
, optVerbose :: Bool -- ^ Verbose diagnostic output
, optReferenceLinks :: Bool -- ^ Use reference links in writing markdown, rst
, optWrapText :: Bool -- ^ Wrap text
, optColumns :: Int -- ^ Line length in characters
, optFilters :: [FilePath] -- ^ Filters to apply
, optEmailObfuscation :: ObfuscationMethod
, optIdentifierPrefix :: String
, optIndentedCodeClasses :: [String] -- ^ Default classes for indented code blocks
, optDataDir :: Maybe FilePath
, optCiteMethod :: CiteMethod -- ^ Method to output cites
, optListings :: Bool -- ^ Use listings package for code blocks
, optLaTeXEngine :: String -- ^ Program to use for latex -> pdf
, optSlideLevel :: Maybe Int -- ^ Header level that creates slides
, optSetextHeaders :: Bool -- ^ Use atx headers for markdown level 1-2
, optAscii :: Bool -- ^ Use ascii characters only in html
, optTeXLigatures :: Bool -- ^ Use TeX ligatures for quotes/dashes
, optDefaultImageExtension :: String -- ^ Default image extension
, optExtractMedia :: Maybe FilePath -- ^ Path to extract embedded media
, optTrace :: Bool -- ^ Print debug information
, optTrackChanges :: TrackChanges -- ^ Accept or reject MS Word track-changes.
, optKaTeXStylesheet :: Maybe String -- ^ Path to stylesheet for KaTeX
, optKaTeXJS :: Maybe String -- ^ Path to js file for KaTeX
}
-- | Defaults for command-line options.
defaultOpts :: Opt
defaultOpts = Opt
{ optTabStop = 4
, optPreserveTabs = False
, optStandalone = False
, optReader = "" -- null for default reader
, optWriter = "" -- null for default writer
, optParseRaw = False
, optTableOfContents = False
, optTransforms = []
, optTemplate = Nothing
, optVariables = []
, optMetadata = M.empty
, optOutputFile = "-" -- "-" means stdout
, optNumberSections = False
, optNumberOffset = [0,0,0,0,0,0]
, optSectionDivs = False
, optIncremental = False
, optSelfContained = False
, optSmart = False
, optOldDashes = False
, optHtml5 = False
, optHtmlQTags = False
, optHighlight = True
, optHighlightStyle = pygments
, optChapters = False
, optHTMLMathMethod = PlainMath
, optReferenceODT = Nothing
, optReferenceDocx = Nothing
, optEpubStylesheet = Nothing
, optEpubMetadata = ""
, optEpubFonts = []
, optEpubChapterLevel = 1
, optTOCDepth = 3
, optDumpArgs = False
, optIgnoreArgs = False
, optVerbose = False
, optReferenceLinks = False
, optWrapText = True
, optColumns = 72
, optFilters = []
, optEmailObfuscation = JavascriptObfuscation
, optIdentifierPrefix = ""
, optIndentedCodeClasses = []
, optDataDir = Nothing
, optCiteMethod = Citeproc
, optListings = False
, optLaTeXEngine = "pdflatex"
, optSlideLevel = Nothing
, optSetextHeaders = True
, optAscii = False
, optTeXLigatures = True
, optDefaultImageExtension = ""
, optExtractMedia = Nothing
, optTrace = False
, optTrackChanges = AcceptChanges
, optKaTeXStylesheet = Nothing
, optKaTeXJS = Nothing
}
-- | A list of functions, each transforming the options data structure
-- in response to a command-line option.  Actions run in IO so they can
-- read files, print warnings, or exit (via 'err' \/ 'exitWith').
options :: [OptDescr (Opt -> IO Opt)]
options =
  [ Option "fr" ["from","read"]
      (ReqArg
        (\arg opt -> return opt { optReader = arg })
        "FORMAT")
      "" -- "Input format"
  , Option "tw" ["to","write"]
      (ReqArg
        (\arg opt -> return opt { optWriter = arg })
        "FORMAT")
      "" -- "Output format"
  , Option "o" ["output"]
      (ReqArg
        (\arg opt -> return opt { optOutputFile = arg })
        "FILENAME")
      "" -- "Name of output file"
  , Option "" ["data-dir"]
      (ReqArg
        (\arg opt -> return opt { optDataDir = Just arg })
        "DIRECTORY") -- "Directory containing pandoc data files."
      ""
  , Option "" ["strict"]
      (NoArg
        (\opt -> do
            -- 'err' terminates the program, so the final 'return' is never
            -- reached; it only keeps the action well-typed.
            err 59 $ "The --strict option has been removed.\n" ++
                     "Use `markdown_strict' input or output format instead."
            return opt))
      "" -- "Disable markdown syntax extensions"
  , Option "R" ["parse-raw"]
      (NoArg
        (\opt -> return opt { optParseRaw = True }))
      "" -- "Parse untranslatable HTML codes and LaTeX environments as raw"
  , Option "S" ["smart"]
      (NoArg
        (\opt -> return opt { optSmart = True }))
      "" -- "Use smart quotes, dashes, and ellipses"
  , Option "" ["old-dashes"]
      (NoArg
        (\opt -> return opt { optSmart = True
                            , optOldDashes = True }))
      "" -- "Use smart typography but old-style dash rules"
  , Option "" ["base-header-level"]
      (ReqArg
        (\arg opt ->
            case safeRead arg of
              Just t | t > 0 -> do
                  let oldTransforms = optTransforms opt
                  let shift = t - 1
                  return opt{ optTransforms =
                                headerShift shift : oldTransforms }
              _ -> err 19
                     "base-header-level must be a number > 0")
        "NUMBER")
      "" -- "Headers base level"
  , Option "" ["indented-code-classes"]
      (ReqArg
        (\arg opt -> return opt { optIndentedCodeClasses = words $
                        map (\c -> if c == ',' then ' ' else c) arg })
        "STRING")
      "" -- "Classes (whitespace- or comma-separated) to use for indented code-blocks"
  , Option "F" ["filter"]
      (ReqArg
        (\arg opt -> return opt { optFilters = arg : optFilters opt })
        "PROGRAM")
      "" -- "External JSON filter"
  , Option "" ["normalize"]
      (NoArg
        (\opt -> return opt { optTransforms =
                                normalize : optTransforms opt }))
      "" -- "Normalize the Pandoc AST"
  , Option "p" ["preserve-tabs"]
      (NoArg
        (\opt -> return opt { optPreserveTabs = True }))
      "" -- "Preserve tabs instead of converting to spaces"
  , Option "" ["tab-stop"]
      (ReqArg
        (\arg opt ->
            case safeRead arg of
              Just t | t > 0 -> return opt { optTabStop = t }
              _ -> err 31
                     "tab-stop must be a number greater than 0")
        "NUMBER")
      "" -- "Tab stop (default 4)"
  , Option "" ["track-changes"]
      (ReqArg
        (\arg opt -> do
            action <- case arg of
                        "accept" -> return AcceptChanges
                        "reject" -> return RejectChanges
                        "all"    -> return AllChanges
                        _        -> err 6
                                      ("Unknown option for track-changes: " ++ arg)
            return opt { optTrackChanges = action })
        "accept|reject|all")
      "" -- "Accept or reject MS Word track-changes."
  , Option "" ["extract-media"]
      (ReqArg
        (\arg opt -> return opt { optExtractMedia = Just arg })
        "PATH")
      "" -- "Directory to which to extract embedded media"
  , Option "s" ["standalone"]
      (NoArg
        (\opt -> return opt { optStandalone = True }))
      "" -- "Include needed header and footer on output"
  , Option "" ["template"]
      (ReqArg
        (\arg opt -> return opt{ optTemplate = Just arg,
                                 optStandalone = True })
        "FILENAME")
      "" -- "Use custom template"
  , Option "M" ["metadata"]
      (ReqArg
        (\arg opt -> do
            -- Split KEY[:VALUE]; a bare KEY becomes a boolean true flag.
            let (key,val) = case break (`elem` ":=") arg of
                              (k,_:v) -> (k, readMetaValue v)
                              (k,_)   -> (k, MetaBool True)
            return opt{ optMetadata = addMetadata key val
                                      $ optMetadata opt })
        "KEY[:VALUE]")
      "" -- "Metadata field to set (repeatable)"
  , Option "V" ["variable"]
      (ReqArg
        (\arg opt -> do
            -- Split KEY[:VALUE]; a bare KEY gets the value "true".
            let (key,val) = case break (`elem` ":=") arg of
                              (k,_:v) -> (k,v)
                              (k,_)   -> (k,"true")
            return opt{ optVariables = (key,val) : optVariables opt })
        "KEY[:VALUE]")
      "" -- "Template variable to set (repeatable)"
  , Option "D" ["print-default-template"]
      (ReqArg
        (\arg _ -> do
            templ <- getDefaultTemplate Nothing arg
            case templ of
              Right t -> UTF8.hPutStr stdout t
              Left e  -> error $ show e
            exitWith ExitSuccess)
        "FORMAT")
      "" -- "Print default template for FORMAT"
  , Option "" ["print-default-data-file"]
      (ReqArg
        (\arg _ -> do
            readDataFile Nothing arg >>= BS.hPutStr stdout
            exitWith ExitSuccess)
        "FILE")
      "" -- "Print default data file"
  , Option "" ["no-wrap"]
      (NoArg
        (\opt -> return opt { optWrapText = False }))
      "" -- "Do not wrap text in output"
  , Option "" ["columns"]
      (ReqArg
        (\arg opt ->
            case safeRead arg of
              Just t | t > 0 -> return opt { optColumns = t }
              _ -> err 33 $
                     "columns must be a number greater than 0")
        "NUMBER")
      "" -- "Length of line in characters"
  , Option "" ["toc", "table-of-contents"]
      (NoArg
        (\opt -> return opt { optTableOfContents = True }))
      "" -- "Include table of contents"
  , Option "" ["toc-depth"]
      (ReqArg
        (\arg opt ->
            case safeRead arg of
              Just t | t >= 1 && t <= 6 ->
                         return opt { optTOCDepth = t }
              _ -> err 57 $
                     "TOC level must be a number between 1 and 6")
        "NUMBER")
      "" -- "Number of levels to include in TOC"
  , Option "" ["no-highlight"]
      (NoArg
        (\opt -> return opt { optHighlight = False }))
      "" -- "Don't highlight source code"
  , Option "" ["highlight-style"]
      (ReqArg
        (\arg opt -> do
            newStyle <- case map toLower arg of
                          "pygments"   -> return pygments
                          "tango"      -> return tango
                          "espresso"   -> return espresso
                          "zenburn"    -> return zenburn
                          "kate"       -> return kate
                          "monochrome" -> return monochrome
                          "haddock"    -> return haddock
                          _            -> err 39 $
                                            "Unknown style: " ++ arg
            return opt{ optHighlightStyle = newStyle })
        "STYLE")
      "" -- "Style for highlighted code"
  , Option "H" ["include-in-header"]
      (ReqArg
        (\arg opt -> do
            text <- UTF8.readFile arg
            -- add new ones to end, so they're included in order specified
            let newvars = optVariables opt ++ [("header-includes",text)]
            return opt { optVariables = newvars,
                         optStandalone = True })
        "FILENAME")
      "" -- "File to include at end of header (implies -s)"
  , Option "B" ["include-before-body"]
      (ReqArg
        (\arg opt -> do
            text <- UTF8.readFile arg
            -- add new ones to end, so they're included in order specified
            let newvars = optVariables opt ++ [("include-before",text)]
            return opt { optVariables = newvars,
                         optStandalone = True })
        "FILENAME")
      "" -- "File to include before document body"
  , Option "A" ["include-after-body"]
      (ReqArg
        (\arg opt -> do
            text <- UTF8.readFile arg
            -- add new ones to end, so they're included in order specified
            let newvars = optVariables opt ++ [("include-after",text)]
            return opt { optVariables = newvars,
                         optStandalone = True })
        "FILENAME")
      "" -- "File to include after document body"
  , Option "" ["self-contained"]
      (NoArg
        (\opt -> return opt { optSelfContained = True,
                              optStandalone = True }))
      "" -- "Make slide shows include all the needed js and css"
  , Option "" ["offline"]
      (NoArg
        (\opt -> do
            warn $ "--offline is deprecated. Use --self-contained instead."
            return opt { optSelfContained = True,
                         optStandalone = True }))
      "" -- deprecated synonym for --self-contained
  , Option "5" ["html5"]
      (NoArg
        (\opt -> do
            warn $ "--html5 is deprecated. "
                   ++ "Use the html5 output format instead."
            return opt { optHtml5 = True }))
      "" -- "Produce HTML5 in HTML output"
  , Option "" ["html-q-tags"]
      (NoArg
        (\opt -> return opt { optHtmlQTags = True }))
      "" -- "Use <q> tags for quotes in HTML"
  , Option "" ["ascii"]
      (NoArg
        (\opt -> return opt { optAscii = True }))
      "" -- "Use ascii characters only in HTML output"
  , Option "" ["reference-links"]
      (NoArg
        (\opt -> return opt { optReferenceLinks = True }))
      "" -- "Use reference links in parsing HTML"
  , Option "" ["atx-headers"]
      (NoArg
        (\opt -> return opt { optSetextHeaders = False }))
      "" -- "Use atx-style headers for markdown"
  , Option "" ["chapters"]
      (NoArg
        (\opt -> return opt { optChapters = True }))
      "" -- "Use chapter for top-level sections in LaTeX, DocBook"
  , Option "N" ["number-sections"]
      (NoArg
        (\opt -> return opt { optNumberSections = True }))
      "" -- "Number sections in LaTeX"
  , Option "" ["number-offset"]
      (ReqArg
        (\arg opt ->
            -- Wrap the argument in brackets so safeRead parses a list.
            case safeRead ('[':arg ++ "]") of
              Just ns -> return opt { optNumberOffset = ns,
                                      optNumberSections = True }
              _       -> err 57 "could not parse number-offset")
        "NUMBERS")
      "" -- "Starting number for sections, subsections, etc."
  , Option "" ["no-tex-ligatures"]
      (NoArg
        (\opt -> return opt { optTeXLigatures = False }))
      "" -- "Don't use tex ligatures for quotes, dashes"
  , Option "" ["listings"]
      (NoArg
        (\opt -> return opt { optListings = True }))
      "" -- "Use listings package for LaTeX code blocks"
  , Option "i" ["incremental"]
      (NoArg
        (\opt -> return opt { optIncremental = True }))
      "" -- "Make list items display incrementally in Slidy/Slideous/S5"
  , Option "" ["slide-level"]
      (ReqArg
        (\arg opt ->
            case safeRead arg of
              Just t | t >= 1 && t <= 6 ->
                         return opt { optSlideLevel = Just t }
              _ -> err 39 $
                     "slide level must be a number between 1 and 6")
        "NUMBER")
      "" -- "Force header level for slides"
  , Option "" ["section-divs"]
      (NoArg
        (\opt -> return opt { optSectionDivs = True }))
      "" -- "Put sections in div tags in HTML"
  , Option "" ["default-image-extension"]
      (ReqArg
        (\arg opt -> return opt { optDefaultImageExtension = arg })
        "extension")
      "" -- "Default extension for extensionless images"
  , Option "" ["email-obfuscation"]
      (ReqArg
        (\arg opt -> do
            method <- case arg of
                        "references" -> return ReferenceObfuscation
                        "javascript" -> return JavascriptObfuscation
                        "none"       -> return NoObfuscation
                        _            -> err 6
                                          ("Unknown obfuscation method: " ++ arg)
            return opt { optEmailObfuscation = method })
        "none|javascript|references")
      "" -- "Method for obfuscating email in HTML"
  , Option "" ["id-prefix"]
      (ReqArg
        (\arg opt -> return opt { optIdentifierPrefix = arg })
        "STRING")
      "" -- "Prefix to add to automatically generated HTML identifiers"
  , Option "T" ["title-prefix"]
      (ReqArg
        (\arg opt -> do
            let newvars = ("title-prefix", arg) : optVariables opt
            return opt { optVariables = newvars,
                         optStandalone = True })
        "STRING")
      "" -- "String to prefix to HTML window title"
  , Option "c" ["css"]
      (ReqArg
        (\arg opt -> do
            -- add new link to end, so it is included in proper order
            let newvars = optVariables opt ++ [("css",arg)]
            return opt { optVariables = newvars,
                         optStandalone = True })
        "URL")
      "" -- "Link to CSS style sheet"
  , Option "" ["reference-odt"]
      (ReqArg
        (\arg opt -> return opt { optReferenceODT = Just arg })
        "FILENAME")
      "" -- "Path of custom reference.odt"
  , Option "" ["reference-docx"]
      (ReqArg
        (\arg opt -> return opt { optReferenceDocx = Just arg })
        "FILENAME")
      "" -- "Path of custom reference.docx"
  , Option "" ["epub-stylesheet"]
      (ReqArg
        (\arg opt -> do
            text <- UTF8.readFile arg
            return opt { optEpubStylesheet = Just text })
        "FILENAME")
      "" -- "Path of epub.css"
  , Option "" ["epub-cover-image"]
      (ReqArg
        (\arg opt ->
            return opt { optVariables =
                           ("epub-cover-image", arg) : optVariables opt })
        "FILENAME")
      "" -- "Path of epub cover image"
  , Option "" ["epub-metadata"]
      (ReqArg
        (\arg opt -> do
            text <- UTF8.readFile arg
            return opt { optEpubMetadata = text })
        "FILENAME")
      "" -- "Path of epub metadata file"
  , Option "" ["epub-embed-font"]
      (ReqArg
        (\arg opt -> return opt{ optEpubFonts = arg : optEpubFonts opt })
        "FILE")
      "" -- "Font file to embed in the EPUB (repeatable)"
  , Option "" ["epub-chapter-level"]
      (ReqArg
        (\arg opt ->
            case safeRead arg of
              Just t | t >= 1 && t <= 6 ->
                         return opt { optEpubChapterLevel = t }
              _ -> err 59 $
                     "chapter level must be a number between 1 and 6")
        "NUMBER")
      "" -- "Header level at which to split chapters in EPUB"
  , Option "" ["latex-engine"]
      (ReqArg
        (\arg opt -> do
            let b = takeBaseName arg
            if b `elem` ["pdflatex", "lualatex", "xelatex"]
               then return opt { optLaTeXEngine = arg }
               else err 45 "latex-engine must be pdflatex, lualatex, or xelatex.")
        "PROGRAM")
      "" -- "Name of latex program to use in generating PDF"
  , Option "" ["bibliography"]
      (ReqArg
        (\arg opt -> return opt{ optMetadata = addMetadata
                                   "bibliography" (readMetaValue arg)
                                   $ optMetadata opt
                               , optVariables =
                                   ("biblio-files", dropExtension arg) :
                                   optVariables opt
                               })
        "FILE")
      "" -- "Bibliography file to use for citations"
  , Option "" ["csl"]
      (ReqArg
        (\arg opt ->
            return opt{ optMetadata = addMetadata "csl"
                                        (readMetaValue arg)
                                        $ optMetadata opt })
        "FILE")
      "" -- "CSL citation style file"
  , Option "" ["citation-abbreviations"]
      (ReqArg
        (\arg opt ->
            return opt{ optMetadata = addMetadata
                                        "citation-abbreviations"
                                        (readMetaValue arg)
                                        $ optMetadata opt })
        "FILE")
      "" -- "Citation abbreviations file"
  , Option "" ["natbib"]
      (NoArg
        (\opt -> return opt { optCiteMethod = Natbib }))
      "" -- "Use natbib cite commands in LaTeX output"
  , Option "" ["biblatex"]
      (NoArg
        (\opt -> return opt { optCiteMethod = Biblatex }))
      "" -- "Use biblatex cite commands in LaTeX output"
  , Option "m" ["latexmathml", "asciimathml"]
      (OptArg
        (\arg opt ->
            return opt { optHTMLMathMethod = LaTeXMathML arg })
        "URL")
      "" -- "Use LaTeXMathML script in html output"
  , Option "" ["mathml"]
      (OptArg
        (\arg opt ->
            return opt { optHTMLMathMethod = MathML arg })
        "URL")
      "" -- "Use mathml for HTML math"
  , Option "" ["mimetex"]
      (OptArg
        (\arg opt -> do
            let url' = case arg of
                         Just u  -> u ++ "?"
                         Nothing -> "/cgi-bin/mimetex.cgi?"
            return opt { optHTMLMathMethod = WebTeX url' })
        "URL")
      "" -- "Use mimetex for HTML math"
  , Option "" ["webtex"]
      (OptArg
        (\arg opt -> do
            let url' = case arg of
                         Just u  -> u
                         Nothing -> "http://chart.apis.google.com/chart?cht=tx&chl="
            return opt { optHTMLMathMethod = WebTeX url' })
        "URL")
      "" -- "Use web service for HTML math"
  , Option "" ["jsmath"]
      (OptArg
        (\arg opt -> return opt { optHTMLMathMethod = JsMath arg })
        "URL")
      "" -- "Use jsMath for HTML math"
  , Option "" ["mathjax"]
      (OptArg
        (\arg opt -> do
            let url' = case arg of
                         Just u  -> u
                         Nothing -> "//cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"
            return opt { optHTMLMathMethod = MathJax url' })
        "URL")
      "" -- "Use MathJax for HTML math"
  , Option "" ["katex"]
      (OptArg
        (\arg opt ->
            return opt
              { optKaTeXJS =
                  arg <|> Just "http://cdnjs.cloudflare.com/ajax/libs/KaTeX/0.1.0/katex.min.js" })
        "URL")
      "" -- Use KaTeX for HTML Math
  , Option "" ["katex-stylesheet"]
      (ReqArg
        (\arg opt ->
            return opt { optKaTeXStylesheet = Just arg })
        "URL")
      "" -- Set the KaTeX Stylesheet location
  , Option "" ["gladtex"]
      (NoArg
        (\opt -> return opt { optHTMLMathMethod = GladTeX }))
      "" -- "Use gladtex for HTML math"
  , Option "" ["trace"]
      (NoArg
        (\opt -> return opt { optTrace = True }))
      "" -- "Turn on diagnostic tracing in readers."
  , Option "" ["dump-args"]
      (NoArg
        (\opt -> return opt { optDumpArgs = True }))
      "" -- "Print output filename and arguments to stdout."
  , Option "" ["ignore-args"]
      (NoArg
        (\opt -> return opt { optIgnoreArgs = True }))
      "" -- "Ignore command-line arguments."
  , Option "" ["verbose"]
      (NoArg
        (\opt -> return opt { optVerbose = True }))
      "" -- "Verbose diagnostic output."
  , Option "v" ["version"]
      (NoArg
        (\_ -> do
            prg <- getProgName
            defaultDatadir <- getAppUserDataDirectory "pandoc"
            UTF8.hPutStrLn stdout (prg ++ " " ++ pandocVersion ++
                                   compileInfo ++ "\nDefault user data directory: " ++
                                   defaultDatadir ++ copyrightMessage)
            exitWith ExitSuccess))
      "" -- "Print version"
  , Option "h" ["help"]
      (NoArg
        (\_ -> do
            prg <- getProgName
            UTF8.hPutStr stdout (usageMessage prg options)
            exitWith ExitSuccess))
      "" -- "Show help"
  ]
-- Record a metadata value from the command line.  The first value for a
-- key is stored bare; subsequent values are collected into a MetaList
-- (appended after an existing list, but placed before an existing bare
-- value).
addMetadata :: String -> MetaValue -> M.Map String MetaValue
            -> M.Map String MetaValue
addMetadata key value = M.alter combine key
  where
    combine Nothing                = Just value
    combine (Just (MetaList olds)) = Just (MetaList (olds ++ [value]))
    combine (Just old)             = Just (MetaList [value, old])
-- Interpret a command-line metadata string as YAML: strings and booleans
-- keep their YAML type; anything else (including unparsable input) is
-- treated as a literal string.
readMetaValue :: String -> MetaValue
readMetaValue raw = interpret (decode (UTF8.fromString raw))
  where
    interpret (Just (Yaml.String t)) = MetaString (T.unpack t)
    interpret (Just (Yaml.Bool b))   = MetaBool b
    interpret _                      = MetaString raw
-- Returns usage message listing input/output formats and all options.
usageMessage :: String -> [OptDescr (Opt -> IO Opt)] -> String
usageMessage programName = usageInfo header
  where
    header = programName ++ " [OPTIONS] [FILES]" ++
             "\nInput formats: " ++ wrapWords 16 78 readersNames ++
             "\nOutput formats: " ++ wrapWords 16 78 writersNames ++
             '\n' : replicate 16 ' ' ++
             "[*for pdf output, use latex or beamer and -o FILENAME.pdf]\nOptions:"
    -- "pdf*" is listed even though there is no pdf writer proper.
    writersNames = sort ("pdf*" : map fst writers)
    readersNames = sort (map fst readers)
-- Determine default reader based on source file extensions, falling back
-- to the given name when no extension is recognized.
defaultReaderName :: String -> [FilePath] -> String
defaultReaderName fallback [] = fallback
defaultReaderName fallback (source:rest) =
  fromMaybe (defaultReaderName fallback rest) $
    lookup (takeExtension (map toLower source)) knownExtensions
  where
    -- odt/pdf/doc map to themselves so the caller reports a helpful
    -- "unknown reader" error for those formats.
    knownExtensions =
      [ (".xhtml",    "html")
      , (".html",     "html")
      , (".htm",      "html")
      , (".tex",      "latex")
      , (".latex",    "latex")
      , (".ltx",      "latex")
      , (".rst",      "rst")
      , (".org",      "org")
      , (".lhs",      "markdown+lhs")
      , (".db",       "docbook")
      , (".opml",     "opml")
      , (".wiki",     "mediawiki")
      , (".dokuwiki", "dokuwiki")
      , (".textile",  "textile")
      , (".native",   "native")
      , (".json",     "json")
      , (".docx",     "docx")
      , (".t2t",      "t2t")
      , (".epub",     "epub")
      , (".odt",      "odt")
      , (".pdf",      "pdf")
      , (".doc",      "doc")
      ]
-- Returns True if extension of first source is .lhs
lhsExtension :: [FilePath] -> Bool
lhsExtension files =
  case files of
    (firstSource:_) -> takeExtension firstSource == ".lhs"
    []              -> False
-- Determine default writer based on output file extension
defaultWriterName :: FilePath -> String
defaultWriterName "-" = "html" -- no output file: writing to stdout
defaultWriterName path =
  case takeExtension (map toLower path) of
    ""  -> "markdown" -- empty extension
    ext -> fromMaybe (fallbackFor ext) (lookup ext knownExtensions)
  where
    -- Unrecognized ".1" .. ".9" extensions are man pages; anything else
    -- defaults to html.
    fallbackFor ['.', c] | c `elem` ['1'..'9'] = "man"
    fallbackFor _                              = "html"
    knownExtensions =
      [ (".tex",      "latex")
      , (".latex",    "latex")
      , (".ltx",      "latex")
      , (".context",  "context")
      , (".ctx",      "context")
      , (".rtf",      "rtf")
      , (".rst",      "rst")
      , (".s5",       "s5")
      , (".native",   "native")
      , (".json",     "json")
      , (".txt",      "markdown")
      , (".text",     "markdown")
      , (".md",       "markdown")
      , (".markdown", "markdown")
      , (".textile",  "textile")
      , (".lhs",      "markdown+lhs")
      , (".texi",     "texinfo")
      , (".texinfo",  "texinfo")
      , (".db",       "docbook")
      , (".odt",      "odt")
      , (".docx",     "docx")
      , (".epub",     "epub")
      , (".org",      "org")
      , (".asciidoc", "asciidoc")
      , (".pdf",      "latex") -- pdf is produced via the latex writer
      , (".fb2",      "fb2")
      , (".opml",     "opml")
      , (".icml",     "icml")
      ]
-- Transformations of a Pandoc document post-parsing:
-- Write the document's media bag to 'dir' and rewrite image paths in the
-- document to point at the extracted copies; a document with no media is
-- returned unchanged.
extractMedia :: MediaBag -> FilePath -> Pandoc -> IO Pandoc
extractMedia media dir d
  | null paths = return d
  | otherwise  = do
      extractMediaBag True dir media
      return $ walk (adjustImagePath dir paths) d
  where
    paths = [fp | (fp, _, _) <- mediaDirectory media]
-- Prefix 'dir' onto image sources that were extracted from the media bag;
-- all other inlines pass through untouched.
adjustImagePath :: FilePath -> [FilePath] -> Inline -> Inline
adjustImagePath dir extracted inline =
  case inline of
    Image lab (src, tit)
      | src `elem` extracted -> Image lab (dir ++ "/" ++ src, tit)
    _ -> inline
-- Apply each command-line metadata key/value to the document via setMeta.
-- M.foldrWithKey replaces the deprecated M.foldWithKey (an alias for it,
-- removed in containers 0.6); behavior is identical.
adjustMetadata :: M.Map String MetaValue -> Pandoc -> IO Pandoc
adjustMetadata metadata d = return $ M.foldrWithKey setMeta d metadata
-- Apply the pure AST transforms; transforms later in the list are applied
-- first (right fold), matching foldr ($) semantics.
applyTransforms :: [Transform] -> Pandoc -> IO Pandoc
applyTransforms transforms d = return (go transforms)
  where
    go []       = d
    go (t : ts) = t (go ts)
-- Run each external JSON filter over the document, right to left, passing
-- the extra command-line arguments to every filter.
applyFilters :: [FilePath] -> [String] -> Pandoc -> IO Pandoc
applyFilters filters args d =
  foldrM (\f doc -> externalFilter f args doc) d filters
-- | Program entry point: parse command-line options, resolve reader and
-- writer, read and parse the input, post-process the document, and write
-- the output (possibly via the LaTeX engine for PDF).
main :: IO ()
main = do
  rawArgs <- map UTF8.decodeArg <$> getArgs
  prg <- getProgName
  -- invoked as "hsmarkdown": emulate strict markdown->html, no options
  let compatMode = (prg == "hsmarkdown")
  let (actions, args, errors) = if compatMode
                                  then ([], rawArgs, [])
                                  else getOpt Permute options rawArgs
  unless (null errors) $
    err 2 $ concat $ errors ++
       ["Try " ++ prg ++ " --help for more information."]
  let defaultOpts' = if compatMode
                       then defaultOpts { optReader = "markdown_strict"
                                        , optWriter = "html"
                                        , optEmailObfuscation =
                                            ReferenceObfuscation }
                       else defaultOpts
  -- thread option data structure through all supplied option actions
  opts <- foldl (>>=) (return defaultOpts') actions
  let Opt { optTabStop = tabStop
          , optPreserveTabs = preserveTabs
          , optStandalone = standalone
          , optReader = readerName
          , optWriter = writerName
          , optParseRaw = parseRaw
          , optVariables = variables
          , optMetadata = metadata
          , optTableOfContents = toc
          , optTransforms = transforms
          , optTemplate = templatePath
          , optOutputFile = outputFile
          , optNumberSections = numberSections
          , optNumberOffset = numberFrom
          , optSectionDivs = sectionDivs
          , optIncremental = incremental
          , optSelfContained = selfContained
          , optSmart = smart
          , optOldDashes = oldDashes
          , optHtml5 = html5
          , optHtmlQTags = htmlQTags
          , optHighlight = highlight
          , optHighlightStyle = highlightStyle
          , optChapters = chapters
          , optHTMLMathMethod = mathMethod'
          , optReferenceODT = referenceODT
          , optReferenceDocx = referenceDocx
          , optEpubStylesheet = epubStylesheet
          , optEpubMetadata = epubMetadata
          , optEpubFonts = epubFonts
          , optEpubChapterLevel = epubChapterLevel
          , optTOCDepth = epubTOCDepth
          , optDumpArgs = dumpArgs
          , optIgnoreArgs = ignoreArgs
          , optVerbose = verbose
          , optReferenceLinks = referenceLinks
          , optWrapText = wrap
          , optColumns = columns
          , optFilters = filters
          , optEmailObfuscation = obfuscationMethod
          , optIdentifierPrefix = idPrefix
          , optIndentedCodeClasses = codeBlockClasses
          , optDataDir = mbDataDir
          , optCiteMethod = citeMethod
          , optListings = listings
          , optLaTeXEngine = latexEngine
          , optSlideLevel = slideLevel
          , optSetextHeaders = setextHeaders
          , optAscii = ascii
          , optTeXLigatures = texLigatures
          , optDefaultImageExtension = defaultImageExtension
          , optExtractMedia = mbExtractMedia
          , optTrace = trace
          , optTrackChanges = trackChanges
          , optKaTeXStylesheet = katexStylesheet
          , optKaTeXJS = katexJS
          } = opts
  -- with --dump-args, print output filename and arguments, then exit
  when dumpArgs $
    do UTF8.hPutStrLn stdout outputFile
       mapM_ (\arg -> UTF8.hPutStrLn stdout arg) args
       exitWith ExitSuccess
  let csscdn = "http://cdnjs.cloudflare.com/ajax/libs/KaTeX/0.1.0/katex.min.css"
  -- --katex selects KaTeX math; the stylesheet falls back to the CDN copy
  let mathMethod =
        case (katexJS, katexStylesheet) of
            (Nothing, _) -> mathMethod'
            (Just js, ss) -> KaTeX js (fromMaybe csscdn ss)
  -- --bibliography implies -F pandoc-citeproc for backwards compatibility:
  let needsCiteproc = isJust (M.lookup "bibliography" metadata) &&
                      optCiteMethod opts `notElem` [Natbib, Biblatex] &&
                      "pandoc-citeproc" `notElem` map takeBaseName filters
  let filters' = if needsCiteproc then "pandoc-citeproc" : filters
                                  else filters
  let sources = if ignoreArgs then [] else args
  -- locate the user data directory unless one was given with --data-dir
  datadir <- case mbDataDir of
                  Nothing   -> E.catch
                                (Just <$> getAppUserDataDirectory "pandoc")
                                (\e -> let _ = (e :: E.SomeException)
                                       in  return Nothing)
                  Just _    -> return mbDataDir
  -- assign reader and writer based on options and filenames
  let readerName' = case map toLower readerName of
                          []       -> defaultReaderName
                                      (if any isURI sources
                                          then "html"
                                          else "markdown") sources
                          "html4"  -> "html"
                          x        -> x
  let writerName' = case map toLower writerName of
                          []        -> defaultWriterName outputFile
                          "epub2"   -> "epub"
                          "html4"   -> "html"
                          x         -> x
  let pdfOutput = map toLower (takeExtension outputFile) == ".pdf"
  let laTeXOutput = "latex" `isPrefixOf` writerName' ||
                    "beamer" `isPrefixOf` writerName'
  -- select the writer; ".lua" names go through the custom-writer path
  writer <- if ".lua" `isSuffixOf` writerName'
               -- note:  use non-lowercased version writerName
               then return $ IOStringWriter $ writeCustom writerName
               else case getWriter writerName' of
                         Left e  -> err 9 $
                           if writerName' == "pdf"
                              then e ++
                               "\nTo create a pdf with pandoc, use " ++
                               "the latex or beamer writer and specify\n" ++
                               "an output file with .pdf extension " ++
                               "(pandoc -t latex -o filename.pdf)."
                              else e
                         Right w -> return w
  -- select the reader; t2t is built specially from source/output metadata
  reader <- if "t2t" == readerName'
              then (mkStringReader .
                    readTxt2Tags) <$>
                      (getT2TMeta sources outputFile)
              else case getReader readerName' of
                Right r  -> return r
                Left e   -> err 7 e'
                  where e' = case readerName' of
                                  "pdf" -> e ++
                                     "\nPandoc can convert to PDF, but not from PDF."
                                  "doc" -> e ++
                                     "\nPandoc can convert from DOCX, but not from DOC.\nTry using Word to save your DOC file as DOCX, and convert that with pandoc."
                                  "odt" -> e ++
                                     "\nPandoc can convert to ODT, but not from ODT.\nTry using LibreOffice to export as HTML, and convert that with pandoc."
                                  _ -> e
  let standalone' = standalone || not (isTextFormat writerName') || pdfOutput
  -- load the template: none unless standalone; user-supplied or default
  templ <- case templatePath of
                _ | not standalone' -> return ""
                Nothing -> do
                           deftemp <- getDefaultTemplate datadir writerName'
                           case deftemp of
                                 Left e   -> throwIO e
                                 Right t  -> return t
                Just tp -> do
                           -- strip off extensions
                           let format = takeWhile (`notElem` "+-") writerName'
                           let tp' = case takeExtension tp of
                                          ""   -> tp <.> format
                                          _    -> tp
                           E.catch (UTF8.readFile tp')
                             (\e -> if isDoesNotExistError e
                                       then E.catch
                                             (readDataFileUTF8 datadir
                                                ("templates" </> tp'))
                                             (\e' -> let _ = (e' :: E.SomeException)
                                                     in throwIO e')
                                       else throwIO e)
  -- inject math scripts needed by some HTML math methods
  variables' <- case mathMethod of
                      LaTeXMathML Nothing -> do
                         s <- readDataFileUTF8 datadir "LaTeXMathML.js"
                         return $ ("mathml-script", s) : variables
                      MathML Nothing -> do
                         s <- readDataFileUTF8 datadir "MathMLinHTML.js"
                         return $ ("mathml-script", s) : variables
                      _ -> return variables
  -- dzslides needs its core template embedded as a variable
  variables'' <- if "dzslides" `isPrefixOf` writerName'
                    then do
                        dztempl <- readDataFileUTF8 datadir
                                     ("dzslides" </> "template.html")
                        let dzline = "<!-- {{{{ dzslides core"
                        let dzcore = unlines
                                   $ dropWhile (not . (dzline `isPrefixOf`))
                                   $ lines dztempl
                        return $ ("dzslides-core", dzcore) : variables'
                    else return variables'
  -- base URL for resolving relative resources when the input is a URL
  let sourceURL = case sources of
                    []    -> Nothing
                    (x:_) -> case parseURI x of
                                Just u
                                  | uriScheme u `elem` ["http:","https:"] ->
                                      Just $ show u{ uriQuery = "",
                                                     uriFragment = "" }
                                _ -> Nothing
  -- assemble reader options
  let readerOpts = def{ readerSmart = smart || (texLigatures &&
                          (laTeXOutput || "context" `isPrefixOf` writerName'))
                      , readerStandalone = standalone'
                      , readerParseRaw = parseRaw
                      , readerColumns = columns
                      , readerTabStop = tabStop
                      , readerOldDashes = oldDashes
                      , readerIndentedCodeClasses = codeBlockClasses
                      , readerApplyMacros = not laTeXOutput
                      , readerDefaultImageExtension = defaultImageExtension
                      , readerTrace = trace
                      , readerTrackChanges = trackChanges
                      }
  when (not (isTextFormat writerName') && outputFile == "-") $
    err 5 $ "Cannot write " ++ writerName' ++ " output to stdout.\n" ++
            "Specify an output file using the -o option."
  -- helpers for reading input from stdin, local files, or URIs
  let readSources [] = mapM readSource ["-"]
      readSources srcs = mapM readSource srcs
      readSource "-" = UTF8.getContents
      readSource src = case parseURI src of
                            Just u | uriScheme u `elem` ["http:","https:"] ->
                                       readURI src
                            _       -> UTF8.readFile src
      readURI src = do
        res <- openURL src
        case res of
             Left e        -> throwIO e
             Right (bs,_)  -> return $ UTF8.toString bs
  -- binary (bytestring) readers take exactly one file, never stdin
  let readFiles [] = error "Cannot read archive from stdin"
      readFiles [x] = B.readFile x
      readFiles (x:xs) = mapM_ (warn . ("Ignoring: " ++)) xs >> B.readFile x
  let convertTabs = tabFilter (if preserveTabs || readerName' == "t2t"
                                 then 0
                                 else tabStop)
  -- LaTeX input may pull in \include'd files before parsing
  let handleIncludes' = if readerName' == "latex" ||
                           readerName' == "latex+lhs"
                           then handleIncludes
                           else return
  -- run the reader; string readers get concatenated, tab-filtered input
  (doc, media) <-
     case reader of
          StringReader r-> (, mempty) <$>
                (  readSources >=>
                   handleIncludes' . convertTabs . intercalate "\n" >=>
                   r readerOpts ) sources
          ByteStringReader r -> readFiles sources >>= r readerOpts
  -- assemble writer options
  let writerOptions = def { writerStandalone       = standalone',
                            writerTemplate         = templ,
                            writerVariables        = variables'',
                            writerTabStop          = tabStop,
                            writerTableOfContents  = toc,
                            writerHTMLMathMethod   = mathMethod,
                            writerIncremental      = incremental,
                            writerCiteMethod       = citeMethod,
                            writerIgnoreNotes      = False,
                            writerNumberSections   = numberSections,
                            writerNumberOffset     = numberFrom,
                            writerSectionDivs      = sectionDivs,
                            writerReferenceLinks   = referenceLinks,
                            writerWrapText         = wrap,
                            writerColumns          = columns,
                            writerEmailObfuscation = obfuscationMethod,
                            writerIdentifierPrefix = idPrefix,
                            writerSourceURL        = sourceURL,
                            writerUserDataDir      = datadir,
                            writerHtml5            = html5,
                            writerHtmlQTags        = htmlQTags,
                            writerChapters         = chapters,
                            writerListings         = listings,
                            writerBeamer           = False,
                            writerSlideLevel       = slideLevel,
                            writerHighlight        = highlight,
                            writerHighlightStyle   = highlightStyle,
                            writerSetextHeaders    = setextHeaders,
                            writerTeXLigatures     = texLigatures,
                            writerEpubMetadata     = epubMetadata,
                            writerEpubStylesheet   = epubStylesheet,
                            writerEpubFonts        = epubFonts,
                            writerEpubChapterLevel = epubChapterLevel,
                            writerTOCDepth         = epubTOCDepth,
                            writerReferenceODT     = referenceODT,
                            writerReferenceDocx    = referenceDocx,
                            writerMediaBag         = media,
                            writerVerbose          = verbose
                          }
  -- post-process: media extraction, metadata, transforms, external filters
  doc' <- (maybe return (extractMedia media) mbExtractMedia >=>
           adjustMetadata metadata >=>
           applyTransforms transforms >=>
           applyFilters filters' [writerName']) doc
  let writeBinary :: B.ByteString -> IO ()
      writeBinary = B.writeFile (UTF8.encodePath outputFile)
  let writerFn :: FilePath -> String -> IO ()
      writerFn "-" = UTF8.putStr
      writerFn f   = UTF8.writeFile f
  -- render and write the output; pdf goes through the LaTeX engine
  case writer of
    IOStringWriter f -> f writerOptions doc' >>= writerFn outputFile
    IOByteStringWriter f -> f writerOptions doc' >>= writeBinary
    PureStringWriter f
      | pdfOutput -> do
              -- make sure writer is latex or beamer
              unless laTeXOutput $
                err 47 $ "cannot produce pdf output with " ++ writerName' ++
                         " writer"
              -- check for latex program
              mbLatex <- findExecutable latexEngine
              when (mbLatex == Nothing) $
                   err 41 $ latexEngine ++ " not found. " ++
                     latexEngine ++ " is needed for pdf output."
              res <- makePDF latexEngine f writerOptions doc'
              case res of
                   Right pdf -> writeBinary pdf
                   Left err' -> do
                     B.hPutStr stderr $ err'
                     B.hPut stderr $ B.pack [10]
                     err 43 "Error producing PDF from TeX source"
      | otherwise -> selfcontain (f writerOptions doc' ++
                                  ['\n' | not standalone'])
                      >>= writerFn outputFile . handleEntities
          where htmlFormat = writerName' `elem`
                      ["html","html+lhs","html5","html5+lhs",
                      "s5","slidy","slideous","dzslides","revealjs"]
                selfcontain = if selfContained && htmlFormat
                                 then makeSelfContained writerOptions
                                 else return
                handleEntities = if htmlFormat && ascii
                                    then toEntities
                                    else id
| sapek/pandoc | pandoc.hs | gpl-2.0 | 59,657 | 0 | 25 | 27,007 | 11,383 | 6,158 | 5,225 | -1 | -1 |
{- This module was generated from data in the Kate syntax
highlighting file sgml.xml, version 1.02, by -}
module Text.Highlighting.Kate.Syntax.Sgml
(highlight, parseExpression, syntaxName, syntaxExtensions)
where
import Text.Highlighting.Kate.Types
import Text.Highlighting.Kate.Common
import Text.ParserCombinators.Parsec hiding (State)
import Control.Monad.State
import Data.Char (isSpace)
-- | Full name of language.
syntaxName :: String
syntaxName = "SGML"
-- | Filename extensions for this language (glob pattern).
syntaxExtensions :: String
syntaxExtensions = "*.sgml"
-- | Highlight source code using this syntax definition.  Lines are parsed
-- in order, threading the syntax state from one line to the next.
highlight :: String -> [SourceLine]
highlight src =
  evalState (traverse parseSourceLine (lines src)) startingState
-- Parse a single input line into tokens, carrying the syntax state
-- (context stack, previous character, etc.) in the State monad.
parseSourceLine :: String -> State SyntaxState SourceLine
parseSourceLine = mkParseSourceLine (parseExpression Nothing)
-- | Parse an expression using appropriate local context.  When no context
-- is supplied, the current context from the parser state is used.
parseExpression :: Maybe (String,String)
                -> KateParser Token
parseExpression mbcontext = do
  context <- maybe currentContext return mbcontext
  result  <- parseRules context
  -- At end of input, record the newline and run end-of-line handling.
  optional $ do
    eof
    updateState $ \st -> st{ synStPrevChar = '\n' }
    pEndLine
  return result
startingState = SyntaxState {synStContexts = [("SGML","Normal Text")], synStLineNumber = 0, synStPrevChar = '\n', synStPrevNonspace = False, synStContinuation = False, synStCaseSensitive = True, synStKeywordCaseSensitive = False, synStCaptures = []}
-- End-of-line handling: reset the nonspace flag and clear a pending line
-- continuation.  In the generated original every named-context branch was
-- a no-op, so only the continuation case has any effect; the current
-- context is still demanded to preserve the original monadic behavior.
pEndLine = do
  updateState $ \st -> st{ synStPrevNonspace = False }
  _ <- currentContext
  contexts <- synStContexts `fmap` getState
  st <- getState
  when (length contexts >= 2 && synStContinuation st) $
    updateState $ \s -> s{ synStContinuation = False }
-- Tag matched text with a token attribute, updating the parser state's
-- previous-character and previous-nonspace bookkeeping.  An empty match
-- is rejected so parsing always consumes input.
withAttribute attr txt
  | null txt  = fail "Parser matched no text"
  | otherwise = do
      updateState $ \st ->
        st { synStPrevChar = last txt
           , synStPrevNonspace =
               synStPrevNonspace st || any (not . isSpace) txt }
      return (attr, txt)
-- Precompiled regexes; the generated names encode the escaped pattern.
-- Matches the start of an (optionally closing) tag: "<\s*/?\s*NAME".
regex_'3c'5cs'2a'5c'2f'3f'5cs'2a'5ba'2dzA'2dZ'5f'3a'5d'5ba'2dzA'2dZ0'2d9'2e'5f'3a'2d'5d'2a = compileRegex True "<\\s*\\/?\\s*[a-zA-Z_:][a-zA-Z0-9._:-]*"
-- Matches an attribute/value separator: "\s*=\s*".
regex_'5cs'2a'3d'5cs'2a = compileRegex True "\\s*=\\s*"
-- Transition rules for each highlighting context.  Alternatives are tried
-- in order; the trailing pDefault alternative consumes a default token so
-- parsing always makes progress.
parseRules ("SGML","Normal Text") =
  (((pString False "<!--" >>= withAttribute CommentTok) >>~ pushContext ("SGML","Comment"))
   <|>
   ((pRegExpr regex_'3c'5cs'2a'5c'2f'3f'5cs'2a'5ba'2dzA'2dZ'5f'3a'5d'5ba'2dzA'2dZ0'2d9'2e'5f'3a'2d'5d'2a >>= withAttribute KeywordTok) >>~ pushContext ("SGML","Attribute"))
   <|>
   (currentContext >>= \x -> guard (x == ("SGML","Normal Text")) >> pDefault >>= withAttribute NormalTok))
-- Inside a tag: attribute names until '>' or '/>' closes the tag.
parseRules ("SGML","Attribute") =
  (((pDetect2Chars False '/' '>' >>= withAttribute KeywordTok) >>~ (popContext))
   <|>
   ((pDetectChar False '>' >>= withAttribute KeywordTok) >>~ (popContext))
   <|>
   ((pRegExpr regex_'5cs'2a'3d'5cs'2a >>= withAttribute NormalTok) >>~ pushContext ("SGML","Value"))
   <|>
   (currentContext >>= \x -> guard (x == ("SGML","Attribute")) >> pDefault >>= withAttribute OtherTok))
-- After '=': an attribute value; '"' opens a quoted value.
parseRules ("SGML","Value") =
  (((pDetect2Chars False '/' '>' >>= withAttribute KeywordTok) >>~ (popContext >> popContext))
   <|>
   ((pDetectChar False '>' >>= withAttribute KeywordTok) >>~ (popContext >> popContext))
   <|>
   ((pDetectChar False '"' >>= withAttribute DataTypeTok) >>~ pushContext ("SGML","Value 2"))
   <|>
   (currentContext >>= \x -> guard (x == ("SGML","Value")) >> pDefault >>= withAttribute DataTypeTok))
-- Inside a double-quoted value; the closing '"' pops out of the value.
parseRules ("SGML","Value 2") =
  (((pDetectChar False '"' >>= withAttribute DataTypeTok) >>~ (popContext >> popContext))
   <|>
   (currentContext >>= \x -> guard (x == ("SGML","Value 2")) >> pDefault >>= withAttribute DataTypeTok))
-- Comments run until the closing "-->".
parseRules ("SGML","Comment") =
  (((pString False "-->" >>= withAttribute CommentTok) >>~ (popContext))
   <|>
   (currentContext >>= \x -> guard (x == ("SGML","Comment")) >> pDefault >>= withAttribute CommentTok))
-- Any unknown context falls back to the normal-text rules.
parseRules x = parseRules ("SGML","Normal Text") <|> fail ("Unknown context" ++ show x)
| ambiata/highlighting-kate | Text/Highlighting/Kate/Syntax/Sgml.hs | gpl-2.0 | 4,310 | 0 | 15 | 744 | 1,281 | 687 | 594 | 78 | 8 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Directory.Roles.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a paginated list of all the roles in a domain.
--
-- /See:/ <https://developers.google.com/admin-sdk/directory/ Admin Directory API Reference> for @directory.roles.list@.
module Network.Google.Resource.Directory.Roles.List
(
-- * REST Resource
RolesListResource
-- * Creating a Request
, rolesList
, RolesList
-- * Request Lenses
, rlCustomer
, rlPageToken
, rlMaxResults
) where
import Network.Google.Directory.Types
import Network.Google.Prelude
-- | A resource alias for @directory.roles.list@ method which the
-- 'RolesList' request conforms to.
-- NOTE(review): servant-style route; the nesting mirrors the URL path
-- @/admin/directory/v1/customer/{customer}/roles@ with optional
-- @pageToken@ / @maxResults@ / @alt@ query parameters, decoding a JSON
-- 'Roles' response.
type RolesListResource =
     "admin" :>
       "directory" :>
         "v1" :>
           "customer" :>
             Capture "customer" Text :>
               "roles" :>
                 QueryParam "pageToken" Text :>
                   QueryParam "maxResults" (Textual Int32) :>
                     QueryParam "alt" AltJSON :> Get '[JSON] Roles
-- | Retrieves a paginated list of all the roles in a domain.
--
-- /See:/ 'rolesList' smart constructor.
data RolesList = RolesList'
    { _rlCustomer :: !Text -- ^ immutable id of the Google Apps account (required)
    , _rlPageToken :: !(Maybe Text) -- ^ token selecting the next result page
    , _rlMaxResults :: !(Maybe (Textual Int32)) -- ^ cap on results per page
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'RolesList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rlCustomer'
--
-- * 'rlPageToken'
--
-- * 'rlMaxResults'
rolesList
    :: Text -- ^ 'rlCustomer'
    -> RolesList
rolesList customer = RolesList'
    { _rlCustomer = customer
    , _rlPageToken = Nothing
    , _rlMaxResults = Nothing
    }
-- | Immutable id of the Google Apps account.
rlCustomer :: Lens' RolesList Text
rlCustomer = lens getter setter
  where
    getter = _rlCustomer
    setter record v = record { _rlCustomer = v }
-- | Token to specify the next page in the list.
rlPageToken :: Lens' RolesList (Maybe Text)
rlPageToken = lens getter setter
  where
    getter = _rlPageToken
    setter record v = record { _rlPageToken = v }
-- | Maximum number of results to return.
-- The stored field is @Maybe (Textual Int32)@; composing with
-- 'mapping' '_Coerce' exposes it to callers as a plain @Maybe Int32@.
rlMaxResults :: Lens' RolesList (Maybe Int32)
rlMaxResults
  = lens _rlMaxResults (\ s a -> s{_rlMaxResults = a})
      . mapping _Coerce
-- Wires 'RolesList' to the REST route above: response type, OAuth
-- scopes, and the client built from 'RolesListResource'.
instance GoogleRequest RolesList where
        type Rs RolesList = Roles
        type Scopes RolesList =
             '["https://www.googleapis.com/auth/admin.directory.rolemanagement",
               "https://www.googleapis.com/auth/admin.directory.rolemanagement.readonly"]
        requestClient RolesList'{..}
          = go _rlCustomer _rlPageToken _rlMaxResults
              (Just AltJSON)
              directoryService
          where go
                  = buildClient (Proxy :: Proxy RolesListResource)
                      mempty
| rueshyna/gogol | gogol-admin-directory/gen/Network/Google/Resource/Directory/Roles/List.hs | mpl-2.0 | 3,493 | 0 | 16 | 859 | 492 | 289 | 203 | 73 | 1 |
{-# LANGUAGE TupleSections #-}
module Web.Info
( makeWebFileInfo
, loadWebFileMap
) where
import qualified Data.HashMap.Strict as HM
import Data.Text (pack)
import qualified Network.Mime as Mime
import System.FilePath (takeFileName)
import System.Posix.ByteString (getFileStatus)
import Files
import Web
import Web.Files
import Web.Types
-- | Build the 'WebFileInfo' for a file: mime type, content hash, and
-- filesystem modification timestamp.
makeWebFileInfo :: WebFilePath -> IO WebFileInfo
makeWebFileInfo f = do
  fp <- unRawFilePath $ webFileAbs f
  -- Mime type is looked up purely from the file name, not the contents.
  let format = Mime.defaultMimeLookup (pack (takeFileName fp))
  hash <- hashFile $ webFileAbs f
  ts <- modificationTimestamp <$> getFileStatus (webFileAbs f)
  return $ WebFileInfo format hash ts
-- | Stat every known web file and collect the results into a map keyed
-- by 'WebFilePath'.
loadWebFileMap :: IO WebFileMap
loadWebFileMap = do
  files <- allWebFiles
  infos <- mapM makeWebFileInfo files
  return $ HM.fromList (zip files infos)
| databrary/databrary | src/Web/Info.hs | agpl-3.0 | 780 | 0 | 14 | 120 | 235 | 126 | 109 | 22 | 1 |
-- Copyright (C) 2016-2017 Red Hat, Inc.
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, see <http://www.gnu.org/licenses/>.
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Import.Repodata(RepoException,
loadFromURI,
loadRepoFromURI)
where
import Control.Applicative((<|>))
import Control.Exception(Exception)
import Control.Monad.IO.Class(MonadIO)
import Control.Monad.Reader(ReaderT)
import Control.Monad.Trans.Resource(MonadBaseControl, MonadThrow)
import Data.Conduit((.|), runConduitRes)
import Data.Data(Typeable)
import Data.Maybe(listToMaybe)
import qualified Data.Text as T
import Network.URI(URI)
import Text.XML(Document, sinkDoc)
import Text.XML.Cursor
import Text.XML.Stream.Parse(def)
import qualified Import.Comps as Comps
import Import.Conduit(getFromURI, ungzipIfCompressed)
import qualified Import.RPM as RPM
import Import.State(ImportState(..))
import Import.URI(appendURI, baseURI)
import BDCS.Exceptions(throwIfNothing)
-- | Thrown when an expected repodata entry is missing or a repo URI
-- cannot be constructed.
data RepoException = RepoException
  deriving(Show, Typeable)
instance Exception RepoException
-- | Collect every @href@ from the @\<location\>@ elements of a
-- repo metadata document.
extractLocations :: Document -> [T.Text]
extractLocations doc = let
    cursor = fromDocument doc
 in
    -- Find all <location href=""> elements and return the href's value. laxElement
    -- means we ignore case and ignore namespacing. Otherwise we need to take into
    -- account the namespace given in the primary.xml.
    cursor $// laxElement "location"
        >=> hasAttribute "href"
        >=> attribute "href"
-- For a given datatype name, return the first /<root>/data[@type=<type>]/@href
-- Only immediate children of the root are inspected ($/), and only the
-- first matching href is returned.
extractType :: Document -> T.Text -> Maybe T.Text
extractType doc dataType = let
    cursor = fromDocument doc
 in
    listToMaybe $ cursor $/ laxElement "data" >=>
                  attributeIs "type" dataType &/
                  laxElement "location" >=>
                  attribute "href"
-- | Fetch an XML document from a URI, transparently decompressing it
-- if it is gzip-compressed, and parse it strictly.
fetchAndParse :: (MonadThrow m, MonadIO m, MonadBaseControl IO m) => URI -> m Document
fetchAndParse uri = runConduitRes $ getFromURI uri .| ungzipIfCompressed .| sinkDoc def
-- | Import a yum/dnf repository: read @repodata/repomd.xml@, import the
-- packages listed in @primary@, then import comps (group) data when the
-- repo provides it.
loadRepoFromURI :: URI -> ReaderT ImportState IO ()
loadRepoFromURI uri = do
    -- Fetch and parse repomd.xml; a missing primary entry is fatal.
    repomd <- fetchAndParse (appendOrThrow "repodata/repomd.xml")
    let primaryHref = extractType repomd "primary" `throwIfNothing` RepoException
    loadFromURI (appendOrThrow primaryHref)
    -- Comps is optional: try group_gz first, then group; if neither
    -- exists we simply skip the comps import.
    let mGroup = extractType repomd "group_gz" <|> extractType repomd "group"
    maybe (return ()) (Comps.loadFromURI . appendOrThrow) mGroup
 where
    appendOrThrow :: T.Text -> URI
    appendOrThrow path = appendURI uri (T.unpack path) `throwIfNothing` RepoException
-- | Import every RPM listed in the primary metadata document at the
-- given URI. Each @location href@ is resolved against the metadata
-- URI's base.
loadFromURI :: URI -> ReaderT ImportState IO ()
loadFromURI metadataURI = do
    doc <- fetchAndParse metadataURI
    mapM_ (RPM.loadFromURI . appendOrThrow) (extractLocations doc)
 where
    appendOrThrow :: T.Text -> URI
    appendOrThrow path = appendURI (baseURI metadataURI) (T.unpack path) `throwIfNothing` RepoException
| dashea/bdcs | importer/Import/Repodata.hs | lgpl-2.1 | 4,140 | 0 | 11 | 934 | 767 | 418 | 349 | 63 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Transform (
toPigLatin
) where
import Model.Definition
import Data.Text.Lazy as D
-- | Pair each definition with its pig-latin rendering.
-- NOTE(review): unimplemented stub — evaluating the result raises
-- @error "todo"@.
toPigLatin :: [Definition] -> [(Definition, D.Text)]
toPigLatin defs = error "todo"
| codemiller/fp-in-the-cloud | src/Transform.hs | apache-2.0 | 228 | 0 | 8 | 47 | 57 | 35 | 22 | 7 | 1 |
{-# LANGUAGE FlexibleContexts, OverloadedStrings,
NoImplicitPrelude, FlexibleInstances, GADTs, Rank2Types,
TypeFamilies, ScopedTypeVariables, UndecidableInstances #-}
module Ethereum.Analyzer.EVM.IR
( HplCfg
, HplContract(..)
, HplOp(..)
, WordLabelMapM
, WordLabelMapFuelM
, unWordLabelMapM
, evmOps2HplCfg
, evmOps2HplContract
, labelFor
, labelsFor
, showOp
, showOps
) where
import Protolude hiding (show)
import Blockchain.ExtWord as BE
import Blockchain.VM.Opcodes as BVO
import Compiler.Hoopl as CH
import Ckev.In.Text
import Data.Bimap as DB
import qualified Data.Text.Lazy as DTL
import Legacy.Haskoin.V0102.Network.Haskoin.Crypto.BigWord
-- | Wrapper around hoopl's 'Label' used to tag synthetic jump/end
-- nodes.
newtype MyLabel =
  MyLabel Label
  deriving (Eq, Show)

-- | Hoopl node for EVM code, shape-indexed (C = closed, O = open).
data HplOp e x where
        CoOp :: Label -> HplOp C O -- ^ basic-block entry
        OoOp :: (Word256, Operation) -> HplOp O O -- ^ straight-line op at its byte offset
        OcOp :: (Word256, Operation) -> [Label] -> HplOp O C -- ^ block-closing op with successor labels
        HpJump :: MyLabel -> Label -> HplOp O C -- ^ synthetic unconditional jump
        HpEnd :: MyLabel -> HplOp O C -- ^ synthetic end-of-graph marker
        HpCodeCopy :: Word256 -> HplOp O O -- ^ CODECOPY bookkeeping at the given offset
-- | Render a byte offset as its decimal integer value.
showLoc :: Word256 -> Text
showLoc = showT . getBigWordInteger

-- | Render one located op as @offset: OP@.
showOp :: (Word256, Operation) -> Text
showOp (lineNo, op) = showLoc lineNo <> ": " <> showT op

-- | Render a list of located ops, one 'Text' per op.
showOps :: [(Word256, Operation)] -> [Text]
showOps = fmap showOp
-- Debug rendering; the shape of the node is encoded in the CO/OO/OC
-- prefix. Note 'HpJump' drops its 'MyLabel' tag from the output.
instance ShowText (HplOp e x) where
  showText (CoOp l) = "CO: " <> showT l
  showText (OoOp op) = "OO: " <> showOp op
  showText (OcOp op ll) = "OC: " <> showOp op <> " -> " <> showT ll
  showText (HpJump _ l) = "OC: HpJump -> " <> showT l
  showText HpEnd {} = "OC: HpEnd"
  showText (HpCodeCopy offset) = "HpCodeCopy " <> showT offset
instance Eq (HplOp C O) where
  (==) (CoOp a) (CoOp b) = a == b

instance Eq (HplOp O O) where
  (==) (OoOp a) (OoOp b) = a == b
  (==) (HpCodeCopy a) (HpCodeCopy b) = a == b
  (==) _ _ = False

-- NOTE(review): 'OcOp' equality compares only the op, deliberately
-- ignoring the successor-label lists.
instance Eq (HplOp O C) where
  (==) (OcOp a _) (OcOp b _) = a == b
  (==) (HpJump l1 _) (HpJump l2 _) = l1 == l2
  (==) (HpEnd l1) (HpEnd l2) = l1 == l2
  (==) _ _ = False
-- Hoopl's view of control flow: the entry label of a closed-entry node
-- and the successor labels of closed-exit nodes.
instance NonLocal HplOp where
  entryLabel (CoOp l) = l
  successors (OcOp _ ll) = ll
  successors (HpJump _ ll) = [ll]
  successors (HpEnd _) = []
-- | A control-flow graph over 'HplOp', open on entry and closed on
-- exit.
type HplCfg = Graph HplOp O C

instance ShowText HplCfg where
  showText = showGraphT showText

-- | A contract split into its constructor code and dispatcher code.
data HplContract = HplContract
  { ctorOf :: HplCfg -- ^ CFG of the deployment (constructor) code
  , dispatcherOf :: HplCfg -- ^ CFG of the runtime (dispatcher) code
  }

instance ShowText HplContract where
  showText c = showT [showText $ ctorOf c, showText $ dispatcherOf c]
-- | A CFG containing nothing but a fresh 'HpEnd' terminator.
emptyHplCfg
  :: UniqueMonad m
  => m HplCfg
emptyHplCfg = fmap (mkLast . HpEnd) myFreshLabel
-- | Treat the given op list as constructor code; the dispatcher CFG is
-- left empty.
evmOps2HplContract :: [(Word256, Operation)] -> WordLabelMapM HplContract
evmOps2HplContract ops = do
  ctorCfg <- evmOps2HplCfg ops
  dispatcherCfg <- emptyHplCfg
  return HplContract {ctorOf = ctorCfg, dispatcherOf = dispatcherCfg}
-- | Mint a fresh hoopl label and wrap it in 'MyLabel'.
myFreshLabel
  :: UniqueMonad m
  => m MyLabel
myFreshLabel = MyLabel <$> freshLabel
-- | Partition a linear list of located EVM ops into basic blocks.
-- Blocks end at terminator ops and begin at JUMPDESTs; a synthetic
-- 'HpJump' to the first op's label starts the graph.
evmOps2HplCfg :: [(Word256, Operation)] -> WordLabelMapM HplCfg
evmOps2HplCfg [] = emptyHplCfg
evmOps2HplCfg el@((loc, _):_) = do
  l <- labelFor loc
  jpLabel <- myFreshLabel
  doEvmOps2HplCfg (mkLast $ HpJump jpLabel l) (mkFirst $ CoOp l) el
  where
    -- Accumulates finished blocks in `body` while `hd` is the
    -- still-open block being extended.
    doEvmOps2HplCfg :: HplCfg
                    -> Graph HplOp C O
                    -> [(Word256, Operation)]
                    -> WordLabelMapM HplCfg
    doEvmOps2HplCfg body _ [] = return body -- silently discarding bad hds
    doEvmOps2HplCfg body hd [h'] =
      if isTerminator (snd h')
        then return $ body |*><*| hd CH.<*> mkLast (OcOp h' [])
        else return body -- silently discarding bad hds
    doEvmOps2HplCfg body hd (h':(t'@((loc', op'):_)))
      -- Terminator: close the block; add a fall-through edge only when
      -- control can pass through (e.g. JUMPI, CALL).
      | isTerminator (snd h') = do
          l' <- labelFor loc'
          doEvmOps2HplCfg
            (body |*><*| hd CH.<*> mkLast (OcOp h' [l' | canPassThrough (snd h')]))
            (mkFirst $ CoOp l')
            t'
      -- Plain op followed by a non-JUMPDEST op: extend the open block.
      | op' /= JUMPDEST = doEvmOps2HplCfg body (hd CH.<*> mkMiddle (OoOp h')) t'
      -- Next op is a JUMPDEST: close here so the dest starts a block.
      | otherwise = do
          l' <- labelFor loc'
          doEvmOps2HplCfg
            (body |*><*| hd CH.<*> mkLast (OcOp h' [l' | canPassThrough (snd h')]))
            (mkFirst $ CoOp l')
            t'
-- | Ops that end a basic block.
isTerminator :: Operation -> Bool
isTerminator op =
  case op of
    STOP -> True
    JUMP -> True
    JUMPI -> True
    CALL -> True
    CALLCODE -> True
    RETURN -> True
    DELEGATECALL -> True
    INVALID -> True
    SUICIDE -> True
    _ -> False

-- | Terminators after which control may still fall through to the
-- following instruction.
canPassThrough :: Operation -> Bool
canPassThrough op =
  case op of
    STOP -> False
    JUMP -> False
    RETURN -> False
    INVALID -> False
    SUICIDE -> False
    _ -> True
--------------------------------------------------------------------------------
-- The WordLabelMapM monad
--------------------------------------------------------------------------------
-- | Bidirectional map from byte offsets to hoopl labels.
type WordLabelMap = Bimap Word256 Label

-- | State-passing monad threading the offset/label bimap through
-- 'SimpleUniqueMonad' so fresh labels can be minted.
newtype WordLabelMapM a =
  WordLabelMapM (WordLabelMap -> SimpleUniqueMonad (WordLabelMap, a))
-- Checkpoints capture both the label map and the underlying unique
-- supply; 'restart' restores both, discarding changes made since.
instance CheckpointMonad WordLabelMapM where
  type Checkpoint WordLabelMapM = (WordLabelMap, Checkpoint SimpleUniqueMonad)
  checkpoint =
    let mapper
          :: WordLabelMap
          -> SimpleUniqueMonad (WordLabelMap, Checkpoint WordLabelMapM)
        mapper m = do
          suCheckpoint <- CH.checkpoint
          return (m, (m, suCheckpoint))
    in WordLabelMapM mapper
  restart (m, suCheckpoint) =
    let mapper :: WordLabelMap -> CH.SimpleUniqueMonad (WordLabelMap, ())
        mapper _ = do
          _ <- CH.restart suCheckpoint
          return (m, ())
    in WordLabelMapM mapper
-- Uniques come from the wrapped 'SimpleUniqueMonad'; the label map is
-- passed through untouched.
instance UniqueMonad WordLabelMapM where
  freshUnique = WordLabelMapM f
    where
      f m = do
        u <- freshUnique
        return (m, u)

-- | 'WordLabelMapM' with hoopl's optimization-fuel bookkeeping.
type WordLabelMapFuelM = CheckingFuelMonad WordLabelMapM
-- | Memoised label lookup: reuse the label already associated with
-- this offset, or mint a fresh one and record it.
labelFor :: Word256 -> WordLabelMapM Label
labelFor word = WordLabelMapM lookupOrMint
  where
    lookupOrMint m =
      case DB.lookup word m of
        Just known -> return (m, known)
        Nothing -> do
          fresh <- freshLabel
          return (DB.insert word fresh m, fresh)
-- | 'labelFor' over a list of offsets, preserving order.
labelsFor :: [Word256] -> WordLabelMapM [Label]
labelsFor words' = traverse labelFor words'
-- Standard state-threading instances: bind passes the updated map from
-- the first computation into the continuation.
instance Monad WordLabelMapM where
  return = pure
  WordLabelMapM f1 >>= k =
    WordLabelMapM $ \m -> do
      (m', x) <- f1 m
      let (WordLabelMapM f2) = k x
      f2 m'

instance Functor WordLabelMapM where
  fmap = liftM

instance Applicative WordLabelMapM where
  pure x = WordLabelMapM (\m -> return (m, x))
  (<*>) = ap
-- | Types whose 'WordLabelMapM' computations may be run to a pure
-- value (starting from an empty label map).
class UnWordLabelMapM a where
  unWordLabelMapM :: WordLabelMapM a -> a

instance UnWordLabelMapM Int where
  unWordLabelMapM = internalUnWordLabelMapM

instance UnWordLabelMapM Text where
  unWordLabelMapM = internalUnWordLabelMapM

instance UnWordLabelMapM DTL.Text where
  unWordLabelMapM = internalUnWordLabelMapM

instance UnWordLabelMapM (a, b) where
  unWordLabelMapM = internalUnWordLabelMapM

-- Run with an empty map and a fresh unique supply, keeping only the
-- result.
internalUnWordLabelMapM :: WordLabelMapM a -> a
internalUnWordLabelMapM (WordLabelMapM f) =
  snd $ runSimpleUniqueMonad (f DB.empty)
| zchn/ethereum-analyzer | ethereum-analyzer/src/Ethereum/Analyzer/EVM/IR.hs | apache-2.0 | 6,899 | 0 | 20 | 1,582 | 2,295 | 1,196 | 1,099 | 194 | 4 |
{-# LANGUAGE Haskell2010 #-}
module PR643_1 where
-- Fixity declaration applies when 'test' is used in backticked infix
-- position.
infixr 5 `test`
-- | Some big documentation
test :: ()
test = ()
| haskell/haddock | html-test/src/PR643_1.hs | bsd-2-clause | 117 | 0 | 5 | 23 | 27 | 18 | 9 | 5 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module D5Lib where
import Data.Char
import Data.List
import Data.Maybe
import Util
-- | A candidate password character paired with its target position.
type CharPos = (Char, Int)

-- Number of characters in a finished password.
passwordLength = 8
maxStretches = 100000000 -- reasonable limit to detect infinite recursion
-- Infinite supply of hash indices.
stretches = [0..]
-- | First 'passwordLength' characters produced by 'passChar' over
-- successive stretch indices.
password :: String -> String
password seed = take passwordLength $ passPieces $ passChar seed
-- Not sure what I did, but this impl is non-terminating, or just VERY poor
-- performing
{-orderedPassword :: String -> String-}
{-orderedPassword seed = process $ passPieces $ orderedPassChar seed-}
{-where-}
{-process = map fst . sortBy sorter . take passwordLength . foldl reduce []-}
{-sorter (_, i1) (_, i2) = compare i1 i2-}
{-reduce :: [CharPos] -> CharPos -> [CharPos]-}
{-reduce xs (c, i) = if i `elem` (map snd xs)-}
{-then xs-}
{-else (c, i) : xs-}
-- | Positional variant: each hash names both a character and its slot;
-- only the first character seen for each slot is kept, then the
-- collected pairs are sorted by slot.
orderedPassword :: String -> String
orderedPassword seed = map fst . sortBy sorter $ build [] 0
  where
    sorter (_, i1) (_, i2) = compare i1 i2
    -- Accumulate (char, position) pairs until all slots are filled;
    -- bail out loudly if we exceed the iteration cap.
    build :: [CharPos] -> Int -> [CharPos]
    build xs _ | length xs >= passwordLength = xs
    build xs i | i > maxStretches = error $ "Too many iterations xs=" ++ show xs ++ " i=" ++ show i
    build xs i = case orderedPassChar seed i of
      Just (c, ci) -> if ci `elem` map snd xs
        then build xs (i + 1) -- slot already taken: skip
        else build ((c, ci) : xs) (i + 1)
      Nothing -> build xs (i + 1)
-- | Run the candidate generator @f@ over every stretch index, keeping
-- only the successful ('Just') results.
--
-- Uses 'mapMaybe' in place of the previous
-- @map (fromMaybe err) . filter isJust . map f@ pipeline, which carried
-- an unreachable partial @error@ branch.
passPieces :: forall a. (Int -> Maybe a) -> [a]
passPieces f = mapMaybe f stretches
-- | Sixth character of an "interesting" hash, if any.
-- NOTE(review): the @(!! 5)@ index assumes the hash string has at least
-- six characters — presumably a 32-char md5 hex digest; confirm in Util.
passChar :: String -> Int -> Maybe Char
passChar seed idx = (!! 5) <$> idxHash seed idx
-- | Positional variant of 'passChar': hash char 5 names the slot
-- (must be a digit 0-7), char 6 is the password character.
orderedPassChar :: String -> Int -> Maybe CharPos
orderedPassChar seed idx = idxHash seed idx >>= extract
  where
    extract str = if validPos str
      then Just (str !! 6, rInt [str !! 5])
      else Nothing
    rInt = read :: String -> Int
    -- Slot is valid only when char 5 is a digit below 8.
    validPos str = isDigit (str !! 5) && rInt [str !! 5] < 8
-- | Hash @seed ++ show idx@ and keep the digest only when it starts
-- with five zeros; errors out past the iteration cap.
idxHash :: String -> Int -> Maybe String
idxHash _ idx | idx > maxStretches = error $ "Too many iterations (might be infinite) idx=" ++ show idx
idxHash seed idx
  | "00000" `isPrefixOf` digest = Just digest
  | otherwise = Nothing
  where
    digest = md5 (seed ++ show idx)
| wfleming/advent-of-code-2016 | 2016/src/D5Lib.hs | bsd-3-clause | 2,257 | 0 | 14 | 564 | 691 | 362 | 329 | 43 | 5 |
{-# LANGUAGE FlexibleInstances, TypeOperators, DefaultSignatures, DeriveGeneric, FlexibleContexts, OverloadedStrings, RankNTypes, EmptyDataDecls #-}
-- |
-- Module: Text.XML.Generic.ToXml
-- Copyright: 2013 Dmitry Olshansky
-- License: BSD3
--
-- Maintainer: olshanskydr@gmail.com
--
-- Parameters of xml to ADT conversion and some utils for FromXml
--
module Text.XML.Generic.FromXmlUtil where
import Control.Arrow
import Control.Monad.Trans.Class(lift)
import Control.Monad.Trans.State
import Data.Conduit
import qualified Data.Conduit.List as CL
import Data.Default(Default(..))
import qualified Data.Text as T
import Data.XML.Types(Event(..), Content(..), Name(..))
import GHC.Generics
import Text.Printf(printf)
import Data.Monoid ((<>))
{-
import Control.Monad(when)
import Debug.Trace
-- _trace = False
_trace = True
trace' s = when _trace $ trace s $ return ()
-}
-- | Transformation works in State monad with state 'FS'
type Mon m = StateT FS m
-- | Label for Element
data E
-- | Label for Attribute
data A
-- | Two functions which are used in conversion process for attributes and elements.
-- Type parameters are labels (S | D | C) and (E | A)
data Checks s t = Checks { chIs :: forall m. Monad m => Mon m Bool -- ^ should we try to convert attribute (or element) to Selector (or Datatype or Constructor)
                         , chCheck :: forall m. Monad m => Mon m Bool -- ^ whether conversion successful, i.e. names are equivalent
                         }
-- | Checks for element and attribute
data ChecksEA s = ChecksEA { ceaElem :: Checks s E -- ^ Checks for element
                           , ceaAttr :: Checks s A -- ^ Checks for attribute
                           }
-- | Parameters of conversion
data FO = FO { foOpt :: Bool -- ^ Is optional conversion (for Maybe etc)
             , foRoot :: Bool -- ^ Is root element. True only on very beginning
             , foSel :: ChecksEA S -- ^ Functions to check selector
             , foDt :: ChecksEA D -- ^ Functions to check datatype
             , foCons :: ChecksEA C -- ^ Functions to check constructor
             }
-- it works in sequence:
-- Enter Selector => Enter Datatype => Enter Constructor => Enter K1 => Val
-- => Exit K1 => Exit Constructor => Exit Datatype => Exit Selector
-- Default matching rules. Selectors always try to match; datatypes
-- never match on their own; constructors match only for sum types.
instance Default FO where
  def = FO { foOpt = False
           , foRoot = True
           , foSel = chSelEA
           , foDt = chDtEA
           , foCons = chConsEA
           }
    where
      -- Name of the element / attribute currently in focus ("?" if none).
      getElem :: FS -> Name
      getElem = maybe "?" fst . fsElem
      getAttr :: FS -> Name
      getAttr = maybe "?" fst . fsAttr
      chSelEA :: ChecksEA S
      chSelEA = ChecksEA { ceaElem = check getElem
                         , ceaAttr = check getAttr }
        where
          check f = Checks { chIs = return True
                           , chCheck = do
                               -- s = selector, iss = sum type?, d = datatype,
                               -- c = constructor, e = current XML name.
                               (s,iss,d,c,e) <- gets $ (,,,,)
                                   <$> maybe "" T.pack . fsSel
                                   <*> fsIsSum
                                   <*> maybe "" (T.pack . fst) . fsDT
                                   <*> maybe "" (T.pack . fst) . fsCons
                                   <*> nameLocalName . f
                               -- Named selector must match directly; for
                               -- record-less types fall back to the datatype
                               -- or (for sums) the constructor name.
                               return $ (not (T.null s) && e == s)
                                   || (T.null s && iss && e == d)
                                   || (T.null s && not iss && T.null c && e == d)
                                   || (T.null s && not iss && not (T.null c) && e == c)
                           }
      chDtEA :: ChecksEA D
      chDtEA = ChecksEA { ceaElem = check getElem
                        , ceaAttr = check getAttr }
        where
          check f = Checks { chIs = return False
                           , chCheck = do -- not used if chIs == return False
                               gets $ (==)
                                   <$> maybe "" (T.pack . fst) . fsDT
                                   <*> nameLocalName . f
                           }
      chConsEA :: ChecksEA C
      chConsEA = ChecksEA { ceaElem = check getElem
                          , ceaAttr = check getAttr }
        where
          check f = Checks { chIs = gets fsIsSum
                           , chCheck = do
                               gets $ (==)
                                   <$> maybe "" (T.pack . fst) . fsCons
                                   <*> nameLocalName . f
                           }
-- | State of conversion
data FS = FS { fsEvents :: [Event] -- ^ stack of Events to leftover them
                                   -- in case of parse Nothing
             , fsElem :: Maybe (Name, [(Name, [Content])])
                  -- ^ current Elem (elemName, attrs)
             , fsAttr :: Maybe (Name, [Content]) -- ^ current Attr to check
             , fsCons :: Maybe (String, (Fixity, Bool)) -- ^ current constructor to check
             , fsDT :: Maybe (String, String) -- ^ current datatype to check
             , fsSel :: Maybe String -- ^ current selector to check
             , fsIsSum :: Bool -- ^ does Sum type there
             , fsIsProd :: Bool -- ^ does we have Product constructor
             } deriving Show

-- Everything empty; not a sum, not a product.
instance Default FS where
  def = FS def def def def def def False False
-- | Return the current event stack, then apply @f@ to it.
getAndModifyEvents :: (Monad m, Functor m)
    => ([Event] -> [Event]) -> StateT FS m [Event]
getAndModifyEvents f = do
    evts <- gets fsEvents
    modifyEvents f
    return evts
-- Field-wise state modifiers: each applies @f@ to one component of
-- 'FS', leaving the rest untouched.
modifySel :: Monad m => (Maybe String -> Maybe String) -> Mon m ()
modifySel f = modify $ \fs -> fs { fsSel = f (fsSel fs) }

modifyEvents :: Monad m
    => ([Event] -> [Event]) -> StateT FS m ()
modifyEvents f = modify $ \fs -> fs { fsEvents = f (fsEvents fs) }

modifyElem :: Monad m
    => (Maybe (Name, [(Name, [Content])]) -> Maybe (Name, [(Name, [Content])]))
    -> StateT FS m ()
modifyElem f = modify $ \fs -> fs { fsElem = f (fsElem fs) }

modifyAttr :: Monad m
    => (Maybe (Name, [Content]) -> Maybe (Name, [Content]))
    -> StateT FS m ()
modifyAttr f = modify $ \fs -> fs { fsAttr = f (fsAttr fs) }

modifyCons :: Monad m
    => (Maybe (String, (Fixity, Bool)) -> Maybe (String, (Fixity, Bool)))
    -> StateT FS m ()
modifyCons f = modify $ \fs -> fs { fsCons = f (fsCons fs) }

modifyDT :: Monad m
    => (Maybe (String, String) -> Maybe (String, String))
    -> StateT FS m ()
modifyDT f = modify $ \fs -> fs { fsDT = f (fsDT fs) }

modifyIsSum :: Monad m
    => (Bool -> Bool) -> StateT FS m ()
modifyIsSum f = modify $ \fs -> fs { fsIsSum = f (fsIsSum fs) }

modifyIsProd :: Monad m
    => (Bool -> Bool) -> StateT FS m ()
modifyIsProd f = modify $ \fs -> fs { fsIsProd = f (fsIsProd fs) }
-- State-preserving wrappers around the conduit primitives: snapshot the
-- 'FS' state, run the primitive, then restore the snapshot — the
-- primitive itself must not observe state changes.
await' :: Monad m => Consumer a (Mon m) (Maybe a)
await' = lift get >>= (await <*) . lift . put

leftover' :: Monad m => a -> Conduit a (Mon m) b
leftover' x = lift get >>= (leftover x <*) . lift . put
-- | Linearize xml. I.e.
--
-- * left only EventBeginElement / EventEndElement / EventContent
--
-- * concat content to one ContentText
--
linearize :: Monad m => Conduit Event m Event
linearize = awaitForever $ \e -> do
  case e of
    EventBeginElement _ _ -> yield e
    EventEndElement _ -> yield e
    -- Merge this and all following content events into one text node.
    EventContent c -> go (getText c <>) >>= yield . EventContent . ContentText
    _ -> return () -- drop comments, instructions, etc.
  where
    getText (ContentText t) = t
    getText (ContentEntity t) = t
    -- Difference-list accumulation of consecutive content chunks;
    -- a non-content event is pushed back for the outer loop.
    go front = do
      mx <- await
      case mx of
        Just (EventContent c) -> go $ front . (getText c <>)
        Just x -> leftover x >> return (front mempty)
        Nothing -> return $ front mempty
-- | 'await'' that, in optional mode, also records the consumed event on
-- the 'fsEvents' stack so it can be handed back on failure.
awaitOpt :: (Monad m, Functor m) => FO -> Consumer Event (Mon m) (Maybe Event)
awaitOpt fo
  | not (foOpt fo) = await'
  | otherwise = do
      mev <- await'
      case mev of
        Nothing -> return ()
        Just ev -> lift $ modifyEvents (ev:)
      return mev
-- | Push an event back to the stream and drop its copy from the saved
-- 'fsEvents' stack.
leftoverOpt :: (Monad m, Functor m) => a -> Conduit a (Mon m) o
leftoverOpt x = leftover' x >> lift (modifyEvents (drop 1))
-- | Core dispatch: try to satisfy the current piece of the value from
-- an attribute of the enclosing element first, then from a child
-- element, then (if elements are not expected) inline content.
doFrom :: (Monad m, Functor m)
    => FO -> (FO -> Consumer Event (Mon m) (Either String x)) -> (x -> Mon m ()) -> ChecksEA t
    -> Consumer Event (Mon m) (Either String x)
doFrom fo getRes fmod chs = do
  isA <- lift $ chIs $ ceaAttr chs
  if isA
    then do -- process attrs from parent elem
      mba <- lift $ checkAttrs (getRes def) fmod $ chCheck $ ceaAttr chs
      -- No matching attribute: fall back to element parsing if allowed.
      maybe (do
          isE <- lift $ chIs $ ceaElem chs
          if isE
            then withEl fo getRes fmod $ chCheck $ ceaElem chs -- process new elem
            else return $ Left "There is no attr correspondence and elem is prohibited"
          )
          (return . Right) mba
    else do
      isE <- lift $ chIs $ ceaElem chs
      if isE
        then withEl fo getRes fmod $ chCheck $ ceaElem chs -- process new elem
        else withoutEl fo getRes fmod
-- | Parse a value wrapped in an element: consume the begin event, make
-- the element current in 'FS', run the inner parser, then require the
-- matching end event and validate the element name via @checkEl@.
-- The previous current element is restored afterwards.
withEl :: (Monad m, Functor m)
    => FO -> (FO -> Consumer Event (Mon m) (Either String x)) -> (x -> Mon m ()) -> Mon m Bool
    -> Consumer Event (Mon m) (Either String x)
withEl fo getRes fmod checkEl = do
  me <- awaitOpt fo
  case me of
    Just (EventBeginElement e attrs) -> do
      -- Save the parent element while this one is in focus.
      parentEl <- lift $ gets fsElem <* modifyElem (const $ Just (e,attrs))
      er <- getRes fo
      res <- either (return.Left) (\r -> do
          lift $ fmod r
          meEnd <- awaitOpt fo
          case meEnd of
            Just (EventEndElement e')
              -- End tag must close the same element we opened.
              | e /= e' -> return $ Left $ printf (unlines
                          [ "Invalid end of element."
                          , "Expected end of: '%s'."
                          , "Got end of '%s'" ])
                          (show e) (show e')
              | otherwise -> do
                  checked <- lift $ checkEl
                  if checked then return $ Right r
                    else do
                      fs <- lift get
                      return $ Left $ printf (unlines
                          [ "Element doesn't correspond value."
                          , "Element: '%s'"
                          , "Current state: '%s'" ]
                          ) (show e) (show fs)
            e' -> return $ Left $ printf
                  "Invalid end of element in constructor.\nExpected end of '%s'.\nGot event '%s'"
                  (show e) (show e')
          ) er
      lift $ modifyElem $ const parentEl
      return res
    e' -> return $ Left $ printf
          "Begin of element was expected.\n But got an event '%s'"
          (show e')
-- | Parse a value with no wrapping element: run the inner parser and,
-- on success, apply the state modifier before returning.
withoutEl :: (Monad m, Functor m)
    => FO -> (FO -> Consumer Event (Mon m) (Either String x)) -> (x -> Mon m ())
    -> Consumer Event (Mon m) (Either String x)
withoutEl fo getRes fmod = do
    er <- getRes fo
    case er of
      Left err -> return (Left err)
      Right r -> do
        lift (fmod r)
        return (Right r)
-- | Try each attribute of the current element in turn: feed its content
-- through 'linearize' into the parser, accept the first attribute that
-- parses and passes @ch@, and remove it from the element's attribute
-- list. Returns 'Nothing' when no attribute matches.
checkAttrs :: (Monad m, Functor m)
    => Consumer Event (Mon m) (Either String x) -> (x -> Mon m ()) -> Mon m Bool
    -> Mon m (Maybe x)
checkAttrs getRes fmod ch = do
  attrs <- gets $ maybe [] snd . fsElem
  (ma, as) <- go attrs id
  maybe (return Nothing) (\x -> do
      -- Drop the consumed attribute from the current element.
      modifyElem $ fmap $ second $ const as
      return $ Just x
      ) ma
  where
    -- Walk the attribute list, keeping unmatched attrs in `front`.
    go [] front = return (Nothing, front [])
    go (a:as) front = do
      modifyAttr $ const $ Just a
      er <- checkAttr a
      maybe (go as $ front . (a:)) (\r -> return (Just r, front as)) er
    -- Run the parser over the attribute's content in a fresh state.
    checkAttr (_,cs)= do
      ex <- lift $ CL.sourceList (map EventContent cs) $$ linearize =$ transPipe (flip evalStateT def) getRes
      either (return . const Nothing) (\x -> do
          fmod x
          is <- ch
          return $ if is then Just x else Nothing
          ) ex
| odr/xml-conduit-generic | Text/XML/Generic/FromXmlUtil.hs | bsd-3-clause | 13,282 | 1 | 31 | 5,857 | 3,694 | 1,924 | 1,770 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
-- | This is the bindings per-se to the Docker Registry API but you probably
-- want to use the module `Network.Docker.Registry`. Still this module is
-- available if you have your own data types to represent repositories and
-- images and don't want to use the ones defined in
-- `Network.Docker.Registry.Types`.
module Network.Docker.Registry.Internal where
import Blaze.ByteString.Builder (Builder)
import qualified Blaze.ByteString.Builder as Builder (toByteString)
import qualified Blaze.ByteString.Builder.ByteString as Builder
import Control.Exception (throwIO, Exception, bracket)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy as LB
import Data.Monoid (mappend, mempty)
import Data.Typeable (Typeable)
import Network.Http.Client
import OpenSSL (withOpenSSL)
import System.IO.Streams (OutputStream)
import qualified System.IO.Streams as Streams
import Network.Docker.Registry.Types
-- | GET @/v1/images/\<image\>/json@: returns the HTTP status code and
-- the raw response body.
getImageJson :: Credentials -> ByteString -> ByteString -> IO (Int, ByteString)
getImageJson credentials host image = withOpenSSL $ do
  let url = B.concat ["/v1/images/" `B.append` image `B.append` "/json"]
  q <- buildRequest $ do
    http GET url
    maybe (return ()) (uncurry setAuthorizationBasic) credentials
  -- Accumulate the whole body via a Builder before flattening.
  let handler response i1 = do
        i2 <- Streams.map Builder.fromByteString i1
        x <- Streams.fold mappend mempty i2
        return (getStatusCode response, Builder.toByteString x)
  sendAndReceive host q emptyBody handler
-- | PUT the image metadata JSON to @/v1/images/\<image\>/json@;
-- returns the HTTP status code.
putImageJson :: Credentials -> ByteString -> ByteString -> LB.ByteString -> IO Int
putImageJson credentials host image json = withOpenSSL $ do
  let url = B.concat ["/v1/images/" `B.append` image `B.append` "/json"]
      body = json
  q <- buildRequest $ do
    http PUT url
    maybe (return ()) (uncurry setAuthorizationBasic) credentials
    setContentLength (fromIntegral $ LB.length body)
    setContentType "application/json"
  body' <- Streams.fromLazyByteString body
  sendAndReceive host q (inputStreamBody body') getStatusCode'
-- | Upload an image layer.
-- | Upload an image layer; the 'LayerAsInput' callback is adapted into
-- an upload action feeding the HTTP output stream.
putImageLayer :: Credentials -> ByteString -> ByteString -> LayerAsInput -> IO Int
putImageLayer credentials host image layer = do
  let upload o = layer (\body -> inputStreamBody body o)
  putImageLayer' credentials host image upload
-- | Upload an image layer but don't send the terminating chunk.
-- | Intentionally broken layer upload for testing: writes the payload
-- but aborts (via 'MisconductException') before the terminating chunk.
putImageLayerBroken :: Credentials -> ByteString -> ByteString -> LB.ByteString
    -> IO Int
putImageLayerBroken credentials host image layer = do
  let brokenUpload o = do
        Streams.write (Just (Builder.fromLazyByteString layer)) o
        throwIO MisconductException
  putImageLayer' credentials host image brokenUpload
-- | Shared worker: chunked PUT of an image layer to
-- @/v1/images/\<image\>/layer@ using the supplied upload action.
putImageLayer' :: Credentials -> ByteString -> ByteString
    -> (OutputStream Builder -> IO ()) -> IO Int
putImageLayer' credentials host image upload = withOpenSSL $ do
  let url = B.concat ["/v1/images/" `B.append` image `B.append` "/layer"]
  q <- buildRequest $ do
    http PUT url
    -- TODO The official registry does a 500 if there is no version.
    setHeader "User-Agent" "docker/1.1.2-fake"
    maybe (return ()) (uncurry setAuthorizationBasic) credentials
    setTransferEncoding -- "chunked"
  sendAndReceive host q upload getStatusCode'
-- | PUT the layer checksum (sent as a header, empty body) to
-- @/v1/images/\<image\>/checksum@.
putImageChecksum :: Credentials -> ByteString -> ByteString -> LB.ByteString
    -> IO Int
putImageChecksum credentials host image checksum = withOpenSSL $ do
  let url = B.concat ["/v1/images/" `B.append` image `B.append` "/checksum"]
  q <- buildRequest $ do
    http PUT url
    -- TODO The official registry does a 500 if there is no version.
    setHeader "User-Agent" "docker/1.1.2-fake"
    maybe (return ()) (uncurry setAuthorizationBasic) credentials
    -- TODO Older clients use tarsum+sha256
    setHeader "X-Docker-Checksum-Payload" $ B.concat $ LB.toChunks checksum
    setContentLength 0
  sendAndReceive host q emptyBody getStatusCode'
-- | PUT the repository image list to
-- @/v1/repositories/\<namespace\>/\<repo\>/@.
putRepository :: Credentials -> ByteString -> ByteString -> ByteString
    -> LB.ByteString -> IO Int
putRepository credentials host namespace repo json = withOpenSSL $ do
  -- TODO The official registry accepts also quuxbar instead of quux/bar.
  let url = B.concat ["/v1/repositories/" `B.append` namespace `B.append`
        "/" `B.append` repo `B.append` "/"]
      body = json
  q <- buildRequest $ do
    http PUT url
    maybe (return ()) (uncurry setAuthorizationBasic) credentials
    setContentLength (fromIntegral $ LB.length body)
    setContentType "application/json"
  body' <- Streams.fromLazyByteString body
  sendAndReceive host q (inputStreamBody body') getStatusCode'
-- | PUT a tag pointing at an image id to
-- @/v1/repositories/\<namespace\>/\<repo\>/tags/\<tag\>@.
-- The body is the image id as a JSON string literal.
putRepositoryTag :: Credentials -> ByteString -> ByteString -> ByteString
    -> ByteString -> ByteString -> IO Int
putRepositoryTag credentials host namespace repo tag image = withOpenSSL $ do
  -- TODO The official registry accepts also quuxbar instead of quux/bar.
  let url = B.concat ["/v1/repositories/" `B.append` namespace `B.append` "/"
        `B.append` repo `B.append` "/tags/" `B.append` tag]
      body = B.concat ["\"", image, "\""]
  q <- buildRequest $ do
    http PUT url
    maybe (return ()) (uncurry setAuthorizationBasic) credentials
    setContentLength (fromIntegral $ B.length body)
    setContentType "application/json"
  body' <- Streams.fromByteString body
  sendAndReceive host q (inputStreamBody body') getStatusCode'
-- | Open an HTTPS connection to @host@, send the request with the
-- given body writer, and hand the response to @handler@.
--
-- Uses 'bracket' so the connection is closed even when sending,
-- receiving, or the handler throws (resolves the old
-- \"TODO Use bracket\" note, which leaked connections on exception —
-- notably 'putImageLayerBroken', which throws mid-upload by design).
sendAndReceive :: ByteString -> Request -> (OutputStream Builder -> IO ())
    -> (Response -> Streams.InputStream ByteString -> IO a) -> IO a
sendAndReceive host q body handler =
    bracket
        (establishConnection $ "https://" `B.append` host)
        closeConnection
        (\c -> do
            _ <- sendRequest c q body
            receiveResponse c handler)
getStatusCode' response _ = return $ getStatusCode response
-- | Exception used in a handler (for `sendRequest`) to intentionnally brake
-- an upload.
data MisconductException = MisconductException
  deriving (Typeable, Show)
instance Exception MisconductException
| noteed/rescoyl-checks | Network/Docker/Registry/Internal.hs | bsd-3-clause | 6,101 | 0 | 17 | 1,052 | 1,636 | 822 | 814 | 110 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Dimensions.TA
( allDimensions
) where
import Duckling.Dimensions.Types
allDimensions :: [Seal Dimension]
allDimensions =
[ Seal Numeral
, Seal Ordinal
]
| facebookincubator/duckling | Duckling/Dimensions/TA.hs | bsd-3-clause | 388 | 0 | 6 | 70 | 51 | 32 | 19 | 7 | 1 |
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : dave.laing.80@gmail.com
Stability : experimental
Portability : non-portable
-}
module Fragment.Let.Ast (
module X
) where
import Fragment.Let.Ast.Term as X
| dalaing/type-systems | src/Fragment/Let/Ast.hs | bsd-3-clause | 237 | 0 | 4 | 47 | 23 | 17 | 6 | 3 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- | Description: Types defining the protocol for client/server interaction
-- with retcond.
module Retcon.Network.Protocol where
import Control.Applicative
import Control.Exception
import Control.Lens.TH
import qualified Data.Aeson as Aeson
import qualified Data.Aeson.Diff as Diff
import Data.Binary
import qualified Data.ByteString as BS
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Typeable
import Retcon.Document
import Retcon.Identifier
import Retcon.Store
-------------------------------------------------------------------------------
-- * Errors
-- | Values describing error states of the retcon API.
data APIError
    = UnknownServerError -- ^ Catch-all server failure; wire code is 'maxBound'.
    | TimeoutError -- ^ The request timed out.
    | DecodeError -- ^ A message could not be decoded.
    | InvalidNumberOfMessageParts -- ^ Multi-part message was malformed.
    | UnknownKeyError -- ^ Notification contained an unknown key.
  deriving (Show, Eq, Typeable)
instance Exception APIError
-- Wire encoding of 'APIError'.  'fromEnum' and 'toEnum' are maintained by
-- hand and must stay in sync: codes 0-3 map to specific errors; any other
-- code decodes to 'UnknownServerError' (which encodes as 'maxBound').
instance Enum APIError where
    fromEnum TimeoutError = 0
    fromEnum InvalidNumberOfMessageParts = 1
    fromEnum DecodeError = 2
    fromEnum UnknownKeyError = 3
    fromEnum UnknownServerError = maxBound

    toEnum 0 = TimeoutError
    toEnum 1 = InvalidNumberOfMessageParts
    toEnum 2 = DecodeError
    toEnum 3 = UnknownKeyError
    toEnum _ = UnknownServerError
-------------------------------------------------------------------------------

-- * Network messages

-- | Message headers for network protocol.
--
-- Each constructor pairs a request type with its response type at the type
-- level, so a request cannot be matched with the wrong response.
data Header request response where
    -- | Get a list of unresolved conflicts.
    HeaderConflicted :: Header RequestConflicted ResponseConflicted
    -- | Process a change
    HeaderChange :: Header RequestChange ResponseChange
    -- | Resolve an unresolved conflict.
    HeaderResolve :: Header RequestResolve ResponseResolve
    -- | Invalid request/response.
    InvalidHeader :: Header InvalidRequest InvalidResponse
-- Wire encoding of message headers; 'fromEnum'/'toEnum' are kept in sync
-- by hand.  Unknown codes decode to 'InvalidHeader' (encoded as 'maxBound').
instance Enum SomeHeader where
    fromEnum (SomeHeader HeaderConflicted) = 0
    fromEnum (SomeHeader HeaderChange) = 1
    fromEnum (SomeHeader HeaderResolve) = 2
    fromEnum (SomeHeader InvalidHeader) = maxBound

    toEnum 0 = SomeHeader HeaderConflicted
    toEnum 1 = SomeHeader HeaderChange
    toEnum 2 = SomeHeader HeaderResolve
    toEnum _ = SomeHeader InvalidHeader
-- | Existential wrapper that hides a 'Header''s request/response type
-- parameters, so headers of any kind can be stored and transmitted
-- uniformly.
data SomeHeader where
    SomeHeader
        :: Header request response
        -> SomeHeader
-- ** List unresolved conflicts

-- | Request a list of conflicts.
data RequestConflicted = RequestConflicted
  deriving (Eq, Show)

-- | One unresolved conflict: the current document, the conflicting patch,
-- the patch's database id, and the individual operations (with their ids)
-- making up the patch.
data ResponseConflictedItem = ResponseConflictedItem
    { _conflictDocument :: Document
    , _conflictPatch :: Diff.Patch
    , _conflictDiffID :: DiffID
    , _conflictOps :: [(OpID, Diff.Operation)]
    } deriving (Eq, Show)
-- | Fields are serialised in declaration order:
-- document, patch, diff id, operations.
instance Binary ResponseConflictedItem where
    put (ResponseConflictedItem d p i o) = do
        put d
        put p
        put i
        put o
    get = do
        d <- get
        p <- get
        i <- get
        o <- get
        return (ResponseConflictedItem d p i o)
-- | Pre-serialised counterpart of 'ResponseConflictedItem': document and
-- patch are kept as raw bytes, ids as plain 'Int's.  Built on the server
-- to skip a decode/re-encode round trip.
data ResponseConflictedSerialisedItem = ResponseConflictedSerialisedItem
    { _conflictDocument' :: BS.ByteString
    , _conflictPatch' :: BS.ByteString
    , _conflictDiffID' :: Int
    , _conflictOps' :: [(Int, BS.ByteString)]
    } deriving (Eq, Show)

-- Fields serialised in declaration order.
instance Binary ResponseConflictedSerialisedItem where
    put (ResponseConflictedSerialisedItem d p i o) = put d >> put p >> put i >> put o
    get = ResponseConflictedSerialisedItem <$> get <*> get <*> get <*> get
-- | Response containing a list of unresolved conflicts.
data ResponseConflicted
    = ResponseConflicted [ResponseConflictedItem]
    -- | Pre-serialised version of the same data. This is generated on the server
    -- to avoid the overhead of de-serialising from the database only to serialise
    -- immediately. Woo.
    | ResponseConflictedSerialised [ResponseConflictedSerialisedItem]
  deriving (Eq, Show)
-- 'RequestConflicted' carries no data, so its encoding is empty.
instance Binary RequestConflicted where
    put _ = return ()
    get = return RequestConflicted

instance Binary ResponseConflicted where
    -- NOTE(review): both constructors encode only their item list, and 'get'
    -- always rebuilds the plain 'ResponseConflicted' form.  This assumes the
    -- Binary encodings of '[ResponseConflictedItem]' and
    -- '[ResponseConflictedSerialisedItem]' are byte-compatible -- confirm
    -- before changing either record.
    put (ResponseConflicted ds) = put ds
    put (ResponseConflictedSerialised ds) = put ds
    get = ResponseConflicted <$> get
-- ** Resolve outstanding conflict

-- | Ask the server to resolve the conflict with the given 'DiffID' by
-- applying the listed operations.
data RequestResolve = RequestResolve DiffID [OpID]
  deriving (Eq, Show)

-- | Acknowledgement only; carries no data.
data ResponseResolve = ResponseResolve
  deriving (Eq, Show)

-- Encoded as a (DiffID, [OpID]) pair.
instance Binary RequestResolve where
    put (RequestResolve did conflicts) = put (did, conflicts)
    get = do
        (did, conflicts) <- get
        return $ RequestResolve did conflicts

instance Binary ResponseResolve where
    put _ = return ()
    get = return ResponseResolve
-- ** Processing a change

-- | Ask the server to process a change notification.
data RequestChange = RequestChange ChangeNotification
  deriving (Eq, Show)

-- | Acknowledgement only; carries no data.
data ResponseChange = ResponseChange
  deriving (Eq, Show)

-- Encoded as the (entity, source, foreign key) triple of the notification.
instance Binary RequestChange where
    put (RequestChange (ChangeNotification entity source fk)) =
        put (entity, source, fk)
    get = do
        (entity, source, fk) <- get
        return . RequestChange $ ChangeNotification entity source fk

instance Binary ResponseChange where
    put _ = return ()
    get = return ResponseChange
-- ** Invalid request

-- | Placeholder request used for unrecognised headers.
data InvalidRequest = InvalidRequest
  deriving (Eq, Show)

-- | Placeholder response used for unrecognised headers.
data InvalidResponse = InvalidResponse
  deriving (Eq, Show)

instance Binary InvalidRequest where
    put _ = return ()
    get = return InvalidRequest

instance Binary InvalidResponse where
    put _ = return ()
    get = return InvalidResponse
-------------------------------------------------------------------------------

-- * Types

-- | A notification for Retcon that the document with 'ForeignID' which is an
-- 'EntityName' at the data source 'SourceName' has changed in some way.
data ChangeNotification = ChangeNotification
    { _notificationEntity :: EntityName
    , _notificationSource :: SourceName
    , _notificationForeignID :: ForeignID
    }
  deriving (Eq, Show)

-- Generates lenses for the fields above (e.g. 'notificationEntity').
makeLenses ''ChangeNotification
-- NOTE(review): orphan 'Binary' instance for 'T.Text'; it will clash with
-- any other library defining one.  Values round-trip via UTF-8.
instance Binary T.Text where
    put = put . T.encodeUtf8
    get = T.decodeUtf8 <$> get

instance Binary EntityName where
    put (EntityName n) = put n
    get = EntityName <$> get

instance Binary SourceName where
    put (SourceName n) = put n
    get = SourceName <$> get

-- The JSON-typed values below are shipped as their Aeson encodings and
-- decoded with 'getJSON'.
instance Binary Diff.Patch where
    put = put . Aeson.encode
    get = getJSON

instance Binary Diff.Operation where
    put = put . Aeson.encode
    get = getJSON

instance Binary Document where
    put = put . Aeson.encode
    get = getJSON
-- | Get and decode a value crammed into Binary as a JSON 'Value'.
-- A decode failure is reported through the 'Get' monad's 'fail'.
getJSON
    :: Aeson.FromJSON a
    => Get a
getJSON = either fail return . Aeson.eitherDecode =<< get
| anchor/retcon | lib/Retcon/Network/Protocol.hs | bsd-3-clause | 6,822 | 0 | 11 | 1,464 | 1,480 | 799 | 681 | 153 | 2 |
module Rules.Nofib where
import Base
import Expression
import GHC
import Oracles.Setting
import Target
import System.Environment
import System.Exit
-- | Shake rules for running the nofib benchmark suite against the freshly
-- built stage-2 GHC and capturing its output in @<build root>/nofib-log@.
nofibRules :: Rules ()
nofibRules = do
  root <- buildRootRules
  -- a phony "nofib" rule that just triggers
  -- the rule below.
  "nofib" ~> need [root -/- nofibLogFile]
  -- a rule to produce <build root>/nofib-log
  -- by running the nofib suite and capturing
  -- the relevant output.
  root -/- nofibLogFile %> \fp -> do
    needNofibDeps
    makePath <- builderPath (Make "nofib")
    top <- topDirectory
    ghcPath <- builderPath (Ghc CompileHs Stage2)
    perlPath <- builderPath Perl
    -- some makefiles in nofib rely on a $MAKE
    -- env var being defined
    liftIO (setEnv "MAKE" makePath)
    -- this runs make commands in the nofib
    -- subdirectory, passing the path to
    -- the GHC to benchmark and perl to
    -- nofib's makefiles.
    let nofibArgs = ["WithNofibHc=" ++ (top -/- ghcPath), "PERL=" ++ perlPath]
    unit $ cmd (Cwd "nofib") [makePath] ["clean"]
    unit $ cmd (Cwd "nofib") [makePath] (nofibArgs ++ ["boot"])
    (Exit e, Stdouterr log) <- cmd (Cwd "nofib") [makePath] nofibArgs
    -- keep the full log regardless of success, then report accordingly
    writeFile' fp log
    if e == ExitSuccess
      then putLoud $ "nofib log available at " ++ fp
      else error $ "nofib failed, full log available at " ++ fp
-- | Name (relative to the build root) of the file that captures the
-- output of a nofib run.
nofibLogFile :: FilePath
nofibLogFile = "nofib-log"
-- the dependencies that nofib seems to require:
-- the stage-1 unlit program, the stage-1 mtl package configuration, and
-- the stage-2 GHC used to compile the benchmarks.
needNofibDeps :: Action ()
needNofibDeps = do
  unlitPath <- programPath (Context Stage1 unlit vanilla)
  mtlPath <- pkgConfFile (Context Stage1 mtl vanilla)
  need [ unlitPath, mtlPath ]
  needBuilder (Ghc CompileHs Stage2)
| bgamari/shaking-up-ghc | src/Rules/Nofib.hs | bsd-3-clause | 1,669 | 0 | 17 | 362 | 422 | 215 | 207 | 35 | 2 |
module Utility where
import System.IO
import Control.Concurrent
import Control.Exception
-- | Write a line to the handle, suppressing any 'IOException' (for
-- example, a peer that has already disconnected).
--
-- The previous implementation used @finally … (return ())@, which runs a
-- no-op finaliser and still propagates the exception; using 'catch'
-- actually makes the send "safe", as the name promises.
sendSafe :: Handle -> String -> IO ()
sendSafe hand str = hPutStrLn hand str `catch` ignoreIOError
  where
    -- Sending is best-effort: discard I/O failures.
    ignoreIOError :: IOException -> IO ()
    ignoreIOError _ = return ()
| allonsy/chirp | src/Utility.hs | bsd-3-clause | 191 | 0 | 8 | 30 | 68 | 36 | 32 | 6 | 1 |
module Sexy.Instances.FromInteger.Int () where
import Sexy.Classes (FromInteger(..))
import Sexy.Data (Int)
import qualified Prelude as P
-- | 'Int' literals delegate directly to the Prelude conversion.
instance FromInteger Int where
  fromInteger = P.fromInteger
| DanBurton/sexy | src/Sexy/Instances/FromInteger/Int.hs | bsd-3-clause | 202 | 0 | 6 | 27 | 56 | 36 | 20 | 6 | 0 |
{- |
CoreFoundation tends to store filepaths as URIs rather than in POSIX
format. The functions 'uriToFilepath' and 'filepathToUri' provide the
appropriate conversions.
-}
module CoreFoundation.URI(
Uri,
uriToFilepath,
filepathToUri,
) where
import qualified Prelude
import Prelude hiding(String)
import CoreFoundation.Types.String
import Control.Monad
import Network.URI
import System.FilePath
-- | CoreFoundation strings which are formatted as uris
type Uri = String
-- Components a local-file URI must carry.  Used both when constructing
-- URIs ('filepathToUri') and when validating them ('uriToFilepath').
-- Note the scheme includes the trailing colon, as Network.URI expects.
fileScheme = "file:"
fileAuth = Just $ URIAuth "" "localhost" ""
fileQuery = ""
fileFragment = ""
{- |
Convert a @file://localhost/...@ URI to a POSIX path, un-escaping any
percent-encoded characters.  Yields 'Nothing' for anything that is not a
well-formed local-file URI.

>>> uriToFilepath "file://localhost/path/to/foo%20bar"
Just "/path/to/foo bar"

>>> uriToFilepath "malformed..."
Nothing
-}
uriToFilepath :: Uri -> Maybe FilePath
uriToFilepath str = do
    uri <- parseURI (toString str)
    guard (uriScheme uri == fileScheme)
    guard (uriAuthority uri == fileAuth)
    guard (uriQuery uri == fileQuery)
    guard (uriFragment uri == fileFragment)
    Just (unEscapeString (uriPath uri))
{- |
Render an absolute POSIX path as a @file://localhost/...@ URI,
percent-escaping characters as required.  Calls 'error' when handed a
relative path.

>>> filepathToUri "/path/to/foo bar"
"file://localhost/path/to/foo%20bar"

>>> filepathToUri "path/to/foo"
error: input path must be absolute
-}
filepathToUri :: FilePath -> Uri
filepathToUri fp
    | isAbsolute fp = fromString (uriToString id fileUri "")
    | otherwise = error "CoreFoundation.Utils.filepathToUri: input path must be absolute"
  where
    fileUri =
      URI {
        uriScheme = fileScheme,
        uriAuthority = fileAuth,
        uriPath = escapeURIString isUnescapedInURI fp,
        uriQuery = fileQuery,
        uriFragment = fileFragment
      }
| reinerp/CoreFoundation | CoreFoundation/URI.hs | bsd-3-clause | 1,612 | 0 | 10 | 332 | 296 | 157 | 139 | 34 | 1 |
module Data.TTask.Pretty
( module Data.TTask.Pretty.Contents
, module Data.TTask.Pretty.Status
) where
import Data.TTask.Pretty.Contents
import Data.TTask.Pretty.Status
| tokiwoousaka/ttask | src/Data/TTask/Pretty.hs | bsd-3-clause | 175 | 0 | 5 | 20 | 39 | 28 | 11 | 5 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module : Blip.Compiler.Types
-- Copyright : (c) 2012, 2013, 2014 Bernie Pope
-- License : BSD-style
-- Maintainer : florbitous@gmail.com
-- Stability : experimental
-- Portability : ghc
--
-- Type definitions which are used in multiple modules.
--
-----------------------------------------------------------------------------
module Blip.Compiler.Types
( Identifier, CompileConfig (..), VarIndex, IndexedVarSet
, ConstantID, ConstantCache, CompileState (..), BlockState (..)
, AnnotatedCode (..), LabelMap, Dumpable (..), VarSet
, LocalScope (..), NestedScope (..), VarInfo (..)
, ScopeIdentifier, FrameBlockInfo (..), Context (..), ParameterTypes (..)
) where
import Data.Set (Set)
import Blip.Bytecode (Bytecode (..))
import Blip.Marshal (PyObject (..))
import Data.Word (Word32, Word16)
import qualified Data.Map as Map
-- The context in which a variable is used affects the bytecode
-- related to that use.
data Context
   = ModuleContext
   | ClassContext
   | FunctionContext
   deriving (Eq, Ord, Show)
-- information about how a variable is bound plus its offset into
-- the appropriate structure.  'CellVar' and 'FreeVar' carry their index
-- into the cell/free variable tables; the global variants carry no index.
data VarInfo
   = LocalVar
   | CellVar !VarIndex
   | FreeVar !VarIndex
   | ExplicitGlobal  -- presumably declared via a @global@ statement -- confirm
   | ImplicitGlobal  -- treated as global because it is not bound locally
type VarSet = Set Identifier
-- XXX need to handle keyword only parameters
-- Parameters of a function, split by kind.
data ParameterTypes
   = ParameterTypes
     { parameterTypes_pos :: ![Identifier]              -- positional parameters, in order
     , parameterTypes_varPos :: !(Maybe Identifier)     -- variadic positional parameter, if any
     , parameterTypes_varKeyword :: !(Maybe Identifier) -- variadic keyword parameter, if any
     }
   deriving (Eq, Show)
-- Variable classification for a single lexical scope.
data LocalScope
   = LocalScope
     { localScope_params :: !ParameterTypes
     , localScope_locals :: !VarSet
     , localScope_freeVars :: !VarSet
     , localScope_cellVars :: !VarSet
     , localScope_explicitGlobals :: !VarSet
     }
   deriving Show
-- start and end coordinates of span (row, col, row, col)
type ScopeIdentifier = (Int, Int, Int, Int)
-- mapping from source location to pair of (scope name, local scope)
newtype NestedScope =
   NestedScope (Map.Map ScopeIdentifier (String, LocalScope))
   deriving Show
-- Which intermediate artefacts the compiler should dump.
data Dumpable = DumpScope | DumpAST
   deriving (Eq, Ord, Show)
-- A bytecode instruction together with its labels and byte offset.
data AnnotatedCode
   = AnnotatedCode
     { annotatedCode_bytecode :: !Bytecode
     , annotatedCode_labels :: ![Word16] -- instruction can be labelled zero or more times
     , annotatedCode_index :: !Word16 } -- byte offset of the instruction within this sequence of bytecode
   deriving Show
type Identifier = String -- a variable name
-- Compiler configuration: the bytecode magic number and what to dump.
data CompileConfig =
   CompileConfig
   { compileConfig_magic :: !Word32
   , compileConfig_dumps :: !(Set Dumpable)
   }
   deriving (Eq, Show)
type ConstantID = Word16
type ConstantCache = Map.Map PyObject ConstantID
-- Global compiler state: configuration, the state of the block currently
-- being compiled, the source filename, and the resolved scope information.
data CompileState = CompileState
   { state_config :: !CompileConfig
   , state_blockState :: !BlockState
   , state_filename :: !FilePath
   , state_nestedScope :: !NestedScope
   }
-- Map from Label to Instruction offset.
-- The same instruction can be labelled multiple times,
-- but each label is attached to exactly one instruction.
type LabelMap = Map.Map Word16 Word16
type VarIndex = Word16
type IndexedVarSet = Map.Map Identifier VarIndex
-- Per-code-block compilation state: emitted instructions, constant and
-- name tables (with caches for interning), variable classifications, the
-- frame-block stack, and line-number bookkeeping.
data BlockState = BlockState
   { state_label :: !Word16
   , state_instructions :: ![AnnotatedCode]
   , state_labelNextInstruction :: ![Word16] -- zero or more labels for the next instruction
   , state_constants :: ![PyObject]
   , state_constantCache :: !ConstantCache
   , state_nextConstantID :: !ConstantID
   , state_names :: ![Identifier]
   , state_nameCache :: !IndexedVarSet
   , state_nextNameID :: !VarIndex
   , state_objectName :: !String
   , state_instruction_index :: !Word16
   , state_labelMap :: !LabelMap
   , state_locals :: !VarSet
   , state_fastLocals :: !IndexedVarSet
   , state_freeVars :: !IndexedVarSet
   , state_cellVars :: !IndexedVarSet
   , state_explicitGlobals :: !VarSet
   , state_argcount :: !Word32
   , state_flags :: !Word32
   , state_frameBlockStack :: ![FrameBlockInfo]
   , state_context :: !Context
   , state_lineNumber :: !Word32
   , state_lineNumberTable :: ![(Word16, Word32)] -- mapping from bytecode offset to source line number
   , state_firstLineNumber :: !Word32
   }
   deriving (Show)
-- Kinds of frame blocks (loop/except/finally); 'FrameBlockLoop' carries
-- a label -- presumably the loop's exit target, confirm at use sites.
data FrameBlockInfo
   = FrameBlockLoop !Word16
   | FrameBlockExcept
   | FrameBlockFinallyTry
   | FrameBlockFinallyEnd
   deriving (Eq, Show)
| bjpop/blip | blipcompiler/src/Blip/Compiler/Types.hs | bsd-3-clause | 4,459 | 0 | 11 | 855 | 839 | 512 | 327 | 185 | 0 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- |
-- Module : Database.Monarch.Mock.Types
-- Copyright : 2013 Noriyuki OHKAWA
-- License : BSD3
--
-- Maintainer : n.ohkawa@gmail.com
-- Stability : experimental
-- Portability : unknown
--
-- Mock actions.
--
module Database.Monarch.Mock.Action () where
import Control.Concurrent.STM.TVar
import Control.Monad.Reader
import Control.Monad.STM ( atomically )
import Control.Monad.Trans.Control
import Database.Monarch.Types ( MonadMonarch(..) )
import Database.Monarch.Mock.Types ( MockT, MockDB, mockDB, emptyMockDB, TTValue(..) )
import qualified Data.ByteString as BS
import qualified Data.Map as M
import Data.Map ( (!) )
import Data.Monoid ( (<>) )
-- Insert (or overwrite) a raw 'TTValue' under the given key.
putDB :: BS.ByteString -> TTValue -> MockDB -> MockDB
putDB key value db = db { mockDB = M.insert key value (mockDB db) }
-- Store a string value.
putDBS :: BS.ByteString -> BS.ByteString -> MockDB -> MockDB
putDBS key value = putDB key (TTString value)
-- Store an integer value.
putDBI :: BS.ByteString -> Int -> MockDB -> MockDB
putDBI key value = putDB key (TTInt value)
-- Store a double value.
putDBD :: BS.ByteString -> Double -> MockDB -> MockDB
putDBD key value = putDB key (TTDouble value)
-- Look up a string value; 'Nothing' when the key is absent.
-- Calls 'error' if the stored value is not a 'TTString'.
getDB :: BS.ByteString -> MockDB -> Maybe BS.ByteString
getDB key db =
    case M.lookup key (mockDB db) of
        Nothing -> Nothing
        Just (TTString value) -> Just value
        Just _ -> error "get"
-- | In-memory implementation of the Monarch API.  Every operation works on
-- the 'TVar'-held 'MockDB' obtained from the reader environment, applying
-- its update atomically via STM.
instance ( MonadBaseControl IO m, MonadIO m ) => MonadMonarch (MockT m) where
    -- Store a string value, overwriting any existing entry.
    put key value = do
        tdb <- ask
        liftIO $ atomically $ modifyTVar tdb $ putDBS key value

    multiplePut = mapM_ (uncurry put)

    -- Store only when the key is not already present.
    putKeep key value = do
        tdb <- ask
        let modify db
                | M.member key (mockDB db) = db
                | otherwise = putDBS key value db
        liftIO $ atomically $ modifyTVar tdb modify

    -- Append to an existing string value ('error' on non-string values).
    putCat key value = do
        tdb <- ask
        let modify db
                | M.member key (mockDB db) =
                    case mockDB db ! key of
                        TTString v -> putDBS key (v <> value) db
                        _ -> error "putCat"
                | otherwise =
                    putDBS key value db
        liftIO $ atomically $ modifyTVar tdb modify

    -- Append and keep only the rightmost @width@ bytes.
    -- NOTE(review): when the key is absent the value is stored untrimmed,
    -- even if it is longer than @width@ -- confirm this matches the real
    -- server's putshl behaviour.
    putShiftLeft key value width = do
        tdb <- ask
        let modify db
                | M.member key (mockDB db) =
                    case mockDB db ! key of
                        TTString v -> putDBS key (BS.drop (BS.length (v <> value) - width) $ v <> value) db
                        _ -> error "putShiftLeft"
                | otherwise =
                    putDBS key value db
        liftIO $ atomically $ modifyTVar tdb modify

    putNoResponse = put

    -- Delete a key (no-op when absent).
    out key = do
        tdb <- ask
        let modify db = db { mockDB = M.delete key (mockDB db) }
        liftIO $ atomically $ modifyTVar tdb modify

    multipleOut = mapM_ out

    -- Fetch a string value; 'Nothing' when the key is absent.
    get key = do
        tdb <- ask
        liftIO $ atomically $ fmap (getDB key) $ readTVar tdb

    -- Fetch many keys, dropping the missing ones; note each key is read
    -- in its own transaction, so this is not a consistent snapshot.
    multipleGet keys = do
        vs <- mapM (\k -> fmap (\v -> (k, v)) $ get k) keys
        return [ (k, v) | (k, Just v) <- vs]

    -- Size in bytes of a stored string value.
    valueSize = fmap (fmap BS.length) . get

    iterInit = return ()
    iterNext = error "not implemented"

    -- All keys with the given prefix, optionally limited to @n@ results.
    forwardMatchingKeys prefix n = do
        tdb <- ask
        let readKeys db = filter (BS.isPrefixOf prefix) $ M.keys $ mockDB db
        ks <- liftIO $ atomically $ fmap readKeys $ readTVar tdb
        case n of
            Nothing -> return ks
            Just x -> return $ take x ks

    -- Add @n@ to an integer value (initialising to @n@ when absent) and
    -- return the updated value.
    addInt key n = do
        tdb <- ask
        let modify db
                | M.member key (mockDB db) =
                    case mockDB db ! key of
                        TTInt x -> putDBI key (x + n) db
                        _ -> error "addInt"
                | otherwise =
                    putDBI key n db
        let readDouble db = case mockDB db ! key of
                TTInt x -> x
                _ -> error "addInt"
        liftIO $ atomically $ modifyTVar tdb modify >> fmap readDouble (readTVar tdb)

    -- Same as 'addInt' but for double values.
    addDouble key n = do
        tdb <- ask
        let modify db
                | M.member key (mockDB db) =
                    case mockDB db ! key of
                        TTDouble x -> putDBD key (x + n) db
                        _ -> error "addDouble"
                | otherwise =
                    putDBD key n db
        let readDouble db = case mockDB db ! key of
                TTDouble x -> x
                _ -> error "addDouble"
        liftIO $ atomically $ modifyTVar tdb modify >> fmap readDouble (readTVar tdb)

    ext _func _opts _key _value = error "not implemented"
    sync = error "not implemented"
    optimize _param = return ()

    -- Drop every record.
    vanish = do
        tdb <- ask
        liftIO $ atomically $ modifyTVar tdb $ const emptyMockDB

    copy _path = error "not implemented"
    restore _path _usec _opts = error "not implemented"
    setMaster _host _port _usec _opts = error "not implemented"

    -- Number of stored records.
    recordNum = do
        tdb <- ask
        liftIO $ atomically $ fmap (toEnum . M.size . mockDB) $ readTVar tdb

    size = error "not implemented"
    status = error "not implemented"
    misc _func _opts _args = error "not implemented"
| notogawa/monarch | src/Database/Monarch/Mock/Action.hs | bsd-3-clause | 5,289 | 0 | 24 | 1,872 | 1,735 | 846 | 889 | 121 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- | This module provides types and functions to safely encode the remoteStorage
-- support specified by IETF's @draft-dejong-remotestorage-00.txt@ draft.
module Network.RemoteStorage.Types
( apiVersion
, apiAuthMethod
, apiWebfingerLink
-- * Store
-- ** Storage backend
, Store(..)
-- ** Individual items
, ItemName
, unItemName
, parseItemName
, validItemNameChar
, ItemVersion
, parseItemVersion
, itemVersionMilliseconds
, itemVersionFromMilliseconds
, showItemVersion
, ItemType(..)
, Folder(..)
, Document(..)
-- ** Item paths
, Path(..)
, parsePath
, isPublicPath
, bshowPath
, bshowFolderPath
, bshowDocumentPath
-- * Requests
, RequestOp(..)
, Request
-- * Modules
, ModuleName
, unModuleName
, parseModuleName
, validModuleNameChar
-- * Access Levels
, AccessLevel(..)
, parseAccessLevel
-- * Access Scope
, AccessScope
, parseAccessScope
) where
import qualified Codec.MIME.Type as MIME
import qualified Data.Aeson as J
import qualified Data.ByteString.Char8 as B
import qualified Data.Char as C
import Data.Monoid ((<>))
import qualified Data.Text as T
import Data.Time.Clock
import Data.Time.Clock.POSIX
import Data.Traversable (traverse)
import qualified Network.URI as URI
apiVersion :: B.ByteString
apiVersion = "draft-dejong-remotestorage-00"
apiAuthMethod :: B.ByteString
apiAuthMethod = "http://tools.ietf.org/html/rfc6749#section-4.2"
-- | Renders a WebFinger “links” entry for the given remoteStorage root URI and
-- authentication endpoint URI.
apiWebfingerLink :: URI.URI -> URI.URI -> J.Value
apiWebfingerLink storageRoot authEndpoint = J.object
[ "rel" J..= ("remotestorage" :: B.ByteString)
, "href" J..= URI.uriToString (const "") storageRoot ""
, "type" J..= apiVersion
, "properties" J..=
[ "auth-method" J..= apiAuthMethod
, "auth-endpoint" J..= URI.uriToString (const "") authEndpoint ""
]
]
--------------------------------------------------------------------------------
-- | An 'ItemName' is a 'ByteString' that can only contain valid item names.
--
-- Use the smart constructor 'parseItemName' to build an 'ItemName'.
newtype ItemName = ItemName { unItemName :: B.ByteString }
deriving (Eq, Show, Ord)
-- | 'Just' an 'ItemName' if the given 'B.ByteString' is a valid item name
-- otherwise 'Nothing'.
parseItemName :: B.ByteString -> Maybe ItemName
parseItemName "" = Nothing
parseItemName s
| B.all validItemNameChar s = Just $ ItemName s
| otherwise = Nothing
-- | Whether the given 'Char' is one of: @a-z@, @A-Z@, @0-9@, @%@, @-@, @_@
validItemNameChar :: Char -> Bool
validItemNameChar c = C.isAsciiUpper c || C.isAsciiLower c || C.isDigit c
|| c == '%' || c == '-' || c == '_'
--------------------------------------------------------------------------------
newtype ItemVersion = ItemVersion { unItemVersion :: UTCTime }
deriving (Eq, Show, Read, Ord, J.ToJSON, J.FromJSON)
itemVersionMilliseconds :: ItemVersion -> Integer
itemVersionMilliseconds = truncate . (*1000) . utcTimeToPOSIXSeconds . unItemVersion
itemVersionFromMilliseconds :: Integer -> ItemVersion
itemVersionFromMilliseconds i = ItemVersion . posixSecondsToUTCTime $ fromInteger i / 1000
parseItemVersion :: B.ByteString -> Maybe ItemVersion
parseItemVersion s = case reads (B.unpack s) of
((i,""):_) -> Just $ itemVersionFromMilliseconds i
_ -> Nothing
showItemVersion :: ItemVersion -> String
showItemVersion = show . itemVersionMilliseconds
--------------------------------------------------------------------------------
data ItemType = TFolder | TDocument
deriving (Show, Eq, Ord, Enum)
--------------------------------------------------------------------------------
data Folder = Folder ItemVersion [(ItemType, ItemName, ItemVersion)]
deriving (Eq, Show)
data Document = Document
{ docVersion :: ItemVersion
, docContentType :: MIME.Type
} deriving (Eq, Show)
instance J.ToJSON Folder where
toJSON (Folder _ xs) = J.object $ map pair xs
where
pair (itemt, ItemName n, ver) =
let n' = T.pack . B.unpack $ n
ver' = show . itemVersionMilliseconds $ ver in
case itemt of
TFolder -> (n' <> "/") J..= ver'
TDocument -> n' J..= ver'
--------------------------------------------------------------------------------
data Path = Path ItemType [ItemName]
deriving (Eq, Show)
bshowPath :: Path -> B.ByteString
bshowPath (Path TFolder xs) = bshowFolderPath xs
bshowPath (Path TDocument xs) = bshowDocumentPath xs
bshowFolderPath :: [ItemName] -> B.ByteString
bshowFolderPath [] = "/"
bshowFolderPath xs = bshowDocumentPath xs <> "/"
bshowDocumentPath :: [ItemName] -> B.ByteString
bshowDocumentPath xs = "/" <> (B.intercalate "/" $ fmap unItemName xs)
parsePath :: B.ByteString -> Maybe Path
parsePath "" = Nothing
parsePath "/" = Just $ Path TFolder []
parsePath s
| B.head s /= '/' = Nothing
| otherwise = return . Path pathType =<< path
where
path = traverse id . fmap parseItemName $ pieces
isFolder = B.last s == '/'
pathType | isFolder = TFolder
| otherwise = TDocument
pieces | isFolder = init pieces'
| otherwise = pieces'
where pieces' = B.split '/' (B.tail s)
isPublicPath :: Path -> Bool
isPublicPath (Path TFolder (ItemName "public":_)) = True
isPublicPath (Path TDocument (ItemName "public":_:_)) = True
isPublicPath _ = False
data Store m a = Store
{ sGetDocument :: Path -> Maybe ItemVersion -> m (Either String (Document, a))
, sPutDocument :: Path -> Maybe ItemVersion -> m (Either String ItemVersion)
, sDelDocument :: Path -> Maybe ItemVersion -> m (Either String ())
, sGetFolder :: Path -> Maybe ItemVersion -> m (Either String Folder)
}
--------------------------------------------------------------------------------
data RequestOp
= GetDocument
| PutDocument
| DelDocument
| GetFolder
deriving (Eq, Show, Enum)
type Request = (RequestOp, Path, Maybe ItemVersion)
--------------------------------------------------------------------------------
-- | A 'ModuleName' is a 'B.ByteString' that can only contain valid module names.
--
-- Use the smart constructor 'parseModuleName' to build an 'ModuleName'.
newtype ModuleName = ModuleName { unModuleName :: B.ByteString }
deriving (Eq, Show)
-- | 'Just' a 'ModuleName' if the given 'B.ByteString' would be a valid
-- 'ModuleName', otherwise 'Nothing'.
parseModuleName :: B.ByteString -> Maybe ModuleName
parseModuleName "" = Nothing
parseModuleName "public" = Nothing
parseModuleName s
| B.all validModuleNameChar s = Just $ ModuleName s
| otherwise = Nothing
-- | Whether the given 'Char' is one of: @a-z@, @0-9@
validModuleNameChar :: Char -> Bool
validModuleNameChar c = C.isAsciiLower c || C.isDigit c
--------------------------------------------------------------------------------
data AccessLevel = Read | ReadWrite
deriving (Eq, Show, Enum)
parseAccessLevel :: B.ByteString -> Maybe AccessLevel
parseAccessLevel "r" = Just Read
parseAccessLevel "rw" = Just ReadWrite
parseAccessLevel _ = Nothing
--------------------------------------------------------------------------------
type AccessScope = (ModuleName, AccessLevel)
parseAccessScope :: B.ByteString -> Maybe AccessScope
parseAccessScope t =
let (a,b) = B.break (==':') t in
case (parseModuleName a, parseAccessLevel $ B.drop 1 b) of
(Just a', Just b') -> Just (a',b')
_ -> Nothing
| k0001/snaplet-remotestorage | src/Network/RemoteStorage/Types.hs | bsd-3-clause | 7,905 | 0 | 15 | 1,666 | 1,882 | 1,028 | 854 | 157 | 2 |
{-| General purpose utilities
The names in this module clash heavily with the Haskell Prelude, so I
recommend the following import scheme:
> import Pipes
> import qualified Pipes.Prelude as P -- or use any other qualifier you prefer
Note that 'String'-based 'IO' is inefficient. The 'String'-based utilities
in this module exist only for simple demonstrations without incurring a
dependency on the @text@ package.
Also, 'stdinLn' and 'stdoutLn' remove and add newlines, respectively. This
behavior is intended to simplify examples. The corresponding @stdin@ and
@stdout@ utilities from @pipes-bytestring@ and @pipes-text@ preserve
newlines.
-}
{-# LANGUAGE RankNTypes, Trustworthy #-}
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
module Pipes.Prelude (
-- * Producers
-- $producers
stdinLn
, readLn
, fromHandle
, repeatM
, replicateM
-- * Consumers
-- $consumers
, stdoutLn
, stdoutLn'
, mapM_
, print
, toHandle
, drain
-- * Pipes
-- $pipes
, map
, mapM
, sequence
, mapFoldable
, filter
, filterM
, take
, takeWhile
, takeWhile'
, drop
, dropWhile
, concat
, elemIndices
, findIndices
, scan
, scanM
, chain
, read
, show
, seq
-- * Folds
-- $folds
, fold
, fold'
, foldM
, foldM'
, all
, any
, and
, or
, elem
, notElem
, find
, findIndex
, head
, index
, last
, length
, maximum
, minimum
, null
, sum
, product
, toList
, toListM
, toListM'
-- * Zips
, zip
, zipWith
-- * Utilities
, tee
, generalize
) where
import Control.Exception (throwIO, try)
import Control.Monad (liftM, replicateM_, when, unless)
import Control.Monad.Trans.State.Strict (get, put)
import Data.Functor.Identity (Identity, runIdentity)
import Foreign.C.Error (Errno(Errno), ePIPE)
import Pipes
import Pipes.Core
import Pipes.Internal
import Pipes.Lift (evalStateP)
import qualified GHC.IO.Exception as G
import qualified System.IO as IO
import qualified Prelude
import Prelude hiding (
all
, and
, any
, concat
, drop
, dropWhile
, elem
, filter
, head
, last
, length
, map
, mapM
, mapM_
, maximum
, minimum
, notElem
, null
, or
, print
, product
, read
, readLn
, sequence
, show
, seq
, sum
, take
, takeWhile
, zip
, zipWith
)
{- $producers
Use 'for' loops to iterate over 'Producer's whenever you want to perform the
same action for every element:
> -- Echo all lines from standard input to standard output
> runEffect $ for P.stdinLn $ \str -> do
> lift $ putStrLn str
... or more concisely:
>>> runEffect $ for P.stdinLn (lift . putStrLn)
Test<Enter>
Test
ABC<Enter>
ABC
...
-}
{-| Read 'String's from 'IO.stdin' using 'getLine'

    Terminates on end of input
-}
stdinLn :: MonadIO m => Producer' String m ()
stdinLn = fromHandle IO.stdin
{-# INLINABLE stdinLn #-}

-- | 'read' values from 'IO.stdin', ignoring failed parses
readLn :: (MonadIO m, Read a) => Producer' a m ()
readLn = stdinLn >-> read
{-# INLINABLE readLn #-}

{-| Read 'String's from a 'IO.Handle' using 'IO.hGetLine'

    Terminates on end of input
-}
fromHandle :: MonadIO m => IO.Handle -> Producer' String m ()
fromHandle h = go
  where
    -- Check for EOF before every read so we never block on a drained handle.
    go = do
        eof <- liftIO $ IO.hIsEOF h
        unless eof $ do
            str <- liftIO $ IO.hGetLine h
            yield str
            go
{-# INLINABLE fromHandle #-}

-- | Repeat a monadic action indefinitely, 'yield'ing each result
repeatM :: Monad m => m a -> Producer' a m r
repeatM m = lift m >~ cat
{-# INLINABLE repeatM #-}

{-# RULES
"repeatM m >-> p" forall m p . repeatM m >-> p = lift m >~ p
  #-}

{-| Repeat a monadic action a fixed number of times, 'yield'ing each result

> replicateM 0 x = return ()
>
> replicateM (m + n) x = replicateM m x >> replicateM n x  -- 0 <= {m,n}
-}
replicateM :: Monad m => Int -> m a -> Producer' a m ()
replicateM n m = lift m >~ take n
{-# INLINABLE replicateM #-}
{- $consumers
Feed a 'Consumer' the same value repeatedly using ('>~'):
>>> runEffect $ lift getLine >~ P.stdoutLn
Test<Enter>
Test
ABC<Enter>
ABC
...
-}
{-| Write 'String's to 'IO.stdout' using 'putStrLn'

    Unlike 'toHandle', 'stdoutLn' gracefully terminates on a broken output pipe
-}
stdoutLn :: MonadIO m => Consumer' String m ()
stdoutLn = go
  where
    go = do
        str <- await
        x   <- liftIO $ try (putStrLn str)
        case x of
            -- Terminate quietly when the downstream pipe vanished (EPIPE),
            -- e.g. when the consumer of our output was `head`.
            Left (G.IOError { G.ioe_type  = G.ResourceVanished
                            , G.ioe_errno = Just ioe })
                 | Errno ioe == ePIPE
                     -> return ()
            -- Any other IO error is re-thrown.
            Left  e  -> liftIO (throwIO e)
            Right () -> go
{-# INLINABLE stdoutLn #-}

{-| Write 'String's to 'IO.stdout' using 'putStrLn'

    This does not handle a broken output pipe, but has a polymorphic return
    value
-}
stdoutLn' :: MonadIO m => Consumer' String m r
stdoutLn' = for cat (\str -> liftIO (putStrLn str))
{-# INLINABLE stdoutLn' #-}

{-# RULES
"p >-> stdoutLn'" forall p .
    p >-> stdoutLn' = for p (\str -> liftIO (putStrLn str))
  #-}

-- | Consume all values using a monadic function
mapM_ :: Monad m => (a -> m ()) -> Consumer' a m r
mapM_ f = for cat (\a -> lift (f a))
{-# INLINABLE mapM_ #-}

{-# RULES
"p >-> mapM_ f" forall p f .
    p >-> mapM_ f = for p (\a -> lift (f a))
  #-}

-- | 'print' values to 'IO.stdout'
print :: (MonadIO m, Show a) => Consumer' a m r
print = for cat (\a -> liftIO (Prelude.print a))
{-# INLINABLE print #-}

{-# RULES
"p >-> print" forall p .
    p >-> print = for p (\a -> liftIO (Prelude.print a))
  #-}

-- | Write 'String's to a 'IO.Handle' using 'IO.hPutStrLn'
toHandle :: MonadIO m => IO.Handle -> Consumer' String m r
toHandle handle = for cat (\str -> liftIO (IO.hPutStrLn handle str))
{-# INLINABLE toHandle #-}

{-# RULES
"p >-> toHandle handle" forall p handle .
    p >-> toHandle handle = for p (\str -> liftIO (IO.hPutStrLn handle str))
  #-}

-- | 'discard' all incoming values
drain :: Monad m => Consumer' a m r
drain = for cat discard
{-# INLINABLE drain #-}

{-# RULES
"p >-> drain" forall p .
    p >-> drain = for p discard
  #-}
{- $pipes
Use ('>->') to connect 'Producer's, 'Pipe's, and 'Consumer's:
>>> runEffect $ P.stdinLn >-> P.takeWhile (/= "quit") >-> P.stdoutLn
Test<Enter>
Test
ABC<Enter>
ABC
quit<Enter>
>>>
-}
{-| Apply a function to all values flowing downstream

> map id = cat
>
> map (g . f) = map f >-> map g
-}
map :: Monad m => (a -> b) -> Pipe a b m r
map f = for cat (\a -> yield (f a))
{-# INLINABLE map #-}

{-# RULES
"p >-> map f" forall p f . p >-> map f = for p (\a -> yield (f a))
; "map f >-> p" forall p f . map f >-> p = (do
    a <- await
    return (f a) ) >~ p
  #-}

{-| Apply a monadic function to all values flowing downstream

> mapM return = cat
>
> mapM (f >=> g) = mapM f >-> mapM g
-}
mapM :: Monad m => (a -> m b) -> Pipe a b m r
mapM f = for cat $ \a -> do
    b <- lift (f a)
    yield b
{-# INLINABLE mapM #-}

{-# RULES
"p >-> mapM f" forall p f . p >-> mapM f = for p (\a -> do
    b <- lift (f a)
    yield b )
; "mapM f >-> p" forall p f . mapM f >-> p = (do
    a <- await
    b <- lift (f a)
    return b ) >~ p
  #-}

-- | Convert a stream of actions to a stream of values
sequence :: Monad m => Pipe (m a) a m r
sequence = mapM id
{-# INLINABLE sequence #-}

{- | Apply a function to all values flowing downstream, and
     forward each element of the result.
-}
mapFoldable :: (Monad m, Foldable t) => (a -> t b) -> Pipe a b m r
mapFoldable f = for cat (\a -> each (f a))
{-# INLINABLE mapFoldable #-}

{-# RULES
"p >-> mapFoldable f" forall p f .
    p >-> mapFoldable f = for p (\a -> each (f a))
  #-}

{-| @(filter predicate)@ only forwards values that satisfy the predicate.

> filter (pure True) = cat
>
> filter (liftA2 (&&) p1 p2) = filter p1 >-> filter p2
-}
filter :: Monad m => (a -> Bool) -> Pipe a a m r
filter predicate = for cat $ \a -> when (predicate a) (yield a)
{-# INLINABLE filter #-}

{-# RULES
"p >-> filter predicate" forall p predicate.
    p >-> filter predicate = for p (\a -> when (predicate a) (yield a))
  #-}

{-| @(filterM predicate)@ only forwards values that satisfy the monadic
    predicate

> filterM (pure (pure True)) = cat
>
> filterM (liftA2 (liftA2 (&&)) p1 p2) = filterM p1 >-> filterM p2
-}
filterM :: Monad m => (a -> m Bool) -> Pipe a a m r
filterM predicate = for cat $ \a -> do
    b <- lift (predicate a)
    when b (yield a)
{-# INLINABLE filterM #-}

{-# RULES
"p >-> filterM predicate" forall p predicate .
    p >-> filterM predicate = for p (\a -> do
        b <- lift (predicate a)
        when b (yield a) )
  #-}

{-| @(take n)@ only allows @n@ values to pass through

> take 0 = return ()
>
> take (m + n) = take m >> take n

> take <infinity> = cat
>
> take (min m n) = take m >-> take n
-}
take :: Monad m => Int -> Pipe a a m ()
take n = replicateM_ n $ do
    a <- await
    yield a
{-# INLINABLE take #-}
{-| @(takeWhile p)@ allows values to pass downstream so long as they satisfy
    the predicate @p@.

> takeWhile (pure True) = cat
>
> takeWhile (liftA2 (&&) p1 p2) = takeWhile p1 >-> takeWhile p2
-}
takeWhile :: Monad m => (a -> Bool) -> Pipe a a m ()
takeWhile predicate = go
  where
    go = do
        a <- await
        if (predicate a)
            then do
                yield a
                go
            else return ()
{-# INLINABLE takeWhile #-}

{-| @(takeWhile' p)@ is a version of takeWhile that returns the value failing
    the predicate.

> takeWhile' (pure True) = cat
>
> takeWhile' (liftA2 (&&) p1 p2) = takeWhile' p1 >-> takeWhile' p2
-}
takeWhile' :: Monad m => (a -> Bool) -> Pipe a a m a
takeWhile' predicate = go
  where
    -- Unlike 'takeWhile', the offending element becomes the return value
    -- instead of being dropped.
    go = do
        a <- await
        if (predicate a)
            then do
                yield a
                go
            else return a
{-# INLINABLE takeWhile' #-}

{-| @(drop n)@ discards @n@ values going downstream

> drop 0 = cat
>
> drop (m + n) = drop m >-> drop n
-}
drop :: Monad m => Int -> Pipe a a m r
drop n = do
    replicateM_ n await
    cat
{-# INLINABLE drop #-}

{-| @(dropWhile p)@ discards values going downstream until one violates the
    predicate @p@.

> dropWhile (pure False) = cat
>
> dropWhile (liftA2 (||) p1 p2) = dropWhile p1 >-> dropWhile p2
-}
dropWhile :: Monad m => (a -> Bool) -> Pipe a a m r
dropWhile predicate = go
  where
    -- Once one element fails the predicate, forward it and become 'cat'.
    go = do
        a <- await
        if (predicate a)
            then go
            else do
                yield a
                cat
{-# INLINABLE dropWhile #-}
-- | Flatten all 'Foldable' elements flowing downstream
concat :: (Monad m, Foldable f) => Pipe (f a) a m r
concat = for cat each
{-# INLINABLE concat #-}

{-# RULES
"p >-> concat" forall p . p >-> concat = for p each
  #-}

-- | Outputs the indices of all elements that match the given element
elemIndices :: (Monad m, Eq a) => a -> Pipe a Int m r
elemIndices a = findIndices (a ==)
{-# INLINABLE elemIndices #-}

-- | Outputs the indices of all elements that satisfied the predicate
findIndices :: Monad m => (a -> Bool) -> Pipe a Int m r
findIndices predicate = loop 0
  where
    -- ($!) keeps the running index strict so no thunk chain accumulates.
    loop n = do
        a <- await
        when (predicate a) (yield n)
        loop $! n + 1
{-# INLINABLE findIndices #-}

{-| Strict left scan

> Control.Foldl.purely scan :: Monad m => Fold a b -> Pipe a b m r
-}
scan :: Monad m => (x -> a -> x) -> x -> (x -> b) -> Pipe a b m r
scan step begin done = loop begin
  where
    -- The seed is yielded first, then each step result; ($!) keeps the
    -- accumulator strict.
    loop x = do
        yield (done x)
        a <- await
        let x' = step x a
        loop $! x'
{-# INLINABLE scan #-}

{-| Strict, monadic left scan

> Control.Foldl.impurely scan :: Monad m => FoldM a m b -> Pipe a b m r
-}
scanM :: Monad m => (x -> a -> m x) -> m x -> (x -> m b) -> Pipe a b m r
scanM step begin done = do
    x <- lift begin
    loop x
  where
    loop x = do
        b <- lift (done x)
        yield b
        a  <- await
        x' <- lift (step x a)
        loop $! x'
{-# INLINABLE scanM #-}

{-| Apply an action to all values flowing downstream

> chain (pure (return ())) = cat
>
> chain (liftA2 (>>) m1 m2) = chain m1 >-> chain m2
-}
chain :: Monad m => (a -> m ()) -> Pipe a a m r
chain f = for cat $ \a -> do
    lift (f a)
    yield a
{-# INLINABLE chain #-}

{-# RULES
"p >-> chain f" forall p f .
    p >-> chain f = for p (\a -> do
        lift (f a)
        yield a )
; "chain f >-> p" forall p f .
    chain f >-> p = (do
        a <- await
        lift (f a)
        return a ) >~ p
  #-}

-- | Parse 'Read'able values, only forwarding the value if the parse succeeds
read :: (Monad m, Read a) => Pipe String a m r
read = for cat $ \str -> case (reads str) of
    -- Only a complete, unambiguous parse (empty remainder) is forwarded.
    [(a, "")] -> yield a
    _         -> return ()
{-# INLINABLE read #-}

{-# RULES
"p >-> read" forall p .
    p >-> read = for p (\str -> case (reads str) of
        [(a, "")] -> yield a
        _         -> return () )
  #-}

-- | Convert 'Show'able values to 'String's
show :: (Monad m, Show a) => Pipe a String m r
show = map Prelude.show
{-# INLINABLE show #-}

-- | Evaluate all values flowing downstream to WHNF
seq :: Monad m => Pipe a a m r
seq = for cat $ \a -> yield $! a
{-# INLINABLE seq #-}
{- $folds
Use these to fold the output of a 'Producer'. Many of these folds will stop
drawing elements if they can compute their result early, like 'any':
>>> P.any null P.stdinLn
Test<Enter>
ABC<Enter>
<Enter>
True
>>>
-}
{-| Strict fold of the elements of a 'Producer'

> Control.Foldl.purely fold :: Monad m => Fold a b -> Producer a m () -> m b
-}
fold :: Monad m => (x -> a -> x) -> x -> (x -> b) -> Producer a m () -> m b
fold step begin done p0 = loop p0 begin
  where
    -- Walk the proxy constructors directly.  A 'Producer' can never
    -- 'Request', which 'closed' makes explicit; ($!) keeps the accumulator
    -- strict at every step.
    loop p x = case p of
        Request v  _  -> closed v
        Respond a  fu -> loop (fu ()) $! step x a
        M          m  -> m >>= \p' -> loop p' x
        Pure    _     -> return (done x)
{-# INLINABLE fold #-}

{-| Strict fold of the elements of a 'Producer' that preserves the return value

> Control.Foldl.purely fold' :: Monad m => Fold a b -> Producer a m r -> m (b, r)
-}
fold' :: Monad m => (x -> a -> x) -> x -> (x -> b) -> Producer a m r -> m (b, r)
fold' step begin done p0 = loop p0 begin
  where
    loop p x = case p of
        Request v  _  -> closed v
        Respond a  fu -> loop (fu ()) $! step x a
        M          m  -> m >>= \p' -> loop p' x
        Pure    r     -> return (done x, r)
{-# INLINABLE fold' #-}

{-| Strict, monadic fold of the elements of a 'Producer'

> Control.Foldl.impurely foldM :: Monad m => FoldM a b -> Producer a m () -> m b
-}
foldM
    :: Monad m
    => (x -> a -> m x) -> m x -> (x -> m b) -> Producer a m () -> m b
foldM step begin done p0 = do
    x0 <- begin
    loop p0 x0
  where
    loop p x = case p of
        Request v  _  -> closed v
        Respond a  fu -> do
            x' <- step x a
            loop (fu ()) $! x'
        M          m  -> m >>= \p' -> loop p' x
        Pure    _     -> done x
{-# INLINABLE foldM #-}

{-| Strict, monadic fold of the elements of a 'Producer'

> Control.Foldl.impurely foldM' :: Monad m => FoldM a b -> Producer a m r -> m (b, r)
-}
foldM'
    :: Monad m
    => (x -> a -> m x) -> m x -> (x -> m b) -> Producer a m r -> m (b, r)
foldM' step begin done p0 = do
    x0 <- begin
    loop p0 x0
  where
    loop p x = case p of
        Request v  _  -> closed v
        Respond a  fu -> do
            x' <- step x a
            loop (fu ()) $! x'
        M          m  -> m >>= \p' -> loop p' x
        Pure    r     -> do
            b <- done x
            return (b, r)
{-# INLINABLE foldM' #-}
{-| @(all predicate p)@ determines whether all the elements of @p@ satisfy the
    predicate.
-}
all :: Monad m => (a -> Bool) -> Producer a m () -> m Bool
-- All elements satisfy the predicate iff no element fails it; 'null'
-- short-circuits on the first failing element.
all predicate p = null $ p >-> filter (\a -> not (predicate a))
{-# INLINABLE all #-}

{-| @(any predicate p)@ determines whether any element of @p@ satisfies the
    predicate.
-}
any :: Monad m => (a -> Bool) -> Producer a m () -> m Bool
-- Some element matches iff the filtered stream is non-empty; this stops
-- drawing input at the first match.
any predicate p = liftM not $ null (p >-> filter predicate)
{-# INLINABLE any #-}

-- | Determines whether all elements are 'True'
and :: Monad m => Producer Bool m () -> m Bool
and = all id
{-# INLINABLE and #-}

-- | Determines whether any element is 'True'
or :: Monad m => Producer Bool m () -> m Bool
or = any id
{-# INLINABLE or #-}

{-| @(elem a p)@ returns 'True' if @p@ has an element equal to @a@, 'False'
    otherwise
-}
elem :: (Monad m, Eq a) => a -> Producer a m () -> m Bool
elem a = any (a ==)
{-# INLINABLE elem #-}

{-| @(notElem a)@ returns 'False' if @p@ has an element equal to @a@, 'True'
    otherwise
-}
notElem :: (Monad m, Eq a) => a -> Producer a m () -> m Bool
notElem a = all (a /=)
{-# INLINABLE notElem #-}

-- | Find the first element of a 'Producer' that satisfies the predicate
find :: Monad m => (a -> Bool) -> Producer a m () -> m (Maybe a)
find predicate p = head (p >-> filter predicate)
{-# INLINABLE find #-}

{-| Find the index of the first element of a 'Producer' that satisfies the
    predicate
-}
findIndex :: Monad m => (a -> Bool) -> Producer a m () -> m (Maybe Int)
findIndex predicate p = head (p >-> findIndices predicate)
{-# INLINABLE findIndex #-}

-- | Retrieve the first element from a 'Producer'
head :: Monad m => Producer a m () -> m (Maybe a)
head p = do
    -- 'next' reports either end-of-stream ('Left') or the first element
    -- paired with the rest of the producer.
    x <- next p
    return $ case x of
        Left   _     -> Nothing
        Right (a, _) -> Just a
{-# INLINABLE head #-}

-- | Index into a 'Producer'
index :: Monad m => Int -> Producer a m () -> m (Maybe a)
index n p = head (p >-> drop n)
{-# INLINABLE index #-}

-- | Retrieve the last element from a 'Producer'
last :: Monad m => Producer a m () -> m (Maybe a)
last p0 = do
    x <- next p0
    case x of
        Left   _      -> return Nothing
        Right (a, p') -> loop a p'
  where
    -- Carry the most recently seen element until the stream ends.
    loop a p = do
        x <- next p
        case x of
            Left   _       -> return (Just a)
            Right (a', p') -> loop a' p'
{-# INLINABLE last #-}

-- | Count the number of elements in a 'Producer'
length :: Monad m => Producer a m () -> m Int
length = fold (\n _ -> n + 1) 0 id
{-# INLINABLE length #-}

-- | Find the maximum element of a 'Producer'
maximum :: (Monad m, Ord a) => Producer a m () -> m (Maybe a)
maximum = fold step Nothing id
  where
    step x a = Just $ case x of
        Nothing -> a
        Just a' -> max a a'
{-# INLINABLE maximum #-}

-- | Find the minimum element of a 'Producer'
minimum :: (Monad m, Ord a) => Producer a m () -> m (Maybe a)
minimum = fold step Nothing id
  where
    step x a = Just $ case x of
        Nothing -> a
        Just a' -> min a a'
{-# INLINABLE minimum #-}

-- | Determine if a 'Producer' is empty
null :: Monad m => Producer a m () -> m Bool
null p = do
    x <- next p
    return $ case x of
        Left  _ -> True
        Right _ -> False
{-# INLINABLE null #-}
-- | Compute the sum of the elements of a 'Producer'
sum :: (Monad m, Num a) => Producer a m () -> m a
sum = fold (+) 0 id
{-# INLINABLE sum #-}

-- | Compute the product of the elements of a 'Producer'
product :: (Monad m, Num a) => Producer a m () -> m a
product = fold (*) 1 id
{-# INLINABLE product #-}

-- | Convert a pure 'Producer' into a list
toList :: Producer a Identity () -> [a]
toList = loop
  where
    -- Walk the proxy structure directly; a 'Producer' cannot 'Request',
    -- which 'closed' makes explicit.
    loop p = case p of
        Request v  _  -> closed v
        Respond a  fu -> a:loop (fu ())
        M          m  -> loop (runIdentity m)
        Pure    _     -> []
{-# INLINABLE toList #-}

{-| Convert an effectful 'Producer' into a list

    Note: 'toListM' is not an idiomatic use of @pipes@, but I provide it for
    simple testing purposes.  Idiomatic @pipes@ style consumes the elements
    immediately as they are generated instead of loading all elements into
    memory.
-}
toListM :: Monad m => Producer a m () -> m [a]
-- Accumulates with a difference list ([a] -> [a]) so each append is O(1).
toListM = fold step begin done
  where
    step x a = x . (a:)
    begin = id
    done x = x []
{-# INLINABLE toListM #-}

{-| Convert an effectful 'Producer' into a list alongside the return value

    Note: 'toListM'' is not an idiomatic use of @pipes@, but I provide it for
    simple testing purposes.  Idiomatic @pipes@ style consumes the elements
    immediately as they are generated instead of loading all elements into
    memory.
-}
toListM' :: Monad m => Producer a m r -> m ([a], r)
-- Same difference-list accumulation as 'toListM', but also keeps the
-- producer's return value.
toListM' = fold' step begin done
  where
    step x a = x . (a:)
    begin = id
    done x = x []
{-# INLINABLE toListM' #-}
-- | Zip two 'Producer's
zip :: Monad m
    => (Producer   a     m r)
    -> (Producer      b  m r)
    -> (Producer' (a, b) m r)
zip = zipWith (,)
{-# INLINABLE zip #-}

-- | Zip two 'Producer's using the provided combining function
zipWith :: Monad m
    => (a -> b -> c)
    -> (Producer  a m r)
    -> (Producer  b m r)
    -> (Producer' c m r)
zipWith f = go
  where
    -- Draw one element from each producer per step; terminate with the
    -- return value of whichever producer ends first.
    go p1 p2 = do
        e1 <- lift $ next p1
        case e1 of
            Left r         -> return r
            Right (a, p1') -> do
                e2 <- lift $ next p2
                case e2 of
                    Left r         -> return r
                    Right (b, p2') -> do
                        yield (f a b)
                        go p1' p2'
{-# INLINABLE zipWith #-}
{-| Transform a 'Consumer' to a 'Pipe' that reforwards all values further
    downstream
-}
tee :: Monad m => Consumer a m r -> Pipe a a m r
-- The hidden 'State' holds the last element awaited by the consumer but not
-- yet re-yielded; it is flushed both before each new await and when the
-- consumer finishes, so no element is lost.
tee p = evalStateP Nothing $ do
    r <- up >\\ (hoist lift p //> dn)
    ma <- lift get
    case ma of
        Nothing -> return ()
        Just a  -> yield a
    return r
  where
    up () = do
        -- Forward the previously buffered element (if any) before pulling
        -- the next one for the wrapped consumer.
        ma <- lift get
        case ma of
            Nothing -> return ()
            Just a  -> yield a
        a <- await
        lift $ put (Just a)
        return a
    -- A 'Consumer' never 'respond's, so this branch is unreachable.
    dn v = closed v
{-# INLINABLE tee #-}

{-| Transform a unidirectional 'Pipe' to a bidirectional 'Proxy'

> generalize (f >-> g) = generalize f >+> generalize g
>
> generalize cat = pull
-}
generalize :: Monad m => Pipe a b m r -> x -> Proxy x a x b m r
-- The threaded 'State' carries the upstream argument @x@ between the
-- substituted await ('up') and yield ('dn') sites.
generalize p x0 = evalStateP x0 $ up >\\ hoist lift p //> dn
  where
    up () = do
        x <- lift get
        request x
    dn a = do
        x <- respond a
        lift $ put x
{-# INLINABLE generalize #-}
| FranklinChen/Haskell-Pipes-Library | src/Pipes/Prelude.hs | bsd-3-clause | 22,513 | 0 | 21 | 6,900 | 5,525 | 2,802 | 2,723 | -1 | -1 |
module Compiler where
import qualified Codegen
import qualified Lexer
import qualified Parser
import Data.Bifunctor (first)
import Text.Megaparsec.Error (ParseError)
import Control.Exception.Base (try, SomeException)
-- | Everything that can go wrong while compiling one input: a failure in
-- parsing, in LLVM assembly generation, in checking, or any other exception
-- raised before assembly completed.
data CompilationError = ParseError ParseError
                      | LlvmError Codegen.LlvmError
                      | CheckerError String
                      | OtherError SomeException
-- | Human-readable rendering of each failure, prefixed with its stage.
--
-- Fix: the 'CheckerError' case previously used @show s@ on the 'String'
-- payload, which wrapped the message in quotes and escaped it, and it was
-- the only constructor without a stage prefix.  Render the message verbatim
-- with a prefix, consistent with the other cases.
instance Show CompilationError where
  show (ParseError p) = "Parse error: " ++ show p
  show (LlvmError l) = "Assembly error: " ++ show l
  show (CheckerError s) = "Checker error: " ++ s
  show (OtherError e) = "Exception before assembly: " ++ show e
-- | Compile a source string: lex, parse, then (if both succeed) generate
-- assembly via 'Codegen.toAssembly'.  Any exception thrown during code
-- generation is caught and reported as 'OtherError' rather than escaping.
compile :: String -> IO (Either CompilationError String)
compile input =
  let parsed = first ParseError $ Parser.parse "" =<< Lexer.lex "" input
  in case parsed of
    Left err -> pure (Left err)
    -- 'try' at 'SomeException' deliberately captures anything Codegen may
    -- throw; its 'Either LlvmError' result is then re-tagged.
    Right funDefs -> do tried <- (try $ Codegen.toAssembly funDefs) :: IO (Either SomeException (Either Codegen.LlvmError String))
                        case tried of
                          Left err -> pure . Left . OtherError $ err
                          Right assembled -> pure $ first LlvmError assembled
| letsbreelhere/egg | src/Compiler.hs | bsd-3-clause | 1,174 | 0 | 18 | 326 | 347 | 176 | 171 | 25 | 3 |
--
-- >>> Main <<<
--
-- Main driver for the hub tool (see the README for details).
--
-- (c) 2011-2012 Chris Dornan
module Main(main) where
import Control.Monad
import System.IO
import System.Exit
import Text.Printf
import Hub.System
import Hub.FilePaths
import Hub.Directory
import Hub.CommandLine
import Hub.Commands
import qualified Version as V
-- | Entry point: parse the command line, initialise the hub directory for
-- commands that need it, then dispatch to the handler for each command.
main :: IO ()
main =
 do cl <- commandLine
    -- Help and version queries must not touch the directory; every other
    -- command may assume it exists afterwards.
    case cl of
      HelpCL _ _ -> return ()
      VrsnCL -> return ()
      _ -> initDirectory
    -- One handler per command-line form.
    case cl of
      ProgCL hub (prg,as) -> _prog hub prg as
      HelpCL err hlp -> _help err hlp
      VrsnCL -> _vrsn
      DfltCL -> _default
      StDfCL hub -> _default_hub $ Just hub
      RsDfCL -> _default_hub Nothing
      LsCL af qf -> _ls af qf
      GetCL -> _get
      SetCL hub -> _set hub
      UnsetCL -> _unset
      NameCL hub -> _name hub
      InfoCL hub -> _info hub
      LockCL hub -> _lock hub
      UnlockCL hub -> _unlock hub
      PathCL hub -> _path hub
      XmlCL hub -> _xml hub
      InitCL hub hn set -> _init hub hn set
      CommentCL hub cmt -> _comment hub cmt
      CpCL hub hn -> _cp hub hn
      MvCL hub hn -> _mv hub hn
      RmCL hub -> _rm hub
      SwapCL hub hn -> _swap hub hn
      GcCL -> _gc
      ListCL hub -> _list hub
      CheckCL hub -> _check hub
      SaveCL hub -> _save hub
      LoadCL hn -> _load hn
      VerifyCL hub sf -> _verify hub sf
      InstallCL hub pkns -> _install hub pkns
      EraseCL hub pkns ef -> _erase hub pkns ef
-- | Print the help text.  When the flag marks an erroneous invocation the
-- text goes to stderr and the process exits with status 1; otherwise it is
-- written to stdout.
_help :: Bool -> String -> IO ()
_help is_err hlp
    | is_err    = do hPutStrLn stderr hlp
                     exitWith (ExitFailure 1)
    | otherwise = putStr hlp
-- | Print the hub version; if the system-version file exists, append its
-- contents as well.
_vrsn :: IO ()
_vrsn =
 do putStr $ printf "hub %s\n" V.version
    ex <- fileExists sysVersion
    when ex $
        readAFile sysVersion >>= putStr
| Lainepress/hub-src | hub.hs | bsd-3-clause | 2,482 | 0 | 11 | 1,238 | 641 | 301 | 340 | 58 | 32 |
{-# Language RebindableSyntax #-}
{-# Language ScopedTypeVariables #-}
{-# Language FlexibleContexts #-}
module Main where
import Prelude hiding ((>>=), (>>), fail, return)
import Symmetry.Language
import Symmetry.Verify
-- | Process that blocks on a single unit message and then terminates.
-- (Written against the tagless-final Symmetry DSL, hence the 'repr' wrapping.)
pingServer :: (DSL repr) => repr (Process repr ())
pingServer = do (p :: repr ()) <- recv
                return tt
-- | Spawn one 'pingServer' in the given role and send it the unit message
-- it is waiting for.
master :: (DSL repr) => repr
          (RSing -> Process repr ())
master = lam $ \r -> do p <- spawn r pingServer
                        send p tt
-- | Top-level program: allocate a fresh singleton role and run 'master' in it.
mainProc :: (DSL repr) => repr ()
mainProc = exec $ do r <- newRSing
                     r |> master
-- | Run the Symmetry verifier/driver over 'mainProc'.
main :: IO ()
main = checkerMain mainProc
| abakst/symmetry | checker/tests/pos/PingSingle.hs | mit | 638 | 0 | 11 | 178 | 221 | 118 | 103 | 19 | 1 |
{-
-----------------------------------------------------------------------------
--
-- (c) The University of Glasgow 2001-2003
--
-- Access to system tools: gcc, cp, rm etc
--
-----------------------------------------------------------------------------
-}
{-# LANGUAGE CPP, ScopedTypeVariables #-}
module SysTools (
-- Initialisation
initSysTools,
-- Interface to system tools
runUnlit, runCpp, runCc, -- [Option] -> IO ()
runPp, -- [Option] -> IO ()
runSplit, -- [Option] -> IO ()
runAs, runLink, runLibtool, -- [Option] -> IO ()
runMkDLL,
runWindres,
runLlvmOpt,
runLlvmLlc,
runClang,
figureLlvmVersion,
readElfSection,
getLinkerInfo,
getCompilerInfo,
linkDynLib,
askCc,
touch, -- String -> String -> IO ()
copy,
copyWithHeader,
-- Temporary-file management
setTmpDir,
newTempName, newTempLibName,
cleanTempDirs, cleanTempFiles, cleanTempFilesExcept,
addFilesToClean,
Option(..),
-- frameworks
getPkgFrameworkOpts,
getFrameworkOpts
) where
#include "HsVersions.h"
import DriverPhases
import Module
import Packages
import Config
import Outputable
import ErrUtils
import Panic
import Platform
import Util
import DynFlags
import Exception
import Data.IORef
import Control.Monad
import System.Exit
import System.Environment
import System.FilePath
import System.IO
import System.IO.Error as IO
import System.Directory
import Data.Char
import Data.List
import qualified Data.Map as Map
import Text.ParserCombinators.ReadP hiding (char)
import qualified Text.ParserCombinators.ReadP as R
#ifndef mingw32_HOST_OS
import qualified System.Posix.Internals
#else /* Must be Win32 */
import Foreign
import Foreign.C.String
#endif
import System.Process
import Control.Concurrent
import FastString
import SrcLoc ( SrcLoc, mkSrcLoc, noSrcSpan, mkSrcSpan )
#ifdef mingw32_HOST_OS
# if defined(i386_HOST_ARCH)
# define WINDOWS_CCONV stdcall
# elif defined(x86_64_HOST_ARCH)
# define WINDOWS_CCONV ccall
# else
# error Unknown mingw32 arch
# endif
#endif
{-
How GHC finds its files
~~~~~~~~~~~~~~~~~~~~~~~
[Note topdir]
GHC needs various support files (library packages, RTS etc), plus
various auxiliary programs (cp, gcc, etc). It starts by finding topdir,
the root of GHC's support files
On Unix:
- ghc always has a shell wrapper that passes a -B<dir> option
On Windows:
- ghc never has a shell wrapper.
- we can find the location of the ghc binary, which is
$topdir/bin/<something>.exe
where <something> may be "ghc", "ghc-stage2", or similar
- we strip off the "bin/<something>.exe" to leave $topdir.
from topdir we can find package.conf, ghc-asm, etc.
SysTools.initSysProgs figures out exactly where all the auxiliary programs
are, and initialises mutable variables to make it easy to call them.
To to this, it makes use of definitions in Config.hs, which is a Haskell
file containing variables whose value is figured out by the build system.
Config.hs contains two sorts of things
cGCC, The *names* of the programs
cCPP e.g. cGCC = gcc
cUNLIT cCPP = gcc -E
etc They do *not* include paths
cUNLIT_DIR The *path* to the directory containing unlit, split etc
cSPLIT_DIR *relative* to the root of the build tree,
for use when running *in-place* in a build tree (only)
---------------------------------------------
NOTES for an ALTERNATIVE scheme (i.e *not* what is currently implemented):
Another hair-brained scheme for simplifying the current tool location
nightmare in GHC: Simon originally suggested using another
configuration file along the lines of GCC's specs file - which is fine
except that it means adding code to read yet another configuration
file. What I didn't notice is that the current package.conf is
general enough to do this:
Package
{name = "tools", import_dirs = [], source_dirs = [],
library_dirs = [], hs_libraries = [], extra_libraries = [],
include_dirs = [], c_includes = [], package_deps = [],
extra_ghc_opts = ["-pgmc/usr/bin/gcc","-pgml${topdir}/bin/unlit", ... etc.],
extra_cc_opts = [], extra_ld_opts = []}
Which would have the advantage that we get to collect together in one
place the path-specific package stuff with the path-specific tool
stuff.
End of NOTES
---------------------------------------------
************************************************************************
* *
\subsection{Initialisation}
* *
************************************************************************
-}
-- | Locate GHC's @topdir@ (see [Note topdir]), read the @settings@ and
-- @platformConstants@ files found there, and build the immutable 'Settings'
-- record describing the target platform and every external tool GHC invokes.
-- Fails with 'pgmError' if either file is missing, unparseable, or lacks a
-- required key.
initSysTools :: Maybe String    -- Maybe TopDir path (without the '-B' prefix)
             -> IO Settings     -- Set all the mutable variables above, holding
                                --      (a) the system programs
                                --      (b) the package-config file
                                --      (c) the GHC usage message
initSysTools mbMinusB
  = do top_dir <- findTopDir mbMinusB
             -- see [Note topdir]
             -- NB: top_dir is assumed to be in standard Unix
             -- format, '/' separated
       let settingsFile = top_dir </> "settings"
           platformConstantsFile = top_dir </> "platformConstants"
           installed :: FilePath -> FilePath
           installed file = top_dir </> file
       settingsStr <- readFile settingsFile
       platformConstantsStr <- readFile platformConstantsFile
       -- 'maybeReadFuzzy' parses the association-list syntax of the
       -- settings files, tolerating trailing junk.
       mySettings <- case maybeReadFuzzy settingsStr of
                     Just s ->
                         return s
                     Nothing ->
                         pgmError ("Can't parse " ++ show settingsFile)
       platformConstants <- case maybeReadFuzzy platformConstantsStr of
                            Just s ->
                                return s
                            Nothing ->
                                pgmError ("Can't parse " ++
                                          show platformConstantsFile)
       -- Three lookup helpers over the settings alist:
       --   getSetting        : raw string, with "$topdir" expansion
       --   getBooleanSetting : strict YES/NO
       --   readSetting       : any 'Read'-able value
       let getSetting key = case lookup key mySettings of
                            Just xs ->
                                return $ case stripPrefix "$topdir" xs of
                                         Just [] ->
                                             top_dir
                                         Just xs'@(c:_)
                                          | isPathSeparator c ->
                                             top_dir ++ xs'
                                         _ ->
                                             xs
                            Nothing -> pgmError ("No entry for " ++ show key ++ " in " ++ show settingsFile)
           getBooleanSetting key = case lookup key mySettings of
                                   Just "YES" -> return True
                                   Just "NO" -> return False
                                   Just xs -> pgmError ("Bad value for " ++ show key ++ ": " ++ show xs)
                                   Nothing -> pgmError ("No entry for " ++ show key ++ " in " ++ show settingsFile)
           readSetting key = case lookup key mySettings of
                             Just xs ->
                                 case maybeRead xs of
                                 Just v -> return v
                                 Nothing -> pgmError ("Failed to read " ++ show key ++ " value " ++ show xs)
                             Nothing -> pgmError ("No entry for " ++ show key ++ " in " ++ show settingsFile)
       crossCompiling <- getBooleanSetting "cross compiling"
       targetArch <- readSetting "target arch"
       targetOS <- readSetting "target os"
       targetWordSize <- readSetting "target word size"
       targetUnregisterised <- getBooleanSetting "Unregisterised"
       targetHasGnuNonexecStack <- readSetting "target has GNU nonexec stack"
       targetHasIdentDirective <- readSetting "target has .ident directive"
       targetHasSubsectionsViaSymbols <- readSetting "target has subsections via symbols"
       myExtraGccViaCFlags <- getSetting "GCC extra via C opts"
       -- On Windows, mingw is distributed with GHC,
       -- so we look in TopDir/../mingw/bin
       -- It would perhaps be nice to be able to override this
       -- with the settings file, but it would be a little fiddly
       -- to make that possible, so for now you can't.
       gcc_prog <- getSetting "C compiler command"
       gcc_args_str <- getSetting "C compiler flags"
       cpp_prog <- getSetting "Haskell CPP command"
       cpp_args_str <- getSetting "Haskell CPP flags"
       let unreg_gcc_args = if targetUnregisterised
                            then ["-DNO_REGS", "-DUSE_MINIINTERPRETER"]
                            else []
           -- TABLES_NEXT_TO_CODE affects the info table layout.
           tntc_gcc_args
            | mkTablesNextToCode targetUnregisterised
              = ["-DTABLES_NEXT_TO_CODE"]
            | otherwise = []
           cpp_args= map Option (words cpp_args_str)
           gcc_args = map Option (words gcc_args_str
                                  ++ unreg_gcc_args
                                  ++ tntc_gcc_args)
       ldSupportsCompactUnwind <- getBooleanSetting "ld supports compact unwind"
       ldSupportsBuildId <- getBooleanSetting "ld supports build-id"
       ldSupportsFilelist <- getBooleanSetting "ld supports filelist"
       ldIsGnuLd <- getBooleanSetting "ld is GNU ld"
       perl_path <- getSetting "perl command"
       let pkgconfig_path = installed "package.conf.d"
           ghc_usage_msg_path = installed "ghc-usage.txt"
           ghci_usage_msg_path = installed "ghci-usage.txt"
           -- For all systems, unlit, split, mangle are GHC utilities
           -- architecture-specific stuff is done when building Config.hs
           unlit_path = installed cGHC_UNLIT_PGM
           -- split is a Perl script
           split_script = installed cGHC_SPLIT_PGM
       windres_path <- getSetting "windres command"
       libtool_path <- getSetting "libtool command"
       tmpdir <- getTemporaryDirectory
       touch_path <- getSetting "touch command"
       let -- On Win32 we don't want to rely on #!/bin/perl, so we prepend
           -- a call to Perl to get the invocation of split.
           -- On Unix, scripts are invoked using the '#!' method.  Binary
           -- installations of GHC on Unix place the correct line on the
           -- front of the script at installation time, so we don't want
           -- to wire-in our knowledge of $(PERL) on the host system here.
           (split_prog, split_args)
             | isWindowsHost = (perl_path, [Option split_script])
             | otherwise = (split_script, [])
       mkdll_prog <- getSetting "dllwrap command"
       let mkdll_args = []
       -- cpp is derived from gcc on all platforms
       -- HACK, see setPgmP below. We keep 'words' here to remember to fix
       -- Config.hs one day.
       -- Other things being equal, as and ld are simply gcc
       gcc_link_args_str <- getSetting "C compiler link flags"
       let as_prog = gcc_prog
           as_args = gcc_args
           ld_prog = gcc_prog
           ld_args = gcc_args ++ map Option (words gcc_link_args_str)
       -- We just assume on command line
       lc_prog <- getSetting "LLVM llc command"
       lo_prog <- getSetting "LLVM opt command"
       let platform = Platform {
                          platformArch = targetArch,
                          platformOS = targetOS,
                          platformWordSize = targetWordSize,
                          platformUnregisterised = targetUnregisterised,
                          platformHasGnuNonexecStack = targetHasGnuNonexecStack,
                          platformHasIdentDirective = targetHasIdentDirective,
                          platformHasSubsectionsViaSymbols = targetHasSubsectionsViaSymbols,
                          platformIsCrossCompiling = crossCompiling
                      }
       return $ Settings {
                    sTargetPlatform = platform,
                    sTmpDir = normalise tmpdir,
                    sGhcUsagePath = ghc_usage_msg_path,
                    sGhciUsagePath = ghci_usage_msg_path,
                    sTopDir = top_dir,
                    sRawSettings = mySettings,
                    sExtraGccViaCFlags = words myExtraGccViaCFlags,
                    sSystemPackageConfig = pkgconfig_path,
                    sLdSupportsCompactUnwind = ldSupportsCompactUnwind,
                    sLdSupportsBuildId = ldSupportsBuildId,
                    sLdSupportsFilelist = ldSupportsFilelist,
                    sLdIsGnuLd = ldIsGnuLd,
                    sProgramName = "ghc",
                    sProjectVersion = cProjectVersion,
                    sPgm_L = unlit_path,
                    sPgm_P = (cpp_prog, cpp_args),
                    sPgm_F = "",
                    sPgm_c = (gcc_prog, gcc_args),
                    sPgm_s = (split_prog,split_args),
                    sPgm_a = (as_prog, as_args),
                    sPgm_l = (ld_prog, ld_args),
                    sPgm_dll = (mkdll_prog,mkdll_args),
                    sPgm_T = touch_path,
                    sPgm_windres = windres_path,
                    sPgm_libtool = libtool_path,
                    sPgm_lo = (lo_prog,[]),
                    sPgm_lc = (lc_prog,[]),
                    sOpt_L = [],
                    sOpt_P = [],
                    sOpt_F = [],
                    sOpt_c = [],
                    sOpt_a = [],
                    sOpt_l = [],
                    sOpt_windres = [],
                    sOpt_lo = [],
                    sOpt_lc = [],
                    sPlatformConstants = platformConstants
                }
-- | Determine GHC's topdir, returned as a Unix-format ('/'-separated) path
-- (relying on getBaseDir to do so too).  An explicit @-B\<dir\>@ value wins;
-- otherwise fall back to the directory of the running executable, which
-- 'getBaseDir' reports as "Just" on Windows and "Nothing" on unix.
findTopDir :: Maybe String -- Maybe TopDir path (without the '-B' prefix).
           -> IO String    -- TopDir (in Unix format '/' separated)
findTopDir (Just minusb) = return (normalise minusb)
findTopDir Nothing = do
    maybe_exec_dir <- getBaseDir
    maybe (throwGhcExceptionIO (InstallationError "missing -B<dir> option"))
          return
          maybe_exec_dir
{-
************************************************************************
* *
\subsection{Running an external program}
* *
************************************************************************
-}
-- | Run the literate-Haskell pre-processor (@unlit@) configured in 'DynFlags',
-- prepending any user-supplied @-optL@ options to the given arguments.
runUnlit :: DynFlags -> [Option] -> IO ()
runUnlit dflags args = do
  let prog = pgm_L dflags
      opts = getOpts dflags opt_L
  runSomething dflags "Literate pre-processor" prog
               (map Option opts ++ args)
-- | Run the C pre-processor.  When warnings are treated as errors
-- (@Opt_WarnIsError@) we also pass @-Werror@ to cpp.
runCpp :: DynFlags -> [Option] -> IO ()
runCpp dflags args = do
    let (cpp, baseArgs) = pgm_P dflags
        userArgs        = map Option (getOpts dflags opt_P)
        werrorArgs
          | gopt Opt_WarnIsError dflags = [Option "-Werror"]
          | otherwise                   = []
    mb_env <- getGccEnv werrorArgs
    runSomethingFiltered dflags id "C pre-processor" cpp
                         (baseArgs ++ userArgs ++ werrorArgs ++ args) mb_env
-- | Run the user-specified Haskell pre-processor (@-pgmF@);
-- user @-optF@ options are appended after the caller's arguments.
runPp :: DynFlags -> [Option] -> IO ()
runPp dflags args =
    runSomething dflags "Haskell pre-processor" (pgm_F dflags)
                 (args ++ map Option (getOpts dflags opt_F))
-- | Run the C compiler, filtering some harmless warnings out of its
-- output.  Arguments are passed via a response file
-- ('runSomethingResponseFile') to avoid command-line length limits.
runCc :: DynFlags -> [Option] -> IO ()
runCc dflags args = do
    let (p, args0) = pgm_c dflags
        args1 = map Option (getOpts dflags opt_c)
        args2 = args0 ++ args1 ++ args
    mb_env <- getGccEnv args2
    runSomethingResponseFile dflags cc_filter "C Compiler" p args2 mb_env
  where
    -- discard some harmless warnings from gcc that we can't turn off
    cc_filter = unlines . doFilter . lines

    {-
    gcc gives warnings in chunks like so:
      In file included from /foo/bar/baz.h:11,
                       from /foo/bar/baz2.h:22,
                       from wibble.c:33:
      /foo/flibble:14: global register variable ...
      /foo/flibble:15: warning: call-clobbered r...
    We break it up into its chunks, remove any call-clobbered register
    warnings from each chunk, and then delete any chunks that we have
    emptied of warnings.
    -}
    doFilter = unChunkWarnings . filterWarnings . chunkWarnings []

    -- We can't assume that the output will start with an "In file inc..."
    -- line, so we start off expecting a list of warnings rather than a
    -- location stack.
    chunkWarnings :: [String] -- The location stack to use for the next
                              -- list of warnings
                  -> [String] -- The remaining lines to look at
                  -> [([String], [String])]
    chunkWarnings loc_stack [] = [(loc_stack, [])]
    chunkWarnings loc_stack xs
        = case break loc_stack_start xs of
            (warnings, lss:xs') ->
                case span loc_start_continuation xs' of
                  (lsc, xs'') ->
                      (loc_stack, warnings) : chunkWarnings (lss : lsc) xs''
            _ -> [(loc_stack, xs)]

    filterWarnings :: [([String], [String])] -> [([String], [String])]
    filterWarnings [] = []
    -- If the warnings are already empty then we are probably doing
    -- something wrong, so don't delete anything
    filterWarnings ((xs, []) : zs) = (xs, []) : filterWarnings zs
    filterWarnings ((xs, ys) : zs) = case filter wantedWarning ys of
                                         []  -> filterWarnings zs
                                         ys' -> (xs, ys') : filterWarnings zs

    unChunkWarnings :: [([String], [String])] -> [String]
    unChunkWarnings [] = []
    unChunkWarnings ((xs, ys) : zs) = xs ++ ys ++ unChunkWarnings zs

    loc_stack_start        s = "In file included from " `isPrefixOf` s
    loc_start_continuation s = " from " `isPrefixOf` s
    wantedWarning w
      | "warning: call-clobbered register used" `isContainedIn` w = False
      | otherwise = True
-- | @xs \`isContainedIn\` ys@ is 'True' iff @xs@ occurs as a contiguous
-- substring of @ys@.  This is exactly 'isInfixOf' from "Data.List"
-- (which this module already uses elsewhere); the previous hand-rolled
-- @any (xs \`isPrefixOf\`) (tails ys)@ duplicated it.
isContainedIn :: String -> String -> Bool
xs `isContainedIn` ys = xs `isInfixOf` ys
-- | Run the C compiler with the given arguments and return its stdout.
-- stderr is inherited from the parent process (see
-- 'readCreateProcessWithExitCode'').
askCc :: DynFlags -> [Option] -> IO String
askCc dflags args = do
    let (p, args0) = pgm_c dflags
        args1 = map Option (getOpts dflags opt_c)
        args2 = args0 ++ args1 ++ args
    mb_env <- getGccEnv args2
    runSomethingWith dflags "gcc" p args2 $ \real_args ->
        readCreateProcessWithExitCode' (proc p real_args){ env = mb_env }
-- | Similar to 'System.Process.readCreateProcessWithExitCode', but stderr
-- is inherited from the parent process, and output to stderr is not
-- captured.
readCreateProcessWithExitCode'
    :: CreateProcess
    -> IO (ExitCode, String)    -- ^ stdout
readCreateProcessWithExitCode' cp = do
    (_, Just hOut, _, ph) <- createProcess cp{ std_out = CreatePipe }

    -- Drain stdout in a separate thread so the child can never block
    -- on a full pipe buffer before we wait on it.
    out  <- hGetContents hOut
    done <- newEmptyMVar
    _ <- forkIO $ evaluate (length out) >> putMVar done ()

    -- Wait for all of the output to arrive, then for the child to exit.
    takeMVar done
    hClose hOut
    code <- waitForProcess ph
    return (code, out)
-- | Run a program with selected environment variables overridden and
-- capture its exit code, stdout and stderr.  Both output pipes are
-- drained in their own threads so the child cannot deadlock on a full
-- pipe buffer.
readProcessEnvWithExitCode
    :: String -- ^ program path
    -> [String] -- ^ program args
    -> [(String, String)] -- ^ environment to override
    -> IO (ExitCode, String, String) -- ^ (exit_code, stdout, stderr)
readProcessEnvWithExitCode prog args env_update = do
    current_env <- getEnvironment
    -- Entries in env_update shadow same-named entries inherited from
    -- the parent; everything else is passed through unchanged.
    let new_env = env_update ++ [ (k, v)
                                | let overriden_keys = map fst env_update
                                , (k, v) <- current_env
                                , k `notElem` overriden_keys
                                ]
        p = proc prog args
    (_stdin, Just stdoh, Just stdeh, pid) <-
        createProcess p{ std_out = CreatePipe
                       , std_err = CreatePipe
                       , env = Just new_env
                       }
    outMVar <- newEmptyMVar
    errMVar <- newEmptyMVar
    _ <- forkIO $ do
        stdo <- hGetContents stdoh
        _ <- evaluate (length stdo) -- force it all before signalling
        putMVar outMVar stdo
    _ <- forkIO $ do
        stde <- hGetContents stdeh
        _ <- evaluate (length stde)
        putMVar errMVar stde
    out <- takeMVar outMVar
    hClose stdoh
    err <- takeMVar errMVar
    hClose stdeh
    ex <- waitForProcess pid
    return (ex, out, err)
-- Don't let gcc localize version info string, #8825
-- (only LANGUAGE is overridden; other locale variables are untouched).
en_locale_env :: [(String, String)]
en_locale_env = [("LANGUAGE", "en")]
-- If the -B<dir> option is set, add <dir> to PATH. This works around
-- a bug in gcc on Windows Vista where it can't find its auxiliary
-- binaries (see bug #1110).
getGccEnv :: [Option] -> IO (Maybe [(String,String)])
getGccEnv opts =
    if null b_dirs
       then return Nothing
       else do env <- getEnvironment
               return (Just (map mangle_path env))
  where
    (b_dirs, _) = partitionWith get_b_opt opts

    get_b_opt (Option ('-':'B':dir)) = Left dir
    get_b_opt other = Right other

    -- Prepend the first -B dir to the PATH entry (name matched
    -- case-insensitively, as on Windows).  'head' is safe here: this
    -- is only reached when b_dirs is non-empty (see the null test).
    mangle_path (path,paths) | map toUpper path == "PATH"
          = (path, '\"' : head b_dirs ++ "\";" ++ paths)
    mangle_path other = other
-- | Run the assembly splitter (used by @-split-objs@).
runSplit :: DynFlags -> [Option] -> IO ()
runSplit dflags args = do
    let (splitter, baseArgs) = pgm_s dflags
    runSomething dflags "Splitter" splitter (baseArgs ++ args)
-- | Run the assembler, with the configured base options, any user
-- @-opta@ options, and then the caller's arguments.
runAs :: DynFlags -> [Option] -> IO ()
runAs dflags args = do
    let (as, baseArgs) = pgm_a dflags
        userArgs = map Option (getOpts dflags opt_a)
        allArgs  = baseArgs ++ userArgs ++ args
    mb_env <- getGccEnv allArgs
    runSomethingFiltered dflags id "Assembler" as allArgs mb_env
-- | Run the LLVM Optimiser
runLlvmOpt :: DynFlags -> [Option] -> IO ()
runLlvmOpt dflags args =
    let (opt, baseArgs) = pgm_lo dflags
        userArgs = map Option (getOpts dflags opt_lo)
    in runSomething dflags "LLVM Optimiser" opt (baseArgs ++ userArgs ++ args)
-- | Run the LLVM Compiler
runLlvmLlc :: DynFlags -> [Option] -> IO ()
runLlvmLlc dflags args =
    let (llc, baseArgs) = pgm_lc dflags
        userArgs = map Option (getOpts dflags opt_lc)
    in runSomething dflags "LLVM Compiler" llc (baseArgs ++ userArgs ++ args)
-- | Run the clang compiler (used as an assembler for the LLVM
-- backend on OS X as LLVM doesn't support the OS X system
-- assembler)
runClang :: DynFlags -> [Option] -> IO ()
runClang dflags args = do
    -- we simply assume its available on the PATH
    let clang = "clang"
        -- be careful what options we call clang with
        -- see #5903 and #7617 for bugs caused by this.
        (_,args0) = pgm_a dflags
        args1 = map Option (getOpts dflags opt_a)
        args2 = args0 ++ args1 ++ args
    mb_env <- getGccEnv args2
    -- Any exception (e.g. clang not installed) is re-thrown after
    -- printing a friendlier diagnostic.
    Exception.catch (do
        runSomethingFiltered dflags id "Clang (Assembler)" clang args2 mb_env
      )
      (\(err :: SomeException) -> do
          errorMsg dflags $
              text ("Error running clang! you need clang installed to use the" ++
                    " LLVM backend") $+$
              text "(or GHC tried to execute clang incorrectly)"
          throwIO err
      )
-- | Figure out which version of LLVM we are running this session.
-- Returns 'Nothing' (after printing a warning) if @llc@ cannot be run
-- or its @-version@ output cannot be parsed.
figureLlvmVersion :: DynFlags -> IO (Maybe (Int, Int))
figureLlvmVersion dflags = do
  let (pgm,opts) = pgm_lc dflags
      args = filter notNull (map showOpt opts)
      -- we grab the args even though they should be useless just in
      -- case the user is using a customised 'llc' that requires some
      -- of the options they've specified. llc doesn't care what other
      -- options are specified when '-version' is used.
      args' = args ++ ["-version"]
  ver <- catchIO (do
             (pin, pout, perr, _) <- runInteractiveProcess pgm args'
                                             Nothing Nothing
             {- > llc -version
                  LLVM (http://llvm.org/):
                    LLVM version 3.5.2
                    ...
             -}
             hSetBinaryMode pout False
             _ <- hGetLine pout
             vline <- dropWhile (not . isDigit) `fmap` hGetLine pout
             -- Parse "major.minor" defensively.  The previous code used
             -- 'read', whose failure (ErrorCall) is NOT caught by the
             -- surrounding catchIO and was only forced lazily after the
             -- handler had returned, crashing GHC on malformed output.
             -- 'reads' + 'fail' raises an IOError instead, which catchIO
             -- turns into the intended Nothing.
             v <- case span (/= '.') vline of
                     ("",_) -> fail "no digits!"
                     (x,y)  ->
                       case (reads x, reads (takeWhile isDigit (drop 1 y))) of
                         ([(major,"")], [(minor,"")]) -> return (major, minor)
                         _ -> fail ("couldn't parse LLVM version: " ++ vline)
             hClose pin
             hClose pout
             hClose perr
             return $ Just v
            )
            (\err -> do
                debugTraceMsg dflags 2
                    (text "Error (figuring out LLVM version):" <+>
                     text (show err))
                errorMsg dflags $ vcat
                    [ text "Warning:", nest 9 $
                          text "Couldn't figure out LLVM version!" $$
                          text "Make sure you have installed LLVM"]
                return Nothing)
  return ver
{- Note [Windows stack usage]
See: Trac #8870 (and #8834 for related info)
On Windows, occasionally we need to grow the stack. In order to do
this, we would normally just bump the stack pointer - but there's a
catch on Windows.
If the stack pointer is bumped by more than a single page, then the
pages between the initial pointer and the resulting location must be
properly committed by the Windows virtual memory subsystem. This is
only needed in the event we bump by more than one page (i.e 4097 bytes
or more).
Windows compilers solve this by emitting a call to a special function
called _chkstk, which does this committing of the pages for you.
The reason this was causing a segfault was because due to the fact the
new code generator tends to generate larger functions, we needed more
stack space in GHC itself. In the x86 codegen, we needed approximately
~12kb of stack space in one go, which caused the process to segfault,
as the intervening pages were not committed.
In the future, we should do the same thing, to make the problem
completely go away. In the mean time, we're using a workaround: we
instruct the linker to specify the generated PE as having an initial
reserved stack size of 8mb, as well as an initial *committed* stack
size of 8mb. The default committed size was previously only 4k.
Theoretically it's possible to still hit this problem if you request a
stack bump of more than 8mb in one go. But the amount of code
necessary is quite large, and 8mb "should be more than enough for
anyone" right now (he said, before millions of lines of code cried out
in terror).
-}
{- Note [Run-time linker info]
See also: Trac #5240, Trac #6063, Trac #10110
Before 'runLink', we need to be sure to get the relevant information
about the linker we're using at runtime to see if we need any extra
options. For example, GNU ld requires '--reduce-memory-overheads' and
'--hash-size=31' in order to use reasonable amounts of memory (see
trac #5240.) But this isn't supported in GNU gold.
Generally, the linker changing from what was detected at ./configure
time has always been possible using -pgml, but on Linux it can happen
'transparently' by installing packages like binutils-gold, which
change what /usr/bin/ld actually points to.
Clang vs GCC notes:
For gcc, 'gcc -Wl,--version' gives a bunch of output about how to
invoke the linker before the version information string. For 'clang',
the version information for 'ld' is all that's output. For this
reason, we typically need to slurp up all of the standard error output
and look through it.
Other notes:
We cache the LinkerInfo inside DynFlags, since clients may link
multiple times. The definition of LinkerInfo is there to avoid a
circular dependency.
-}
{- Note [ELF needed shared libs]
Some distributions change the link editor's default handling of
ELF DT_NEEDED tags to include only those shared objects that are
needed to resolve undefined symbols. For Template Haskell we need
the last temporary shared library also if it is not needed for the
currently linked temporary shared library. We specify --no-as-needed
to override the default. This flag exists in GNU ld and GNU gold.
The flag is only needed on ELF systems. On Windows (PE) and Mac OS X
(Mach-O) the flag is not needed.
-}
{- Note [Windows static libGCC]
The GCC versions being upgraded to in #10726 are configured with
dynamic linking of libgcc supported. This results in libgcc being
linked dynamically when a shared library is created.
This introduces thus an extra dependency on GCC dll that was not
needed before by shared libraries created with GHC. This is a particular
issue on Windows because you get a non-obvious error due to this missing
dependency. This dependent dll is also not commonly on your path.
For this reason using the static libgcc is preferred as it preserves
the same behaviour that existed before. There are however some very good
reasons to have the shared version as well as described on page 181 of
https://gcc.gnu.org/onlinedocs/gcc-5.2.0/gcc.pdf :
"There are several situations in which an application should use the
shared ‘libgcc’ instead of the static version. The most common of these
is when the application wishes to throw and catch exceptions across different
shared libraries. In that case, each of the libraries as well as the application
itself should use the shared ‘libgcc’. "
-}
-- | The extra link options required by the detected linker (empty for
-- an unrecognised linker).
neededLinkArgs :: LinkerInfo -> [Option]
neededLinkArgs info = case info of
    GnuLD     os -> os
    GnuGold   os -> os
    DarwinLD  os -> os
    SolarisLD os -> os
    UnknownLD    -> []
-- Grab linker info and cache it in DynFlags.
getLinkerInfo :: DynFlags -> IO LinkerInfo
getLinkerInfo dflags =
    readIORef (rtldInfo dflags) >>= maybe discover return
  where
    -- Not cached yet: probe the linker once and memoise the answer.
    discover = do
        info <- getLinkerInfo' dflags
        writeIORef (rtldInfo dflags) (Just info)
        return info
-- See Note [Run-time linker info].
-- | Detect which linker is in use and which extra options it needs.
-- The linker process is only spawned on platforms where the answer
-- isn't already fixed (i.e. not Solaris/Darwin/iOS/Windows).
getLinkerInfo' :: DynFlags -> IO LinkerInfo
getLinkerInfo' dflags = do
  let platform = targetPlatform dflags
      os = platformOS platform
      (pgm,args0) = pgm_l dflags
      args1 = map Option (getOpts dflags opt_l)
      args2 = args0 ++ args1
      args3 = filter notNull (map showOpt args2)

      -- Try to grab the info from the process output.
      parseLinkerInfo stdo _stde _exitc
        | any ("GNU ld" `isPrefixOf`) stdo =
          -- GNU ld specifically needs to use less memory. This especially
          -- hurts on small object files. Trac #5240.
          -- Set DT_NEEDED for all shared libraries. Trac #10110.
          return (GnuLD $ map Option ["-Wl,--hash-size=31",
                                      "-Wl,--reduce-memory-overheads",
                                      -- ELF specific flag
                                      -- see Note [ELF needed shared libs]
                                      "-Wl,--no-as-needed"])

        | any ("GNU gold" `isPrefixOf`) stdo =
          -- GNU gold only needs --no-as-needed. Trac #10110.
          -- ELF specific flag, see Note [ELF needed shared libs]
          return (GnuGold [Option "-Wl,--no-as-needed"])

        -- Unknown linker.
        | otherwise = fail "invalid --version output, or linker is unsupported"

  -- Process the executable call
  info <- catchIO (do
             case os of
               OSSolaris2 ->
                 -- Solaris uses its own Solaris linker. Even all
                 -- GNU C are recommended to configure with Solaris
                 -- linker instead of using GNU binutils linker. Also
                 -- all GCC distributed with Solaris follows this rule
                 -- precisely so we assume here, the Solaris linker is
                 -- used.
                 return $ SolarisLD []
               OSDarwin ->
                 -- Darwin has neither GNU Gold or GNU LD, but a strange linker
                 -- that doesn't support --version. We can just assume that's
                 -- what we're using.
                 return $ DarwinLD []
               OSiOS ->
                 -- Ditto for iOS
                 return $ DarwinLD []
               OSMinGW32 ->
                 -- GHC doesn't support anything but GNU ld on Windows anyway.
                 -- Process creation is also fairly expensive on win32, so
                 -- we short-circuit here.
                 return $ GnuLD $ map Option
                   [ -- Reduce ld memory usage
                     "-Wl,--hash-size=31"
                   , "-Wl,--reduce-memory-overheads"
                     -- Increase default stack, see
                     -- Note [Windows stack usage]
                     -- Force static linking of libGCC
                     -- Note [Windows static libGCC]
                   , "-Xlinker", "--stack=0x800000,0x800000", "-static-libgcc" ]
               _ -> do
                 -- In practice, we use the compiler as the linker here. Pass
                 -- -Wl,--version to get linker version info.
                 (exitc, stdo, stde) <- readProcessEnvWithExitCode pgm
                                            (["-Wl,--version"] ++ args3)
                                            en_locale_env
                 -- Split the output by lines to make certain kinds
                 -- of processing easier. In particular, 'clang' and 'gcc'
                 -- have slightly different outputs for '-Wl,--version', but
                 -- it's still easy to figure out.
                 parseLinkerInfo (lines stdo) (lines stde) exitc
           )
           (\err -> do
               debugTraceMsg dflags 2
                   (text "Error (figuring out linker information):" <+>
                    text (show err))
               errorMsg dflags $ hang (text "Warning:") 9 $
                   text "Couldn't figure out linker information!" $$
                   text "Make sure you're using GNU ld, GNU gold" <+>
                   text "or the built in OS X linker, etc."
               return UnknownLD)
  return info
-- Grab compiler info and cache it in DynFlags.
getCompilerInfo :: DynFlags -> IO CompilerInfo
getCompilerInfo dflags =
    readIORef (rtccInfo dflags) >>= maybe discover return
  where
    -- Not cached yet: probe the C compiler once and memoise the answer.
    discover = do
        info <- getCompilerInfo' dflags
        writeIORef (rtccInfo dflags) (Just info)
        return info
-- See Note [Run-time linker info].
-- | Probe the configured C compiler (via @-v@, whose output goes to
-- stderr) and classify it as gcc, clang, or an Apple clang variant.
getCompilerInfo' :: DynFlags -> IO CompilerInfo
getCompilerInfo' dflags = do
  let (pgm,_) = pgm_c dflags
      -- Try to grab the info from the process output.
      -- NB: guard order matters; the specific "Apple LLVM version 5.1"
      -- test must come before the generic "Apple LLVM version" test.
      parseCompilerInfo _stdo stde _exitc
        -- Regular GCC
        | any ("gcc version" `isInfixOf`) stde =
          return GCC
        -- Regular clang
        | any ("clang version" `isInfixOf`) stde =
          return Clang
        -- XCode 5.1 clang
        | any ("Apple LLVM version 5.1" `isPrefixOf`) stde =
          return AppleClang51
        -- XCode 5 clang
        | any ("Apple LLVM version" `isPrefixOf`) stde =
          return AppleClang
        -- XCode 4.1 clang
        | any ("Apple clang version" `isPrefixOf`) stde =
          return AppleClang
        -- Unknown linker.
        | otherwise = fail "invalid -v output, or compiler is unsupported"

  -- Process the executable call
  info <- catchIO (do
             (exitc, stdo, stde) <-
                 readProcessEnvWithExitCode pgm ["-v"] en_locale_env
             -- Split the output by lines to make certain kinds
             -- of processing easier.
             parseCompilerInfo (lines stdo) (lines stde) exitc
           )
           (\err -> do
               debugTraceMsg dflags 2
                   (text "Error (figuring out C compiler information):" <+>
                    text (show err))
               errorMsg dflags $ hang (text "Warning:") 9 $
                   text "Couldn't figure out C compiler information!" $$
                   text "Make sure you're using GNU gcc, or clang"
               return UnknownCC)
  return info
-- | Run the linker, adding whatever extra options the run-time linker
-- has been detected to need (see Note [Run-time linker info]).  On
-- Solaris the linker's output is filtered to drop harmless warnings.
runLink :: DynFlags -> [Option] -> IO ()
runLink dflags args = do
    -- See Note [Run-time linker info]
    linkargs <- neededLinkArgs `fmap` getLinkerInfo dflags
    let (p,args0) = pgm_l dflags
        args1 = map Option (getOpts dflags opt_l)
        args2 = args0 ++ linkargs ++ args1 ++ args
    mb_env <- getGccEnv args2
    runSomethingResponseFile dflags ld_filter "Linker" p args2 mb_env
  where
    ld_filter = case (platformOS (targetPlatform dflags)) of
                  OSSolaris2 -> sunos_ld_filter
                  _ -> id
    {-
      SunOS/Solaris ld emits harmless warning messages about unresolved
      symbols in case of compiling into shared library when we do not
      link against all the required libs. That is the case of GHC which
      does not link against RTS library explicitly in order to be able to
      choose the library later based on binary application linking
      parameters. The warnings look like:

        Undefined first referenced
        symbol in file
        stg_ap_n_fast ./T2386_Lib.o
        stg_upd_frame_info ./T2386_Lib.o
        templatezmhaskell_LanguageziHaskellziTHziLib_litE_closure ./T2386_Lib.o
        templatezmhaskell_LanguageziHaskellziTHziLib_appE_closure ./T2386_Lib.o
        templatezmhaskell_LanguageziHaskellziTHziLib_conE_closure ./T2386_Lib.o
        templatezmhaskell_LanguageziHaskellziTHziSyntax_mkNameGzud_closure ./T2386_Lib.o
        newCAF ./T2386_Lib.o
        stg_bh_upd_frame_info ./T2386_Lib.o
        stg_ap_ppp_fast ./T2386_Lib.o
        templatezmhaskell_LanguageziHaskellziTHziLib_stringL_closure ./T2386_Lib.o
        stg_ap_p_fast ./T2386_Lib.o
        stg_ap_pp_fast ./T2386_Lib.o
        ld: warning: symbol referencing errors

      this is actually coming from T2386 testcase. The emitting of those
      warnings is also a reason why so many TH testcases fail on Solaris.

      Following filter code is SunOS/Solaris linker specific and should
      filter out only linker warnings. Please note that the logic is a
      little bit more complex due to the simple reason that we need to preserve
      any other linker emitted messages. If there are any. Simply speaking
      if we see "Undefined" and later "ld: warning:..." then we omit all
      text between (including) the marks. Otherwise we copy the whole output.
    -}
    sunos_ld_filter :: String -> String
    sunos_ld_filter = unlines . sunos_ld_filter' . lines
    sunos_ld_filter' x = if (undefined_found x && ld_warning_found x)
                         then (ld_prefix x) ++ (ld_postfix x)
                         else x
    breakStartsWith x y = break (isPrefixOf x) y
    ld_prefix = fst . breakStartsWith "Undefined"
    undefined_found = not . null . snd . breakStartsWith "Undefined"
    ld_warn_break = breakStartsWith "ld: warning: symbol referencing errors"
    -- 'tail' is safe here: ld_postfix is only applied when
    -- ld_warning_found holds, i.e. the break found a matching line.
    ld_postfix = tail . snd . ld_warn_break
    ld_warning_found = not . null . snd . ld_warn_break
-- | Run @libtool@ to produce a static archive, folding in any extra
-- options the detected run-time linker needs.
runLibtool :: DynFlags -> [Option] -> IO ()
runLibtool dflags args = do
    extraLinkArgs <- neededLinkArgs `fmap` getLinkerInfo dflags
    let userArgs = map Option (getOpts dflags opt_l)
        allArgs  = [Option "-static"] ++ userArgs ++ args ++ extraLinkArgs
    mb_env <- getGccEnv allArgs
    runSomethingFiltered dflags id "Linker" (pgm_libtool dflags) allArgs mb_env
-- | Run the DLL-creation tool configured in 'DynFlags'.
runMkDLL :: DynFlags -> [Option] -> IO ()
runMkDLL dflags args = do
    let (p,args0) = pgm_dll dflags
        args1 = args0 ++ args
    -- Pass the already-built argument list to getGccEnv instead of
    -- recomputing (args0++args), matching the sibling run* functions.
    mb_env <- getGccEnv args1
    runSomethingFiltered dflags id "Make DLL" p args1 mb_env
-- | Run @windres@ (the Windows resource compiler), explicitly telling
-- it which C pre-processor command to use (see #1828).
runWindres :: DynFlags -> [Option] -> IO ()
runWindres dflags args = do
    let (gcc, gcc_args) = pgm_c dflags
        windres = pgm_windres dflags
        opts = map Option (getOpts dflags opt_windres)
        quote x = "\"" ++ x ++ "\""
        args' = -- If windres.exe and gcc.exe are in a directory containing
                -- spaces then windres fails to run gcc. We therefore need
                -- to tell it what command to use...
                Option ("--preprocessor=" ++
                        unwords (map quote (gcc :
                                            map showOpt gcc_args ++
                                            map showOpt opts ++
                                            ["-E", "-xc", "-DRC_INVOKED"])))
                -- ...but if we do that then if windres calls popen then
                -- it can't understand the quoting, so we have to use
                -- --use-temp-file so that it interprets it correctly.
                -- See #1828.
              : Option "--use-temp-file"
              : args
    mb_env <- getGccEnv gcc_args
    runSomethingFiltered dflags id "Windres" windres args' mb_env
-- | Update the modification time of a file using the configured
-- @touch@ program ('pgm_T'); @purpose@ appears in -v trace output.
touch :: DynFlags -> String -> String -> IO ()
touch dflags purpose arg =
    runSomething dflags purpose (pgm_T dflags) [FileOption "" arg]
-- | Copy a file with no header prepended; see 'copyWithHeader'.
copy :: DynFlags -> String -> FilePath -> FilePath -> IO ()
copy dflags purpose = copyWithHeader dflags purpose Nothing
-- | Copy @from@ to @to@, optionally prepending @maybe_header@.  The
-- header is written in UTF-8; the remainder of the copy is binary.
copyWithHeader :: DynFlags -> String -> Maybe String -> FilePath -> FilePath
               -> IO ()
copyWithHeader dflags purpose maybe_header from to = do
    showPass dflags purpose

    hout <- openBinaryFile to WriteMode
    hin  <- openBinaryFile from ReadMode
    ls <- hGetContents hin -- inefficient, but it'll do for now. ToDo: speed up
    maybe (return ()) (header hout) maybe_header
    hPutStr hout ls
    hClose hout
    hClose hin
  where
    -- write the header string in UTF-8.  The header is something like
    --   {-# LINE "foo.hs" #-}
    -- and we want to make sure a Unicode filename isn't mangled.
    header h str = do
        hSetEncoding h utf8
        hPutStr h str
        hSetBinaryMode h True
-- | read the contents of the named section in an ELF object as a
-- String.  Runs @readelf -p <section> <exe>@ and scans its output for
-- the first line of the form @[ 0] <contents>@, returning <contents>;
-- 'Nothing' if readelf fails or no such line parses.
readElfSection :: DynFlags -> String -> FilePath -> IO (Maybe String)
readElfSection _dflags section exe = do
    let
        prog = "readelf"
        args = [Option "-p", Option section, FileOption "" exe]
    --
    r <- readProcessEnvWithExitCode prog (filter notNull (map showOpt args))
                                    en_locale_env
    case r of
      (ExitSuccess, out, _err) -> return (doFilter (lines out))
      _ -> return Nothing
  where
    doFilter [] = Nothing
    doFilter (s:r) = case readP_to_S parse s of
                       [(p,"")] -> Just p
                       _r -> doFilter r
            where parse = do
                      skipSpaces
                      _ <- R.char '['
                      skipSpaces
                      _ <- string "0]"
                      skipSpaces
                      munch (const True)
{-
************************************************************************
* *
\subsection{Managing temporary files}
* *
************************************************************************
-}
-- | Delete (and forget) all registered temporary directories, unless
-- -keep-tmp-files is on.  Masked so the map swap and the deletion
-- cannot be separated by an async exception.
cleanTempDirs :: DynFlags -> IO ()
cleanTempDirs dflags =
    unless (gopt Opt_KeepTmpFiles dflags) $
    mask_ $ do
        dirs <- atomicModifyIORef' (dirsToClean dflags) $ \m -> (Map.empty, m)
        removeTmpDirs dflags (Map.elems dirs)
-- | Delete (and forget) all registered temporary files, unless
-- -keep-tmp-files is on.
cleanTempFiles :: DynFlags -> IO ()
cleanTempFiles dflags =
    unless (gopt Opt_KeepTmpFiles dflags) $
    mask_ $ do
        pending <- atomicModifyIORef' (filesToClean dflags) $ \fs -> ([], fs)
        removeTmpFiles dflags pending
-- | Delete all registered temporary files except those listed in
-- @dont_delete@, which remain registered for later cleanup.
cleanTempFilesExcept :: DynFlags -> [FilePath] -> IO ()
cleanTempFilesExcept dflags dont_delete
   = unless (gopt Opt_KeepTmpFiles dflags)
   $ mask_
   $ do let ref = filesToClean dflags
        to_delete <- atomicModifyIORef' ref $ \files ->
            -- partition puts the files found in dont_delete first
            -- (these are kept registered); the rest are deleted.
            let (to_keep,to_delete) = partition (`elem` dont_delete) files
            in (to_keep,to_delete)
        removeTmpFiles dflags to_delete
-- Return a unique numeric temp file suffix
-- (post-incrementing the counter in 'nextTempSuffix';
-- see Note [Deterministic base name]).
newTempSuffix :: DynFlags -> IO Int
newTempSuffix dflags = atomicModifyIORef' (nextTempSuffix dflags) $ \n -> (n+1,n)
-- Find a temporary name that doesn't already exist.
-- The file is registered in 'filesToClean' for later deletion.
newTempName :: DynFlags -> Suffix -> IO FilePath
newTempName dflags extn
  = do d <- getTempDir dflags
       findTempName (d </> "ghc_") -- See Note [Deterministic base name]
  where
    findTempName :: FilePath -> IO FilePath
    findTempName prefix
      = do n <- newTempSuffix dflags
           let filename = prefix ++ show n <.> extn
           b <- doesFileExist filename
           if b then findTempName prefix -- collision: try the next suffix
                else do -- clean it up later
                        consIORef (filesToClean dflags) filename
                        return filename
-- | Like 'newTempName', but for a library: returns the triple
-- (dir </> "lib<base>.<extn>", dir, <base>), registered for cleanup.
newTempLibName :: DynFlags -> Suffix -> IO (FilePath, FilePath, String)
newTempLibName dflags extn
  = do d <- getTempDir dflags
       findTempName d ("ghc_")
  where
    findTempName :: FilePath -> String -> IO (FilePath, FilePath, String)
    findTempName dir prefix
      = do n <- newTempSuffix dflags -- See Note [Deterministic base name]
           let libname = prefix ++ show n
               filename = dir </> "lib" ++ libname <.> extn
           b <- doesFileExist filename
           if b then findTempName dir prefix
                else do -- clean it up later
                        consIORef (filesToClean dflags) filename
                        return (filename, dir, libname)
-- Return our temporary directory within tmp_dir, creating one if we
-- don't have one yet.
-- Thread-safe: directories are created speculatively and the loser of
-- any race deletes its own directory again.
getTempDir :: DynFlags -> IO FilePath
getTempDir dflags = do
    mapping <- readIORef dir_ref
    case Map.lookup tmp_dir mapping of
        Nothing -> do
            pid <- getProcessID
            let prefix = tmp_dir </> "ghc" ++ show pid ++ "_"
            mask_ $ mkTempDir prefix
        Just dir -> return dir
  where
    tmp_dir = tmpDir dflags
    dir_ref = dirsToClean dflags

    mkTempDir :: FilePath -> IO FilePath
    mkTempDir prefix = do
        n <- newTempSuffix dflags
        let our_dir = prefix ++ show n

        -- 1. Speculatively create our new directory.
        createDirectory our_dir

        -- 2. Update the dirsToClean mapping unless an entry already exists
        -- (i.e. unless another thread beat us to it).
        their_dir <- atomicModifyIORef' dir_ref $ \mapping ->
            case Map.lookup tmp_dir mapping of
                Just dir -> (mapping, Just dir)
                Nothing  -> (Map.insert tmp_dir our_dir mapping, Nothing)

        -- 3. If there was an existing entry, return it and delete the
        -- directory we created.  Otherwise return the directory we created.
        case their_dir of
            Nothing  -> do
                debugTraceMsg dflags 2 $
                    text "Created temporary directory:" <+> text our_dir
                return our_dir
            Just dir -> do
                removeDirectory our_dir
                return dir
      -- A name collision on disk (from an earlier run) retries with the
      -- next suffix; any other IOError is re-thrown.
      `catchIO` \e -> if isAlreadyExistsError e
                      then mkTempDir prefix else ioError e
-- Note [Deterministic base name]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- The filename of temporary files, especially the basename of C files, can end
-- up in the output in some form, e.g. as part of linker debug information. In the
-- interest of bit-wise exactly reproducible compilation (#4012), the basename of
-- the temporary file no longer contains random information (it used to contain
-- the process id).
--
-- This is ok, as the temporary directory used contains the pid (see getTempDir).
-- | Register files for deletion at the next temp-file cleanup.
addFilesToClean :: DynFlags -> [FilePath] -> IO ()
-- May include wildcards [used by DriverPipeline.run_phase SplitMangle]
addFilesToClean dflags new_files
    = atomicModifyIORef' (filesToClean dflags) $ \files -> (new_files++files, ())
-- | Remove the given temporary directories, tracing the deletion and
-- downgrading individual failures to warnings (see 'removeWith').
removeTmpDirs :: DynFlags -> [FilePath] -> IO ()
removeTmpDirs dflags dirs =
    traceCmd dflags "Deleting temp dirs"
             ("Deleting: " ++ unwords dirs)
             (mapM_ (removeWith dflags removeDirectory) dirs)
-- | Remove the given temporary files — except anything that looks like
-- a Haskell source file, which is loudly refused.
removeTmpFiles :: DynFlags -> [FilePath] -> IO ()
removeTmpFiles dflags fs
  = warnNon $
    traceCmd dflags "Deleting temp files"
             ("Deleting: " ++ unwords deletees)
             (mapM_ (removeWith dflags removeFile) deletees)
  where
    -- Flat out refuse to delete files that are likely to be source input
    -- files (is there a worse bug than having a compiler delete your source
    -- files?)
    --
    -- Deleting source files is a sign of a bug elsewhere, so prominently flag
    -- the condition.
    warnNon act
      | null non_deletees = act
      | otherwise = do
          putMsg dflags (text "WARNING - NOT deleting source files:" <+> hsep (map text non_deletees))
          act

    (non_deletees, deletees) = partition isHaskellUserSrcFilename fs
-- | Run @remover@ on @f@, downgrading any IOError to a -v2 trace
-- message instead of propagating it.
removeWith :: DynFlags -> (FilePath -> IO ()) -> FilePath -> IO ()
removeWith dflags remover f = remover f `catchIO` warn
  where
    warn e = debugTraceMsg dflags 2 msg
      where
        msg | isDoesNotExistError e
            = ptext (sLit "Warning: deleting non-existent") <+> text f
            | otherwise
            = ptext (sLit "Warning: exception raised when deleting")
              <+> text f <> colon
              $$ text (show e)
-----------------------------------------------------------------------------
-- Running an external program
-- | Run an external program with no output filtering and the inherited
-- environment; see 'runSomethingFiltered'.
runSomething :: DynFlags
             -> String          -- For -v message
             -> String          -- Command name (possibly a full path)
                                --      assumed already dos-ified
             -> [Option]        -- Arguments
                                --      runSomething will dos-ify them
             -> IO ()

runSomething dflags phase_name pgm args =
    runSomethingFiltered dflags id phase_name pgm args Nothing
-- | Run a command, placing the arguments in an external response file.
--
-- This command is used in order to avoid overlong command line arguments on
-- Windows. The command line arguments are first written to an external,
-- temporary response file, and then passed to the linker via @filepath.
-- response files for passing them in. See:
--
--     https://gcc.gnu.org/wiki/Response_Files
--     https://ghc.haskell.org/trac/ghc/ticket/10777
runSomethingResponseFile
  :: DynFlags -> (String->String) -> String -> String -> [Option]
  -> Maybe [(String,String)] -> IO ()

runSomethingResponseFile dflags filter_fn phase_name pgm args mb_env =
    runSomethingWith dflags phase_name pgm args $ \real_args -> do
        fp <- getResponseFile real_args
        -- NB: deliberately shadows the outer 'args'; the child process
        -- is only ever passed "@<response-file>".
        let args = ['@':fp]
        r <- builderMainLoop dflags filter_fn pgm args mb_env
        return (r,())
  where
    -- Write the (escaped) arguments, one per line, to a fresh temp
    -- file (cleaned up with the other temp files) and return its path.
    getResponseFile args = do
      fp <- newTempName dflags "rsp"
      withFile fp WriteMode $ \h -> do
          hSetEncoding h utf8
          hPutStr h $ unlines $ map escape args
      return fp

    -- Note: Response files have backslash-escaping, double quoting, and are
    -- whitespace separated (some implementations use newline, others any
    -- whitespace character). Therefore, escape any backslashes, newlines, and
    -- double quotes in the argument, and surround the content with double
    -- quotes.
    --
    -- Another possibility that could be considered would be to convert
    -- backslashes in the argument to forward slashes. This would generally do
    -- the right thing, since backslashes in general only appear in arguments
    -- as part of file paths on Windows, and the forward slash is accepted for
    -- those. However, escaping is more reliable, in case somehow a backslash
    -- appears in a non-file.
    escape x = concat
        [ "\""
        , concatMap
            (\c ->
                case c of
                    '\\' -> "\\\\"
                    '\n' -> "\\n"
                    '\"' -> "\\\""
                    _    -> [c])
            x
        , "\""
        ]
-- | Run an external program, piping its output through @filter_fn@
-- before it is logged; see 'builderMainLoop'.
runSomethingFiltered
  :: DynFlags -> (String->String) -> String -> String -> [Option]
  -> Maybe [(String,String)] -> IO ()

runSomethingFiltered dflags filter_fn phase_name pgm args mb_env =
    runSomethingWith dflags phase_name pgm args $ \real_args -> do
        code <- builderMainLoop dflags filter_fn pgm real_args mb_env
        return (code, ())
-- | Core runner: trace the command for -v output, run @io@ on the
-- filtered argument strings, and map failure to a GhcException via
-- 'handleProc'.
runSomethingWith
  :: DynFlags -> String -> String -> [Option]
  -> ([String] -> IO (ExitCode, a))
  -> IO a
runSomethingWith dflags phase_name pgm args io = do
    let real_args = filter notNull (map showOpt args)
        cmdLine = showCommandForUser pgm real_args
    traceCmd dflags phase_name cmdLine $ handleProc pgm phase_name $ io real_args
-- | Run the given action and interpret its exit code: a non-zero exit
-- becomes a 'ProgramError', and a missing executable becomes an
-- 'InstallationError'.
handleProc :: String -> String -> IO (ExitCode, r) -> IO r
handleProc pgm phase_name proc = do
    (rc, r) <- proc `catchIO` handler
    case rc of
      ExitSuccess{} -> return r
      ExitFailure n ->
        throwGhcExceptionIO $ ProgramError $
            "`" ++ takeBaseName pgm ++ "'" ++
            " failed in phase `" ++ phase_name ++ "'." ++
            " (Exit code: " ++ show n ++ ")"
  where
    handler err
      | IO.isDoesNotExistError err = does_not_exist
      | otherwise = throwGhcExceptionIO (ProgramError (show err))

    does_not_exist =
        throwGhcExceptionIO (InstallationError ("could not execute: " ++ pgm))
-- | Run an external builder process and forward its stdout/stderr,
-- filtered through @filter_fn@, to the 'log_action' in 'DynFlags'.
-- Two reader threads feed a shared channel; the main loop drains it
-- until both streams hit EOF and the process has exited, then returns
-- the process's exit code.
builderMainLoop :: DynFlags -> (String -> String) -> FilePath
                -> [String] -> Maybe [(String, String)]
                -> IO ExitCode
builderMainLoop dflags filter_fn pgm real_args mb_env = do
  chan <- newChan
  (hStdIn, hStdOut, hStdErr, hProcess) <- runInteractiveProcess pgm real_args Nothing mb_env

  -- and run a loop piping the output from the compiler to the log_action in DynFlags
  hSetBuffering hStdOut LineBuffering
  hSetBuffering hStdErr LineBuffering
  _ <- forkIO (readerProc chan hStdOut filter_fn)
  _ <- forkIO (readerProc chan hStdErr filter_fn)
  -- we don't want to finish until 2 streams have been completed
  -- (stdout and stderr)
  -- nor until 1 exit code has been retrieved.
  rc <- loop chan hProcess (2::Integer) (1::Integer) ExitSuccess
  -- after that, we're done here.
  hClose hStdIn
  hClose hStdOut
  hClose hStdErr
  return rc
  where
    -- @t@ counts streams still open, @p@ counts exit codes still to be
    -- fetched; we are done only when both reach zero.
    -- ToDo: we should really have a contingency plan in case any of
    -- the threads dies, such as a timeout.
    loop _ _ 0 0 exitcode = return exitcode
    loop chan hProcess t p exitcode = do
      mb_code <- if p > 0
                   then getProcessExitCode hProcess
                   else return Nothing
      case mb_code of
        Just code -> loop chan hProcess t (p-1) code
        Nothing
          | t > 0 -> do
              -- blocks until a reader thread delivers a message
              msg <- readChan chan
              case msg of
                BuildMsg msg -> do
                  log_action dflags dflags SevInfo noSrcSpan defaultUserStyle msg
                  loop chan hProcess t p exitcode
                BuildError loc msg -> do
                  log_action dflags dflags SevError (mkSrcSpan loc loc) defaultUserStyle msg
                  loop chan hProcess t p exitcode
                EOF ->
                  loop chan hProcess (t-1) p exitcode
          -- both streams closed; keep polling for the exit code
          | otherwise -> loop chan hProcess t p exitcode
-- | Read a process output handle line by line (after applying
-- @filter_fn@), turning recognised @file:line:col:@ diagnostics into
-- 'BuildError' messages (continuation lines, identified by leading
-- whitespace, are appended to the pending error) and everything else
-- into 'BuildMsg'.  Always writes a final 'EOF' to the channel, even
-- on exceptions.
readerProc :: Chan BuildMessage -> Handle -> (String -> String) -> IO ()
readerProc chan hdl filter_fn =
    (do str <- hGetContents hdl
        loop (linesPlatform (filter_fn str)) Nothing)
    `finally`
    writeChan chan EOF
        -- ToDo: check errors more carefully
        -- ToDo: in the future, the filter should be implemented as
        -- a stream transformer.
  where
    -- Second argument carries the error currently being accumulated.
    loop []     Nothing    = return ()
    loop []     (Just err) = writeChan chan err
    loop (l:ls) in_err =
        case in_err of
          Just err@(BuildError srcLoc msg)
            -- an indented line continues the previous diagnostic
            | leading_whitespace l -> do
                loop ls (Just (BuildError srcLoc (msg $$ text l)))
            | otherwise -> do
                writeChan chan err
                checkError l ls
          Nothing -> do
              checkError l ls
          _ -> panic "readerProc/loop"

    -- Decide whether @l@ starts a new diagnostic or is plain output.
    checkError l ls
       = case parseError l of
           Nothing -> do
               writeChan chan (BuildMsg (text l))
               loop ls Nothing
           Just (file, lineNum, colNum, msg) -> do
               let srcLoc = mkSrcLoc (mkFastString file) lineNum colNum
               loop ls (Just (BuildError srcLoc (text msg)))

    leading_whitespace [] = False
    leading_whitespace (x:_) = isSpace x
-- | Try to recognise a compiler diagnostic of the form
-- @file:line:col: message@ or @file:line: message@; the column
-- defaults to 0 when absent.  'Nothing' if the line does not match.
parseError :: String -> Maybe (String, Int, Int, String)
parseError s0 = do
    (filename, s1) <- breakColon s0
    (lineNum, s2)  <- breakIntColon s1
    -- the column component is optional
    case breakIntColon s2 of
      Just (columnNum, s3) -> Just (filename, lineNum, columnNum, s3)
      Nothing              -> Just (filename, lineNum, 0, s2)
-- | Split a string at its first colon, dropping the colon itself;
-- 'Nothing' when the string contains no colon.
breakColon :: String -> Maybe (String, String)
breakColon xs =
    case span (/= ':') xs of
      (before, ':' : after) -> Just (before, after)
      _                     -> Nothing
-- | Split at the first colon and read the prefix as a decimal 'Int'.
-- Fails when there is no colon, or the prefix is empty or contains
-- anything but ASCII digits.
breakIntColon :: String -> Maybe (Int, String)
breakIntColon xs =
    case break (== ':') xs of
      (digits, ':' : rest)
        | not (null digits)
        , all isAscii digits
        , all isDigit digits
        -> Just (read digits, rest)
      _ -> Nothing
-- | Messages sent from the reader threads back to the main loop in
-- 'builderMainLoop'.
data BuildMessage
  = BuildMsg !SDoc            -- ^ an ordinary output line
  | BuildError !SrcLoc !SDoc  -- ^ a diagnostic with its source location
  | EOF                       -- ^ a reader thread reached end of stream
-- | Announce the phase, trace the full command line (at verbosity 3),
-- flush stderr, and run the action; any 'IOException' is rethrown as a
-- 'ProgramError' mentioning the command line.
traceCmd :: DynFlags -> String -> String -> IO a -> IO a
-- trace the command (at two levels of verbosity)
traceCmd dflags phase_name cmd_line action
 = do { let verb = verbosity dflags
      ; showPass dflags phase_name
      ; debugTraceMsg dflags 3 (text cmd_line)
      ; case flushErr dflags of
          FlushErr io -> io
        -- And run it!
      ; action `catchIO` handle_exn verb
      }
 where
   handle_exn _verb exn = do { debugTraceMsg dflags 2 (char '\n')
                             ; debugTraceMsg dflags 2 (ptext (sLit "Failed:") <+> text cmd_line <+> text (show exn))
                             ; throwGhcExceptionIO (ProgramError (show exn))}
{-
************************************************************************
* *
\subsection{Support code}
* *
************************************************************************
-}
-----------------------------------------------------------------------------
-- Define getBaseDir :: IO (Maybe String)
-- | On Windows, derive GHC's @lib@ directory from the running
-- executable's path; on other platforms this yields 'Nothing' (the
-- configured default is used instead).
getBaseDir :: IO (Maybe String)
#if defined(mingw32_HOST_OS)
-- Assuming we are running ghc, accessed by path $(stuff)/bin/ghc.exe,
-- return the path $(stuff)/lib.
getBaseDir = try_size 2048 -- plenty, PATH_MAX is 512 under Win32.
  where
    -- GetModuleFileNameW truncates; retry with a doubled buffer until
    -- the result fits.
    try_size size = allocaArray (fromIntegral size) $ \buf -> do
        ret <- c_GetModuleFileName nullPtr buf size
        case ret of
          0 -> return Nothing
          _ | ret < size -> fmap (Just . rootDir) $ peekCWString buf
            | otherwise  -> try_size (size * 2)

    -- Turn $topdir/bin/ghc*.exe into $topdir/lib; panic on anything
    -- that does not look like a ghc executable path.
    rootDir s = case splitFileName $ normalise s of
                (d, ghc_exe)
                 | lower ghc_exe `elem` ["ghc.exe",
                                         "ghc-stage1.exe",
                                         "ghc-stage2.exe",
                                         "ghc-stage3.exe"] ->
                    case splitFileName $ takeDirectory d of
                    -- ghc is in $topdir/bin/ghc.exe
                    (d', bin) | lower bin == "bin" -> takeDirectory d' </> "lib"
                    _ -> fail
                _ -> fail
      where fail = panic ("can't decompose ghc.exe path: " ++ show s)
            lower = map toLower

foreign import WINDOWS_CCONV unsafe "windows.h GetModuleFileNameW"
  c_GetModuleFileName :: Ptr () -> CWString -> Word32 -> IO Word32
#else
getBaseDir = return Nothing
#endif
#ifdef mingw32_HOST_OS
-- The PID of the current process, via the CRT on Windows.
foreign import ccall unsafe "_getpid" getProcessID :: IO Int -- relies on Int == Int32 on Windows
#else
-- | The PID of the current process, via POSIX @getpid@.
getProcessID :: IO Int
getProcessID = System.Posix.Internals.c_getpid >>= return . fromIntegral
#endif
-- Divvy up text stream into lines, taking platform dependent
-- line termination into account.
linesPlatform :: String -> [String]
#if !defined(mingw32_HOST_OS)
-- On non-Windows platforms plain 'lines' suffices ('\n' terminators).
linesPlatform ls = lines ls
#else
-- On Windows, treat both "\r\n" and "\n" as line terminators.
linesPlatform "" = []
linesPlatform xs =
  case lineBreak xs of
    (as,xs1) -> as : linesPlatform xs1
  where
    -- split off the first line and the remainder after its terminator
    lineBreak "" = ("","")
    lineBreak ('\r':'\n':xs) = ([],xs)
    lineBreak ('\n':xs) = ([],xs)
    lineBreak (x:xs) = let (as,bs) = lineBreak xs in (x:as,bs)
#endif
-- | Link the given object files and package dependencies into a shared
-- library, using the platform's native convention: a DLL on Windows, a
-- dylib on Darwin, an ELF DSO elsewhere; iOS is rejected outright.
linkDynLib :: DynFlags -> [String] -> [UnitId] -> IO ()
linkDynLib dflags0 o_files dep_packages
 = do
    let -- This is a rather ugly hack to fix dynamically linked
        -- GHC on Windows. If GHC is linked with -threaded, then
        -- it links against libHSrts_thr. But if base is linked
        -- against libHSrts, then both end up getting loaded,
        -- and things go wrong. We therefore link the libraries
        -- with the same RTS flags that we link GHC with.
        dflags1 = if cGhcThreaded then addWay' WayThreaded dflags0
                                  else dflags0
        dflags2 = if cGhcDebugged then addWay' WayDebug dflags1
                                  else dflags1
        dflags = updateWays dflags2

        verbFlags = getVerbFlags dflags
        o_file = outputFile dflags

    pkgs <- getPreloadPackagesAnd dflags dep_packages

    let pkg_lib_paths = collectLibraryPaths pkgs
    let pkg_lib_path_opts = concatMap get_pkg_lib_path_opts pkg_lib_paths
        -- On ELF/Mach-O targets with system-dependent dynamic loading
        -- (and no -static), also embed an rpath entry per library dir.
        get_pkg_lib_path_opts l
         | ( osElfTarget (platformOS (targetPlatform dflags)) ||
             osMachOTarget (platformOS (targetPlatform dflags)) ) &&
           dynLibLoader dflags == SystemDependent &&
           not (gopt Opt_Static dflags)
            = ["-L" ++ l, "-Wl,-rpath", "-Wl," ++ l]
         | otherwise = ["-L" ++ l]

    let lib_paths = libraryPaths dflags
    let lib_path_opts = map ("-L"++) lib_paths

    -- We don't want to link our dynamic libs against the RTS package,
    -- because the RTS lib comes in several flavours and we want to be
    -- able to pick the flavour when a binary is linked.
    -- On Windows we need to link the RTS import lib as Windows does
    -- not allow undefined symbols.
    -- The RTS library path is still added to the library search path
    -- above in case the RTS is being explicitly linked in (see #3807).
    let platform = targetPlatform dflags
        os = platformOS platform
        pkgs_no_rts = case os of
                      OSMinGW32 ->
                          pkgs
                      _ ->
                          filter ((/= rtsUnitId) . packageConfigId) pkgs
    let pkg_link_opts = let (package_hs_libs, extra_libs, other_flags) = collectLinkOpts dflags pkgs_no_rts
                        in package_hs_libs ++ extra_libs ++ other_flags

    -- probably _stub.o files
    -- and last temporary shared object file
    let extra_ld_inputs = ldInputs dflags

    -- frameworks (Darwin-style -framework flags)
    pkg_framework_opts <- getPkgFrameworkOpts dflags platform
                                              (map unitId pkgs)
    let framework_opts = getFrameworkOpts dflags platform

    case os of
        OSMinGW32 -> do
            -------------------------------------------------------------
            -- Making a DLL
            -------------------------------------------------------------
            let output_fn = case o_file of
                            Just s -> s
                            Nothing -> "HSdll.dll"

            runLink dflags (
                map Option verbFlags
             ++ [ Option "-o"
                , FileOption "" output_fn
                , Option "-shared"
                ] ++
                [ FileOption "-Wl,--out-implib=" (output_fn ++ ".a")
                | gopt Opt_SharedImplib dflags
                ]
             ++ map (FileOption "") o_files

             -- Permit the linker to auto link _symbol to _imp_symbol
             -- This lets us link against DLLs without needing an "import library"
             ++ [Option "-Wl,--enable-auto-import"]

             ++ extra_ld_inputs
             ++ map Option (
                    lib_path_opts
                 ++ pkg_lib_path_opts
                 ++ pkg_link_opts
                ))
        OSDarwin -> do
            -------------------------------------------------------------------
            -- Making a darwin dylib
            -------------------------------------------------------------------
            -- About the options used for Darwin:
            -- -dynamiclib
            --   Apple's way of saying -shared
            -- -undefined dynamic_lookup:
            --   Without these options, we'd have to specify the correct
            --   dependencies for each of the dylibs. Note that we could
            --   (and should) do without this for all libraries except
            --   the RTS; all we need to do is to pass the correct
            --   HSfoo_dyn.dylib files to the link command.
            --   This feature requires Mac OS X 10.3 or later; there is
            --   a similar feature, -flat_namespace -undefined suppress,
            --   which works on earlier versions, but it has other
            --   disadvantages.
            -- -single_module
            --   Build the dynamic library as a single "module", i.e. no
            --   dynamic binding nonsense when referring to symbols from
            --   within the library. The NCG assumes that this option is
            --   specified (on i386, at least).
            -- -install_name
            --   Mac OS/X stores the path where a dynamic library is (to
            --   be) installed in the library itself.  It's called the
            --   "install name" of the library. Then any library or
            --   executable that links against it before it's installed
            --   will search for it in its ultimate install location.
            --   By default we set the install name to the absolute path
            --   at build time, but it can be overridden by the
            --   -dylib-install-name option passed to ghc. Cabal does
            --   this.
            -------------------------------------------------------------------
            let output_fn = case o_file of { Just s -> s; Nothing -> "a.out"; }

            instName <- case dylibInstallName dflags of
                Just n -> return n
                Nothing -> return $ "@rpath" `combine` (takeFileName output_fn)
            runLink dflags (
                map Option verbFlags
             ++ [ Option "-dynamiclib"
                , Option "-o"
                , FileOption "" output_fn
                ]
             ++ map Option o_files
             ++ [ Option "-undefined",
                  Option "dynamic_lookup",
                  Option "-single_module" ]
             ++ (if platformArch platform == ArchX86_64
                 then [ ]
                 else [ Option "-Wl,-read_only_relocs,suppress" ])
             ++ [ Option "-install_name", Option instName ]
             ++ map Option lib_path_opts
             ++ extra_ld_inputs
             ++ map Option framework_opts
             ++ map Option pkg_lib_path_opts
             ++ map Option pkg_link_opts
             ++ map Option pkg_framework_opts
              )
        OSiOS -> throwGhcExceptionIO (ProgramError "dynamic libraries are not supported on iOS target")
        _ -> do
            -------------------------------------------------------------------
            -- Making a DSO
            -------------------------------------------------------------------
            let output_fn = case o_file of { Just s -> s; Nothing -> "a.out"; }
            let bsymbolicFlag = -- we need symbolic linking to resolve
                                -- non-PIC intra-package-relocations
                                ["-Wl,-Bsymbolic"]

            runLink dflags (
                map Option verbFlags
             ++ [ Option "-o"
                , FileOption "" output_fn
                ]
             ++ map Option o_files
             ++ [ Option "-shared" ]
             ++ map Option bsymbolicFlag
                -- Set the library soname. We use -h rather than -soname as
                -- Solaris 10 doesn't support the latter:
             ++ [ Option ("-Wl,-h," ++ takeFileName output_fn) ]
             ++ extra_ld_inputs
             ++ map Option lib_path_opts
             ++ map Option pkg_lib_path_opts
             ++ map Option pkg_link_opts
              )
-- | Framework search-path (@-F@) and @-framework@ flags contributed by
-- the given packages; empty on platforms without framework support.
getPkgFrameworkOpts :: DynFlags -> Platform -> [UnitId] -> IO [String]
getPkgFrameworkOpts dflags platform dep_packages
  | not (platformUsesFrameworks platform) = return []
  | otherwise = do
      paths <- getPackageFrameworkPath dflags dep_packages
      fws   <- getPackageFrameworks dflags dep_packages
      let pathOpts = map ("-F" ++) paths
          fwOpts   = concatMap (\fw -> ["-framework", fw]) fws
      return (pathOpts ++ fwOpts)
-- | Framework search-path (@-F@) and @-framework@ flags requested on
-- the command line; empty on platforms without framework support.
getFrameworkOpts :: DynFlags -> Platform -> [String]
getFrameworkOpts dflags platform
  | platformUsesFrameworks platform = path_opts ++ fw_opts
  | otherwise                       = []
  where
    path_opts = map ("-F" ++) (frameworkPaths dflags)
    -- Command-line frameworks accumulate in reverse order, so restore
    -- the original ordering here.
    fw_opts = concatMap (\fw -> ["-framework", fw])
                        (reverse (cmdlineFrameworks dflags))
| AlexanderPankiv/ghc | compiler/main/SysTools.hs | bsd-3-clause | 70,435 | 97 | 22 | 22,950 | 12,199 | 6,235 | 5,964 | 989 | 12 |
module Random where
import System.Random.MWC
import Control.Monad.Primitive
import Data.Array.Accelerate as A
import Data.Array.Accelerate.Array.Data as A
import Data.Array.Accelerate.Array.Sugar as Sugar
-- | Generate an array of random values in IO, seeding the generator
-- from the system's entropy source; the filled array is forced before
-- being returned.
randomArrayIO :: (Shape sh, Elt e) => (sh -> GenIO -> IO e) -> sh -> IO (Array sh e)
randomArrayIO f sh = withSystemRandom . asGenIO $ \gen -> do
    seed <- save gen
    let arr = randomArrayOfWithSeed f seed sh
    return $! arr
-- | Generate an array of random values using the supplied generator function.
-- The generator for variates is initialised with a fixed seed, so the
-- result is reproducible across runs.
randomArrayOf :: (Shape sh, Elt e) => (sh -> GenIO -> IO e) -> sh -> Array sh e
randomArrayOf f sh
  = let
        n = Sugar.size sh
        (adata, _) = runArrayData $ do
          gen <- create            -- fixed-seed generator
          arr <- newArrayData n
          -- write one variate per index, in row-major order
          let write ix = unsafeWriteArrayData arr (Sugar.toIndex sh ix)
                       . fromElt =<< f ix gen
          iter sh write (>>) (return ())
          -- NOTE(review): the second component appears unused by
          -- runArrayData here — confirm against the accelerate API
          return (arr, undefined)
    in adata `seq` Array (fromElt sh) adata
-- | Generate an array of random values using a supplied generator function and
-- seed value; the same seed always produces the same array.
randomArrayOfWithSeed :: (Shape sh, Elt e) => (sh -> GenIO -> IO e) -> Seed -> sh -> Array sh e
randomArrayOfWithSeed f seed sh
  = let
        n = Sugar.size sh
        (adata, _) = runArrayData $ do
          gen <- restore seed      -- generator state comes from the caller's seed
          arr <- newArrayData n
          -- write one variate per index, in row-major order
          let write ix = unsafeWriteArrayData arr (Sugar.toIndex sh ix)
                       . fromElt =<< f ix gen
          iter sh write (>>) (return ())
          -- NOTE(review): the second component appears unused by
          -- runArrayData here — confirm against the accelerate API
          return (arr, undefined)
    in adata `seq` Array (fromElt sh) adata
| aesadde/AccObsBenchmarks | Sort/haskell/src/Random.hs | bsd-3-clause | 1,926 | 0 | 20 | 752 | 551 | 282 | 269 | 35 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-| DRBD proc file parser
This module holds the definition of the parser that extracts status
information from the DRBD proc file.
-}
{-
Copyright (C) 2012 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.Block.Drbd.Parser (drbdStatusParser, commaIntParser) where
import Control.Applicative ((<*>), (*>), (<*), (<$>), (<|>), pure)
import qualified Data.Attoparsec.Text as A
import qualified Data.Attoparsec.Combinator as AC
import Data.Attoparsec.Text (Parser)
import Data.List
import Data.Maybe
import Data.Text (Text, unpack)
import Ganeti.Block.Drbd.Types
-- | Skip zero or more horizontal spaces.  Unlike 'A.skipSpace' this
-- never consumes newline characters, and it never fails.
skipSpaces :: Parser ()
skipSpaces = A.takeWhile A.isHorizontalSpace *> pure ()
-- | Consume optional leading spaces followed by the literal @s@, then
-- run @parser@ and yield its result.
skipSpacesAndString :: Text -> Parser a -> Parser a
skipSpacesAndString s parser = skipSpaces *> (A.string s *> parser)
-- | True for characters that (possibly improperly) terminate a line:
-- a NUL byte or a genuine end-of-line character.
isBadEndOfLine :: Char -> Bool
isBadEndOfLine c
  | c == '\0' = True
  | otherwise = A.isEndOfLine c
-- | Wrap a parser so it always succeeds: a successful parse is wrapped
-- in 'Just', and failure yields 'Nothing' instead of propagating.
optional :: Parser a -> Parser (Maybe a)
optional parser = AC.option Nothing (Just <$> parser)
-- | The parser for a whole DRBD status file: the version header
-- followed by one section per device, consuming the entire input.
drbdStatusParser :: [DrbdInstMinor] -> Parser DRBDStatus
drbdStatusParser instMinor =
  DRBDStatus <$> versionInfoParser
             <*> deviceParser instMinor `AC.manyTill` A.endOfInput
             <* A.endOfInput
-- | The parser for the version information lines.  Every component is
-- optional, but the parse fails if none of them is present at all.
versionInfoParser :: Parser VersionInfo
versionInfoParser = do
  versionF <- optional versionP
  apiF <- optional apiP
  protoF <- optional protoP
  srcVersionF <- optional srcVersion
  ghF <- fmap unpack <$> optional gh
  builderF <- fmap unpack <$> optional builder
  if isNothing versionF
     && isNothing apiF
     && isNothing protoF
     && isNothing srcVersionF
     && isNothing ghF
     && isNothing builderF
    then fail "versionInfo"
    else pure $ VersionInfo versionF apiF protoF srcVersionF ghF builderF
  where -- "version: <token>"
        versionP =
          A.string "version:"
          *> skipSpaces
          *> fmap unpack (A.takeWhile $ not . A.isHorizontalSpace)
        -- "(api:<n>" — everything up to the '/' separator
        apiP =
          skipSpacesAndString "(api:" . fmap unpack $ A.takeWhile (/= '/')
        -- "/proto:<n>)" — the rest of the line is discarded
        protoP =
          A.string "/proto:"
          *> fmap Data.Text.unpack (A.takeWhile (/= ')'))
          <* A.takeTill A.isEndOfLine <* A.endOfLine
        -- "srcversion: <token>" on a line of its own
        srcVersion =
          A.string "srcversion:"
          *> AC.skipMany1 A.space
          *> fmap unpack (A.takeTill A.isEndOfLine)
          <* A.endOfLine
        -- "GIT-hash: <hash>"
        gh =
          A.string "GIT-hash:"
          *> skipSpaces
          *> A.takeWhile (not . A.isHorizontalSpace)
        -- "build by <builder>" up to end of line
        builder =
          skipSpacesAndString "build by" $
            skipSpaces
            *> A.takeTill A.isEndOfLine
            <* A.endOfLine
-- | The parser for a (multi-line) string representing a device.
-- An 'Unconfigured' device only carries its number; otherwise the
-- roles, disk states, protocol, I/O flags, performance counters and
-- optional sync/resync/act_log sections are parsed, and the device is
-- matched against @instMinor@ to recover its instance name.
deviceParser :: [DrbdInstMinor] -> Parser DeviceInfo
deviceParser instMinor = do
  deviceNum <- skipSpaces *> A.decimal <* A.char ':'
  cs <- skipSpacesAndString "cs:" connStateParser
  if cs == Unconfigured
    then do
      _ <- additionalEOL
      return $ UnconfiguredDevice deviceNum
    else do
      ro <- skipSpaces *> skipRoleString *> localRemoteParser roleParser
      ds <- skipSpacesAndString "ds:" $ localRemoteParser diskStateParser
      replicProtocol <- A.space *> A.anyChar
      io <- skipSpaces *> ioFlagsParser <* A.skipWhile isBadEndOfLine
      pIndicators <- perfIndicatorsParser
      syncS <- conditionalSyncStatusParser cs
      reS <- optional resyncParser
      act <- optional actLogParser
      _ <- additionalEOL
      -- look up the instance owning this minor, if any
      let inst = find ((deviceNum ==) . dimMinor) instMinor
          iName = fmap dimInstName inst
      return $ DeviceInfo deviceNum cs ro ds replicProtocol io pIndicators
                          syncS reS act iName
  where -- a sync-status section only exists while a sync is running
        conditionalSyncStatusParser SyncSource = Just <$> syncStatusParser
        conditionalSyncStatusParser SyncTarget = Just <$> syncStatusParser
        conditionalSyncStatusParser _ = pure Nothing
        -- older DRBD versions print "st:" instead of "ro:"
        skipRoleString = A.string "ro:" <|> A.string "st:"
        resyncParser = skipSpacesAndString "resync:" additionalInfoParser
        actLogParser = skipSpacesAndString "act_log:" additionalInfoParser
        additionalEOL = A.skipWhile A.isEndOfLine
-- | The parser for the connection state.  Alternatives are tried in an
-- order that avoids any prefix of a later literal matching first.
connStateParser :: Parser ConnState
connStateParser =
  standAlone
  <|> disconnecting
  <|> unconnected
  <|> timeout
  <|> brokenPipe
  <|> networkFailure
  <|> protocolError
  <|> tearDown
  <|> wfConnection
  <|> wfReportParams
  <|> connected
  <|> startingSyncS
  <|> startingSyncT
  <|> wfBitMapS
  <|> wfBitMapT
  <|> wfSyncUUID
  <|> syncSource
  <|> syncTarget
  <|> pausedSyncS
  <|> pausedSyncT
  <|> verifyS
  <|> verifyT
  <|> unconfigured
  where standAlone     = A.string "StandAlone"     *> pure StandAlone
        -- BUG FIX: this previously matched the misspelled literal
        -- "Disconnectiog", which never occurs in DRBD output, so the
        -- Disconnecting state could never be recognised.
        disconnecting  = A.string "Disconnecting"  *> pure Disconnecting
        unconnected    = A.string "Unconnected"    *> pure Unconnected
        timeout        = A.string "Timeout"        *> pure Timeout
        brokenPipe     = A.string "BrokenPipe"     *> pure BrokenPipe
        networkFailure = A.string "NetworkFailure" *> pure NetworkFailure
        protocolError  = A.string "ProtocolError"  *> pure ProtocolError
        tearDown       = A.string "TearDown"       *> pure TearDown
        wfConnection   = A.string "WFConnection"   *> pure WFConnection
        wfReportParams = A.string "WFReportParams" *> pure WFReportParams
        connected      = A.string "Connected"      *> pure Connected
        startingSyncS  = A.string "StartingSyncS"  *> pure StartingSyncS
        startingSyncT  = A.string "StartingSyncT"  *> pure StartingSyncT
        wfBitMapS      = A.string "WFBitMapS"      *> pure WFBitMapS
        wfBitMapT      = A.string "WFBitMapT"      *> pure WFBitMapT
        wfSyncUUID     = A.string "WFSyncUUID"     *> pure WFSyncUUID
        syncSource     = A.string "SyncSource"     *> pure SyncSource
        syncTarget     = A.string "SyncTarget"     *> pure SyncTarget
        pausedSyncS    = A.string "PausedSyncS"    *> pure PausedSyncS
        pausedSyncT    = A.string "PausedSyncT"    *> pure PausedSyncT
        verifyS        = A.string "VerifyS"        *> pure VerifyS
        verifyT        = A.string "VerifyT"        *> pure VerifyT
        unconfigured   = A.string "Unconfigured"   *> pure Unconfigured
-- | Parse two values of the same type separated by a @\'/\'@.  The
-- first is taken to be the local value, the second the remote one.
localRemoteParser :: Parser a -> Parser (LocalRemote a)
localRemoteParser parser = do
  localVal <- parser
  _ <- A.char '/'
  remoteVal <- parser
  return $ LocalRemote localVal remoteVal
-- | The parser for resource roles.
roleParser :: Parser Role
roleParser = AC.choice
  [ A.string "Primary"   *> pure Primary
  , A.string "Secondary" *> pure Secondary
  , A.string "Unknown"   *> pure Unknown
  ]
-- | The parser for disk states.  Alternatives are tried in the same
-- order as before, expressed as a single 'AC.choice' table.
diskStateParser :: Parser DiskState
diskStateParser = AC.choice
  [ A.string "Diskless"     *> pure Diskless
  , A.string "Attaching"    *> pure Attaching
  , A.string "Failed"       *> pure Failed
  , A.string "Negotiating"  *> pure Negotiating
  , A.string "Inconsistent" *> pure Inconsistent
  , A.string "Outdated"     *> pure Outdated
  , A.string "DUnknown"     *> pure DUnknown
  , A.string "Consistent"   *> pure Consistent
  , A.string "UpToDate"     *> pure UpToDate
  ]
-- | The parser for the I/O flags field: everything up to a NUL byte or
-- an end-of-line character.
ioFlagsParser :: Parser String
ioFlagsParser = unpack <$> A.takeWhile (not . isBadEndOfLine)
-- | The parser for performance indicators: the fixed sequence of
-- "key:value" counters on the statistics line.  The trailing @ep@,
-- @wo@ and @oos@ fields only exist in newer DRBD versions, hence
-- optional.
perfIndicatorsParser :: Parser PerfIndicators
perfIndicatorsParser =
  PerfIndicators
    <$> skipSpacesAndString "ns:" A.decimal
    <*> skipSpacesAndString "nr:" A.decimal
    <*> skipSpacesAndString "dw:" A.decimal
    <*> skipSpacesAndString "dr:" A.decimal
    <*> skipSpacesAndString "al:" A.decimal
    <*> skipSpacesAndString "bm:" A.decimal
    <*> skipSpacesAndString "lo:" A.decimal
    <*> skipSpacesAndString "pe:" A.decimal
    <*> skipSpacesAndString "ua:" A.decimal
    <*> skipSpacesAndString "ap:" A.decimal
    <*> optional (skipSpacesAndString "ep:" A.decimal)
    <*> optional (skipSpacesAndString "wo:" A.anyChar)
    <*> optional (skipSpacesAndString "oos:" A.decimal)
    <* skipSpaces <* A.endOfLine
-- | The parser for the synchronization status: the progress bar,
-- percentage, synced/total sizes, estimated finish time, speed (the
-- parenthesised average is discarded) and optional "want:" rate.
syncStatusParser :: Parser SyncStatus
syncStatusParser = do
  _ <- statusBarParser
  percent <-
    skipSpacesAndString "sync'ed:" $ skipSpaces *> A.double <* A.char '%'
  partSyncSize <- skipSpaces *> A.char '(' *> A.decimal
  totSyncSize <- A.char '/' *> A.decimal <* A.char ')'
  sizeUnit <- sizeUnitParser <* optional A.endOfLine
  timeToEnd <- skipSpacesAndString "finish:" $ skipSpaces *> timeParser
  sp <-
    skipSpacesAndString "speed:" $
      skipSpaces
      *> commaIntParser
      <* skipSpaces
      <* A.char '('   -- the parenthesised average speed is ignored
      <* commaIntParser
      <* A.char ')'
  -- "want:" is only printed when a sync rate was requested
  w <- skipSpacesAndString "want:" (
         skipSpaces
         *> (Just <$> commaIntParser)
       )
       <|> pure Nothing
  sSizeUnit <- skipSpaces *> sizeUnitParser
  sTimeUnit <- A.char '/' *> timeUnitParser
  _ <- A.endOfLine
  return $
    SyncStatus percent partSyncSize totSyncSize sizeUnit timeToEnd sp w
               sSizeUnit sTimeUnit
-- | Recognise (and discard) the sync status bar, e.g. @[=====>....]@.
statusBarParser :: Parser ()
statusBarParser = do
  skipSpaces
  _ <- A.char '['
  A.skipWhile (== '=')
  A.skipWhile (== '>')
  A.skipWhile (== '.')
  _ <- A.char ']'
  return ()
-- | Parse a data size unit (only the units actually found in DRBD
-- files are implemented).
sizeUnitParser :: Parser SizeUnit
sizeUnitParser =
      (A.string "K" *> pure KiloByte)
  <|> (A.string "M" *> pure MegaByte)
-- | Parse a time span in @hh:mm:ss@ format.
timeParser :: Parser Time
timeParser = do
  hours <- A.decimal :: Parser Int
  _ <- A.char ':'
  minutes <- A.decimal :: Parser Int
  _ <- A.char ':'
  seconds <- A.decimal :: Parser Int
  return $ Time hours minutes seconds
-- | Parse a time unit (only \"sec\", the sole unit actually found in
-- DRBD files, is implemented).
timeUnitParser :: Parser TimeUnit
timeUnitParser = A.string "sec" *> pure Second
-- | Parse an integer that may use ',' as a thousands separator every
-- three digits, as DRBD prints but standard Haskell parsing does not
-- accept.  If a number starts with more than three digits and no
-- comma, only the first three digits are consumed; the rest is left
-- for further parsing.
commaIntParser :: Parser Int
commaIntParser = do
  leading <- AC.choice [AC.count n A.digit | n <- [3, 2, 1]]
  commaIntHelper (read leading)
-- | Helper for 'commaIntParser': greedily consume @,ddd@ triplets,
-- folding each onto the accumulator; yields the accumulator when no
-- further triplet follows.
commaIntHelper :: Int -> Parser Int
commaIntHelper acc =
    (do _ <- A.char ','
        triplet <- AC.count 3 A.digit
        commaIntHelper (acc * 1000 + (read triplet :: Int)))
    <|> pure acc
-- | Parser for the additional information (resync/act_log statistics)
-- provided by DRBD <= 8.0: "used:a/b hits:... changed:...".
additionalInfoParser :: Parser AdditionalInfo
additionalInfoParser = AdditionalInfo
  <$> skipSpacesAndString "used:" A.decimal
  <*> (A.char '/' *> A.decimal)
  <*> skipSpacesAndString "hits:" A.decimal
  <*> skipSpacesAndString "misses:" A.decimal
  <*> skipSpacesAndString "starving:" A.decimal
  <*> skipSpacesAndString "dirty:" A.decimal
  <*> skipSpacesAndString "changed:" A.decimal
  <* A.endOfLine
| dblia/nosql-ganeti | src/Ganeti/Block/Drbd/Parser.hs | gpl-2.0 | 13,103 | 0 | 26 | 3,130 | 2,904 | 1,429 | 1,475 | 265 | 4 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.SSM.Types
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
module Network.AWS.SSM.Types
(
-- * Service
SSM
-- ** Error
, JSONError
-- * CreateAssociationBatchRequestEntry
, CreateAssociationBatchRequestEntry
, createAssociationBatchRequestEntry
, cabreInstanceId
, cabreName
-- * DocumentFilter
, DocumentFilter
, documentFilter
, dfKey
, dfValue
-- * AssociationDescription
, AssociationDescription
, associationDescription
, adDate
, adInstanceId
, adName
, adStatus
-- * AssociationStatusName
, AssociationStatusName (..)
-- * DocumentFilterKey
, DocumentFilterKey (..)
-- * DocumentDescription
, DocumentDescription
, documentDescription
, dd1CreatedDate
, dd1Name
, dd1Sha1
, dd1Status
-- * AssociationFilter
, AssociationFilter
, associationFilter
, afKey
, afValue
-- * DocumentIdentifier
, DocumentIdentifier
, documentIdentifier
, diName
-- * Fault
, Fault (..)
-- * AssociationStatus
, AssociationStatus
, associationStatus
, asAdditionalInfo
, asDate
, asMessage
, asName
-- * DocumentStatus
, DocumentStatus (..)
-- * AssociationFilterKey
, AssociationFilterKey (..)
-- * FailedCreateAssociation
, FailedCreateAssociation
, failedCreateAssociation
, fcaEntry
, fcaFault
, fcaMessage
-- * Association
, Association
, association
, aInstanceId
, aName
) where
import Network.AWS.Prelude
import Network.AWS.Signing
import qualified GHC.Exts
-- | Version @2014-11-06@ of the Amazon Simple Systems Management Service service.
data SSM

-- Service definition: SigV4 signing, JSON errors, and exponential
-- retry on server-side failures.
instance AWSService SSM where
    type Sg SSM = V4
    type Er SSM = JSONError

    service = service'
      where
        service' :: Service SSM
        service' = Service
            { _svcAbbrev       = "SSM"
            , _svcPrefix       = "ssm"
            , _svcVersion      = "2014-11-06"
            , _svcTargetPrefix = Just "AmazonSSM"
            , _svcJSONVersion  = Just "1.1"
            , _svcHandle       = handle
            , _svcRetry        = retry
            }

        handle :: Status
               -> Maybe (LazyByteString -> ServiceError JSONError)
        handle = jsonError statusSuccess service'

        -- 50ms base, doubling, at most 5 attempts
        retry :: Retry SSM
        retry = Exponential
            { _retryBase     = 0.05
            , _retryGrowth   = 2
            , _retryAttempts = 5
            , _retryCheck    = check
            }

        -- NOTE(review): the error code 'e' is bound but never
        -- consulted; retry is decided on the HTTP status alone.
        check :: Status
              -> JSONError
              -> Bool
        check (statusCode -> s) (awsErrorCode -> e)
            | s == 500  = True -- General Server Error
            | s == 509  = True -- Limit Exceeded
            | s == 503  = True -- Service Unavailable
            | otherwise = False
data CreateAssociationBatchRequestEntry = CreateAssociationBatchRequestEntry
    { _cabreInstanceId :: Maybe Text
    , _cabreName       :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | Build a 'CreateAssociationBatchRequestEntry' with every field unset.
--
-- Fields (via lenses):
--
-- * 'cabreInstanceId' @::@ 'Maybe' 'Text'
--
-- * 'cabreName' @::@ 'Maybe' 'Text'
createAssociationBatchRequestEntry :: CreateAssociationBatchRequestEntry
createAssociationBatchRequestEntry = CreateAssociationBatchRequestEntry
    { _cabreInstanceId = Nothing
    , _cabreName       = Nothing
    }

-- | The ID of the instance.
cabreInstanceId :: Lens' CreateAssociationBatchRequestEntry (Maybe Text)
cabreInstanceId = lens _cabreInstanceId (\rec v -> rec { _cabreInstanceId = v })

-- | The name of the configuration document.
cabreName :: Lens' CreateAssociationBatchRequestEntry (Maybe Text)
cabreName = lens _cabreName (\rec v -> rec { _cabreName = v })

instance FromJSON CreateAssociationBatchRequestEntry where
    parseJSON = withObject "CreateAssociationBatchRequestEntry" $ \o ->
        CreateAssociationBatchRequestEntry
            <$> o .:? "InstanceId"
            <*> o .:? "Name"

instance ToJSON CreateAssociationBatchRequestEntry where
    toJSON CreateAssociationBatchRequestEntry{..} = object
        [ "Name"       .= _cabreName
        , "InstanceId" .= _cabreInstanceId
        ]
data DocumentFilter = DocumentFilter
    { _dfKey   :: DocumentFilterKey
    , _dfValue :: Text
    } deriving (Eq, Read, Show)

-- | Build a 'DocumentFilter' from its two required fields.
--
-- Fields (via lenses):
--
-- * 'dfKey' @::@ 'DocumentFilterKey'
--
-- * 'dfValue' @::@ 'Text'
documentFilter :: DocumentFilterKey -- ^ 'dfKey'
               -> Text              -- ^ 'dfValue'
               -> DocumentFilter
documentFilter key val = DocumentFilter
    { _dfKey   = key
    , _dfValue = val
    }

-- | The name of the filter.
dfKey :: Lens' DocumentFilter DocumentFilterKey
dfKey = lens _dfKey (\rec v -> rec { _dfKey = v })

-- | The value of the filter.
dfValue :: Lens' DocumentFilter Text
dfValue = lens _dfValue (\rec v -> rec { _dfValue = v })

instance FromJSON DocumentFilter where
    parseJSON = withObject "DocumentFilter" $ \o ->
        DocumentFilter
            <$> o .: "key"
            <*> o .: "value"

instance ToJSON DocumentFilter where
    toJSON DocumentFilter{..} = object
        [ "key"   .= _dfKey
        , "value" .= _dfValue
        ]
data AssociationDescription = AssociationDescription
    { _adDate       :: Maybe POSIX
    , _adInstanceId :: Maybe Text
    , _adName       :: Maybe Text
    , _adStatus     :: Maybe AssociationStatus
    } deriving (Eq, Read, Show)

-- | Build an 'AssociationDescription' with every field unset.
--
-- Fields (via lenses):
--
-- * 'adDate' @::@ 'Maybe' 'UTCTime'
--
-- * 'adInstanceId' @::@ 'Maybe' 'Text'
--
-- * 'adName' @::@ 'Maybe' 'Text'
--
-- * 'adStatus' @::@ 'Maybe' 'AssociationStatus'
associationDescription :: AssociationDescription
associationDescription = AssociationDescription
    { _adDate       = Nothing
    , _adInstanceId = Nothing
    , _adName       = Nothing
    , _adStatus     = Nothing
    }

-- | The date when the association was made.  Stored internally as 'POSIX';
-- 'mapping' '_Time' exposes it to callers as 'UTCTime'.
adDate :: Lens' AssociationDescription (Maybe UTCTime)
adDate = lens _adDate (\rec v -> rec { _adDate = v }) . mapping _Time

-- | The ID of the instance.
adInstanceId :: Lens' AssociationDescription (Maybe Text)
adInstanceId = lens _adInstanceId (\rec v -> rec { _adInstanceId = v })

-- | The name of the configuration document.
adName :: Lens' AssociationDescription (Maybe Text)
adName = lens _adName (\rec v -> rec { _adName = v })

-- | The association status.
adStatus :: Lens' AssociationDescription (Maybe AssociationStatus)
adStatus = lens _adStatus (\rec v -> rec { _adStatus = v })

instance FromJSON AssociationDescription where
    parseJSON = withObject "AssociationDescription" $ \o ->
        AssociationDescription
            <$> o .:? "Date"
            <*> o .:? "InstanceId"
            <*> o .:? "Name"
            <*> o .:? "Status"

instance ToJSON AssociationDescription where
    toJSON AssociationDescription{..} = object
        [ "Name"       .= _adName
        , "InstanceId" .= _adInstanceId
        , "Date"       .= _adDate
        , "Status"     .= _adStatus
        ]
-- | Status values an association can report.
data AssociationStatusName
    = Failed  -- ^ Failed
    | Pending -- ^ Pending
    | Success -- ^ Success
      deriving (Eq, Ord, Read, Show, Generic, Enum)

instance Hashable AssociationStatusName

instance FromText AssociationStatusName where
    parser = takeLowerText >>= \txt -> case txt of
        "failed"  -> pure Failed
        "pending" -> pure Pending
        "success" -> pure Success
        other     -> fail ("Failure parsing AssociationStatusName from " ++ show other)

instance ToText AssociationStatusName where
    toText Failed  = "Failed"
    toText Pending = "Pending"
    toText Success = "Success"

instance ToByteString AssociationStatusName
instance ToHeader AssociationStatusName
instance ToQuery AssociationStatusName

instance FromJSON AssociationStatusName where
    parseJSON = parseJSONText "AssociationStatusName"

instance ToJSON AssociationStatusName where
    toJSON = toJSONText
-- | The single filter key accepted when listing documents.
data DocumentFilterKey
    = Name -- ^ Name
      deriving (Eq, Ord, Read, Show, Generic, Enum)

instance Hashable DocumentFilterKey

instance FromText DocumentFilterKey where
    parser = takeLowerText >>= \txt -> case txt of
        "name" -> pure Name
        other  -> fail ("Failure parsing DocumentFilterKey from " ++ show other)

instance ToText DocumentFilterKey where
    toText Name = "Name"

instance ToByteString DocumentFilterKey
instance ToHeader DocumentFilterKey
instance ToQuery DocumentFilterKey

instance FromJSON DocumentFilterKey where
    parseJSON = parseJSONText "DocumentFilterKey"

instance ToJSON DocumentFilterKey where
    toJSON = toJSONText
data DocumentDescription = DocumentDescription
    { _dd1CreatedDate :: Maybe POSIX
    , _dd1Name        :: Maybe Text
    , _dd1Sha1        :: Maybe Text
    , _dd1Status      :: Maybe DocumentStatus
    } deriving (Eq, Read, Show)

-- | Build a 'DocumentDescription' with every field unset.
--
-- Fields (via lenses):
--
-- * 'dd1CreatedDate' @::@ 'Maybe' 'UTCTime'
--
-- * 'dd1Name' @::@ 'Maybe' 'Text'
--
-- * 'dd1Sha1' @::@ 'Maybe' 'Text'
--
-- * 'dd1Status' @::@ 'Maybe' 'DocumentStatus'
documentDescription :: DocumentDescription
documentDescription = DocumentDescription
    { _dd1CreatedDate = Nothing
    , _dd1Name        = Nothing
    , _dd1Sha1        = Nothing
    , _dd1Status      = Nothing
    }

-- | The date when the configuration document was created.  Stored
-- internally as 'POSIX'; 'mapping' '_Time' presents it as 'UTCTime'.
dd1CreatedDate :: Lens' DocumentDescription (Maybe UTCTime)
dd1CreatedDate = lens _dd1CreatedDate (\rec v -> rec { _dd1CreatedDate = v }) . mapping _Time

-- | The name of the configuration document.
dd1Name :: Lens' DocumentDescription (Maybe Text)
dd1Name = lens _dd1Name (\rec v -> rec { _dd1Name = v })

-- | The SHA1 hash of the document, which you can use for verification purposes.
dd1Sha1 :: Lens' DocumentDescription (Maybe Text)
dd1Sha1 = lens _dd1Sha1 (\rec v -> rec { _dd1Sha1 = v })

-- | The status of the configuration document.
dd1Status :: Lens' DocumentDescription (Maybe DocumentStatus)
dd1Status = lens _dd1Status (\rec v -> rec { _dd1Status = v })

instance FromJSON DocumentDescription where
    parseJSON = withObject "DocumentDescription" $ \o ->
        DocumentDescription
            <$> o .:? "CreatedDate"
            <*> o .:? "Name"
            <*> o .:? "Sha1"
            <*> o .:? "Status"

instance ToJSON DocumentDescription where
    toJSON DocumentDescription{..} = object
        [ "Sha1"        .= _dd1Sha1
        , "Name"        .= _dd1Name
        , "CreatedDate" .= _dd1CreatedDate
        , "Status"      .= _dd1Status
        ]
data AssociationFilter = AssociationFilter
    { _afKey   :: AssociationFilterKey
    , _afValue :: Text
    } deriving (Eq, Read, Show)

-- | Build an 'AssociationFilter' from its two required fields.
--
-- Fields (via lenses):
--
-- * 'afKey' @::@ 'AssociationFilterKey'
--
-- * 'afValue' @::@ 'Text'
associationFilter :: AssociationFilterKey -- ^ 'afKey'
                  -> Text                 -- ^ 'afValue'
                  -> AssociationFilter
associationFilter key val = AssociationFilter
    { _afKey   = key
    , _afValue = val
    }

-- | The name of the filter.
afKey :: Lens' AssociationFilter AssociationFilterKey
afKey = lens _afKey (\rec v -> rec { _afKey = v })

-- | The filter value.
afValue :: Lens' AssociationFilter Text
afValue = lens _afValue (\rec v -> rec { _afValue = v })

instance FromJSON AssociationFilter where
    parseJSON = withObject "AssociationFilter" $ \o ->
        AssociationFilter
            <$> o .: "key"
            <*> o .: "value"

instance ToJSON AssociationFilter where
    toJSON AssociationFilter{..} = object
        [ "key"   .= _afKey
        , "value" .= _afValue
        ]
newtype DocumentIdentifier = DocumentIdentifier
    { _diName :: Maybe Text
    } deriving (Eq, Ord, Read, Show, Monoid)

-- | Build a 'DocumentIdentifier' with no name set.
--
-- Fields (via lenses):
--
-- * 'diName' @::@ 'Maybe' 'Text'
documentIdentifier :: DocumentIdentifier
documentIdentifier = DocumentIdentifier { _diName = Nothing }

-- | The name of the configuration document.
diName :: Lens' DocumentIdentifier (Maybe Text)
diName = lens _diName (\rec v -> rec { _diName = v })

instance FromJSON DocumentIdentifier where
    parseJSON = withObject "DocumentIdentifier" $ \o ->
        DocumentIdentifier <$> o .:? "Name"

instance ToJSON DocumentIdentifier where
    toJSON DocumentIdentifier{..} = object
        [ "Name" .= _diName
        ]
-- | Which side a 'FailedCreateAssociation' attributes the failure to.
data Fault
    = Client  -- ^ Client
    | Server  -- ^ Server
    | Unknown -- ^ Unknown
      deriving (Eq, Ord, Read, Show, Generic, Enum)

instance Hashable Fault

instance FromText Fault where
    parser = takeLowerText >>= \txt -> case txt of
        "client"  -> pure Client
        "server"  -> pure Server
        "unknown" -> pure Unknown
        other     -> fail ("Failure parsing Fault from " ++ show other)

instance ToText Fault where
    toText Client  = "Client"
    toText Server  = "Server"
    toText Unknown = "Unknown"

instance ToByteString Fault
instance ToHeader Fault
instance ToQuery Fault

instance FromJSON Fault where
    parseJSON = parseJSONText "Fault"

instance ToJSON Fault where
    toJSON = toJSONText
data AssociationStatus = AssociationStatus
    { _asAdditionalInfo :: Maybe Text
    , _asDate           :: POSIX
    , _asMessage        :: Text
    , _asName           :: AssociationStatusName
    } deriving (Eq, Read, Show)

-- | 'AssociationStatus' constructor.  Only 'asAdditionalInfo' is optional.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'asAdditionalInfo' @::@ 'Maybe' 'Text'
--
-- * 'asDate' @::@ 'UTCTime'
--
-- * 'asMessage' @::@ 'Text'
--
-- * 'asName' @::@ 'AssociationStatusName'
--
associationStatus :: UTCTime -- ^ 'asDate'
                  -> AssociationStatusName -- ^ 'asName'
                  -> Text -- ^ 'asMessage'
                  -> AssociationStatus
associationStatus p1 p2 p3 = AssociationStatus
    { -- Convert the caller-facing 'UTCTime' into the internally stored
      -- 'POSIX' representation via the '_Time' isomorphism.
      _asDate           = withIso _Time (const id) p1
    , _asName           = p2
    , _asMessage        = p3
    , _asAdditionalInfo = Nothing
    }

-- | A user-defined string.
asAdditionalInfo :: Lens' AssociationStatus (Maybe Text)
asAdditionalInfo = lens _asAdditionalInfo (\s a -> s { _asAdditionalInfo = a })

-- | The date when the status changed.  Stored as 'POSIX'; composed with
-- '_Time' so callers see 'UTCTime'.
asDate :: Lens' AssociationStatus UTCTime
asDate = lens _asDate (\s a -> s { _asDate = a }) . _Time

-- | The reason for the status.
asMessage :: Lens' AssociationStatus Text
asMessage = lens _asMessage (\s a -> s { _asMessage = a })

-- | The status.
asName :: Lens' AssociationStatus AssociationStatusName
asName = lens _asName (\s a -> s { _asName = a })

instance FromJSON AssociationStatus where
    parseJSON = withObject "AssociationStatus" $ \o -> AssociationStatus
        <$> o .:? "AdditionalInfo"
        <*> o .:  "Date"
        <*> o .:  "Message"
        <*> o .:  "Name"

instance ToJSON AssociationStatus where
    toJSON AssociationStatus{..} = object
        [ "Date"           .= _asDate
        , "Name"           .= _asName
        , "Message"        .= _asMessage
        , "AdditionalInfo" .= _asAdditionalInfo
        ]
-- | Lifecycle states of a configuration document.
data DocumentStatus
    = Active   -- ^ Active
    | Creating -- ^ Creating
    | Deleting -- ^ Deleting
      deriving (Eq, Ord, Read, Show, Generic, Enum)

instance Hashable DocumentStatus

instance FromText DocumentStatus where
    parser = takeLowerText >>= \txt -> case txt of
        "active"   -> pure Active
        "creating" -> pure Creating
        "deleting" -> pure Deleting
        other      -> fail ("Failure parsing DocumentStatus from " ++ show other)

instance ToText DocumentStatus where
    toText Active   = "Active"
    toText Creating = "Creating"
    toText Deleting = "Deleting"

instance ToByteString DocumentStatus
instance ToHeader DocumentStatus
instance ToQuery DocumentStatus

instance FromJSON DocumentStatus where
    parseJSON = parseJSONText "DocumentStatus"

instance ToJSON DocumentStatus where
    toJSON = toJSONText
-- | Filter keys accepted when listing associations.
data AssociationFilterKey
    = AFKInstanceId -- ^ InstanceId
    | AFKName       -- ^ Name
      deriving (Eq, Ord, Read, Show, Generic, Enum)

instance Hashable AssociationFilterKey

instance FromText AssociationFilterKey where
    parser = takeLowerText >>= \txt -> case txt of
        "instanceid" -> pure AFKInstanceId
        "name"       -> pure AFKName
        other        -> fail ("Failure parsing AssociationFilterKey from " ++ show other)

instance ToText AssociationFilterKey where
    toText AFKInstanceId = "InstanceId"
    toText AFKName       = "Name"

instance ToByteString AssociationFilterKey
instance ToHeader AssociationFilterKey
instance ToQuery AssociationFilterKey

instance FromJSON AssociationFilterKey where
    parseJSON = parseJSONText "AssociationFilterKey"

instance ToJSON AssociationFilterKey where
    toJSON = toJSONText
data FailedCreateAssociation = FailedCreateAssociation
    { _fcaEntry   :: Maybe CreateAssociationBatchRequestEntry
    , _fcaFault   :: Maybe Fault
    , _fcaMessage :: Maybe Text
    } deriving (Eq, Read, Show)

-- | Build a 'FailedCreateAssociation' with every field unset.
--
-- Fields (via lenses):
--
-- * 'fcaEntry' @::@ 'Maybe' 'CreateAssociationBatchRequestEntry'
--
-- * 'fcaFault' @::@ 'Maybe' 'Fault'
--
-- * 'fcaMessage' @::@ 'Maybe' 'Text'
failedCreateAssociation :: FailedCreateAssociation
failedCreateAssociation = FailedCreateAssociation
    { _fcaEntry   = Nothing
    , _fcaFault   = Nothing
    , _fcaMessage = Nothing
    }

-- | The association.
fcaEntry :: Lens' FailedCreateAssociation (Maybe CreateAssociationBatchRequestEntry)
fcaEntry = lens _fcaEntry (\rec v -> rec { _fcaEntry = v })

-- | The source of the failure.
fcaFault :: Lens' FailedCreateAssociation (Maybe Fault)
fcaFault = lens _fcaFault (\rec v -> rec { _fcaFault = v })

-- | A description of the failure.
fcaMessage :: Lens' FailedCreateAssociation (Maybe Text)
fcaMessage = lens _fcaMessage (\rec v -> rec { _fcaMessage = v })

instance FromJSON FailedCreateAssociation where
    parseJSON = withObject "FailedCreateAssociation" $ \o ->
        FailedCreateAssociation
            <$> o .:? "Entry"
            <*> o .:? "Fault"
            <*> o .:? "Message"

instance ToJSON FailedCreateAssociation where
    toJSON FailedCreateAssociation{..} = object
        [ "Entry"   .= _fcaEntry
        , "Message" .= _fcaMessage
        , "Fault"   .= _fcaFault
        ]
data Association = Association
    { _aInstanceId :: Maybe Text
    , _aName       :: Maybe Text
    } deriving (Eq, Ord, Read, Show)

-- | Build an 'Association' with every field unset.
--
-- Fields (via lenses):
--
-- * 'aInstanceId' @::@ 'Maybe' 'Text'
--
-- * 'aName' @::@ 'Maybe' 'Text'
association :: Association
association = Association
    { _aInstanceId = Nothing
    , _aName       = Nothing
    }

-- | The ID of the instance.
aInstanceId :: Lens' Association (Maybe Text)
aInstanceId = lens _aInstanceId (\rec v -> rec { _aInstanceId = v })

-- | The name of the configuration document.
aName :: Lens' Association (Maybe Text)
aName = lens _aName (\rec v -> rec { _aName = v })

instance FromJSON Association where
    parseJSON = withObject "Association" $ \o ->
        Association
            <$> o .:? "InstanceId"
            <*> o .:? "Name"

instance ToJSON Association where
    toJSON Association{..} = object
        [ "Name"       .= _aName
        , "InstanceId" .= _aInstanceId
        ]
| romanb/amazonka | amazonka-ssm/gen/Network/AWS/SSM/Types.hs | mpl-2.0 | 20,605 | 0 | 15 | 5,271 | 4,099 | 2,293 | 1,806 | -1 | -1 |
{-#LANGUAGE CPP#-}
import qualified CV.Matrix as M
import CV.Calibration
-- | Demo: estimate a fundamental matrix from two point sets and derive
-- uncalibrated stereo rectification homographies.
main = do
    let -- 10 points as a flat [x1,y1,x2,y2,...] element list in a 2x10 matrix.
        mat = M.fromList (2,10) $ concat [[x,y] | x <- [1,2], y <- [1..5]]
        -- NOTE(review): 'mat2' packs (x,y) TUPLES while 'mat' packs flattened
        -- scalars -- confirm which element layout CV.Matrix.fromList expects.
        mat2 = M.fromList (1,10) $ [(x,y) | x <- [1,2], y <- [1..5]]
        -- NOTE(review): the SAME matrix is passed for both point sets here;
        -- presumably one argument should be a second, distinct set of
        -- correspondences -- TODO confirm against CV.Calibration docs.
        a = findFundamentalMat mat mat c'CV_FM_RANSAC 1 0.6
        (c,d) = stereoRectifyUncalibrated mat2 mat2 a (30,30) 0.5
    print a
    print c
    print d
| TomMD/CV | examples/Fundamental.hs | bsd-3-clause | 398 | 0 | 15 | 111 | 202 | 109 | 93 | 11 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Data.Comp
-- Copyright : (c) 2010-2011 Patrick Bahr, Tom Hvitved
-- License : BSD3
-- Maintainer : Patrick Bahr <paba@diku.dk>, Tom Hvitved <hvitved@diku.dk>
-- Stability : experimental
-- Portability : non-portable (GHC Extensions)
--
-- This module defines the infrastructure necessary to use
-- /Compositional Data Types/. Compositional Data Types is an extension of
-- Wouter Swierstra's Functional Pearl: /Data types a la carte/. Examples of
-- usage are bundled with the package in the library @examples\/Examples@.
--
--------------------------------------------------------------------------------
module Data.Comp
(
module X
) where
import Data.Comp.Algebra as X
import Data.Comp.Annotation as X
import Data.Comp.Equality as X
import Data.Comp.Generic as X
import Data.Comp.Ordering as X
import Data.Comp.Sum as X
import Data.Comp.Term as X
| spacekitteh/compdata | src/Data/Comp.hs | bsd-3-clause | 982 | 0 | 4 | 154 | 83 | 65 | 18 | 10 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
-- TODO This module was originally based on the PackageTests.PackageTester
-- module in Cabal, however it has a few differences. I suspect that as
-- this module ages the two modules will diverge further. As such, I have
-- not attempted to merge them into a single module nor to extract a common
-- module from them. Refactor this module and/or Cabal's
-- PackageTests.PackageTester to remove commonality.
-- 2014-05-15 Ben Armston
-- | Routines for black-box testing cabal-install.
--
-- Instead of driving the tests by making library calls into
-- Distribution.Simple.* or Distribution.Client.* this module only every
-- executes the `cabal-install` binary.
--
-- You can set the following VERBOSE environment variable to control
-- the verbosity of the output generated by this module.
module PackageTests.PackageTester
( TestsPaths(..)
, Result(..)
, packageTestsDirectory
, packageTestsConfigFile
-- * Running cabal commands
, cabal_clean
, cabal_exec
, cabal_freeze
, cabal_install
, cabal_sandbox
, run
-- * Test helpers
, assertCleanSucceeded
, assertExecFailed
, assertExecSucceeded
, assertFreezeSucceeded
, assertInstallSucceeded
, assertSandboxSucceeded
) where
import qualified Control.Exception.Extensible as E
import Control.Monad (when, unless)
import Data.Maybe (fromMaybe)
import System.Directory (canonicalizePath, doesFileExist)
import System.Environment (getEnv)
import System.Exit (ExitCode(ExitSuccess))
import System.FilePath ( (<.>) )
import System.IO (hClose, hGetChar, hIsEOF)
import System.IO.Error (isDoesNotExistError)
import System.Process (runProcess, waitForProcess)
import Test.HUnit (Assertion, assertFailure)
import Distribution.Simple.BuildPaths (exeExtension)
import Distribution.Simple.Utils (printRawCommandAndArgs)
import Distribution.Compat.CreatePipe (createPipe)
import Distribution.ReadE (readEOrFail)
import Distribution.Verbosity (Verbosity, flagToVerbosity, normal)
-- | Tags which cabal sub-command produced a 'Result'.  'Failure' doubles
-- as the neutral starting value (see 'nullResult'); the commented-out
-- constructors are kept for parity with Cabal's PackageTests.PackageTester.
data Success = Failure
             -- | ConfigureSuccess
             -- | BuildSuccess
             -- | TestSuccess
             -- | BenchSuccess
             | CleanSuccess
             | ExecSuccess
             | FreezeSuccess
             | InstallSuccess
             | SandboxSuccess
    deriving (Eq, Show)

-- | Filesystem locations every black-box test needs.
data TestsPaths = TestsPaths
    { cabalPath  :: FilePath -- ^ absolute path to cabal executable.
    , ghcPkgPath :: FilePath -- ^ absolute path to ghc-pkg executable.
    , configPath :: FilePath -- ^ absolute path of the default config file
                             -- to use for tests (tests are free to use
                             -- a different one).
    }

-- | Accumulated outcome of one or more cabal invocations
-- (see 'recordRun').
data Result = Result
    { successful :: Bool    -- ^ True while every recorded command exited with 'ExitSuccess'
    , success    :: Success -- ^ tag of the most recent successful command
    , outputText :: String  -- ^ captured command lines and their output
    } deriving Show

-- | Neutral starting 'Result' to fold command outcomes into.
nullResult :: Result
nullResult = Result True Failure ""
------------------------------------------------------------------------
-- * Config
-- | Directory (relative to the test runner) holding the test packages.
packageTestsDirectory :: FilePath
packageTestsDirectory = "PackageTests"

-- | File name of the cabal config file the tests use by default.
packageTestsConfigFile :: FilePath
packageTestsConfigFile = "cabal-config"
------------------------------------------------------------------------
-- * Running cabal commands
-- | Fold the outcome of one command invocation into an accumulated
-- 'Result'.  The run stays 'successful' only while every exit code is
-- 'ExitSuccess'; the command line and its output are appended to
-- 'outputText', separated by newlines.
recordRun :: (String, ExitCode, String) -> Success -> Result -> Result
recordRun (cmd, exitCode, exeOutput) thisSucc res = res
    { successful = successful res && ok
    , success    = if ok then thisSucc else success res
    , outputText = previous ++ cmd ++ "\n" ++ exeOutput
    }
  where
    ok = exitCode == ExitSuccess
    previous
      | null (outputText res) = ""
      | otherwise             = outputText res ++ "\n"
-- | Run the clean command and return its result.
cabal_clean :: TestsPaths -> FilePath -> [String] -> IO Result
cabal_clean paths dir args = do
    res <- cabal paths dir ("clean" : args)
    return (recordRun res CleanSuccess nullResult)

-- | Run the exec command and return its result.
cabal_exec :: TestsPaths -> FilePath -> [String] -> IO Result
cabal_exec paths dir args = do
    res <- cabal paths dir ("exec" : args)
    return (recordRun res ExecSuccess nullResult)

-- | Run the freeze command and return its result.
cabal_freeze :: TestsPaths -> FilePath -> [String] -> IO Result
cabal_freeze paths dir args = do
    res <- cabal paths dir ("freeze" : args)
    return (recordRun res FreezeSuccess nullResult)

-- | Run the install command and return its result.
cabal_install :: TestsPaths -> FilePath -> [String] -> IO Result
cabal_install paths dir args = do
    res <- cabal paths dir ("install" : args)
    return (recordRun res InstallSuccess nullResult)

-- | Run the sandbox command and return its result.
cabal_sandbox :: TestsPaths -> FilePath -> [String] -> IO Result
cabal_sandbox paths dir args = do
    res <- cabal paths dir ("sandbox" : args)
    return (recordRun res SandboxSuccess nullResult)
-- | Invoke the cabal executable in @dir@, always passing the tests'
-- config file first.  Returns the command issued, the exit code, and
-- the captured output text.
cabal :: TestsPaths -> FilePath -> [String] -> IO (String, ExitCode, String)
cabal paths dir cabalArgs =
    run (Just dir) (cabalPath paths) (configFileArg : cabalArgs)
  where
    configFileArg = "--config-file=" ++ configPath paths
-- | Returns the command that was issued, the return code, and the output text
run :: Maybe FilePath -> String -> [String] -> IO (String, ExitCode, String)
run cwd path args = do
    verbosity <- getVerbosity
    -- path is relative to the current directory; canonicalizePath makes it
    -- absolute, so that runProcess will find it even when changing directory.
    path' <- do pathExists <- doesFileExist path
                canonicalizePath (if pathExists then path else path <.> exeExtension)
    printRawCommandAndArgs verbosity path' args
    (readh, writeh) <- createPipe
    -- Both stdout and stderr of the child are redirected into the same pipe.
    pid <- runProcess path' args cwd Nothing Nothing (Just writeh) (Just writeh)
    -- Read the child's output synchronously until EOF.  (No thread is
    -- forked here; draining the pipe before 'waitForProcess' avoids a
    -- deadlock with a child that fills the pipe buffer.)
    out <- suckH [] readh
    hClose readh
    -- wait for the program to terminate
    exitcode <- waitForProcess pid
    let fullCmd = unwords (path' : args)
    return ("\"" ++ fullCmd ++ "\" in " ++ fromMaybe "" cwd, exitcode, out)
  where
    -- Accumulate characters in reverse until EOF, then restore order.
    suckH output h = do
      eof <- hIsEOF h
      if eof
        then return (reverse output)
        else do
          c <- hGetChar h
          suckH (c:output) h
------------------------------------------------------------------------
-- * Test helpers
-- | Shared core of the assert* helpers below: fail the test (showing the
-- recorded output) when the command's success flag does not match the
-- expectation.  Not exported; the six public wrappers keep their
-- original names and messages.
assertOutcome :: String -> Bool -> Result -> Assertion
assertOutcome cmd shouldSucceed result =
    when (successful result /= shouldSucceed) $ assertFailure $
        "expected: \'" ++ cmd ++ "\' should " ++ expectation ++ "\n" ++
        " output: " ++ outputText result
  where
    expectation = if shouldSucceed then "succeed" else "fail"

-- | Assert that @cabal clean@ succeeded.
assertCleanSucceeded :: Result -> Assertion
assertCleanSucceeded = assertOutcome "cabal clean" True

-- | Assert that @cabal exec@ succeeded.
assertExecSucceeded :: Result -> Assertion
assertExecSucceeded = assertOutcome "cabal exec" True

-- | Assert that @cabal exec@ failed.
assertExecFailed :: Result -> Assertion
assertExecFailed = assertOutcome "cabal exec" False

-- | Assert that @cabal freeze@ succeeded.
assertFreezeSucceeded :: Result -> Assertion
assertFreezeSucceeded = assertOutcome "cabal freeze" True

-- | Assert that @cabal install@ succeeded.
assertInstallSucceeded :: Result -> Assertion
assertInstallSucceeded = assertOutcome "cabal install" True

-- | Assert that @cabal sandbox@ succeeded.
assertSandboxSucceeded :: Result -> Assertion
assertSandboxSucceeded = assertOutcome "cabal sandbox" True
------------------------------------------------------------------------
-- Verbosity
-- | Look up an environment variable, mapping "does not exist" to
-- 'Nothing' and re-throwing any other 'IOError'.  (Local stand-in for
-- the 'System.Environment.lookupEnv' of newer base versions.)
lookupEnv :: String -> IO (Maybe String)
lookupEnv name = fmap Just (getEnv name) `E.catch` handler
  where
    handler :: IOError -> IO (Maybe String)
    handler e
      | isDoesNotExistError e = return Nothing
      | otherwise             = E.throw e
-- TODO: Convert to a "-v" flag instead.
-- | Read the verbosity from the VERBOSE environment variable, defaulting
-- to 'normal' when it is unset.
getVerbosity :: IO Verbosity
getVerbosity = do
    mbVerbose <- lookupEnv "VERBOSE"
    return (maybe normal (readEOrFail flagToVerbosity) mbVerbose)
| DavidAlphaFox/ghc | libraries/Cabal/cabal-install/tests/PackageTests/PackageTester.hs | bsd-3-clause | 8,262 | 0 | 14 | 1,805 | 1,696 | 918 | 778 | 149 | 3 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Heist.Compiled.LowLevel
(
-- * Lower level promise functions
Promise
, newEmptyPromise
, getPromise
, putPromise
, adjustPromise
) where
import Heist.Compiled.Internal
| 23Skidoo/heist | src/Heist/Compiled/LowLevel.hs | bsd-3-clause | 514 | 0 | 4 | 127 | 38 | 28 | 10 | 15 | 0 |
{-# LANGUAGE EmptyDataDecls, GADTs, TypeFamilies #-}
module T7489 where
-- Phantom tags distinguishing the two account flavours at the type level.
data Credit
data Debit

-- | Accounts indexed by a state type @s@ and an account-kind tag @t@.
data family Account (s :: *) (t :: *)

-- GADT data-family instance: the constructor chosen fixes the tag
-- ('Credit' or 'Debit'); only debit accounts carry postings.
data instance Account Int t where
    CAccount :: Account Int Credit
    DAccount :: { debitAccountPostings :: [Int] } -> Account Int Debit
| ghc-android/ghc | testsuite/tests/indexed-types/should_compile/T7489.hs | bsd-3-clause | 274 | 0 | 8 | 53 | 72 | 45 | 27 | -1 | -1 |
-- | This module provides a function for downloading robots.txt of any domain via HTTP.
module YesWeScan.RobotsTxt (robotsTxt) where
import Network.HTTP (simpleHTTP, getRequest, getResponseBody)
-- | Get the contents of a `robots.txt` associated with a URL.
--
-- Fixed: in the original, @\"robots.txt\"@ was only part of the /else/
-- branch (@if last url == '/' then [] else \"/\" ++ \"robots.txt\"@), so a
-- URL already ending in @/@ had NOTHING appended and the bare URL was
-- fetched instead of its robots.txt.  'last' on an empty URL was also a
-- crash; an empty input now simply yields @\"robots.txt\"@.
robotsTxt :: String -> IO String
robotsTxt url = getResponseBody =<< simpleHTTP (getRequest robotsUrl)
  where
    -- Join the base URL and "robots.txt" with exactly one slash.
    robotsUrl = url ++ separator ++ "robots.txt"
    separator
      | null url || last url == '/' = ""
      | otherwise                   = "/"
| kmein/yes-we-scan | YesWeScan/RobotsTxt.hs | mit | 459 | 0 | 11 | 110 | 88 | 49 | 39 | 9 | 2 |
import Data.List
-- | Read the Project Euler 54 input file and print how many hands
-- player 1 wins.
main = readFile "p054_poker.txt" >>= print . process
-- | Count the lines (one deal per line: ten card codes, first five are
-- player 1's) on which player 1's hand outranks player 2's.
process wholeFile = length player1Wins
  where
    player1Wins = winningHandsPlayer1 (map handsFromListsOfCards cardPairs)
    cardPairs   = map listsOfCards (lines wholeFile)

-- | Split one input line into the two players' sorted card lists.
listsOfCards = toListsOfCards . words

-- | Classify both card lists into poker hands.
handsFromListsOfCards (cards1, cards2) = (toPokerHand cards1, toPokerHand cards2)

-- | Keep only the deals where the first hand strictly beats the second.
winningHandsPlayer1 = filter (uncurry (>))

-- | Parse ten card codes, split 5/5, and sort each hand in DESCENDING
-- value order (the hand classifiers rely on that ordering).
toListsOfCards ws =
    let (c1, c2) = splitAt 5 (map toCard ws)
    in (descending c1, descending c2)
  where
    descending = reverse . sort
-- | Card suit; ordering is never used for hand ranking.
data Suit = Clubs | Spades | Hearts | Diamonds deriving (Show, Eq, Ord)
-- | Card values in ascending poker order, so the derived 'Ord' ranks
-- 'Val2' lowest and 'ValA' (ace) highest.
data Value = Val2 | Val3 | Val4 | Val5 | Val6 | Val7 | Val8 | Val9 | Val10 | ValJ | ValQ | ValK | ValA deriving (Show, Eq, Ord)
-- | 'value' is the first field, so the derived 'Ord' compares cards by
-- value before suit -- which is what sorting hands relies on.
data Card = Card { value :: Value, suit :: Suit } deriving (Show, Eq, Ord)
-- | Parse a two-character card code, e.g. @\"TC\"@ = ten of clubs.
-- Characters beyond the first two are ignored.  Malformed (too short)
-- input now fails with a descriptive error instead of a bare
-- pattern-match failure.
toCard :: String -> Card
toCard (s0:s1:_) = Card { value = toValue s0, suit = toSuit s1 }
toCard s         = error $ "toCard: malformed card code " ++ show s
-- | Map a card-value character ('2'..'9', 'T', 'J', 'Q', 'K', 'A') to a
-- 'Value'.  Unknown characters now raise a descriptive error instead of
-- a bare pattern-match failure.
toValue :: Char -> Value
toValue '2' = Val2
toValue '3' = Val3
toValue '4' = Val4
toValue '5' = Val5
toValue '6' = Val6
toValue '7' = Val7
toValue '8' = Val8
toValue '9' = Val9
toValue 'T' = Val10
toValue 'J' = ValJ
toValue 'Q' = ValQ
toValue 'K' = ValK
toValue 'A' = ValA
toValue c   = error $ "toValue: unknown value character " ++ show c
-- | Map a suit character ('C', 'S', 'H', 'D') to a 'Suit'.  Unknown
-- characters now raise a descriptive error instead of a bare
-- pattern-match failure.
toSuit :: Char -> Suit
toSuit 'C' = Clubs
toSuit 'S' = Spades
toSuit 'H' = Hearts
toSuit 'D' = Diamonds
toSuit c   = error $ "toSuit: unknown suit character " ++ show c
-- | Numeric rank of a card value (2..14, ace high); used only for the
-- consecutive-values test in 'isStraight'.
valueToInt :: Value -> Int
valueToInt v = case v of
    Val2  -> 2
    Val3  -> 3
    Val4  -> 4
    Val5  -> 5
    Val6  -> 6
    Val7  -> 7
    Val8  -> 8
    Val9  -> 9
    Val10 -> 10
    ValJ  -> 11
    ValQ  -> 12
    ValK  -> 13
    ValA  -> 14
-- | Poker hand ranks, listed weakest-first so the derived 'Ord' orders
-- hands correctly: first by constructor (rank), then by the primary
-- value @a@ (e.g. the pair/triple value or high card), then by the
-- tie-breaker list @b@ (kickers, high to low).
data PokerHand a b = HighCard a b |
                     OnePair a b |
                     TwoPairs a b |
                     ThreeOfAKind a b |
                     Straight a |
                     Flush a b |
                     FullHouse a b |
                     FourOfAKind a b |
                     StraightFlush a |
                     RoyalFlush
                   deriving (Show, Eq, Ord)
-- | Classify five cards into their best 'PokerHand'.
--
-- Precondition: exactly five cards, sorted in DESCENDING value order
-- (see 'toListsOfCards'); fewer than five cards is a pattern-match
-- failure.  Guards are tried from the strongest rank downwards, so the
-- first hit is the best classification.  The match* helpers signal
-- "no match" with the sentinel @(Val2, [])@, detected here by 'null'
-- on the kicker list.
toPokerHand :: [Card] -> PokerHand Value [Value]
toPokerHand cards@(c1:c2:c3:c4:c5:cs)
    | isRoyalFlush cards    = RoyalFlush
    | isStraightFlush cards = StraightFlush (value c1)
    | not (null foak2)      = FourOfAKind foak1 foak2
    | not (null full2)      = FullHouse full1 full2
    | isFlush cards         = Flush (value c1) [value c2, value c3, value c4, value c5]
    | isStraight cards      = Straight (value c1)
    | not (null thre2)      = ThreeOfAKind thre1 thre2
    | not (null twop2)      = TwoPairs twop1 twop2
    | not (null onep2)      = OnePair onep1 onep2
    | otherwise             = HighCard (value c1) [value c2, value c3, value c4, value c5]
  where
    (foak1, foak2) = matchFourOfAKind cards
    (full1, full2) = matchFullHouse cards
    (thre1, thre2) = matchThreeOfAKind cards
    (twop1, twop2) = matchTwoPairs cards
    (onep1, onep2) = matchOnePair cards
-- All helpers below assume a five-card hand sorted in DESCENDING value
-- order (c1 highest, c5 lowest), as produced by 'toListsOfCards'.

-- A royal flush is a straight flush whose lowest card is a ten.
isRoyalFlush cards@(c1:c2:c3:c4:c5:cs) = isStraightFlush cards
    && value c5 == Val10

isStraightFlush cards@(c1:c2:c3:c4:c5:cs) = isFlush cards && isStraight cards

-- Four of a kind: (quad value, [kicker]); (Val2, []) is the no-match sentinel.
matchFourOfAKind cards@(c1:c2:c3:c4:c5:cs)
    | allSameValue [c1, c2, c3, c4] = (value c1, [value c5])
    | allSameValue [c2, c3, c4, c5] = (value c2, [value c1])
    | otherwise = (Val2, [])

-- Full house: (triple value, [pair value]); (Val2, []) = no match.
matchFullHouse cards@(c1:c2:c3:c4:c5:cs)
    | allSameValue [c1, c2] && allSameValue [c3, c4, c5] = (value c3, [value c1])
    | allSameValue [c1, c2, c3] && allSameValue [c4, c5] = (value c1, [value c4])
    | otherwise = (Val2, [])

isFlush cards@(c1:c2:c3:c4:c5:cs) = allSameSuit cards

-- Five distinct values spanning exactly 4 ranks.  NOTE: the ace-low
-- straight A-2-3-4-5 is NOT recognised, since sorted descending it
-- spans valueToInt A - valueToInt 2 = 12, not 4.
isStraight cards@(c1:c2:c3:c4:c5:cs) = (allDifferentValues cards)
    && (valueToInt $ value c1) - (valueToInt $ value c5) == 4

-- Three of a kind: (triple value, kickers high-to-low); (Val2, []) = no match.
matchThreeOfAKind cards@(c1:c2:c3:c4:c5:cs)
    | allSameValue [c1, c2, c3] = (value c1, [value c4, value c5])
    | allSameValue [c2, c3, c4] = (value c2, [value c1, value c5])
    | allSameValue [c3, c4, c5] = (value c3, [value c1, value c2])
    | otherwise = (Val2, [])
-- | Two pairs: returns (higher pair value, [lower pair value, kicker]),
-- so the derived 'Ord' on 'TwoPairs' breaks ties first on the high pair,
-- then the low pair, then the kicker -- matching poker ranking rules.
-- (The original discarded the lower pair, so two hands with equal high
-- pairs were compared on the kicker alone.)  Since the hand is sorted
-- descending, the pair at c1/c2 always outranks a pair further right,
-- so the old @max@ call was redundant.  (Val2, []) = no match.
matchTwoPairs cards@(c1:c2:c3:c4:c5:cs)
    | allSameValue [c1, c2] && allSameValue [c3, c4] = (value c1, [value c3, value c5])
    | allSameValue [c1, c2] && allSameValue [c4, c5] = (value c1, [value c4, value c3])
    | allSameValue [c2, c3] && allSameValue [c4, c5] = (value c2, [value c4, value c1])
    | otherwise = (Val2, [])
-- One pair: (pair value, kickers high-to-low); (Val2, []) = no match.
-- Assumes the hand is sorted descending, so a pair occupies adjacent
-- positions.
matchOnePair cards@(c1:c2:c3:c4:c5:cs)
    | allSameValue [c1, c2] = (value c1, [value c3, value c4, value c5])
    | allSameValue [c2, c3] = (value c2, [value c1, value c4, value c5])
    | allSameValue [c3, c4] = (value c3, [value c1, value c2, value c5])
    | allSameValue [c4, c5] = (value c4, [value c1, value c2, value c3])
    | otherwise = (Val2, [])
-- | True when every card shares one suit.  Now total: empty and
-- singleton lists are vacuously True (the originals crashed on []).
allSameSuit :: [Card] -> Bool
allSameSuit cs = and (zipWith (\a b -> suit a == suit b) cs (drop 1 cs))

-- | True when no two ADJACENT cards share a value.  For the sorted
-- hands used in this program that is equivalent to all values being
-- pairwise distinct; it is NOT a general distinctness test.
allDifferentValues :: [Card] -> Bool
allDifferentValues cs = and (zipWith (\a b -> value a /= value b) cs (drop 1 cs))

-- | True when every card has the same value (equality is transitive,
-- so checking adjacent pairs suffices).
allSameValue :: [Card] -> Bool
allSameValue cs = and (zipWith (\a b -> value a == value b) cs (drop 1 cs))
| arekfu/project_euler | p0054/p0054.hs | mit | 5,313 | 0 | 12 | 1,406 | 2,449 | 1,265 | 1,184 | 119 | 1 |
{-# language ScopedTypeVariables, QuasiQuotes, FlexibleInstances, InstanceSigs #-}
module Database where
import Database.PostgreSQL.Simple (Connection, fromOnly, query_, execute_)
import Control.Applicative
import Data.List (intercalate)
import Text.InterpolatedString.Perl6 (qc)
import Control.Monad (forM_, when, unless)
import Utils
-- | Kind of database object targeted by GRANT/REVOKE/ALTER statements;
-- the 'show' output is spliced verbatim into the generated SQL.
data GrantType = SCHEMA | TABLE | VIEW deriving (Show, Eq)
type SchemaPattern = String -- LIKE pattern for schema names; may be '%' for all
type Username = String
type DBObject = String -- TableName, SchemaName, ViewName
type PermissionType = String -- "SELECT", "ALL", "USAGE", specified in privileges.cfg file
type Query = String -- raw SQL text, built by the *Query functions below
-- | Build a @GRANT@ statement for the given user over the listed objects.
-- Returns the empty string when there is nothing to grant. For views the
-- object-kind keyword is omitted, as the GRANT syntax requires.
grantQuery :: GrantType -> Username -> [DBObject] -> PermissionType -> Query
grantQuery grant username objects permission
  | null objects = ""
  | otherwise    = "BEGIN; GRANT " ++ permission ++ " ON " ++ target
                   ++ " TO " ++ username ++ "; COMMIT;"
  where
    objectList = intercalate ", " objects
    -- Views must NOT carry the GrantType keyword.
    target | grant == VIEW = objectList
           | otherwise     = show grant ++ " " ++ objectList
-- | Build a @REVOKE@ statement stripping the user's rights on the listed
-- objects. Returns the empty string when there is nothing to revoke; views
-- omit the object-kind keyword, as the REVOKE syntax requires.
revokeQuery :: GrantType -> Username -> [DBObject] -> PermissionType -> Query
revokeQuery grant username objects permission
  | null objects = ""
  | otherwise    = "BEGIN; REVOKE " ++ permission ++ " ON " ++ target
                   ++ " FROM " ++ username ++ "; COMMIT;"
  where
    objectList = intercalate ", " objects
    -- Views must NOT carry the GrantType keyword.
    target | grant == VIEW = objectList
           | otherwise     = show grant ++ " " ++ objectList
-- | Build an @ALTER ... OWNER TO@ statement handing the object to a new owner.
transferOwnerQuery :: GrantType -> Username -> DBObject -> Query
transferOwnerQuery grant unameNew object = concat
  [ "BEGIN; ALTER ", show grant, " ", object, " OWNER TO ", unameNew, "; COMMIT;" ]
-- | Check whether a role with the given name exists in @pg_user@.
-- NOTE(review): @uname@ is interpolated directly into the SQL text; this
-- assumes usernames come from trusted configuration — confirm.
doesUserExist :: Connection -> Username -> IO Bool
doesUserExist db uname = do
  users :: [Username] <-
    fmap fromOnly <$>
    query_ db [qc|
      SELECT usename
      FROM pg_user
      WHERE usename = '{uname}';
    |]
  return $ length users == 1
  -- probably need to catch the error when length users > 1, but it is impossible anyway?
-- | Create the role with the given password unless it already exists;
-- a no-op for existing users. Errors are handled by 'tryExec'.
createUserIfMissing :: Connection -> User -> IO ()
createUserIfMissing db (User uname upassword) = do
  exist <- doesUserExist db uname
  unless exist $
    tryExec $ execute_ db [qc|
      CREATE USER {uname}
      WITH PASSWORD '{upassword}';
    |]
-- | Reset an existing role's password; a no-op when the user does not
-- exist. Errors are handled by 'tryExec'.
updateUserPasswordIfExists :: Connection -> User -> IO ()
updateUserPasswordIfExists db (User uname upassword) = do
  exist <- doesUserExist db uname
  when exist $
    tryExec $ execute_ db [qc|
      ALTER USER {uname}
      WITH PASSWORD '{upassword}';
    |]
-- SchemaPattern might be '%' to specify ALL Schema
-- | Revoke ALL privileges the user holds on every schema, table and view
-- whose schema matches @schemaPattern@. A no-op when the user does not
-- exist. (Fixed user-facing typo: "privilges" -> "privileges".)
revokeAllUsersPermissionIfExists :: Connection -> SchemaPattern -> Username -> IO ()
revokeAllUsersPermissionIfExists db schemaPattern uname = do
  exist <- doesUserExist db uname
  when exist $ do
    putStrLn $ "Revoking all schema privileges for user: " ++ uname
    uschemas :: [DBObject] <-
      fmap fromOnly <$>
      query_ db [qc|
        SELECT schema_name
        FROM information_schema.schemata
        WHERE schema_name like '{schemaPattern}';
      |]
    unless (null uschemas) $
      tryExec $ execute_ db [qc|
        {revokeQuery SCHEMA uname uschemas "ALL"}
      |]
    putStrLn $ "Revoking all table privileges for user: " ++ uname
    utables :: [DBObject] <-
      fmap fromOnly <$>
      query_ db [qc|
        SELECT schemaname || '.' || '"' || tablename || '"'
        FROM pg_tables
        WHERE schemaname like '{schemaPattern}';
      |]
    unless (null utables) $
      tryExec $ execute_ db [qc|
        {revokeQuery TABLE uname utables "ALL"}
      |]
    putStrLn $ "Revoking all view privileges for user: " ++ uname
    uviews :: [DBObject] <-
      fmap fromOnly <$>
      query_ db [qc|
        SELECT schemaname || '.' || '"' || viewname || '"'
        FROM pg_views
        WHERE schemaname like '{schemaPattern}';
      |]
    unless (null uviews) $
      tryExec $ execute_ db [qc|
        {revokeQuery VIEW uname uviews "ALL"}
      |]
-- | Grant the permissions described by a 'Permission' record on every
-- schema/table/view matching both the permission's own patterns and the
-- caller-supplied @schemaPattern@. A no-op when the user does not exist.
-- (Fixed user-facing typo: "privilges" -> "privileges".)
grantUserPermissionIfExists :: Connection -> SchemaPattern -> User -> Permission -> IO ()
grantUserPermissionIfExists db schemaPattern (User uname _)
  (Permission uschemaPermission utablePermission uviewPermission uschemaPattern utablePattern uviewPattern) = do
  exist <- doesUserExist db uname
  when exist $ do
    putStrLn $ "Granting schema privileges for user: " ++ uname ++ " " ++ uschemaPermission ++ " " ++ uschemaPattern
    uschemas :: [DBObject] <-
      fmap fromOnly <$>
      query_ db [qc|
        SELECT schema_name
        FROM information_schema.schemata
        WHERE schema_name like '{uschemaPattern}'
        AND schema_name like '{schemaPattern}';
      |]
    unless (null uschemas) $
      tryExec $ execute_ db [qc|
        {grantQuery SCHEMA uname uschemas uschemaPermission}
      |]
    putStrLn $ "Granting table privileges for user: " ++ uname ++ " " ++ utablePermission ++ " " ++ utablePattern
    utables :: [DBObject] <-
      fmap fromOnly <$>
      query_ db [qc|
        SELECT schemaname || '.' || '"' || tablename || '"'
        FROM pg_tables
        WHERE schemaname like '{uschemaPattern}'
        AND tablename like '{utablePattern}'
        AND schemaname like '{schemaPattern}';
      |]
    unless (null utables) $
      tryExec $ execute_ db [qc|
        {grantQuery TABLE uname utables utablePermission}
      |]
    putStrLn $ "Granting view privileges for user: " ++ uname ++ " " ++ uviewPermission ++ " " ++ uviewPattern
    uviews :: [DBObject] <-
      fmap fromOnly <$>
      query_ db [qc|
        SELECT schemaname || '.' || '"' || viewname || '"'
        FROM pg_views
        WHERE schemaname like '{uschemaPattern}'
        AND viewname like '{uviewPattern}'
        AND schemaname like '{schemaPattern}';
      |]
    unless (null uviews) $
      tryExec $ execute_ db [qc|
        {grantQuery VIEW uname uviews uviewPermission}
      |]
-- | Drop the role; a no-op when the user does not exist. Will still fail
-- (inside 'tryExec') if the user owns objects or has remaining grants.
dropUserIfExists :: Connection -> Username -> IO ()
dropUserIfExists db uname = do
  exist <- doesUserExist db uname
  when exist $
    tryExec $ execute_ db [qc|
      DROP USER {uname};
    |]
-- | Move ownership of every schema, table and view owned by @unameOld@ to
-- @unameNew@. Runs only when BOTH users exist; each object is transferred
-- in its own statement and logged to stdout.
transferAllOwnershipIfExists :: Connection -> Username -> Username -> IO ()
transferAllOwnershipIfExists db unameOld unameNew = do
  existOld <- doesUserExist db unameOld
  existNew <- doesUserExist db unameNew
  when (existOld && existNew) $ do
    putStrLn $ "Changing all schema ownerships from user: " ++ unameOld ++ " to user: " ++ unameNew
    allschemas :: [DBObject] <-
      fmap fromOnly <$>
      query_ db [qc|
        SELECT schema_name
        FROM information_schema.schemata
        WHERE schema_owner = '{unameOld}';
      |]
    forM_ allschemas $ \schema -> do
      putStrLn schema
      tryExec $ execute_ db [qc|
        {transferOwnerQuery SCHEMA unameNew schema}
      |]
    putStrLn $ "Changing all table ownerships from user: " ++ unameOld ++ " to user: " ++ unameNew
    alltables :: [DBObject] <-
      fmap fromOnly <$>
      query_ db [qc|
        SELECT schemaname || '.' || '"' || tablename || '"'
        FROM pg_tables
        WHERE tableowner = '{unameOld}';
      |]
    forM_ alltables $ \table -> do
      putStrLn table
      tryExec $ execute_ db [qc|
        {transferOwnerQuery TABLE unameNew table}
      |]
    putStrLn $ "Changing all view ownerships from user: " ++ unameOld ++ " to user: " ++ unameNew
    allviews :: [DBObject] <-
      fmap fromOnly <$>
      query_ db [qc|
        SELECT schemaname || '.' || '"' || viewname || '"'
        FROM pg_views
        WHERE viewowner = '{unameOld}';
      |]
    forM_ allviews $ \view -> do
      putStrLn view
      tryExec $ execute_ db [qc|
        {transferOwnerQuery VIEW unameNew view}
      |]
| commercializetv/postgresql-user-manager | src/Database.hs | mit | 8,947 | 0 | 15 | 3,111 | 1,681 | 853 | 828 | 123 | 1 |
{-# OPTIONS_GHC -O0 #-}
{-# LANGUAGE TupleSections, OverloadedStrings, ExistentialQuantification #-}
module Handler.Admin.Board where
import Import
import qualified Data.Text as T
import Handler.Common (deletePosts)
import Utils.YobaMarkup (doYobaMarkup)
import Handler.Admin.Modlog (addModlogEntry)
-------------------------------------------------------------------------------------------------------------
-- | Render the board-management page: the configuration form for the
-- selected action/board plus the list of all existing boards.
getManageBoardsR :: ManageBoardAction -> Text -> Handler Html
getManageBoardsR action board = do
  maybeBoard <- runDB $ selectFirst [BoardName ==. board] []
  -- Option lists are (label, value) pairs for the form's select fields.
  groups <- map ((\x -> (x,x)) . groupName . entityVal) <$> runDB (selectList ([]::[Filter Group]) [])
  bCategories <- map (id &&& id) <$> getConfig configBoardCategories
  (formWidget, _) <- generateFormPost $ updateBoardForm maybeBoard action bCategories groups -- oops, ignored formEnctype
  boards <- runDB $ selectList ([]::[Filter Board]) []
  defaultLayout $ do
    setUltDestCurrent
    defaultTitleMsg MsgBoardManagement
    $(widgetFile "admin/boards")
-------------------------------------------------------------------------------------------------------------
-- | Build the (monadic) board-configuration form. All fields are optional:
-- for 'NewBoard' they are pre-filled with defaults, otherwise with the
-- selected board's stored values. On/off fields are encoded as the Texts
-- "Enable"/"Disable" (plus "Required" where applicable) and decoded by the
-- POST handlers.
updateBoardForm :: Maybe (Entity Board) -> -- ^ Selected board
                  ManageBoardAction -> -- ^ What you are going to do
                  [(Text,Text)] -> -- ^ Board categories
                  [(Text,Text)] -> -- ^ User groups
                  Html -> -- ^ Extra
                  MForm Handler (FormResult BoardConfigurationForm, Widget)
updateBoardForm board action bCategories groups extra = do
  msgrender <- getMessageRender
  -- helper: default value for a plain field (board's value, or the given
  -- default when creating a new board).
  let helper :: forall a. (Board -> a) -> a -> Maybe (Maybe a)
      helper g defaultValue
        | action == NewBoard = Just $ Just defaultValue
        | otherwise = (Just . g . entityVal) <$> board
      -----------------------------------------------------------------------------
      bool2Text True = Just $ Just "Enable"
      bool2Text False = Just $ Just "Disable"
      bool2Text :: Bool -> Maybe (Maybe Text)
      -----------------------------------------------------------------------------
      -- helper': like helper, but for Bool fields rendered as Enable/Disable.
      helper' g defaultValue
        | action == NewBoard = Just $ Just defaultValue
        | otherwise = maybe Nothing (bool2Text . g . entityVal) board
      onoff = map (first msgrender) [(MsgEnable,"Enable"),(MsgDisable,"Disable")]
      onoff :: [(Text, Text)]
      onoffreq = map (first msgrender) [(MsgEnable,"Enabled"),(MsgDisable,"Disabled"),(MsgRequired,"Required")]
      onoffreq :: [(Text, Text)]
      -----------------------------------------------------------------------------
      -- helper'': default straight from the board, no fallback value.
      helper'' :: forall a. (Board -> a) -> Maybe a
      helper'' g = (g . entityVal) <$> board
  (nameRes             , nameView             ) <- mopt textField     "" (helper boardName "")
  (titleRes            , titleView            ) <- mopt textField     "" (helper boardTitle "")
  (summaryRes          , summaryView          ) <- mopt textField     "" (helper boardSummary "")
  (bumpLimitRes        , bumpLimitView        ) <- mopt intField      "" (helper boardBumpLimit 500)
  (numberFilesRes      , numberFilesView      ) <- mopt intField      "" (helper boardNumberFiles 10)
  (allowedTypesRes     , allowedTypesView     ) <- mopt textField     "" (helper (pack . unwords . boardAllowedTypes) "jpg jpeg png gif webm swf rar zip 7z mp3 flac ogv txt")
  (defaultNameRes      , defaultNameView      ) <- mopt textField     "" (helper boardDefaultName "Anonymous")
  (maxMsgLengthRes     , maxMsgLengthView     ) <- mopt intField      "" (helper boardMaxMsgLength 20000)
  (thumbSizeRes        , thumbSizeView        ) <- mopt intField      "" (helper boardThumbSize 200)
  (threadsPerPageRes   , threadsPerPageView   ) <- mopt intField      "" (helper boardThreadsPerPage 10)
  (previewsPerThreadRes, previewsPerThreadView) <- mopt intField      "" (helper boardPreviewsPerThread 5)
  (threadLimitRes      , threadLimitView      ) <- mopt intField      "" (helper boardThreadLimit (-1))
  (categoryRes         , categoryView         ) <- mopt (selectFieldList bCategories) "" (helper'' boardCategory)
  (opFileRes           , opFileView           ) <- mopt (selectFieldList onoffreq) "" (helper boardOpFile "Enabled")
  (replyFileRes        , replyFileView        ) <- mopt (selectFieldList onoffreq) "" (helper boardReplyFile "Enabled")
  (isHiddenRes         , isHiddenView         ) <- mopt (selectFieldList onoff) "" (helper' boardHidden "Disable")
  (enableCaptchaRes    , enableCaptchaView    ) <- mopt (selectFieldList onoff) "" (helper' boardEnableCaptcha "Disable")
  (viewAccessRes       , viewAccessView       ) <- mopt (multiSelectFieldList groups) "" (helper'' boardViewAccess)
  (replyAccessRes      , replyAccessView      ) <- mopt (multiSelectFieldList groups) "" (helper'' boardReplyAccess)
  (threadAccessRes     , threadAccessView     ) <- mopt (multiSelectFieldList groups) "" (helper'' boardThreadAccess)
  (opModerationRes     , opModerationView     ) <- mopt (selectFieldList onoff) "" (helper' boardOpModeration "Enable")
  (extraRulesRes       , extraRulesView       ) <- mopt textareaField "" (helper (Textarea . T.intercalate ";" . boardExtraRules) (Textarea ""))
  (enableGeoIpRes      , enableGeoIpView      ) <- mopt (selectFieldList onoff) "" (helper' boardEnableGeoIp "Disable")
  (opEditingRes        , opEditingView        ) <- mopt (selectFieldList onoff) "" (helper' boardOpEditing "Enable")
  (postEditingRes      , postEditingView      ) <- mopt (selectFieldList onoff) "" (helper' boardPostEditing "Enable")
  (showEditHistoryRes  , showEditHistoryView  ) <- mopt (selectFieldList onoff) "" (helper' boardShowEditHistory "Enable")
  (showPostDateRes     , showPostDateView     ) <- mopt (selectFieldList onoff) "" (helper' boardShowPostDate "Enable")
  (enableForcedAnonRes , enableForcedAnonView ) <- mopt (selectFieldList onoff) "" (helper' boardEnableForcedAnon "Disable")
  (requiredThreadTitleRes, requiredThreadTitleView ) <- mopt (selectFieldList onoff) "" (helper' boardRequiredThreadTitle "Disable")
  (enablePMRes         , enablePMView         ) <- mopt (selectFieldList onoff) "" (helper' boardEnablePM "Disable")
  (indexRes            , indexView            ) <- mopt intField      "" (helper boardIndex 0)
  (onionRes            , onionView            ) <- mopt (selectFieldList onoff) "" (helper' boardOnion "Disable")
  -- NOTE: the field order below must match the BoardConfigurationForm
  -- constructor's argument order exactly.
  let result = BoardConfigurationForm <$>
               nameRes <*> titleRes <*> bumpLimitRes <*>
               numberFilesRes <*> allowedTypesRes <*> defaultNameRes <*>
               maxMsgLengthRes <*> thumbSizeRes <*> threadsPerPageRes <*>
               previewsPerThreadRes <*> threadLimitRes <*> opFileRes <*>
               replyFileRes <*> isHiddenRes <*> enableCaptchaRes <*>
               categoryRes <*> viewAccessRes <*> replyAccessRes <*>
               threadAccessRes <*> opModerationRes <*> extraRulesRes <*>
               enableGeoIpRes <*> opEditingRes <*> postEditingRes <*>
               showEditHistoryRes <*> showPostDateRes <*> summaryRes <*>
               enableForcedAnonRes <*> requiredThreadTitleRes <*> indexRes <*>
               enablePMRes <*> onionRes
      bname = (boardName . entityVal) <$> board
      widget = $(widgetFile "admin/boards-form")
  return (result, widget)
-------------------------------------------------------------------------------------------------------------
-- | Create a new board from the posted configuration form. Name, title,
-- allowed types, file options and all numeric limits are mandatory here
-- (unlike the update handlers, which fall back to stored values).
postNewBoardsR :: Handler Html
postNewBoardsR = do
  bCategories <- map (id &&& id) <$> getConfig configBoardCategories
  groups <- map ((\x -> (x,x)) . groupName . entityVal) <$> runDB (selectList ([]::[Filter Group]) [])
  ((result, _), _) <- runFormPost $ updateBoardForm Nothing NewBoard bCategories groups
  let msgRedirect msg = setMessageI msg >> redirect (ManageBoardsR NewBoard "")
  case result of
    FormFailure [] -> msgRedirect MsgBadFormData
    FormFailure xs -> msgRedirect (MsgError $ T.intercalate "; " xs)
    FormMissing -> msgRedirect MsgNoFormData
    FormSuccess ( BoardConfigurationForm bName bTitle bBumpLimit bNumberFiles bAllowedTypes
                  bDefaultName bMaxMsgLen bThumbSize bThreadsPerPage bPrevPerThread
                  bThreadLimit bOpFile bReplyFile bIsHidden bEnableCaptcha
                  bCategory bViewAccess bReplyAccess bThreadAccess bOpModeration
                  bExtraRules bEnableGeoIp bOpEditing bPostEditing bShowEditHistory
                  bShowPostDate bSummary bEnableForcedAnon bRequiredThreadTitle bIndex
                  bEnablePM bOnion
                ) -> do
      -- Reject the submission when any mandatory field is missing; the
      -- fromJust calls below rely on this check.
      when (any isNothing [bName, bTitle, bAllowedTypes, bOpFile, bReplyFile] ||
            any isNothing [bThreadLimit , bBumpLimit, bNumberFiles, bMaxMsgLen, bThumbSize, bThreadsPerPage, bPrevPerThread]) $
        setMessageI MsgBadFormData >> redirect (ManageBoardsR NewBoard "")
      -- Decode the Enable/Disable select values; absence means Disable.
      let onoff (Just "Enable" ) = True
          onoff (Just "Disable") = False
          onoff _ = False
      let newBoard = Board { boardName = fromJust bName
                           , boardTitle = fromJust bTitle
                           , boardSummary = fromMaybe "" bSummary
                           , boardBumpLimit = fromJust bBumpLimit
                           , boardNumberFiles = fromJust bNumberFiles
                           , boardAllowedTypes = words $ unpack $ fromJust bAllowedTypes
                           , boardDefaultName = fromMaybe "" bDefaultName
                           , boardMaxMsgLength = fromJust bMaxMsgLen
                           , boardThumbSize = fromJust bThumbSize
                           , boardThreadsPerPage = fromJust bThreadsPerPage
                           , boardPreviewsPerThread = fromJust bPrevPerThread
                           , boardThreadLimit = fromJust bThreadLimit
                           , boardOpFile = fromJust bOpFile
                           , boardReplyFile = fromJust bReplyFile
                           , boardHidden = onoff bIsHidden
                           , boardEnableCaptcha = onoff bEnableCaptcha
                           , boardCategory = bCategory
                           , boardViewAccess = bViewAccess
                           , boardReplyAccess = bReplyAccess
                           , boardThreadAccess = bThreadAccess
                           , boardOpModeration = onoff bOpModeration
                           , boardExtraRules = maybe [] (T.split (==';') . unTextarea) bExtraRules
                           , boardEnableGeoIp = onoff bEnableGeoIp
                           , boardOpEditing = onoff bOpEditing
                           , boardPostEditing = onoff bPostEditing
                           , boardShowEditHistory = onoff bShowEditHistory
                           , boardShowPostDate = onoff bShowPostDate
                           , boardEnableForcedAnon = onoff bEnableForcedAnon
                           , boardRequiredThreadTitle = onoff bRequiredThreadTitle
                           , boardEnablePM = onoff bEnablePM
                           , boardIndex = fromMaybe 0 bIndex
                           , boardOnion = onoff bOnion
                           }
      void $ runDB $ insert newBoard
      addModlogEntry $ MsgModlogNewBoard (fromJust bName)
      msgRedirect MsgBoardAdded
-- | Apply the posted configuration to EVERY board. Empty form fields keep
-- each board's existing value (every field is merged against the board
-- being updated inside the forM_ loop).
postAllBoardsR :: Handler Html
postAllBoardsR = do
  bCategories <- map (id &&& id) <$> getConfig configBoardCategories
  groups <- map ((\x -> (x,x)) . groupName . entityVal) <$> runDB (selectList ([]::[Filter Group]) [])
  ((result, _), _) <- runFormPost $ updateBoardForm Nothing AllBoards bCategories groups
  let msgRedirect msg = setMessageI msg >> redirect (ManageBoardsR AllBoards "")
  case result of
    FormFailure [] -> msgRedirect MsgBadFormData
    FormFailure xs -> msgRedirect (MsgError $ T.intercalate "; " xs)
    FormMissing -> msgRedirect MsgNoFormData
    FormSuccess ( BoardConfigurationForm _ bTitle bBumpLimit bNumberFiles bAllowedTypes
                  bDefaultName bMaxMsgLen bThumbSize bThreadsPerPage bPrevPerThread
                  bThreadLimit bOpFile bReplyFile bIsHidden bEnableCaptcha
                  bCategory bViewAccess bReplyAccess bThreadAccess bOpModeration
                  bExtraRules bEnableGeoIp bOpEditing bPostEditing bShowEditHistory
                  bShowPostDate bSummary bEnableForcedAnon bRequiredThreadTitle bIndex
                  bEnablePM bOnion
                ) -> do
      boards <- runDB $ selectList ([]::[Filter Board]) []
      forM_ boards (\(Entity oldBoardId oldBoard) ->
        -- onoff: Enable/Disable selections override; anything else keeps
        -- the board's current flag.
        let onoff (Just "Enable" ) _ = True
            onoff (Just "Disable") _ = False
            onoff _ f = f oldBoard
            newBoard = Board { boardName = boardName oldBoard
                             , boardTitle = fromMaybe (boardTitle oldBoard) bTitle
                             , boardSummary = fromMaybe (boardSummary oldBoard) bSummary
                             , boardBumpLimit = fromMaybe (boardBumpLimit oldBoard) bBumpLimit
                             , boardNumberFiles = fromMaybe (boardNumberFiles oldBoard) bNumberFiles
                             , boardAllowedTypes = maybe (boardAllowedTypes oldBoard) (words . unpack) bAllowedTypes
                             , boardDefaultName = fromMaybe (boardDefaultName oldBoard) bDefaultName
                             , boardMaxMsgLength = fromMaybe (boardMaxMsgLength oldBoard) bMaxMsgLen
                             , boardThumbSize = fromMaybe (boardThumbSize oldBoard) bThumbSize
                             , boardThreadsPerPage = fromMaybe (boardThreadsPerPage oldBoard) bThreadsPerPage
                             , boardPreviewsPerThread = fromMaybe (boardPreviewsPerThread oldBoard) bPrevPerThread
                             , boardThreadLimit = fromMaybe (boardThreadLimit oldBoard) bThreadLimit
                             , boardOpFile = fromMaybe (boardOpFile oldBoard) bOpFile
                             , boardReplyFile = fromMaybe (boardReplyFile oldBoard) bReplyFile
                             , boardHidden = onoff bIsHidden boardHidden
                             , boardEnableCaptcha = onoff bEnableCaptcha boardEnableCaptcha
                             , boardCategory = mplus bCategory (boardCategory oldBoard)
                             , boardViewAccess = mplus bViewAccess (boardViewAccess oldBoard)
                             , boardReplyAccess = mplus bReplyAccess (boardReplyAccess oldBoard)
                             , boardThreadAccess = mplus bThreadAccess (boardThreadAccess oldBoard)
                             , boardOpModeration = onoff bOpModeration boardOpModeration
                             , boardExtraRules = maybe (boardExtraRules oldBoard) (T.split (==';') . unTextarea) bExtraRules
                             , boardEnableGeoIp = onoff bEnableGeoIp boardEnableGeoIp
                             , boardOpEditing = onoff bOpEditing boardOpEditing
                             , boardPostEditing = onoff bPostEditing boardPostEditing
                             , boardShowEditHistory = onoff bShowEditHistory boardShowEditHistory
                             , boardShowPostDate = onoff bShowPostDate boardShowPostDate
                             , boardEnableForcedAnon = onoff bEnableForcedAnon boardEnableForcedAnon
                             , boardRequiredThreadTitle= onoff bRequiredThreadTitle boardRequiredThreadTitle
                             , boardEnablePM = onoff bEnablePM boardEnablePM
                             , boardIndex = fromMaybe 0 bIndex
                             , boardOnion = onoff bOnion boardOnion
                             }
        in runDB $ replace oldBoardId newBoard)
      addModlogEntry $ MsgModlogUpdateAllBoards
      msgRedirect MsgBoardsUpdated
-- | Update a single board's configuration. Fields left blank in the form
-- fall back to the board's stored values (summary and default name fall
-- back to "", matching the previous behaviour).
postUpdateBoardsR :: Text -> Handler Html
postUpdateBoardsR board = do
  maybeBoard <- runDB $ selectFirst [BoardName ==. board] []
  bCategories <- map (id &&& id) <$> getConfig configBoardCategories
  groups <- map ((\x -> (x,x)) . groupName . entityVal) <$> runDB (selectList ([]::[Filter Group]) [])
  ((result, _), _) <- runFormPost $ updateBoardForm maybeBoard UpdateBoard bCategories groups
  let msgRedirect msg = setMessageI msg >> redirect (ManageBoardsR UpdateBoard board)
  case result of
    FormFailure [] -> msgRedirect MsgBadFormData
    FormFailure xs -> msgRedirect (MsgError $ T.intercalate "; " xs)
    FormMissing -> msgRedirect MsgNoFormData
    FormSuccess ( BoardConfigurationForm bName bTitle bBumpLimit bNumberFiles bAllowedTypes
                  bDefaultName bMaxMsgLen bThumbSize bThreadsPerPage bPrevPerThread
                  bThreadLimit bOpFile bReplyFile bIsHidden bEnableCaptcha
                  bCategory bViewAccess bReplyAccess bThreadAccess bOpModeration
                  bExtraRules bEnableGeoIp bOpEditing bPostEditing bShowEditHistory
                  bShowPostDate bSummary bEnableForcedAnon bRequiredThreadTitle bIndex
                  bEnablePM bOnion
                ) -> do
      let oldBoard = entityVal $ fromJust maybeBoard
          oldBoardId = entityKey $ fromJust maybeBoard
          -- onoff: Enable/Disable selections override; anything else keeps
          -- the board's current flag.
          onoff (Just "Enable" ) _ = True
          onoff (Just "Disable") _ = False
          onoff _ f = f oldBoard
          newBoard = Board { boardName = fromMaybe (boardName oldBoard) bName
                           , boardTitle = fromMaybe (boardTitle oldBoard) bTitle
                           , boardSummary = fromMaybe "" bSummary
                           , boardBumpLimit = fromMaybe (boardBumpLimit oldBoard) bBumpLimit
                           , boardNumberFiles = fromMaybe (boardNumberFiles oldBoard) bNumberFiles
                           , boardAllowedTypes = maybe (boardAllowedTypes oldBoard) (words . unpack) bAllowedTypes
                           , boardDefaultName = fromMaybe "" bDefaultName
                           , boardMaxMsgLength = fromMaybe (boardMaxMsgLength oldBoard) bMaxMsgLen
                           , boardThumbSize = fromMaybe (boardThumbSize oldBoard) bThumbSize
                           , boardThreadsPerPage = fromMaybe (boardThreadsPerPage oldBoard) bThreadsPerPage
                           , boardPreviewsPerThread = fromMaybe (boardPreviewsPerThread oldBoard) bPrevPerThread
                           , boardThreadLimit = fromMaybe (boardThreadLimit oldBoard) bThreadLimit
                           , boardOpFile = fromMaybe (boardOpFile oldBoard) bOpFile
                           , boardReplyFile = fromMaybe (boardReplyFile oldBoard) bReplyFile
                           , boardHidden = onoff bIsHidden boardHidden
                           , boardEnableCaptcha = onoff bEnableCaptcha boardEnableCaptcha
                           -- BUG FIX: these four previously used `mplus <field> Nothing`,
                           -- which wiped the stored value whenever the form field was
                           -- left blank; keep the old value instead, as postAllBoardsR does.
                           , boardCategory = mplus bCategory (boardCategory oldBoard)
                           , boardViewAccess = mplus bViewAccess (boardViewAccess oldBoard)
                           , boardReplyAccess = mplus bReplyAccess (boardReplyAccess oldBoard)
                           , boardThreadAccess = mplus bThreadAccess (boardThreadAccess oldBoard)
                           , boardOpModeration = onoff bOpModeration boardOpModeration
                           , boardExtraRules = maybe (boardExtraRules oldBoard) (T.split (==';') . unTextarea) bExtraRules
                           , boardEnableGeoIp = onoff bEnableGeoIp boardEnableGeoIp
                           , boardOpEditing = onoff bOpEditing boardOpEditing
                           , boardPostEditing = onoff bPostEditing boardPostEditing
                           , boardShowPostDate = onoff bShowPostDate boardShowPostDate
                           , boardShowEditHistory = onoff bShowEditHistory boardShowEditHistory
                           , boardEnableForcedAnon = onoff bEnableForcedAnon boardEnableForcedAnon
                           , boardRequiredThreadTitle= onoff bRequiredThreadTitle boardRequiredThreadTitle
                           , boardEnablePM = onoff bEnablePM boardEnablePM
                           , boardIndex = fromMaybe 0 bIndex
                           , boardOnion = onoff bOnion boardOnion
                           }
      runDB $ replace oldBoardId newBoard
      -- BUG FIX: was `fromJust bName`, which crashed when the (optional)
      -- name field was left blank; fall back to the existing name.
      addModlogEntry $ MsgModlogUpdateBoard (fromMaybe (boardName oldBoard) bName)
      msgRedirect MsgBoardsUpdated
-------------------------------------------------------------------------------------------------------------
-- | Delete every post on the given board ('UpdateBoard'), or on all boards
-- ('AllBoards'). 'NewBoard' has no board to clean and redirects with an
-- error, as does a missing board name.
cleanBoard :: ManageBoardAction -> Text -> Handler ()
cleanBoard action board = case action of
  AllBoards -> do
    boards <- runDB $ selectList ([]::[Filter Board ]) []
    postIDs <- forM boards $ \(Entity _ b) -> runDB $ selectList [PostBoard ==. boardName b] []
    void $ deletePosts (concat postIDs) False
    addModlogEntry $ MsgModlogCleanAllBoards
  NewBoard -> msgRedirect MsgNoSuchBoard
  _ -> do
    maybeBoard <- runDB $ selectFirst [BoardName ==. board] []
    when (isNothing maybeBoard) $ msgRedirect MsgNoSuchBoard
    postIDs <- runDB $ selectList [PostBoard ==. board] []
    void $ deletePosts postIDs False
    addModlogEntry $ MsgModlogCleanBoard board
  where msgRedirect msg = setMessageI msg >> redirect (ManageBoardsR UpdateBoard board)
-- | Clean the board(s) selected by @action@, then bounce back to the
-- update form with a confirmation message.
getCleanBoardR :: ManageBoardAction -> Text -> Handler ()
getCleanBoardR action board =
  cleanBoard action board
    >> setMessageI MsgBoardCleaned
    >> redirect (ManageBoardsR UpdateBoard board)
-- | Delete the board(s) selected by @action@: first remove their posts via
-- 'cleanBoard', then the board records themselves, logging to the modlog.
getDeleteBoardR :: ManageBoardAction -> Text -> Handler ()
getDeleteBoardR action board = do
  cleanBoard action board
  case action of
    AllBoards -> addModlogEntry MsgModlogDeleteAllBoards >> (runDB $ deleteWhere ([]::[Filter Board ]))
    _ -> addModlogEntry (MsgModlogDeleteBoard board) >> (runDB $ deleteWhere [BoardName ==. board])
  setMessageI MsgBoardDeleted
  redirect (ManageBoardsR AllBoards "")
-- | Re-run the markup formatter over stored raw post messages: for one
-- board ('UpdateBoard') or for every board (any other action). Posts with
-- an empty raw message are skipped.
-- NOTE(review): both branches duplicate the per-post rebuild loop; a shared
-- helper would remove the duplication.
getRebuildPostsMessagesOnBoardR :: ManageBoardAction -> Text -> Handler ()
getRebuildPostsMessagesOnBoardR action board = case action of
  UpdateBoard -> do
    posts <- runDB $ selectList [PostBoard ==. board] []
    void $ forM posts $ \(Entity pKey pVal) ->
      when ((/="") $ postRawMessage pVal ) $ do
        messageFormatted <- doYobaMarkup (Just $ Textarea $ postRawMessage pVal) (postBoard pVal) (postParent pVal)
        runDB $ update pKey [PostMessage =. messageFormatted]
    addModlogEntry $ MsgModlogRebuildPostsMessagesOn board
    msgRedirect MsgBoardsUpdated
  _ -> do
    boards <- runDB $ selectList ([]::[Filter Board ]) []
    void $ forM boards $ \(Entity _ b) -> do
      posts <- runDB $ selectList [PostBoard ==. boardName b] []
      void $ forM posts $ \(Entity pKey pVal) ->
        when ((/="") $ postRawMessage pVal ) $ do
          messageFormatted <- doYobaMarkup (Just $ Textarea $ postRawMessage pVal) (postBoard pVal) (postParent pVal)
          runDB $ update pKey [PostMessage =. messageFormatted]
    addModlogEntry $ MsgModlogRebuildPostsMessages
    msgRedirect MsgBoardsUpdated
  where msgRedirect msg = setMessageI msg >> redirect (ManageBoardsR AllBoards "")
-------------------------------------------------------------------------------------------------------------
-- | Pick the POST endpoint matching a management action.
-- For 'UpdateBoard' the board name must be present; 'fromJust' preserves
-- the original contract (crashes on 'Nothing').
chooseManageBoarUrl :: ManageBoardAction -> Maybe Text -> Route App
chooseManageBoarUrl action mbName =
  case action of
    NewBoard    -> NewBoardsR
    AllBoards   -> AllBoardsR
    UpdateBoard -> UpdateBoardsR (fromJust mbName)
| ahushh/Monaba | monaba/src/Handler/Admin/Board.hs | mit | 24,937 | 0 | 41 | 8,372 | 5,479 | 2,776 | 2,703 | -1 | -1 |
{- |
Module: Capnp.IO
Description: Utilities for reading and writing values to handles.
This module provides utilities for reading and writing values to and
from file 'Handle's.
-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
module Capnp.IO
( sGetMsg
, sPutMsg
, M.hGetMsg
, M.getMsg
, M.hPutMsg
, M.putMsg
, hGetParsed
, sGetParsed
, getParsed
, hPutParsed
, sPutParsed
, putParsed
, hGetRaw
, getRaw
, sGetRaw
) where
import Data.Bits
import Control.Exception (throwIO)
import Control.Monad.Trans.Class (lift)
import Network.Simple.TCP (Socket, recv, sendLazy)
import System.IO (Handle, stdin, stdout)
import System.IO.Error (eofErrorType, mkIOError)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Builder as BB
import Capnp.Bits (WordCount, wordsToBytes)
import Capnp.Convert
(msgToLBS, msgToParsed, msgToRaw, parsedToBuilder, parsedToLBS)
import Capnp.Message (Mutability(..))
import Capnp.New.Classes (Parse)
import Capnp.TraversalLimit (evalLimitT)
import qualified Capnp.Message as M
import qualified Capnp.Repr as R
-- | Like 'hGetMsg', except that it takes a socket instead of a 'Handle'.
-- Reads a complete Cap'n Proto message from the socket, subject to the
-- given traversal/read limit.
sGetMsg :: Socket -> WordCount -> IO (M.Message 'Const)
sGetMsg socket limit =
    evalLimitT limit $ M.readMessage (lift read32) (lift . readSegment)
  where
    -- Assemble a 32-bit word from 4 bytes, least-significant byte first
    -- (the framing header is little-endian).
    read32 = do
        bytes <- recvFull 4
        pure $
            (fromIntegral (bytes `BS.index` 0) `shiftL` 0) .|.
            (fromIntegral (bytes `BS.index` 1) `shiftL` 8) .|.
            (fromIntegral (bytes `BS.index` 2) `shiftL` 16) .|.
            (fromIntegral (bytes `BS.index` 3) `shiftL` 24)
    readSegment !words =
        M.fromByteString <$> recvFull (fromIntegral $ wordsToBytes words)

    -- | Like recv, but (1) never returns less than `count` bytes, (2)
    -- uses `socket`, rather than taking the socket as an argument, and (3)
    -- throws an EOF exception when the connection is closed.
    recvFull :: Int -> IO BS.ByteString
    recvFull !count = do
        maybeBytes <- recv socket count
        case maybeBytes of
            Nothing ->
                throwIO $ mkIOError eofErrorType "Remote socket closed" Nothing Nothing
            Just bytes
                | BS.length bytes == count ->
                    pure bytes
                | otherwise ->
                    -- Short read: keep what we have and recurse for the rest.
                    (bytes <>) <$> recvFull (count - BS.length bytes)
-- | Like 'hPutMsg', except that it takes a 'Socket' instead of a 'Handle'.
sPutMsg :: Socket -> M.Message 'Const -> IO ()
sPutMsg socket msg = sendLazy socket (msgToLBS msg)
-- | Read a struct from the handle in its parsed form. The supplied limit
-- bounds both the message read and the conversion traversal.
hGetParsed :: forall a pa. (R.IsStruct a, Parse a pa) => Handle -> WordCount -> IO pa
hGetParsed handle limit =
    M.hGetMsg handle limit >>= evalLimitT limit . msgToParsed @a
-- | Read a struct from the socket in its parsed form. The supplied limit
-- bounds both the message read and the conversion traversal.
sGetParsed :: forall a pa. (R.IsStruct a, Parse a pa) => Socket -> WordCount -> IO pa
sGetParsed socket limit =
    sGetMsg socket limit >>= evalLimitT limit . msgToParsed @a
-- | Read a struct from stdin in its parsed form, using the supplied
-- read limit. Specialisation of 'hGetParsed' to 'stdin'.
getParsed :: (R.IsStruct a, Parse a pa) => WordCount -> IO pa
getParsed = hGetParsed stdin
-- | Write the parsed form of a struct to the handle. Serialization uses
-- an unbounded traversal limit.
hPutParsed :: (R.IsStruct a, Parse a pa) => Handle -> pa -> IO ()
hPutParsed h value =
    evalLimitT maxBound (parsedToBuilder value) >>= BB.hPutBuilder h
-- | Write the parsed form of a struct to stdout.
-- Specialisation of 'hPutParsed' to 'stdout'.
putParsed :: (R.IsStruct a, Parse a pa) => pa -> IO ()
putParsed = hPutParsed stdout
-- | Write the parsed form of a struct to the socket. Serialization uses
-- an unbounded traversal limit.
sPutParsed :: (R.IsStruct a, Parse a pa) => Socket -> pa -> IO ()
sPutParsed socket value =
    evalLimitT maxBound (parsedToLBS value) >>= sendLazy socket
-- | Read a struct from the handle using the supplied read limit,
-- and return its root pointer.
hGetRaw :: R.IsStruct a => Handle -> WordCount -> IO (R.Raw a 'Const)
hGetRaw h limit =
    M.hGetMsg h limit >>= evalLimitT limit . msgToRaw
-- | Read a struct from stdin using the supplied read limit,
-- and return its root pointer. Specialisation of 'hGetRaw' to 'stdin'.
getRaw :: R.IsStruct a => WordCount -> IO (R.Raw a 'Const)
getRaw = hGetRaw stdin
-- | Read a struct from the socket using the supplied read limit,
-- and return its root pointer.
sGetRaw :: R.IsStruct a => Socket -> WordCount -> IO (R.Raw a 'Const)
sGetRaw socket limit =
    sGetMsg socket limit >>= evalLimitT limit . msgToRaw
| zenhack/haskell-capnp | lib/Capnp/IO.hs | mit | 4,853 | 0 | 18 | 1,192 | 1,219 | 649 | 570 | 93 | 2 |
module Signals where
-- | Audio sample rate (44.1 kHz), polymorphic so it can be used in both
-- integral and fractional contexts.
samplesPerSecond :: (Num a) => a
samplesPerSecond = 44100
-- | The raw numeric type carried by every signal sample.
type SafeValue = Float
-- | A single sample of a signal.
data SignalValue = SignalValue SafeValue
-- | A (possibly infinite) stream of samples at 'samplesPerSecond'.
data Signal = Signal [SignalValue]
-- | An infinite signal holding a constant value. ('repeat' replaces the
-- old @[val, val..]@, which leaned on Float's Enum instance with a zero
-- step to produce the same infinite run of equal samples.)
flatSignal :: SafeValue -> Signal
flatSignal val = toSignal (repeat val)
-- Need to make this be ok with signals that end pretty soon or I'll be in trouble.
-- | Keep only the first @s@ seconds of a signal. (Added the explicit
-- signature; it matches the previously inferred type.)
takeSeconds :: RealFrac a => a -> Signal -> Signal
takeSeconds s (Signal sigdata) = Signal $ take (floor (s * samplesPerSecond)) sigdata
-- | Drop the first @s@ seconds of a signal.
dropSeconds :: RealFrac a => a -> Signal -> Signal
dropSeconds s (Signal sigdata) = Signal $ drop (floor (s * samplesPerSecond)) sigdata
-- | Signals whose samples carry a particular interpretation (frequency,
-- amplitude, ...). 'specialize' tags a raw 'Signal'; 'sanitize' extracts
-- raw values clamped to whatever range the interpretation allows.
class SpecializedSignal s where
  specialize :: Signal -> s
  sanitize :: s -> [SafeValue]
-- | A signal interpreted as frequencies in Hz; sanitizing clamps every
-- sample into [0, 20000].
data FrequencySignal = FrequencySignal [SignalValue]
instance SpecializedSignal FrequencySignal where
  specialize (Signal vals) = FrequencySignal vals
  sanitize (FrequencySignal vals) = map clamp vals
    where clamp (SignalValue v)
            | v < 0     = 0
            | v > 20000 = 20000
            | otherwise = v
-- | A signal interpreted as amplitude. Sanitizing is currently a plain
-- unwrap (no clamping) so LFO-style modulators can exceed [0, 1].
data AmplitudeSignal = AmplitudeSignal [SignalValue]
instance SpecializedSignal AmplitudeSignal where
  specialize (Signal vals) = AmplitudeSignal vals
  sanitize (AmplitudeSignal vals) = map unwrap vals
    where unwrap (SignalValue v) = v
-- Clamping was dropped because of LFOs; a dedicated type may bring it back:
--    sanitize (SignalValue sigvalue) | sigvalue < 0 = 0
--                                    | sigvalue > 1 = 1
--                                    | otherwise = sigvalue
-- | A signal interpreted as a PWM duty modulator; sanitizing clamps every
-- sample into [-1, 1].
data PWMSignal = PWMSignal [SignalValue]
instance SpecializedSignal PWMSignal where
  specialize (Signal vals) = PWMSignal vals
  sanitize (PWMSignal vals) = map clamp vals
    where clamp (SignalValue v)
            | v < -1    = -1
            | v > 1     = 1
            | otherwise = v
-- | A signal ready for audio output; sanitizing clamps every sample into
-- [-1, 1].
data SoundSignal = SoundSignal [SignalValue]
instance SpecializedSignal SoundSignal where
  specialize (Signal vals) = SoundSignal vals
  sanitize (SoundSignal vals) = map clamp vals
    where clamp (SignalValue v)
            | v < -1    = -1
            | v > 1     = 1
            | otherwise = v
-- | Wrap raw sample values into a 'Signal'.
toSignal :: [SafeValue] -> Signal
toSignal = Signal . map SignalValue
-- | Unwrap a 'Signal' back into its raw sample values.
fromSignal :: Signal -> [SafeValue]
fromSignal (Signal sigvals) = [v | SignalValue v <- sigvals]
-- | Concatenate signals end to end.
catSignals :: [Signal] -> Signal
catSignals = toSignal . concatMap fromSignal
-- TODO: drop empty signals first as a memory-saving technique (this was
-- the intent of the old unused `clearEmptySignals` local).
| orblivion/Haskell-Synth | Signals.hs | mit | 2,752 | 0 | 12 | 720 | 692 | 352 | 340 | 46 | 1 |
-- | Double the argument.
f :: Num a => a -> a
f x = 2 * x
-- | Square the argument.
square :: Num a => a -> a
square x = x * x
-- | Small demo dispatch: 0 -> 1, 1 -> 7, anything else is doubled.
foo :: (Eq a, Num a) => a -> a
foo x =
  case x of
    0 -> 1
    1 -> 7
    _ -> 2 * x
| MaxHorstmann/myfirsthaskell | test.hs | mit | 90 | 4 | 8 | 47 | 63 | 31 | 32 | 7 | 3 |
-----------------------------------------------------------------------------
-- |
-- Module : Language.Krill.Parser.Location
-- Description : Source location and span information
-- Maintainer : coskuacay@gmail.com
-- Stability : experimental
-----------------------------------------------------------------------------
module Language.Krill.Parser.Location
(
-- * Source location
SrcLoc
, makeSrcLoc
, srcFile, srcAbs, srcLine, srcCol
-- * Source span
, SrcSpan
, makeSrcSpan
, srcLocSpan
, makeSrcSpanLength, makeSrcSpanLengthEnd
, mergeSrcSpan
, spanFile, spanSLine, spanSCol, spanELine, spanECol
-- * Types with location information
, Located (..)
, mergeLocated
, Loc
, makeLoc, unLoc
) where
import Text.PrettyPrint
import Text.PrettyPrint.HughesPJClass (Pretty (..), prettyShow)
-- | Represents a single point within a file. Refer to 'locInvariant':
-- offset, line and column are all expected to be positive.
data SrcLoc = SrcLoc
  { srcFile :: !FilePath -- ^ path to the source file
  , srcAbs :: !Int -- ^ absolute character offset (positive, per 'locInvariant')
  , srcLine :: !Int -- ^ line number, counting from 1
  , srcCol :: !Int -- ^ column number, counting from 1
  }
  deriving (Eq, Ord)
-- | Construct a 'SrcLoc' given the file, absolute character offset,
-- line number, and column number. The numeric arguments should be
-- positive (see 'locInvariant'); this constructor does not validate.
makeSrcLoc :: FilePath -> Int -> Int -> Int -> SrcLoc
makeSrcLoc = SrcLoc
-- | Sanity check on a 'SrcLoc': absolute offset, line, and column
-- must all be positive.
locInvariant :: SrcLoc -> Bool
locInvariant loc = all (> 0) [srcAbs loc, srcLine loc, srcCol loc]
-- | Delimits a portion of a text file. The end position is defined
-- to be the column /after/ the end of the span. That is, a span of
-- (1,1)-(1,2) is one character long, and a span of (1,1)-(1,1) is
-- zero characters long.
data SrcSpan = SrcSpan
  { spanFile :: !FilePath -- ^ path to the source file
  , spanSLine :: !Int -- ^ start line, counting from 1
  , spanSCol :: !Int -- ^ start column, counting from 1
  , spanELine :: !Int -- ^ end line
  , spanECol :: !Int -- ^ end column (the column /after/ the last character)
  }
  deriving (Eq, Ord)
-- | Construct a span from a start and an end location. Both locations
-- are assumed to be in the same source file; the file is taken from the
-- start location. Also note 'spanInvariant'.
makeSrcSpan :: SrcLoc -> SrcLoc -> SrcSpan
makeSrcSpan start end =
  SrcSpan (srcFile start)
          (srcLine start) (srcCol start)
          (srcLine end)   (srcCol end)
-- | Construct a span covering @len@ characters starting at @start@.
-- The resulting span starts and ends on the same line.
makeSrcSpanLength :: SrcLoc -> Int -> SrcSpan
makeSrcSpanLength start len =
  makeSrcSpan start start { srcCol = srcCol start + len }
-- | Construct a span covering @len@ characters that ends at @end@.
-- The span starts and ends on the same line; the given length /must/
-- be less than the current position on the line.
makeSrcSpanLengthEnd :: SrcLoc -> Int -> SrcSpan
makeSrcSpanLengthEnd end len =
  makeSrcSpan end { srcCol = srcCol end - len } end
-- | Create a 'SrcSpan' corresponding to a single point
srcLocSpan :: SrcLoc -> SrcSpan
srcLocSpan loc = makeSrcSpan loc loc
-- All 'SrcSpan' values should satisfy this invariant.
-- NOTE(review): requiring spanSCol <= spanECol looks too strong for
-- multi-line spans (e.g. (2,10)-(3,1) would fail) -- confirm whether
-- such spans can actually occur.
spanInvariant :: SrcSpan -> Bool
spanInvariant s =
  and [spanSLine s <= spanELine s, spanSCol s <= spanECol s]
{--------------------------------------------------------------------------
Operations
--------------------------------------------------------------------------}
-- | Fuse two spans together. Both spans need to be in the same file.
mergeSrcSpan :: SrcSpan -> SrcSpan -> SrcSpan
-- Normalise the argument order first: the derived 'Ord' compares
-- (file, start line, start col, ...) lexicographically, so after this
-- guard s1 starts no later than s2. The guard must stay above the
-- second equation -- the equations are tried in order.
mergeSrcSpan s1 s2 | s1 > s2 = mergeSrcSpan s2 s1
-- The merged span runs from s1's start to s2's end; the file is taken
-- from s1 (both are assumed to be in the same file).
mergeSrcSpan s1 s2 = SrcSpan
  { spanFile = spanFile s1
  , spanSLine = spanSLine s1
  , spanSCol = spanSCol s1
  , spanELine = spanELine s2
  , spanECol = spanECol s2
  }
{--------------------------------------------------------------------------
  Located class
--------------------------------------------------------------------------}

-- | An object with an attached 'SrcSpan'
class Located t where
  location :: t -> SrcSpan

-- A span locates itself.
instance Located SrcSpan where
  location = id

-- A 'Loc' wrapper carries its span in its first field.
instance Located (Loc a) where
  location (Loc s _) = s
-- | Merge the 'SrcSpan's of two 'Located' objects into one covering span
-- (via 'mergeSrcSpan'; both must be in the same file).
mergeLocated :: (Located t1, Located t2) => t1 -> t2 -> SrcSpan
mergeLocated t1 t2 = mergeSrcSpan (location t1) (location t2)
-- | Default way to attach location information to a value.
data Loc a = Loc SrcSpan a

-- | Attach a 'SrcSpan' to a value.
makeLoc :: SrcSpan -> a -> Loc a
makeLoc = Loc

-- | Get the data out of a 'Loc'
unLoc :: Loc a -> a
unLoc (Loc _ a) = a
{--------------------------------------------------------------------------
Printing
--------------------------------------------------------------------------}
-- Rendered as "file:line,col"; the absolute offset is not printed.
instance Pretty SrcLoc where
  pPrint (SrcLoc f _ l c) = text f <> colon <> pPrint l <> comma <> pPrint c

-- Rendered as "file:sl,sc-el,ec"; when the span stays on one line the
-- end is abbreviated to just the end column ("file:sl,sc-ec").
instance Pretty SrcSpan where
  pPrint s = text (spanFile s) <> colon <> start <> text "-" <> end
    where
      SrcSpan { spanSLine = sl, spanSCol = sc
              , spanELine = el, spanECol = ec } = s
      start :: Doc
      start = pPrint sl <> comma <> pPrint sc
      end :: Doc
      end | sl == el = pPrint ec
          | otherwise = pPrint el <> comma <> pPrint ec
-- Rendered as "<value> (at <span>)".
instance Pretty e => Pretty (Loc e) where
  pPrint l = pPrint (unLoc l) <+> parens (text "at" <+> pPrint (location l))

-- 'Show' delegates to the pretty-printer for both location types.
instance Show SrcLoc where
  show = prettyShow

instance Show SrcSpan where
  show = prettyShow

instance Show e => Show (Loc e) where
  show l = show (unLoc l) ++ " (at " ++ show (location l) ++ ")"
| cacay/language-sill | Language/Krill/Parser/Location.hs | mit | 5,322 | 0 | 12 | 1,088 | 1,159 | 626 | 533 | 107 | 1 |
{- Problem
(*) Find the last element of a list.
Example in Haskell:
Prelude> myLast [1,2,3,4]
4
Prelude> myLast ['x','y','z']
'z'
-}
-- | Return the last element of a list. Like Prelude's 'last', this is
-- partial: it raises 'error' on the empty list.
myLast :: [a] -> a
myLast []     = error "myLast does not work on empty list!"
myLast [x]    = x
myLast (_:xs) = myLast xs  -- head element is unused; avoid a binder warning
| gaoce/haskell_99 | 01.hs | mit | 252 | 0 | 7 | 50 | 56 | 29 | 27 | 4 | 1 |
{-# LANGUAGE ViewPatterns #-}
{-
Copyright (C) 2014 Matthew Pickering <matthewtpickering@gmail.com>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Readers.Txt2Tags
Copyright : Copyright (C) 2014 Matthew Pickering
License : GNU GPL, version 2 or above
Maintainer : Matthew Pickering <matthewtpickering@gmail.com>
Conversion of txt2tags formatted plain text to 'Pandoc' document.
-}
module Text.Pandoc.Readers.Txt2Tags ( readTxt2Tags
, getT2TMeta
, T2TMeta (..)
, readTxt2TagsNoMacros)
where
import qualified Text.Pandoc.Builder as B
import Text.Pandoc.Builder ( Inlines, Blocks, (<>)
, trimInlines )
import Text.Pandoc.Definition
import Text.Pandoc.Options
import Text.Pandoc.Shared (escapeURI,compactify', compactify'DL)
import Text.Pandoc.Parsing hiding (space, spaces, uri, macro)
import Control.Applicative ((<$>), (<$), (<*>), (<*), (*>))
import Data.Char (toLower)
import Data.List (transpose, intersperse, intercalate)
import Data.Maybe (fromMaybe)
import Data.Monoid (Monoid, mconcat, mempty, mappend)
--import Network.URI (isURI) -- Not sure whether to use this function
import Control.Monad (void, guard, when)
import Data.Default
import Control.Monad.Reader (Reader, runReader, asks)
import Data.Time.LocalTime (getZonedTime)
import Text.Pandoc.Compat.Directory(getModificationTime)
import Data.Time.Format (formatTime)
import Text.Pandoc.Compat.Locale (defaultTimeLocale)
import System.IO.Error (catchIOError)
type T2T = ParserT String ParserState (Reader T2TMeta)
-- | An object for the T2T macros meta information
-- the contents of each field is simply substituted verbatim into the file
data T2TMeta = T2TMeta {
date :: String -- ^ Current date
, mtime :: String -- ^ Last modification time of infile
, infile :: FilePath -- ^ Input file
, outfile :: FilePath -- ^ Output file
} deriving Show
instance Default T2TMeta where
def = T2TMeta "" "" "" ""
-- | Get the meta information required by Txt2Tags macros:
-- today's date (ISO "%F"), the newest modification time ("%T") among the
-- input files, the comma-separated input file list, and the output file.
getT2TMeta :: [FilePath] -> FilePath -> IO T2TMeta
getT2TMeta inps out = do
    curDate <- formatTime defaultTimeLocale "%F" <$> getZonedTime
    let getModTime = fmap (formatTime defaultTimeLocale "%T") .
                       getModificationTime
    -- With no inputs fall back to the current time; 'maximum' is safe in
    -- the second branch because inps is non-empty there. Any IO error
    -- while stat-ing the files degrades to an empty mtime string.
    curMtime <- case inps of
        [] -> formatTime defaultTimeLocale "%T" <$> getZonedTime
        _ -> catchIOError
                (maximum <$> mapM getModTime inps)
                (const (return ""))
    return $ T2TMeta curDate curMtime (intercalate ", " inps) out
-- | Read Txt2Tags from an input string returning a Pandoc document
readTxt2Tags :: T2TMeta -> ReaderOptions -> String -> Pandoc
readTxt2Tags t opts s = flip runReader t $ readWithM parseT2T (def {stateOptions = opts}) (s ++ "\n\n")
-- | Read Txt2Tags (ignoring all macros) from an input string returning
-- a Pandoc document
readTxt2TagsNoMacros :: ReaderOptions -> String -> Pandoc
readTxt2TagsNoMacros = readTxt2Tags def
parseT2T :: T2T Pandoc
parseT2T = do
-- Parse header if standalone flag is set
standalone <- getOption readerStandalone
when standalone parseHeader
body <- mconcat <$> manyTill block eof
meta' <- stateMeta <$> getState
return $ Pandoc meta' (B.toList body)
parseHeader :: T2T ()
parseHeader = do
() <$ try blankline <|> header
meta <- stateMeta <$> getState
optional blanklines
config <- manyTill setting (notFollowedBy setting)
-- TODO: Handle settings better
let settings = foldr (\(k,v) -> B.setMeta k (MetaString v)) meta config
updateState (\s -> s {stateMeta = settings}) <* optional blanklines
header :: T2T ()
header = titleline >> authorline >> dateline
headerline :: B.ToMetaValue a => String -> T2T a -> T2T ()
headerline field p = (() <$ try blankline)
<|> (p >>= updateState . B.setMeta field)
titleline :: T2T ()
titleline =
headerline "title" (trimInlines . mconcat <$> manyTill inline newline)
authorline :: T2T ()
authorline =
headerline "author" (sepBy author (char ';') <* newline)
where
author = trimInlines . mconcat <$> many (notFollowedBy (char ';' <|> newline) >> inline)
dateline :: T2T ()
dateline = headerline "date" (trimInlines . mconcat <$> manyTill inline newline)
type Keyword = String
type Value = String
setting :: T2T (Keyword, Value)
setting = do
string "%!"
keyword <- ignoreSpacesCap (many1 alphaNum)
char ':'
value <- ignoreSpacesCap (manyTill anyChar (newline))
return (keyword, value)
-- Blocks
parseBlocks :: T2T Blocks
parseBlocks = mconcat <$> manyTill block eof
block :: T2T Blocks
block = do
choice
[ mempty <$ blanklines
, quote
, hrule -- hrule must go above title
, title
, commentBlock
, verbatim
, rawBlock
, taggedBlock
, list
, table
, para
]
title :: T2T Blocks
title = try $ balancedTitle '+' <|> balancedTitle '='
balancedTitle :: Char -> T2T Blocks
balancedTitle c = try $ do
spaces
level <- length <$> many1 (char c)
guard (level <= 5) -- Max header level 5
heading <- manyTill (noneOf "\n\r") (count level (char c))
label <- optionMaybe (enclosed (char '[') (char ']') (alphaNum <|> oneOf "_-"))
many spaceChar *> newline
let attr = maybe nullAttr (\x -> (x, [], [])) label
return $ B.headerWith attr level (trimInlines $ B.text heading)
para :: T2T Blocks
para = try $ do
ils <- parseInlines
nl <- option False (True <$ newline)
option (B.plain ils) (guard nl >> notFollowedBy listStart >> return (B.para ils))
where
listStart = try bulletListStart <|> orderedListStart
commentBlock :: T2T Blocks
commentBlock = try (blockMarkupArea (anyLine) (const mempty) "%%%") <|> comment
-- Seperator and Strong line treated the same
hrule :: T2T Blocks
hrule = try $ do
spaces
line <- many1 (oneOf "=-_")
guard (length line >= 20)
B.horizontalRule <$ blankline
quote :: T2T Blocks
quote = try $ do
lookAhead tab
rawQuote <- many1 (tab *> optional spaces *> anyLine)
contents <- parseFromString parseBlocks (intercalate "\n" rawQuote ++ "\n\n")
return $ B.blockQuote contents
commentLine :: T2T Inlines
commentLine = comment
-- List Parsing code from Org Reader
list :: T2T Blocks
list = choice [bulletList, orderedList, definitionList]
bulletList :: T2T Blocks
bulletList = B.bulletList . compactify'
<$> many1 (listItem bulletListStart parseBlocks)
orderedList :: T2T Blocks
orderedList = B.orderedList . compactify'
<$> many1 (listItem orderedListStart parseBlocks)
definitionList :: T2T Blocks
definitionList = try $ do
B.definitionList . compactify'DL <$>
many1 (listItem definitionListStart definitionListEnd)
definitionListEnd :: T2T (Inlines, [Blocks])
definitionListEnd = (,) <$> (mconcat <$> manyTill inline newline) <*> ((:[]) <$> parseBlocks)
genericListStart :: T2T Char
-> T2T Int
genericListStart listMarker = try $
(2+) <$> (length <$> many spaceChar
<* listMarker <* space <* notFollowedBy space)
-- parses bullet list \start and returns its length (excl. following whitespace)
bulletListStart :: T2T Int
bulletListStart = genericListStart (char '-')
orderedListStart :: T2T Int
orderedListStart = genericListStart (char '+' )
definitionListStart :: T2T Int
definitionListStart = genericListStart (char ':')
-- parse raw text for one list item, excluding start marker and continuations
listItem :: T2T Int
-> T2T a
-> T2T a
listItem start end = try $ do
markerLength <- try start
firstLine <- anyLineNewline
blank <- option "" ("\n" <$ blankline)
rest <- concat <$> many (listContinuation markerLength)
parseFromString end $ firstLine ++ blank ++ rest
-- continuation of a list item - indented and separated by blankline or endline.
-- Note: nested lists are parsed as continuations.
listContinuation :: Int
-> T2T String
listContinuation markerLength = try $
notFollowedBy' (blankline >> blankline)
*> (mappend <$> (concat <$> many1 listLine)
<*> many blankline)
where listLine = try $ indentWith markerLength *> anyLineNewline
anyLineNewline :: T2T String
anyLineNewline = (++ "\n") <$> anyLine
indentWith :: Int -> T2T String
indentWith n = count n space
-- Table
table :: T2T Blocks
table = try $ do
tableHeader <- fmap snd <$> option mempty (try headerRow)
rows <- many1 (many commentLine *> tableRow)
let columns = transpose rows
let ncolumns = length columns
let aligns = map (foldr1 findAlign) (map (map fst) columns)
let rows' = map (map snd) rows
let size = maximum (map length rows')
let rowsPadded = map (pad size) rows'
let headerPadded = if (not (null tableHeader)) then pad size tableHeader else mempty
return $ B.table mempty
(zip aligns (replicate ncolumns 0.0))
headerPadded rowsPadded
-- | Right-pad a list with 'mempty' elements up to length @n@.
-- Lists that are already at least @n@ long are returned unchanged
-- ('replicate' with a non-positive count yields @[]@).
-- The previous 'Show' constraint was unused and has been dropped
-- (a backward-compatible relaxation).
pad :: Monoid a => Int -> [a] -> [a]
pad n xs = xs ++ replicate (n - length xs) mempty
-- | Combine two column alignments: equal alignments survive, any
-- conflict collapses to 'AlignDefault'.
findAlign :: Alignment -> Alignment -> Alignment
findAlign a b = if a == b then a else AlignDefault
headerRow :: T2T [(Alignment, Blocks)]
headerRow = genericRow (string "||")
tableRow :: T2T [(Alignment, Blocks)]
tableRow = genericRow (char '|')
genericRow :: T2T a -> T2T [(Alignment, Blocks)]
genericRow start = try $ do
spaces *> start
manyTill tableCell newline <?> "genericRow"
tableCell :: T2T (Alignment, Blocks)
tableCell = try $ do
leftSpaces <- length <$> lookAhead (many1 space) -- Case of empty cell means we must lookAhead
content <- (manyTill inline (try $ lookAhead (cellEnd)))
rightSpaces <- length <$> many space
let align =
case compare leftSpaces rightSpaces of
LT -> AlignLeft
EQ -> AlignCenter
GT -> AlignRight
endOfCell
return $ (align, B.plain (B.trimInlines $ mconcat content))
where
cellEnd = (void newline <|> (many1 space *> endOfCell))
endOfCell :: T2T ()
endOfCell = try (skipMany1 $ char '|') <|> ( () <$ lookAhead newline)
-- Raw area
verbatim :: T2T Blocks
verbatim = genericBlock anyLineNewline B.codeBlock "```"
rawBlock :: T2T Blocks
rawBlock = genericBlock anyLineNewline (B.para . B.str) "\"\"\""
taggedBlock :: T2T Blocks
taggedBlock = do
target <- getTarget
genericBlock anyLineNewline (B.rawBlock target) "'''"
-- Generic
genericBlock :: Monoid a => T2T a -> (a -> Blocks) -> String -> T2T Blocks
genericBlock p f s = blockMarkupArea p f s <|> blockMarkupLine p f s
blockMarkupArea :: Monoid a => (T2T a) -> (a -> Blocks) -> String -> T2T Blocks
blockMarkupArea p f s = try $ (do
string s *> blankline
f . mconcat <$> (manyTill p (eof <|> void (string s *> blankline))))
blockMarkupLine :: T2T a -> (a -> Blocks) -> String -> T2T Blocks
blockMarkupLine p f s = try (f <$> (string s *> space *> p))
-- Can be in either block or inline position
comment :: Monoid a => T2T a
comment = try $ do
atStart
notFollowedBy macro
mempty <$ (char '%' *> anyLine)
-- Inline
parseInlines :: T2T Inlines
parseInlines = trimInlines . mconcat <$> many1 inline
inline :: T2T Inlines
inline = do
choice
[ endline
, macro
, commentLine
, whitespace
, url
, link
, image
, bold
, underline
, code
, raw
, tagged
, strike
, italic
, code
, str
, symbol
]
bold :: T2T Inlines
bold = inlineMarkup inline B.strong '*' (B.str)
underline :: T2T Inlines
underline = inlineMarkup inline B.emph '_' (B.str)
strike :: T2T Inlines
strike = inlineMarkup inline B.strikeout '-' (B.str)
italic :: T2T Inlines
italic = inlineMarkup inline B.emph '/' (B.str)
code :: T2T Inlines
code = inlineMarkup ((:[]) <$> anyChar) B.code '`' id
raw :: T2T Inlines
raw = inlineMarkup ((:[]) <$> anyChar) B.text '"' id
tagged :: T2T Inlines
tagged = do
target <- getTarget
inlineMarkup ((:[]) <$> anyChar) (B.rawInline target) '\'' id
-- Parser for markup indicated by a double character.
-- Inline markup is greedy and glued
-- Greedy meaning ***a*** = Bold [Str "*a*"]
-- Glued meaning that markup must be tight to content
-- Markup can't pass newlines
inlineMarkup :: Monoid a
=> (T2T a) -- Content parser
-> (a -> Inlines) -- Constructor
-> Char -- Fence
-> (String -> a) -- Special Case to handle ******
-> T2T Inlines
inlineMarkup p f c special = try $ do
start <- many1 (char c)
let l = length start
guard (l >= 2)
when (l == 2) (void $ notFollowedBy space)
-- We must make sure that there is no space before the start of the
-- closing tags
body <- optionMaybe (try $ manyTill (noneOf "\n\r") $
(try $ lookAhead (noneOf " " >> string [c,c] )))
case body of
Just middle -> do
lastChar <- anyChar
end <- many1 (char c)
let parser inp = parseFromString (mconcat <$> many p) inp
let start' = special (drop 2 start)
body' <- parser (middle ++ [lastChar])
let end' = special (drop 2 end)
return $ f (start' <> body' <> end')
Nothing -> do -- Either bad or case such as *****
guard (l >= 5)
let body' = (replicate (l - 4) c)
return $ f (special body')
link :: T2T Inlines
link = try imageLink <|> titleLink
-- Link with title
titleLink :: T2T Inlines
titleLink = try $ do
char '['
notFollowedBy space
tokens <- sepBy1 (many $ noneOf " ]") space
guard (length tokens >= 2)
char ']'
let link' = last tokens
guard (length link' > 0)
let tit = concat (intersperse " " (init tokens))
return $ B.link link' "" (B.text tit)
-- Link with image
imageLink :: T2T Inlines
imageLink = try $ do
char '['
body <- image
many1 space
l <- manyTill (noneOf "\n\r ") (char ']')
return (B.link l "" body)
macro :: T2T Inlines
macro = try $ do
name <- string "%%" *> oneOfStringsCI (map fst commands)
optional (try $ enclosed (char '(') (char ')') anyChar)
lookAhead (spaceChar <|> oneOf specialChars <|> newline)
maybe (return mempty) (\f -> B.str <$> asks f) (lookup name commands)
where
commands = [ ("date", date), ("mtime", mtime)
, ("infile", infile), ("outfile", outfile)]
-- raw URLs in text are automatically linked
url :: T2T Inlines
url = try $ do
(rawUrl, escapedUrl) <- (try uri <|> emailAddress)
return $ B.link rawUrl "" (B.str escapedUrl)
uri :: T2T (String, String)
uri = try $ do
address <- t2tURI
return (address, escapeURI address)
-- The definition of a URI in the T2T source differs from the
-- actual definition. This is a transcription of the definition in
-- the source of v2.6
--isT2TURI :: String -> Bool
--isT2TURI (parse t2tURI "" -> Right _) = True
--isT2TURI _ = False
t2tURI :: T2T String
t2tURI = do
start <- try ((++) <$> proto <*> urlLogin) <|> guess
domain <- many1 chars
sep <- many (char '/')
form' <- option mempty ((:) <$> char '?' <*> many1 form)
anchor' <- option mempty ((:) <$> char '#' <*> many anchor)
return (start ++ domain ++ sep ++ form' ++ anchor')
where
protos = ["http", "https", "ftp", "telnet", "gopher", "wais"]
proto = (++) <$> oneOfStrings protos <*> string "://"
guess = (++) <$> (((++) <$> stringAnyCase "www" <*> option mempty ((:[]) <$> oneOf "23"))
<|> stringAnyCase "ftp") <*> ((:[]) <$> char '.')
login = alphaNum <|> oneOf "_.-"
pass = many (noneOf " @")
chars = alphaNum <|> oneOf "%._/~:,=$@&+-"
anchor = alphaNum <|> oneOf "%._0"
form = chars <|> oneOf ";*"
urlLogin = option mempty $ try ((\x y z -> x ++ y ++ [z]) <$> many1 login <*> option mempty ((:) <$> char ':' <*> pass) <*> char '@')
image :: T2T Inlines
image = try $ do
-- List taken from txt2tags source
let extensions = [".jpg", ".jpeg", ".gif", ".png", ".eps", ".bmp"]
char '['
path <- manyTill (noneOf "\n\t\r ") (try $ lookAhead (oneOfStrings extensions))
ext <- oneOfStrings extensions
char ']'
return $ B.image (path ++ ext) "" mempty
-- Characters used in markup
specialChars :: String
specialChars = "%*-_/|:+;"
tab :: T2T Char
tab = char '\t'
space :: T2T Char
space = char ' '
spaces :: T2T String
spaces = many space
endline :: T2T Inlines
endline = try $ do
newline
notFollowedBy blankline
notFollowedBy hrule
notFollowedBy title
notFollowedBy verbatim
notFollowedBy rawBlock
notFollowedBy taggedBlock
notFollowedBy quote
notFollowedBy list
notFollowedBy table
return $ B.space
str :: T2T Inlines
str = try $ do
B.str <$> many1 (noneOf $ specialChars ++ "\n\r ")
whitespace :: T2T Inlines
whitespace = try $ B.space <$ spaceChar
symbol :: T2T Inlines
symbol = B.str . (:[]) <$> oneOf specialChars
-- Utility
getTarget :: T2T String
getTarget = do
mv <- lookupMeta "target" . stateMeta <$> getState
let MetaString target = fromMaybe (MetaString "html") mv
return target
atStart :: T2T ()
atStart = (sourceColumn <$> getPosition) >>= guard . (== 1)
ignoreSpacesCap :: T2T String -> T2T String
ignoreSpacesCap p = map toLower <$> (spaces *> p <* spaces)
| sapek/pandoc | src/Text/Pandoc/Readers/Txt2Tags.hs | gpl-2.0 | 17,838 | 0 | 19 | 4,060 | 5,644 | 2,865 | 2,779 | 409 | 3 |
-- | Install-path module for the @sudoku@ package. This has the shape of
-- a Cabal-generated @Paths_\<pkg\>@ module: each directory getter consults
-- an environment-variable override first and falls back to a hard-coded
-- install location; edit with care, as builds may regenerate it.
module Paths_sudoku (
    version,
    getBinDir, getLibDir, getDataDir, getLibexecDir,
    getDataFileName, getSysconfDir
  ) where

import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude

-- | Run an IO action, handling any 'IOException' with the given handler.
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
catchIO = Exception.catch

-- | The package version, 1.0.
version :: Version
version = Version {versionBranch = [1,0], versionTags = []}

-- Hard-coded install locations, baked in at configure time.
bindir, libdir, datadir, libexecdir, sysconfdir :: FilePath
bindir = "/Users/k.suchanek/Library/Haskell/bin"
libdir = "/Users/k.suchanek/Library/Haskell/ghc-7.8.3-x86_64/lib/sudoku-1.0"
datadir = "/Users/k.suchanek/Library/Haskell/share/ghc-7.8.3-x86_64/sudoku-1.0"
libexecdir = "/Users/k.suchanek/Library/Haskell/libexec"
sysconfdir = "/Users/k.suchanek/Library/Haskell/etc"

-- Each getter prefers the corresponding "sudoku_*" environment variable
-- when it is set, falling back to the baked-in path on any IO error.
getBinDir, getLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
getBinDir = catchIO (getEnv "sudoku_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "sudoku_libdir") (\_ -> return libdir)
getDataDir = catchIO (getEnv "sudoku_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "sudoku_libexecdir") (\_ -> return libexecdir)
getSysconfDir = catchIO (getEnv "sudoku_sysconfdir") (\_ -> return sysconfdir)

-- | Resolve a data file name against the (possibly overridden) data dir
-- by plain string concatenation with a "/" separator.
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
  dir <- getDataDir
  return (dir ++ "/" ++ name)
| wsuchy/sudoku_solver | dist/build/autogen/Paths_sudoku.hs | gpl-2.0 | 1,382 | 0 | 10 | 182 | 365 | 209 | 156 | 28 | 1 |
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable, TypeSynonymInstances, MultiParamTypeClasses #-}
module RAM.Type where
-- $Id$
import RAM.Builtin
import Autolib.ToDoc
import Autolib.Reader
import Autolib.Size
import Autolib.TES.Identifier
import Data.Typeable
import Autolib.Xml
type Var = Identifier
-- Serialise identifiers to/from 'String' via their Show/Read instances.
instance Container Identifier String where
    label _ = "Autolib.TES.Identifier"
    pack = show
    -- NOTE(review): 'read' is partial -- a string that does not parse as
    -- an 'Identifier' crashes here; assumes a Show/Read round-trip.
    unpack = read
-- | One instruction of the RAM language. Loop/While carry a nested body;
-- exact run-time semantics are defined by the interpreter elsewhere
-- (presumably LOOP/WHILE semantics over register values -- confirm there).
data Statement = Inc Var -- ^ increment register
               | Dec Var -- ^ decrement register
               | Loop Var Program -- ^ bounded iteration over the body
               | While Var Program -- ^ conditional iteration over the body
               | Builtin { name :: Builtin, res :: Var, args :: [ Var ] }
                 -- ^ builtin applied to 'args', result stored in 'res'
     deriving ( Eq, Ord, Typeable )

-- | A program is a sequence of statements.
type Program = [ Statement ]
-- | The size of a program is the total size of its statements.
instance Size Program where
    size = sum . map size

-- | Each statement counts as one node; Loop/While additionally count
-- their bodies. The loop variable is irrelevant to size, so it is
-- matched with a wildcard (the named binder was unused and warned).
instance Size Statement where
    size ( Loop _ p ) = succ $ size p
    size ( While _ p ) = succ $ size p
    size _ = 1
-- | Pre-order listing of every statement in a program: each statement is
-- followed by the (recursively flattened) body of its loop, if any.
flatten :: Program -> Program
flatten = concatMap expand
  where
    expand stmt = stmt : case stmt of
        Loop _ body  -> flatten body
        While _ body -> flatten body
        _            -> []
$(derives [makeReader, makeToDoc] [''Statement])
-- Local variables:
-- mode: haskell;
-- end:
| Erdwolf/autotool-bonn | src/RAM/Type.hs | gpl-2.0 | 1,051 | 5 | 10 | 248 | 334 | 183 | 151 | 34 | 3 |
{-# LANGUAGE RecordWildCards, ScopedTypeVariables, MultiParamTypeClasses
, DeriveDataTypeable, OverloadedStrings, PatternSynonyms
, GeneralizedNewtypeDeriving, FlexibleContexts #-}
-- this module isn't finished, and there's heaps of warnings.
{-# OPTIONS_GHC -w #-}
-- |
-- Module : Yi.UI.Pango.Control
-- License : GPL
module Yi.UI.Pango.Control (
Control(..)
, ControlM(..)
, Buffer(..)
, View(..)
, Iter(..)
, startControl
, runControl
, controlIO
, liftYi
, getControl
, newBuffer
, newView
, getBuffer
, setBufferMode
, withCurrentBuffer
, setText
, getText
, keyTable
, newColor
, newForeground
, newBackground
, yiForeground
, yiBackground
, yiColor
, pangoScale
, fromPango
, toPango
) where
import Data.Text (unpack, pack, Text)
import qualified Data.Text as T
import Prelude hiding (concatMap, concat, foldl, elem, mapM_)
import Control.Exception (catch)
import Control.Monad hiding (mapM_, forM_)
import Control.Monad.Reader hiding (mapM_, forM_)
import Control.Applicative
import Control.Lens hiding (views, Action)
import Data.Foldable
import Data.Maybe (maybe, fromJust, fromMaybe)
import Data.Monoid
import Data.IORef
import Data.List (nub, filter, drop, zip, take, length)
import Data.Prototype
import Yi.Rope (toText, splitAtLine, YiString)
import qualified Yi.Rope as R
import qualified Data.Map as Map
import Yi.Core (startEditor, focusAllSyntax)
import Yi.Buffer
import Yi.Config
import Yi.Tab
import Yi.Window as Yi
import Yi.Editor
import Yi.Event
import Yi.Keymap
import Yi.Monad
import Yi.Style
import Yi.UI.Utils
import Yi.Utils
import Yi.Debug
import Control.Monad.Reader (ask, asks, MonadReader(..))
import Control.Monad.State (ap, get, put, modify)
import Control.Monad.Base
import Control.Concurrent (newMVar, modifyMVar, MVar, newEmptyMVar, putMVar,
readMVar, isEmptyMVar)
import Data.Typeable
import qualified Data.List.PointedList as PL (insertRight, withFocus,
PointedList(..), singleton)
import Yi.Regex
import Yi.String (showT)
import System.FilePath
import qualified Yi.UI.Common as Common
import Data.GI.Base
(withManagedPtr, gflagsToWord, gerrorMessage, GError)
import GI.Gtk
(noAdjustment, Clipboard, clipboardSetText, clipboardRequestText,
clipboardGetForDisplay, widgetGetDisplay,
onWidgetMotionNotifyEvent, onWidgetDraw, onWidgetScrollEvent,
onWidgetButtonReleaseEvent, widgetGrabFocus,
onWidgetButtonPressEvent, onWidgetKeyPressEvent,
scrolledWindowSetPolicy, scrolledWindowAddWithViewport,
scrolledWindowNew, widgetCreatePangoContext, widgetModifyBg,
drawingAreaNew, ScrolledWindow, DrawingArea, widgetSetSizeRequest,
widgetQueueDraw, mainQuit)
import qualified GI.Gtk as Gtk
(setWidgetCanFocus, widgetAddEvents, widgetGetAllocatedHeight,
widgetGetAllocatedWidth)
import GI.Pango
(attrListInsert, attrListNew, AttrList, attrWeightNew,
attrUnderlineNew, attrStyleNew, attrBackgroundNew,
attrForegroundNew, setAttributeEndIndex, setAttributeStartIndex,
AttrClass(..), Attribute(..), Color,
layoutIndexToPos, layoutGetCursorPos, layoutSetAttributes,
layoutNew, contextGetMetrics, contextGetLanguage,
layoutSetFontDescription, layoutXyToIndex, Layout,
fontMetricsGetDescent, fontMetricsGetApproximateCharWidth,
fontMetricsGetAscent, FontDescription, FontMetrics, Language,
layoutSetText, layoutGetText, layoutGetPixelExtents,
layoutSetWidth, layoutGetWidth, layoutGetFontDescription)
import GI.Gdk
(keyvalToUnicode, keyvalName, EventKey, atomIntern, pattern BUTTON_MIDDLE,
getEventMotionY, getEventMotionX, pattern BUTTON_PRIMARY, EventType,
setColorBlue, setColorGreen, setColorRed, getEventScrollDirection,
getEventButtonButton, getEventButtonType, getEventButtonY,
getEventButtonX)
import Yi.UI.Pango.Rectangle (getRectangleWidth, getRectangleX, Rectangle)
import GI.Gtk.Enums (PolicyType(..), StateType(..))
import Data.GI.Base.Signals (SignalHandlerId)
import GI.Gdk.Flags (EventMask(..))
import qualified Data.GI.Base as Gtk (on, set)
import Data.GI.Base.Attributes (AttrOp(..))
import GI.Pango.Enums
(AttrType(..), Weight(..), Underline(..), Style(..))
import GI.PangoCairo (showLayout)
import GI.Cairo (Context(..))
import qualified GI.Pango as Pango
(getRectangleHeight, getRectangleWidth, getRectangleY,
getRectangleX, Rectangle)
import Graphics.Rendering.Cairo.Types (Cairo(..))
import Graphics.Rendering.Cairo (setLineWidth)
import qualified Graphics.Rendering.Cairo as Cairo
(setLineWidth, setSourceRGB, stroke, rectangle, lineTo, moveTo)
import qualified GI.Gdk as Gtk
(ModifierType, getEventKeyState, getEventKeyKeyval,
ScrollDirection, Color(..))
import qualified Graphics.Rendering.Cairo.Internal as Cairo
(Render(..))
import Data.Word (Word32, Word16)
import Data.GI.Base.Constructible (Constructible(..))
import GI.Gdk.Enums (EventType(..))
import GI.GObject (signalHandlerDisconnect)
import qualified GI.Gdk.Enums as Gtk (ScrollDirection(..))
import qualified GI.Gdk.Flags as Gtk (ModifierType(..))
import Data.Int (Int32)
import Graphics.Rendering.Cairo.Internal (Render(..))
import Foreign.Ptr (castPtr)
-- | Shared handle for driving Yi from this GTK/Pango control layer.
data Control = Control
    { controlYi :: Yi -- ^ the running Yi instance
    , tabCache :: IORef [TabInfo] -- ^ cached per-tab state
    , views :: IORef (Map.Map WindowRef View) -- ^ live views keyed by window ref
    }
-- Sketch of an earlier design, kept for reference:
-- { config :: Config
-- , editor :: Editor
-- , input :: Event -> IO ()
-- , output :: Action -> IO ()
-- }

-- | Per-tab cache entry; currently just wraps the core 'Tab'.
data TabInfo = TabInfo
    { coreTab :: Tab
-- , page :: VBox
    }

instance Show TabInfo where
    show t = show (coreTab t)
--type ControlM = YiM
newtype ControlM a = ControlM { runControl'' :: ReaderT Control IO a }
deriving (Monad, MonadBase IO, MonadReader Control, Typeable,
Functor, Applicative)
-- Helper functions to avoid issues with mismatching monad libraries

-- | Lift a raw IO action into 'ControlM'.
controlIO :: IO a -> ControlM a
controlIO = liftBase

-- | Fetch the ambient 'Control' handle.
getControl :: ControlM Control
getControl = ask

-- | Run a 'YiM' action from inside 'ControlM', using the 'Yi' handle
-- stored in the 'Control' environment.
liftYi :: YiM a -> ControlM a
liftYi m = do
    yi <- asks controlYi
    liftBase $ runReaderT (runYiM m) yi
--instance MonadState Editor ControlM where
-- get = readRef =<< editor <$> ask
-- put v = flip modifyRef (const v) =<< editor <$> ask
--instance MonadEditor ControlM where
-- askCfg = config <$> ask
-- withEditor f = do
-- r <- asks editor
-- cfg <- asks config
-- liftBase $ controlUnsafeWithEditor cfg r f
startControl :: Config -> ControlM () -> IO ()
startControl config main = startEditor (config { startFrontEnd = start main } ) Nothing
-- | Run a control action against the shared 'Control' handle, returning
-- 'Nothing' when the handle has not been initialised yet.
-- NOTE(review): isEmptyMVar-then-readMVar is a check-then-act pair; it is
-- only safe if the MVar is filled once and never emptied -- confirm.
runControl' :: ControlM a -> MVar Control -> IO (Maybe a)
runControl' act yiMVar = do
  notReady <- isEmptyMVar yiMVar
  if notReady
    then return Nothing
    else fmap Just (readMVar yiMVar >>= runControl act)
-- runControl :: ControlM a -> Yi -> IO a
-- runControl m yi = runReaderT (runYiM m) yi
runControl :: ControlM a -> Control -> IO a
runControl f = runReaderT (runControl'' f)
-- runControlEditor f yiMVar = yiMVar
-- | Hand a single 'Action' to Yi's output channel, requesting a refresh.
runAction :: Action -> ControlM ()
runAction action = do
    out <- liftYi $ asks yiOutput
    liftBase $ out MustRefresh [action]
-- | Test 2
mkUI :: IO () -> MVar Control -> Common.UI Editor
mkUI main yiMVar = Common.dummyUI
{ Common.main = main
, Common.end = \_ -> void $ runControl' end yiMVar
, Common.suspend = void $ runControl' suspend yiMVar
, Common.refresh = \e -> void $ runControl' (refresh e) yiMVar
, Common.layout = \e -> fromMaybe e <$> runControl' (doLayout e) yiMVar
, Common.reloadProject = \f -> void $ runControl' (reloadProject f) yiMVar
}
start :: ControlM () -> UIBoot
start main cfg ch outCh ed =
catch (startNoMsg main cfg ch outCh ed) (\(e :: GError) ->
fail . unpack =<< gerrorMessage e)
-- | Build the shared 'Control' handle (empty tab cache, no views) and
-- publish it through the given MVar.
makeControl :: MVar Control -> YiM ()
makeControl controlMVar = do
  yi <- ask
  liftBase $ do
    cache <- newIORef []
    viewMap <- newIORef Map.empty
    putMVar controlMVar (Control yi cache viewMap)
-- | Boot the frontend without the GError wrapper: allocate the (empty)
-- 'Control' MVar and hand the UI a main action that first publishes the
-- control handle from inside Yi, then runs the user's 'ControlM' loop.
startNoMsg :: ControlM () -> UIBoot
startNoMsg controlMain config input output ed = do
    controlMVar <- newEmptyMVar
    let bootAction = do
            output [makeAction (makeControl controlMVar)]
            void (runControl' controlMain controlMVar)
    return (mkUI bootAction controlMVar)
-- | Shut the frontend down: log, then stop the GTK main loop.
end :: ControlM ()
end = liftBase (putStrLn "Yi Control End" >> mainQuit)
-- | Suspend hook for the frontend.  Currently only logs; there is
-- nothing to suspend for this UI.  (The trailing @return ()@ was
-- redundant: the logging action already has type @ControlM ()@.)
suspend :: ControlM ()
suspend = liftBase $ putStrLn "Yi Control Suspend"
{-# ANN refresh ("HLint: ignore Redundant do" :: String) #-}
-- | Repaint hook: resync the tab cache with the editor state, then
-- queue a GTK redraw for every known view.  Most of the old
-- statusbar/renderer plumbing is disabled below.
refresh :: Editor -> ControlM ()
refresh e = do
    --contextId <- statusbarGetContextId (uiStatusbar ui) "global"
    --statusbarPop (uiStatusbar ui) contextId
    --statusbarPush (uiStatusbar ui) contextId $ intercalate " " $ statusLine e
    updateCache e -- The cursor may have changed since doLayout
    viewsRef <- asks views
    vs <- liftBase $ readIORef viewsRef
    forM_ (Map.elems vs) $ \v -> do
        -- 'b' is only needed by the disabled pendingUpdates check below.
        let b = findBufferWith (viewFBufRef v) e
        -- when (not $ null $ b ^. pendingUpdatesA) $
        do
            -- sig <- readIORef (renderer w)
            -- signalDisconnect sig
            -- writeRef (renderer w)
            -- =<< (textview w `onExpose` render e ui b (wkey (coreWin w)))
            liftBase $ widgetQueueDraw (drawArea v)
-- | Layout hook: measure every view's widget and write the resulting
-- width/height/region back into the corresponding editor windows.  The
-- final fold forces each window's geometry so old 'Window' values are
-- not retained by lazy thunks.
doLayout :: Editor -> ControlM Editor
doLayout e = do
    liftBase $ putStrLn "Yi Control Do Layout"
    updateCache e
    cacheRef <- asks tabCache
    tabs <- liftBase $ readIORef cacheRef
    dims <- concat <$> mapM (getDimensionsInTab e) tabs
    let e' = (tabsA %~ fmap (mapWindows updateWin)) e
        updateWin w = case find (\(ref,_,_,_) -> (wkey w == ref)) dims of
            Nothing -> w
            Just (_, wi, h,rgn) -> w { width = wi
                                     , height = h
                                     , winRegion = rgn }
    -- Don't leak references to old Windows
    let forceWin x w = height w `seq` winRegion w `seq` x
    return $ (foldl . tabFoldl) forceWin e' (e' ^. tabsA)
-- | Pango's fixed-point scaling factor (PANGO_SCALE): the number of
-- Pango units per device unit.
pangoScale :: Num a => a
pangoScale = 1024

-- | Convert a Pango fixed-point quantity to device units.
fromPango :: Int32 -> Double
fromPango units = fromIntegral units / pangoScale

-- | Convert device units to Pango fixed-point, rounding to nearest.
toPango :: Double -> Int32
toPango units = round (units * pangoScale)
-- | For every window of a tab that has a live view, measure the
-- drawing-area allocation and the font metrics, and return
-- @(windowRef, width-in-chars, height-in-lines, shown region)@.
-- Windows without a cached view are skipped.
getDimensionsInTab :: Editor -> TabInfo -> ControlM [(WindowRef,Int,Int,Region)]
getDimensionsInTab e tab = do
  viewsRef <- asks views
  vs <- liftBase $ readIORef viewsRef
  foldlM (\a w ->
      case Map.lookup (wkey w) vs of
          Just v -> do
              wi <- liftBase $ Gtk.widgetGetAllocatedWidth $ drawArea v
              h <- liftBase $ Gtk.widgetGetAllocatedHeight $ drawArea v
              ascent <- liftBase $ fontMetricsGetAscent (metrics v)
              descent <- liftBase $ fontMetricsGetDescent (metrics v)
              charWidth <- liftBase $ fontMetricsGetApproximateCharWidth (metrics v)
              let lineHeight = ascent + descent
                  b0 = findBufferWith (viewFBufRef v) e
              rgn <- shownRegion e v b0
              -- Allocation is in device pixels; metrics are in Pango
              -- units, hence the (* pangoScale) before dividing.
              let ret= (windowRef v, fromIntegral ((wi * pangoScale) `div` charWidth),
                        fromIntegral ((h * pangoScale) `div` lineHeight), rgn)
              return $ a <> [ret]
          Nothing -> return a)
    [] (coreTab tab ^. tabWindowsA)
-- | The buffer region currently visible in a view, derived from a fresh
-- Pango layout pass (top-of-screen to bottom-of-screen points).
shownRegion :: Editor -> View -> FBuffer -> ControlM Region
shownRegion e v b = do
    liftBase (print "shownRegion")
    (top, _cursor, bottom) <- updatePango e v b (layout v)
    return (mkRegion top bottom)
-- | Push the visible portion of the buffer into the Pango layout and
-- compute @(top-of-screen, cursor point, bottom-of-screen)@.  The
-- bottom point is recovered by asking Pango which byte index sits at
-- the last visible pixel row.  All Pango arithmetic is in fixed-point
-- units (see 'pangoScale').
updatePango :: Editor -> View -> FBuffer -> Layout
            -> ControlM (Point, Point, Point)
updatePango e v b layout = do
  width' <- liftBase $ Gtk.widgetGetAllocatedWidth $ drawArea v
  height' <- liftBase $ Gtk.widgetGetAllocatedHeight $ drawArea v
  liftBase . print $ "updatePango " ++ show (width', height')
  -- 'font' is only needed by the disabled font-change check below.
  font <- liftBase $ layoutGetFontDescription layout
  --oldFont <- layoutGetFontDescription layout
  --oldFontStr <- maybe (return Nothing)
  -- (fmap Just . fontDescriptionToString) oldFont
  --newFontStr <- Just <$> fontDescriptionToString font
  --when (oldFontStr /= newFontStr)
  -- (layoutSetFontDescription layout (Just font))
  ascent <- liftBase $ fontMetricsGetAscent (metrics v)
  descent <- liftBase $ fontMetricsGetDescent (metrics v)
  let win = findWindowWith (windowRef v) e
      -- Irrefutable list pattern: converts both pixel sizes to Pango units.
      [width'', height''] = map (*pangoScale) [width', height']
      lineHeight = ascent + descent
      winh = max 1 $ fromIntegral (height'' `div` lineHeight)
      (tos, point, text) = askBuffer win b $ do
          from <- (use . markPointA) =<< fromMark <$> askMarks
          rope <- streamB Forward from
          p <- pointB
          let content = fst $ splitAtLine winh rope
          -- allow BOS offset to be just after the last line
          let addNL = if R.countNewLines content == winh
                      then id
                      else (`R.snoc` '\n')
          return (from, p, R.toText $ addNL content)
  config <- liftYi askCfg
  if configLineWrap $ configUI config
    then do oldWidth <- liftBase $ layoutGetWidth layout
            when (oldWidth /= width'') $
                liftBase $ layoutSetWidth layout width''
    else liftBase $ do
      -- No wrapping: grow the widget to fit the widest rendered line.
      (r, _) <- layoutGetPixelExtents layout
      px <- Pango.getRectangleX r
      pwidth <- Pango.getRectangleWidth r
      widgetSetSizeRequest (drawArea v) (px+pwidth) (-1)
  -- optimize for cursor movement
  oldText <- liftBase $ layoutGetText layout
  when (oldText /= text) $ liftBase $ layoutSetText layout text (-1)
  (_, bosOffset, _) <- liftBase $ layoutXyToIndex layout width''
                           (fromIntegral winh * lineHeight - 1)
  return (tos, point, tos + fromIntegral bosOffset + 1)
-- | Resynchronise the cached tab list with the editor's current tabs.
updateCache :: Editor -> ControlM ()
updateCache e = do
    cacheRef <- asks tabCache
    oldCache <- liftBase (readIORef cacheRef)
    newCache <- syncTabs e (toList (PL.withFocus (e ^. tabsA))) oldCache
    liftBase (writeIORef cacheRef newCache)
-- | Walk the editor's tabs and the cached 'TabInfo's in parallel,
-- inserting, removing or syncing cache entries so the two lists end up
-- aligned.  The focused tab (flag in the pair) is given tab focus.
syncTabs :: Editor -> [(Tab, Bool)] -> [TabInfo] -> ControlM [TabInfo]
syncTabs e (tf@(tab, focused) : rest) (cached : olds)
    -- Same tab at the head of both lists: sync in place.
    | tab == coreTab cached = do
        when focused (setTabFocus cached)
        (:) <$> syncTab e cached tab <*> syncTabs e rest olds
    -- The cached entry corresponds to a tab further down: drop it.
    | tab `elem` map coreTab olds = do
        removeTab cached
        syncTabs e (tf : rest) olds
    -- Genuinely new tab: insert it before the cached one.
    | otherwise = do
        fresh <- insertTabBefore e tab cached
        when focused (setTabFocus fresh)
        (fresh :) <$> syncTabs e rest (cached : olds)
-- Cache exhausted: create entries for all remaining tabs.
syncTabs e pending [] = mapM create pending
  where
    create (tab, focused) = do
        fresh <- insertTab e tab
        when focused (setTabFocus fresh)
        return fresh
-- No tabs left: discard any leftover cache entries.
syncTabs _ [] olds = mapM_ removeTab olds >> return []
-- | Bring a cached tab in line with its core tab.  Currently a no-op
-- that returns the cache entry unchanged.
syncTab :: Editor -> TabInfo -> Tab -> ControlM TabInfo
syncTab _e tabInfo _tab =
    -- TODO Maybe do something here
    return tabInfo
-- | Give a tab UI focus.  Currently a stub (see TODO below).
setTabFocus :: TabInfo -> ControlM ()
setTabFocus t =
  -- TODO this needs to set the tab focus with callback
  -- but only if the tab focus has changed
  return ()
-- | Run a 'BufferM' query purely against a buffer/window pair,
-- discarding the (unchanged) buffer state.
askBuffer :: Yi.Window -> FBuffer -> BufferM a -> a
askBuffer win buf = fst . runBuffer win buf
-- | Give a view window focus and update the window chrome (title,
-- modeline).  The chrome updates are still disabled, so the computed
-- 'bufferName' / 'ml' bindings are currently unused.
setWindowFocus :: Editor -> TabInfo -> View -> ControlM ()
setWindowFocus e t v = do
  let bufferName = shortIdentString (length $ commonNamePrefix e) $
        findBufferWith (viewFBufRef v) e
      window = findWindowWith (windowRef v) e
      ml = askBuffer window (findBufferWith (viewFBufRef v) e) $
           getModeLine (T.pack <$> commonNamePrefix e)
  -- TODO
  -- update (textview w) widgetIsFocus True
  -- update (modeline w) labelText ml
  -- update (uiWindow ui) windowTitle $ bufferName <> " - Yi"
  -- update (uiNotebook ui) (notebookChildTabLabel (page t))
  -- (tabAbbrevTitle bufferName)
  return ()
-- | Remove a whole tab from the UI.  Currently a stub.
removeTab :: TabInfo -> ControlM ()
removeTab t =
  -- TODO this needs to close the views in the tab with callback
  return ()

-- | Remove a single view from a tab.  Currently a stub.
removeView :: TabInfo -> View -> ControlM ()
removeView tab view =
  -- TODO this needs to close the view with callback
  return ()
-- | Make a new tab.  The per-tab view cache sync is disabled, so the
-- resulting 'TabInfo' wraps the core 'Tab' only.
newTab :: Editor -> Tab -> ControlM TabInfo
newTab e ws = do
  let t' = TabInfo { coreTab = ws }
  -- cache <- syncWindows e t' (toList $ PL.withFocus ws) []
  return t' -- { views = cache }
{-# ANN insertTabBefore ("HLint: ignore Redundant do" :: String) #-}
-- | Insert a tab ahead of an existing one.  The GTK notebook wiring is
-- commented out, so this currently just builds a fresh 'TabInfo'.
insertTabBefore :: Editor -> Tab -> TabInfo -> ControlM TabInfo
insertTabBefore e ws c = do
  -- Just p <- notebookPageNum (uiNotebook ui) (page c)
  -- vb <- vBoxNew False 1
  -- notebookInsertPage (uiNotebook ui) vb "" p
  -- widgetShowAll $ vb
  newTab e ws
{-# ANN insertTab ("HLint: ignore Redundant do" :: String) #-}
-- | Append a tab.  The GTK notebook wiring is commented out, so this
-- currently just builds a fresh 'TabInfo'.
insertTab :: Editor -> Tab -> ControlM TabInfo
insertTab e ws = do
  -- vb <- vBoxNew False 1
  -- notebookAppendPage (uiNotebook ui) vb ""
  -- widgetShowAll $ vb
  newTab e ws
{-
insertWindowBefore :: Editor -> TabInfo -> Yi.Window -> WinInfo -> IO WinInfo
insertWindowBefore e ui tab w _c = insertWindow e ui tab w
insertWindowAtEnd :: Editor -> UI -> TabInfo -> Window -> IO WinInfo
insertWindowAtEnd e ui tab w = insertWindow e ui tab w
insertWindow :: Editor -> UI -> TabInfo -> Window -> IO WinInfo
insertWindow e ui tab win = do
let buf = findBufferWith (bufkey win) e
liftBase $ do w <- newWindow e ui win buf
set (page tab) $
[ containerChild := widget w
, boxChildPacking (widget w) :=
if isMini (coreWin w)
then PackNatural
else PackGrow
]
let ref = (wkey . coreWin) w
textview w `onButtonRelease` handleClick ui ref
textview w `onButtonPress` handleClick ui ref
textview w `onScroll` handleScroll ui ref
textview w `onConfigure` handleConfigure ui ref
widgetShowAll (widget w)
return w
-}
-- | Project-reload hook.  This frontend has no project notion, so the
-- path is ignored.
reloadProject :: FilePath -> ControlM ()
reloadProject _path = return ()
-- | Run an 'EditorM' action against the shared editor state held in the
-- MVar, atomically replacing the state with the result.
controlUnsafeWithEditor :: Config -> MVar Editor -> EditorM a -> IO a
controlUnsafeWithEditor cfg r f = modifyMVar r $ \e -> do
    let (e',a) = runEditor cfg f e
    -- Make sure that the result of runEditor is evaluated before
    -- replacing the editor state. Otherwise, we might replace e
    -- with an exception-producing thunk, which makes it impossible
    -- to look at or update the editor state.
    -- Maybe this could also be fixed by -fno-state-hack flag?
    -- TODO: can we simplify this?
    e' `seq` a `seq` return (e', a)
-- | Handle to a Yi buffer, identified by its 'BufferRef'.
data Buffer = Buffer
  { fBufRef :: BufferRef
  }

-- | Per-window GTK state: the buffer/window being displayed plus the
-- widgets, Pango objects and signal bookkeeping needed to render it.
data View = View
  { viewFBufRef :: BufferRef          -- ^ buffer shown in this view
  , windowRef :: WindowRef            -- ^ the Yi window this view backs
  , drawArea :: DrawingArea           -- ^ GTK widget the text is drawn on
  , layout :: Layout                  -- ^ Pango layout holding the visible text
  , language :: Language              -- ^ Pango language of the context
  , metrics :: FontMetrics            -- ^ font metrics for the view's font
  , scrollWin :: ScrolledWindow       -- ^ scrolled-window container
  , shownTos :: IORef Point           -- ^ top-of-screen buffer point, updated on draw
  , winMotionSignal :: IORef (Maybe SignalHandlerId) -- ^ active drag handler, if any
  }

-- | A point inside a specific buffer.
data Iter = Iter
  { iterFBufRef :: BufferRef
  , point :: Point
  }
-- | Create a fresh Yi buffer with the given identifier and initial
-- contents, returning a handle to it.  (The parameter was renamed from
-- @id@, which shadowed 'Prelude.id'.)  The local name @fBufRef@ feeds
-- the @RecordWildCards@ construction 'Buffer{..}'.
newBuffer :: BufferId -> R.YiString -> ControlM Buffer
newBuffer bufId text = do
    fBufRef <- liftYi . withEditor . newBufferE bufId $ text
    return Buffer{..}
-- | Create the full GTK view for a buffer: allocate a Yi window,
-- build the drawing area / Pango layout / scrolled window, wire up
-- key, mouse, scroll and draw signal handlers, and register the view
-- in the tab and view caches.  The draw handler does the heavy
-- lifting: it snapshots the visible text and attributes from the
-- editor state, pushes them into the Pango layout, and paints the
-- cursor with Cairo.
newView :: Buffer -> FontDescription -> ControlM View
newView buffer font = do
    control <- ask
    config <- liftYi askCfg
    let viewFBufRef = fBufRef buffer
    -- New window with a placeholder geometry; real sizes arrive via doLayout.
    newWindow <-
        fmap (\w -> w { height=50
                      , winRegion = mkRegion (Point 0) (Point 2000)
                      }) $ liftYi $ withEditor $ newWindowE False viewFBufRef
    let windowRef = wkey newWindow
    liftYi $ withEditor $ do
        windowsA %= PL.insertRight newWindow
        e <- get
        put $ focusAllSyntax e
    drawArea <- liftBase drawingAreaNew
    liftBase . widgetModifyBg drawArea StateTypeNormal . Just =<< liftBase (newBackground
        . Yi.Style.background . baseAttributes . configStyle $ configUI config)
    context <- liftBase $ widgetCreatePangoContext drawArea
    layout <- liftBase $ layoutNew context
    liftBase $ layoutSetFontDescription layout (Just font)
    language <- liftBase $ contextGetLanguage context
    metrics <- liftBase $ contextGetMetrics context (Just font) (Just language)
    liftBase $ layoutSetText layout "" (-1)
    scrollWin <- liftBase $ scrolledWindowNew noAdjustment noAdjustment
    liftBase $ do
        scrolledWindowAddWithViewport scrollWin drawArea
        scrolledWindowSetPolicy scrollWin PolicyTypeAutomatic PolicyTypeNever
    initialTos <-
        liftYi . withEditor . withGivenBufferAndWindow newWindow viewFBufRef $
            (use . markPointA) =<< fromMark <$> askMarks
    shownTos <- liftBase $ newIORef initialTos
    winMotionSignal <- liftBase $ newIORef Nothing
    -- RecordWildCards: all the locals above fill the matching View fields.
    let view = View {..}
    liftBase $ Gtk.widgetAddEvents drawArea (gflagsToWord [EventMaskKeyPressMask])
    liftBase $ Gtk.setWidgetCanFocus drawArea True
    -- Key presses: focus this window/buffer, forward the event to Yi.
    liftBase $ onWidgetKeyPressEvent drawArea $ \e -> do
        -- putStrLn $ "Yi Control Key Press = " <> show event
        liftBase $ runControl (runAction $ makeAction $ do
            focusWindowE windowRef
            switchToBufferE viewFBufRef) control
        result <- processKeyEvent (yiInput $ controlYi control) e
        liftBase $ widgetQueueDraw drawArea
        return result
    liftBase $ onWidgetButtonPressEvent drawArea $ \e -> do
        x <- getEventButtonX e
        y <- getEventButtonY e
        click <- getEventButtonType e
        button <- getEventButtonButton e
        liftBase $ do
            widgetGrabFocus drawArea
            runControl (handleClick view x y click button) control
    liftBase $ onWidgetButtonReleaseEvent drawArea $ \e -> do
        x <- getEventButtonX e
        y <- getEventButtonY e
        click <- getEventButtonType e
        button <- getEventButtonButton e
        liftBase $ runControl (handleClick view x y click button) control
    liftBase $ onWidgetScrollEvent drawArea $ \e -> do
        direction <- getEventScrollDirection e
        liftBase $ runControl (handleScroll view direction) control
    -- Draw handler: snapshot editor state, render text + cursor.
    liftBase $ onWidgetDraw drawArea $ \context -> do
        (text, allAttrs, debug, tos, rel, point, inserting) <- liftIO $ runControl (liftYi $ withEditor $ do
            window <- (findWindowWith windowRef) <$> get
            (%=) buffersA (fmap (clearSyntax . clearHighlight))
            let winh = height window
            let tos = max 0 (regionStart (winRegion window))
            let bos = regionEnd (winRegion window)
            let rel p = fromIntegral (p - tos)
            withGivenBufferAndWindow window viewFBufRef $ do
                -- tos <- getMarkPointB =<< fromMark <$> askMarks
                rope <- streamB Forward tos
                point <- pointB
                inserting <- use insertingA
                modeNm <- gets (withMode0 modeName)
                -- let (tos, point, text, picture) = do runBu
                -- from <- getMarkPointB =<< fromMark <$> askMarks
                -- rope <- streamB Forward from
                -- p <- pointB
                let content = fst $ splitAtLine winh rope
                -- allow BOS offset to be just after the last line
                let addNL = if R.countNewLines content == winh
                            then id
                            else (`R.snoc` '\n')
                    sty = configStyle $ configUI config
                    -- attributesPictureAndSelB sty (currentRegex e)
                    -- (mkRegion tos bos)
                    -- return (from, p, addNL $ Rope.toString content,
                    -- picture)
                let text = R.toText $ addNL content
                picture <- attributesPictureAndSelB sty Nothing
                                (mkRegion tos bos)
                -- add color attributes.
                let picZip = zip picture $ drop 1 (fst <$> picture) <> [bos]
                    strokes = [ (start',s,end') | ((start', s), end') <- picZip
                              , s /= emptyAttributes ]
                    rel p = fromIntegral (p - tos)
                    allAttrs :: MonadIO m => m AttrList
                    allAttrs = do
                        list <- attrListNew
                        forM_ strokes $ \(p1, Attributes fg bg _rv bd itlc udrl, p2) -> do
                            let atr :: MonadIO m => m Attribute -> m ()
                                atr newAttr = do
                                    a <- newAttr
                                    setAttributeStartIndex a $ rel p1
                                    setAttributeEndIndex a $ rel p2
                                    attrListInsert list a
                            let if' p x y = if p then x else y
                            mapM_ atr
                                [ yiForeground attrForegroundNew fg
                                , yiBackground attrBackgroundNew bg
                                , attrStyleNew $ if' itlc StyleItalic StyleNormal
                                , attrUnderlineNew $
                                    if' udrl UnderlineSingle UnderlineNone
                                , attrWeightNew $ if' bd WeightBold WeightNormal
                                ]
                        return list
                return (text, allAttrs, (picture, strokes, modeNm,
                                         window, tos, bos, winh),
                        tos, rel, point, inserting)) control
        liftIO $ do
            -- putStrLn $ "Setting Layout Attributes " <> show debug
            layoutSetAttributes layout . Just =<< allAttrs
            -- putStrLn "Done Stting Layout Attributes"
            -- dw <- widgetGetDrawWindow drawArea
            -- gc <- gcNew dw
            oldText <- layoutGetText layout
            when (text /= oldText) $ layoutSetText layout text (-1)
        showLayout context layout
        liftIO $ writeIORef shownTos tos
        -- paint the cursor
        (curRect, _) <- layoutGetCursorPos layout (rel point)
        curx <- Pango.getRectangleX curRect
        cury <- Pango.getRectangleY curRect
        curw <- Pango.getRectangleWidth curRect
        curh <- Pango.getRectangleHeight curRect
        chRect <- liftIO $ layoutIndexToPos layout (rel point)
        chx <- Pango.getRectangleX chRect
        chy <- Pango.getRectangleY chRect
        chw <- Pango.getRectangleWidth chRect
        chh <- Pango.getRectangleHeight chRect
        -- gcSetValues gc (newGCValues { Gtk.foreground = newForeground $ Yi.Style.foreground $ baseAttributes $ configStyle $ configUI config })
        withManagedPtr context $ \cPtr -> (`runReaderT` Cairo (castPtr cPtr)) . runRender $ do
            sourceCol True $ Yi.Style.foreground $ baseAttributes $ configStyle $ configUI config
            Cairo.setLineWidth 2
            if inserting
                then do
                    -- insert mode: draw a thin caret line
                    Cairo.moveTo (fromPango curx) (fromPango cury)
                    Cairo.lineTo (fromPango curx + fromPango curw) (fromPango cury + fromPango curh)
                else Cairo.rectangle (fromPango chx) (fromPango chy) (if chw > 0 then fromPango chw else 8) (fromPango chh)
            Cairo.stroke
        return True
    liftBase $ widgetGrabFocus drawArea
    tabsRef <- asks tabCache
    ts <- liftBase $ readIORef tabsRef
    -- TODO: the Tab idkey should be assigned using
    -- Yi.Editor.newRef. But we can't modify that here, since our
    -- access to 'Yi' is readonly.
    liftBase $ writeIORef tabsRef (TabInfo (makeTab1 0 newWindow):ts)
    viewsRef <- asks views
    vs <- liftBase $ readIORef viewsRef
    liftBase $ writeIORef viewsRef $ Map.insert windowRef view vs
    liftBase . print $ "added window ref" ++ show windowRef
    return view
  where
    clearHighlight fb =
        -- if there were updates, then hide the selection.
        let h = view highlightSelectionA fb
            us = view pendingUpdatesA fb
        in highlightSelectionA .~ (h && null us) $ fb
{-# ANN setBufferMode ("HLint: ignore Redundant do" :: String) #-}
-- | Pick and install an editing mode for a buffer: first try an Emacs
-- style @-*- mode -*-@ header in the first 1024 characters, then ask
-- each mode whether it applies to the file path/contents, and finally
-- fall back to 'emptyMode'.  The lazy @Just mode@ pattern is safe:
-- the alternative chain ends in a literal 'Just'.
setBufferMode :: FilePath -> Buffer -> ControlM ()
setBufferMode f buffer = do
    let bufRef = fBufRef buffer
    -- adjust the mode
    tbl <- liftYi $ asks (modeTable . yiConfig)
    contents <- liftYi $ withGivenBuffer bufRef elemsB
    let header = R.toString $ R.take 1024 contents
        hmode = case header =~ ("\\-\\*\\- *([^ ]*) *\\-\\*\\-" :: String) of
            AllTextSubmatches [_,m] -> T.pack m
            _ -> ""
        Just mode = find (\(AnyMode m)-> modeName m == hmode) tbl <|>
                    find (\(AnyMode m)-> modeApplies m f contents) tbl <|>
                    Just (AnyMode emptyMode)
    case mode of
        AnyMode newMode -> do
            -- liftBase $ putStrLn $ show (f, modeName newMode)
            liftYi $ withEditor $ do
                withGivenBuffer bufRef $ do
                    setMode newMode
                    modify clearSyntax
                switchToBufferE bufRef
            -- withEditor focusAllSyntax
-- | Run a 'BufferM' action in the given buffer.
withBuffer :: Buffer -> BufferM a -> ControlM a
withBuffer buf action = liftYi (withGivenBuffer (fBufRef buf) action)

-- | The buffer handle behind a view.
getBuffer :: View -> Buffer
getBuffer v = Buffer { fBufRef = viewFBufRef v }

-- | Replace the buffer's entire contents.
setText :: Buffer -> YiString -> ControlM ()
setText buf newContents = withBuffer buf $ do
    whole <- regionOfB Document
    replaceRegionB whole newContents

-- | Read the text between two buffer positions.
getText :: Buffer -> Iter -> Iter -> ControlM Text
getText buf from to =
    toText <$> withBuffer buf (readRegionB (mkRegion (point from) (point to)))
-- | Allocate a GDK colour and fill in its 16-bit RGB channels.
newColor :: MonadIO m => Word16 -> Word16 -> Word16 -> m Gtk.Color
newColor red green blue = do
    colour <- new Gtk.Color []
    setColorRed colour red
    setColorGreen colour green
    setColorBlue colour blue
    return colour
-- | Convert a Yi style colour to a GDK colour, defaulting 'Default' to
-- black (foreground) or white (background) respectively.
newForeground, newBackground :: MonadIO m => Yi.Style.Color -> m Gtk.Color
newForeground = yiForeground newColor
newBackground = yiBackground newColor

-- | Specialise 'yiColor' with the channel value used for 'Default':
-- 0 (black) for foregrounds, 'maxBound' (white) for backgrounds.
yiForeground, yiBackground :: (Word16 -> Word16 -> Word16 -> a) -> Yi.Style.Color -> a
yiForeground = yiColor 0
yiBackground = yiColor maxBound
-- | Feed a Yi colour into a three-channel continuation, widening each
-- 8-bit RGB component to the 16-bit range by multiplying by 256, and
-- using the given fallback channel value for 'Default'.
yiColor :: Word16 -> (Word16 -> Word16 -> Word16 -> a) -> Yi.Style.Color -> a
yiColor fallback mk Default = mk fallback fallback fallback
yiColor _ mk (RGB r g b) = mk (widen r) (widen g) (widen b)
  where
    widen channel = fromIntegral channel * 256
-- | Set the Cairo source colour from a Yi colour: 'Default' is black
-- for foregrounds and white for backgrounds; RGB components are
-- normalised from 0..255 to 0..1.
sourceCol :: Bool -- ^ is foreground?
          -> Yi.Style.Color -> Cairo.Render ()
sourceCol isFg Default
    | isFg = Cairo.setSourceRGB 0 0 0
    | otherwise = Cairo.setSourceRGB 1 1 1
sourceCol _ (RGB r g b) =
    Cairo.setSourceRGB (fromIntegral r / 255)
                       (fromIntegral g / 255)
                       (fromIntegral b / 255)
-- | Mouse button handler.  Converts the click position to a buffer
-- point via the Pango layout, then: on primary press, installs a
-- motion handler (drag selection) and moves the cursor there; on
-- middle release, pastes the PRIMARY selection at the click point.
-- Any other event tears down the motion handler.
handleClick :: View -> Double -> Double -> EventType -> Word32 -> ControlM Bool
handleClick view x y click button = do
  control <- ask
  -- (_tabIdx,winIdx,w) <- getWinInfo ref <$> readIORef (tabCache ui)
  logPutStrLn $ "Click: " <> showT (x, y, click)
  -- retrieve the clicked offset.
  (_,layoutIndex,_) <- io $ layoutXyToIndex (layout view) (toPango x) (toPango y)
  tos <- liftBase $ readIORef (shownTos view)
  let p1 = tos + fromIntegral layoutIndex
  let winRef = windowRef view
  -- maybe focus the window
  -- logPutStrLn $ "Clicked inside window: " <> show view
  -- let focusWindow = do
  -- TODO: check that tabIdx is the focus?
  -- (%=) windowsA (fromJust . PL.move winIdx)
  liftBase $ case (click, fromIntegral button) of
    (EventTypeButtonPress, BUTTON_PRIMARY) -> do
      -- Begin a drag: route motion events into handleMove, remember
      -- the handler id so it can be disconnected later.
      cid <- onWidgetMotionNotifyEvent (drawArea view) $ \e -> do
        x <- getEventMotionX e
        y <- getEventMotionY e
        liftBase $ runControl (handleMove view p1 x y) control
      writeIORef (winMotionSignal view) $ Just cid
    _ -> do
      maybe (return ()) (signalHandlerDisconnect (drawArea view)) =<< readIORef (winMotionSignal view)
      writeIORef (winMotionSignal view) Nothing
  case (click, fromIntegral button) of
    (EventTypeButtonPress, BUTTON_PRIMARY) -> runAction . EditorA $ do
      -- b <- gets $ (bkey . findBufferWith (viewFBufRef view))
      -- focusWindow
      window <- findWindowWith winRef <$> get
      withGivenBufferAndWindow window (viewFBufRef view) $ do
        moveTo p1
        setVisibleSelection False
    -- (Gtk.SingleClick, _) -> runAction focusWindow
    (EventTypeButtonRelease, BUTTON_MIDDLE) -> do
      -- Middle-click paste: fetch the PRIMARY selection asynchronously
      -- and insert it at the click point.
      disp <- liftBase $ widgetGetDisplay (drawArea view)
      cb <- liftBase $ clipboardGetForDisplay disp =<< atomIntern "PRIMARY" False
      let cbHandler :: Clipboard -> Maybe Text -> IO ()
          cbHandler _ Nothing = return ()
          cbHandler _ (Just txt) = runControl (runAction . EditorA $ do
            window <- findWindowWith winRef <$> get
            withGivenBufferAndWindow window (viewFBufRef view) $ do
              pointB >>= setSelectionMarkPointB
              moveTo p1
              insertN (R.fromText txt)) control
      liftBase $ clipboardRequestText cb cbHandler
    _ -> return ()
  liftBase $ widgetQueueDraw (drawArea view)
  return True
-- | Scroll-wheel handler: translate the direction into a one-line
-- vim-style scroll (up = -1, down = 1, anything else is a no-op
-- scroll of 0) and queue a redraw.
handleScroll :: View -> Gtk.ScrollDirection -> ControlM Bool
handleScroll view direction = do
    let delta = case direction of
            Gtk.ScrollDirectionUp -> -1
            Gtk.ScrollDirectionDown -> 1
            _ -> 0 -- Left/right scrolling not supported
    runAction (EditorA (withCurrentBuffer (vimScrollB delta)))
    liftBase (widgetQueueDraw (drawArea view))
    return True
-- | Drag handler installed by 'handleClick': extend the selection from
-- the press point @p0@ to the current pointer position, copy the
-- selected text into the register, and publish it as the PRIMARY
-- selection.  The IORef round-trip relies on the action channel being
-- synchronous (see comment below).
handleMove :: View -> Point -> Double -> Double -> ControlM Bool
handleMove view p0 x y = do
  logPutStrLn $ "Motion: " <> showT (x, y)
  -- retrieve the clicked offset.
  (_,layoutIndex,_) <- liftBase $ layoutXyToIndex (layout view) (toPango x) (toPango y)
  tos <- liftBase $ readIORef (shownTos view)
  let p1 = tos + fromIntegral layoutIndex
  let editorAction = do
        txt <- withCurrentBuffer $
           if p0 /= p1
           then Just <$> do
             m <- selMark <$> askMarks
             markPointA m .= p0
             moveTo p1
             setVisibleSelection True
             readRegionB =<< getSelectRegionB
           else return Nothing
        maybe (return ()) setRegE txt
  runAction $ makeAction editorAction
  -- drawWindowGetPointer (textview w) -- be ready for next message.
  -- Relies on uiActionCh being synchronous
  selection <- liftBase $ newIORef ""
  let yiAction = do
        txt <- withCurrentBuffer (readRegionB =<< getSelectRegionB)
                 :: YiM R.YiString
        liftBase $ writeIORef selection txt
  runAction $ makeAction yiAction
  txt <- liftBase $ readIORef selection
  disp <- liftBase $ widgetGetDisplay (drawArea view)
  cb <- liftBase $ clipboardGetForDisplay disp =<< atomIntern "PRIMARY" False
  liftBase $ clipboardSetText cb (R.toText txt) (-1)
  liftBase $ widgetQueueDraw (drawArea view)
  return True
-- | Decode a GTK key event and, when it translates to a Yi 'Event',
-- push it onto the editor's input channel.  Always reports the event
-- as handled.  (The modifier binding was renamed from @mod@, which
-- shadowed 'Prelude.mod'.)
processKeyEvent :: ([Event] -> IO ()) -> EventKey -> IO Bool
processKeyEvent ch ev = do
    -- logPutStrLn $ "Gtk.Event: " <> show ev
    -- logPutStrLn $ "Event: " <> show (gtkToYiEvent ev)
    keyval <- Gtk.getEventKeyKeyval ev
    name <- keyvalName keyval
    modifiers <- Gtk.getEventKeyState ev
    char <- toEnum . fromIntegral <$> keyvalToUnicode keyval
    case gtkToYiEvent name modifiers char of
        Nothing -> logPutStrLn $ "Event not translatable: " <> showT (name, modifiers, char)
        Just e -> liftBase $ ch [e]
    return True
-- | Translate a GTK key (long name + modifiers + unicode char) to a Yi
-- 'Event'.  Printable characters (>= space) are taken directly as
-- 'KASCII', in which case the Shift modifier is dropped (it is already
-- reflected in the character itself); otherwise the long name is
-- looked up in 'keyTable'.  Unknown modifiers are ignored.
gtkToYiEvent :: Maybe Text -> [Gtk.ModifierType] -> Char -> Maybe Event
gtkToYiEvent key evModifier char
    = fmap (\k -> Event k $ (nub $ (if isShift then filter (/= MShift) else id) $ concatMap modif evModifier)) key'
    where (key',isShift) =
              case (char, key) of
                (c, _) | c >= ' ' -> (Just $ KASCII c, True)
                (_, Just k) -> (Map.lookup k keyTable, False)
                _ -> (Nothing, False)
          modif Gtk.ModifierTypeControlMask = [MCtrl]
          modif Gtk.ModifierTypeMetaMask = [MMeta]
          modif Gtk.ModifierTypeShiftMask = [MShift]
          modif _ = []
-- | Map GTK long names to Keys.  Looked up (totally, via 'Map.lookup')
-- by 'gtkToYiEvent' for non-printable keys.  Both \"Tab\" and
-- \"ISO_Left_Tab\" (Shift-Tab) deliberately map to 'KTab'.
keyTable :: Map.Map Text Key
keyTable = Map.fromList
    [("Down", KDown)
    ,("Up", KUp)
    ,("Left", KLeft)
    ,("Right", KRight)
    ,("Home", KHome)
    ,("End", KEnd)
    ,("BackSpace", KBS)
    ,("Delete", KDel)
    ,("Page_Up", KPageUp)
    ,("Page_Down", KPageDown)
    ,("Insert", KIns)
    ,("Escape", KEsc)
    ,("Return", KEnter)
    ,("Tab", KTab)
    ,("ISO_Left_Tab", KTab)
    ]
| leksah/yi | src/library/Yi/UI/Pango/Control.hs | gpl-2.0 | 37,041 | 12 | 36 | 10,758 | 9,743 | 4,990 | 4,753 | -1 | -1 |
{-# OPTIONS_GHC -fno-prof-auto #-}
{-# LANGUAGE DeriveLift #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Types
( AllowNewer(..)
, CommitId(..)
, HashDigest
, Hash(..)
, ImplType(..)
, Percentage
, getPercentage
, mkPercentage
, percent
, renderPercentage
, validRational
) where
import Crypto.Hash (Digest, MD5, digestFromByteString)
import Data.Bifunctor (first)
import Data.ByteString (ByteString)
import Data.Maybe (fromJust)
import Data.Proxy (Proxy(Proxy))
import Data.Text (Text, pack, unpack)
import Database.Persist.Class (PersistField(..))
import Database.Persist.TH
import Database.Persist.Sql (PersistFieldSql(..))
import Numeric (showFFloat)
import ValidLiterals
-- | Implementation categories: 'Builtin', 'Core' or 'Derived'.
data ImplType = Builtin | Core | Derived
    deriving (Bounded, Enum, Eq, Ord, Read, Show)
-- Template Haskell: persist ImplType via its Read/Show text form.
derivePersistField "ImplType"

-- | Policy controlling which "newer than recorded" data is accepted;
-- see the call sites for the exact meaning of each level.
data AllowNewer = NoNewer | NewerResults | NewerImpls | AllNewer
    deriving (Bounded, Enum, Eq, Ord, Read, Show)
derivePersistField "AllowNewer"
-- | The digest algorithm used throughout: MD5.
type HashDigest = Digest MD5

-- | Raw digest bytes; the 'Show' instance below re-interprets them as
-- a 'HashDigest'.  The database round-trips the bytes unchanged.
newtype Hash = Hash { getHash :: ByteString }
    deriving (Read, Eq, PersistField, PersistFieldSql)
-- | Render the stored bytes as an MD5 digest.  If the bytes are not a
-- valid digest length, fall back to showing the raw bytes instead of
-- crashing (the previous 'fromJust' was partial and would error on
-- any corrupt database value).
instance Show Hash where
    show (Hash bs) =
        maybe ("Hash " ++ show bs) show
              (digestFromByteString bs :: Maybe HashDigest)
-- | Opaque commit identifier stored as text (presumably a VCS commit
-- hash — confirm at call sites).
newtype CommitId = CommitId { getCommitId :: Text }
    deriving (Read, Eq, Ord, PersistField, PersistFieldSql)
-- | Show the commit id as its underlying text, without constructor
-- noise or quotes.
instance Show CommitId where
    show = unpack . getCommitId
-- | A fraction validated to lie in [0, 1] (see the 'Validate'
-- instances).  The data constructor is not exported; build values via
-- 'mkPercentage' or compile-time 'ValidLiterals'.
newtype Percentage = Percentage { getPercentage :: Double}
    deriving (Show, Eq, Ord, Lift)
-- | Render @x / y@ as a percentage with two decimal places, e.g.
-- @percent 1 2 == "50.00%"@.
percent :: Real n => n -> n -> Text
percent numerator denominator = pack (showFFloat (Just 2) ratio "%")
  where
    ratio :: Double
    ratio = 100 * realToFrac numerator / realToFrac denominator
-- | Render a validated 'Percentage' (a fraction of 1) as text.
renderPercentage :: Percentage -> Text
renderPercentage (Percentage fraction) = percent fraction 1
-- | Validate a 'Double' into a 'Percentage'; 'Nothing' when the value
-- is outside [0, 1].
mkPercentage :: Double -> Maybe Percentage
mkPercentage = fromLiteral
-- | A percentage literal is valid exactly when it lies in [0, 1].
instance Validate Double Percentage where
    fromLiteralWithError d
        | d < 0 = Left "Percentages can't be smaller than 0"
        | d > 1 = Left "Percentages can't be larger than 1"
        | otherwise = Right (Percentage d)
-- | Rational literals are validated by converting to 'Double' and
-- delegating to that instance.
instance Validate Rational Percentage where
  fromLiteralWithError d = fromLiteralWithError (fromRational d :: Double)
-- | Persist a 'Percentage' as a plain 'Double', re-validating on the
-- way back in so out-of-range database values are rejected as a
-- deserialisation error rather than smuggled past the smart
-- constructor.
instance PersistField Percentage where
  toPersistValue = toPersistValue . getPercentage
  fromPersistValue val = fromPersistValue val >>= toPercentage
    where
      toPercentage :: Double -> Either Text Percentage
      toPercentage = first pack . fromLiteralWithError
-- | The SQL column type matches the underlying 'Double'.
instance PersistFieldSql Percentage where
  sqlType _ = sqlType (Proxy :: Proxy Double)
| merijn/GPU-benchmarks | benchmark-analysis/src/Types.hs | gpl-3.0 | 2,756 | 0 | 9 | 528 | 757 | 418 | 339 | 71 | 1 |
module Model where
import Control.Concurrent.STM (TQueue)
import Control.Monad.Reader (ReaderT)
import Control.Monad.State.Strict (StateT)
import Linear
import qualified Graphics.UI.GLFW as G
import qualified Graphics.Rendering.OpenGL as GL
import Graphics.Object
import Utils
-- | Read-only environment: the event queue fed by GLFW callbacks and
-- the window handle.
data Env = Env
  { envEventsChan :: TQueue Event
  , envWindow :: !G.Window
  }

-- | Mutable application state threaded through the main loop.
data State = State
  { stateWindowWidth :: !Int
  , stateWindowHeight :: !Int
  , points :: Object   -- ^ renderable point-cloud object
  , viewer :: Viewer   -- ^ current camera
  }

-- | Events delivered from GLFW callbacks to the main loop.
data Event =
  EventError !G.Error !String
  | EventKey !G.Window !G.Key !Int !G.KeyState !G.ModifierKeys
  | EventCursor !Double !Double
  deriving Show

-- | Reader (environment) over State over an arbitrary base monad.
type RST r st m = ReaderT r (StateT st m)
-- | The application monad: 'RST' specialised to Env/State over IO,
-- returning ().
type App = RST Env State IO ()

type Position = V3 Double
type Velocity = V3 Double

-- | Free-flying viewer: position, velocity and the two view angles.
data Viewer = Viewer
  { position :: Position
  , velocity :: Velocity
  , horizontalAngle :: Double
  , verticalAngle :: Double
  }
-- | Viewer at the origin, at rest, with both view angles zero.
initialViewer :: Viewer
initialViewer =
  Viewer { position = V3 0 0 0
         , velocity = V3 0 0 0
         , horizontalAngle = 0
         , verticalAngle = 0
         }
-- | Unit-ish gaze direction derived from the viewer's angles: the
-- horizontal angle is offset by -pi/2 before being mapped onto the
-- x/z plane, the vertical angle gives the y component.
direction :: Viewer -> V3 Double
direction v = V3 (cos yaw) (sin pitch) (negate (sin yaw))
  where
    yaw = horizontalAngle v - pi / 2
    pitch = verticalAngle v
| sgillis/HaskHull | src/Model.hs | gpl-3.0 | 1,367 | 1 | 12 | 413 | 402 | 228 | 174 | 66 | 1 |
module PetaVision.PVPFile.Utility where
import Data.Array.Unboxed as AU
import GHC.Float
import Prelude as P
-- | Expand a sparse PVP frame (linear index / value pairs) into a dense
-- @(feature, x, y)@ array.  The linear index is feature-fastest:
-- @i = f + nf * (x + nx * y)@.  Duplicate indices are summed and
-- unmentioned cells default to 0.
sparse2Nonsparse
  :: (Int,Int,Int) -> [(Int,Double)] -> AU.Array (Int,Int,Int) Double
sparse2Nonsparse (nf, nx, ny) activations =
  accumArray (+) 0 ((0, 0, 0), (nf - 1, nx - 1, ny - 1))
             [ (unflatten i, v) | (i, v) <- activations ]
  where
    -- Invert the feature-fastest linear index via two divMods.
    unflatten i =
      let (y, rest) = i `divMod` (nf * nx)
          (x, f) = rest `divMod` nf
      in (f, x, y)
| XinhuaZhang/PetaVisionHaskell | PetaVision/PVPFile/Utility.hs | gpl-3.0 | 659 | 0 | 10 | 264 | 266 | 156 | 110 | 19 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Calendar.Events.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns an event.
--
-- /See:/ <https://developers.google.com/google-apps/calendar/firstapp Calendar API Reference> for @calendar.events.get@.
module Network.Google.Resource.Calendar.Events.Get
(
-- * REST Resource
EventsGetResource
-- * Creating a Request
, eventsGet
, EventsGet
-- * Request Lenses
, egCalendarId
, egMaxAttendees
, egTimeZone
, egAlwaysIncludeEmail
, egEventId
) where
import Network.Google.AppsCalendar.Types
import Network.Google.Prelude
-- | A resource alias for @calendar.events.get@ method which the
-- 'EventsGet' request conforms to.
-- Each (:>) segment below mirrors one path or query component of the
-- REST call: /calendar/v3/calendars/{calendarId}/events/{eventId}.
type EventsGetResource =
     "calendar" :>
       "v3" :>
         "calendars" :>
           Capture "calendarId" Text :>
             "events" :>
               Capture "eventId" Text :>
                 QueryParam "maxAttendees" (Textual Int32) :>
                   QueryParam "timeZone" Text :>
                     QueryParam "alwaysIncludeEmail" Bool :>
                       QueryParam "alt" AltJSON :> Get '[JSON] Event
-- | Returns an event.
--
-- /See:/ 'eventsGet' smart constructor.
data EventsGet =
  EventsGet'
    { _egCalendarId :: !Text                      -- ^ calendar identifier (required)
    , _egMaxAttendees :: !(Maybe (Textual Int32)) -- ^ optional cap on attendees returned
    , _egTimeZone :: !(Maybe Text)                -- ^ optional response time zone
    , _egAlwaysIncludeEmail :: !(Maybe Bool)      -- ^ deprecated; ignored by the API
    , _egEventId :: !Text                         -- ^ event identifier (required)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'EventsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'egCalendarId'
--
-- * 'egMaxAttendees'
--
-- * 'egTimeZone'
--
-- * 'egAlwaysIncludeEmail'
--
-- * 'egEventId'
eventsGet
    :: Text -- ^ 'egCalendarId'
    -> Text -- ^ 'egEventId'
    -> EventsGet
eventsGet pEgCalendarId_ pEgEventId_ =
  EventsGet'
    { _egCalendarId = pEgCalendarId_
    , _egMaxAttendees = Nothing -- no attendee cap by default
    , _egTimeZone = Nothing -- default: the calendar's own time zone
    , _egAlwaysIncludeEmail = Nothing
    , _egEventId = pEgEventId_
    }
-- Lenses over the private 'EventsGet' fields.  'egMaxAttendees'
-- additionally coerces between the wire representation
-- ('Textual' 'Int32') and a plain 'Int32' via @mapping _Coerce@.

-- | Calendar identifier. To retrieve calendar IDs call the calendarList.list
-- method. If you want to access the primary calendar of the currently
-- logged in user, use the \"primary\" keyword.
egCalendarId :: Lens' EventsGet Text
egCalendarId
  = lens _egCalendarId (\ s a -> s{_egCalendarId = a})

-- | The maximum number of attendees to include in the response. If there are
-- more than the specified number of attendees, only the participant is
-- returned. Optional.
egMaxAttendees :: Lens' EventsGet (Maybe Int32)
egMaxAttendees
  = lens _egMaxAttendees
      (\ s a -> s{_egMaxAttendees = a})
      . mapping _Coerce

-- | Time zone used in the response. Optional. The default is the time zone
-- of the calendar.
egTimeZone :: Lens' EventsGet (Maybe Text)
egTimeZone
  = lens _egTimeZone (\ s a -> s{_egTimeZone = a})

-- | Deprecated and ignored. A value will always be returned in the email
-- field for the organizer, creator and attendees, even if no real email
-- address is available (i.e. a generated, non-working value will be
-- provided).
egAlwaysIncludeEmail :: Lens' EventsGet (Maybe Bool)
egAlwaysIncludeEmail
  = lens _egAlwaysIncludeEmail
      (\ s a -> s{_egAlwaysIncludeEmail = a})

-- | Event identifier.
egEventId :: Lens' EventsGet Text
egEventId
  = lens _egEventId (\ s a -> s{_egEventId = a})
instance GoogleRequest EventsGet where
        type Rs EventsGet = Event
        type Scopes EventsGet =
             '["https://www.googleapis.com/auth/calendar",
               "https://www.googleapis.com/auth/calendar.events",
               "https://www.googleapis.com/auth/calendar.events.readonly",
               "https://www.googleapis.com/auth/calendar.readonly"]
        -- Argument order matches the Capture/QueryParam order declared
        -- in 'EventsGetResource'.
        requestClient EventsGet'{..}
          = go _egCalendarId _egEventId _egMaxAttendees
              _egTimeZone
              _egAlwaysIncludeEmail
              (Just AltJSON)
              appsCalendarService
          where go
                  = buildClient (Proxy :: Proxy EventsGetResource)
                      mempty
| brendanhay/gogol | gogol-apps-calendar/gen/Network/Google/Resource/Calendar/Events/Get.hs | mpl-2.0 | 4,786 | 0 | 17 | 1,129 | 657 | 387 | 270 | 98 | 1 |
import Test.Hspec
import Data.List
import Data.Array
-- Use dynamic programming to avoid redoing the same job multiple times.
--
-- The array cells refer back to the array itself ('cache'), so laziness
-- memoises every cell: each one is computed at most once.  (The previous
-- version re-applied @paths_cache m@ inside every cell, rebuilding the
-- array over and over — that is why "Array is very slow in here".)
paths_cache :: Int -> Array (Int, Int) Int
paths_cache m = cache
  where mx = m
        my = m
        -- Self-referential lazy array: O(m^2) cells, each computed once.
        cache = array ((0,0),(mx,my))
          (((mx,my),1) :
          [((mx,j),1) | j <- [0..(my-1)]] ++ -- last column is 1s
          [((i,my),1) | i <- [0..(mx-1)]] ++ -- last row is 1s
          [((i,j), cache!(i+1,j) + cache!(i,j+1)) | i <- reverse [0..(mx-1)], j <- reverse [0..(my-1)]])
--
-- Same as 2d array implementation logic, but based on lists
--
-- Generate the row above the given one: each cell is the suffix sum of
-- the current row from that position rightwards.  Undefined for the
-- empty list, like the original pattern set.
gen_prev_row :: [Int] -> [Int]
gen_prev_row row@(_:_) = scanr1 (+) row
-- Build the matrix of path counts as a list of rows, top row first;
-- the bottom row is all ones and each row is derived from the one below.
paths_cache_2 :: Int -> Int -> [[Int]]
paths_cache_2 0 y = [replicate (y + 1) 1]
paths_cache_2 x y = above : below
  where
    below = paths_cache_2 (x - 1) y
    above = suffixSums (head below)
    -- Suffix sums of the row underneath give the row above it
    -- (same computation as 'gen_prev_row').
    suffixSums row@(_:_) = scanr1 (+) row
-- Same 2d list but nicer definition.
--
-- The matrix is tied to ITSELF ('matrix' on the right-hand side), so the
-- cells are properly memoised.  (The previous version re-invoked
-- @paths_cache_3_ mx my@ for the inner matrix, descending one fresh copy
-- per dependency level; it also seeded the first row with @my+3@ ones,
-- two more than the row actually needs.)
paths_cache_3_ :: Int -> Int -> [[Int]]
paths_cache_3_ mx my = matrix
  where matrix = replicate (my+1) 1
               : [ 1 : [matrix!!(i-1)!!j + matrix!!i!!(j-1)| j <- [1..my] ] | i <- [1..mx]]
-- Just reverse the order of the rows and the columns, so that the total
-- count ends up in the top-left cell.
paths_cache_3 :: Int -> Int -> [[Int]]
paths_cache_3 mx my = reverse (map reverse (paths_cache_3_ mx my))
-- Naive recursive count of the monotone (right/down-step) paths from
-- position (x, y) to the far corner (mx, my) of the grid.
lattice_paths_count_ :: (Int, Int) -> (Int, Int) -> Int
lattice_paths_count_ (x, y) bound@(mx, my)
  | atRight && atBottom = 1
  | atRight             = goDown
  | atBottom            = goRight
  | otherwise           = goRight + goDown
  where
    atRight  = x == mx
    atBottom = y == my
    goRight  = lattice_paths_count_ (x + 1, y) bound
    goDown   = lattice_paths_count_ (x, y + 1) bound
-- Number of right/down paths across an s-by-s grid.
-- (Alternative implementations kept above for reference:
-- 'lattice_paths_count_' (naive), 'paths_cache' (array),
-- 'paths_cache_2' (list based).)
lattice_paths_count :: Int -> Int
lattice_paths_count s = head (head (paths_cache_3 s s))
-- Tests + result print
main :: IO ()
main = hspec $ do
  describe "Dummy" $ do
    it "dummy test" $ do
      True `shouldBe` True
  describe "Euler test" $ do
    it "lattise paths for 2x2 grid" $ do
      -- Compare the computed count directly: the old form
      -- @True `shouldBe` (...) == 6@ only parsed thanks to shouldBe's
      -- `infix 1` fixity, and on failure reported True/False instead of
      -- the offending value.
      lattice_paths_count 2 `shouldBe` 6
  describe "Euler actual problem" $ do
    it "sum of big numbers" $ do
      putStrLn ("res = " ++ show (lattice_paths_count 20))
| orbitgray/ProjectEuler | haskell/015.hs | lgpl-3.0 | 2,708 | 0 | 19 | 637 | 1,013 | 554 | 459 | 44 | 1 |
module Network.FQDN (fullyQualifiedDomainName) where
import Foreign
import Foreign.C
import Foreign.C.Types
-- C helper that fills the given buffer (of the given size) with the host's
-- fully qualified domain name.  The Haskell wrapper below treats a return
-- value of 0 as success; NOTE(review): the exact failure codes are defined
-- in the accompanying C source — confirm there.
foreign import ccall unsafe "c_fullyQualifiedDomainName"
  c_fullyQualifiedDomainName :: CString -> CSize -> IO CInt
-- | Return the fully qualified domain name (if available). Not all hosts have
-- a FQDN defined. If the FQDN is not available, you can fall back to
-- `Network.BSD.getHostName`.
fullyQualifiedDomainName :: IO (Maybe String)
fullyQualifiedDomainName =
    allocaArray0 fqdnBufferSize $ \buf -> do
        rc <- c_fullyQualifiedDomainName buf (fromIntegral fqdnBufferSize)
        case rc of
            0 -> fmap Just (peekCString buf) -- success: read the NUL-terminated name
            _ -> return Nothing              -- lookup failed; no FQDN available
  where
    -- The maximum buffer size (in bytes) for the FQDN.
    fqdnBufferSize = 200
| wereHamster/fqdn | Network/FQDN.hs | unlicense | 789 | 0 | 14 | 170 | 142 | 76 | 66 | 14 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Lst.Feat where
import Text.Parsec.Char (char, satisfy)
import Text.Parsec.Combinator (sepBy, sepBy1, many1)
import ClassyPrelude
import Modifications
import JEPFormula hiding (Add)
import Common
-- | One parsed tag of a feat definition from an LST file.  Each
-- constructor corresponds to one of the tags recognised by
-- 'parseFeatTag' below (DESC, VISIBLE, TYPE, MULT, STACK, BENEFIT,
-- COST, ADDSPELLLEVEL, TEMPLATE:CHOOSE, TEMPLATE, APPLIEDNAME, ASPECT).
data FeatDefinition = Description String
                    | Visible Visibility
                    | Type [String]
                    | Multiple Bool
                    | CanStack Bool
                    | Benefit String
                    | Cost Formula
                    | AddSpellLevel Formula
                    | TemplateChoice [String]
                    | Template [String]
                    | AppliedName String
                    | Aspect String
                      deriving Show
-- Tag parsers.  Each parser matches one @TAG:value@ pair; most simply
-- consume the rest of the tag as a raw string or hand the value off to
-- a shared parser ('parseFormula', 'parseString', 'yesOrNo').
parseDescription :: PParser String
parseDescription = tag "DESC" *> restOfTag
-- TYPE values are dot-separated words (letters, digits and a few
-- punctuation characters).
parseType :: PParser [String]
parseType = tag "TYPE" *> parseWordAndNumbers `sepBy1` char '.' where
  parseWordAndNumbers = many1 $ satisfy $ inClass "-A-Za-z0-9, _/"
parseMult :: PParser Bool
parseMult = tag "MULT" *> yesOrNo
parseStack :: PParser Bool
parseStack = tag "STACK" *> yesOrNo
parseCost :: PParser Formula
parseCost = tag "COST" *> parseFormula
parseAddSpellLevel :: PParser Formula
parseAddSpellLevel = tag "ADDSPELLLEVEL" *> parseFormula
parseAppliedName :: PParser String
parseAppliedName = tag "APPLIEDNAME" *> restOfTag
-- TEMPLATE values are pipe-separated template names.
parseTemplate :: PParser [String]
parseTemplate = tag "TEMPLATE" *> parseString `sepBy` char '|'
parseTemplateChoose :: PParser [String]
parseTemplateChoose = tag "TEMPLATE:CHOOSE" *> parseString `sepBy` char '|'
-- TODO:
-- parse properly (%1 especially)
parseAspect :: PParser String
parseAspect = tag "ASPECT" *> restOfTag
-- TODO:
-- parse properly
parseBenefit :: PParser String
parseBenefit = tag "BENEFIT" *> restOfTag
-- | Value of the VISIBLE tag: whether (and where) the feat is shown.
data Visibility = IsVisible
                | NotVisible
                | Display
                | Export
                deriving (Show, Eq)
-- Accepts exactly one of YES / NO / DISPLAY / EXPORT after "VISIBLE:".
parseVisible :: PParser Visibility
parseVisible = tag "VISIBLE" *> parseVisibility where
  parseVisibility = IsVisible <$ labeled "YES"
                <|> NotVisible <$ labeled "NO"
                <|> Display <$ labeled "DISPLAY"
                <|> Export <$ labeled "EXPORT"
-- | Try each tag parser in turn; the first that matches wins.  Note that
-- TEMPLATE:CHOOSE is tried before TEMPLATE, since the latter's tag name
-- is a prefix of the former's.
parseFeatTag :: PParser FeatDefinition
parseFeatTag = Description <$> parseDescription
           <|> Type <$> parseType
           <|> Visible <$> parseVisible
           <|> Multiple <$> parseMult
           <|> CanStack <$> parseStack
           <|> Benefit <$> parseBenefit
           <|> Cost <$> parseCost
           <|> AddSpellLevel <$> parseAddSpellLevel
           <|> TemplateChoice <$> parseTemplateChoose
           <|> Template <$> parseTemplate
           <|> AppliedName <$> parseAppliedName
           <|> Aspect <$> parseAspect
-- Wire the feat tag parser into the generic LST object machinery.
instance LSTObject FeatDefinition where
  parseSpecificTags = parseFeatTag
| gamelost/pcgen-rules | src/Lst/Feat.hs | apache-2.0 | 2,850 | 0 | 27 | 815 | 622 | 335 | 287 | 70 | 1 |
module Helpers.Factorials (binomial, factorial) where
-- | @factorial n@ is @n!@.
--
-- Raises an error for negative arguments instead of looping forever
-- (the previous accumulator recursion decremented past zero and never
-- terminated for @n < 0@).
factorial :: Integer -> Integer
factorial n
  | n < 0 = error "Helpers.Factorials.factorial: negative argument"
  | otherwise = product [1..n]

-- | @binomial n k@ is the binomial coefficient C(n, k).
--
-- Out-of-range arguments yield 0, except that @binomial n 0 = 1@ for
-- every @n@ (including @binomial 0 0@).  Uses the symmetry
-- C(n, k) = C(n, n - k) so only the shorter of the two products is built.
binomial :: Integer -> Integer -> Integer
binomial _ 0 = 1
binomial 0 _ = 0
binomial n k
  | n < k' = 0 -- k > n (or n negative): nothing to choose
  | otherwise = product [k' + 1..n] `div` factorial (n - k')
  where
    k' = max k (n - k)
| peterokagey/haskellOEIS | src/Helpers/Factorials.hs | apache-2.0 | 403 | 0 | 9 | 96 | 186 | 96 | 90 | 12 | 2 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UnicodeSyntax #-}
import qualified Data.IntSet as IntSet
import System.Log.Logger (Priority(DEBUG),rootLoggerName,setLevel,updateGlobalLogger)
import Test.Framework
import Test.Framework.Providers.QuickCheck2
import Test.QuickCheck.Instances ()
import LogicGrowsOnTrees
import LogicGrowsOnTrees.Testing
main :: IO ()
main = do
    -- Show all log messages while the tests run, then hand over to
    -- test-framework's runner.
    updateGlobalLogger rootLoggerName (setLevel DEBUG)
    defaultMain [tests]
-- Property: a 'UniqueTree' must yield pairwise-distinct results, i.e.
-- the number of results equals the size of the set of their union.
tests :: Test
tests = testGroup "test helpers"
    [testProperty "UniqueTree has unique results" $ \(UniqueTree tree) →
        let results = exploreTree (fmap (:[]) tree )
        in length results == IntSet.size (mconcat results)
    ]
| gcross/LogicGrowsOnTrees | LogicGrowsOnTrees/tests/test-helpers.hs | bsd-2-clause | 704 | 0 | 17 | 111 | 184 | 102 | 82 | 18 | 1 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DeriveFunctor #-}
module Data.SRX
( SRX (..)
, Lang (..)
, Rule (..)
, rulesBy
-- , merge
-- , mergeBy
-- , ruleRE
) where
import Prelude hiding (break)
import Data.List (find)
import Data.Maybe (maybeToList)
import Text.Regex.Applicative
import Data.Regex.Parse
-- | Header is ignored for now; we assume the 'cascading' flag is on.
newtype SRX t = SRX
    { unSRX :: [Lang t] }
    deriving (Show, Functor)

data Lang t = Lang
    { langName :: String
    , rules :: [Rule t] }
    deriving (Show, Functor)

data Rule t = Rule
    { break :: Bool
    , before :: t
    , after :: t }
    deriving (Show, Functor)

-- | Concatenate the rules of the chosen languages, in the order the
-- names are given; names without a matching language contribute nothing.
rulesBy :: [String] -> SRX a -> [Rule a]
rulesBy names srx = concatMap rulesFor names
  where
    langs = unSRX srx
    rulesFor name = maybe [] rules (find ((name ==) . langName) langs)
-- data Result a
-- = Break a a -- ^ When break rule fired
-- | NoBreak a -- ^ When no-break rule fired
-- deriving (Show, Functor)
--
-- merge :: [Rule (RE t [a])] -> RE t (Result [a])
-- merge [] = error "merge: empty list of rules"
-- merge rs = foldl1 (<|>) (map ruleRE rs)
--
-- -- | Merge rules from chosen languages.
-- mergeBy :: [String] -> SRX (RE t [a]) -> RE t (Result [a])
-- mergeBy names srx = merge . concat $
-- [ rules lang
-- | name <- names
-- , lang <- maybeToList (find ((name==) . langName) (unSRX srx)) ]
--
-- ruleRE :: Rule (RE t [a]) -> RE t (Result [a])
-- ruleRE Rule{..}
-- | break = Break <$> before <*> after
-- | otherwise = noBreak <$> before <*> after
-- where
-- noBreak x y = NoBreak (x ++ y)
| kawu/tokenize | Data/SRX.hs | bsd-2-clause | 1,685 | 0 | 14 | 433 | 300 | 189 | 111 | 29 | 1 |
{-| Implementation of the generic daemon functionality.
-}
{-
Copyright (C) 2011, 2012 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Daemon
( DaemonOptions(..)
, OptType
, CheckFn
, PrepFn
, MainFn
, defaultOptions
, oShowHelp
, oShowVer
, oNoDaemonize
, oNoUserChecks
, oDebug
, oPort
, oBindAddress
, oSyslogUsage
, oForceNode
, oNoVoting
, oYesDoIt
, parseArgs
, parseAddress
, cleanupSocket
, describeError
, genericMain
, getFQDN
) where
import Control.Concurrent
import Control.Exception
import Control.Monad
import Data.Maybe (fromMaybe, listToMaybe)
import Text.Printf
import Data.Word
import GHC.IO.Handle (hDuplicateTo)
import Network.BSD (getHostName)
import qualified Network.Socket as Socket
import System.Console.GetOpt
import System.Directory
import System.Exit
import System.Environment
import System.IO
import System.IO.Error (isDoesNotExistError, modifyIOError, annotateIOError)
import System.Posix.Directory
import System.Posix.Files
import System.Posix.IO
import System.Posix.Process
import System.Posix.Types
import System.Posix.Signals
import Ganeti.Common as Common
import Ganeti.Logging
import Ganeti.Runtime
import Ganeti.BasicTypes
import Ganeti.Utils
import qualified Ganeti.Constants as C
import qualified Ganeti.Ssconf as Ssconf
-- * Constants

-- | Path of the null device, @\/dev\/null@.
devNull :: FilePath
devNull = "/dev/null"

-- | Error message prefix, used in two separate paths (when forking
-- and when not).
daemonStartupErr :: String -> String
daemonStartupErr msg = "Error when starting the daemon process: " ++ msg
-- * Data types

-- | Command line options structure.  Built by folding the @o*@ option
-- definitions below over 'defaultOptions'.
data DaemonOptions = DaemonOptions
  { optShowHelp :: Bool -- ^ Just show the help
  , optShowVer :: Bool -- ^ Just show the program version
  , optShowComp :: Bool -- ^ Just show the completion info
  , optDaemonize :: Bool -- ^ Whether to daemonize or not
  , optPort :: Maybe Word16 -- ^ Override for the network port
  , optDebug :: Bool -- ^ Enable debug messages
  , optNoUserChecks :: Bool -- ^ Ignore user checks
  , optBindAddress :: Maybe String -- ^ Listen on a custom address
  , optSyslogUsage :: Maybe SyslogUsage -- ^ Override for Syslog usage
  , optForceNode :: Bool -- ^ Ignore node checks
  , optNoVoting :: Bool -- ^ skip voting for master
  , optYesDoIt :: Bool -- ^ force dangerous options
  }
-- | Default values for the command line options: daemonize, default
-- port and bind address, no debug, with user checks.
defaultOptions :: DaemonOptions
defaultOptions = DaemonOptions
  { optShowHelp = False
  , optShowVer = False
  , optShowComp = False
  , optDaemonize = True
  , optPort = Nothing
  , optDebug = False
  , optNoUserChecks = False
  , optBindAddress = Nothing
  , optSyslogUsage = Nothing
  , optForceNode = False
  , optNoVoting = False
  , optYesDoIt = False
  }
-- Hook the options record into the generic help/version/completion
-- handling of 'Ganeti.Common'.
instance StandardOptions DaemonOptions where
  helpRequested = optShowHelp
  verRequested = optShowVer
  compRequested = optShowComp
  requestHelp o = o { optShowHelp = True }
  requestVer o = o { optShowVer = True }
  requestComp o = o { optShowComp = True }
-- | Abbreviation for the option type.
type OptType = GenericOptType DaemonOptions
-- | Check function type: runs before daemonizing; a 'Left' exit code
-- terminates the program, a 'Right' value is handed to the prepare
-- function.
type CheckFn a = DaemonOptions -> IO (Either ExitCode a)
-- | Prepare function type: runs after daemonization, turning the check
-- result into whatever the main function needs.
type PrepFn a b = DaemonOptions -> a -> IO b
-- | Main execution function type, receiving both the check and the
-- prepare results.
type MainFn a b = DaemonOptions -> a -> b -> IO ()
-- * Command line options

-- | Run in the foreground instead of detaching from the terminal.
oNoDaemonize :: OptType
oNoDaemonize =
  (Option "f" ["foreground"]
   (NoArg (\ opts -> Ok opts { optDaemonize = False }))
   "Don't detach from the current terminal",
   OptComplNone)
-- | Enable debug-level logging.
oDebug :: OptType
oDebug =
  (Option "d" ["debug"]
   (NoArg (\ opts -> Ok opts { optDebug = True }))
   "Enable debug messages",
   OptComplNone)
-- | Skip the daemon user/group checks.
oNoUserChecks :: OptType
oNoUserChecks =
  (Option "" ["no-user-checks"]
   (NoArg (\ opts -> Ok opts { optNoUserChecks = True }))
   "Ignore user checks",
   OptComplNone)
-- | Override the network port; the argument is the daemon's default
-- port, used only for the help text.
oPort :: Int -> OptType
oPort def =
  (Option "p" ["port"]
   (reqWithConversion (tryRead "reading port")
    (\port opts -> Ok opts { optPort = Just port }) "PORT")
   ("Network port (default: " ++ show def ++ ")"),
   OptComplInteger)
-- | Bind to a specific address instead of the wildcard address.
oBindAddress :: OptType
oBindAddress =
  (Option "b" ["bind"]
   (ReqArg (\addr opts -> Ok opts { optBindAddress = Just addr })
    "ADDR")
   "Bind address (default is 'any' on either IPv4 or IPv6, depending \
   \on cluster configuration)",
   OptComplInetAddr)
-- | Override the cluster-wide syslog usage setting.
oSyslogUsage :: OptType
oSyslogUsage =
  (Option "" ["syslog"]
   (reqWithConversion syslogUsageFromRaw
    (\su opts -> Ok opts { optSyslogUsage = Just su })
    "SYSLOG")
   ("Enable logging to syslog (except debug \
    \messages); one of 'no', 'yes' or 'only' [" ++ C.syslogUsage ++
    "]"),
   OptComplChoices ["yes", "no", "only"])
-- | Allow a master-only daemon to run on a non-master node.
oForceNode :: OptType
oForceNode =
  (Option "" ["force-node"]
   (NoArg (\ opts -> Ok opts { optForceNode = True }))
   "Force the daemon to run on a different node than the master",
   OptComplNone)
-- | Skip the node agreement (voting) check.
oNoVoting :: OptType
oNoVoting =
  (Option "" ["no-voting"]
   (NoArg (\ opts -> Ok opts { optNoVoting = True }))
   "Skip node agreement check (dangerous)",
   OptComplNone)
-- | Confirmation flag for dangerous operations.
oYesDoIt :: OptType
oYesDoIt =
  (Option "" ["yes-do-it"]
   (NoArg (\ opts -> Ok opts { optYesDoIt = True }))
   "Force a dangerous operation",
   OptComplNone)
-- | Generic options, appended to every daemon's option list.
genericOpts :: [OptType]
genericOpts = [ oShowHelp
              , oShowVer
              , oShowComp
              ]
-- | Annotates and transforms IOErrors into a Result type. This can be
-- used in the error handler argument to 'catch', for example.
ioErrorToResult :: String -> IOError -> IO (Result a)
ioErrorToResult description exc =
  return . Bad $ description ++ ": " ++ show exc
-- | Small wrapper over getArgs and 'parseOpts'.  Returns the parsed
-- options and the remaining positional arguments.
parseArgs :: String -> [OptType] -> IO (DaemonOptions, [String])
parseArgs cmd options = do
  cmd_args <- getArgs
  parseOpts defaultOptions cmd_args cmd (options ++ genericOpts) []
-- * Daemon-related functions

-- | PID file mode: read/write for the owner only.
pidFileMode :: FileMode
pidFileMode = unionFileModes ownerReadMode ownerWriteMode
-- | PID file open flags: no controlling terminal, no truncation on open.
-- NOTE(review): the file is not truncated after the lock is taken either,
-- so a previous, longer PID could leave trailing bytes — confirm readers
-- tolerate this.
pidFileFlags :: OpenFileFlags
pidFileFlags = defaultFileFlags { noctty = True, trunc = False }
-- | Writes a PID file and locks it.  The returned descriptor must stay
-- open (and thus locked) for the daemon's lifetime.
writePidFile :: FilePath -> IO Fd
writePidFile path = do
  fd <- openFd path ReadWrite (Just pidFileMode) pidFileFlags
  -- Taking the write lock fails if another instance already holds it.
  setLock fd (WriteLock, AbsoluteSeek, 0, 0)
  my_pid <- getProcessID
  _ <- fdWrite fd (show my_pid ++ "\n")
  return fd
-- | Helper function to ensure a socket doesn't exist. Should only be
-- called once we have locked the pid file successfully.
cleanupSocket :: FilePath -> IO ()
cleanupSocket socketPath =
  -- A missing socket is fine: only "does not exist" errors are swallowed.
  catchJust (guard . isDoesNotExistError) (removeLink socketPath)
            (const $ return ())
-- | Sets up a daemon's environment: working directory, file-creation
-- mask, and a new session (detaching from the controlling terminal).
setupDaemonEnv :: FilePath -> FileMode -> IO ()
setupDaemonEnv cwd umask = do
  changeWorkingDirectory cwd
  _ <- setFileCreationMask umask
  _ <- createSession
  return ()
-- | Cleanup function, performing all the operations that need to be done prior
-- to shutting down a daemon.  Currently just removes the PID file.
finalCleanup :: FilePath -> IO ()
finalCleanup = removeFile
-- | Signal handler for the termination signal.
handleSigTerm :: ThreadId -> IO ()
handleSigTerm mainTID =
  -- Throw termination exception to the main thread, so that the daemon is
  -- actually stopped in the proper way, executing all the functions waiting on
  -- "finally" statement.
  Control.Exception.throwTo mainTID ExitSuccess
-- | Signal handler for reopening log files (installed on SIGHUP, e.g.
-- so log rotation can work).
handleSigHup :: FilePath -> IO ()
handleSigHup path = do
  setupDaemonFDs (Just path)
  logInfo "Reopening log files after receiving SIGHUP"
-- | Sets up a daemon's standard file descriptors: stdin reads from
-- \/dev\/null; stdout and stderr append to the log file, or also go to
-- \/dev\/null when no log file is given.
setupDaemonFDs :: Maybe FilePath -> IO ()
setupDaemonFDs logfile = do
  null_in_handle <- openFile devNull ReadMode
  null_out_handle <- openFile (fromMaybe devNull logfile) AppendMode
  hDuplicateTo null_in_handle stdin
  hDuplicateTo null_out_handle stdout
  hDuplicateTo null_out_handle stderr
  hClose null_in_handle
  hClose null_out_handle
-- | Computes the default (wildcard) bind address for a given family;
-- only IPv4 and IPv6 are supported.
defaultBindAddr :: Int           -- ^ The port we want
                -> Socket.Family -- ^ The cluster IP family
                -> Result (Socket.Family, Socket.SockAddr)
defaultBindAddr port family =
  case family of
    Socket.AF_INET ->
      Ok (Socket.AF_INET,
          Socket.SockAddrInet (fromIntegral port) Socket.iNADDR_ANY)
    Socket.AF_INET6 ->
      Ok (Socket.AF_INET6,
          Socket.SockAddrInet6 (fromIntegral port) 0 Socket.iN6ADDR_ANY 0)
    _ -> Bad $ "Unsupported address family: " ++ show family
-- | Based on the options, compute the socket address to use for the
-- daemon.  With no explicit bind address, fall back to the wildcard
-- address of the cluster's primary IP family; otherwise resolve the
-- requested address (turning resolution errors into a 'Bad' result).
parseAddress :: DaemonOptions -- ^ Command line options
             -> Int -- ^ Default port for this daemon
             -> IO (Result (Socket.Family, Socket.SockAddr))
parseAddress opts defport = do
  let port = maybe defport fromIntegral $ optPort opts
  def_family <- Ssconf.getPrimaryIPFamily Nothing
  case optBindAddress opts of
    Nothing -> return (def_family >>= defaultBindAddr port)
    Just saddr -> Control.Exception.catch
                    (resolveAddr port saddr)
                    (ioErrorToResult $ "Invalid address " ++ saddr)
-- | Environment variable to override the assumed host name of the
-- current node.
vClusterHostNameEnvVar :: String
vClusterHostNameEnvVar = "GANETI_HOSTNAME"

-- | Get the real full qualified host name: resolve the plain host name
-- (honoring the given address hints) and ask for its canonical name,
-- falling back to the plain host name at every step that yields nothing.
getFQDN' :: Maybe Socket.AddrInfo -> IO String
getFQDN' hints = do
  hostname <- getHostName
  addrInfos <- Socket.getAddrInfo hints (Just hostname) Nothing
  case fmap Socket.addrAddress (listToMaybe addrInfos) of
    Nothing -> return hostname
    Just addr -> do
      (mFqdn, _) <- Socket.getNameInfo [] True False addr
      return (fromMaybe hostname mFqdn)
-- | Return the full qualified host name, honoring the vcluster setup
-- and hints on the preferred socket type or protocol.
getFQDNwithHints :: Maybe Socket.AddrInfo -> IO String
getFQDNwithHints hints = do
  let ioErrorToNothing :: IOError -> IO (Maybe String)
      ioErrorToNothing _ = return Nothing
  -- Reading an unset environment variable raises an IOError; treat
  -- that as "not running in a virtual cluster".
  vcluster_node <- Control.Exception.catch
                     (liftM Just (getEnv vClusterHostNameEnvVar))
                     ioErrorToNothing
  case vcluster_node of
    Just node_name -> return node_name
    Nothing -> getFQDN' hints
-- | Return the full qualified host name, honoring the vcluster setup.
-- When the cluster's primary IP family is known, resolution is
-- restricted to that family; otherwise no hints are passed.
getFQDN :: IO String
getFQDN = do
  familyresult <- Ssconf.getPrimaryIPFamily Nothing
  getFQDNwithHints
    $ genericResult (const Nothing)
      (\family -> Just $ Socket.defaultHints { Socket.addrFamily = family })
      familyresult
-- | Returns if the current node is the master node.  If the master
-- name cannot be read from ssconf, the node is assumed NOT to be the
-- master.
isMaster :: IO Bool
isMaster = do
  curNode <- getFQDN
  masterNode <- Ssconf.getMasterNode Nothing
  case masterNode of
    Ok n -> return (curNode == n)
    Bad _ -> return False
-- | Ensures that the daemon runs on the right node (and exits
-- gracefully if it doesn't): a master-only daemon terminates here
-- unless this node is the master or --force-node was given.
ensureNode :: GanetiDaemon -> DaemonOptions -> IO ()
ensureNode daemon opts = do
  is_master <- isMaster
  when (daemonOnlyOnMaster daemon
        && not is_master
        && not (optForceNode opts)) $ do
    putStrLn "Not master, exiting."
    exitWith (ExitFailure C.exitNotmaster)
-- | Run an I\/O action that might throw an I\/O error, under a
-- handler that will simply annotate and re-throw the exception.
describeError :: String -> Maybe Handle -> Maybe FilePath -> IO a -> IO a
describeError descr hndl fpath =
  modifyIOError (\e -> annotateIOError e descr hndl fpath)
-- | Run an I\/O action as a daemon.
--
-- WARNING: this only works in single-threaded mode (either using the
-- single-threaded runtime, or using the multi-threaded one but with
-- only one OS thread, i.e. -N1).
--
-- A pipe lets the (grand)child report startup errors back to the
-- original process, which relays them on stderr and exits non-zero;
-- an empty read from the pipe means startup succeeded.
daemonize :: FilePath -> (Maybe Fd -> IO ()) -> IO ()
daemonize logfile action = do
  (rpipe, wpipe) <- createPipe
  -- first fork
  _ <- forkProcess $ do
    -- in the child
    closeFd rpipe
    let wpipe' = Just wpipe
    setupDaemonEnv "/" (unionFileModes groupModes otherModes)
    setupDaemonFDs (Just logfile) `Control.Exception.catch`
      handlePrepErr False wpipe'
    -- second fork, launches the actual child code; standard
    -- double-fork technique
    _ <- forkProcess (action wpipe')
    exitImmediately ExitSuccess
  closeFd wpipe
  -- in the parent: read whatever error text the child wrote before the
  -- write end of the pipe was closed
  hndl <- fdToHandle rpipe
  errors <- hGetContents hndl
  ecode <- if null errors
             then return ExitSuccess
             else do
               hPutStrLn stderr $ daemonStartupErr errors
               return $ ExitFailure C.exitFailure
  exitImmediately ecode
-- | Generic daemon startup.
--
-- Parses the command line, performs the node\/argument\/user checks,
-- runs the daemon-specific check function and then hands over to
-- 'innerMain' (daemonized unless running in the foreground).
genericMain :: GanetiDaemon -- ^ The daemon we're running
            -> [OptType] -- ^ The available options
            -> CheckFn a -- ^ Check function
            -> PrepFn a b -- ^ Prepare function
            -> MainFn a b -- ^ Execution function
            -> IO ()
genericMain daemon options check_fn prep_fn exec_fn = do
  let progname = daemonName daemon
  (opts, args) <- parseArgs progname options
  -- Modify handleClient in Ganeti.UDSServer to remove this logging from luxid.
  when (optDebug opts && daemon == GanetiLuxid) .
    hPutStrLn stderr $
      printf C.debugModeConfidentialityWarning (daemonName daemon)
  ensureNode daemon opts
  exitUnless (null args) "This program doesn't take any arguments"
  unless (optNoUserChecks opts) $ do
    runtimeEnts <- runResultT getEnts
    ents <- exitIfBad "Can't find required user/groups" runtimeEnts
    verifyDaemonUser daemon ents
  syslog <- case optSyslogUsage opts of
              Nothing -> exitIfBad "Invalid cluster syslog setting" $
                         syslogUsageFromRaw C.syslogUsage
              Just v -> return v
  log_file <- daemonLogFile daemon
  -- run the check function and optionally exit if it returns an exit code
  check_result <- check_fn opts
  check_result' <- case check_result of
                     Left code -> exitWith code
                     Right v -> return v
  let processFn = if optDaemonize opts
                    then daemonize log_file
                    else \action -> action Nothing
  -- SIGHUP ('lostConnection') makes the daemon reopen its log files.
  _ <- installHandler lostConnection (Catch (handleSigHup log_file)) Nothing
  processFn $ innerMain daemon opts syslog check_result' prep_fn exec_fn
-- | Full prepare function.
--
-- This is executed after daemonization, and sets up both the log
-- files (a generic functionality) and the custom prepare function of
-- the daemon.  Returns the PID file path (for later cleanup) together
-- with the prepare function's result.
fullPrep :: GanetiDaemon -- ^ The daemon we're running
         -> DaemonOptions -- ^ The options structure, filled from the cmdline
         -> SyslogUsage -- ^ Syslog mode
         -> a -- ^ Check results
         -> PrepFn a b -- ^ Prepare function
         -> IO (FilePath, b)
fullPrep daemon opts syslog check_result prep_fn = do
  -- When daemonized, stdout/stderr already point at the log file, so
  -- no separate log file is passed to the logger.
  logfile <- if optDaemonize opts
               then return Nothing
               else liftM Just $ daemonLogFile daemon
  pidfile <- daemonPidFile daemon
  let dname = daemonName daemon
  setupLogging logfile dname (optDebug opts) True False syslog
  _ <- describeError "writing PID file; already locked?"
         Nothing (Just pidfile) $ writePidFile pidfile
  logNotice $ dname ++ " daemon startup"
  prep_res <- prep_fn opts check_result
  tid <- myThreadId
  -- SIGTERM triggers an orderly shutdown via the main thread.
  _ <- installHandler sigTERM (Catch $ handleSigTerm tid) Nothing
  return (pidfile, prep_res)
-- | Inner daemon function.
--
-- This is executed after daemonization.  Prepare errors are reported
-- through the status fd (if any); on shutdown the PID file is removed
-- and the shutdown is logged.
innerMain :: GanetiDaemon -- ^ The daemon we're running
          -> DaemonOptions -- ^ The options structure, filled from the cmdline
          -> SyslogUsage -- ^ Syslog mode
          -> a -- ^ Check results
          -> PrepFn a b -- ^ Prepare function
          -> MainFn a b -- ^ Execution function
          -> Maybe Fd -- ^ Error reporting function
          -> IO ()
innerMain daemon opts syslog check_result prep_fn exec_fn fd = do
  (pidFile, prep_result) <- fullPrep daemon opts syslog check_result prep_fn
                              `Control.Exception.catch` handlePrepErr True fd
  -- no error reported, we should now close the fd
  maybeCloseFd fd
  finally (exec_fn opts check_result prep_result)
          (finalCleanup pidFile
           >> logNotice (daemonName daemon ++ " daemon shutdown"))
-- | Daemon prepare error handling function: report the error over the
-- status pipe (or stderr when there is none), optionally log it, then
-- exit with failure.
handlePrepErr :: Bool -> Maybe Fd -> IOError -> IO a
handlePrepErr logging_setup fd err = do
  let message = show err
  -- Write to the raw fd when we have one: while forking, that is safer
  -- than trying to convert it into a full handle.
  maybe (hPutStrLn stderr (daemonStartupErr message))
        (\fd' -> void (fdWrite fd' message))
        fd
  when logging_setup $ logError message
  exitWith (ExitFailure 1)

-- | Close a file descriptor, doing nothing when there is none.
maybeCloseFd :: Maybe Fd -> IO ()
maybeCloseFd = maybe (return ()) closeFd
| mbakke/ganeti | src/Ganeti/Daemon.hs | bsd-2-clause | 18,429 | 0 | 15 | 4,166 | 3,833 | 1,996 | 1,837 | 370 | 4 |
module Main (main) where
import Test.Tasty
import Test.Tasty.Program
import qualified Data.List as List
import Data.Maybe (fromMaybe)
import qualified System.Directory as Directory
import System.FilePath ((</>),(<.>))
import qualified System.IO.Unsafe as Unsafe
data BuildTarget
= VHDL | Verilog | Both
deriving Show
-- | Entry point: the full compile-and-simulate test matrix.
--
-- Each 'runTest' entry is: source directory, HDL target(s), extra CLaSH
-- flags, module name, and @Just (entity, runSim)@ — when @runSim@ is
-- True the generated testbench is also simulated (GHDL for VHDL, vvp
-- for Verilog).
main :: IO ()
main =
  defaultMain $ testGroup "tests"
    [ testGroup "examples"
      [runTest "examples" Both [] "ALU" (Just ("topEntity",False))
      ,runTest "examples" VHDL [] "Blinker" (Just ("topEntity",False))
      ,runTest "examples" Verilog [] "Blinker_v" (Just ("blinker",False))
      ,runTest "examples" Both [] "BlockRamTest" (Just ("topEntity",False))
      ,runTest "examples" Both [] "Calculator" (Just ("testbench",True ))
      ,runTest "examples" Both [] "CochleaPlus" (Just ("topEntity",False))
      ,runTest "examples" Both [] "FIR" (Just ("testbench",True ))
      ,runTest "examples" Both [] "Fifo" (Just ("topEntity",False))
      ,runTest "examples" Both [] "MAC" (Just ("testbench",True))
      ,runTest "examples" Both [] "MatrixVect" (Just ("testbench",True))
      ,runTest "examples" Both [] "Queens" (Just ("topEntity",False))
      ,runTest "examples" Both [] "Reducer" (Just ("topEntity",False))
      ,runTest "examples" Both [] "Sprockell" (Just ("topEntity",False))
      ,runTest "examples" Both [] "Windows" (Just ("topEntity",False))
      ,runTest ("examples" </> "crc32") Both [] "CRC32" (Just ("testbench",True))
      ,runTest ("examples" </> "i2c") Both [] "I2C" (Just ("topEntity",False))
      ]
    , testGroup "unit-tests"
        [ testGroup "Basic"
            [ runTest ("tests" </> "shouldwork" </> "Basic") Both [] "Trace" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Basic") Both [] "ByteSwap32" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Basic") Both [] "CharTest" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Basic") Both [] "ClassOps" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Basic") Both [] "CountTrailingZeros" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Basic") Both [] "IrrefError" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Basic") Both [] "NestedPrimitives" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Basic") Both [] "NestedPrimitives2" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Basic") Both [] "PatError" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Basic") Both [] "PopCount" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Basic") Both [] "RecordSumOfProducts" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Basic") Both [] "Shift" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Basic") Both [] "SimpleConstructor" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Basic") Both [] "TagToEnum" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Basic") Both [] "TestIndex" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Basic") Both [] "TwoFunctions" (Just ("topEntity",False))
            ]
        , testGroup "BitVector"
            [ runTest ("tests" </> "shouldwork" </> "BitVector") Both [] "Box" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "BitVector") Both [] "BoxGrow" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "BitVector") Both [] "RePack" (Just ("topEntity",False))
            ]
        , testGroup "BoxedFunctions"
            [ runTest ("tests" </> "shouldwork" </> "BoxedFunctions") Both [] "DeadRecursiveBoxed" (Just ("topEntity",False))
            ]
        , testGroup "CSignal"
            [ runTest ("tests" </> "shouldwork" </> "CSignal") Both [] "CBlockRamTest" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "CSignal") Both [] "MAC" (Just ("topEntity",False))
            ]
        , testGroup "Feedback"
            [ runTest ("tests" </> "shouldwork" </> "Feedback") Both [] "Fib" (Just ("testbench",True))
            ]
        , testGroup "Fixed"
            [ runTest ("tests" </> "shouldwork" </> "Fixed") Both [] "SFixedTest" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Fixed") Both [] "SatWrap" (Just ("topEntity",False))
            ]
        , testGroup "HOPrim"
            [ runTest ("tests" </> "shouldwork" </> "HOPrim") Both [] "HOImap" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "HOPrim") Both [] "TestMap" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "HOPrim") Both [] "Transpose" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "HOPrim") Both [] "VecFun" (Just ("testbench",True))
            ]
        , testGroup "Numbers"
            [ runTest ("tests" </> "shouldwork" </> "Numbers") Both [] "Bounds" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Numbers") Both [] "Resize" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Numbers") Both [] "Resize2" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Numbers") Both [] "SatMult" (Just ("topEntity",False))
            ]
        , testGroup "Polymorphism"
            [ runTest ("tests" </> "shouldwork" </> "Polymorphism") Both [] "ExistentialBoxed" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Polymorphism") Both [] "LocalPoly" (Just ("topEntity",False))
            ]
        , testGroup "Signal"
            [ runTest ("tests" </> "shouldwork" </> "Signal") Both [] "AlwaysHigh" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Signal") Both [] "BlockRamFile" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Signal") Both [] "BlockRamTest" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Signal") Both [] "MAC" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Signal") Both [] "SigP" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Signal") Both [] "Ram" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Signal") Both [] "Rom" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Signal") Both [] "RomFile" (Just ("testbench",True))
            ]
        , testGroup "Testbench"
            [ runTest ("tests" </> "shouldwork" </> "Testbench") Both ["-clash-inline-limit=0"] "TB" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Testbench") Both [] "SyncTB" (Just ("testbench",True))
            ]
        , testGroup "Vector"
            [ runTest ("tests" </> "shouldwork" </> "Vector") Both [] "Concat" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "DFold" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "DFold2" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "EnumTypes" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "FindIndex" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "Fold" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "Foldr" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "HOClock" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "HOCon" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "HOPrim" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "Minimum" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "MovingAvg" (Just ("topEntity", False))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "PatHOCon" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "Split" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "ToList" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "Unconcat" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "VACC" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "VIndex" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "VecConst" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "VecOfSum" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "VFold" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "VMapAccum" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "VMerge" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "VReplace" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "VReverse" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "VScan" (Just ("topEntity",False))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "VSelect" (Just ("testbench",True))
            , runTest ("tests" </> "shouldwork" </> "Vector") Both [] "VZip" (Just ("topEntity",False))
            ]
        ]
    ]
-- | Invoke the CLaSH compiler (via @stack exec clash@) on @modName.hs@
-- in directory @env@, generating VHDL or Verilog.
--
-- Note: 'Both' never reaches this function — 'runTest' expands it into
-- separate VHDL and Verilog runs first.  The original 'case' omitted the
-- 'Both' constructor entirely, so violating that invariant died with an
-- opaque pattern-match failure; fail with a descriptive message instead.
clashHDL :: BuildTarget -> FilePath -> [String] -> String -> TestTree
clashHDL t env extraArgs modName =
  testProgram ("CLaSH(" ++ show t ++ ")")
    "stack"
    (concat [["exec","clash","--"
             ,case t of
                VHDL    -> "--vhdl"
                Verilog -> "--verilog"
                Both    -> error "clashHDL: 'Both' must be split into VHDL/Verilog by runTest"
             ]
            ,extraArgs
            ,[modName <.> "hs"]
            ])
    (Just env)
    False
    False
-- | Import (analyse) every @*.vhdl@ file in @dir@ into GHDL's work
-- library.  The directory listing is hidden behind
-- 'Unsafe.unsafePerformIO' so it is only forced when tasty evaluates the
-- argument list — presumably after the CLaSH step has generated the
-- files (NOTE(review): relies on lazy evaluation order; confirm).
ghdlImport :: FilePath -> TestTree
ghdlImport dir = withResource (return dir) (const (return ()))
  (\d -> testProgram "GHDL (import)" "ghdl" ("-i":"--workdir=work":"--std=93":vhdlFiles d) (Just dir) False False)
  where
    -- List the generated .vhdl files; unsafePerformIO defers the listing
    -- until the argument list is demanded.
    vhdlFiles :: IO FilePath -> [FilePath]
    vhdlFiles d = Unsafe.unsafePerformIO
                $ filter (List.isSuffixOf "vhdl")
                <$> (Directory.getDirectoryContents =<< d)
-- | Elaborate the GHDL design @modName_entity@ inside directory @env@.
ghdlMake :: FilePath -> String -> String -> TestTree
ghdlMake env modName entity =
    testProgram "GHDL (make)" "ghdl" ghdlArgs (Just env) False False
  where
    ghdlArgs = ["-m","--workdir=work","--std=93", modName ++ "_" ++ entity]
-- | Run the elaborated GHDL testbench @modName_testbench@; any VHDL
-- assertion of severity @error@ fails the simulation.
ghdlSim :: FilePath -> String -> TestTree
ghdlSim env modName =
    testProgram "GHDL (sim)" "ghdl" simArgs (Just env) False False
  where
    simArgs = ["-r","--workdir=work","--std=93", modName ++ "_testbench","--assert-level=error"]
-- | Compile all @*.v@ files in @dir@ with Icarus Verilog, using
-- @modName_entity@ as both the top module (-s) and the output name (-o).
-- As in 'ghdlImport', the directory listing is deferred via
-- 'Unsafe.unsafePerformIO' until the test actually runs.
iverilog :: FilePath -> String -> String -> TestTree
iverilog dir modName entity = withResource (return dir) (const (return ()))
  (\d -> testProgram "iverilog" "iverilog" ("-g2":"-s":modEntity:"-o":modEntity:verilogFiles d) (Just dir) False False)
  where
    verilogFiles :: IO FilePath -> [FilePath]
    verilogFiles d = Unsafe.unsafePerformIO
                   $ filter (List.isSuffixOf "v")
                   <$> (Directory.getDirectoryContents =<< d)
    -- Name of the compiled unit, e.g. "Blinker_v_blinker".
    modEntity = modName ++ "_" ++ entity
-- | Simulate the compiled Verilog testbench @modName_testbench@ with vvp.
-- The final 'True' keeps the original behaviour of treating the runner's
-- stderr output as a test failure.
vvp :: FilePath -> String -> TestTree
vvp env modName =
    testProgram "vvp" "vvp" [testbench] (Just env) False True
  where
    testbench = modName ++ "_testbench"
-- | Compile one module to HDL and, when requested, build and simulate
-- its testbench.  @Both@ expands into a VHDL run plus a Verilog run.
runTest :: FilePath           -- ^ Directory holding the module source
        -> BuildTarget        -- ^ Which HDL(s) to generate
        -> [String]           -- ^ Extra CLaSH flags
        -> String             -- ^ Module name
        -> Maybe (String,Bool) -- ^ @Just (entity, runSim)@
        -> TestTree
runTest env VHDL extraArgs modName entNameM =
    withResource acquire release (const grp)
  where
    vhdlDir   = env </> "vhdl"
    modDir    = vhdlDir </> modName
    workDir   = modDir </> "work"
    -- Create GHDL's work directory up front; drop the whole output tree
    -- when the group finishes.
    acquire   = Directory.createDirectoryIfMissing True workDir
    release _ = Directory.removeDirectoryRecursive vhdlDir
    grp = testGroup modName
        ( clashHDL VHDL env extraArgs modName
        : maybe [] buildSteps entNameM
        )
    buildSteps (entName, doRun) =
      [ ghdlImport modDir, ghdlMake modDir modName entName ]
      ++ (if doRun then [ghdlSim modDir modName] else [])
runTest env Verilog extraArgs modName entNameM =
    withResource (return ()) release (const grp)
  where
    verilogDir = env </> "verilog"
    modDir     = verilogDir </> modName
    release _  = Directory.removeDirectoryRecursive verilogDir
    grp = testGroup modName
        ( clashHDL Verilog env extraArgs modName
        : maybe [] buildSteps entNameM
        )
    buildSteps (entName, doRun) =
      iverilog modDir modName entName
      : (if doRun then [vvp modDir modName] else [])
runTest env Both extraArgs modName entNameM = testGroup "VHDL & Verilog"
  [ runTest env VHDL    extraArgs modName entNameM
  , runTest env Verilog extraArgs modName entNameM
  ]
| ggreif/clash-compiler | testsuite/Main.hs | bsd-2-clause | 13,898 | 0 | 15 | 3,744 | 4,564 | 2,443 | 2,121 | 181 | 3 |
{-# LANGUAGE ScopedTypeVariables #-}
import Criterion.Main
import Data.Vectro as TV
import Data.Vector as V
-- | Benchmark building, 'snoc'-ing and point-updating a plain
-- 'Data.Vector' against the tree-backed structure from 'Data.Vectro'.
-- (Added the previously missing top-level type signature.)
main :: IO ()
main = do
  -- Shared fixtures: a 10k-element list plus its converted forms.
  let l = [1..(10000::Int)]
      v = V.fromList l
      tv = TV.fromVector v
      vec = TV.fromVect tv
  defaultMain [
    bgroup "build" [
      bench "tv" $ nf TV.fromList l
      -- NOTE(review): this case uses 'whnf' while its siblings use 'nf';
      -- preserved as-is to keep measurements comparable with history.
    , bench "v" $ whnf V.fromList l
    , bench "tvv" $ nf TV.fromVector v
    ],
    bgroup "snoc" [
      bench "v" $ nf (V.foldl' V.snoc v) v
    , bench "tv" $ nf (V.foldl' TV.snoc tv) v
    , bench "vec" $ nf (V.foldl' TV.snocV vec) v
    ],
    bgroup "update" [
      bench "v" $ whnf (V.foldl' (\v i -> v V.// [(i-1,0)]) v) v
    , bench "tv" $ nf (V.foldl' (\t i -> TV.update t (i-1) 0) tv) v
    ]
    ]
| bos/vectro | tests/benchmarks/Benchmark.hs | bsd-2-clause | 794 | 0 | 20 | 285 | 355 | 178 | 177 | 21 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ScopedTypeVariables #-}
module RandTools where
-- base
import Data.Foldable
import Data.Word
import Control.Monad.ST
-- primitive
import Control.Monad.Primitive (PrimState,PrimMonad)
-- mwc-random
import qualified System.Random.MWC as R
-- containers
import qualified Data.Sequence as Seq
-- vector
import qualified Data.Vector as V
-- http://stackoverflow.com/a/13782206/547223
-- | Draw @size@ elements uniformly at random (without replacement) from
-- @ys@.  Works like a partial Fisher–Yates shuffle run from the back of
-- the sequence: each step swaps a uniformly chosen prefix element into
-- position @i@, walking @i@ down from the end; after @size@ steps the
-- last @size@ slots hold the sample and everything before them is
-- dropped.  O(size · log n) thanks to 'Seq.index' / 'Seq.update'.
sample :: (PrimMonad m) => [a] -> Int -> R.Gen (PrimState m) -> m [a]
sample ys size = go 0 (l - 1) (Seq.fromList ys)
    where
    l = length ys
    -- n = swaps done so far, i = slot currently being filled.
    go !n !i xs g | n >= size = return $! (toList . Seq.drop (l - size)) xs
                  | otherwise = do
                      j <- R.uniformR (0, i) g
                      let toI = xs `Seq.index` j
                          toJ = xs `Seq.index` i
                          next = (Seq.update i toI . Seq.update j toJ) xs
                      go (n + 1) (i - 1) next g
{-# INLINE sample #-}
-- | A reusable RNG seed: 256 words drawn from the system RNG.
type RandomSeed = V.Vector Word32

-- | Produce a fresh seed vector using mwc-random's system generator.
genSeed :: IO RandomSeed
genSeed = R.withSystemRandom aux
  where aux (gen::R.GenST s) = R.uniformVector gen 256 :: ST s (V.Vector Word32)
| kgadek/evil-pareto-tests | src/RandTools.hs | bsd-3-clause | 1,155 | 0 | 17 | 331 | 396 | 214 | 182 | 25 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module IOArrayTests (ioArrayTests) where
import TestHelpers
import Data.Array.Storable (StorableArray)
import Control.Parallel.MPI.Fast
import Data.Array.IO (IOArray, newListArray, getElems)
import Control.Concurrent (threadDelay)
import Control.Monad (when)
import Foreign.C.Types
import Data.List
-- | Rank that acts as root/sender for the collective-operation tests.
root :: Rank
root = 0
-- | All IOArray-based MPI tests, paired with human-readable labels.
ioArrayTests :: Rank -> [(String,TestRunnerTest)]
ioArrayTests rank =
  [ mpiTestCase rank "send+recv IO array" $ syncSendRecvTest send
  , mpiTestCase rank "ssend+recv IO array" $ syncSendRecvTest ssend
  , mpiTestCase rank "rsend+recv IO array" $ rsendRecvTest
  -- irecv only works for StorableArray at the moment. See comments in source.
  -- , mpiTestCase rank "isend+irecv IO array" $ asyncSendRecvTest isend
  -- , mpiTestCase rank "issend+irecv IO array" $ asyncSendRecvTest issend
  , mpiTestCase rank "broadcast IO array" broadcastTest
  , mpiTestCase rank "scatter IO array" scatterTest
  , mpiTestCase rank "scatterv IO array" scattervTest
  , mpiTestCase rank "gather IO array" gatherTest
  , mpiTestCase rank "gatherv IO array" gathervTest
  , mpiTestCase rank "allgather IO array" allgatherTest
  , mpiTestCase rank "allgatherv IO array" allgathervTest
  , mpiTestCase rank "alltoall IO array" alltoallTest
  , mpiTestCase rank "alltoallv IO array" alltoallvTest
  , mpiTestCase rank "reduce IO array" reduceTest
  , mpiTestCase rank "allreduce IO array" allreduceTest
  , mpiTestCase rank "reduceScatter IO array" reduceScatterTest
  ]
-- Type signatures for the test actions defined below; each one runs on
-- every MPI rank and decides from its 'Rank' argument what role to play.
syncSendRecvTest :: (Comm -> Rank -> Tag -> ArrMsg -> IO ()) -> Rank -> IO ()
-- asyncSendRecvTest :: (Comm -> Rank -> Tag -> IOArray Int Int -> IO Request) -> Rank -> IO ()
rsendRecvTest, broadcastTest, scatterTest, scattervTest, gatherTest, gathervTest :: Rank -> IO ()
allgatherTest, allgathervTest, alltoallTest, alltoallvTest, reduceTest, allreduceTest, reduceScatterTest :: Rank -> IO ()
-- | Element type used by every array message in these tests.
type ElementType = Double

-- | The mutable array type exchanged over MPI.
type ArrMsg = IOArray Int ElementType

-- Index bounds of the standard test message.
low,hi :: Int
low = 1
hi  = 10

range :: (Int, Int)
range = (low, hi)

-- | Expected contents of a fresh test array: @[1.0 .. 10.0]@.
arrMsgContent :: [ElementType]
arrMsgContent = [fromIntegral i | i <- [low .. hi]]

-- | Allocate a fresh test array over 'range'.
arrMsg :: IO ArrMsg
arrMsg = newListArray range arrMsgContent
-- | Round-trip an 'ArrMsg' from 'sender' to 'receiver' with the given
-- blocking send variant (tag 789); other ranks are idle.
syncSendRecvTest sendf rank
  | rank == sender = do msg <- arrMsg
                        sendf commWorld receiver 789 msg
  | rank == receiver = do (newMsg::ArrMsg, status) <- intoNewArray range $ recv commWorld sender 789
                          checkStatus status sender 789
                          elems <- getElems newMsg
                          elems == arrMsgContent @? "Got wrong array: " ++ show elems
  | otherwise = return ()
-- | Ready-mode send test.  The receiver posts its recv immediately while
-- the sender sleeps ~2 seconds before calling 'rsend' — presumably so the
-- matching receive is already outstanding when the ready-mode send fires
-- (NOTE(review): confirm this is the intended MPI_Rsend precondition).
rsendRecvTest rank = do
  when (rank == receiver) $ do (newMsg::ArrMsg, status) <- intoNewArray range $ recv commWorld sender 789
                               checkStatus status sender 789
                               elems <- getElems newMsg
                               elems == arrMsgContent @? "Got wrong array: " ++ show elems
  when (rank == sender) $ do msg <- arrMsg
                             threadDelay (2* 10^(6 :: Integer))
                             rsend commWorld receiver 789 msg
  return ()
{-
asyncSendRecvTest isendf rank
| rank == sender = do msg <- arrMsg
req <- isendf commWorld receiver 123456 msg
stat <- wait req
checkStatus stat sender 123456
-- XXX this type annotation is ugly. Is there a way to make it nicer?
| rank == receiver = do (newMsg, req) <- intoNewArray range $ (irecv commWorld sender 123456 :: IOArray Int Int -> IO Request)
stat <- wait req
checkStatus stat sender 123456
elems <- getElems newMsg
elems == [low..hi::Int] @? "Got wrong array: " ++ show elems
| otherwise = return ()
-}
-- | Broadcast from 'sender': every rank allocates the same array; after
-- the broadcast each rank's copy must still equal the original contents.
broadcastTest myRank = do
  msg <- arrMsg
  expected <- arrMsg
  if myRank == root
     then bcastSend commWorld sender (msg :: ArrMsg)
     else bcastRecv commWorld sender (msg :: ArrMsg)
  elems <- getElems msg
  expectedElems <- getElems expected
  elems == expectedElems @? "IOArray bcast yielded garbled result: " ++ show elems
-- | Scatter: root builds [1..10*numProcs] and scatters 10 elements to
-- each rank; rank r expects [10r+1 .. 10r+10].
scatterTest myRank = do
  numProcs <- commSize commWorld
  let segRange = (1, segmentSize)
  (segment::ArrMsg) <- if myRank == root then do
                          let bigRange@(low, hi) = (1, segmentSize * numProcs)
                          (msg :: ArrMsg) <- newListArray bigRange $ map fromIntegral [low..hi]
                          intoNewArray_ segRange $ scatterSend commWorld root msg
                       else intoNewArray_ segRange $ scatterRecv commWorld root
  let myRankNo = fromRank myRank
      expected = take 10 [myRankNo*10+1..]
  recvMsg <- getElems segment
  recvMsg == expected @? "Rank " ++ show myRank ++ " got segment " ++ show recvMsg ++ " instead of " ++ show expected
  where
    segmentSize = 10
-- scatter list [1..] in a way such that:
-- rank 0 will receive [1]
-- rank 1 will receive [2,3]
-- rank 2 will receive [3,4,5]
-- rank 3 will receive [6,7,8,9]
-- etc
-- | Variable-count scatter: rank r receives (r+1) elements; counts and
-- displacements are packed into StorableArrays of CInt for the C API.
scattervTest myRank = do
  numProcs <- commSize commWorld
  let bigRange@(low, hi) = (1, sum [1..numProcs])
      recvRange = (0, myRankNo)
      myRankNo = fromRank myRank
      counts = [1..fromIntegral numProcs]
      displs = (0:(Prelude.init $ scanl1 (+) $ [1..fromIntegral numProcs]))
  (segment::ArrMsg) <- if myRank == root then do
                          (msg :: ArrMsg) <- newListArray bigRange $ map fromIntegral [low..hi]
                          let msgRange = (1, numProcs)
                          (packCounts :: StorableArray Int CInt) <- newListArray msgRange counts
                          (packDispls :: StorableArray Int CInt) <- newListArray msgRange displs
                          intoNewArray_ recvRange $ scattervSend commWorld root msg packCounts packDispls
                       else intoNewArray_ recvRange $ scattervRecv commWorld root
  recvMsg <- getElems segment
  -- Recompute this rank's slice of the scattered list for comparison.
  let myCount = fromIntegral $ counts!!myRankNo
      myDispl = fromIntegral $ displs!!myRankNo
      expected = map fromIntegral $ take myCount $ drop myDispl [low..hi]
  recvMsg == expected @? "Rank = " ++ show myRank ++ " got segment = " ++ show recvMsg ++ " instead of " ++ show expected
-- | Gather: each rank sends [1..10]; root receives numProcs copies
-- concatenated.
gatherTest myRank = do
  numProcs <- commSize commWorld
  let segRange@(low,hi) = (1, segmentSize)
  (msg :: ArrMsg) <- newListArray segRange $ map fromIntegral [low..hi]
  if myRank /= root
     then gatherSend commWorld root msg
     else do
       let bigRange = (1, segmentSize * numProcs)
           expected = map fromIntegral $ concat $ replicate numProcs [1..segmentSize]
       (result::ArrMsg) <- intoNewArray_ bigRange $ gatherRecv commWorld root msg
       recvMsg <- getElems result
       recvMsg == expected @? "Rank " ++ show myRank ++ " got " ++ show recvMsg ++ " instead of " ++ show expected
  where segmentSize = 10
-- | Variable-count gather: rank r contributes [0..r]; root receives the
-- concatenation [0], [0,1], [0,1,2], … with matching counts/displs.
gathervTest myRank = do
  numProcs <- commSize commWorld
  let bigRange = (1, sum [1..numProcs])
  let myRankNo = fromRank myRank
      sendRange = (0, myRankNo)
  (msg :: ArrMsg) <- newListArray sendRange $ map fromIntegral [0..myRankNo]
  if myRank /= root
     then gathervSend commWorld root msg
     else do
       let msgRange = (1, numProcs)
           counts = [1..fromIntegral numProcs]
           displs = (0:(Prelude.init $ scanl1 (+) $ [1..fromIntegral numProcs]))
           expected = map fromIntegral $ concat $ reverse $ take numProcs $ iterate Prelude.init [0..numProcs-1]
       (packCounts :: StorableArray Int CInt) <- newListArray msgRange counts
       (packDispls :: StorableArray Int CInt) <- newListArray msgRange displs
       (segment::ArrMsg) <- intoNewArray_ bigRange $ gathervRecv commWorld root msg packCounts packDispls
       recvMsg <- getElems segment
       recvMsg == expected @? "Rank = " ++ show myRank ++ " got segment = " ++ show recvMsg ++ " instead of " ++ show expected
-- | Allgather: like 'gatherTest', but every rank (not just root) ends up
-- with the concatenation of all contributions.
allgatherTest _ = do
  numProcs <- commSize commWorld
  let segRange@(low,hi) = (1, segmentSize)
  (msg :: ArrMsg) <- newListArray segRange $ map fromIntegral [low..hi]
  let bigRange = (1, segmentSize * numProcs)
      expected = map fromIntegral $ concat $ replicate numProcs [1..segmentSize]
  (result::ArrMsg) <- intoNewArray_ bigRange $ allgather commWorld msg
  recvMsg <- getElems result
  recvMsg == expected @? "Got " ++ show recvMsg ++ " instead of " ++ show expected
  where segmentSize = 10
-- | Variable-count allgather: same data layout as 'gathervTest', but the
-- concatenated result is delivered to every rank.
allgathervTest myRank = do
  numProcs <- commSize commWorld
  let bigRange = (1, sum [1..numProcs])
  let myRankNo = fromRank myRank
      sendRange = (0, myRankNo)
  (msg :: ArrMsg) <- newListArray sendRange $ map fromIntegral [0..myRankNo]
  let msgRange = (1, numProcs)
      counts = [1..fromIntegral numProcs]
      displs = (0:(Prelude.init $ scanl1 (+) $ [1..fromIntegral numProcs]))
      expected = map fromIntegral $ concat $ reverse $ take numProcs $ iterate Prelude.init [0..numProcs-1]
  (packCounts :: StorableArray Int CInt) <- newListArray msgRange counts
  (packDispls :: StorableArray Int CInt) <- newListArray msgRange displs
  (result::ArrMsg) <- intoNewArray_ bigRange $ allgatherv commWorld msg packCounts packDispls
  recvMsg <- getElems result
  recvMsg == expected @? "Got segment = " ++ show recvMsg ++ " instead of " ++ show expected
-- | All-to-all: each rank sends its own rank number to everyone, one
-- element per peer, so every rank receives [0..numProcs-1].
alltoallTest myRank = do
  numProcs <- commSize commWorld
  let myRankNo = fromRank myRank
      sendRange = (0, numProcs-1)
  (msg :: ArrMsg) <- newListArray sendRange $ take numProcs $ repeat myRankNo
  let recvRange = sendRange
      expected = map fromIntegral $ [0..numProcs-1]
  (result::ArrMsg) <- intoNewArray_ recvRange $ alltoall commWorld msg 1 1
  recvMsg <- getElems result
  recvMsg == expected @? "Got segment = " ++ show recvMsg ++ " instead of " ++ show expected
-- Each rank sends its own number (Int) with sendCounts [1,2,3..]
-- Each rank receives Ints with recvCounts [rank+1,rank+1,rank+1,...]
-- Rank 0 should receive 0,1,2
-- Rank 1 should receive 0,0,1,1,2,2
-- Rank 2 should receive 0,0,0,1,1,1,2,2,2
-- etc
-- | Variable-count all-to-all exercising all four count/displacement
-- arrays (packed as StorableArrays of CInt for the C API).
alltoallvTest myRank = do
  numProcs <- commSize commWorld
  let myRankNo :: CInt = fromRank myRank
      sendCounts = take numProcs [1..]
      msgLen = fromIntegral $ sum sendCounts
      sendDispls = Prelude.init $ scanl1 (+) $ 0:sendCounts
      recvCounts = take numProcs (repeat (myRankNo+1))
      recvDispls = Prelude.init $ scanl1 (+) $ 0:recvCounts
      expected = map fromIntegral $ concatMap (genericReplicate (myRankNo+1)) (take numProcs [(0::CInt)..])
  (packSendCounts :: StorableArray Int CInt) <- newListArray (1, length sendCounts) sendCounts
  (packSendDispls :: StorableArray Int CInt) <- newListArray (1, length sendDispls) sendDispls
  (packRecvCounts :: StorableArray Int CInt) <- newListArray (1, length recvCounts) recvCounts
  (packRecvDispls :: StorableArray Int CInt) <- newListArray (1, length recvDispls) recvDispls
  (msg :: ArrMsg) <- newListArray (1, msgLen) $ map fromIntegral $ take msgLen $ repeat myRankNo
  (result::ArrMsg) <- intoNewArray_ (1, length expected) $ alltoallv commWorld msg packSendCounts packSendDispls
                                                                     packRecvCounts packRecvDispls
  recvMsg <- getElems result
  recvMsg == expected @? "Got " ++ show recvMsg ++ " instead of " ++ show expected
-- Reducing arrays [0,1,2....] with SUM should yield [0,numProcs,2*numProcs, ...]
-- | Element-wise SUM reduction to root over identical [0..99] arrays.
reduceTest myRank = do
  numProcs <- commSize commWorld
  (src :: ArrMsg) <- newListArray (0,99) [0..99]
  if myRank /= root
     then reduceSend commWorld root sumOp src
     else do
       (result :: ArrMsg) <- intoNewArray_ (0,99) $ reduceRecv commWorld root sumOp src
       recvMsg <- getElems result
       let expected = map ((fromIntegral numProcs)*) [0..99::ElementType]
       recvMsg == expected @? "Got " ++ show recvMsg ++ " instead of " ++ show expected
-- | Like 'reduceTest', but the summed result is delivered to all ranks.
allreduceTest _ = do
  numProcs <- commSize commWorld
  (src :: ArrMsg) <- newListArray (0,99) [0..99]
  (result :: ArrMsg) <- intoNewArray_ (0,99) $ allreduce commWorld sumOp src
  recvMsg <- getElems result
  let expected = map (fromIntegral.(numProcs*)) [0..99]
  recvMsg == expected @? "Got " ++ show recvMsg ++ " instead of " ++ show expected
-- We reduce [0..] with SUM.
-- Each process gets (rank+1) elements of the result
-- | SUM-reduce then scatter: rank r keeps the (r+1)-element slice of the
-- reduced array starting at offset sum [0..r].
reduceScatterTest myRank = do
  numProcs <- commSize commWorld
  let dataSize = sum [1..numProcs]
      msg = take dataSize [0..]
      myRankNo = fromRank myRank
  (src :: ArrMsg) <- newListArray (1,dataSize) msg
  (counts :: StorableArray Int CInt) <- newListArray (1, numProcs) [1..fromIntegral numProcs]
  (result :: ArrMsg) <- intoNewArray_ (1,myRankNo + 1) $ reduceScatter commWorld sumOp counts src
  recvMsg <- getElems result
  let expected = map ((fromIntegral numProcs)*) $ take (myRankNo+1) $ drop (sum [0..myRankNo]) msg
  recvMsg == expected @? "Got " ++ show recvMsg ++ " instead of " ++ show expected
| bjpop/haskell-mpi | test/IOArrayTests.hs | bsd-3-clause | 12,761 | 1 | 18 | 2,931 | 3,793 | 1,888 | 1,905 | 211 | 2 |
{-# LANGUAGE GADTs #-}
module CardEffect
where
{-
import Control.Monad.Operational
data DeckColumn = L | M | R
deriving (Show, Eq, Ord)
data DeckRow = F | B
deriving (Show, Eq, Ord)
data CardEffectOp a where
QueueDmg :: Int -> Int -> DeckColumn -> CardEffectOp ()
AddDP :: Int -> CardEffectOp ()
AddCard ::
type CardEffect = Program CardEffectOp ()
-}
| rubenpieters/gre-project | shared/src/CardEffect.hs | bsd-3-clause | 369 | 0 | 2 | 78 | 6 | 5 | 1 | 2 | 0 |
module DTD_ | FranklinChen/hugs98-plus-Sep2006 | packages/HaXml/bugs/GxlDtd.hs | bsd-3-clause | 11 | 0 | 2 | 1 | 4 | 3 | 1 | -1 | -1 |
{-# LANGUAGE FlexibleContexts, RankNTypes #-}
module EC2Tests.ImageTests
( runImageTests
)
where
import Control.Monad ((>=>))
import Data.Maybe (catMaybes)
import Data.Text (Text)
import Test.Hspec
import Cloud.AWS.EC2
import Cloud.AWS.EC2.Types
import Util
import EC2Tests.Util
region :: Text
region = "ap-northeast-1"
-- | Run the EC2 image test suite against the configured region.  Note
-- these hit the real AWS API; 'miss' tolerates connection failures.
runImageTests :: IO ()
runImageTests = hspec $ do
    describe "describeImages" $ do
        it "doesn't throw any exception" $ do
            testEC2 region (describeImages [] [] [] []) `miss` anyConnectionException
    describe "{create,deregister}Image and {describe,modify}ImageAttribute" $ do
        it "doesn't throw any exception" $ do
            testEC2' region (
                withInstance testRunInstancesRequest $ \Instance{instanceId = inst} -> do
                    waitForInstanceState InstanceStateRunning inst
                    let name = "createImageTest"
                        desc = "For HSpec testing"
                    -- Create an AMI from the instance, round-trip its
                    -- launch-permission attribute, and collect the EBS
                    -- snapshot ids produced by createImage.
                    snaps <- withImage inst name (Just desc) False [] $ \ami -> do
                        Image{imageBlockDeviceMappings = bdms} <- waitForImageState ImageStateAvailable ami
                        mapM_ (describeImageAttribute ami) allAttributes
                        let params =
                                [ LaunchPermissionItemGroup "all"
                                , LaunchPermissionItemUserId "111122223333"
                                , LaunchPermissionItemUserId "333322221111"
                                ]
                        -- Grant then revoke the same permissions so the
                        -- image is left in its original state.
                        modifyImageAttribute ami (Just $ LaunchPermission params []) [] Nothing
                        mapM_ (describeImageAttribute ami) allAttributes
                        modifyImageAttribute ami (Just $ LaunchPermission [] params) [] Nothing
                        return $ catMaybes $ map (blockDeviceMappingEbs >=> ebsSnapshotId) bdms
                    -- Cleanup snapshots created by createImage
                    mapM_ deleteSnapshot snaps
                ) `miss` anyConnectionException
-- | Every AMI attribute the describe/modify tests iterate over.
allAttributes :: [AMIAttribute]
allAttributes =
    [ AMIDescription
    , AMIKernel
    , AMIRamdisk
    , AMILaunchPermission
    , AMIProductCodes
    , AMIBlockDeviceMapping
    ]
| worksap-ate/aws-sdk | test/EC2Tests/ImageTests.hs | bsd-3-clause | 2,228 | 0 | 30 | 747 | 463 | 237 | 226 | 46 | 1 |
module Graphics.Square(
renderSquare
) where
import Control.Monad.IO.Class
import Data.Word
import Foreign.C.Types
import Game.Camera
import Game.GoreAndAsh
import Game.GoreAndAsh.SDL
import SDL
-- | Function of rendering player
renderSquare :: MonadIO m => Window -> Renderer -> Camera -> Double -> V2 Double -> V3 Double -> m ()
renderSquare window renderer c size pos col = do
wsize <- fmap (fmap fromIntegral) . get $ windowSize window
rendererDrawColor renderer $= transColor col
fillRect renderer $ Just $ transformedSquare wsize
where
transColor :: V3 Double -> V4 Word8
transColor (V3 r g b) = V4 (round $ r * 255) (round $ g * 255) (round $ b * 255) 255
modelMtx :: V2 Double -> M33 Double
modelMtx wsize = viewportTransform2D 0 wsize !*! cameraMatrix c !*! translate2D pos
transformedSquare :: V2 Double -> Rectangle CInt
transformedSquare wsize = Rectangle (P topleft) (botright - topleft)
where
topleft = fmap round . applyTransform2D (modelMtx wsize) $ V2 (-size/2) (-size/2)
botright = fmap round . applyTransform2D (modelMtx wsize) $ V2 (size/2) (size/2) | Teaspot-Studio/gore-and-ash-demo | src/client/Graphics/Square.hs | bsd-3-clause | 1,130 | 0 | 13 | 228 | 425 | 212 | 213 | 22 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Configure
-- Copyright : Isaac Jones 2003-2005
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This deals with the /configure/ phase. It provides the 'configure' action
-- which is given the package description and configure flags. It then tries
-- to: configure the compiler; resolves any conditionals in the package
-- description; resolve the package dependencies; check if all the extensions
-- used by this package are supported by the compiler; check that all the build
-- tools are available (including version checks if appropriate); checks for
-- any required @pkg-config@ packages (updating the 'BuildInfo' with the
-- results)
--
-- Then based on all this it saves the info in the 'LocalBuildInfo' and writes
-- it out to the @dist\/setup-config@ file. It also displays various details to
-- the user, the amount of information displayed depending on the verbosity
-- level.
module Distribution.Simple.Configure (configure,
writePersistBuildConfig,
getConfigStateFile,
getPersistBuildConfig,
checkPersistBuildConfigOutdated,
tryGetPersistBuildConfig,
maybeGetPersistBuildConfig,
localBuildInfoFile,
getInstalledPackages, getPackageDBContents,
configCompiler, configCompilerAux,
configCompilerEx, configCompilerAuxEx,
ccLdOptionsBuildInfo,
checkForeignDeps,
interpretPackageDbFlags,
ConfigStateFileError(..),
tryGetConfigStateFile,
platformDefines,
etaHackageUrl)
where
import Distribution.Compiler
( CompilerId(..) )
import Distribution.Utils.NubList
import Distribution.Simple.Compiler
( CompilerFlavor(..), Compiler(..), compilerFlavor, compilerVersion
, compilerInfo
, showCompilerId, unsupportedLanguages, unsupportedExtensions
, PackageDB(..), PackageDBStack, reexportedModulesSupported
, packageKeySupported, renamingPackageFlagsSupported )
import Distribution.Simple.PreProcess ( platformDefines )
import Distribution.Package
( PackageName(PackageName), PackageIdentifier(..), PackageId
, packageName, packageVersion, Package(..)
, Dependency(Dependency), simplifyDependency
, InstalledPackageId(..), thisPackageVersion
, mkPackageKey, PackageKey(..), packageKeyLibraryName )
import qualified Distribution.InstalledPackageInfo as Installed
import Distribution.InstalledPackageInfo (InstalledPackageInfo, emptyInstalledPackageInfo)
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import Distribution.PackageDescription as PD
( PackageDescription(..), specVersion, GenericPackageDescription(..)
, Library(..), hasLibs, Executable(..), BuildInfo(..), allExtensions
, HookedBuildInfo, updatePackageDescription, allBuildInfo
, Flag(flagName), FlagName(..), TestSuite(..), Benchmark(..)
, ModuleReexport(..) , defaultRenaming )
import Distribution.ModuleName
( ModuleName )
import Distribution.PackageDescription.Configuration
( finalizePackageDescription, mapTreeData )
import Distribution.PackageDescription.Check
( PackageCheck(..), checkPackage, checkPackageFiles )
import Distribution.Simple.Program
( Program(..), ProgramLocation(..), ConfiguredProgram(..)
, ProgramConfiguration, defaultProgramConfiguration
, ProgramSearchPathEntry(..), getProgramSearchPath, setProgramSearchPath
, configureAllKnownPrograms, knownPrograms, lookupKnownProgram
, userSpecifyArgss, userSpecifyPaths
, lookupProgram, requireProgram, requireProgramVersion
, pkgConfigProgram, gccProgram, rawSystemProgramStdoutConf )
import Distribution.Simple.Setup
( ConfigFlags(..), CopyDest(..), Flag(..), fromFlag, fromFlagOrDefault
, flagToMaybe )
import Distribution.Simple.InstallDirs
( InstallDirs(..), defaultInstallDirs, combineInstallDirs )
import Distribution.Simple.LocalBuildInfo
( LocalBuildInfo(..), Component(..), ComponentLocalBuildInfo(..)
, LibraryName(..)
, absoluteInstallDirs, prefixRelativeInstallDirs, inplacePackageId
, ComponentName(..), showComponentName, pkgEnabledComponents
, componentBuildInfo, componentName, checkComponentsCyclic )
import Distribution.Simple.BuildPaths
( autogenModulesDir )
import Distribution.Simple.Utils
( die, warn, info, setupMessage
, createDirectoryIfMissingVerbose, moreRecentFile
, intercalate, cabalVersion
, writeFileAtomic
, withTempFile )
import Distribution.System
( OS(..), buildOS, Platform (..), buildPlatform )
import Distribution.Version
( Version(..), anyVersion, orLaterVersion, withinRange, isAnyVersion )
import Distribution.Verbosity
( Verbosity, lessVerbose )
import qualified Distribution.Simple.GHC as GHC
import qualified Distribution.Simple.GHCJS as GHCJS
import qualified Distribution.Simple.ETA as ETA
import qualified Distribution.Simple.JHC as JHC
import qualified Distribution.Simple.LHC as LHC
import qualified Distribution.Simple.UHC as UHC
import qualified Distribution.Simple.HaskellSuite as HaskellSuite
-- Prefer the more generic Data.Traversable.mapM to Prelude.mapM
import Prelude hiding ( mapM )
import Control.Exception
( ErrorCall(..), Exception, evaluate, throw, throwIO, try )
import Control.Monad
( liftM, when, unless, foldM, filterM )
import Distribution.Compat.Binary ( decodeOrFailIO, encode )
import Data.ByteString.Lazy (ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy.Char8 as BLC8
import Data.List
( (\\), nub, partition, isPrefixOf, inits, stripPrefix )
import Data.Maybe
( isNothing, catMaybes, fromMaybe, isJust )
import Data.Either
( partitionEithers )
import qualified Data.Set as Set
#if __GLASGOW_HASKELL__ < 710
import Data.Monoid
( Monoid(..) )
#endif
import qualified Data.Map as Map
import Data.Map (Map)
import Data.Traversable
( mapM )
import Data.Typeable
import System.Directory
( doesFileExist, createDirectoryIfMissing, getTemporaryDirectory )
import System.FilePath
( (</>), isAbsolute )
import qualified System.Info
( compilerName, compilerVersion )
import System.IO
( hPutStrLn, hClose )
import Distribution.Text
( Text(disp), display, simpleParse )
import Text.PrettyPrint
( render, (<>), ($+$), char, text, comma
, quotes, punctuate, nest, sep, hsep )
import Distribution.Compat.Exception ( catchExit, catchIO )
-- | Failure modes of reading the saved configuration file
-- (@dist\/setup-config@) back from disk; rendered for the user by the
-- 'Show' instance below and thrown via the 'Exception' instance.
data ConfigStateFileError
    = ConfigStateFileNoHeader
      -- ^ The file has no header line at all.
    | ConfigStateFileBadHeader
      -- ^ The header line is present but cannot be parsed.
    | ConfigStateFileNoParse
      -- ^ The binary body after the header cannot be decoded.
    | ConfigStateFileMissing
      -- ^ The file does not exist (configure was never run).
    -- | The file was written by a different Cabal version; carries the
    -- recorded Cabal id, the recorded compiler id, and the outcome of
    -- attempting to decode the body anyway.
    | ConfigStateFileBadVersion PackageIdentifier PackageIdentifier (Either ConfigStateFileError LocalBuildInfo)
  deriving (Typeable)
-- | Render each saved-config error as a user-facing message, generally
-- telling the user to re-run the @configure@ step.
instance Show ConfigStateFileError where
    show err = case err of
        ConfigStateFileNoHeader ->
            "Saved package config file header is missing. " ++ tryAgain
        ConfigStateFileBadHeader ->
            "Saved package config file header is corrupt. " ++ tryAgain
        ConfigStateFileNoParse ->
            "Saved package config file body is corrupt. " ++ tryAgain
        ConfigStateFileMissing ->
            "Run the 'configure' command first."
        ConfigStateFileBadVersion oldCabal oldCompiler _ ->
            -- Only mention the compiler when it actually changed too.
            let compilerNote
                  | oldCompiler == currentCompilerId = ""
                  | otherwise =
                      " Additionally the compiler is different (was "
                      ++ display oldCompiler ++ ", now "
                      ++ display currentCompilerId
                      ++ ") which is probably the cause of the problem."
            in "You need to re-run the 'configure' command. "
               ++ "The version of Cabal being used has changed (was "
               ++ display oldCabal ++ ", now "
               ++ display currentCabalId ++ ")."
               ++ compilerNote
      where
        tryAgain = "Try re-running the 'configure' command."
instance Exception ConfigStateFileError
-- | Read the 'LocalBuildInfo' that a previous @configure@ run saved to
-- @filename@.  Throws a 'ConfigStateFileError' when the file is missing,
-- its header is absent or corrupt, its body cannot be decoded, or it was
-- written by a different Cabal version (in which case the result of the
-- attempted decode is carried inside 'ConfigStateFileBadVersion').
getConfigStateFile :: FilePath -> IO LocalBuildInfo
getConfigStateFile filename = do
    exists <- doesFileExist filename
    unless exists $ throwIO ConfigStateFileMissing
    -- Read the config file into a strict ByteString to avoid problems with
    -- lazy I/O, then convert to lazy because the binary package needs that.
    contents <- BS.readFile filename
    let (header, body) = BLC8.span (/='\n') (BLC8.fromChunks [contents])
    headerParseResult <- try $ evaluate $ parseHeader header
    -- NB: 'throw' in a pure binding is lazy -- the bad-header error only
    -- fires if/when cabalId/compId are actually demanded below.
    let (cabalId, compId) =
            case headerParseResult of
              Left (ErrorCall _) -> throw ConfigStateFileBadHeader
              Right x -> x
    let getStoredValue = do
          result <- decodeOrFailIO (BLC8.tail body)
          case result of
            Left _ -> throw ConfigStateFileNoParse
            Right x -> return x
        -- If the file came from a different Cabal version, still attempt
        -- the decode, but wrap the outcome (success or failure) in
        -- 'ConfigStateFileBadVersion' so callers can report it precisely.
        deferErrorIfBadVersion act
          | cabalId /= currentCabalId = do
              eResult <- try act
              throw $ ConfigStateFileBadVersion cabalId compId eResult
          | otherwise = act
    deferErrorIfBadVersion getStoredValue
-- | Read the saved configuration, returning any 'ConfigStateFileError'
-- in a 'Left' instead of letting it propagate as an exception.
tryGetConfigStateFile :: FilePath
                      -> IO (Either ConfigStateFileError LocalBuildInfo)
tryGetConfigStateFile filename = try (getConfigStateFile filename)
-- | Try to read the 'localBuildInfoFile' under the given @dist@ prefix,
-- capturing any 'ConfigStateFileError' as a 'Left'.
tryGetPersistBuildConfig :: FilePath
                         -> IO (Either ConfigStateFileError LocalBuildInfo)
tryGetPersistBuildConfig distPref = try (getPersistBuildConfig distPref)
-- | Read the 'localBuildInfoFile' under the given @dist@ prefix.  Throws a
-- 'ConfigStateFileError' if the file is missing, cannot be read, or was
-- created by an older version of Cabal.
getPersistBuildConfig :: FilePath -> IO LocalBuildInfo
getPersistBuildConfig distPref =
    getConfigStateFile (localBuildInfoFile distPref)
-- | Like 'tryGetPersistBuildConfig', but collapse every failure to
-- 'Nothing' rather than reporting which error occurred.
maybeGetPersistBuildConfig :: FilePath -> IO (Maybe LocalBuildInfo)
maybeGetPersistBuildConfig distPref = do
    eLBI <- tryGetPersistBuildConfig distPref
    return $ case eLBI of
        Left  _   -> Nothing
        Right lbi -> Just lbi
-- | After running configure, write the 'LocalBuildInfo' to the
-- 'localBuildInfoFile' (atomically, preceded by a version header line
-- produced by 'showHeader').
writePersistBuildConfig :: FilePath -> LocalBuildInfo -> IO ()
writePersistBuildConfig distPref lbi = do
    createDirectoryIfMissing False distPref
    let pkgId   = packageId (localPkgDescr lbi)
        payload = BLC8.unlines [showHeader pkgId, encode lbi]
    writeFileAtomic (localBuildInfoFile distPref) payload
-- | Identifier of the Cabal library linked into this Setup program; written
-- into the saved-config header and compared on re-read.
currentCabalId :: PackageIdentifier
currentCabalId = PackageIdentifier (PackageName "Cabal") cabalVersion
-- | Identifier of the compiler that built this Setup program, as reported
-- by "System.Info"; also recorded in the saved-config header.
currentCompilerId :: PackageIdentifier
currentCompilerId = PackageIdentifier (PackageName System.Info.compilerName)
                                      System.Info.compilerVersion
-- | Parse the first line of a saved-config file into the Cabal and compiler
-- package identifiers it records.  Malformed input is reported by (lazily)
-- throwing a 'ConfigStateFileError'; inverse of 'showHeader'.
parseHeader :: ByteString -> (PackageIdentifier, PackageIdentifier)
parseHeader header = case BLC8.words header of
    ["Saved", "package", "config", "for", pkgId, "written", "by", cabalId, "using", compId] ->
        -- The package id itself is only validated; the two results we keep
        -- are the Cabal and compiler ids.
        let parsed = do
              _ <- simpleParse (BLC8.unpack pkgId) :: Maybe PackageIdentifier
              cabalId' <- simpleParse (BLC8.unpack cabalId)
              compId' <- simpleParse (BLC8.unpack compId)
              return (cabalId', compId')
        in case parsed of
             Just ids -> ids
             Nothing  -> throw ConfigStateFileBadHeader
    _ -> throw ConfigStateFileNoHeader
-- | Generate the header line recording which package, Cabal version and
-- compiler produced a saved-config file; inverse of 'parseHeader'.
showHeader :: PackageIdentifier -> ByteString
showHeader pkgId = BLC8.unwords $
       ["Saved", "package", "config", "for", disp' pkgId]
    ++ ["written", "by", disp' currentCabalId]
    ++ ["using", disp' currentCompilerId]
  where
    disp' = BLC8.pack . display
-- | Check whether the saved 'localBuildInfoFile' is outdated with respect
-- to the package's @.cabal@ file (i.e. the @.cabal@ file is more recent).
checkPersistBuildConfigOutdated :: FilePath -> FilePath -> IO Bool
checkPersistBuildConfigOutdated distPref pkg_descr_file =
    moreRecentFile pkg_descr_file (localBuildInfoFile distPref)
-- | Path of the saved configuration file, @dist\/setup-config@, for a
-- given @dist@ prefix.
localBuildInfoFile :: FilePath -> FilePath
localBuildInfoFile distPref = distPref </> "setup-config"
-- -----------------------------------------------------------------------------
-- * Configuration
-- -----------------------------------------------------------------------------
-- |Perform the \"@.\/setup configure@\" action.
-- Returns the @.setup-config@ file.
-- | Perform the \"@.\/setup configure@\" action: resolve flags and
-- dependencies against the installed package set, configure the compiler
-- and required programs, and return the resulting 'LocalBuildInfo'
-- (which callers typically persist with 'writePersistBuildConfig').
--
-- FIX (review): the original bound @reloc@ via
-- @if not flag then return False else return True@, a redundant identity
-- on the Bool; it is now a plain @let reloc = fromFlag $ configRelocatable cfg@.
configure :: (GenericPackageDescription, HookedBuildInfo)
          -> ConfigFlags -> IO LocalBuildInfo
configure (pkg_descr0, pbi) cfg
  = do  let distPref = fromFlag (configDistPref cfg)
            buildDir' = distPref </> "build"
            verbosity = fromFlag (configVerbosity cfg)

        setupMessage verbosity "Configuring" (packageId pkg_descr0)

        -- '--enable-library-coverage' is deprecated in favour of
        -- '--enable-coverage'; warn here but still honour it when computing
        -- configCoverage_ further down.
        unless (configLibCoverage cfg == NoFlag) $ do
            let enable | fromFlag (configLibCoverage cfg) = "enable"
                       | otherwise = "disable"
            warn verbosity
              ("The flag --" ++ enable ++ "-library-coverage is deprecated. "
               ++ "Please use --" ++ enable ++ "-coverage instead.")

        createDirectoryIfMissingVerbose (lessVerbose verbosity) True distPref

        let programsConfig = mkProgramsConfig cfg (configPrograms cfg)
            userInstall = fromFlag (configUserInstall cfg)
            packageDbs = interpretPackageDbFlags userInstall
                         (configPackageDBs cfg)

        -- detect compiler
        (comp, compPlatform, programsConfig') <- configCompilerEx
          (flagToMaybe $ configHcFlavor cfg)
          (flagToMaybe $ configHcPath cfg) (flagToMaybe $ configHcPkg cfg)
          programsConfig (lessVerbose verbosity)
        let version = compilerVersion comp
            flavor  = compilerFlavor comp

        -- Create a PackageIndex that makes *any libraries that might be*
        -- defined internally to this package look like installed packages, in
        -- case an executable should refer to any of them as dependencies.
        --
        -- It must be *any libraries that might be* defined rather than the
        -- actual definitions, because these depend on conditionals in the .cabal
        -- file, and we haven't resolved them yet.  finalizePackageDescription
        -- does the resolution of conditionals, and it takes internalPackageSet
        -- as part of its input.
        --
        -- Currently a package can define no more than one library (which has
        -- the same name as the package) but we could extend this later.
        -- If we later allowed private internal libraries, then here we would
        -- need to pre-scan the conditional data to make a list of all private
        -- libraries that could possibly be defined by the .cabal file.
        let pid = packageId pkg_descr0
            internalPackage = emptyInstalledPackageInfo {
                --TODO: should use a per-compiler method to map the source
                --      package ID into an installed package id we can use
                --      for the internal package set. The open-codes use of
                --      InstalledPackageId . display here is a hack.
                Installed.installedPackageId =
                   InstalledPackageId $ display $ pid,
                Installed.sourcePackageId = pid
              }
            internalPackageSet = PackageIndex.fromList [internalPackage]
        installedPackageSet <- getInstalledPackages (lessVerbose verbosity) comp
                                  packageDbs programsConfig'

        (allConstraints, requiredDepsMap) <- either die return $
          combinedConstraints (configConstraints cfg)
                              (configDependencies cfg)
                              installedPackageSet

        let exactConf = fromFlagOrDefault False (configExactConfiguration cfg)
            -- Constraint test function for the solver
            dependencySatisfiable d@(Dependency depName verRange)
              | exactConf =
                -- When we're given '--exact-configuration', we assume that all
                -- dependencies and flags are exactly specified on the command
                -- line. Thus we only consult the 'requiredDepsMap'. Note that
                -- we're not doing the version range check, so if there's some
                -- dependency that wasn't specified on the command line,
                -- 'finalizePackageDescription' will fail.
                --
                -- TODO: mention '--exact-configuration' in the error message
                -- when this fails?
                (depName `Map.member` requiredDepsMap) || isInternalDep
              | otherwise =
                -- Normal operation: just look up dependency in the package
                -- index.
                not . null . PackageIndex.lookupDependency pkgs' $ d
              where
                pkgs' = PackageIndex.insert internalPackage installedPackageSet
                isInternalDep = pkgName pid == depName
                             && pkgVersion pid `withinRange` verRange
            enableTest t = t { testEnabled = fromFlag (configTests cfg) }
            flaggedTests = map (\(n, t) -> (n, mapTreeData enableTest t))
                               (condTestSuites pkg_descr0)
            enableBenchmark bm = bm { benchmarkEnabled =
                                         fromFlag (configBenchmarks cfg) }
            flaggedBenchmarks = map (\(n, bm) ->
                                        (n, mapTreeData enableBenchmark bm))
                                    (condBenchmarks pkg_descr0)
            pkg_descr0'' = pkg_descr0 { condTestSuites = flaggedTests
                                      , condBenchmarks = flaggedBenchmarks }

        (pkg_descr0', flags) <-
                case finalizePackageDescription
                       (configConfigurationsFlags cfg)
                       dependencySatisfiable
                       compPlatform
                       (compilerInfo comp)
                       allConstraints
                       pkg_descr0''
                of Right r -> return r
                   Left missing ->
                       die $ "At least the following dependencies are missing:\n"
                         ++ (render . nest 4 . sep . punctuate comma
                                    . map (disp . simplifyDependency)
                                    $ missing)

        -- Sanity check: if '--exact-configuration' was given, ensure that the
        -- complete flag assignment was specified on the command line.
        when exactConf $ do
          let cmdlineFlags = map fst (configConfigurationsFlags cfg)
              allFlags     = map flagName . genPackageFlags $ pkg_descr0
              diffFlags    = allFlags \\ cmdlineFlags
          when (not . null $ diffFlags) $
            die $ "'--exact-conf' was given, "
               ++ "but the following flags were not specified: "
               ++ intercalate ", " (map show diffFlags)

        -- add extra include/lib dirs as specified in cfg
        -- we do it here so that those get checked too
        let pkg_descr = addExtraIncludeLibDirs pkg_descr0'

        unless (renamingPackageFlagsSupported comp ||
                    and [ rn == defaultRenaming
                        | bi <- allBuildInfo pkg_descr
                        , rn <- Map.elems (targetBuildRenaming bi)]) $
            die $ "Your compiler does not support thinning and renaming on "
               ++ "package flags. To use this feature you probably must use "
               ++ "GHC 7.9 or later."

        when (not (null flags)) $
          info verbosity $ "Flags chosen: "
                        ++ intercalate ", " [ name ++ "=" ++ display value
                                            | (FlagName name, value) <- flags ]

        when (maybe False (not.null.PD.reexportedModules) (PD.library pkg_descr)
              && not (reexportedModulesSupported comp)) $ do
            die $ "Your compiler does not support module re-exports. To use "
               ++ "this feature you probably must use GHC 7.9 or later."

        checkPackageProblems verbosity pkg_descr0
          (updatePackageDescription pbi pkg_descr)

        -- Handle hole instantiation
        (holeDeps, hole_insts) <- configureInstantiateWith pkg_descr cfg installedPackageSet

        let selectDependencies :: [Dependency] ->
                                  ([FailedDependency], [ResolvedDependency])
            selectDependencies =
                (\xs -> ([ x | Left x <- xs ], [ x | Right x <- xs ]))
              . map (selectDependency internalPackageSet installedPackageSet
                                      requiredDepsMap)

            (failedDeps, allPkgDeps) =
              selectDependencies (buildDepends pkg_descr)

            internalPkgDeps = [ pkgid
                              | InternalDependency _ pkgid <- allPkgDeps ]
            externalPkgDeps = [ pkg
                              | ExternalDependency _ pkg <- allPkgDeps ]

        -- Internal deps (an exe depending on this package's own library)
        -- are only legal from cabal-version >= 1.8 on.
        when (not (null internalPkgDeps)
              && not (newPackageDepsBehaviour pkg_descr)) $
            die $ "The field 'build-depends: "
               ++ intercalate ", " (map (display . packageName) internalPkgDeps)
               ++ "' refers to a library which is defined within the same "
               ++ "package. To use this feature the package must specify at "
               ++ "least 'cabal-version: >= 1.8'."

        reportFailedDependencies failedDeps
        reportSelectedDependencies verbosity allPkgDeps

        -- De-duplicate the chosen external deps (plus hole providers) by
        -- installed package id.
        let installDeps = Map.elems
                        . Map.fromList
                        . map (\v -> (Installed.installedPackageId v, v))
                        $ externalPkgDeps ++ holeDeps

        packageDependsIndex <-
          case PackageIndex.dependencyClosure installedPackageSet
                  (map Installed.installedPackageId installDeps) of
            Left packageDependsIndex -> return packageDependsIndex
            Right broken ->
              die $ "The following installed packages are broken because other"
                 ++ " packages they depend on are missing. These broken "
                 ++ "packages must be rebuilt before they can be used.\n"
                 ++ unlines [ "package "
                           ++ display (packageId pkg)
                           ++ " is broken due to missing package "
                           ++ intercalate ", " (map display deps)
                            | (pkg, deps) <- broken ]

        -- Insert a pseudo-package standing for the package being configured
        -- so its own deps participate in the inconsistency check.
        let pseudoTopPkg = emptyInstalledPackageInfo {
                Installed.installedPackageId =
                   InstalledPackageId (display (packageId pkg_descr)),
                Installed.sourcePackageId = packageId pkg_descr,
                Installed.depends =
                  map Installed.installedPackageId installDeps
              }
        case PackageIndex.dependencyInconsistencies
           . PackageIndex.insert pseudoTopPkg
           $ packageDependsIndex of
          [] -> return ()
          inconsistencies ->
            warn verbosity $
                 "This package indirectly depends on multiple versions of the same "
              ++ "package. This is highly likely to cause a compile failure.\n"
              ++ unlines [ "package " ++ display pkg ++ " requires "
                        ++ display (PackageIdentifier name ver)
                         | (name, uses) <- inconsistencies
                         , (pkg, ver) <- uses ]

        -- Calculate the package key. We're going to store it in LocalBuildInfo
        -- canonically, but ComponentsLocalBuildInfo also needs to know about it
        -- XXX Do we need the internal deps?
        -- NB: does *not* include holeDeps!
        let pkg_key = mkPackageKey (packageKeySupported comp)
                        (package pkg_descr)
                        (map Installed.packageKey externalPkgDeps)
                        (map (\(k,(p,m)) -> (k,(Installed.packageKey p,m))) hole_insts)

        -- internal component graph
        buildComponents <-
          case mkComponentsGraph pkg_descr internalPkgDeps of
            Left  componentCycle -> reportComponentCycle componentCycle
            Right components     ->
              case mkComponentsLocalBuildInfo packageDependsIndex pkg_descr
                     internalPkgDeps externalPkgDeps holeDeps
                     (Map.fromList hole_insts)
                     pkg_key components of
                Left  problems    -> reportModuleReexportProblems problems
                Right components' -> return components'

        -- installation directories
        defaultDirs <- defaultInstallDirs flavor userInstall (hasLibs pkg_descr)
        let installDirs = combineInstallDirs fromFlagOrDefault
                            defaultDirs (configInstallDirs cfg)

        -- check languages and extensions
        let langlist = nub $ catMaybes $ map defaultLanguage
                       (allBuildInfo pkg_descr)
        let langs = unsupportedLanguages comp langlist
        when (not (null langs)) $
          die $ "The package " ++ display (packageId pkg_descr0)
             ++ " requires the following languages which are not "
             ++ "supported by " ++ display (compilerId comp) ++ ": "
             ++ intercalate ", " (map display langs)
        let extlist = nub $ concatMap allExtensions (allBuildInfo pkg_descr)
        let exts = unsupportedExtensions comp extlist
        when (not (null exts)) $
          die $ "The package " ++ display (packageId pkg_descr0)
             ++ " requires the following language extensions which are not "
             ++ "supported by " ++ display (compilerId comp) ++ ": "
             ++ intercalate ", " (map display exts)

        -- configured known/required programs & external build tools
        -- exclude build-tool deps on "internal" exes in the same package
        let requiredBuildTools =
              [ buildTool
              | let exeNames = map exeName (executables pkg_descr)
              , bi <- allBuildInfo pkg_descr
              , buildTool@(Dependency (PackageName toolName) reqVer)
                <- buildTools bi
              , let isInternal =
                        toolName `elem` exeNames
                        -- we assume all internal build-tools are
                        -- versioned with the package:
                     && packageVersion pkg_descr `withinRange` reqVer
              , not isInternal ]

        programsConfig'' <-
              configureAllKnownPrograms (lessVerbose verbosity) programsConfig'
          >>= configureRequiredPrograms verbosity requiredBuildTools

        (pkg_descr', programsConfig''') <-
          configurePkgconfigPackages verbosity pkg_descr programsConfig''

        split_objs <-
           if not (fromFlag $ configSplitObjs cfg)
                then return False
                else case flavor of
                       GHC | version >= Version [6,5] [] -> return True
                       GHCJS                             -> return True
                       _ -> do warn verbosity
                                    ("this compiler does not support " ++
                                     "--enable-split-objs; ignoring")
                               return False

        let ghciLibByDefault =
              case compilerId comp of
                CompilerId GHC _ ->
                  -- If ghc is non-dynamic, then ghci needs object files,
                  -- so we build one by default.
                  --
                  -- Technically, archive files should be sufficient for ghci,
                  -- but because of GHC bug #8942, it has never been safe to
                  -- rely on them. By the time that bug was fixed, ghci had
                  -- been changed to read shared libraries instead of archive
                  -- files (see next code block).
                  not (GHC.isDynamic comp)
                CompilerId GHCJS _ ->
                  not (GHCJS.isDynamic comp)
                CompilerId ETA _ -> False -- TODO: Support ETAi
                _ -> False

        let sharedLibsByDefault
              | fromFlag (configDynExe cfg) =
                  -- build a shared library if dynamically-linked
                  -- executables are requested
                  True
              | otherwise = case compilerId comp of
                  CompilerId GHC _ ->
                    -- if ghc is dynamic, then ghci needs a shared
                    -- library, so we build one by default.
                    GHC.isDynamic comp
                  CompilerId GHCJS _ ->
                    GHCJS.isDynamic comp
                  CompilerId ETA _ -> False -- TODO: Support ETAi
                  _ -> False
            withSharedLib_ =
                -- build shared libraries if required by GHC or by the
                -- executable linking mode, but allow the user to force
                -- building only static library archives with
                -- --disable-shared.
                fromFlagOrDefault sharedLibsByDefault $ configSharedLib cfg
            withDynExe_ = fromFlag $ configDynExe cfg
        when (withDynExe_ && not withSharedLib_) $ warn verbosity $
               "Executables will use dynamic linking, but a shared library "
            ++ "is not being built. Linking will fail if any executables "
            ++ "depend on the library."

        let withProfExe_ = fromFlagOrDefault False $ configProfExe cfg
            withProfLib_ = fromFlagOrDefault withProfExe_ $ configProfLib cfg
        when (withProfExe_ && not withProfLib_) $ warn verbosity $
               "Executables will be built with profiling, but library "
            ++ "profiling is disabled. Linking will fail if any executables "
            ++ "depend on the library."

        -- The deprecated --enable-library-coverage flag (warned about above)
        -- is merged into the new --enable-coverage setting here.
        let configCoverage_ =
              mappend (configCoverage cfg) (configLibCoverage cfg)
            cfg' = cfg { configCoverage = configCoverage_ }

        -- FIX: relocatability is exactly what the user asked for; the former
        -- 'if not flag then return False else return True' was redundant.
        let reloc = fromFlag $ configRelocatable cfg

        let lbi = LocalBuildInfo {
                    configFlags         = cfg',
                    extraConfigArgs     = [],  -- Currently configure does not
                                               -- take extra args, but if it
                                               -- did they would go here.
                    installDirTemplates = installDirs,
                    compiler            = comp,
                    hostPlatform        = compPlatform,
                    buildDir            = buildDir',
                    componentsConfigs   = buildComponents,
                    installedPkgs       = packageDependsIndex,
                    pkgDescrFile        = Nothing,
                    localPkgDescr       = pkg_descr',
                    pkgKey              = pkg_key,
                    instantiatedWith    = hole_insts,
                    withPrograms        = programsConfig''',
                    withVanillaLib      = fromFlag $ configVanillaLib cfg,
                    withProfLib         = withProfLib_,
                    withSharedLib       = withSharedLib_,
                    withDynExe          = withDynExe_,
                    withProfExe         = withProfExe_,
                    withOptimization    = fromFlag $ configOptimization cfg,
                    withDebugInfo       = fromFlag $ configDebugInfo cfg,
                    withGHCiLib         = fromFlagOrDefault ghciLibByDefault $
                                          configGHCiLib cfg,
                    splitObjs           = split_objs,
                    stripExes           = fromFlag $ configStripExes cfg,
                    stripLibs           = fromFlag $ configStripLibs cfg,
                    withPackageDB       = packageDbs,
                    progPrefix          = fromFlag $ configProgPrefix cfg,
                    progSuffix          = fromFlag $ configProgSuffix cfg,
                    relocatable         = reloc
                  }

        when reloc (checkRelocatable verbosity pkg_descr lbi)

        let dirs = absoluteInstallDirs pkg_descr lbi NoCopyDest
            relative = prefixRelativeInstallDirs (packageId pkg_descr) lbi

        unless (isAbsolute (prefix dirs)) $ die $
            "expected an absolute directory name for --prefix: " ++ prefix dirs

        info verbosity $ "Using " ++ display currentCabalId
                      ++ " compiled by " ++ display currentCompilerId
        info verbosity $ "Using compiler: " ++ showCompilerId comp
        info verbosity $ "Using install prefix: " ++ prefix dirs

        let dirinfo name dir isPrefixRelative =
              info verbosity $ name ++ " installed in: " ++ dir ++ relNote
              where relNote = case buildOS of
                      Windows | not (hasLibs pkg_descr)
                             && isNothing isPrefixRelative
                              -> " (fixed location)"
                      _       -> ""

        dirinfo "Binaries"            (bindir dirs)     (bindir relative)
        dirinfo "Libraries"           (libdir dirs)     (libdir relative)
        dirinfo "Private binaries"    (libexecdir dirs) (libexecdir relative)
        dirinfo "Data files"          (datadir dirs)    (datadir relative)
        dirinfo "Documentation"       (docdir dirs)     (docdir relative)
        dirinfo "Configuration files" (sysconfdir dirs) (sysconfdir relative)

        sequence_ [ reportProgram verbosity prog configuredProg
                  | (prog, configuredProg) <- knownPrograms programsConfig''' ]

        return lbi
    where
      -- Graft the user-supplied --extra-lib-dirs / --extra-include-dirs onto
      -- every library and executable build-info.
      addExtraIncludeLibDirs pkg_descr =
          let extraBi = mempty { extraLibDirs = configExtraLibDirs cfg
                               , PD.includeDirs = configExtraIncludeDirs cfg}
              modifyLib l        = l{ libBuildInfo = libBuildInfo l
                                                     `mappend` extraBi }
              modifyExecutable e = e{ buildInfo    = buildInfo e
                                                     `mappend` extraBi}
          in pkg_descr{ library     = modifyLib `fmap` library pkg_descr
                      , executables = modifyExecutable `map`
                                      executables pkg_descr}
-- | Build the 'ProgramConfiguration' used by 'configure': extend the
-- program search path with any user-supplied extra directories, then
-- record the per-program paths and arguments from the command line.
mkProgramsConfig :: ConfigFlags -> ProgramConfiguration -> ProgramConfiguration
mkProgramsConfig cfg initialProgramsConfig = withArgs
  where
    withArgs   = userSpecifyArgss (configProgramArgs cfg) withPaths
    withPaths  = userSpecifyPaths (configProgramPaths cfg) withSearch
    withSearch = setProgramSearchPath searchpath initialProgramsConfig
    searchpath = getProgramSearchPath (initialProgramsConfig)
                 ++ map ProgramSearchPathDir
                        (fromNubList $ configProgramPathExtra cfg)
-- -----------------------------------------------------------------------------
-- Configuring package dependencies
-- | Log (at 'info' level) whether a known program was found, and if so
-- which version and where it came from (system search vs. user-given).
reportProgram :: Verbosity -> Program -> Maybe ConfiguredProgram -> IO ()
reportProgram verbosity prog mConfiguredProg =
    info verbosity (maybe missing found mConfiguredProg)
  where
    missing = "No " ++ programName prog ++ " found"
    found configuredProg =
        "Using " ++ programName prog ++ version ++ location
      where
        location = case programLocation configuredProg of
          FoundOnSystem p -> " found on system at: " ++ p
          UserSpecified p -> " given by user at: " ++ p
        version = case programVersion configuredProg of
          Nothing -> ""
          Just v  -> " version " ++ display v
-- | Base URL of package pages on Hackage; a package name is appended when
-- telling the user where to fetch a missing dependency.
hackageUrl :: String
hackageUrl = "http://hackage.haskell.org/package/"
-- | URL of the Eta-patched package set repository (typelead\/eta-hackage);
-- exported for use in user-facing messages.
etaHackageUrl :: String
etaHackageUrl = "https://github.com/typelead/eta-hackage"
-- | Outcome of 'selectDependency': either a package picked from the
-- installed package index, or the library defined within this very
-- package (used when an executable depends on the package's own library).
data ResolvedDependency = ExternalDependency Dependency InstalledPackageInfo
                        | InternalDependency Dependency PackageId -- should be a
                                                                  -- lib name
-- | Ways 'selectDependency' can fail: no version of the package is
-- installed at all, or no installed version satisfies the constraint.
data FailedDependency = DependencyNotExists PackageName
                      | DependencyNoVersion Dependency
-- | Test for a package dependency and record the version we have installed.
--
-- The internal package index is preferred over the installed one.  E.g.
-- with
--
-- > Name: MyLibrary
-- > Version: 0.1
-- > Executable my-exec
-- >   build-depends: MyLibrary
--
-- @build-depends: MyLibrary@ must match the internal library even when a
-- newer @MyLibrary-0.2@ is installed, while @build-depends: MyLibrary >= 0.2@
-- falls through to the installed one.  Explicitly given deps
-- (@--dependency=...@) are consulted next, and only then the installed index.
selectDependency :: InstalledPackageIndex -- ^ Internally defined packages
                 -> InstalledPackageIndex -- ^ Installed packages
                 -> Map PackageName InstalledPackageInfo
                    -- ^ Packages for which we have been given specific deps to use
                 -> Dependency
                 -> Either FailedDependency ResolvedDependency
selectDependency internalIndex installedIndex requiredDepsMap
                 dep@(Dependency pkgname vr) =
    case internalMatch of
      Just pkg -> Right (InternalDependency dep (packageId pkg))
      Nothing  -> case Map.lookup pkgname requiredDepsMap of
        -- If we know the exact pkg to use, then use it.
        Just pkginstance -> Right (ExternalDependency dep pkginstance)
        -- Otherwise pick an arbitrary instance of the latest version.
        Nothing -> case PackageIndex.lookupDependency installedIndex dep of
          []   -> Left (DependencyNotExists pkgname)
          pkgs -> let (_ver, pkginstances) = last pkgs
                  in  Right (ExternalDependency dep (head pkginstances))
  where
    -- The internal index matches only when it holds exactly one version of
    -- the package and that version satisfies the requested range.
    internalMatch = case PackageIndex.lookupPackageName internalIndex pkgname of
      [(_, [pkg])] | packageVersion pkg `withinRange` vr -> Just pkg
      _                                                  -> Nothing
-- | Log (at 'info' level) which concrete package each resolved dependency
-- was satisfied by.
reportSelectedDependencies :: Verbosity
                           -> [ResolvedDependency] -> IO ()
reportSelectedDependencies verbosity deps =
    info verbosity (unlines (map describe deps))
  where
    describe resolved =
        "Dependency " ++ display (simplifyDependency dep)
        ++ ": using " ++ display pkgid
      where
        (dep, pkgid) = case resolved of
          ExternalDependency dep' pkg'   -> (dep', packageId pkg')
          InternalDependency dep' pkgid' -> (dep', pkgid')
-- | Abort ('die') with one paragraph per unresolved dependency; a no-op
-- when the list is empty.
reportFailedDependencies :: [FailedDependency] -> IO ()
reportFailedDependencies failed
  | null failed = return ()
  | otherwise   = die (intercalate "\n\n" (map reportFailedDependency failed))
  where
    reportFailedDependency (DependencyNotExists pkgname) =
         "there is no version of " ++ display pkgname ++ " installed.\n"
      ++ "Perhaps you need to download and install it from\n"
      ++ hackageUrl ++ display pkgname ++ "?"
    reportFailedDependency (DependencyNoVersion dep) =
        "cannot satisfy dependency " ++ display (simplifyDependency dep) ++ "\n"
-- | Read the installed-package index for the given package DB stack,
-- dispatching to the implementation for the compiler flavour in use.
-- Dies when the stack is empty (e.g. @--package-db=clear@ with no
-- follow-up) or when the flavour has no implementation.
getInstalledPackages :: Verbosity -> Compiler
                     -> PackageDBStack -> ProgramConfiguration
                     -> IO InstalledPackageIndex
getInstalledPackages verbosity comp packageDBs progconf = do
  when (null packageDBs) $
    die $ "No package databases have been specified. If you use "
       ++ "--package-db=clear, you must follow it with --package-db= "
       ++ "with 'global', 'user' or a specific file."
  info verbosity "Reading installed packages..."
  -- Dispatch on the compiler flavour; note UHC additionally needs the
  -- 'Compiler' value itself.
  case compilerFlavor comp of
    GHC -> GHC.getInstalledPackages verbosity packageDBs progconf
    GHCJS -> GHCJS.getInstalledPackages verbosity packageDBs progconf
    ETA -> ETA.getInstalledPackages verbosity packageDBs progconf
    JHC -> JHC.getInstalledPackages verbosity packageDBs progconf
    LHC -> LHC.getInstalledPackages verbosity packageDBs progconf
    UHC -> UHC.getInstalledPackages verbosity comp packageDBs progconf
    HaskellSuite {} ->
      HaskellSuite.getInstalledPackages verbosity packageDBs progconf
    flv -> die $ "don't know how to find the installed packages for "
              ++ display flv
-- | Like 'getInstalledPackages', but for a single package DB rather than
-- a whole stack.
getPackageDBContents :: Verbosity -> Compiler
                     -> PackageDB -> ProgramConfiguration
                     -> IO InstalledPackageIndex
getPackageDBContents verbosity comp packageDB progconf = do
  info verbosity "Reading installed packages..."
  case compilerFlavor comp of
    -- GHC, GHCJS and ETA have dedicated single-DB readers.
    GHC -> GHC.getPackageDBContents verbosity packageDB progconf
    GHCJS -> GHCJS.getPackageDBContents verbosity packageDB progconf
    ETA -> ETA.getPackageDBContents verbosity packageDB progconf
    -- For other compilers, try to fall back on 'getInstalledPackages'.
    _ -> getInstalledPackages verbosity comp [packageDB] progconf
-- | The user interface specifies the package dbs to use with a combination
-- of @--global@, @--user@ and @--package-db=global|user|clear|$file@.
-- This function combines the global\/user flag and interprets the
-- package-db flags into a single package db stack.
interpretPackageDbFlags :: Bool -> [Maybe PackageDB] -> PackageDBStack
interpretPackageDbFlags userInstall specificDBs =
    foldl merge initialStack specificDBs
  where
    -- Without user-install only the global DB is consulted by default.
    initialStack
      | userInstall = [GlobalPackageDB, UserPackageDB]
      | otherwise   = [GlobalPackageDB]
    -- 'Nothing' encodes @--package-db=clear@: drop everything accumulated
    -- so far; a concrete DB is appended at the end of the stack.
    merge _    Nothing   = []
    merge dbs' (Just db) = dbs' ++ [db]
-- | First Cabal spec version with per-target dependency behaviour; see
-- 'newPackageDepsBehaviour'.
newPackageDepsBehaviourMinVersion :: Version
newPackageDepsBehaviourMinVersion = Version [1,7,1] []
-- | In older cabal versions, there was only one set of package dependencies
-- for the whole package. In this version, we can have separate dependencies
-- per target, but we only enable this behaviour if the minimum cabal version
-- specified is >= a certain minimum. Otherwise, for compatibility we use the
-- old behaviour.
newPackageDepsBehaviour :: PackageDescription -> Bool
newPackageDepsBehaviour pkg =
   specVersion pkg >= newPackageDepsBehaviourMinVersion
-- We are given both --constraint="foo < 2.0" style constraints and also
-- specific packages to pick via --dependency="foo=foo-2.0-177d5cdf20962d0581".
--
-- When finalising the package we have to take into account the specific
-- installed deps we've been given, and the finalise function expects
-- constraints, so we have to translate these deps into version constraints.
--
-- But after finalising we then have to make sure we pick the right specific
-- deps in the end. So we still need to remember which installed packages to
-- pick.
--
-- Runs in the @Either String@ monad: the two 'when'-guarded 'Left's below
-- short-circuit with a rendered error message, in that order, before the
-- combined result is returned.
combinedConstraints :: [Dependency] ->
                       [(PackageName, InstalledPackageId)] ->
                       InstalledPackageIndex ->
                       Either String ([Dependency],
                                      Map PackageName InstalledPackageInfo)
combinedConstraints constraints dependencies installedPackages = do
    when (not (null badInstalledPackageIds)) $
      Left $ render $ text "The following package dependencies were requested"
         $+$ nest 4 (dispDependencies badInstalledPackageIds)
         $+$ text "however the given installed package instance does not exist."
    when (not (null badNames)) $
      Left $ render $ text "The following package dependencies were requested"
         $+$ nest 4 (dispDependencies badNames)
         $+$ text "however the installed package's name does not match the name given."
    --TODO: we don't check that all dependencies are used!
    return (allConstraints, idConstraintMap)
  where
    -- The union of the command-line constraints and an exact-version
    -- constraint for every --dependency that resolved to an installed pkg.
    allConstraints :: [Dependency]
    allConstraints = constraints
                  ++ [ thisPackageVersion (packageId pkg)
                     | (_, _, Just pkg) <- dependenciesPkgInfo ]
    -- Remembers exactly which installed package to pick for each name.
    idConstraintMap :: Map PackageName InstalledPackageInfo
    idConstraintMap = Map.fromList
                        [ (packageName pkg, pkg)
                        | (_, _, Just pkg) <- dependenciesPkgInfo ]
    -- The dependencies along with the installed package info, if it exists
    dependenciesPkgInfo :: [(PackageName, InstalledPackageId,
                             Maybe InstalledPackageInfo)]
    dependenciesPkgInfo =
      [ (pkgname, ipkgid, mpkg)
      | (pkgname, ipkgid) <- dependencies
      , let mpkg = PackageIndex.lookupInstalledPackageId
                     installedPackages ipkgid
      ]
    -- If we looked up a package specified by an installed package id
    -- (i.e. someone has written a hash) and didn't find it then it's
    -- an error.
    badInstalledPackageIds =
      [ (pkgname, ipkgid)
      | (pkgname, ipkgid, Nothing) <- dependenciesPkgInfo ]
    -- If someone has written e.g.
    -- --dependency="foo=MyOtherLib-1.0-07...5bf30" then they have
    -- probably made a mistake.
    badNames =
      [ (requestedPkgName, ipkgid)
      | (requestedPkgName, ipkgid, Just pkg) <- dependenciesPkgInfo
      , let foundPkgName = packageName pkg
      , requestedPkgName /= foundPkgName ]
    -- Pretty-print offending --dependency flags for the error messages.
    dispDependencies deps =
      hsep [ text "--dependency="
             <> quotes (disp pkgname <> char '=' <> disp ipkgid)
           | (pkgname, ipkgid) <- deps ]
-- -----------------------------------------------------------------------------
-- Configuring hole instantiation
-- | Check and resolve module-hole instantiation (@--instantiate-with@).
-- Returns the installed packages providing hole implementations (may
-- contain duplicates) together with the hole-to-(package, module)
-- mapping restricted to the signatures this package actually declares.
configureInstantiateWith :: PackageDescription
                         -> ConfigFlags
                         -> InstalledPackageIndex -- ^ installed packages
                         -> IO ([InstalledPackageInfo],
                                [(ModuleName, (InstalledPackageInfo, ModuleName))])
configureInstantiateWith pkg_descr cfg installedPackageSet = do
    -- Holes: First, check and make sure the provided instantiation covers
    -- all the holes we know about. Indefinite package installation is
    -- not handled at all at this point.
    -- NB: We union together /all/ of the requirements when calculating
    -- the package key.
    -- NB: For now, we assume that dependencies don't contribute signatures.
    -- This will be handled by epm; as far as ./Setup is
    -- concerned, the most important thing is to be provided correctly
    -- built dependencies.
    let signatures =
          maybe [] (\lib -> requiredSignatures lib ++ exposedSignatures lib)
                (PD.library pkg_descr)
        signatureSet = Set.fromList signatures
        instantiateMap = Map.fromList (configInstantiateWith cfg)
        -- Declared signatures with no user-supplied implementation.
        missing_impls = filter (not . flip Map.member instantiateMap) signatures
        -- User-supplied instantiations restricted to declared signatures.
        hole_insts0 = filter (\(k,_) -> Set.member k signatureSet) (configInstantiateWith cfg)
    when (not (null missing_impls)) $
        die $ "Missing signature implementations for these modules: "
            ++ intercalate ", " (map display missing_impls)
    -- Holes: Next, we need to make sure we have packages to actually
    -- provide the implementations we're talking about. This is on top
    -- of the normal dependency resolution process.
    -- TODO: internal dependencies (e.g. the test package depending on the
    -- main library) is not currently supported
    let selectHoleDependency (k,(i,m)) =
          case PackageIndex.lookupInstalledPackageId installedPackageSet i of
            Just pkginst -> Right (k,(pkginst, m))
            Nothing -> Left i
        (failed_hmap, hole_insts) = partitionEithers (map selectHoleDependency hole_insts0)
        holeDeps = map (fst.snd) hole_insts -- could have dups
    -- Holes: Finally, any dependencies selected this way have to be
    -- included in the allPkgs index, as well as the buildComponents.
    -- But don't report these as potential inconsistencies!
    when (not (null failed_hmap)) $
        die $ "Could not resolve these package IDs (from signature implementations): "
            ++ intercalate ", " (map display failed_hmap)
    return (holeDeps, hole_insts)
-- -----------------------------------------------------------------------------
-- Configuring program dependencies
-- | Configure every build-tool dependency in turn, threading the
-- updated program configuration through each step.
configureRequiredPrograms :: Verbosity -> [Dependency] -> ProgramConfiguration
                          -> IO ProgramConfiguration
configureRequiredPrograms verbosity deps conf0 = go conf0 deps
  where
    -- Explicit left-to-right accumulation (equivalent to a monadic fold).
    go conf []     = return conf
    go conf (d:ds) = do
      conf' <- configureRequiredProgram verbosity conf d
      go conf' ds
-- | Locate and configure a single build-tool dependency.  When a version
-- range was given, the discovered program version must fall within it;
-- dies if the named tool is not a known program.
configureRequiredProgram :: Verbosity -> ProgramConfiguration -> Dependency
-> IO ProgramConfiguration
configureRequiredProgram verbosity conf
(Dependency (PackageName progName) verRange) =
case lookupKnownProgram progName conf of
Nothing -> die ("Unknown build tool " ++ progName)
Just prog
-- requireProgramVersion always requires the program have a version
-- but if the user says "build-depends: foo" ie no version constraint
-- then we should not fail if we cannot discover the program version.
| verRange == anyVersion -> do
(_, conf') <- requireProgram verbosity prog conf
return conf'
| otherwise -> do
(_, _, conf') <- requireProgramVersion verbosity prog verRange conf
return conf'
-- -----------------------------------------------------------------------------
-- Configuring pkg-config package dependencies
-- | Check that every pkg-config dependency declared in the package is
-- installed (and within its version range), then fold the C compiler and
-- linker flags reported by pkg-config into each component's 'BuildInfo'.
-- Requires pkg-config >= 0.9.0 to be available.
configurePkgconfigPackages :: Verbosity -> PackageDescription
-> ProgramConfiguration
-> IO (PackageDescription, ProgramConfiguration)
configurePkgconfigPackages verbosity pkg_descr conf
| null allpkgs = return (pkg_descr, conf)
| otherwise = do
(_, _, conf') <- requireProgramVersion
(lessVerbose verbosity) pkgConfigProgram
(orLaterVersion $ Version [0,9,0] []) conf
mapM_ requirePkg allpkgs
lib' <- mapM addPkgConfigBILib (library pkg_descr)
exes' <- mapM addPkgConfigBIExe (executables pkg_descr)
tests' <- mapM addPkgConfigBITest (testSuites pkg_descr)
benches' <- mapM addPkgConfigBIBench (benchmarks pkg_descr)
let pkg_descr' = pkg_descr { library = lib', executables = exes',
testSuites = tests', benchmarks = benches' }
return (pkg_descr', conf')
where
allpkgs = concatMap pkgconfigDepends (allBuildInfo pkg_descr)
-- Run pkg-config with the given arguments, capturing its stdout.
pkgconfig = rawSystemProgramStdoutConf (lessVerbose verbosity)
pkgConfigProgram conf
-- Die with a helpful message unless the dependency is installed
-- and its version is inside the requested range.
requirePkg dep@(Dependency (PackageName pkg) range) = do
version <- pkgconfig ["--modversion", pkg]
`catchIO` (\_ -> die notFound)
`catchExit` (\_ -> die notFound)
case simpleParse version of
Nothing -> die "parsing output of pkg-config --modversion failed"
Just v | not (withinRange v range) -> die (badVersion v)
| otherwise -> info verbosity (depSatisfied v)
where
notFound = "The pkg-config package '" ++ pkg ++ "'"
++ versionRequirement
++ " is required but it could not be found."
badVersion v = "The pkg-config package '" ++ pkg ++ "'"
++ versionRequirement
++ " is required but the version installed on the"
++ " system is version " ++ display v
depSatisfied v = "Dependency " ++ display dep
++ ": using version " ++ display v
versionRequirement
| isAnyVersion range = ""
| otherwise = " version " ++ display range
-- Adds pkgconfig dependencies to the build info for a component
addPkgConfigBI compBI setCompBI comp = do
bi <- pkgconfigBuildInfo (pkgconfigDepends (compBI comp))
return $ setCompBI comp (compBI comp `mappend` bi)
-- Adds pkgconfig dependencies to the build info for a library
addPkgConfigBILib = addPkgConfigBI libBuildInfo $
\lib bi -> lib { libBuildInfo = bi }
-- Adds pkgconfig dependencies to the build info for an executable
addPkgConfigBIExe = addPkgConfigBI buildInfo $
\exe bi -> exe { buildInfo = bi }
-- Adds pkgconfig dependencies to the build info for a test suite
addPkgConfigBITest = addPkgConfigBI testBuildInfo $
\test bi -> test { testBuildInfo = bi }
-- Adds pkgconfig dependencies to the build info for a benchmark
addPkgConfigBIBench = addPkgConfigBI benchmarkBuildInfo $
\bench bi -> bench { benchmarkBuildInfo = bi }
-- Query pkg-config once for all the listed packages and turn the
-- reported cflags/libs into a 'BuildInfo'.
pkgconfigBuildInfo :: [Dependency] -> IO BuildInfo
pkgconfigBuildInfo [] = return mempty
pkgconfigBuildInfo pkgdeps = do
let pkgs = nub [ display pkg | Dependency pkg _ <- pkgdeps ]
ccflags <- pkgconfig ("--cflags" : pkgs)
ldflags <- pkgconfig ("--libs" : pkgs)
return (ccLdOptionsBuildInfo (words ccflags) (words ldflags))
-- | Makes a 'BuildInfo' from C compiler and linker flags.
--
-- @-I@ flags become include dirs, @-l@ flags become extra libs and @-L@
-- flags become extra lib dirs; all remaining flags are passed through as
-- plain cc/ld options.  Useful with the output of configuration programs
-- like pkg-config, mysql-config, freealut-config etc.  For example:
--
-- > ccflags <- rawSystemProgramStdoutConf verbosity prog conf ["--cflags"]
-- > ldflags <- rawSystemProgramStdoutConf verbosity prog conf ["--libs"]
-- > return (ccldOptionsBuildInfo (words ccflags) (words ldflags))
--
ccLdOptionsBuildInfo :: [String] -> [String] -> BuildInfo
ccLdOptionsBuildInfo cflags ldflags = mempty
  { PD.includeDirs  = map stripFlag incDirFlags
  , PD.extraLibs    = map stripFlag libFlags
  , PD.extraLibDirs = map stripFlag libDirFlags
  , PD.ccOptions    = otherCcFlags
  , PD.ldOptions    = otherLdFlags
  }
  where
    -- Split out the flags we recognise; everything else passes through.
    (incDirFlags, otherCcFlags) = partition ("-I" `isPrefixOf`) cflags
    (libFlags,    rest)         = partition ("-l" `isPrefixOf`) ldflags
    (libDirFlags, otherLdFlags) = partition ("-L" `isPrefixOf`) rest
    -- Drop the two-character flag prefix, keeping just the payload.
    stripFlag = drop 2
-- -----------------------------------------------------------------------------
-- Determining the compiler details
-- | Configure the compiler from the settings recorded in 'ConfigFlags',
-- deriving the program configuration from the flags as well.
configCompilerAuxEx :: ConfigFlags
                    -> IO (Compiler, Platform, ProgramConfiguration)
configCompilerAuxEx cfg =
    configCompilerEx mbFlavor mbHcPath mbHcPkg progConf verbosity
  where
    mbFlavor  = flagToMaybe (configHcFlavor cfg)
    mbHcPath  = flagToMaybe (configHcPath cfg)
    mbHcPkg   = flagToMaybe (configHcPkg cfg)
    progConf  = mkProgramsConfig cfg defaultProgramConfiguration
    verbosity = fromFlag (configVerbosity cfg)
-- | Configure the requested compiler flavour, dispatching to the
-- flavour-specific configure action.  Dies when no flavour is given or
-- the flavour is unknown; when the backend does not report a platform,
-- 'buildPlatform' is used as the default.
configCompilerEx :: Maybe CompilerFlavor -> Maybe FilePath -> Maybe FilePath
-> ProgramConfiguration -> Verbosity
-> IO (Compiler, Platform, ProgramConfiguration)
configCompilerEx Nothing _ _ _ _ = die "Unknown compiler"
configCompilerEx (Just hcFlavor) hcPath hcPkg conf verbosity = do
(comp, maybePlatform, programsConfig) <- case hcFlavor of
GHC -> GHC.configure verbosity hcPath hcPkg conf
GHCJS -> GHCJS.configure verbosity hcPath hcPkg conf
ETA -> ETA.configure verbosity hcPath hcPkg conf
JHC -> JHC.configure verbosity hcPath hcPkg conf
-- LHC is configured on top of a freshly configured GHC.
LHC -> do (_, _, ghcConf) <- GHC.configure verbosity Nothing hcPkg conf
LHC.configure verbosity hcPath Nothing ghcConf
UHC -> UHC.configure verbosity hcPath hcPkg conf
HaskellSuite {} -> HaskellSuite.configure verbosity hcPath hcPkg conf
_ -> die "Unknown compiler"
return (comp, fromMaybe buildPlatform maybePlatform, programsConfig)
-- Ideally we would like to not have separate configCompiler* and
-- configCompiler*Ex sets of functions, but there are many custom setup scripts
-- in the wild that are using them, so the versions with old types are kept for
-- backwards compatibility. Platform was added to the return triple in 1.18.
{-# DEPRECATED configCompiler
"'configCompiler' is deprecated. Use 'configCompilerEx' instead." #-}
-- | Old-style compiler configuration kept for custom Setup scripts in the
-- wild; identical to 'configCompilerEx' but drops the 'Platform' from the
-- result.
configCompiler :: Maybe CompilerFlavor -> Maybe FilePath -> Maybe FilePath
               -> ProgramConfiguration -> Verbosity
               -> IO (Compiler, ProgramConfiguration)
configCompiler mFlavor hcPath hcPkg conf verbosity = do
    (comp, _platform, progConf) <- configCompilerEx mFlavor hcPath hcPkg conf verbosity
    return (comp, progConf)
{-# DEPRECATED configCompilerAux
"configCompilerAux is deprecated. Use 'configCompilerAuxEx' instead." #-}
-- | Old-style variant of 'configCompilerAuxEx' that omits the 'Platform'.
configCompilerAux :: ConfigFlags
                  -> IO (Compiler, ProgramConfiguration)
configCompilerAux cfg = do
    (comp, _platform, progConf) <- configCompilerAuxEx cfg
    return (comp, progConf)
-- -----------------------------------------------------------------------------
-- Making the internal component graph
-- | Build the dependency graph between the enabled components of the
-- package: an executable is depended on when it appears as a build-tool,
-- and the library when it appears among the internal package deps.
-- Returns each component with its component dependencies, or the cycle
-- of component names if the graph is cyclic.
mkComponentsGraph :: PackageDescription
-> [PackageId]
-> Either [ComponentName]
[(Component, [ComponentName])]
mkComponentsGraph pkg_descr internalPkgDeps =
let graph = [ (c, componentName c, componentDeps c)
| c <- pkgEnabledComponents pkg_descr ]
in case checkComponentsCyclic graph of
Just ccycle -> Left [ cname | (_,cname,_) <- ccycle ]
Nothing -> Right [ (c, cdeps) | (c, _, cdeps) <- graph ]
where
-- The dependencies for the given component
componentDeps component =
[ CExeName toolname | Dependency (PackageName toolname) _
<- buildTools bi
, toolname `elem` map exeName
(executables pkg_descr) ]
++ [ CLibName | Dependency pkgname _ <- targetBuildDepends bi
, pkgname `elem` map packageName internalPkgDeps ]
where
bi = componentBuildInfo component
-- | Report a cyclic dependency between components and abort.
--
-- The cycle is rendered as @'c1' depends on 'c2' depends on ... depends
-- on 'c1'@, repeating the first component at the end to close the loop.
reportComponentCycle :: [ComponentName] -> IO a
reportComponentCycle cnames =
    die $ "Components in the package depend on each other in a cyclic way:\n "
       ++ intercalate " depends on "
          [ "'" ++ showComponentName cname ++ "'"
          | cname <- closedCycle ]
  where
    -- Close the loop by repeating the first element; avoid the partial
    -- 'head' of the original so an (impossible) empty cycle list cannot
    -- crash the error reporter itself.
    closedCycle = case cnames of
                    []      -> []
                    (c : _) -> cnames ++ [c]
-- | Construct the per-component 'ComponentLocalBuildInfo's: for each
-- component, the package deps it actually uses, its renamings, and (for
-- the library) the exposed modules, resolved re-exports and signatures.
-- Fails with the re-export resolution errors if any re-export is bad.
mkComponentsLocalBuildInfo :: InstalledPackageIndex
-> PackageDescription
-> [PackageId] -- internal package deps
-> [InstalledPackageInfo] -- external package deps
-> [InstalledPackageInfo] -- hole package deps
-> Map ModuleName (InstalledPackageInfo, ModuleName)
-> PackageKey
-> [(Component, [ComponentName])]
-> Either [(ModuleReexport, String)] -- errors
[(ComponentName, ComponentLocalBuildInfo,
[ComponentName])] -- ok
mkComponentsLocalBuildInfo installedPackages pkg_descr
internalPkgDeps externalPkgDeps holePkgDeps hole_insts
pkg_key graph =
sequence
[ do clbi <- componentLocalBuildInfo c
return (componentName c, clbi, cdeps)
| (c, cdeps) <- graph ]
where
-- The allPkgDeps contains all the package deps for the whole package
-- but we need to select the subset for this specific component.
-- we just take the subset for the package names this component
-- needs. Note, this only works because we cannot yet depend on two
-- versions of the same package.
componentLocalBuildInfo component =
case component of
CLib lib -> do
-- Plain exposed modules carry no re-export/signature info;
-- exposed signatures link to their instantiation, if any.
let exports = map (\n -> Installed.ExposedModule n Nothing Nothing)
(PD.exposedModules lib)
esigs = map (\n -> Installed.ExposedModule n Nothing
(fmap (\(pkg,m) -> Installed.OriginalModule
(Installed.installedPackageId pkg) m)
(Map.lookup n hole_insts)))
(PD.exposedSignatures lib)
reexports <- resolveModuleReexports installedPackages
(packageId pkg_descr)
externalPkgDeps lib
return LibComponentLocalBuildInfo {
componentPackageDeps = cpds,
componentLibraries = [ LibraryName ("HS" ++ packageKeyLibraryName (package pkg_descr) pkg_key) ],
componentPackageRenaming = cprns,
componentExposedModules = exports ++ reexports ++ esigs
}
CExe _ ->
return ExeComponentLocalBuildInfo {
componentPackageDeps = cpds,
componentPackageRenaming = cprns
}
CTest _ ->
return TestComponentLocalBuildInfo {
componentPackageDeps = cpds,
componentPackageRenaming = cprns
}
CBench _ ->
return BenchComponentLocalBuildInfo {
componentPackageDeps = cpds,
componentPackageRenaming = cprns
}
where
bi = componentBuildInfo component
dedup = Map.toList . Map.fromList
cpds = if newPackageDepsBehaviour pkg_descr
then dedup $
[ (Installed.installedPackageId pkg, packageId pkg)
| pkg <- selectSubset bi externalPkgDeps ]
++ [ (inplacePackageId pkgid, pkgid)
| pkgid <- selectSubset bi internalPkgDeps ]
++ [ (Installed.installedPackageId pkg, packageId pkg)
| pkg <- holePkgDeps ]
else [ (Installed.installedPackageId pkg, packageId pkg)
| pkg <- externalPkgDeps ]
cprns = if newPackageDepsBehaviour pkg_descr
then Map.unionWith mappend
-- We need hole dependencies passed to GHC, so add them here
-- (but note that they're fully thinned out. If they
-- appeared legitimately the monoid instance will
-- fill them out.)
(Map.fromList [(packageName pkg, mempty) | pkg <- holePkgDeps])
(targetBuildRenaming bi)
-- Hack: if we have old package-deps behavior, it's impossible
-- for non-default renamings to be used, because the Cabal
-- version is too early. This is good, because while all the
-- deps were bundled up in buildDepends, we didn't do this for
-- renamings, so it's not even clear how to get the merged
-- version. So just assume that all of them are the default..
else Map.fromList (map (\(_,pid) -> (packageName pid, defaultRenaming)) cpds)
selectSubset :: Package pkg => BuildInfo -> [pkg] -> [pkg]
selectSubset bi pkgs =
[ pkg | pkg <- pkgs, packageName pkg `elem` names bi ]
names bi = [ name | Dependency name _ <- targetBuildDepends bi ]
-- | Given the author-specified re-export declarations from the .cabal file,
-- resolve them to the form that we need for the package database.
--
-- An invariant of the package database is that we always link the re-export
-- directly to its original defining location (rather than indirectly via a
-- chain of re-exporting packages).
--
resolveModuleReexports :: InstalledPackageIndex
-> PackageId
-> [InstalledPackageInfo]
-> Library
-> Either [(ModuleReexport, String)] -- errors
[Installed.ExposedModule] -- ok
resolveModuleReexports installedPackages srcpkgid externalPkgDeps lib =
case partitionEithers (map resolveModuleReexport (PD.reexportedModules lib)) of
([], ok) -> Right ok
(errs, _) -> Left errs
where
-- A mapping from visible module names to their original defining
-- module name. We also record the package name of the package which
-- *immediately* provided the module (not the original) to handle if the
-- user explicitly says which build-depends they want to reexport from.
visibleModules :: Map ModuleName [(PackageName, Installed.ExposedModule)]
visibleModules =
Map.fromListWith (++) $
[ (Installed.exposedName exposedModule, [(exportingPackageName,
exposedModule)])
-- The package index here contains all the indirect deps of the
-- package we're configuring, but we want just the direct deps
| let directDeps = Set.fromList (map Installed.installedPackageId externalPkgDeps)
, pkg <- PackageIndex.allPackages installedPackages
, Installed.installedPackageId pkg `Set.member` directDeps
, let exportingPackageName = packageName pkg
, exposedModule <- visibleModuleDetails pkg
]
-- Modules defined (or re-exported) by this very package are also
-- visible targets for a re-export declaration.
++ [ (visibleModuleName, [(exportingPackageName, exposedModule)])
| visibleModuleName <- PD.exposedModules lib
++ otherModules (libBuildInfo lib)
, let exportingPackageName = packageName srcpkgid
definingModuleName = visibleModuleName
-- we don't know the InstalledPackageId of this package yet
-- we will fill it in later, before registration.
definingPackageId = InstalledPackageId ""
originalModule = Installed.OriginalModule definingPackageId
definingModuleName
exposedModule = Installed.ExposedModule visibleModuleName
(Just originalModule)
Nothing
]
-- All the modules exported from this package and their defining name and
-- package (either defined here in this package or re-exported from some
-- other package). Return an ExposedModule because we want to hold onto
-- signature information.
visibleModuleDetails :: InstalledPackageInfo -> [Installed.ExposedModule]
visibleModuleDetails pkg = do
exposedModule <- Installed.exposedModules pkg
case Installed.exposedReexport exposedModule of
-- The first case is the modules actually defined in this package.
-- In this case the reexport will point to this package.
Nothing -> return exposedModule { Installed.exposedReexport =
Just (Installed.OriginalModule (Installed.installedPackageId pkg)
(Installed.exposedName exposedModule)) }
-- On the other hand, a visible module might actually be itself
-- a re-export! In this case, the re-export info for the package
-- doing the re-export will point us to the original defining
-- module name and package, so we can reuse the entry.
Just _ -> return exposedModule
-- Resolve a single re-export declaration against the visible modules,
-- reporting a human-readable error when it is missing or ambiguous.
resolveModuleReexport reexport@ModuleReexport {
moduleReexportOriginalPackage = moriginalPackageName,
moduleReexportOriginalName = originalName,
moduleReexportName = newName
} =
let filterForSpecificPackage =
case moriginalPackageName of
Nothing -> id
Just originalPackageName ->
filter (\(pkgname, _) -> pkgname == originalPackageName)
matches = filterForSpecificPackage
(Map.findWithDefault [] originalName visibleModules)
in
case (matches, moriginalPackageName) of
((_, exposedModule):rest, _)
-- TODO: Refine this check for signatures
| all (\(_, exposedModule') -> Installed.exposedReexport exposedModule
== Installed.exposedReexport exposedModule') rest
-> Right exposedModule { Installed.exposedName = newName }
([], Just originalPackageName)
-> Left $ (,) reexport
$ "The package " ++ display originalPackageName
++ " does not export a module " ++ display originalName
([], Nothing)
-> Left $ (,) reexport
$ "The module " ++ display originalName
++ " is not exported by any suitable package (this package "
++ "itself nor any of its 'build-depends' dependencies)."
(ms, _)
-> Left $ (,) reexport
$ "The module " ++ display originalName ++ " is exported "
++ "by more than one package ("
++ intercalate ", " [ display pkgname | (pkgname,_) <- ms ]
++ ") and so the re-export is ambiguous. The ambiguity can "
++ "be resolved by qualifying by the package name. The "
++ "syntax is 'packagename:moduleName [as newname]'."
-- Note: if in future Cabal allows directly depending on multiple
-- instances of the same package (e.g. backpack) then an additional
-- ambiguity case is possible here: (_, Just originalPackageName)
-- with the module being ambiguous despite being qualified by a
-- package name. Presumably by that time we'll have a mechanism to
-- qualify the instance we're referring to.
-- | Report all module re-export resolution problems and abort.
reportModuleReexportProblems :: [(ModuleReexport, String)] -> IO a
reportModuleReexportProblems reexportProblems =
    die (unlines (map describe reexportProblems))
  where
    describe (reexport, msg) =
      "Problem with the module re-export '" ++ display reexport ++ "': " ++ msg
-- -----------------------------------------------------------------------------
-- Testing C lib and header dependencies
-- Try to build a test C program which includes every header and links every
-- lib. If that fails, try to narrow it down by preprocessing (only) and linking
-- with individual headers and libs. If none is the obvious culprit then give a
-- generic error message.
-- TODO: produce a log file from the compiler errors, if any.
-- | Check that the C headers and libraries the package declares can be
-- compiled and linked against, by building a tiny probe program.  On
-- failure, narrow down the offending header (preprocess vs compile) and
-- the missing libraries, then die with an explanatory message.
checkForeignDeps :: PackageDescription -> LocalBuildInfo -> Verbosity -> IO ()
checkForeignDeps pkg lbi verbosity = do
ifBuildsWith allHeaders (commonCcArgs ++ makeLdArgs allLibs) -- I'm feeling
-- lucky
(return ())
(do missingLibs <- findMissingLibs
missingHdr <- findOffendingHdr
explainErrors missingHdr missingLibs)
where
allHeaders = collectField PD.includes
allLibs = collectField PD.extraLibs
-- Run one probe build and branch on whether it succeeded.
ifBuildsWith headers args success failure = do
ok <- builds (makeProgram headers) args
if ok then success else failure
-- Bisect over prefixes of the header list; 'Left h' means header h
-- fails even to preprocess, 'Right h' means it preprocesses but
-- fails to compile.
findOffendingHdr =
ifBuildsWith allHeaders ccArgs
(return Nothing)
(go . tail . inits $ allHeaders)
where
go [] = return Nothing -- cannot happen
go (hdrs:hdrsInits) =
-- Try just preprocessing first
ifBuildsWith hdrs cppArgs
-- If that works, try compiling too
(ifBuildsWith hdrs ccArgs
(go hdrsInits)
(return . Just . Right . last $ hdrs))
(return . Just . Left . last $ hdrs)
cppArgs = "-E":commonCppArgs -- preprocess only
ccArgs = "-c":commonCcArgs -- don't try to link
findMissingLibs = ifBuildsWith [] (makeLdArgs allLibs)
(return [])
(filterM (fmap not . libExists) allLibs)
libExists lib = builds (makeProgram []) (makeLdArgs [lib])
commonCppArgs = platformDefines lbi
++ [ "-I" ++ autogenModulesDir lbi ]
++ [ "-I" ++ dir | dir <- collectField PD.includeDirs ]
++ ["-I."]
++ collectField PD.cppOptions
++ collectField PD.ccOptions
++ [ "-I" ++ dir
| dep <- deps
, dir <- Installed.includeDirs dep ]
++ [ opt
| dep <- deps
, opt <- Installed.ccOptions dep ]
commonCcArgs = commonCppArgs
++ collectField PD.ccOptions
++ [ opt
| dep <- deps
, opt <- Installed.ccOptions dep ]
commonLdArgs = [ "-L" ++ dir | dir <- collectField PD.extraLibDirs ]
++ collectField PD.ldOptions
++ [ "-L" ++ dir
| dep <- deps
, dir <- Installed.libraryDirs dep ]
--TODO: do we also need dependent packages' ld options?
makeLdArgs libs = [ "-l"++lib | lib <- libs ] ++ commonLdArgs
-- The probe program: include every candidate header, empty main.
makeProgram hdrs = unlines $
[ "#include \"" ++ hdr ++ "\"" | hdr <- hdrs ] ++
["int main(int argc, char** argv) { return 0; }"]
collectField f = concatMap f allBi
allBi = allBuildInfo pkg
deps = PackageIndex.topologicalOrder (installedPkgs lbi)
-- Compile the probe with gcc in a temp dir; any IO error or non-zero
-- exit is treated as "does not build".
builds program args = do
tempDir <- getTemporaryDirectory
withTempFile tempDir ".c" $ \cName cHnd ->
withTempFile tempDir "" $ \oNname oHnd -> do
hPutStrLn cHnd program
hClose cHnd
hClose oHnd
_ <- rawSystemProgramStdoutConf verbosity
gccProgram (withPrograms lbi) (cName:"-o":oNname:args)
return True
`catchIO` (\_ -> return False)
`catchExit` (\_ -> return False)
explainErrors Nothing [] = return () -- should be impossible!
explainErrors _ _
| isNothing . lookupProgram gccProgram . withPrograms $ lbi
= die $ unlines $
[ "No working gcc",
"This package depends on foreign library but we cannot "
++ "find a working C compiler. If you have it in a "
++ "non-standard location you can use the --with-gcc "
++ "flag to specify it." ]
explainErrors hdr libs = die $ unlines $
[ if plural
then "Missing dependencies on foreign libraries:"
else "Missing dependency on a foreign library:"
| missing ]
++ case hdr of
Just (Left h) -> ["* Missing (or bad) header file: " ++ h ]
_ -> []
++ case libs of
[] -> []
[lib] -> ["* Missing C library: " ++ lib]
_ -> ["* Missing C libraries: " ++ intercalate ", " libs]
++ [if plural then messagePlural else messageSingular | missing]
++ case hdr of
Just (Left _) -> [ headerCppMessage ]
Just (Right h) -> [ (if missing then "* " else "")
++ "Bad header file: " ++ h
, headerCcMessage ]
_ -> []
where
plural = length libs >= 2
-- Is there something missing? (as opposed to broken)
missing = not (null libs)
|| case hdr of Just (Left _) -> True; _ -> False
messageSingular =
"This problem can usually be solved by installing the system "
++ "package that provides this library (you may need the "
++ "\"-dev\" version). If the library is already installed "
++ "but in a non-standard location then you can use the flags "
++ "--extra-include-dirs= and --extra-lib-dirs= to specify "
++ "where it is."
messagePlural =
"This problem can usually be solved by installing the system "
++ "packages that provide these libraries (you may need the "
++ "\"-dev\" versions). If the libraries are already installed "
++ "but in a non-standard location then you can use the flags "
++ "--extra-include-dirs= and --extra-lib-dirs= to specify "
++ "where they are."
headerCppMessage =
"If the header file does exist, it may contain errors that "
++ "are caught by the C compiler at the preprocessing stage. "
++ "In this case you can re-run configure with the verbosity "
++ "flag -v3 to see the error messages."
headerCcMessage =
"The header file contains a compile error. "
++ "You can re-run configure with the verbosity flag "
++ "-v3 to see the error messages from the C compiler."
-- | Output package check warnings and errors. Exit if any errors.
checkPackageProblems :: Verbosity
                     -> GenericPackageDescription
                     -> PackageDescription
                     -> IO ()
checkPackageProblems verbosity gpkg pkg = do
    ioChecks <- checkPackageFiles pkg "."
    let allChecks  = checkPackage gpkg (Just pkg) ++ ioChecks
        errors     = [ e | PackageBuildImpossible e <- allChecks ]
        warnings   = [ w | PackageBuildWarning w <- allChecks ]
    case errors of
      [] -> mapM_ (warn verbosity) warnings
      _  -> die (intercalate "\n\n" errors)
-- | Perform checks to determine whether a relocatable build is allowed
checkRelocatable :: Verbosity
-> PackageDescription
-> LocalBuildInfo
-> IO ()
checkRelocatable verbosity pkg lbi
= sequence_ [ checkOS
, checkCompiler
, packagePrefixRelative
, depsPrefixRelative
]
where
-- Check if the OS support relocatable builds.
--
-- If you add new OS' to this list, and your OS supports dynamic libraries
-- and RPATH, make sure you add your OS to RPATH-support list of:
-- Distribution.Simple.GHC.getRPaths
checkOS
= unless (os `elem` [ OSX, Linux ])
$ die $ "Operating system: " ++ display os ++
", does not support relocatable builds"
where
(Platform _ os) = hostPlatform lbi
-- Check if the Compiler support relocatable builds
checkCompiler
= unless (compilerFlavor comp `elem` [ GHC ])
$ die $ "Compiler: " ++ show comp ++
", does not support relocatable builds"
where
comp = compiler lbi
-- Check if all the install dirs are relative to same prefix
packagePrefixRelative
= unless (relativeInstallDirs installDirs)
$ die $ "Installation directories are not prefix_relative:\n" ++
show installDirs
where
installDirs = absoluteInstallDirs pkg lbi NoCopyDest
p = prefix installDirs
-- Every install dir must have the prefix as a literal path prefix.
relativeInstallDirs (InstallDirs {..}) =
all isJust
(fmap (stripPrefix p)
[ bindir, libdir, dynlibdir, libexecdir, includedir, datadir
, docdir, mandir, htmldir, haddockdir, sysconfdir] )
-- Check if the library dirs of the dependencies that are in the package
-- database to which the package is installed are relative to the
-- prefix of the package
depsPrefixRelative = do
pkgr <- GHC.pkgRoot verbosity lbi (last (withPackageDB lbi))
mapM_ (doCheck pkgr) ipkgs
where
-- Only dependencies living in the same package root are checked.
doCheck pkgr ipkg
| maybe False (== pkgr) (Installed.pkgRoot ipkg)
= mapM_ (\l -> when (isNothing $ stripPrefix p l) (die (msg l)))
(Installed.libraryDirs ipkg)
| otherwise
= return ()
installDirs = absoluteInstallDirs pkg lbi NoCopyDest
p = prefix installDirs
ipkgs = PackageIndex.allPackages (installedPkgs lbi)
msg l = "Library directory of a dependency: " ++ show l ++
"\nis not relative to the installation prefix:\n" ++
show p
| typelead/epm | Cabal/Distribution/Simple/Configure.hs | bsd-3-clause | 80,673 | 1 | 26 | 25,702 | 14,071 | 7,415 | 6,656 | 1,188 | 17 |
module Main where
import Lib
import Data.Time.Clock
import Data.Time.Calendar
import System.Environment
import Data.Monoid
import Network.Mail.Mime hiding (mailFrom, mailTo)
import Options.Applicative
-- | Command-line options parsed from the program's arguments.
data CLIOptions = CLIOptions
{ filename :: String -- ^ path of the Markdown file holding the reminders
, mailTo :: String -- ^ address the reminder mails are sent to
, mailFrom :: String -- ^ sender address used in the reminder mails
, mailFromName :: String -- ^ sender display name used in the reminder mails
}
-- | Parser mapping @--filename@, @--to@, @--from@ and @--name@ onto
-- 'CLIOptions'.
--
-- NOTE(review): both @--filename@ and @--from@ declare the short flag
-- @-f@; optparse-applicative will only honour one of them — confirm
-- which short flag is intended for @--from@.
options :: Parser CLIOptions
options = CLIOptions
<$> strOption
( long "filename"
<> short 'f'
<> metavar "FILE"
<> help "Filename of Markdown-File"
)
<*> strOption
( long "to"
<> short 't'
<> metavar "TO"
<> help "Mail-address to send the reminder to"
)
<*> strOption
( long "from"
<> short 'f'
<> metavar "FROM"
<> help "Mail-address of the reminder"
)
<*> strOption
( long "name"
<> short 'n'
<> metavar "NAME"
<> help "Name in the reminder-mails"
)
-- | Full command-line parser description, including @--help@ support
-- and the program synopsis shown in the usage text.
opts :: ParserInfo CLIOptions
opts = info (options <**> helper)
  ( fullDesc
 <> progDesc "Send reminder from FILE to mail TO using FROM and NAME as identification for the sender"
 <> header "md2mail - a small program for sending out reminder-mails from markdown"
  )
-- | Parse the CLI arguments, read the Markdown reminder file, keep only
-- the entries whose (month, day) matches today, and hand each resulting
-- mail to the local sendmail via 'renderSendMail'.
main :: IO ()
main = do
args <- execParser opts
md <- readFile $ filename args
-- Only the calendar day of the current UTC time is needed.
(UTCTime today _) <- getCurrentTime
-- getMails (from Lib) appears to yield (due-day, Mail) pairs inside a
-- functor — assumption, TODO confirm against Lib's type signature.
sequence_ $ sequence . fmap (renderSendMail . snd) . filter (filterToday today) <$> getMails md (mailTo args) (mailFrom args) (mailFromName args)
-- | Keep only reminders whose month and day-of-month match the given
-- date; the year is deliberately ignored, so entries recur annually.
filterToday :: Day -> (Day, Mail) -> Bool
filterToday today (reminderDay, _) =
    monthAndDay today == monthAndDay reminderDay
  where
    -- Project a 'Day' onto its (month, day-of-month) pair.
    monthAndDay d = let (_year, m, dom) = toGregorian d in (m, dom)
| Drezil/markdown2reminder | app/Main.hs | bsd-3-clause | 1,938 | 0 | 14 | 753 | 469 | 240 | 229 | 49 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Client255.Client255
where
import System.IO
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Web.Authenticate.OAuth as OAuth
import Network.HTTP.Conduit
import Data.Conduit
import qualified Data.Conduit.Binary as CB
import Data.Conduit.Attoparsec
import Data.Aeson
import qualified Secrets
-- | OAuth 1.0a configuration for the Twitter API: endpoint URLs,
-- HMAC-SHA1 signing, and the consumer key\/secret taken from the local
-- 'Secrets' module.
oauth :: OAuth.OAuth
oauth = OAuth.newOAuth
{ OAuth.oauthServerName = "twitter"
, OAuth.oauthRequestUri = "https://api.twitter.com/oauth/request_token"
, OAuth.oauthAccessTokenUri = "https://api.twitter.com/oauth/access_token"
, OAuth.oauthAuthorizeUri = "https://api.twitter.com/oauth/authorize"
, OAuth.oauthSignatureMethod = OAuth.HMACSHA1
, OAuth.oauthConsumerKey = Secrets.consumerKey
, OAuth.oauthConsumerSecret = Secrets.consumerSecret
, OAuth.oauthVersion = OAuth.OAuth10a
}
-- | Build a full Twitter REST API v1.1 URL from an endpoint path.
restAPI :: String -> String
restAPI = ("https://api.twitter.com/1.1/" ++)
-- | Interactively obtain an access-token credential: fetch a temporary
-- credential, print the authorize URL, read the PIN the user obtained
-- from the browser (from stdin), and exchange it for a token credential.
getCred :: IO Credential
getCred = do
tmp <- withManager $ getTemporaryCredential oauth
-- Prompts are written to stderr; stdin carries only the PIN.
hPutStrLn stderr $ "URL: " ++ authorizeUrl oauth tmp
hPutStr stderr $ "Enter PIN: "
hFlush stderr
pin <- BS.getLine
let tmp' = injectVerifier pin tmp
cred <- withManager $ getTokenCredential oauth tmp'
return cred
-- | Conduit that incrementally parses a stream of JSON values, tagging
-- each parsed 'Value' with its position range in the input.
jsonParser :: Conduit BS.ByteString (ResourceT IO) (PositionRange, Value)
jsonParser = conduitParser json
-- | Post a status update (tweet) with the given text and return the
-- streaming HTTP response.
postData :: Credential -> T.Text -> IO (Response (ResumableSource (ResourceT IO) BS.ByteString))
postData cred postString = withManager $ \manager -> do
initReq <- parseUrl $ restAPI "statuses/update.json"
-- UTF-8 encode the tweet text before form-encoding it as "status".
let postText = TE.encodeUtf8 postString
let request = urlEncodedBody [("status", postText)] initReq
signed <- signOAuth oauth cred request
http signed manager
-- | Open the signed Twitter user stream; the caller supplies the
-- 'Manager' and owns the 'ResourceT' scope in which the streaming
-- response body is consumed.
getUserStream :: Credential -> Manager -> ResourceT IO (Response (ResumableSource (ResourceT IO) BS.ByteString))
getUserStream cred manager = do
initReq <- parseUrl "https://userstream.twitter.com/1.1/user.json"
req <- signOAuth oauth cred initReq
http req manager
-- | Fetch the authenticated user's home timeline (latest 200 tweets)
-- and return the raw JSON response body as a strict ByteString.
getHomeTimeline :: Credential -> IO (BS.ByteString)
getHomeTimeline cred =
    withManager $ \manager -> do
      request   <- parseUrl (restAPI "statuses/home_timeline.json?count=200")
      signedReq <- signOAuth oauth cred request
      response  <- httpLbs signedReq manager
      return (LBS.toStrict (responseBody response))
-- | Download up to 200 of the user's favourites and stream the raw JSON
-- response into the file @\/tmp\/favorites.json@.
getFavorites :: Credential -> IO ()
getFavorites cred = do
withManager $ \manager -> do
initReq <- parseUrl $ restAPI "favorites/list.json?count=200"
req <- signOAuth oauth cred initReq
response <- http req manager
(responseBody response) $$+- CB.sinkFile "/tmp/favorites.json"
| c000/twitter-client255 | Client255/Client255.hs | bsd-3-clause | 2,808 | 0 | 15 | 502 | 741 | 380 | 361 | 64 | 1 |
module SlashSpec where
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck hiding ((.&.))
import Control.Monad
import Data.Ratio
import Data.Ratio.Slash
-- | Round-trip properties for the 'Slash' Read/Show instances, split
-- into genuinely fractional values and whole integers.
spec :: Spec
spec = do
  describe "rational" $ do
    prop "read" $
      \n d -> nontrivial n d ==>
        ((read (join [show n, "/", show d])) :: Slash Integer)
          `shouldBe` Slash (n % d)
    prop "both ways" $
      \n d -> nontrivial n d ==>
        (read . show $ Slash (n % d) :: Slash Integer)
          `shouldBe` Slash (n % d)
  describe "integer" $ do
    -- The original wrapped these in a vacuous 'True ==>'; QuickCheck
    -- never discards on a constant-True implication, so it is dropped.
    prop "read" $
      \n -> ((read (show n)) :: Slash Integer) `shouldBe` Slash (n % 1)
    prop "both ways" $
      \n -> (read . show $ Slash (n % 1) :: Slash Integer) `shouldBe` Slash (n % 1)
  where
    -- Guard for the rational cases: 'd > 1' already implies 'd /= 0'
    -- (the redundant zero check is gone), and the fraction must not
    -- reduce to a unit denominator.
    nontrivial n d = n /= 0 && d > 1 && denominator (n % d) > 1
| narumij/matrix-as-xyz | test/SlashSpec.hs | bsd-3-clause | 868 | 0 | 20 | 268 | 405 | 215 | 190 | 23 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.