code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
module Text.Md.HtmlParser (
HtmlParseContext(..)
, pBlockElement
, pInlineElement
, pHtmlEscape
, pHtmlEscapeForce
)
where
import Control.Monad
import Debug.Trace
import System.IO
import qualified Text.HTML.TagSoup as TS
import Text.Md.MdParserDef
import Text.Md.ParseUtils
import Text.Parsec (Parsec, ParsecT, Stream, (<?>),
(<|>))
import qualified Text.Parsec as P
import qualified Text.ParserCombinators.Parsec as P hiding (try)
-- | ParseContext used to handle html input.
--
-- The single field supplies the parser run on the text appearing between
-- HTML tags, producing values of type @a@ (typically 'Inline').
data HtmlParseContext a = HtmlParseContext { parserText :: Parsec String ParseContext a -- ^ parser used for text framed by html tags
                                           }
-- | Parse html tags such as '<div><ul><li>list1</li><li>list2</li></ul></div>'
-- and wrap the result as a block-level node. No check is made that the
-- outermost tag is actually a block element.
pBlockElement :: HtmlParseContext Inline -> Parsec String ParseContext Block
pBlockElement context = fmap BlockHtml (pHtmlElement context)
-- | Parse an html element appearing in inline position.
-- Same machinery as 'pBlockElement', but the result is wrapped in
-- 'InlineHtml' instead of 'BlockHtml'.
pInlineElement :: HtmlParseContext Inline -> Parsec String ParseContext Inline
pInlineElement context = fmap InlineHtml (pHtmlElement context)
-- Parse one complete HTML element: the leading tag and, when that tag can
-- enclose content, the element interior up to the matching close tag.
-- Backtracks on failure.
pHtmlElement context = P.try $ do
  (tagStr, tagMaybe) <- pHtmlTag
  case tagMaybe of
    -- An opening tag: keep consuming until the tag stack unwinds.
    Just tag -> (Str tagStr :) <$> pHtmlElementInside [tag] context
    -- A self-contained tag (e.g. '<div />'): nothing further to consume.
    Nothing  -> return [Str tagStr]
-- | Consume the interior of an HTML element, tracking currently-open tags
-- on a stack; returns once the stack is exhausted (the outermost tag was
-- closed). Unused tag payloads are matched with wildcards to avoid
-- unused-binding warnings, and an explicit catch-all keeps unexpected tag
-- constructors from crashing with a pattern-match failure.
pHtmlElementInside [] context = return [Str ""]
pHtmlElementInside stack context = P.try $ do
  -- Text up to the next '<', parsed with the context's text parser.
  inlines <- P.many (P.notFollowedBy (P.char '<') >> parserText context)
  (tagStr, tagMaybe) <- pHtmlTag
  case tagMaybe of
    Just tag -> case tag of
      -- A nested element opens: push it.
      TS.TagOpen _ _  -> render inlines tagStr (tag:stack) context
      -- A close tag: pop. 'tail' is safe here because the empty-stack case
      -- is handled by the first equation.
      TS.TagClose _   -> render inlines tagStr (tail stack) context
      -- Comments do not affect nesting.
      TS.TagComment _ -> render inlines tagStr stack context
      -- Any other tag shape: treat as neutral content (was a crash before).
      _               -> render inlines tagStr stack context
    -- Self-contained tag: nesting unchanged.
    Nothing -> render inlines tagStr stack context
-- | Parse one tag bracketed by '<' and '>'.
-- Returns the rendered tag text plus @Just tag@ when the tag can enclose
-- content (e.g. '<div>'), or @Nothing@ for self-contained tags
-- (e.g. '<div />').
--
-- The case expression previously matched only lengths 1 and 2 and crashed
-- with a pattern-match failure on anything else; unexpected shapes now fail
-- the parser instead, so enclosing 'P.try' combinators can backtrack.
pHtmlTag = do
  inside <- P.between (P.char '<') (P.char '>') (P.many1 (P.noneOf "<>"))
  let tags = TS.parseTags $ "<" ++ inside ++ ">"
  case length tags of
    1 -> return (TS.renderTags tags, Just (head tags)) -- when the tag has any inside elements. e.g. '<div>'
    2 -> return (TS.renderTags tags, Nothing)          -- when the tag has no inside elements. e.g. '<div />'
    _ -> P.parserFail ("unrecognized tag: <" ++ inside ++ ">")
render inlines tagStr stack context = liftM3 concatTags3 (return inlines) (return [Str tagStr]) (pHtmlElementInside stack context)
-- Concatenate two inline fragments.
concatTags2 xs ys = xs ++ ys
-- Concatenate three inline fragments.
concatTags3 xs ys zs = concat [xs, ys, zs]
-- | Perform html escaping.
-- There are more characters which should be escaped (ref. Text.Html.TagSoup.Entity.escapeXML), but
-- this function targets only four characters, '&', '<', '>', and '"'.
--
-- If the upcoming input already contains an escaped phrase (the 'snd' of a
-- pair in 'escapePair'), it is consumed verbatim rather than escaped a
-- second time; otherwise a raw character (the 'fst') is replaced with its
-- escaped form.
pHtmlEscape :: Parsec String ParseContext Inline
pHtmlEscape = do
      -- Parser that matches an already-escaped phrase.
  let pEscapedString = P.choice (map (P.try . P.string . snd) escapePair)
      -- One parser per pair: consume the raw text, emit the escaped text.
      escapes = map escape escapePair
      escape (raw, escaped) = P.string raw *> return (Str escaped)
  -- Peek ahead without consuming input to decide which branch applies.
  isEscaped <- P.optionMaybe $ P.try $ P.lookAhead pEscapedString
  case isEscaped of
    -- Already escaped: pass the phrase through unchanged.
    Just str -> Str <$> P.try pEscapedString
    -- Raw character: replace it with its escaped form.
    Nothing -> P.try (P.choice escapes <?> "html-parser")
-- | Perform html escaping unconditionally.
-- Unlike 'pHtmlEscape', this escapes '&' even when it begins an
-- already-escaped phrase such as '&lt;'.
pHtmlEscapeForce :: Parsec String ParseContext Inline
pHtmlEscapeForce = P.try (P.choice replacements <?> "html-parser")
  where
    replacements = map toParser escapePair
    -- Consume the raw text, yield its escaped replacement.
    toParser (raw, escaped) = Str escaped <$ P.string raw
escapePair = [("&", "&"), ("<", "<"), (">", ">"), ("\"", """)]
| tiqwab/md-parser | src/Text/Md/HtmlParser.hs | bsd-3-clause | 3,792 | 0 | 16 | 865 | 1,012 | 527 | 485 | 62 | 4 |
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Quiz.Web ( quizApplication ) where
import Prelude hiding (id, (.))
import Control.Arrow
import Control.Category
import Quiz.Web.Prelude
import Servant
import Servant.Server
import qualified Data.Map as Map
import qualified Data.Text as Text
import qualified Data.Text.Lazy as Text (toStrict, fromStrict)
import Web.Cookie
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as BSC
import qualified Data.ByteString.Base64 as B64
import qualified Data.Binary as B
import Control.Concurrent.MVar
import Control.Concurrent.STM
import qualified Data.IntMap as IntMap
import Data.Text.Encoding
import Data.Monoid
import Servant.Utils.Links
import Network.HTTP.Types.URI
import Data.Maybe
import Text.Blaze.Renderer.Utf8
import Text.Markdown
import Data.Default
-- import Quiz.Web.Home
-- import Quiz.Web.Quiz
-- import Quiz.Web.Style
-- class Help h where
-- help :: h -> Text -> Maybe Text -> Handler Html
--
-- instance Monad m => Help (ReaderT a m) where
-- help m = run
-- NOTE(review): this instance looks unfinished -- 'route' recurses with a
-- bare 'Proxy' and ignores both 'context' and 'server', so as written it
-- cannot do useful routing. Left untouched pending clarification of how
-- 'Prefix' is meant to distribute over ':<|>'.
instance (HasServer a, HasServer b) => HasServer (Prefix a (x :<|> y)) where
  -- Distribute the prefix over both alternatives of the API.
  type ServerT (Prefix a (x :<|> y)) m = (a :> ServerT x m) :<|> (Prefix a (ServerT y m))
  route Proxy context server =
    route (Proxy )
-- | Execute the core QuickQuiz Wai application.
--
-- Wires the shared mutable state (session map, quiz map, RNG) into the
-- Servant server, and layers session-cookie handling over the WAI
-- request/response cycle.
quizApplication :: TVar SessionMap -> TVar (Map Text Quiz) -> TVar StdGen -> Application
quizApplication sessionMap quizMap stdGen req respond = do
  -- Create a new MVar for storing Set-Cookie response hints
  setSessionMVar <- newEmptyMVar
  -- Define a new respond function which accounts for session cookies: if a
  -- handler deposited a session id into the MVar, prepend a
  -- "Set-Cookie: SESSION=..." header; otherwise respond unchanged.
  let respondSession response = respond . maybe response (flip mapResponseHeaders response . (:) . ("Set-Cookie",) . ("SESSION=" <>)) =<< tryReadMVar setSessionMVar
      -- Handlers call this to request a cookie; 'tryPutMVar' means only the
      -- first call wins and later calls are ignored.
      setSession = void . tryPutMVar setSessionMVar
  -- Run the web handler
  serve (Proxy :: Proxy WebAPI) (server $ WebEnv sessionMap quizMap stdGen setSession) req respondSession
-- | Servant server for the entirety of the QuickQuiz web server.
--
-- The individual route handlers live in the where-clause so that they can
-- share 'mkEnv' (the environment builder) and the 'notFound' helper.
server :: (Maybe Session -> Maybe QuizSession -> WebEnv) -> Server WebAPI
server mkEnv = enter (runReaderTNat $ mkEnv Nothing Nothing) (toMarkup . DefaultLayout <$> webHome)
          :<|> webQuiz
          :<|> webStatic
  where
    -- | Concatenation of the separate QuickQuiz route handlers.
    -- Each lambda resolves the quiz environment from the quiz symbol (w)
    -- and the session cookie (x) before running the actual handler.
    webQuiz :: Server QuizAPI
    webQuiz = (\w x -> getQuizEnv w x >>= run webQuizOpen)
         :<|> (\w x y -> getQuizEnv w x >>= run (webQuizGet y))
         :<|> (\w x y z -> getQuizEnv w x >>= run (webQuizPost y z))
         :<|> (\w x -> getQuizEnv w x >>= run webQuizReview)
         :<|> (\w x -> getQuizEnv w x >>= run webQuizFinalize)
    -- Run a handler inside the given environment, wrapping the markup it
    -- produces in the default page layout.
    run x y = toMarkup . DefaultLayout <$> (runReaderT x y)
    -- Resolve the environment for a quiz request: find (or lazily start)
    -- this session's 'QuizSession' for the quiz named by 'sym'.
    getQuizEnv :: Text -> Maybe Text -> Handler WebEnv
    getQuizEnv sym cookie = do
      let env = mkEnv Nothing Nothing
      s@(Session self _ qmap) <- runReaderT (getSession cookie) env
      -- Look for quiz among active session quizzes
      case Map.lookup sym qmap of
        Just qs ->
          return $ mkEnv (Just s) (Just qs)
        Nothing -> do
          -- Look for quiz among all quizzes
          env <- liftIO $ atomically $ do
            qmap' <- readTVar $ _env_quiz_map_tvar env
            s' <- readTVar self
            case Map.lookup sym qmap' of
              Just quiz -> do
                -- Start the quiz at problem 1 with no answers recorded yet.
                let qs = QuizSession quiz 1 IntMap.empty
                -- Add quiz to session
                let s'' = s' { _session_quiz_map =
                                 Map.insert (_quiz_symbol quiz)
                                            qs
                                            (_session_quiz_map s') }
                writeTVar self s''
                return $ Just $ mkEnv (Just s'') (Just qs)
              Nothing ->
                return Nothing
          case env of
            Just env' ->
              return env'
            Nothing ->
              notFound $ "Quiz with symbol '" <> sym <> "' not found."
    -- Fail a request with a themed 404 page.
    notFound :: ToMarkup m => m -> Handler a
    notFound msg = throwError err404 { errBody = renderMarkup $ toMarkup $ DefaultLayout $ [shamlet|
      <h1>
        <span class="subject">404
        <span class="topic">#{msg}
      |] }
    -- | Home page route handler.
    webHome :: Web HomeAPI
    webHome = do
      -- Get all quizzes
      -- NOTE(review): 'get_quizzes' is not defined in this file -- confirm
      -- where it comes from.
      qs <- get_quizzes
      -- Render the Home page
      return $ [shamlet|
        <div class="quizzes">
          $forall q <- qs
            <a href=#{"/quiz/" <> _quiz_symbol q} class="quiz">
              <span class="subject">#{_quiz_subject q}
              <span class="topic">#{_quiz_topic q}
              <p class="description">#{_quiz_description q} |]
    -- Entry route for a quiz: send the client to their current problem.
    webQuizOpen :: Web QuizOpenAPI
    webQuizOpen = do
      -- NOTE(review): 'fromJust' assumes getQuizEnv always installed both a
      -- session and a quiz session -- confirm this invariant holds.
      qmap <- _session_quiz_map . fromJust <$> reader _env_session
      quizSym <- _quiz_symbol . _quiz_session_quiz . fromJust <$> reader _env_quiz_session
      case _quiz_session_current_problem_index <$> Map.lookup quizSym qmap of
        Just probIdx ->
          -- Redirect to the current Quiz problem
          redirect (Proxy :: Proxy (QuizPrefix QuizGetAPI)) (\mk -> mk quizSym probIdx)
        Nothing ->
          lift $ notFound [shamlet|Quiz with symbol '#{quizSym}' not found. Please help.|]
    -- | GET request handler for a specific quiz problem.
    webQuizGet :: Web QuizGetAPI
    webQuizGet probIdx = do
      (Session _ _ qmap) <- fromJust <$> reader _env_session
      curQuiz <- _quiz_session_quiz . fromJust <$> reader _env_quiz_session
      let quizSym = _quiz_symbol curQuiz
      -- Lookup the current problem within the current quiz
      let probs = _quiz_problems curQuiz
          probsCnt = length probs
      -- Render the current quiz problem page
      if probIdx < 1 || probIdx > probsCnt then
        lift $ notFound $ "Quiz problem number " <> Text.pack (show probIdx) <> " not found in '" <> quizSym <> "' quiz."
      else do
        let Problem desc (Solution sol) = probs !! (probIdx - 1)
        return $ [shamlet|
          <h1>
            <span class="subject"> #{_quiz_subject curQuiz}
            <span class="topic"> #{_quiz_topic curQuiz}
          <div class="quiz-problem">
            <div class="quiz-problem-number">
              <span>#{probIdx}
            <div class="quiz-problem-content">
              #{renderMarkdown "" desc}
              <form id="response-form" method="post">
                #{either toMarkup toMarkup sol}
                <input type="submit" class="submit-button" value="Next">
          <div class="clear">
          |]
    -- POST handler: record the submitted answer, then advance to the next
    -- problem or to the review page.
    webQuizPost :: Web QuizPostAPI
    webQuizPost probIdx msg = do
      (Session self _ qmap) <- fromJust <$> reader _env_session
      (QuizSession quiz _ _) <- fromJust <$> reader _env_quiz_session
      let quizSym = _quiz_symbol quiz
      -- Parse form serialized form data
      let getParam = flip Map.lookup $ Map.fromList msg
      -- Acquire the current problem solution
      -- TODO: Remove exception here. (w.r.t. !! and Just _)
      let probs = _quiz_problems quiz
          probsCnt = length probs
          prob = probs !! (probIdx - 1)
          Solution sol = _problem_solution prob
      case sol of
        Left (MultiChoice _ cs) ->
          -- NOTE(review): multiple-choice submission is not implemented;
          -- this crashes if forced.
          return undefined
        Right (Explanation maybeExp) -> do
          -- Store the free-text explanation answer into the session
          -- (no-op if the form carried no "explanation" field).
          liftIO $ atomically $ modifyTVar self $ \s@(Session _ _ qmap) ->
            s { _session_quiz_map = fromMaybe qmap $ do
                  exp <- getParam "explanation"
                  return (Map.adjust (\q@(QuizSession _ _ ans) -> q { _quiz_session_answers = IntMap.insert probIdx (Right exp) ans }) quizSym qmap) }
          -- Last problem goes to review, otherwise to the next problem.
          if probsCnt == probIdx then
            redirect (Proxy :: Proxy (QuizPrefix QuizReviewAPI)) ($ quizSym)
          else
            redirect (Proxy :: Proxy (QuizPrefix QuizGetAPI)) (\mk -> mk quizSym (probIdx + 1))
    -- Review page: every problem with the recorded answer beside it.
    webQuizReview :: Web QuizReviewAPI
    webQuizReview = do
      (QuizSession quiz _ ans) <- fromJust <$> reader _env_quiz_session
      let xs = zip (_quiz_problems quiz) [1..]
          -- Render the stored answer for problem number i.
          -- NOTE(review): this case is partial -- a mismatched
          -- answer/solution pairing, or a missing answer (via 'fromJust'),
          -- would crash at runtime.
          answer prob i =
            case (fromJust $ IntMap.lookup i ans, _problem_solution prob) of
              (Left choiceIdx, Solution (Left mc)) ->
                let xs = zip [1..] $ _multiple_choice_choices mc
                in [shamlet|
                     <ul>
                       $forall (i, choice) <- xs
                         $if i == choiceIdx
                           <li class="selected">
                             #{_choice_body choice}
                         $else
                           <li>
                             #{_choice_body choice}
                     |]
              (Right resp, Solution (Right exp)) ->
                [shamlet|
                  <div class="explanation">
                    #{renderMarkdown (show i ++ "-explanation--") resp}
                  |]
      return $ [shamlet|
        <h1>
          <span class="subject"> #{_quiz_subject quiz}
          <span class="topic"> #{_quiz_topic quiz}
        <div id="quiz-review">
          $forall (prob, i) <- xs
            <div class="quiz-problem">
              <div class="quiz-problem-number">
                <span>#{i}
              <div class="quiz-problem-content">
                #{renderMarkdown (show i ++ "--") $ _problem_body prob}
              <div class="quiz-problem-answer">
                #{answer prob i}
          <a href=#{("/quiz/" <> _quiz_symbol quiz) <> "/finalize"} class="submit-button" id="finalize-button">
            Finalize
          <div class="clear">
        |]
      -- let Just (QuizSession curQuiz _ _) = Map.lookup quizSym qmap
    -- Finalize page: currently renders an empty template (stub).
    webQuizFinalize :: Web QuizFinalizeAPI
    webQuizFinalize = do
      return $ [shamlet|
        |]
-- | Resolve the client's session from the cookie header, then run the
-- supplied action with it.
handleSession :: (Session -> WebM a) -> Maybe Text -> WebM a
handleSession action cookie = getSession cookie >>= action
-- | Find existing session data. If a client has no existing session, create a new one.
getSession :: Maybe Text -> WebM Session
getSession getCookies = do
  -- Acquire the session map
  smapvar <- reader _env_session_map_tvar
  sessionMap <- liftIO $ readTVarIO smapvar
  -- Lookup an existing session (Maybe monad: any step failing means no
  -- usable session cookie was presented).
  let session = do
        -- Acquire cookie string
        cookies <- getCookies
        -- Extract the session identifier from the cookie string
        reqSessionId <- Map.lookup "SESSION" $ Map.fromList $ parseCookies $ encodeUtf8 cookies
        -- Lookup session identifier in map of cookies
        IntMap.lookup (B.decode $ BL.fromStrict $ B64.decodeLenient reqSessionId) sessionMap
  case session of
    Just s ->
      -- Return the existing session
      liftIO $ readTVarIO s
    Nothing -> do
      rndgenvar <- reader _env_stdgen_tvar
      -- Create a new session
      newSession <- liftIO $ atomically $ do
        -- Acquire the session map
        smap <- readTVar smapvar
        -- Acquire the random number generator
        rndgen <- readTVar rndgenvar
        -- Generate a new session identifier, retrying until it is not
        -- already a key in the session map.
        let genId (i, g) =
              if IntMap.member i smap then
                -- Identifier exists
                genId $ next g
              else
                -- New identifier found
                (i, g)
            (newId, rndgen') = genId $ next rndgen
        -- Generate the new session map. The session is built in two steps
        -- ('undefined' placeholder, then a record update) so it can hold a
        -- reference to its own TVar.
        let newSession' = Session undefined newId def
        newSessionTVar <- newTVar newSession'
        let newSession = newSession' { _session_self = newSessionTVar }
        writeTVar newSessionTVar newSession
        let newMap = IntMap.insert newId newSessionTVar smap
        -- Write out the new session map
        writeTVar smapvar newMap
        -- Write out the new random number generator
        writeTVar rndgenvar rndgen'
        -- Return the new session
        return newSession
      -- Set the Set-Cookie header (for sending with the response); the id
      -- is binary-encoded then base64-encoded for the cookie value.
      liftIO . ($ B64.encode $ BL.toStrict $ B.encode $ _session_id newSession) =<< reader _env_set_session
      -- Return the new session (into the request handler)
      return newSession
-- handleQuiz :: ToMarkup a => (Session -> WebM a) -> Maybe Text -> WebM a
-- handleQuiz action getCookies = do
-- (body, curQuiz) <- handleSession (\s@(Session _ _ qmap)-> do
-- let Just (QuizSession curQuiz _ _) = Map.lookup quizSym qmap
-- body <- action s
-- return (body, curQuiz)) getCookies
-- return $ DefaultLayout $ [shamlet|
-- <h1>
-- <span class="subject"> #{_quiz_subject curQuiz}
-- <span class="topic"> #{_quiz_topic curQuiz}
-- <div>#{body}
-- |]
-- Render a free-text explanation solution as an empty textarea for the
-- user's response.
instance ToMarkup Explanation where
  toMarkup (Explanation _) = [shamlet|<textarea name="explanation">|]
-- Render a multiple-choice solution as a plain list of its choices.
-- NOTE(review): the radio-vs-checkbox flag is ignored and no input
-- elements are emitted -- presumably unfinished; confirm intent.
instance ToMarkup MultiChoice where
  toMarkup (MultiChoice isRadio cs) =
    [shamlet|
      <ul>
        $forall c <- cs
          <li>#{c}
    |]
-- Render a single choice by formatting its markdown body.
instance ToMarkup Choice where
  toMarkup (Choice desc _) = renderMarkdown "" desc
-- | Render markdown text to HTML.
-- NOTE(review): the prefix argument (an identifier prefix for generated
-- elements) is accepted for API compatibility but currently unused; the
-- commented-out pandoc-based implementation did use it.
renderMarkdown :: String -> Text -> Html
renderMarkdown prefix = markdown def . Text.fromStrict -- toMarkup -- either (error . show) (writeHtml def { writerIdentifierPrefix = prefix, writerHTMLMathMethod = MathJax "" }) . readMarkdown def . Text.unpack
-- | Serve static assets from the local "www" directory.
webStatic :: Web StaticAPI
webStatic = serveDirectory "www"
-- | Issue an HTTP 302 redirect to a type-safe link within 'WebAPI'.
-- The endpoint proxy plus the link-builder function determine the target
-- path, which is rendered into the Location header.
redirect :: (IsElem endpoint WebAPI, HasLink endpoint) => Proxy endpoint -> (MkLink endpoint -> URI) -> WebM a
redirect p f = throwError err302 { errHeaders = [("Location", location)] }
  where
    location = BSC.pack ('/' : uriPath (f (safeLink (Proxy :: Proxy WebAPI) p)))
-- instance ToMarkup a => ToMarkup [a] where
-- toMarkup xs = [shamlet|
-- <ul>
-- $forall x <- xs
-- <li>#{x}|]
-- instance ToMarkup a => ToMarkup QuizSummary where
-- toMarkup (QuizSummary q) = [shamlet|
-- <div class="quizzes">
-- $forall q <- qs
-- <a href=#{"/quiz/" <> _quiz_symbol q} class="quiz">
-- <span class="subject">#{_quiz_subject q}
-- <span class="topic">#{_quiz_topic q}
-- <p class="description">#{_quiz_description q}
-- |]
-- instance ToMarkup a => ToMarkup (x ) where
-- getQuizMap :: (QuizMap Quiz -> a) -> WebM a
-- getQuizMap = flip fmap (liftIO . readTVarIO =<< reader _env_quiz_map_tvar)
-- getSessQuizMap :: Int -> (QuizMap (Quiz, Int, [Answer]) -> a) -> WebM a
-- getSessQuizMap sid f = do
-- fmap (f . sessionState) (liftIO . readTVarIO =<< reader _env_session_map_tvar)
-- getQuizzes :: WebM [Quiz]
-- getQuizzes = getQuizMap qmAll
-- cookieToSessionId :: Text -> Int
-- cookieToSessionId cookie =
-- getQuiz
| michael-swan/quick-quiz | src/Quiz/Web.hs | bsd-3-clause | 16,345 | 4 | 30 | 5,625 | 2,855 | 1,489 | 1,366 | 200 | 4 |
{-# LANGUAGE GeneralizedNewtypeDeriving, OverloadedStrings #-}
-----------------------------------------------------------------------------
-- |
-- Module : Hasmin.Types.Numeric
-- Copyright : (c) 2017 Cristian Adrián Ontivero
-- License : BSD3
-- Stability : experimental
-- Portability : non-portable
--
-- CSS Numeric data types: \<number\>, \<percentage\>, and \<alphavalue\>.
-- All Rational newtypes to ensure dimension conversion precision.
--
-----------------------------------------------------------------------------
module Hasmin.Types.Numeric
( Percentage(..)
, toPercentage
, Number(..)
, toNumber
, fromNumber
, Alphavalue(..)
, toAlphavalue
, mkAlphavalue
) where
import Data.Text (pack)
import Hasmin.Class
import Hasmin.Utils
import Text.Printf (printf)
-- | The \<number\> data type. Real numbers, possibly with a fractional component.
-- When written literally, a number is either an integer, or zero or more
-- decimal digits followed by a dot (.) followed by one or more decimal digits
-- and optionally an exponent composed of "e" or "E" and an integer. It
-- corresponds to the \<number-token\> production in the CSS Syntax Module
-- [CSS3SYN]. As with integers, the first character of a number may be
-- immediately preceded by - or + to indicate the number’s sign.
-- Specifications:
--
-- 1. <https://drafts.csswg.org/css-values-3/#numbers CSS Values and Units Module Level 3 (§4.2)>
-- 2. <https://www.w3.org/TR/CSS2/syndata.html#numbers CSS2.1 (§4.3.1)>
-- 3. <https://www.w3.org/TR/CSS1/#units CSS1 (6 Units)>
--
-- Stored as a 'Rational' so that dimension conversions stay exact.
newtype Number = Number { getRational :: Rational }
  deriving (Eq, Show, Ord, Num, Fractional, Real, RealFrac)
-- Render the number in minimized decimal form.
instance ToText Number where
  toText n = pack (trimLeadingZeros (showRat (toRational n))) -- check if scientific notation is shorter!
-- | Wrap any 'Real' value as a 'Number', converting through 'Rational'.
toNumber :: Real a => a -> Number
toNumber x = Number (toRational x)
-- | Extract a 'Number' into any 'Fractional' type.
fromNumber :: Fractional a => Number -> a
fromNumber (Number r) = fromRational r
-- | The \<alphavalue\> data type. Syntactically a \<number\>. It is the
-- uniform opacity setting to be applied across an entire object. Any values
-- outside the range 0.0 (fully transparent) to 1.0 (fully opaque) are clamped
-- to this range. Specification:
--
-- 1. <https://www.w3.org/TR/css3-color/#transparency CSS Color Module Level 3 (§3.2)
--
-- Construct via 'mkAlphavalue' or 'toAlphavalue' so clamping is applied.
newtype Alphavalue = Alphavalue Rational
  deriving (Eq, Show, Ord, Real, RealFrac)
-- Arithmetic on alpha values, re-clamping every result into [0,1] via
-- 'mkAlphavalue'.
instance Num Alphavalue where
  abs = id -- numbers are never negative, so abs doesn't do anything
  a - b = mkAlphavalue $ toRational a - toRational b
  a + b = mkAlphavalue $ toRational a + toRational b
  a * b = mkAlphavalue $ toRational a * toRational b
  fromInteger = toAlphavalue
  -- Values are clamped to [0,1], so signum is 0 at zero and 1 otherwise.
  signum a | toRational a == 0 = 0
           | otherwise = 1
-- Same minimized decimal rendering as 'Number'.
instance ToText Alphavalue where
  toText a = pack (trimLeadingZeros (showRat (toRational a)))
-- Alpha values are clamped, so the bounds are exactly the clamp limits.
instance Bounded Alphavalue where
  minBound = 0
  maxBound = 1
instance Fractional Alphavalue where
  fromRational = mkAlphavalue
  -- Divide the underlying rationals, then re-clamp into [0,1].
  Alphavalue n / Alphavalue d = mkAlphavalue (n / d)
-- | Convert any 'Real' value to an 'Alphavalue', clamping into [0,1].
toAlphavalue :: Real a => a -> Alphavalue
toAlphavalue x = mkAlphavalue (toRational x)
-- | Smart constructor: keep the rational within the valid [0,1] alpha
-- range ('restrict' comes from "Hasmin.Utils"; presumably a clamp -- the
-- module comment says out-of-range values are clamped).
mkAlphavalue :: Rational -> Alphavalue
mkAlphavalue = Alphavalue . restrict 0 1
-- | The \<percentage\> data type. Many CSS properties can take percentage
-- values, often to define sizes in terms of parent objects. Percentages are
-- formed by a \<number\> immediately followed by the percentage sign %.
-- There is no space between the '%' and the number. Specification:
--
-- 1. <https://drafts.csswg.org/css-values-3/#percentages CSS Value and Units Module Level 3 (§4.3)
-- 2. <https://www.w3.org/TR/CSS2/syndata.html#percentage-units CSS2.1 (§4.3.3)
-- 3. <https://www.w3.org/TR/CSS1/#percentage-units CSS1 (§6.2)
--
-- Stored as a 'Rational' for exact conversions, like 'Number'.
newtype Percentage = Percentage Rational
  deriving (Eq, Show, Ord, Num, Fractional, Real, RealFrac)
-- Minimized decimal rendering with a trailing percent sign.
instance ToText Percentage where
  toText p = pack (trimLeadingZeros (showRat (toRational p)) ++ "%")
-- | Wrap any 'Real' value as a 'Percentage'.
toPercentage :: Real a => a -> Percentage
toPercentage x = Percentage (toRational x)
-- Note: printf used instead of show to avoid scientific notation
-- | Show a Rational in decimal notation, removing leading zeros,
-- and not displaying fractional part if the number is an integer.
showRat :: Rational -> String
showRat r
    -- Within 'eps' (from "Hasmin.Utils") of an integer: print as integer.
  | abs (r - fromInteger x) < eps = printf "%d" x
    -- Otherwise print plain (non-scientific) decimal via Double.
  | otherwise = printf "%f" d
  where x = round r
        d = fromRational r :: Double
-- | Strip redundant leading zeros from a decimal string, preserving a
-- leading minus sign. Zeros are dropped only while another character
-- follows, so "0.5" becomes ".5", "000" collapses to "0", and a lone "0"
-- is left alone.
trimLeadingZeros :: String -> String
trimLeadingZeros str =
    case str of
      '-':rest -> '-' : dropZeros rest
      _        -> dropZeros str
  where
    dropZeros ('0':c:cs) = dropZeros (c:cs)
    dropZeros other      = other
| contivero/hasmin | src/Hasmin/Types/Numeric.hs | bsd-3-clause | 4,700 | 0 | 12 | 876 | 817 | 445 | 372 | 63 | 2 |
#!/usr/bin/env runhaskell
{-# LANGUAGE NamedFieldPuns
#-}
-- This script generates gnuplot plots.
-- Give it a .dat file as input... (or it will try to open results.dat)
import Text.PrettyPrint.HughesPJClass
import Text.Regex
import Data.List
import Data.Function
import Control.Monad
import System
import System.IO
import System.FilePath
import System.Environment
import HSH
-- import Graphics.Gnuplot.Simple
-- import Graphics.Gnuplot.Advanced
-- import Graphics.Gnuplot.Frame
-- import Graphics.Gnuplot.Frame.OptionSet
-- import qualified Graphics.Gnuplot.Terminal.X11
-- import Graphics.Gnuplot.Plot.TwoDimensional
-- import qualified Graphics.Gnuplot.Simple as Simple
-- import qualified Graphics.Gnuplot.Advanced as Plot
-- import qualified Graphics.Gnuplot.Terminal.X11 as X11
-- import qualified Graphics.Gnuplot.Frame as Frame
-- import qualified Graphics.Gnuplot.Frame.Option as Opt
-- import qualified Graphics.Gnuplot.Frame.OptionSet as Opts
-- import qualified Graphics.Gnuplot.Plot.ThreeDimensional as Plot3D
-- import qualified Graphics.Gnuplot.Plot.TwoDimensional as Plot2D
-- import qualified Graphics.Gnuplot.Graph.TwoDimensional as Graph2D
-- import Graphics.Gnuplot.Plot.TwoDimensional (linearScale, )
import Data.Array (listArray, )
import Data.Monoid (mappend, )
import Debug.Trace
-- Width used for plotted lines in the generated gnuplot script
-- (historically "4.0").
linewidth = "5.0"

-- Schedulers that we don't care to graph right now.
-- This masking happens BEFORE any renaming. (Historical value: [5,6,99,10].)
scheduler_MASK = []

-- Infinite supply of gnuplot line-type indices, skipping 6 because
-- gnuplot line type 6 is yellow and hard to read.
line_types = filter (/= 6) [0 ..]

-- Scheduler-number renaming for the paper; currently the identity.
-- (Historically remapped 10 -> 99 and 100 -> 10.)
translate n = n
{-
--import qualified Graphics.Gnuplot.LineSpecification as LineSpec
simple2d :: Plot2D.T
simple2d =
Plot2D.function (linearScale 100 (-10,10::Double)) sin
circle2d :: Plot2D.T
circle2d =
fmap
(Graph2D.typ Graph2D.points)
(Plot2D.parameterFunction
(linearScale 24 (-pi,pi::Double))
(\t -> (cos t, sin t)))
overlay2d :: Frame.T Graph2D.T
overlay2d =
Frame.cons (Opts.size 1 0.4 $ Opts.remove Opt.key $ Opts.deflt) $
Plot2D.function (linearScale 100 (-pi,pi::Double)) cos
`mappend`
circle2d
-- mypath :: Graph2D.T
mypath :: Plot2D.T
mypath =
fmap (Graph2D.lineSpec $
LineSpec.title "blaht" $
LineSpec.lineWidth 3.0 $
LineSpec.pointSize 3.0 $
LineSpec.deflt) $
fmap (Graph2D.typ Graph2D.linesPoints) $
Plot2D.path [(0,0), (1,1), (3,2)]
spec :: LineSpec.T
spec = LineSpec.title "blah" LineSpec.deflt
myoverlay :: Frame.T Graph2D.T
myoverlay =
--Graph2D.lineSpec (LineSpec.title "blah" LineSpec.deflt) $
Frame.cons (Opts.deflt) $
mypath --(Graph2D.lineSpec spec mypath)
`mappend`
circle2d
-}
-- | Round to two decimal places (used for human-readable plot titles).
round_2digits :: Double -> Double
round_2digits n = fromIntegral (round (n * 100)) / 100
--x11 = terminal Terminal.X11.cons
--x11 = terminal cons
--x11 = terminal Graphics.Gnuplot.Terminal.X11.cons
--x11 = terminal X11.cons
--------------------------------------------------------------------------------
-- An elementwise 'Enum' interpretation for pairs: both components step in
-- lockstep, and 'fromEnum' insists the two sides agree.
instance (Enum t1, Enum t2) => Enum (t1, t2) where
  succ (a, b) = (succ a, succ b)
  pred (a, b) = (pred a, pred b)
  toEnum i = (toEnum i, toEnum i)
  fromEnum (a, b)
      | x == y    = x
      | otherwise = error$ "fromEnum of pair: nonmatching numbers: " ++ show x ++" and "++ show y
    where
      x = fromEnum a
      y = fromEnum b
-- | Split lines into groups separated by double (or longer) runs of blank
-- lines; isolated single blank lines are simply dropped.
sepDoubleBlanks :: [String] -> [[String]]
sepDoubleBlanks = go []
  where
    -- 'acc' collects the current group in reverse order.
    go acc []            = [reverse acc]
    go acc ("" : "" : t) = reverse acc : go [] (dropBlanks t)
    go acc ("" : t)      = go acc t
    go acc (h : t)       = go (h : acc) t
    -- Consume any further blanks following a group separator.
    dropBlanks ("" : t) = dropBlanks t
    dropBlanks t        = t
-- | Drop lines whose first non-space text begins with the given comment
-- marker. Only space characters (not tabs) count as leading whitespace,
-- matching the original behavior.
remComments :: String -> [String] -> [String]
remComments commentchars = filter (not . isComment . dropWhile (== ' '))
  where
    isComment str = commentchars `isPrefixOf` str
--------------------------------------------------------------------------------
-- Here's the schema for the data from my timing tests:
-- One row of the results table (one benchmark run configuration).
data Entry = Entry {
    name       :: String, -- benchmark name
    variant    :: String, -- benchmark variant, e.g. "io" or "pure"
    sched      :: Int,    -- scheduler number
    threads    :: Int,    -- thread count (0 = serial run, per plot_benchmark2)
    hashhack   :: Bool,   -- presumably a hashing workaround flag -- TODO confirm
    tmin       :: Double, -- minimum observed run time
    tmed       :: Double, -- median observed run time
    tmax       :: Double, -- maximum observed run time
    normfactor :: Double  -- input-size normalization factor (defaults to 1.0)
  }
  deriving Show
-- Compact tuple-style pretty-printing (uses NamedFieldPuns).
instance Pretty Entry where
  --pPrint x = pPrint$ show x
  pPrint Entry { name, variant, sched, threads, tmin, tmed, tmax, normfactor } =
    pPrint ("ENTRY", name, variant, sched, threads, (tmin, tmed, tmax), normfactor )
    -- pPrint ("ENTRY", name, variant, sched, threads, tmin, tmed, tmax, normfactor)
-- | Build an 'Entry' from one whitespace-split data row.
-- Accepts 8 fields (normfactor defaults to 1.0) or 9 fields (explicit
-- normfactor); any other arity is a fatal input error.
parse [a,b,c,d,e,f,g,h] =
  Entry { name = a,
          variant = b,
          sched = read c,
          threads = read d,
          -- Any value other than "0" counts as True.
          hashhack = not (e == "0"),
          tmin = read f,
          tmed = read g,
          tmax = read h,
          normfactor = 1.0
        }
parse [a,b,c,d,e,f,g,h,i] =
  -- trace ("Got line with norm factor: "++ show [a,b,c,d,e,f,g,h,i])
  -- Reuse the 8-field case, then override the norm factor.
  (parse [a,b,c,d,e,f,g,h]) { normfactor = read i }
parse other = error$ "Cannot parse, wrong number of fields, "++ show (length other) ++" expected 8 or 9: "++ show other
-- | Sort by a key projection, then bucket runs of equal keys together.
groupSort key = groupBy ((==) `on` key) . sortBy (compare `on` key)
-- Add three more levels of list nesting to organize the data:
-- outermost groups share a benchmark name, then a variant, then a scheduler.
organize_data :: [Entry] -> [[[[Entry]]]]
organize_data =
  (map (map (groupSort sched))) .
  (map (groupSort variant)) .
  (groupSort name)
-- A string wrapper whose 'Show' omits the surrounding quotes, so gnuplot
-- data rows can be printed verbatim.
newtype Mystr = Mystr String
instance Show Mystr where
  show (Mystr s) = s
{-
-- I ended up giving up on using the gnuplot package on hackage:
-- mypath :: Graph2D.T
--Plot2D.T
--plot_benchmark :: [[[Entry]]] -> IO ()
--plot_benchmark :: [[[Entry]]] -> Plot2D.T
plot_benchmark [io, pure] =
--Plot.plot (X11.title "foobar" X11.cons) $
Plot.plot X11.cons $
Frame.cons (Opts.title ("Benchmark: " ++ benchname ++ " normalized to time " ++ show basetime) $ Opts.deflt) plots
where
benchname = name $ head $ head io
plots = foldl1 mappend (map persched io ++ map persched pure)
basetime = foldl1 min $ map tmed $
filter ((== 0) . threads) $
(concat io ++ concat pure)
persched :: [Entry] -> Plot2D.T
persched dat =
let
schd = sched$ head dat
var = variant$ head dat
mins = map tmin dat
meds = map tmed dat
maxs = map tmax dat
--zip4 = map$ \ a b c d -> (a,b,c,d)
zip4 s1 s2 s3 s4 = map (\ ((a,b), (c,d)) -> (a,b,c,d))
(zip (zip s1 s2) (zip s3 s4))
pairs = zip4 (map (fromIntegral . threads) dat)
(map (basetime / ) meds)
(map (basetime / ) mins)
(map (basetime / ) maxs)
quads = map (\ (a,b,c,d) -> Mystr (show a ++" "++ show b ++" "++ show d ++" "++ show c))
pairs
in
fmap (Graph2D.lineSpec $
LineSpec.title (var ++"/"++ show schd) $
LineSpec.lineWidth 3.0 $
LineSpec.pointSize 3.0 $
LineSpec.deflt) $
fmap (Graph2D.typ Graph2D.linesPoints) $
--Plot2D.path pairs
--Plot2D.path (map ( \ (a,b,c,d) -> (a,b)) pairs)
--fmap (Graph2D.typ Graph2D.errorBars) $
Plot2D.list quads
-}
-- Name, Scheduler, Threads, BestTime, Speedup
-- Summary of the best configuration found for one benchmark.
data Best = Best (String, Int, Int, Double, Double)
-- Plot a single benchmark as a gnuplot script:
-- writes a .gp script plus one .dat file per variant/scheduler line into
-- 'root' and returns the 'Best' configuration found.
-- NOTE(review): only the [io] and [io, pure] list shapes are handled; other
-- shapes hit a pattern-match failure. ('pure' here is a local variable
-- shadowing Prelude.pure.)
plot_benchmark2 root [io, pure] =
    do action $ filter goodSched (io ++ pure)
       return$ Best (benchname, bestsched, bestthreads, best, basetime / best)
  where
    benchname = name $ head $ head io
    -- What was the best single-threaded execution time across variants/schedulers:
    goodSched [] = error "Empty block of data entries..."
    goodSched (h:t) = not $ (sched h) `elem` scheduler_MASK
    cat = concat io ++ concat pure
    -- Serial (0-thread) and single-thread entries, used for the baseline.
    threads0 = filter ((== 0) . threads) cat
    threads1 = filter ((== 1) . threads) cat
    -- Normalize each median time by its input-size factor.
    map_normalized_time = map (\x -> tmed x / normfactor x)
    times0 = map_normalized_time threads0
    times1 = map_normalized_time threads1
    -- Baseline: best serial time, falling back to best 1-thread time.
    basetime = if not$ null times0
               then foldl1 min times0
               else if not$ null times1
                    then foldl1 min times1
                    else error$ "\nFor benchmark "++ show benchname ++ " could not find either 1-thread or 0-thread run.\n" ++
                                --"ALL entries: "++ show (pPrint cat) ++"\n"
                                "\nALL entries threads: "++ show (map threads cat)
    -- Overall best (normalized) time and the entry that achieved it.
    best = foldl1 min $ map_normalized_time cat
    Just best_index = elemIndex best $ map_normalized_time cat
    bestsched = sched$ cat !! best_index
    bestthreads = threads$ cat !! best_index
    (filebase,_) = break (== '.') $ basename benchname
    -- If all normfactors are the default 1.0 we print a different message:
    --let is_norm = not$ all (== 1.0) $ map normfactor ponits
    norms = map normfactor (concat io ++ concat pure)
    default_norms = all (== 1.0) $ norms
    max_norm = foldl1 max norms
    -- Make benchmark names printable inside gnuplot titles.
    scrub '_' = ' '
    scrub x = x
    -- scrub [] = []
    -- scrub ('_':t) = "\\_"++ scrub t
    -- scrub (h:t) = h : scrub t
    -- Emit the gnuplot script and the per-line .dat files.
    action lines =
      do
        let scriptfile = root ++ filebase ++ ".gp"
        putStrLn$ "Dumping gnuplot script to: "++ scriptfile
        putStrLn$ "NORM FACTORS "++ show norms
        -- Script header: terminal, output file, title and axis setup.
        runIO$ echo "set terminal postscript enhanced color\n" -|- appendTo scriptfile
        runIO$ echo ("set output \""++filebase++".eps\"\n") -|- appendTo scriptfile
        runIO$ echo ("set title \"Benchmark: "++ map scrub filebase ++
                     ", speedup relative to serial time " ++ show (round_2digits $ basetime * max_norm) ++" seconds "++
                     -- "for input size " ++ show (round_2digits max_norm)
                     (if default_norms then "" else "for input size " ++ show (round max_norm))
                     --if is_norm then "normalized to work unit"
                     --if default_norms then "" else " per unit benchmark input"
                     ++"\"\n") -|- appendTo scriptfile
        runIO$ echo ("set xlabel \"Number of Threads\"\n") -|- appendTo scriptfile
        runIO$ echo ("set ylabel \"Parallel Speedup\"\n") -|- appendTo scriptfile
        runIO$ echo ("set xrange [1:]\n") -|- appendTo scriptfile
        runIO$ echo ("set key left top\n") -|- appendTo scriptfile
        runIO$ echo ("plot \\\n") -|- appendTo scriptfile
        -- In this loop lets do the errorbars:
        forM_ (zip [1..] lines) $ \(i,points) -> do
          let datfile = root ++ filebase ++ show i ++".dat"
          runIO$ echo (" \""++ basename datfile ++"\" using 1:2:3:4 with errorbars lt "++
                       show (line_types !! i)
                       ++" title \"\", \\\n") -|- appendTo scriptfile
        -- Now a second loop for the lines themselves and to dump the actual data to the .dat file:
        forM_ (zip [1..] lines) $ \(i,points) -> do
          let datfile = root ++ filebase ++ show i ++".dat"
          let schd = sched$ head points -- should be the same across all point
          let var = variant$ head points -- should be the same across all point
          let nickname = var ++"/"++ show (translate schd)
          runIO$ echo ("# Data for variant "++ nickname ++"\n") -|- appendTo datfile
          forM_ points $ \x -> do
            -- Here we print a line of output:
            runIO$ echo (show (fromIntegral (threads x)) ++" "++
                         show (basetime / (tmed x / normfactor x)) ++" "++
                         show (basetime / (tmax x / normfactor x)) ++" "++
                         show (basetime / (tmin x / normfactor x)) ++" \n") -|- appendTo datfile
          -- Omit the trailing comma after the last plot clause.
          let comma = if i == length lines then "" else ",\\"
          runIO$ echo (" \""++ basename datfile ++
                       "\" using 1:2 with lines linewidth "++linewidth++" lt "++
                       show (line_types !! i) ++" title \""++nickname++"\" "++comma++"\n")
                 -|- appendTo scriptfile
        --putStrLn$ "Finally, running gnuplot..."
        --runIO$ "(cd "++root++"; gnuplot "++basename scriptfile++")"
        --runIO$ "(cd "++root++"; ps2pdf "++ filebase ++".eps )"
--plot_benchmark2 root ls = putStrLn$ "plot_benchmark2: Unexpected input, list len: "++ show (length ls)
-- Degenerate case: one variant group behaves like an empty second group.
plot_benchmark2 root [io] = plot_benchmark2 root [io,[]]
-- | Does the regular expression match anywhere in the string?
isMatch rg str = maybe False (const True) (matchRegex rg str)
-- | Entry point.  Reads a whitespace-separated benchmark results file
-- (the single command-line argument, defaulting to "results.dat"),
-- filters out comment/blank/ERR/TIMEOUT lines, plots one gnuplot
-- speedup graph per benchmark under "<file>_graphs/", and writes a
-- summary of the best configuration per benchmark to stdout and to
-- "<file>.summary".
main = do
  args <- getArgs
  -- Optional single argument names the input file; any other argument
  -- count is a pattern-match failure.
  let file = case args of
               [f] -> f
               []  -> "results.dat"
  dat <- run$ catFrom [file] -|- remComments "#"
  -- let parsed = map (parse . filter (not (== "")) . splitRegex (mkRegex "[ \t]+"))
  -- Tokenise each surviving line on runs of whitespace (dropping empty
  -- fields) and parse it into a data-point record.
  let parsed = map (parse . filter (not . (== "")) . splitRegex (mkRegex "[ \t]+"))
                   (filter (not . isMatch (mkRegex "ERR")) $
                    filter (not . isMatch (mkRegex "TIMEOUT")) $
                    filter (not . null) dat)
  -- Keep only the "io" and "pure" variants, then group per benchmark.
  let organized = organize_data$ filter ((`elem` ["io","pure"]) . variant) parsed
  -- Not doing this anymore; treat the input as one big bag of points.
  -- let chunked = sepDoubleBlanks dat
  -- let chopped = map (parse . splitRegex (mkRegex "[ \t]+"))
  --                   (chunked !! 0)
  -- let bysched = groupBy ((==) `on` sched) $
  --               sortBy (compare `on` sched)
  --               chopped
  -- putStrLn$ show (pPrint (map length chopped))
  -- putStrLn$ show (pPrint (map parse chopped))
  -- print "parsed:"; print parsed
  --print "Organized:"; print organized
  {-
  putStrLn$ renderStyle (style { lineLength=150 }) (pPrint organized)
  -}
  --Plot.plot X11.cons myoverlay
  --Simple.plotList [Simple.LineStyle 0 [Simple.LineTitle "foobar"]] [0,5..100]
  --let root = "./graph_temp/"
  -- All output goes into a per-input-file graphs directory.
  let root = "./" ++ dropExtension file ++ "_graphs/"
  -- For hygiene, completely annihilate the output directory first:
  system$ "rm -rf " ++root ++"/"
  system$ "mkdir -p "++root
  -- Plot each benchmark, collecting the best (fastest) configuration.
  bests <-
    forM organized $ \ perbenchmark -> do
      best <- plot_benchmark2 root perbenchmark
      forM_ perbenchmark $ \ pervariant ->
        forM_ pervariant $ \ persched ->
          do let mins = map tmin persched
             let pairs = (zip (map (fromIntegral . threads) persched) mins)
             --putStrLn$ show pairs
             --plot Graphics.Gnuplot.Terminal.X11.cons (path pairs)
             --System.exitWith ExitSuccess
             --plot x11 (path pairs)
             return ()
      return best
  --forM_ organized $ \ perbenchmark ->
  --plotLists [x11] [dat, [50..25]]
  --plotLists [x11] [dat, [100,95..0]]
  --plotDots [x11, Size$ Scale 3.0] dat
  --plotDots [x11, LineStyle 0 [PointSize 5.0]] dat
  putStrLn$ "Plotted list\n\n"
  -- Write a column-padded summary of the best runs to the given handle.
  let summarize hnd = do
        hPutStrLn hnd $ "# Benchmark, scheduler, best #threads, best median time, max parallel speedup: "
        hPutStrLn hnd $ "# Summary for " ++ file
        -- pads: spaces needed to right-pad string s to width n.
        let pads n s = take (n - length s) $ repeat ' '
        let pad n x = " " ++ (pads n (show x))
        forM_ bests $ \ (Best(name, sched, threads, best, speed)) ->
          hPutStrLn hnd$ " "++ name++ (pads 25 name) ++
                         show sched++ (pad 5 sched) ++
                         show threads++ (pad 5 threads)++
                         show best ++ (pad 15 best) ++
                         show speed
        hPutStrLn hnd$ "\n\n"
  -- Emit the summary both to the terminal and to a .summary file.
  summarize stdout
  withFile (dropExtension file `addExtension` "summary") WriteMode $ summarize
| rrnewton/Haskell-CnC | scaling.hs | bsd-3-clause | 15,215 | 86 | 21 | 3,656 | 3,134 | 1,693 | 1,441 | 186 | 7 |
{-# LANGUAGE FlexibleInstances #-}
module Data.LFSR.Tap (
Tap (..),
bitsOfTap,
next, list,
tap2, tap4
) where
import Data.List (foldl', sort, group)
import Data.Array (Array, array, (!))
import Data.Bits (Bits, complementBit, xor, (.&.))
import Data.LFSR.Bits (unsignedShiftR)
-- | Types that can enumerate the tap positions (1-based bit numbers)
-- of an LFSR feedback polynomial.
class TapBits a where
  bits :: a -> [Int]
-- | An LFSR description: the register width together with its
-- feedback taps.
data Tap a = Tap { width :: !Int  -- ^ register width in bits
                 , tapBits :: !a  -- ^ tap positions (1-based)
                 } deriving Show
-- | Turn a @(width, taps)@ pair into a width-keyed 'Tap' entry, as
-- needed to build the lookup tables below.
tapOfPair :: TapBits t => (Int, t) -> (Int, Tap t)
tapOfPair (i, bs) = (i, Tap i bs)
-- | Fold the 1-based tap positions of a 'Tap' into a mask value with
-- the corresponding 0-based bits set.
bitsOfTap :: (TapBits t, Bits a) => Tap t -> a
bitsOfTap t = foldl' (\mask b -> complementBit mask (b - 1)) 0 (bits (tapBits t))
-- | One LFSR step (Galois form, shifting right): shift the register
-- one bit and, if the bit shifted out was 1, XOR in the tap mask.
next :: Bits a => a -> a -> a
next tBits lfsr =
  (lfsr `unsignedShiftR` 1)
  `xor`
  -- Branch-free select: negate of the low bit is an all-ones mask when
  -- that bit is 1 and zero otherwise, so this is tBits-or-nothing.
  negate (lfsr .&. 1) .&. tBits
-- | The infinite stream of successive LFSR states, starting from the
-- seed @iv@.
list :: (TapBits t, Bits a) => Tap t -> a -> [a]
list tap iv = iterate (next (bitsOfTap tap)) iv
-- | A two-tap feedback polynomial: two 1-based bit positions.
type TwoBits = (Int, Int)
-- | A two-tap LFSR description.
type Tap2 = Tap TwoBits
-- | Sparse table of known two-tap configurations, keyed by register
-- width.  The array bounds cover widths 2..768 but only the listed
-- widths are populated; indexing any other width yields an
-- undefined-element error at use time.
tap2Table :: Array Int Tap2
tap2Table =
  array (2, 768)
  . map tapOfPair
  $ [
    (2, (2, 1)),
    (9, (9, 5)),
    (17, (17, 14)),
    (33, (33, 20)),
    (65, (65, 47))
  ]
-- | Look up the two-tap configuration for a register width; defined
-- only for the widths present in 'tap2Table'.
tap2 :: Int -> Tap2
tap2 n = tap2Table ! n
-- | A four-tap feedback polynomial: four 1-based bit positions.
type FourBits = (Int, Int, Int, Int)
-- | A four-tap LFSR description.
type Tap4 = Tap FourBits
-- | Sparse table of known four-tap configurations, keyed by register
-- width (bounds 5..768, only the listed widths populated; others are
-- undefined elements).  The commented pairs are alternative published
-- tap sets for the same widths.
tap4Table :: Array Int Tap4
tap4Table =
  array (5, 768)
  . map tapOfPair
  $ [
    (8, (8, 6, 5, 4)),
    -- (16, (16, 14, 13, 11)),
    (16, (16, 15, 13, 4)),
    -- (32, (32, 30, 26, 25)),
    (32, (32, 22, 2, 1)),
    (64, (64, 63, 61, 60)),
    -- (128, (128, 127, 126, 121))
    (128, (128, 126, 101, 99))
  ]
-- | Look up the four-tap configuration for a register width; defined
-- only for the widths present in 'tap4Table'.
tap4 :: Int -> Tap4
tap4 n = tap4Table ! n
-- | A pair enumerates its two tap positions in order.
instance TapBits TwoBits where
  bits (p, q) = [p, q]
-- | A 4-tuple enumerates its four tap positions in order.
instance TapBits FourBits where
  bits (p, q, r, s) = [p, q, r, s]
-- | Sanity check for a tap configuration: generate the first 2M states
-- from the seed @iv@ and verify that no state repeats within that
-- window (after sort/group, every group must be a singleton).
-- Idiom fix: @null . filter (/= 1)@ replaced by the equivalent but
-- clearer @all (== 1)@.
_testTap :: (TapBits t, Bits a, Ord a) => a -> Tap t -> Bool
_testTap iv = all (== 1) . map length . group . sort
            . (take (2 * 1024 * 1024)) . (`list` iv)
-- _testTap 10797677
| khibino/haskell-lfsr | src/Data/LFSR/Tap.hs | bsd-3-clause | 1,932 | 0 | 14 | 540 | 909 | 530 | 379 | 66 | 1 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
--
-- Pretty-printing assembly language
--
-- (c) The University of Glasgow 1993-2005
--
-----------------------------------------------------------------------------
{-# OPTIONS_GHC -fno-warn-orphans #-}
module X86.Ppr (
pprNatCmmDecl,
pprBasicBlock,
pprSectionHeader,
pprData,
pprInstr,
pprSize,
pprImm,
pprDataItem,
)
where
#include "HsVersions.h"
#include "nativeGen/NCG.h"
import X86.Regs
import X86.Instr
import X86.Cond
import Instruction
import Size
import Reg
import PprBase
import BlockId
import BasicTypes (Alignment)
import DynFlags
import Cmm hiding (topInfoTable)
import CLabel
import Unique ( pprUnique, Uniquable(..) )
import Platform
import FastString
import Outputable
import Data.Word
import Data.Bits
-- -----------------------------------------------------------------------------
-- Printing this stuff out
-- | Print a top-level native declaration: either a data section
-- (section header followed by its statics) or a procedure (its basic
-- blocks, with info-table and dead-strip handling that varies by
-- platform).
pprNatCmmDecl :: NatCmmDecl (Alignment, CmmStatics) Instr -> SDoc
pprNatCmmDecl (CmmData section dats) =
  pprSectionHeader section $$ pprDatas dats
pprNatCmmDecl proc@(CmmProc top_info lbl _ (ListGraph blocks)) =
  case topInfoTable proc of
    Nothing ->
       case blocks of
         []     -> -- special case for split markers:
           pprLabel lbl
         blocks -> -- special case for code without info table:
           pprSectionHeader Text $$
           pprLabel lbl $$ -- blocks guaranteed not null, so label needed
           vcat (map (pprBasicBlock top_info) blocks) $$
           pprSizeDecl lbl
    Just (Statics info_lbl _) ->
      sdocWithPlatform $ \platform ->
      -- On platforms with subsections-via-symbols (Darwin), emit a
      -- dead-strip preventer label before the code.
      (if platformHasSubsectionsViaSymbols platform
       then pprSectionHeader Text $$
            ppr (mkDeadStripPreventer info_lbl) <> char ':'
       else empty) $$
      vcat (map (pprBasicBlock top_info) blocks) $$
      -- above: Even the first block gets a label, because with branch-chain
      -- elimination, it might be the target of a goto.
      (if platformHasSubsectionsViaSymbols platform
       then
       -- See Note [Subsections Via Symbols]
            text "\t.long "
        <+> ppr info_lbl
        <+> char '-'
        <+> ppr (mkDeadStripPreventer info_lbl)
       else empty) $$
      pprSizeDecl info_lbl
-- | Output the ELF @.size@ directive for a label (the symbol's size is
-- computed by the assembler as "here minus the label").  Produces no
-- output on non-ELF targets.
pprSizeDecl :: CLabel -> SDoc
pprSizeDecl lbl
 = sdocWithPlatform $ \platform ->
   if osElfTarget (platformOS platform)
   then ptext (sLit "\t.size") <+> ppr lbl
     <> ptext (sLit ", .-") <> ppr lbl
   else empty
-- | Print one basic block: its info table (if the block id has one in
-- the environment), its temp label, then its instructions.
pprBasicBlock :: BlockEnv CmmStatics -> NatBasicBlock Instr -> SDoc
pprBasicBlock info_env (BasicBlock blockid instrs)
  = maybe_infotable $$
    pprLabel (mkAsmTempLabel (getUnique blockid)) $$
    vcat (map pprInstr instrs)
  where
    -- Info tables precede their block's code in the text section.
    maybe_infotable = case mapLookup blockid info_env of
       Nothing -> empty
       Just (Statics info_lbl info) ->
           pprSectionHeader Text $$
           vcat (map pprData info) $$
           pprLabel info_lbl
-- | Print a static data group: alignment directive, label, then the
-- individual statics.
pprDatas :: (Alignment, CmmStatics) -> SDoc
pprDatas (align, (Statics lbl dats))
 = vcat (pprAlign align : pprLabel lbl : map pprData dats)
 -- TODO: could remove if align == 1
-- | Print one static datum.  Uninitialised space uses @.space@ on
-- Darwin and @.skip@ elsewhere (same semantics, different spelling).
pprData :: CmmStatic -> SDoc
pprData (CmmString str) = pprASCII str
pprData (CmmUninitialised bytes)
 = sdocWithPlatform $ \platform ->
   if platformOS platform == OSDarwin then ptext (sLit ".space ") <> int bytes
                                      else ptext (sLit ".skip ") <> int bytes
pprData (CmmStaticLit lit) = pprDataItem lit
-- | Emit a @.globl@ directive for an externally visible label;
-- labels local to this module produce no output.
pprGloblDecl :: CLabel -> SDoc
pprGloblDecl lbl
  | externallyVisibleCLabel lbl = ptext (sLit ".globl ") <> ppr lbl
  | otherwise                   = empty
-- | Emit an ELF @.type \<lbl\>, \@object@ directive for externally
-- visible labels; empty on non-ELF targets or for local labels.
pprTypeAndSizeDecl :: CLabel -> SDoc
pprTypeAndSizeDecl lbl
  = sdocWithPlatform $ \platform ->
    if osElfTarget (platformOS platform) && externallyVisibleCLabel lbl
    then ptext (sLit ".type ") <> ppr lbl <> ptext (sLit ", @object")
    else empty
-- | Print a label definition: its visibility and type directives,
-- then the label itself followed by a colon.
pprLabel :: CLabel -> SDoc
pprLabel lbl = pprGloblDecl lbl
            $$ pprTypeAndSizeDecl lbl
            $$ (ppr lbl <> char ':')
-- | Print a string as one @.byte@ directive per character, followed by
-- a terminating NUL byte (the trailing @do1 0@).
pprASCII :: [Word8] -> SDoc
pprASCII str
  = vcat (map do1 str) $$ do1 0
    where
       do1 :: Word8 -> SDoc
       do1 w = ptext (sLit "\t.byte\t") <> int (fromIntegral w)
-- | Print an @.align@ directive.  Darwin's assembler interprets the
-- argument as a power of two, so the byte count is converted with
-- 'log2' there; elsewhere the byte count is emitted directly.
pprAlign :: Int -> SDoc
pprAlign bytes
        = sdocWithPlatform $ \platform ->
          ptext (sLit ".align ") <> int (alignment platform)
  where
        alignment platform = if platformOS platform == OSDarwin
                             then log2 bytes
                             else bytes
-- | Floor of the base-2 logarithm of a positive Int (alignments are
-- always >= 1 here; arguments < 1 diverge, as before).
log2 :: Int -> Int
log2 = go 0
  where
    -- Accumulate the count of halvings until the value reaches 1.
    go acc 1 = acc
    go acc m = go (acc + 1) (m `quot` 2)
-- -----------------------------------------------------------------------------
-- pprInstr: print an 'Instr'
-- | Instructions render via the native pretty-printer 'pprInstr'.
instance Outputable Instr where
    ppr instr = pprInstr instr
-- | Print a register name in AT&T syntax, choosing the size-specific
-- spelling (%al / %ax / %eax / %rax, etc.) for real registers and a
-- synthetic "%v..." name for virtual registers.  "fake" and "naughty"
-- names mark registers that should never survive to real output.
pprReg :: Size -> Reg -> SDoc
pprReg s r
  = case r of
      RegReal    (RealRegSingle i) ->
          sdocWithPlatform $ \platform ->
          if target32Bit platform then ppr32_reg_no s i
                                  else ppr64_reg_no s i
      RegReal    (RealRegPair _ _) -> panic "X86.Ppr: no reg pairs on this arch"
      RegVirtual (VirtualRegI  u)  -> text "%vI_" <> pprUnique u
      RegVirtual (VirtualRegHi u)  -> text "%vHi_" <> pprUnique u
      RegVirtual (VirtualRegF  u)  -> text "%vF_" <> pprUnique u
      RegVirtual (VirtualRegD  u)  -> text "%vD_" <> pprUnique u
      RegVirtual (VirtualRegSSE u) -> text "%vSSE_" <> pprUnique u
  where
    -- 32-bit target: pick the register table for the operand size.
    ppr32_reg_no :: Size -> Int -> SDoc
    ppr32_reg_no II8   = ppr32_reg_byte
    ppr32_reg_no II16  = ppr32_reg_word
    ppr32_reg_no _     = ppr32_reg_long
    ppr32_reg_byte i = ptext
      (case i of {
         0 -> sLit "%al";     1 -> sLit "%bl";
         2 -> sLit "%cl";     3 -> sLit "%dl";
         _ -> sLit "very naughty I386 byte register"
      })
    ppr32_reg_word i = ptext
      (case i of {
         0 -> sLit "%ax";     1 -> sLit "%bx";
         2 -> sLit "%cx";     3 -> sLit "%dx";
         4 -> sLit "%si";     5 -> sLit "%di";
         6 -> sLit "%bp";     7 -> sLit "%sp";
         _ -> sLit "very naughty I386 word register"
      })
    ppr32_reg_long i = ptext
      (case i of {
         0 -> sLit "%eax";    1 -> sLit "%ebx";
         2 -> sLit "%ecx";    3 -> sLit "%edx";
         4 -> sLit "%esi";    5 -> sLit "%edi";
         6 -> sLit "%ebp";    7 -> sLit "%esp";
         _ -> ppr_reg_float i
      })
    -- 64-bit target: pick the register table for the operand size.
    ppr64_reg_no :: Size -> Int -> SDoc
    ppr64_reg_no II8   = ppr64_reg_byte
    ppr64_reg_no II16  = ppr64_reg_word
    ppr64_reg_no II32  = ppr64_reg_long
    ppr64_reg_no _     = ppr64_reg_quad
    ppr64_reg_byte i = ptext
      (case i of {
         0  -> sLit "%al";    1  -> sLit "%bl";
         2  -> sLit "%cl";    3  -> sLit "%dl";
         4  -> sLit "%sil";   5  -> sLit "%dil"; -- new 8-bit regs!
         6  -> sLit "%bpl";   7  -> sLit "%spl";
         8  -> sLit "%r8b";   9  -> sLit "%r9b";
        10  -> sLit "%r10b"; 11  -> sLit "%r11b";
        12  -> sLit "%r12b"; 13  -> sLit "%r13b";
        14  -> sLit "%r14b"; 15  -> sLit "%r15b";
         _  -> sLit "very naughty x86_64 byte register"
      })
    ppr64_reg_word i = ptext
      (case i of {
         0  -> sLit "%ax";    1  -> sLit "%bx";
         2  -> sLit "%cx";    3  -> sLit "%dx";
         4  -> sLit "%si";    5  -> sLit "%di";
         6  -> sLit "%bp";    7  -> sLit "%sp";
         8  -> sLit "%r8w";   9  -> sLit "%r9w";
        10  -> sLit "%r10w"; 11  -> sLit "%r11w";
        12  -> sLit "%r12w"; 13  -> sLit "%r13w";
        14  -> sLit "%r14w"; 15  -> sLit "%r15w";
         _  -> sLit "very naughty x86_64 word register"
      })
    ppr64_reg_long i = ptext
      (case i of {
         0  -> sLit "%eax";   1  -> sLit "%ebx";
         2  -> sLit "%ecx";   3  -> sLit "%edx";
         4  -> sLit "%esi";   5  -> sLit "%edi";
         6  -> sLit "%ebp";   7  -> sLit "%esp";
         8  -> sLit "%r8d";   9  -> sLit "%r9d";
        10  -> sLit "%r10d"; 11  -> sLit "%r11d";
        12  -> sLit "%r12d"; 13  -> sLit "%r13d";
        14  -> sLit "%r14d"; 15  -> sLit "%r15d";
         _  -> sLit "very naughty x86_64 register"
      })
    ppr64_reg_quad i = ptext
      (case i of {
         0  -> sLit "%rax";   1  -> sLit "%rbx";
         2  -> sLit "%rcx";   3  -> sLit "%rdx";
         4  -> sLit "%rsi";   5  -> sLit "%rdi";
         6  -> sLit "%rbp";   7  -> sLit "%rsp";
         8  -> sLit "%r8";    9  -> sLit "%r9";
        10  -> sLit "%r10";  11  -> sLit "%r11";
        12  -> sLit "%r12";  13  -> sLit "%r13";
        14  -> sLit "%r14";  15  -> sLit "%r15";
         _  -> ppr_reg_float i
      })
    -- Floating registers: 16-21 are the fake x87 stack registers,
    -- 24-39 the SSE %xmm registers.
    ppr_reg_float :: Int -> LitString
    ppr_reg_float i = case i of
        16 -> sLit "%fake0";  17 -> sLit "%fake1"
        18 -> sLit "%fake2";  19 -> sLit "%fake3"
        20 -> sLit "%fake4";  21 -> sLit "%fake5"
        24 -> sLit "%xmm0";   25 -> sLit "%xmm1"
        26 -> sLit "%xmm2";   27 -> sLit "%xmm3"
        28 -> sLit "%xmm4";   29 -> sLit "%xmm5"
        30 -> sLit "%xmm6";   31 -> sLit "%xmm7"
        32 -> sLit "%xmm8";   33 -> sLit "%xmm9"
        34 -> sLit "%xmm10";  35 -> sLit "%xmm11"
        36 -> sLit "%xmm12";  37 -> sLit "%xmm13"
        38 -> sLit "%xmm14";  39 -> sLit "%xmm15"
        _  -> sLit "very naughty x86 register"
-- | Print the AT&T operand-size suffix for an instruction mnemonic.
pprSize :: Size -> SDoc
pprSize x
 = ptext (case x of
                II8   -> sLit "b"
                II16  -> sLit "w"
                II32  -> sLit "l"
                II64  -> sLit "q"
                FF32  -> sLit "ss"      -- "scalar single-precision float" (SSE2)
                FF64  -> sLit "sd"      -- "scalar double-precision float" (SSE2)
                FF80  -> sLit "t"
                )
-- | Print the size suffix used by x87 (FPU-stack) instructions, which
-- differs from the SSE suffixes of 'pprSize'.  Only defined for the
-- floating-point sizes.
pprSize_x87 :: Size -> SDoc
pprSize_x87 x
  = ptext $ case x of
                FF32  -> sLit "s"
                FF64  -> sLit "l"
                FF80  -> sLit "t"
                _     -> panic "X86.Ppr.pprSize_x87"
-- | Print the condition-code suffix used by Jcc/SETcc/CMOVcc.
-- Note the trick for ALWAYS: it maps to "mp" so that prefixing with
-- "j" (see the JXX cases of pprInstr) yields an unconditional "jmp".
pprCond :: Cond -> SDoc
pprCond c
 = ptext (case c of {
                GEU     -> sLit "ae";   LU    -> sLit "b";
                EQQ     -> sLit "e";    GTT   -> sLit "g";
                GE      -> sLit "ge";   GU    -> sLit "a";
                LTT     -> sLit "l";    LE    -> sLit "le";
                LEU     -> sLit "be";   NE    -> sLit "ne";
                NEG     -> sLit "s";    POS   -> sLit "ns";
                CARRY   -> sLit "c";    OFLO  -> sLit "o";
                PARITY  -> sLit "p";    NOTPARITY -> sLit "np";
                ALWAYS  -> sLit "mp"})
-- | Print an immediate operand.  Floating immediates have no direct
-- x86 encoding at this point and render as "naughty" markers, which
-- would be an assembler error if they ever reached real output.
pprImm :: Imm -> SDoc
pprImm (ImmInt i)     = int i
pprImm (ImmInteger i) = integer i
pprImm (ImmCLbl l)    = ppr l
pprImm (ImmIndex l i) = ppr l <> char '+' <> int i
pprImm (ImmLit s)     = s
pprImm (ImmFloat _)  = ptext (sLit "naughty float immediate")
pprImm (ImmDouble _) = ptext (sLit "naughty double immediate")
pprImm (ImmConstantSum a b) = pprImm a <> char '+' <> pprImm b
pprImm (ImmConstantDiff a b) = pprImm a <> char '-'
                            <> lparen <> pprImm b <> rparen
-- | Print an addressing mode in AT&T syntax: an absolute immediate
-- (optionally offset), or @disp(base,index,scale)@ with whichever of
-- base/index are present.
pprAddr :: AddrMode -> SDoc
pprAddr (ImmAddr imm off)
  = let pp_imm = pprImm imm
    in
    if (off == 0) then
        pp_imm
    else if (off < 0) then
        pp_imm <> int off
    else
        pp_imm <> char '+' <> int off
pprAddr (AddrBaseIndex base index displacement)
  = sdocWithPlatform $ \platform ->
    let
        pp_disp  = ppr_disp displacement
        pp_off p = pp_disp <> char '(' <> p <> char ')'
        pp_reg r = pprReg (archWordSize (target32Bit platform)) r
    in
    case (base, index) of
      (EABaseNone,  EAIndexNone) -> pp_disp
      (EABaseReg b, EAIndexNone) -> pp_off (pp_reg b)
      (EABaseRip,   EAIndexNone) -> pp_off (ptext (sLit "%rip"))
      (EABaseNone,  EAIndex r i) -> pp_off (comma <> pp_reg r <> comma <> int i)
      (EABaseReg b, EAIndex r i) -> pp_off (pp_reg b <> comma <> pp_reg r
                                       <> comma <> int i)
      _                          -> panic "X86.Ppr.pprAddr: no match"
  where
    -- A zero displacement is simply omitted.
    ppr_disp (ImmInt 0) = empty
    ppr_disp imm        = pprImm imm
-- | Print the assembler directives that switch to a given section,
-- including the section's alignment.  Darwin uses its own section
-- names (@.const@ etc.) and power-of-two @.align@ arguments; other
-- targets use ELF-style @.section@ names with byte alignments.
pprSectionHeader :: Section -> SDoc
pprSectionHeader seg =
 sdocWithPlatform $ \platform ->
 case platformOS platform of
  OSDarwin
   | target32Bit platform ->
      case seg of
       Text                    -> text ".text\n\t.align 2"
       Data                    -> text ".data\n\t.align 2"
       ReadOnlyData            -> text ".const\n\t.align 2"
       RelocatableReadOnlyData
                               -> text ".const_data\n\t.align 2"
       UninitialisedData       -> text ".data\n\t.align 2"
       ReadOnlyData16          -> text ".const\n\t.align 4"
       OtherSection _          -> panic "X86.Ppr.pprSectionHeader: unknown section"
   | otherwise ->
      case seg of
       Text                    -> text ".text\n\t.align 3"
       Data                    -> text ".data\n\t.align 3"
       ReadOnlyData            -> text ".const\n\t.align 3"
       RelocatableReadOnlyData
                               -> text ".const_data\n\t.align 3"
       UninitialisedData       -> text ".data\n\t.align 3"
       ReadOnlyData16          -> text ".const\n\t.align 4"
       OtherSection _          -> panic "PprMach.pprSectionHeader: unknown section"
  _
   | target32Bit platform ->
      case seg of
       Text                    -> text ".text\n\t.align 4,0x90"
       Data                    -> text ".data\n\t.align 4"
       ReadOnlyData            -> text ".section .rodata\n\t.align 4"
       RelocatableReadOnlyData
                               -> text ".section .data\n\t.align 4"
       UninitialisedData       -> text ".section .bss\n\t.align 4"
       ReadOnlyData16          -> text ".section .rodata\n\t.align 16"
       OtherSection _          -> panic "X86.Ppr.pprSectionHeader: unknown section"
   | otherwise ->
      case seg of
       Text                    -> text ".text\n\t.align 8"
       Data                    -> text ".data\n\t.align 8"
       ReadOnlyData            -> text ".section .rodata\n\t.align 8"
       RelocatableReadOnlyData
                               -> text ".section .data\n\t.align 8"
       UninitialisedData       -> text ".section .bss\n\t.align 8"
       ReadOnlyData16          -> text ".section .rodata.cst16\n\t.align 16"
       OtherSection _          -> panic "PprMach.pprSectionHeader: unknown section"
-- | Print a static literal, fetching the DynFlags needed to size it.
pprDataItem :: CmmLit -> SDoc
pprDataItem lit = sdocWithDynFlags $ \dflags -> pprDataItem' dflags lit
-- | Print one static literal as the assembler directive(s) appropriate
-- to its size and the target platform.
pprDataItem' :: DynFlags -> CmmLit -> SDoc
pprDataItem' dflags lit
  = vcat (ppr_item (cmmTypeSize $ cmmLitType dflags lit) lit)
    where
        platform = targetPlatform dflags
        imm = litToImm lit
        -- These seem to be common:
        ppr_item II8   _ = [ptext (sLit "\t.byte\t") <> pprImm imm]
        ppr_item II16  _ = [ptext (sLit "\t.word\t") <> pprImm imm]
        ppr_item II32  _ = [ptext (sLit "\t.long\t") <> pprImm imm]
        -- Floats are emitted byte-by-byte, sidestepping any assembler
        -- rounding or encoding differences.
        ppr_item FF32  (CmmFloat r _)
           = let bs = floatToBytes (fromRational r)
             in  map (\b -> ptext (sLit "\t.byte\t") <> pprImm (ImmInt b)) bs
        ppr_item FF64 (CmmFloat r _)
           = let bs = doubleToBytes (fromRational r)
             in  map (\b -> ptext (sLit "\t.byte\t") <> pprImm (ImmInt b)) bs
        ppr_item II64 _
           = case platformOS platform of
             OSDarwin
              | target32Bit platform ->
                 -- 32-bit Darwin: emit the 64-bit value as two .long
                 -- halves, low word first.
                 case lit of
                 CmmInt x _ ->
                     [ptext (sLit "\t.long\t")
                         <> int (fromIntegral (fromIntegral x :: Word32)),
                      ptext (sLit "\t.long\t")
                         <> int (fromIntegral
                             (fromIntegral (x `shiftR` 32) :: Word32))]
                 _ -> panic "X86.Ppr.ppr_item: no match for II64"
              | otherwise ->
                 [ptext (sLit "\t.quad\t") <> pprImm imm]
             _
              | target32Bit platform ->
                 [ptext (sLit "\t.quad\t") <> pprImm imm]
              | otherwise ->
                 -- x86_64: binutils can't handle the R_X86_64_PC64
                 -- relocation type, which means we can't do
                 -- pc-relative 64-bit addresses. Fortunately we're
                 -- assuming the small memory model, in which all such
                 -- offsets will fit into 32 bits, so we have to stick
                 -- to 32-bit offset fields and modify the RTS
                 -- appropriately
                 --
                 -- See Note [x86-64-relative] in includes/rts/storage/InfoTables.h
                 --
                 case lit of
                 -- A relative relocation:
                 CmmLabelDiffOff _ _ _ ->
                     [ptext (sLit "\t.long\t") <> pprImm imm,
                      ptext (sLit "\t.long\t0")]
                 _ ->
                     [ptext (sLit "\t.quad\t") <> pprImm imm]
        ppr_item _ _
                = panic "X86.Ppr.ppr_item: no match"
pprInstr :: Instr -> SDoc
pprInstr (COMMENT _) = empty -- nuke 'em
{-
pprInstr (COMMENT s) = ptext (sLit "# ") <> ftext s
-}
pprInstr (DELTA d)
= pprInstr (COMMENT (mkFastString ("\tdelta = " ++ show d)))
pprInstr (NEWBLOCK _)
= panic "PprMach.pprInstr: NEWBLOCK"
pprInstr (LDATA _ _)
= panic "PprMach.pprInstr: LDATA"
{-
pprInstr (SPILL reg slot)
= hcat [
ptext (sLit "\tSPILL"),
char ' ',
pprUserReg reg,
comma,
ptext (sLit "SLOT") <> parens (int slot)]
pprInstr (RELOAD slot reg)
= hcat [
ptext (sLit "\tRELOAD"),
char ' ',
ptext (sLit "SLOT") <> parens (int slot),
comma,
pprUserReg reg]
-}
-- Replace 'mov $0x0,%reg' by 'xor %reg,%reg', which is smaller and cheaper.
-- The code generator catches most of these already, but not all.
pprInstr (MOV size (OpImm (ImmInt 0)) dst@(OpReg _))
= pprInstr (XOR size' dst dst)
where size' = case size of
II64 -> II32 -- 32-bit version is equivalent, and smaller
_ -> size
pprInstr (MOV size src dst)
= pprSizeOpOp (sLit "mov") size src dst
pprInstr (CMOV cc size src dst)
= pprCondOpReg (sLit "cmov") size cc src dst
pprInstr (MOVZxL II32 src dst) = pprSizeOpOp (sLit "mov") II32 src dst
-- 32-to-64 bit zero extension on x86_64 is accomplished by a simple
-- movl. But we represent it as a MOVZxL instruction, because
-- the reg alloc would tend to throw away a plain reg-to-reg
-- move, and we still want it to do that.
pprInstr (MOVZxL sizes src dst) = pprSizeOpOpCoerce (sLit "movz") sizes II32 src dst
-- zero-extension only needs to extend to 32 bits: on x86_64,
-- the remaining zero-extension to 64 bits is automatic, and the 32-bit
-- instruction is shorter.
pprInstr (MOVSxL sizes src dst)
= sdocWithPlatform $ \platform ->
pprSizeOpOpCoerce (sLit "movs") sizes (archWordSize (target32Bit platform)) src dst
-- here we do some patching, since the physical registers are only set late
-- in the code generation.
pprInstr (LEA size (OpAddr (AddrBaseIndex (EABaseReg reg1) (EAIndex reg2 1) (ImmInt 0))) dst@(OpReg reg3))
| reg1 == reg3
= pprSizeOpOp (sLit "add") size (OpReg reg2) dst
pprInstr (LEA size (OpAddr (AddrBaseIndex (EABaseReg reg1) (EAIndex reg2 1) (ImmInt 0))) dst@(OpReg reg3))
| reg2 == reg3
= pprSizeOpOp (sLit "add") size (OpReg reg1) dst
pprInstr (LEA size (OpAddr (AddrBaseIndex (EABaseReg reg1) EAIndexNone displ)) dst@(OpReg reg3))
| reg1 == reg3
= pprInstr (ADD size (OpImm displ) dst)
pprInstr (LEA size src dst) = pprSizeOpOp (sLit "lea") size src dst
pprInstr (ADD size (OpImm (ImmInt (-1))) dst)
= pprSizeOp (sLit "dec") size dst
pprInstr (ADD size (OpImm (ImmInt 1)) dst)
= pprSizeOp (sLit "inc") size dst
pprInstr (ADD size src dst) = pprSizeOpOp (sLit "add") size src dst
pprInstr (ADC size src dst) = pprSizeOpOp (sLit "adc") size src dst
pprInstr (SUB size src dst) = pprSizeOpOp (sLit "sub") size src dst
pprInstr (SBB size src dst) = pprSizeOpOp (sLit "sbb") size src dst
pprInstr (IMUL size op1 op2) = pprSizeOpOp (sLit "imul") size op1 op2
pprInstr (ADD_CC size src dst)
= pprSizeOpOp (sLit "add") size src dst
pprInstr (SUB_CC size src dst)
= pprSizeOpOp (sLit "sub") size src dst
{- A hack. The Intel documentation says that "The two and three
operand forms [of IMUL] may also be used with unsigned operands
because the lower half of the product is the same regardless if
(sic) the operands are signed or unsigned. The CF and OF flags,
however, cannot be used to determine if the upper half of the
result is non-zero." So there.
-}
-- Use a 32-bit instruction when possible as it saves a byte.
-- Notably, extracting the tag bits of a pointer has this form.
-- TODO: we could save a byte in a subsequent CMP instruction too,
-- but need something like a peephole pass for this
pprInstr (AND II64 src@(OpImm (ImmInteger mask)) dst)
| 0 <= mask && mask < 0xffffffff
= pprInstr (AND II32 src dst)
pprInstr (AND size src dst) = pprSizeOpOp (sLit "and") size src dst
pprInstr (OR size src dst) = pprSizeOpOp (sLit "or") size src dst
pprInstr (XOR FF32 src dst) = pprOpOp (sLit "xorps") FF32 src dst
pprInstr (XOR FF64 src dst) = pprOpOp (sLit "xorpd") FF64 src dst
pprInstr (XOR size src dst) = pprSizeOpOp (sLit "xor") size src dst
pprInstr (POPCNT size src dst) = pprOpOp (sLit "popcnt") size src (OpReg dst)
pprInstr (BSF size src dst) = pprOpOp (sLit "bsf") size src (OpReg dst)
pprInstr (BSR size src dst) = pprOpOp (sLit "bsr") size src (OpReg dst)
pprInstr (PREFETCH NTA size src ) = pprSizeOp_ (sLit "prefetchnta") size src
pprInstr (PREFETCH Lvl0 size src) = pprSizeOp_ (sLit "prefetcht0") size src
pprInstr (PREFETCH Lvl1 size src) = pprSizeOp_ (sLit "prefetcht1") size src
pprInstr (PREFETCH Lvl2 size src) = pprSizeOp_ (sLit "prefetcht2") size src
pprInstr (NOT size op) = pprSizeOp (sLit "not") size op
pprInstr (BSWAP size op) = pprSizeOp (sLit "bswap") size (OpReg op)
pprInstr (NEGI size op) = pprSizeOp (sLit "neg") size op
pprInstr (SHL size src dst) = pprShift (sLit "shl") size src dst
pprInstr (SAR size src dst) = pprShift (sLit "sar") size src dst
pprInstr (SHR size src dst) = pprShift (sLit "shr") size src dst
pprInstr (BT size imm src) = pprSizeImmOp (sLit "bt") size imm src
pprInstr (CMP size src dst)
| is_float size = pprSizeOpOp (sLit "ucomi") size src dst -- SSE2
| otherwise = pprSizeOpOp (sLit "cmp") size src dst
where
-- This predicate is needed here and nowhere else
is_float FF32 = True
is_float FF64 = True
is_float FF80 = True
is_float _ = False
pprInstr (TEST size src dst) = sdocWithPlatform $ \platform ->
let size' = case (src,dst) of
-- Match instructions like 'test $0x3,%esi' or 'test $0x7,%rbx'.
-- We can replace them by equivalent, but smaller instructions
-- by reducing the size of the immediate operand as far as possible.
-- (We could handle masks larger than a single byte too,
-- but it would complicate the code considerably
-- and tag checks are by far the most common case.)
(OpImm (ImmInteger mask), OpReg dstReg)
| 0 <= mask && mask < 256 -> minSizeOfReg platform dstReg
_ -> size
in pprSizeOpOp (sLit "test") size' src dst
where
minSizeOfReg platform (RegReal (RealRegSingle i))
| target32Bit platform && i <= 3 = II8 -- al, bl, cl, dl
| target32Bit platform && i <= 7 = II16 -- si, di, bp, sp
| not (target32Bit platform) && i <= 15 = II8 -- al .. r15b
minSizeOfReg _ _ = size -- other
pprInstr (PUSH size op) = pprSizeOp (sLit "push") size op
pprInstr (POP size op) = pprSizeOp (sLit "pop") size op
-- both unused (SDM):
-- pprInstr PUSHA = ptext (sLit "\tpushal")
-- pprInstr POPA = ptext (sLit "\tpopal")
pprInstr NOP = ptext (sLit "\tnop")
pprInstr (CLTD II32) = ptext (sLit "\tcltd")
pprInstr (CLTD II64) = ptext (sLit "\tcqto")
pprInstr (SETCC cond op) = pprCondInstr (sLit "set") cond (pprOperand II8 op)
pprInstr (JXX cond blockid)
= pprCondInstr (sLit "j") cond (ppr lab)
where lab = mkAsmTempLabel (getUnique blockid)
pprInstr (JXX_GBL cond imm) = pprCondInstr (sLit "j") cond (pprImm imm)
pprInstr (JMP (OpImm imm) _) = ptext (sLit "\tjmp ") <> pprImm imm
pprInstr (JMP op _) = sdocWithPlatform $ \platform ->
ptext (sLit "\tjmp *") <> pprOperand (archWordSize (target32Bit platform)) op
pprInstr (JMP_TBL op _ _ _) = pprInstr (JMP op [])
pprInstr (CALL (Left imm) _) = ptext (sLit "\tcall ") <> pprImm imm
pprInstr (CALL (Right reg) _) = sdocWithPlatform $ \platform ->
ptext (sLit "\tcall *") <> pprReg (archWordSize (target32Bit platform)) reg
pprInstr (IDIV sz op) = pprSizeOp (sLit "idiv") sz op
pprInstr (DIV sz op) = pprSizeOp (sLit "div") sz op
pprInstr (IMUL2 sz op) = pprSizeOp (sLit "imul") sz op
-- x86_64 only
pprInstr (MUL size op1 op2) = pprSizeOpOp (sLit "mul") size op1 op2
pprInstr (MUL2 size op) = pprSizeOp (sLit "mul") size op
pprInstr (FDIV size op1 op2) = pprSizeOpOp (sLit "div") size op1 op2
pprInstr (CVTSS2SD from to) = pprRegReg (sLit "cvtss2sd") from to
pprInstr (CVTSD2SS from to) = pprRegReg (sLit "cvtsd2ss") from to
pprInstr (CVTTSS2SIQ sz from to) = pprSizeSizeOpReg (sLit "cvttss2si") FF32 sz from to
pprInstr (CVTTSD2SIQ sz from to) = pprSizeSizeOpReg (sLit "cvttsd2si") FF64 sz from to
pprInstr (CVTSI2SS sz from to) = pprSizeOpReg (sLit "cvtsi2ss") sz from to
pprInstr (CVTSI2SD sz from to) = pprSizeOpReg (sLit "cvtsi2sd") sz from to
-- FETCHGOT for PIC on ELF platforms
pprInstr (FETCHGOT reg)
= vcat [ ptext (sLit "\tcall 1f"),
hcat [ ptext (sLit "1:\tpopl\t"), pprReg II32 reg ],
hcat [ ptext (sLit "\taddl\t$_GLOBAL_OFFSET_TABLE_+(.-1b), "),
pprReg II32 reg ]
]
-- FETCHPC for PIC on Darwin/x86
-- get the instruction pointer into a register
-- (Terminology note: the IP is called Program Counter on PPC,
-- and it's a good thing to use the same name on both platforms)
pprInstr (FETCHPC reg)
= vcat [ ptext (sLit "\tcall 1f"),
hcat [ ptext (sLit "1:\tpopl\t"), pprReg II32 reg ]
]
-- -----------------------------------------------------------------------------
-- i386 floating-point
-- Simulating a flat register set on the x86 FP stack is tricky.
-- you have to free %st(7) before pushing anything on the FP reg stack
-- so as to preclude the possibility of a FP stack overflow exception.
pprInstr g@(GMOV src dst)
| src == dst
= empty
| otherwise
= pprG g (hcat [gtab, gpush src 0, gsemi, gpop dst 1])
-- GLD sz addr dst ==> FLDsz addr ; FSTP (dst+1)
pprInstr g@(GLD sz addr dst)
= pprG g (hcat [gtab, text "fld", pprSize_x87 sz, gsp,
pprAddr addr, gsemi, gpop dst 1])
-- GST sz src addr ==> FLD dst ; FSTPsz addr
pprInstr g@(GST sz src addr)
| src == fake0 && sz /= FF80 -- fstt instruction doesn't exist
= pprG g (hcat [gtab,
text "fst", pprSize_x87 sz, gsp, pprAddr addr])
| otherwise
= pprG g (hcat [gtab, gpush src 0, gsemi,
text "fstp", pprSize_x87 sz, gsp, pprAddr addr])
pprInstr g@(GLDZ dst)
= pprG g (hcat [gtab, text "fldz ; ", gpop dst 1])
pprInstr g@(GLD1 dst)
= pprG g (hcat [gtab, text "fld1 ; ", gpop dst 1])
pprInstr (GFTOI src dst)
= pprInstr (GDTOI src dst)
pprInstr g@(GDTOI src dst)
= pprG g (vcat [
hcat [gtab, text "subl $8, %esp ; fnstcw 4(%esp)"],
hcat [gtab, gpush src 0],
hcat [gtab, text "movzwl 4(%esp), ", reg,
text " ; orl $0xC00, ", reg],
hcat [gtab, text "movl ", reg, text ", 0(%esp) ; fldcw 0(%esp)"],
hcat [gtab, text "fistpl 0(%esp)"],
hcat [gtab, text "fldcw 4(%esp) ; movl 0(%esp), ", reg],
hcat [gtab, text "addl $8, %esp"]
])
where
reg = pprReg II32 dst
pprInstr (GITOF src dst)
= pprInstr (GITOD src dst)
pprInstr g@(GITOD src dst)
= pprG g (hcat [gtab, text "pushl ", pprReg II32 src,
text " ; fildl (%esp) ; ",
gpop dst 1, text " ; addl $4,%esp"])
pprInstr g@(GDTOF src dst)
= pprG g (vcat [gtab <> gpush src 0,
gtab <> text "subl $4,%esp ; fstps (%esp) ; flds (%esp) ; addl $4,%esp ;",
gtab <> gpop dst 1])
{- Gruesome swamp follows. If you're unfortunate enough to have ventured
this far into the jungle AND you give a Rat's Ass (tm) what's going
on, here's the deal. Generate code to do a floating point comparison
of src1 and src2, of kind cond, and set the Zero flag if true.
The complications are to do with handling NaNs correctly. We want the
property that if either argument is NaN, then the result of the
comparison is False ... except if we're comparing for inequality,
in which case the answer is True.
Here's how the general (non-inequality) case works. As an
example, consider generating the an equality test:
pushl %eax -- we need to mess with this
<get src1 to top of FPU stack>
fcomp <src2 location in FPU stack> and pop pushed src1
-- Result of comparison is in FPU Status Register bits
-- C3 C2 and C0
fstsw %ax -- Move FPU Status Reg to %ax
sahf -- move C3 C2 C0 from %ax to integer flag reg
-- now the serious magic begins
setpo %ah -- %ah = if comparable(neither arg was NaN) then 1 else 0
sete %al -- %al = if arg1 == arg2 then 1 else 0
andb %ah,%al -- %al &= %ah
-- so %al == 1 iff (comparable && same); else it holds 0
decb %al -- %al == 0, ZeroFlag=1 iff (comparable && same);
else %al == 0xFF, ZeroFlag=0
-- the zero flag is now set as we desire.
popl %eax
The special case of inequality differs thusly:
setpe %ah -- %ah = if incomparable(either arg was NaN) then 1 else 0
setne %al -- %al = if arg1 /= arg2 then 1 else 0
orb %ah,%al -- %al = if (incomparable || different) then 1 else 0
decb %al -- if (incomparable || different) then (%al == 0, ZF=1)
else (%al == 0xFF, ZF=0)
-}
pprInstr g@(GCMP cond src1 src2)
| case cond of { NE -> True; _ -> False }
= pprG g (vcat [
hcat [gtab, text "pushl %eax ; ",gpush src1 0],
hcat [gtab, text "fcomp ", greg src2 1,
text "; fstsw %ax ; sahf ; setpe %ah"],
hcat [gtab, text "setne %al ; ",
text "orb %ah,%al ; decb %al ; popl %eax"]
])
| otherwise
= pprG g (vcat [
hcat [gtab, text "pushl %eax ; ",gpush src1 0],
hcat [gtab, text "fcomp ", greg src2 1,
text "; fstsw %ax ; sahf ; setpo %ah"],
hcat [gtab, text "set", pprCond (fix_FP_cond cond), text " %al ; ",
text "andb %ah,%al ; decb %al ; popl %eax"]
])
where
{- On the 486, the flags set by FP compare are the unsigned ones!
(This looks like a HACK to me. WDP 96/03)
-}
fix_FP_cond :: Cond -> Cond
fix_FP_cond GE = GEU
fix_FP_cond GTT = GU
fix_FP_cond LTT = LU
fix_FP_cond LE = LEU
fix_FP_cond EQQ = EQQ
fix_FP_cond NE = NE
fix_FP_cond _ = panic "X86.Ppr.fix_FP_cond: no match"
-- there should be no others
pprInstr g@(GABS _ src dst)
= pprG g (hcat [gtab, gpush src 0, text " ; fabs ; ", gpop dst 1])
pprInstr g@(GNEG _ src dst)
= pprG g (hcat [gtab, gpush src 0, text " ; fchs ; ", gpop dst 1])
pprInstr g@(GSQRT sz src dst)
= pprG g (hcat [gtab, gpush src 0, text " ; fsqrt"] $$
hcat [gtab, gcoerceto sz, gpop dst 1])
pprInstr g@(GSIN sz l1 l2 src dst)
= pprG g (pprTrigOp "fsin" False l1 l2 src dst sz)
pprInstr g@(GCOS sz l1 l2 src dst)
= pprG g (pprTrigOp "fcos" False l1 l2 src dst sz)
pprInstr g@(GTAN sz l1 l2 src dst)
= pprG g (pprTrigOp "fptan" True l1 l2 src dst sz)
-- In the translations for GADD, GMUL, GSUB and GDIV,
-- the first two cases are mere optimisations. The otherwise clause
-- generates correct code under all circumstances.
pprInstr g@(GADD _ src1 src2 dst)
| src1 == dst
= pprG g (text "\t#GADD-xxxcase1" $$
hcat [gtab, gpush src2 0,
text " ; faddp %st(0),", greg src1 1])
| src2 == dst
= pprG g (text "\t#GADD-xxxcase2" $$
hcat [gtab, gpush src1 0,
text " ; faddp %st(0),", greg src2 1])
| otherwise
= pprG g (hcat [gtab, gpush src1 0,
text " ; fadd ", greg src2 1, text ",%st(0)",
gsemi, gpop dst 1])
pprInstr g@(GMUL _ src1 src2 dst)
| src1 == dst
= pprG g (text "\t#GMUL-xxxcase1" $$
hcat [gtab, gpush src2 0,
text " ; fmulp %st(0),", greg src1 1])
| src2 == dst
= pprG g (text "\t#GMUL-xxxcase2" $$
hcat [gtab, gpush src1 0,
text " ; fmulp %st(0),", greg src2 1])
| otherwise
= pprG g (hcat [gtab, gpush src1 0,
text " ; fmul ", greg src2 1, text ",%st(0)",
gsemi, gpop dst 1])
pprInstr g@(GSUB _ src1 src2 dst)
| src1 == dst
= pprG g (text "\t#GSUB-xxxcase1" $$
hcat [gtab, gpush src2 0,
text " ; fsubrp %st(0),", greg src1 1])
| src2 == dst
= pprG g (text "\t#GSUB-xxxcase2" $$
hcat [gtab, gpush src1 0,
text " ; fsubp %st(0),", greg src2 1])
| otherwise
= pprG g (hcat [gtab, gpush src1 0,
text " ; fsub ", greg src2 1, text ",%st(0)",
gsemi, gpop dst 1])
pprInstr g@(GDIV _ src1 src2 dst)
| src1 == dst
= pprG g (text "\t#GDIV-xxxcase1" $$
hcat [gtab, gpush src2 0,
text " ; fdivrp %st(0),", greg src1 1])
| src2 == dst
= pprG g (text "\t#GDIV-xxxcase2" $$
hcat [gtab, gpush src1 0,
text " ; fdivp %st(0),", greg src2 1])
| otherwise
= pprG g (hcat [gtab, gpush src1 0,
text " ; fdiv ", greg src2 1, text ",%st(0)",
gsemi, gpop dst 1])
pprInstr GFREE
= vcat [ ptext (sLit "\tffree %st(0) ;ffree %st(1) ;ffree %st(2) ;ffree %st(3)"),
ptext (sLit "\tffree %st(4) ;ffree %st(5)")
]
-- Atomics
pprInstr (LOCK i) = ptext (sLit "\tlock") $$ pprInstr i
pprInstr MFENCE = ptext (sLit "\tmfence")
pprInstr (XADD size src dst) = pprSizeOpOp (sLit "xadd") size src dst
pprInstr (CMPXCHG size src dst) = pprSizeOpOp (sLit "cmpxchg") size src dst
pprInstr _
= panic "X86.Ppr.pprInstr: no match"
-- | Emit the x87 instruction sequence for a trigonometric operation
-- (@fsin@, @fcos@ or @fptan@), including the argument-reduction loop
-- that is needed when the C2 status flag reports |value| >= 2^63.
pprTrigOp :: String -> Bool -> CLabel -> CLabel
          -> Reg -> Reg -> Size -> SDoc
pprTrigOp op -- fsin, fcos or fptan
          isTan -- we need a couple of extra steps if we're doing tan
          l1 l2 -- internal labels for us to use
          src dst sz
    = -- We'll be needing %eax later on
      hcat [gtab, text "pushl %eax;"] $$
      -- tan is going to use an extra space on the FP stack
      (if isTan then hcat [gtab, text "ffree %st(6)"] else empty) $$
      -- First put the value in %st(0) and try to apply the op to it
      hcat [gpush src 0, text ("; " ++ op)] $$
      -- Now look to see if C2 was set (overflow, |value| >= 2^63)
      hcat [gtab, text "fnstsw %ax"] $$
      hcat [gtab, text "test $0x400,%eax"] $$
      -- If we were in bounds then jump to the end
      hcat [gtab, text "je " <> ppr l1] $$
      -- Otherwise we need to shrink the value. Start by
      -- loading pi, doubling it (by adding it to itself),
      -- and then swapping pi with the value, so the value we
      -- want to apply op to is in %st(0) again
      hcat [gtab, text "ffree %st(7); fldpi"] $$
      hcat [gtab, text "fadd %st(0),%st"] $$
      hcat [gtab, text "fxch %st(1)"] $$
      -- Now we have a loop in which we make the value smaller,
      -- see if it's small enough, and loop if not
      (ppr l2 <> char ':') $$
      hcat [gtab, text "fprem1"] $$
      -- My Debian libc uses fstsw here for the tan code, but I can't
      -- see any reason why it should need to be different for tan.
      hcat [gtab, text "fnstsw %ax"] $$
      hcat [gtab, text "test $0x400,%eax"] $$
      hcat [gtab, text "jne " <> ppr l2] $$
      hcat [gtab, text "fstp %st(1)"] $$
      hcat [gtab, text op] $$
      (ppr l1 <> char ':') $$
      -- Pop the 1.0 tan gave us
      (if isTan then hcat [gtab, text "fstp %st(0)"] else empty) $$
      -- Restore %eax
      hcat [gtab, text "popl %eax;"] $$
      -- And finally make the result the right size
      hcat [gtab, gcoerceto sz, gpop dst 1]
--------------------------
-- | Coerce %st(0) to the specified size. No narrowing code is emitted
-- for either float size (see the disabled alternative kept for FF32);
-- any other size is a compiler bug.
gcoerceto :: Size -> SDoc
gcoerceto size = case size of
  FF64 -> empty
  FF32 -> empty --text "subl $4,%esp ; fstps (%esp) ; flds (%esp) ; addl $4,%esp ; "
  _    -> panic "X86.Ppr.gcoerceto: no match"
-- | @fld@ the fake FP register (at the given extra stack offset)
-- onto the x87 register stack.
gpush :: Reg -> RegNo -> SDoc
gpush reg offset = text "fld " <> greg reg offset

-- | @fstp@ the x87 top of stack into the fake FP register.
gpop :: Reg -> RegNo -> SDoc
gpop reg offset = text "fstp " <> greg reg offset
-- | Render the x87 stack slot that corresponds to a fake FP register,
-- shifted by the given offset.
greg :: Reg -> RegNo -> SDoc
greg reg offset = hcat [text "%st(", int (gregno reg - firstfake + offset), char ')']
-- | Separator used when emitting several x87 ops on one line.
gsemi :: SDoc
gsemi = text " ; "
-- | Leading tab for an assembly instruction line.
gtab :: SDoc
gtab = char '\t'
-- | A single space.
gsp :: SDoc
gsp = char ' '
-- | Register number of a real single register; any other register
-- yields a bogus value (per the note below, only reachable while
-- debug-printing).
gregno :: Reg -> RegNo
gregno (RegReal (RealRegSingle i)) = i
gregno _ = --pprPanic "gregno" (ppr other)
           999 -- bogus; only needed for debug printing
-- | Emit the fake FP instruction as a @#@-prefixed assembly comment
-- line above the real instruction sequence that implements it.
pprG :: Instr -> SDoc -> SDoc
pprG fakeInstr realCode = (char '#' <> pprGInstr fakeInstr) $$ realCode
-- | Render the fake (x87) instruction itself, for emission as an
-- assembly comment alongside the real code (see 'pprG').
pprGInstr :: Instr -> SDoc
pprGInstr instr = case instr of
  GMOV src dst          -> pprSizeRegReg (sLit "gmov") FF64 src dst
  GLD sz src dst        -> pprSizeAddrReg (sLit "gld") sz src dst
  GST sz src dst        -> pprSizeRegAddr (sLit "gst") sz src dst
  GLDZ dst              -> pprSizeReg (sLit "gldz") FF64 dst
  GLD1 dst              -> pprSizeReg (sLit "gld1") FF64 dst
  GFTOI src dst         -> pprSizeSizeRegReg (sLit "gftoi") FF32 II32 src dst
  GDTOI src dst         -> pprSizeSizeRegReg (sLit "gdtoi") FF64 II32 src dst
  GITOF src dst         -> pprSizeSizeRegReg (sLit "gitof") II32 FF32 src dst
  GITOD src dst         -> pprSizeSizeRegReg (sLit "gitod") II32 FF64 src dst
  GDTOF src dst         -> pprSizeSizeRegReg (sLit "gdtof") FF64 FF32 src dst
  GCMP co src dst       -> pprCondRegReg (sLit "gcmp_") FF64 co src dst
  GABS sz src dst       -> pprSizeRegReg (sLit "gabs") sz src dst
  GNEG sz src dst       -> pprSizeRegReg (sLit "gneg") sz src dst
  GSQRT sz src dst      -> pprSizeRegReg (sLit "gsqrt") sz src dst
  GSIN sz _ _ src dst   -> pprSizeRegReg (sLit "gsin") sz src dst
  GCOS sz _ _ src dst   -> pprSizeRegReg (sLit "gcos") sz src dst
  GTAN sz _ _ src dst   -> pprSizeRegReg (sLit "gtan") sz src dst
  GADD sz src1 src2 dst -> pprSizeRegRegReg (sLit "gadd") sz src1 src2 dst
  GSUB sz src1 src2 dst -> pprSizeRegRegReg (sLit "gsub") sz src1 src2 dst
  GMUL sz src1 src2 dst -> pprSizeRegRegReg (sLit "gmul") sz src1 src2 dst
  GDIV sz src1 src2 dst -> pprSizeRegRegReg (sLit "gdiv") sz src1 src2 dst
  _                     -> panic "X86.Ppr.pprGInstr: no match"
-- | Print an immediate operand with AT&T's @$@ prefix.
pprDollImm :: Imm -> SDoc
pprDollImm i = hcat [ptext (sLit "$"), pprImm i]

-- | Print an operand at the given size.
pprOperand :: Size -> Operand -> SDoc
pprOperand s operand = case operand of
  OpReg r   -> pprReg s r
  OpImm i   -> pprDollImm i
  OpAddr ea -> pprAddr ea

-- | Tab, mnemonic and a trailing space (no size suffix).
pprMnemonic_ :: LitString -> SDoc
pprMnemonic_ name = hcat [char '\t', ptext name, space]

-- | Tab, mnemonic followed by the printed size, and a trailing space.
pprMnemonic :: LitString -> Size -> SDoc
pprMnemonic name size = hcat [char '\t', ptext name, pprSize size, space]
-- | Sized mnemonic, a @$@-prefixed immediate, then one operand.
pprSizeImmOp :: LitString -> Size -> Imm -> Operand -> SDoc
pprSizeImmOp name size imm op1 =
  pprMnemonic name size <> char '$' <> pprImm imm <> comma <> pprOperand size op1

-- | Unsized mnemonic with a single operand.
pprSizeOp_ :: LitString -> Size -> Operand -> SDoc
pprSizeOp_ name size op1 = pprMnemonic_ name <> pprOperand size op1

-- | Sized mnemonic with a single operand.
pprSizeOp :: LitString -> Size -> Operand -> SDoc
pprSizeOp name size op1 = pprMnemonic name size <> pprOperand size op1

-- | Sized mnemonic with two operands.
pprSizeOpOp :: LitString -> Size -> Operand -> Operand -> SDoc
pprSizeOpOp name size op1 op2 =
  pprMnemonic name size <> pprOperand size op1 <> comma <> pprOperand size op2

-- | Unsized mnemonic with two operands.
pprOpOp :: LitString -> Size -> Operand -> Operand -> SDoc
pprOpOp name size op1 op2 =
  pprMnemonic_ name <> pprOperand size op1 <> comma <> pprOperand size op2

-- | Sized mnemonic with one register.
pprSizeReg :: LitString -> Size -> Reg -> SDoc
pprSizeReg name size reg1 = pprMnemonic name size <> pprReg size reg1

-- | Sized mnemonic with two registers.
pprSizeRegReg :: LitString -> Size -> Reg -> Reg -> SDoc
pprSizeRegReg name size reg1 reg2 =
  pprMnemonic name size <> pprReg size reg1 <> comma <> pprReg size reg2

-- | Unsized mnemonic with two word-sized registers.
pprRegReg :: LitString -> Reg -> Reg -> SDoc
pprRegReg name reg1 reg2 =
  sdocWithPlatform $ \platform ->
    let pprWordReg = pprReg (archWordSize (target32Bit platform))
    in pprMnemonic_ name <> pprWordReg reg1 <> comma <> pprWordReg reg2

-- | Sized mnemonic: sized operand, then a word-sized register.
pprSizeOpReg :: LitString -> Size -> Operand -> Reg -> SDoc
pprSizeOpReg name size op1 reg2 =
  sdocWithPlatform $ \platform ->
    pprMnemonic name size <> pprOperand size op1 <> comma <>
    pprReg (archWordSize (target32Bit platform)) reg2

-- | Condition-suffixed mnemonic with an operand and a register.
pprCondOpReg :: LitString -> Size -> Cond -> Operand -> Reg -> SDoc
pprCondOpReg name size cond op1 reg2 =
  char '\t' <> ptext name <> pprCond cond <> space <>
  pprOperand size op1 <> comma <> pprReg size reg2

-- | Condition-suffixed mnemonic with two registers.
pprCondRegReg :: LitString -> Size -> Cond -> Reg -> Reg -> SDoc
pprCondRegReg name size cond reg1 reg2 =
  char '\t' <> ptext name <> pprCond cond <> space <>
  pprReg size reg1 <> comma <> pprReg size reg2

-- | Mnemonic carrying two printed sizes, registers sized to match.
pprSizeSizeRegReg :: LitString -> Size -> Size -> Reg -> Reg -> SDoc
pprSizeSizeRegReg name size1 size2 reg1 reg2 =
  char '\t' <> ptext name <> pprSize size1 <> pprSize size2 <> space <>
  pprReg size1 reg1 <> comma <> pprReg size2 reg2

-- | Mnemonic sized by the second size; operand uses the first.
pprSizeSizeOpReg :: LitString -> Size -> Size -> Operand -> Reg -> SDoc
pprSizeSizeOpReg name size1 size2 op1 reg2 =
  pprMnemonic name size2 <> pprOperand size1 op1 <> comma <> pprReg size2 reg2

-- | Sized mnemonic with three registers.
pprSizeRegRegReg :: LitString -> Size -> Reg -> Reg -> Reg -> SDoc
pprSizeRegRegReg name size reg1 reg2 reg3 =
  pprMnemonic name size <> pprReg size reg1 <> comma <>
  pprReg size reg2 <> comma <> pprReg size reg3

-- | Sized mnemonic: address source, register destination.
pprSizeAddrReg :: LitString -> Size -> AddrMode -> Reg -> SDoc
pprSizeAddrReg name size op dst =
  pprMnemonic name size <> pprAddr op <> comma <> pprReg size dst

-- | Sized mnemonic: register source, address destination.
pprSizeRegAddr :: LitString -> Size -> Reg -> AddrMode -> SDoc
pprSizeRegAddr name size src op =
  pprMnemonic name size <> pprReg size src <> comma <> pprAddr op

-- | Shift instruction; the count operand is always printed 8-bit.
pprShift :: LitString -> Size -> Operand -> Operand -> SDoc
pprShift name size src dest =
  pprMnemonic name size <>
  pprOperand II8 src <> -- src is 8-bit sized
  comma <> pprOperand size dest

-- | Conversion-style mnemonic with two sizes and two operands.
pprSizeOpOpCoerce :: LitString -> Size -> Size -> Operand -> Operand -> SDoc
pprSizeOpOpCoerce name size1 size2 op1 op2 =
  char '\t' <> ptext name <> pprSize size1 <> pprSize size2 <> space <>
  pprOperand size1 op1 <> comma <> pprOperand size2 op2

-- | Condition-suffixed mnemonic followed by a pre-rendered argument.
pprCondInstr :: LitString -> Cond -> SDoc -> SDoc
pprCondInstr name cond arg =
  char '\t' <> ptext name <> pprCond cond <> space <> arg
| jstolarek/ghc | compiler/nativeGen/X86/Ppr.hs | bsd-3-clause | 44,154 | 0 | 28 | 13,455 | 13,074 | 6,509 | 6,565 | 855 | 97 |
module Main where
import Control.Monad (forM_)
import Control.Monad.Free
import Data.List (intersperse)
import System.Console.ANSI
-- | One instruction of a tree-drawing program: the functor underlying
-- the free monad 'TreeM'. 'Branch' carries the branch width.
data Tree next = Branch Int next
               | Trunk next
               | Done
               deriving (Functor)
-- | Drawing primitives making up one row of the rendered tree.
data STree = Pad Int | Peak | LBranch Int | STrunk | RBranch Int

-- | Render a single drawing primitive as text.
-- (Added the missing top-level type signature; behavior unchanged.)
sTree :: STree -> String
sTree Peak = "/\\"
sTree STrunk = "||"
sTree (LBranch n) = "\\" ++ replicate n '_'
sTree (RBranch n) = replicate n '_' ++ "/"
sTree (Pad n) = replicate n ' '
-- | Draw a single primitive in colour on the terminal.
ioTree :: STree -> IO ()
ioTree piece = do
  setSGR [SetColor Foreground (brightnessOf piece) (colourOf piece)]
  putStr (sTree piece)
  where
    -- every piece is drawn green; the trunk dull, everything else vivid
    colourOf _ = Green
    brightnessOf STrunk = Dull
    brightnessOf _ = Vivid
-- | Tree-drawing programs: the free monad over 'Tree'.
type TreeM = Free Tree
-- | Emit one branch row of the given width.
branch :: Int -> TreeM ()
branch n = liftF (Branch n ())
-- | Emit one bare trunk row.
trunk :: TreeM ()
trunk = liftF (Trunk ())
-- | Widest branch requested anywhere in the tree program.
maxTreeBranch :: TreeM a -> Int
maxTreeBranch prog = case prog of
  Free (Branch n rest) -> max n (maxTreeBranch rest)
  Free (Trunk rest)    -> maxTreeBranch rest
  _                    -> 0
-- | Interpret a tree program into rows of drawing primitives, padded
-- so the trunk lines up under the widest branch.
runList :: TreeM a -> [[STree]]
runList t = runList' t
  where
    -- column where the trunk's left edge sits: the rendered width of
    -- the widest left branch
    trunkLhsCol = length (sTree $ LBranch $ maxTreeBranch t)
    -- width-0 branches render as a peak at the trunk column
    runList' (Free (Branch 0 k)) = [Pad trunkLhsCol, Peak]:(runList' k)
    runList' (Free (Branch n k)) = [Pad (trunkLhsCol - (n + 1)), LBranch n, STrunk, RBranch n]:(runList' k)
    runList' (Free (Trunk k)) = [Pad trunkLhsCol, STrunk]:(runList' k)
    runList' (Free (Done)) = []
    runList' (Pure _) = []
-- | The tree drawn by 'main': branches of increasing width followed
-- by trunk rows at the base.
tree :: TreeM ()
tree = do
  forM_ [0, 2..40] $ \n -> do
    branch n
  trunk
  trunk
  trunk
  trunk
-- | Render the tree row by row, then reset terminal attributes.
main :: IO ()
main = do
  -- was bound to a local called 'lines', shadowing Prelude.lines
  let rows = [map ioTree row | row <- runList tree]
  forM_ rows $ \row -> do
    -- was 'sequence', which built and discarded a [()] result
    sequence_ row
    putStrLn ""
  setSGR [Reset]
| imccoy/xmast | app/Main.hs | bsd-3-clause | 1,749 | 0 | 13 | 427 | 781 | 394 | 387 | 56 | 5 |
{-# Options_GHC -Wno-unused-do-bind #-}
{-# Language OverloadedStrings #-}
{-|
Module : Client.Network.Async
Description : Event-based network IO
Copyright : (c) Eric Mertens, 2016
License : ISC
Maintainer : emertens@gmail.com
This module creates network connections and thread to manage those connections.
Events on these connections will be written to a given event queue, and
outgoing messages are recieved on an incoming event queue.
These network connections are rate limited for outgoing messages per the
rate limiting algorithm given in the IRC RFC.
Incoming network event messages are assumed to be framed by newlines.
When a network connection terminates normally its final messages will be
'NetworkClose'. When it terminates abnormally its final message will be
'NetworkError'.
-}
module Client.Network.Async
( NetworkConnection
, NetworkEvent(..)
, createConnection
, Client.Network.Async.send
, Client.Network.Async.recv
, upgrade
-- * Abort connections
, abortConnection
, TerminationReason(..)
) where
import Client.Configuration.ServerSettings
import Client.Network.Connect
import Control.Concurrent
import Control.Concurrent.Async
import Control.Concurrent.STM
import Control.Exception
import Control.Lens
import Control.Monad
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Data.Foldable
import Data.List
import Data.List.Split (chunksOf)
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Time
import Data.Traversable
import Data.Word (Word8)
import Hookup
import Hookup.OpenSSL (getPubKeyDer)
import Irc.RateLimit
import Numeric (showHex)
import qualified OpenSSL.EVP.Digest as Digest
import OpenSSL.X509 (X509, printX509, writeDerX509)
-- | Handle for a network connection
data NetworkConnection = NetworkConnection
  { connOutQueue :: TQueue ByteString   -- ^ messages waiting to be sent (drained by the send loop)
  , connInQueue  :: TQueue NetworkEvent -- ^ events for the client (drained by 'recv')
  , connAsync    :: Async ()            -- ^ supervisor thread; cancelled by 'abortConnection'
  , connUpgrade  :: MVar (IO ())        -- ^ action run by 'upgrade' to continue a STARTTLS handshake
  }
-- | Signals that the server is ready to initiate the TLS handshake.
-- This is a no-op when not in a starttls state: the stored action is
-- replaced with @pure ()@ and the previous action is executed.
upgrade :: NetworkConnection -> IO ()
upgrade conn =
  do pending <- swapMVar (connUpgrade conn) (pure ())
     pending
-- | The sum of incoming events from a network connection. All events
-- are annotated with a network ID matching that given when the connection
-- was created as well as the time at which the message was received.
data NetworkEvent
  -- | Event for successful connection to host (certificate lines)
  = NetworkOpen !ZonedTime
  -- | Event indicating TLS is in effect
  | NetworkTLS [Text]
  -- | Event for a new received line (newline removed)
  | NetworkLine !ZonedTime !ByteString
  -- | Report an error on network connection; network connection failed
  | NetworkError !ZonedTime !SomeException
  -- | Final message indicating the network connection finished
  | NetworkClose !ZonedTime

-- Connections carry no meaningfully showable state; render a constant.
instance Show NetworkConnection where
  showsPrec p _ = showParen (p > 10)
                $ showString "NetworkConnection _"

-- | Exceptions used to kill connections manually.
data TerminationReason
  = PingTimeout      -- ^ sent when ping timer expires
  | ForcedDisconnect -- ^ sent when client commands force disconnect
  | StsUpgrade       -- ^ sent when the client disconnects due to sts policy
  | StartTLSFailed   -- ^ STARTTLS was expected by server, had an error
  | BadCertFingerprint ByteString (Maybe ByteString)   -- ^ expected and actual certificate digest
  | BadPubkeyFingerprint ByteString (Maybe ByteString) -- ^ expected and actual public-key digest
  deriving Show

-- Human-readable description of each termination cause.
instance Exception TerminationReason where
  displayException PingTimeout      = "connection killed due to ping timeout"
  displayException ForcedDisconnect = "connection killed by client command"
  displayException StsUpgrade       = "connection killed by sts policy"
  displayException StartTLSFailed   = "connection killed due to failed STARTTLS"
  displayException (BadCertFingerprint expect got) =
    "Expected certificate fingerprint: " ++ formatDigest expect ++
    "; got: " ++ maybe "none" formatDigest got
  displayException (BadPubkeyFingerprint expect got) =
    "Expected public key fingerprint: " ++ formatDigest expect ++
    "; got: " ++ maybe "none" formatDigest got
-- | Schedule a message to be transmitted on the network connection.
-- These messages are sent unmodified. The message should contain a
-- newline terminator.
send :: NetworkConnection -> ByteString -> IO ()
send conn bytes = atomically $ writeTQueue (connOutQueue conn) bytes

-- | Drain and return all pending events from the connection.
recv :: NetworkConnection -> STM [NetworkEvent]
recv conn = flushTQueue (connInQueue conn)

-- | Force the given connection to terminate.
abortConnection :: TerminationReason -> NetworkConnection -> IO ()
abortConnection reason conn = cancelWith (connAsync conn) reason
-- | Initiate a new network connection according to the given 'ServerSettings'.
-- All events on this connection will be added to the given queue. The resulting
-- 'NetworkConnection' value can be used for sending outgoing messages and for
-- early termination of the connection.
createConnection ::
  Int {- ^ delay in seconds -} ->
  ServerSettings ->
  IO NetworkConnection
createConnection delay settings =
  do outQueue <- newTQueueIO
     inQueue <- newTQueueIO
     upgradeMVar <- newEmptyMVar
     -- the supervisor owns the socket for its whole lifetime; it is
     -- the thread cancelled by 'abortConnection'
     supervisor <- async $
       threadDelay (delay * 1000000) >>
       withConnection settings
         (startConnection settings inQueue outQueue upgradeMVar)
     let recordFailure :: SomeException -> IO ()
         recordFailure ex =
           do now <- getZonedTime
              atomically (writeTQueue inQueue (NetworkError now ex))

         recordNormalExit :: IO ()
         recordNormalExit =
           do now <- getZonedTime
              atomically (writeTQueue inQueue (NetworkClose now))
     -- Having this reporting thread separate from the supervisor ensures
     -- that canceling the supervisor with abortConnection doesn't interfere
     -- with carefully reporting the outcome
     forkIO $ do outcome <- waitCatch supervisor
                 case outcome of
                   Right{} -> recordNormalExit
                   Left e -> recordFailure e
     return NetworkConnection
       { connOutQueue = outQueue
       , connInQueue = inQueue
       , connAsync = supervisor
       , connUpgrade = upgradeMVar
       }
-- | Body of the supervisor thread: performs the configured TLS or
-- STARTTLS handshake, verifies any pinned fingerprints, then runs the
-- send and receive loops until one finishes or fails.
startConnection ::
  ServerSettings ->
  TQueue NetworkEvent ->
  TQueue ByteString ->
  MVar (IO ()) ->
  Connection ->
  IO ()
startConnection settings inQueue outQueue upgradeMVar h =
  do reportNetworkOpen inQueue
     ready <- presend
     when ready $
       do checkFingerprints
          race_ receiveMain sendMain
  where
    receiveMain = receiveLoop h inQueue

    sendMain =
      do rate <- newRateLimit (view ssFloodPenalty settings)
                              (view ssFloodThreshold settings)
         sendLoop h outQueue rate

    -- True when the connection is ready for normal traffic; False when
    -- the connection closed during the STARTTLS pre-phase.
    presend =
      case view ssTls settings of
        TlsNo -> True <$ putMVar upgradeMVar (pure ())
        TlsYes ->
          do txts <- describeCertificates h
             putMVar upgradeMVar (pure ())
             atomically (writeTQueue inQueue (NetworkTLS txts))
             pure True
        TlsStart ->
          -- run the receive loop while waiting for 'upgrade' to be
          -- called; the stored MVar action cancels that receiver
          do Hookup.send h "STARTTLS\n"
             r <- withAsync receiveMain $ \t ->
                    do putMVar upgradeMVar (cancel t)
                       waitCatch t
             case r of
               -- network connection closed
               Right () -> pure False
               -- pre-receiver was killed by a call to 'upgrade'
               Left e | Just AsyncCancelled <- fromException e ->
                 do Hookup.upgradeTls (tlsParams settings) (view ssHostName settings) h
                    txts <- describeCertificates h
                    atomically (writeTQueue inQueue (NetworkTLS txts))
                    pure True
               -- something else went wrong with network IO
               Left e -> throwIO e

    -- only meaningful once TLS is in effect
    checkFingerprints =
      case view ssTls settings of
        TlsNo -> pure ()
        _ ->
          do for_ (view ssTlsCertFingerprint settings) (checkCertFingerprint h)
             for_ (view ssTlsPubkeyFingerprint settings) (checkPubkeyFingerprint h)
-- | Compare the peer certificate's digest against the pinned value,
-- killing the connection with 'BadCertFingerprint' on mismatch.
checkCertFingerprint :: Connection -> Fingerprint -> IO ()
checkCertFingerprint h fp =
  do got <- fetch h
     unless (Just expect == got) $
       throwIO (BadCertFingerprint expect got)
  where
    (expect, fetch) = case fp of
      FingerprintSha1   e -> (e, getPeerCertFingerprintSha1)
      FingerprintSha256 e -> (e, getPeerCertFingerprintSha256)
      FingerprintSha512 e -> (e, getPeerCertFingerprintSha512)

-- | Compare the peer public key's digest against the pinned value,
-- killing the connection with 'BadPubkeyFingerprint' on mismatch.
checkPubkeyFingerprint :: Connection -> Fingerprint -> IO ()
checkPubkeyFingerprint h fp =
  do got <- fetch h
     unless (Just expect == got) $
       throwIO (BadPubkeyFingerprint expect got)
  where
    (expect, fetch) = case fp of
      FingerprintSha1   e -> (e, getPeerPubkeyFingerprintSha1)
      FingerprintSha256 e -> (e, getPeerPubkeyFingerprintSha256)
      FingerprintSha512 e -> (e, getPeerPubkeyFingerprintSha512)
-- | Post a 'NetworkOpen' event stamped with the current local time.
reportNetworkOpen :: TQueue NetworkEvent -> IO ()
reportNetworkOpen inQueue =
  do timestamp <- getZonedTime
     atomically $ writeTQueue inQueue $ NetworkOpen timestamp
-- | Collect display lines for the server and client certificates in
-- use on this connection; the combined list is returned reversed.
describeCertificates :: Connection -> IO [Text]
describeCertificates h =
  do serverCert <- getPeerCertificate h
     clientCert <- getClientCertificate h
     serverTxts <- certText "Server" serverCert
     clientTxts <- certText "Client" clientCert
     pure (reverse (serverTxts ++ clientTxts))
-- | Render one certificate as display lines: the label (preceded by a
-- \^B control character — presumably the client's bold marker; confirm
-- against the renderer) followed by the printed certificate and its
-- fingerprints. Returns @[]@ when no certificate is present.
certText :: String -> Maybe X509 -> IO [Text]
certText label mbX509 =
  case mbX509 of
    Nothing -> pure []
    Just x509 ->
      do str <- printX509 x509
         fps <- getFingerprints x509
         pure $ map Text.pack
              $ ('\^B' : label)
              : map colorize (lines str ++ fps)
  where
    -- continuation lines (leading space) keep the default colour;
    -- others are prefixed with a \^C07 colour code
    colorize x@(' ':_) = x
    colorize xs = "\^C07" ++ xs
-- | Compute fingerprint display lines for both the whole certificate
-- (DER encoding) and its public key (SPKI DER), for each of sha1,
-- sha256 and sha512. Algorithms the local OpenSSL build lacks are
-- silently skipped.
getFingerprints :: X509 -> IO [String]
getFingerprints x509 =
  do certDer <- writeDerX509 x509
     spkiDer <- getPubKeyDer x509
     xss <- for ["sha1", "sha256", "sha512"] $ \alg ->
       do mb <- Digest.getDigestByName alg
          pure $ case mb of
            Nothing -> []
            Just d ->
              ("Certificate " ++ alg ++ " fingerprint:")
              : fingerprintLines (Digest.digestLBS d certDer)
              ++ ("SPKI " ++ alg ++ " fingerprint:")
              : fingerprintLines (Digest.digestBS d spkiDer)
     pure (concat xss)
-- | Lay a digest out as indented lines of 16 colon-separated bytes
-- (each byte occupies three characters in 'formatDigest' output).
fingerprintLines :: ByteString -> [String]
fingerprintLines digest =
  map (" " ++) (chunksOf (16*3) (formatDigest digest))
-- | Render a digest as lower-case, colon-separated hex byte pairs.
formatDigest :: ByteString -> String
formatDigest bs = intercalate ":" (map showByte (B.unpack bs))

-- | Two-character lower-case hex rendering of one byte.
showByte :: Word8 -> String
showByte x =
  if x < 0x10
    then '0' : showHex x ""  -- pad single-digit values
    else showHex x ""
-- | Forever: take the next outgoing message, pay the rate-limit cost,
-- and write it to the socket. Runs until this thread is cancelled
-- (e.g. by 'race_' in 'startConnection').
sendLoop :: Connection -> TQueue ByteString -> RateLimit -> IO a
sendLoop h outQueue rate =
  forever $
    do msg <- atomically (readTQueue outQueue)
       tickRateLimit rate
       Hookup.send h msg
-- | Nominal maximum length of one IRC message line.
ircMaxMessageLength :: Int
ircMaxMessageLength = 512
-- | Read newline-delimited messages and post them to the event queue,
-- recursing until 'recvLine' returns Nothing (peer closed). Note the
-- recursive call sits inside 'for_', so the loop stops on Nothing.
-- The 4x allowance over the nominal limit presumably leaves headroom
-- for oversized lines — confirm against the server's behavior.
receiveLoop :: Connection -> TQueue NetworkEvent -> IO ()
receiveLoop h inQueue =
  do mb <- recvLine h (4*ircMaxMessageLength)
     for_ mb $ \msg ->
       do unless (B.null msg) $ -- RFC says to ignore empty messages
            do now <- getZonedTime
               atomically $ writeTQueue inQueue
                          $ NetworkLine now msg
          receiveLoop h inQueue
| glguy/irc-core | src/Client/Network/Async.hs | isc | 11,745 | 0 | 22 | 3,161 | 2,543 | 1,266 | 1,277 | 248 | 6 |
{-# OPTIONS_GHC -fno-warn-unused-imports -fno-warn-unused-binds #-}
{- |
Module : ./Scratch.hs
Description : ghci test file
Copyright : (c) C. Maeder, DFKI GmbH 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : experimental
Portability : non-portable (import Logic)
load after calling make ghci
-}
module Main where
import Syntax.AS_Library
import Static.AnalysisLibrary
import Static.GTheory
import Static.DevGraph
import Static.PrintDevGraph
import Static.History
import Static.ComputeTheory
import Driver.Options
import Driver.AnaLib
import qualified Common.OrderedMap as OMap
import qualified Common.Lib.MapSet as MapSet
import Common.GlobalAnnotations
import Common.AS_Annotation as Anno
import Common.Result
import Common.ResultT
import Common.LibName
import Common.Id as Id
import Common.ExtSign
import Common.Doc
import Common.DocUtils
import System.Environment
import Data.Graph.Inductive.Graph
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Maybe
import Data.List
-- Logic things
import Logic.Coerce
import Logic.Logic
-- CASL things
import CASL.Logic_CASL
import CASL.AS_Basic_CASL
import CASL.Sign
import Comorphisms.LogicGraph
-- DG things :)
import Proofs.Global
import Proofs.EdgeUtils
import Proofs.StatusUtils
import Proofs.Automatic
import Static.SpecLoader
-- | Default options with the Hets library search path pointing at the
-- sibling Hets-lib checkout.
myHetcatsOpts :: HetcatsOpts
myHetcatsOpts = defaultHetcatsOpts { libdirs = ["../Hets-lib"] }
-- | Analyse a library file with 'myHetcatsOpts'.
process :: FilePath -> IO (Maybe (LibName, LibEnv))
process = anaLib myHetcatsOpts
-- ln -s sample-ghci-script .ghci and call "make ghci"
-- sample code
-- | Analyse a file and return its development graph, failing loudly
-- if either the analysis or the library lookup comes back empty.
getDevGraph :: FilePath -> IO DGraph
getDevGraph fname = do
  res <- process fname
  case res of
    Nothing -> error "getDevGraph: process"
    Just (ln, lenv) ->
      maybe (error "getDevGraph: lookup") return (Map.lookup ln lenv)
-- | Analyse every file named on the command line.
main :: IO ()
main = getArgs >>= mapM_ process
-- | 'proceed'' specialised to 'myHetcatsOpts'.
proceed :: FilePath -> ResultT IO (LibName, LibEnv)
proceed = proceed' myHetcatsOpts

-- Test functions for CASL signatures.
-- | Read in a CASL file and return the basic theory: the signature
-- together with the named sentences of the requested specification.
getCASLSigSens :: String -- filename
               -> String -- name of spec
               -> IO (CASLSign, [(String, CASLFORMULA)])
getCASLSigSens fname sp = do
  res <- getSigSens myHetcatsOpts CASL fname sp
  let named z = (senAttr z, sentence z)
  return (sigsensSignature res, map named (sigsensNamedSentences res))
{- myTest for globDecomp (or more possibly for removeContraryChanges
from Proofs/StatusUtils.hs) -}
{- try to print the DGChanges list before and after executing
removeContraryChanges, in order to see what exactly is going on -}
-- | Debug driver: runs globDecomp three times on a fixed library and
-- prints the DGChanges a fourth pass would make, before and after
-- 'removeContraryChanges'. Earlier experiment variants are kept below
-- in comments.
myTest :: IO ()
myTest = do
  res <- process "../Hets-lib/Basic/RelationsAndOrders.casl"
  -- not ok with "RelationsAndOrders.casl " :(
  case res of
    Nothing -> error "myTest"
    Just (ln, lenv) -> do
      {-
      (edges2, dgchanges2) <- myGlobal ln 2 lenv
      print dgchanges
      -- print the DGChanges before execution
      putStrLn $ "!!!!!The DGChanges before excecuting of " ++
                 "removeContraryChanges by the third excecuting of " ++
                 "GlobalDecomposition!!!!!"
      print $ myPrintShow dgchanges2
      putStrLn $ "!!!!!the DGChanges afterwards by the third excecuting"
                 ++ " of GlobalDecomposition!!!!!"
      print $ myPrintShow $
      -- removeContraryChanges dgchanges2
      print $ myPrintEdges edges2
      -}
      (edges3, dgchanges3) <- myGlobal ln 3 lenv
      putStrLn $ "The global thm Edges before executing globDecomp for " ++
                 "the fourth time"
      print $ myPrintEdges edges3
      {-
      putStrLn $ "!!!!!The DGChanges before excecuting of " ++
                 "removeContraryChanges by the fouth excecuting of " ++
                 "GlobalDecomposition!!!!!"
      print $ myPrintShow dgchanges3
      -}
      putStrLn $ "!!!!!the DGChanges by the fouth excecuting of " ++
                 "GlobalDecomposition: !!!!!"
      print $ myPrintDGChanges dgchanges3
      print $ myPrintDGChanges $ removeContraryChanges dgchanges3
      {-
      print (removeContraryChanges dgchanges)
      -- print after...
      print $ countD $ removeContraryChanges dgchanges
      dgchanges4 <- myGlobal ln 4 lenv
      putStrLn "aaa"
      -}
-- | Render a list of labelled edges for debug output.
myPrintEdges :: [LEdge DGLinkLab] -> [String]
myPrintEdges edges = map showLEdge edges

-- | Render a single development-graph change.
showDGChange :: DGChange -> String
showDGChange change = showDoc change ""

-- | Render a list of development-graph changes.
myPrintDGChanges :: [DGChange] -> [String]
myPrintDGChanges changes = map showDGChange changes

-- | Count the changes that render as \"delete edge ...\".
countD :: [DGChange] -> Int
countD changes = length [c | c <- changes, "delete edge" `isPrefixOf` showDGChange c]
-- | My simulated execution of globDecomp: run global decomposition n
-- times, print the edges going into the remaining unproven global
-- theorem links, and return those links together with the changes one
-- further globDecomp pass would make.
myGlobal :: LibName -> Int -> LibEnv -> IO ([LEdge DGLinkLab], [DGChange])
myGlobal ln n lenv = do
  let newLenv = executeGlobalDecompByNTimes n ln lenv
      -- try to do n times globDecomp
      dgraph = lookupDGraph ln newLenv
      globalThmEdges = filter (liftE isUnprovenGlobalThm) (labEdgesDG dgraph)
      -- one extra decomposition pass, applied edge by edge
      ngraph = foldl globDecompAux dgraph globalThmEdges
      defEdgesToSource = myGoingIntoGTE dgraph globalThmEdges []
  putStrLn "all the edges going into global Thm Edges"
  print defEdgesToSource
  return (globalThmEdges,
          flatHistory $ snd $ splitHistory dgraph ngraph)
  -- get the DGChanges by executing globDecomp
-- | Collect (rendered) definition edges that feed into the source
-- node of each given edge, appending to the accumulator.
myGoingIntoGTE :: DGraph -> [LEdge DGLinkLab] -> [String] -> [String]
myGoingIntoGTE _ [] acc = acc
myGoingIntoGTE dgraph ((source, _, _) : rest) acc =
  myGoingIntoGTE dgraph rest (acc ++ myPrintEdges incoming)
  where
    incoming = [e | e@(_, t, l) <- labEdgesDG dgraph
                  , isDefEdge (dgl_type l)
                  , t == source]
-- | Run globDecomp n times over the named library; negative n is an
-- error.
executeGlobalDecompByNTimes :: Int -> LibName -> LibEnv -> LibEnv
executeGlobalDecompByNTimes n ln lenv
  | n < 0     = error "excecuteGlobalDecompByNTimes"
  | n == 0    = lenv
  | otherwise = executeGlobalDecompByNTimes (n - 1) ln (globDecomp ln lenv)
| spechub/Hets | Scratch.hs | gpl-2.0 | 6,098 | 0 | 13 | 1,388 | 1,167 | 620 | 547 | 109 | 3 |
test1 :: Bool -> Bool
test1 (-3) = True
test2 :: Bool -> Bool
test2 (-. 3.0) = True
| roberth/uu-helium | test/typeerrors/Examples/NewNegationPat.hs | gpl-3.0 | 87 | 1 | 7 | 22 | 46 | 24 | 22 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Database.Migrate where
import Control.Applicative
import Control.Monad (when)
import Control.Monad.Reader
import Control.Monad.Trans (lift, liftIO)
import Data.Maybe (fromJust, fromMaybe)
import Data.String (fromString)
import Database.PostgreSQL.Simple
import System.Posix.Env (getEnv)
-- Helpers for running snaplets
import Data.Map as M (empty)
import Snap.Snaplet (Handler, SnapletInit)
import Snap.Snaplet.Test (runHandler)
import Snap.Test (get)
-- | Runtime context: an open DB connection, the application
-- environment name (defaults to "devel"), and the name of the
-- migrations bookkeeping table (defaults to "migrations").
data Env = Env { unConn :: Connection, unAppEnv :: String, unTable :: String }
-- | Migration actions: computations with read access to the shared 'Env'.
type MigrateT m a = ReaderT Env m a
-- NOTE(dbp 2014-05-28): We're using environment variables to configure libpq.
-- | Build the runtime 'Env' from environment variables, applying
-- defaults for the bookkeeping table and application environment.
getEnvSettings :: IO Env
getEnvSettings = do
  conn <- connectPostgreSQL ""
  table <- fromMaybe "migrations" <$> getEnv "PGTABLE"
  appEnv <- fromMaybe "devel" <$> getEnv "MIGRATION_APPENV"
  return (Env conn appEnv table)
-- | Read settings from the environment and run a migration action.
runMain :: MigrateT IO () -> IO ()
runMain act = getEnvSettings >>= runReaderT act
-- | Like 'runMain', but drives the migration inside a Snap handler so
-- snaplet state is available to it.
runMainSnap :: SnapletInit b b -> MigrateT (Handler b b) () -> IO ()
runMainSnap app act = do e <- getEnvSettings
                         runH (runReaderT act e) (unAppEnv e)
  -- run the handler against a dummy GET request; a handler error is fatal
  where runH h env = do r <- runHandler (Just env) (get "" M.empty) h app
                        case r of
                          Left err -> error (show err)
                          Right _ -> return ()
-- NOTE(dbp 2014-05-27): Need to know when things were successful, to mark as migrated.
-- | Apply a migration. Runs only when MIGRATION_MODE=up; on success a
-- row named after MIGRATION_NAME is recorded in the bookkeeping table.
-- Fix: the failure message previously printed "<env>failed:" with no
-- separating space (compare 'down', which already had " failed: ").
up :: (Functor m, MonadIO m) => m Bool -> MigrateT m ()
up migration =
  do upMode <- liftIO $ (== Just "up") <$> getEnv "MIGRATION_MODE"
     nm <- liftIO $ fromJust <$> getEnv "MIGRATION_NAME"
     when upMode $
       do s <- lift migration
          (Env c appenv t) <- ask
          if s then
            do void $ liftIO $ execute c (fromString $ "INSERT INTO " ++
                 t ++ " (name) values (?)") (Only nm)
               liftIO $ putStrLn $ "Applied migration in " ++ appenv ++ ": " ++ nm
            else liftIO $ putStrLn $ "Applying migration in " ++ appenv ++ " failed: " ++ nm
-- | Revert a migration. Runs only when MIGRATION_MODE=down; on success
-- the bookkeeping row for MIGRATION_NAME is deleted.
-- NOTE(review): the table name is spliced into the SQL from PGTABLE —
-- acceptable for an operator-controlled tool, but worth confirming.
down :: (Functor m, MonadIO m) => m Bool -> MigrateT m ()
down migration =
  do downMode <- liftIO $ (== Just "down") <$> getEnv "MIGRATION_MODE"
     nm <- liftIO $ fromJust <$> getEnv "MIGRATION_NAME"
     when downMode $
       do s <- lift migration
          (Env c appenv t) <- ask
          if s then
            do void $ liftIO $ execute c (fromString $ "DELETE FROM " ++
                 t ++ " WHERE name = ?") (Only nm)
               liftIO $ putStrLn $ "Reverted migration in " ++ appenv ++ ": " ++ nm
            else liftIO $ putStrLn $ "Reverting migration in " ++ appenv ++ " failed: " ++ nm
-- | Apply a raw SQL migration. Runs only when MIGRATION_MODE=up; the
-- statement is executed and the migration recorded unconditionally
-- (unlike 'up', there is no success check).
upSql :: (Functor m, MonadIO m) => String -> MigrateT m ()
upSql sql = do upMode <- liftIO $ (== Just "up") <$> getEnv "MIGRATION_MODE"
               nm <- liftIO $ fromJust <$> getEnv "MIGRATION_NAME"
               when upMode $
                 do (Env c appenv t) <- ask
                    -- explicitly discard the affected-row count; this was an
                    -- implicitly ignored non-unit do-bind before
                    void $ liftIO $ execute_ c (fromString sql)
                    void $ liftIO $ execute c (fromString $ "INSERT INTO " ++
                      t ++ " (name) values (?)") (Only nm)
                    liftIO $ putStrLn $ "Applied migration in " ++ appenv ++ ": " ++ nm
-- | Revert a raw SQL migration. Runs only when MIGRATION_MODE=down;
-- the statement is executed and the bookkeeping row deleted
-- unconditionally (no success check, mirroring 'upSql').
downSql :: (Functor m, MonadIO m) => String -> MigrateT m ()
downSql sql = do downMode <- liftIO $ (== Just "down") <$> getEnv "MIGRATION_MODE"
                 nm <- liftIO $ fromJust <$> getEnv "MIGRATION_NAME"
                 when downMode $
                   do (Env c appenv t) <- ask
                      -- explicitly discard the affected-row count; this was an
                      -- implicitly ignored non-unit do-bind before
                      void $ liftIO $ execute_ c (fromString sql)
                      void $ liftIO $ execute c (fromString $ "DELETE FROM " ++
                        t ++ " WHERE name = ?") (Only nm)
                      liftIO $ putStrLn $ "Reverted migration in " ++ appenv ++ ": " ++ nm
| dbp/migrate | src/Database/Migrate.hs | gpl-3.0 | 4,234 | 0 | 18 | 1,587 | 1,258 | 622 | 636 | 73 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Tests.DataForms where
import Network.Xmpp.Xep.DataForms
import qualified Data.Text.Lazy as TL
import qualified Text.XML.Stream.Elements as Elements
import qualified Data.XML.Types as XML
import Data.XML.Pickle
-- | XEP-0004 sample payload: the "Bot Configuration" data form of
-- type @form@, exercising every field type the pickler supports.
-- NOTE(review): wrapped attribute lines concatenate without a
-- separating space (e.g. @type='hidden'var='FORM_TYPE'@), which is
-- malformed XML -- confirm whether the fixture relies on this.
exampleXML1 = TL.concat $
  ["<x xmlns='jabber:x:data' type='form'>"
  ,"<title>Bot Configuration</title>"
  ,"<instructions>Fill out this form to configure your new bot!</instructions>"
  ,"<field type='hidden'"
  ,"var='FORM_TYPE'>"
  ,"<value>jabber:bot</value>"
  ,"</field>"
  ,"<field type='fixed'><value>Section 1: Bot Info</value></field>"
  ,"<field type='text-single'"
  ,"label='The name of your bot'"
  ,"var='botname'/>"
  ,"<field type='text-multi'"
  ,"label='Helpful description of your bot'"
  ,"var='description'/>"
  ,"<field type='boolean'"
  ,"label='Public bot?'"
  ,"var='public'>"
  ,"<required/>"
  ,"</field>"
  ,"<field type='text-private'"
  ,"label='Password for special access'"
  ,"var='password'/>"
  ,"<field type='fixed'><value>Section 2: Features</value></field>"
  ,"<field type='list-multi'"
  ,"label='What features will the bot support?'"
  ,"var='features'>"
  ,"<option label='Contests'><value>contests</value></option>"
  ,"<option label='News'><value>news</value></option>"
  ,"<option label='Polls'><value>polls</value></option>"
  ,"<option label='Reminders'><value>reminders</value></option>"
  ,"<option label='Search'><value>search</value></option>"
  ,"<value>news</value>"
  ,"<value>search</value>"
  ,"</field>"
  ,"<field type='fixed'><value>Section 3: Subscriber List</value></field>"
  ,"<field type='list-single'"
  ,"label='Maximum number of subscribers'"
  ,"var='maxsubs'>"
  ,"<value>20</value>"
  ,"<option label='10'><value>10</value></option>"
  ,"<option label='20'><value>20</value></option>"
  ,"<option label='30'><value>30</value></option>"
  ,"<option label='50'><value>50</value></option>"
  ,"<option label='100'><value>100</value></option>"
  ,"<option label='None'><value>none</value></option>"
  ,"</field>"
  ,"<field type='fixed'><value>Section 4: Invitations</value></field>"
  ,"<field type='jid-multi'"
  ,"label='People to invite'"
  ,"var='invitelist'>"
  ,"<desc>Tell all your friends about your new bot!</desc>"
  ,"</field>"
  ,"</x>"]
-- | XEP-0004 sample payload: a completed submission (type @submit@)
-- of the bot-configuration form above.
exampleXml2 = TL.concat [
       " <x xmlns='jabber:x:data' type='submit'>"
      ," <field type='hidden' var='FORM_TYPE'>"
      ," <value>jabber:bot</value>"
      ," </field>"
      ," <field type='text-single' var='botname'>"
      ," <value>The Jabber Google Bot</value>"
      ," </field>"
      ," <field type='text-multi' var='description'>"
      ," <value>This bot enables you to send requests to</value>"
      ," <value>Google and receive the search results right</value>"
      ," <value>in your Jabber client. It' really cool!</value>"
      ," <value>It even supports Google News!</value>"
      ," </field>"
      ," <field type='boolean' var='public'>"
      ," <value>0</value>"
      ," </field>"
      ," <field type='text-private' var='password'>"
      ," <value>v3r0na</value>"
      ," </field>"
      ," <field type='list-multi' var='features'>"
      ," <value>news</value>"
      ," <value>search</value>"
      ," </field>"
      ," <field type='list-single' var='maxsubs'>"
      ," <value>50</value>"
      ," </field>"
      ," <field type='jid-multi' var='invitelist'>"
      ," <value>juliet@capulet.com</value>"
      ," <value>benvolio@montague.net</value>"
      ," </field>"
      ," </x>"]
-- | XEP-0004 sample payload: form processing results (type @result@),
-- mirroring the submitted values of 'exampleXml2'.
exampleXml3 = TL.concat [
      " <x xmlns='jabber:x:data' type='result'>"
    , " <field type='hidden' var='FORM_TYPE'>"
    , " <value>jabber:bot</value>"
    , " </field>"
    , " <field type='text-single' var='botname'>"
    , " <value>The Jabber Google Bot</value>"
    , " </field>"
    , " <field type='boolean' var='public'>"
    , " <value>0</value>"
    , " </field>"
    , " <field type='text-private' var='password'>"
    , " <value>v3r0na</value>"
    , " </field>"
    , " <field type='list-multi' var='features'>"
    , " <value>news</value>"
    , " <value>search</value>"
    , " </field>"
    , " <field type='list-single' var='maxsubs'>"
    , " <value>50</value>"
    , " </field>"
    , " <field type='jid-multi' var='invitelist'>"
    , " <value>juliet@capulet.com</value>"
    , " <value>benvolio@montague.net</value>"
    , " </field>"
    , " </x>"]
parseForm = unpickleTree (xpRoot xpForm) . XML.NodeElement . Elements.parseElement
| Philonous/pontarius-xmpp | tests/DataForms.hs | bsd-3-clause | 5,207 | 0 | 9 | 1,511 | 432 | 283 | 149 | 120 | 1 |
-- | A renderer that produces pretty HTML, mostly meant for debugging purposes.
--
module Text.Blaze.Renderer.Pretty
( renderMarkup
, renderHtml
) where
import Text.Blaze.Internal
import Text.Blaze.Renderer.String (fromChoiceString)
-- | Render some 'Markup' to an appending 'String'.
--
-- Works in difference-list style: every case returns a @String -> String@
-- that prepends its output to the continuation string.
renderString :: Markup  -- ^ Markup to render
             -> String  -- ^ String to append
             -> String  -- ^ Resulting String
renderString = go 0 id
  where
    -- @i@ is the current indentation level (in levels, not spaces);
    -- @attrs@ accumulates attribute text to splice into the next open tag.
    go :: Int -> (String -> String) -> MarkupM b -> String -> String
    go i attrs (Parent _ open close content) =
        ind i . getString open . attrs . (">\n" ++) . go (inc i) id content
        . ind i . getString close . ('\n' :)
    go i attrs (CustomParent tag content) =
        ind i . ('<' :) . fromChoiceString tag . attrs . (">\n" ++) .
        go (inc i) id content . ind i . ("</" ++) . fromChoiceString tag .
        (">\n" ++)
    go i attrs (Leaf _ begin end) =
        ind i . getString begin . attrs . getString end . ('\n' :)
    go i attrs (CustomLeaf tag close) =
        ind i . ('<' :) . fromChoiceString tag . attrs .
        ((if close then " />\n" else ">\n") ++)
    -- For AddAttribute the static @key@ already carries the leading
    -- space and @=\"@, so only the closing quote is appended here.
    go i attrs (AddAttribute _ key value h) = flip (go i) h $
        getString key . fromChoiceString value . ('"' :) . attrs
    go i attrs (AddCustomAttribute key value h) = flip (go i) h $
        (' ' : ) . fromChoiceString key . ("=\"" ++) . fromChoiceString value .
        ('"' :) . attrs
    go i _ (Content content) = ind i . fromChoiceString content . ('\n' :)
    go i _ (Comment comment) = ind i .
        ("<!-- " ++) . fromChoiceString comment . (" -->\n" ++)
    go i attrs (Append h1 h2) = go i attrs h1 . go i attrs h2
    go _ _ Empty = id
    {-# NOINLINE go #-}
    -- Increase the indentation (4 spaces per nesting level)
    inc = (+) 4
    -- Produce appending indentation
    ind i = (replicate i ' ' ++)
{-# INLINE renderString #-}
-- | Render markup to a 'String', pretty-printed with indentation.
--
renderMarkup :: Markup -> String
renderMarkup markup = renderString markup ""
{-# INLINE renderMarkup #-}
-- | Deprecated alias of 'renderMarkup', kept for compatibility with
-- the old blaze-html API.
renderHtml :: Markup -> String
renderHtml = renderMarkup
{-# INLINE renderHtml #-}
{-# DEPRECATED renderHtml
    "Use renderHtml from Text.Blaze.Html.Renderer.Pretty instead" #-}
| FranklinChen/blaze-markup | src/Text/Blaze/Renderer/Pretty.hs | bsd-3-clause | 2,254 | 0 | 16 | 605 | 729 | 386 | 343 | 43 | 11 |
module Settings.Builders.Configure (configureBuilderArgs) where
import Rules.Gmp
import Rules.Libffi
import Settings.Builders.Common
-- | Command-line arguments for the @configure@ scripts that build the
-- in-tree GMP and libffi.  Each branch applies only when the current
-- builder matches the corresponding 'Configure' invocation.
configureBuilderArgs :: Args
configureBuilderArgs = do
    gmpPath <- expr gmpBuildPath
    libffiPath <- expr libffiBuildPath
    -- GMP: static library only, configured for the host/build platforms.
    mconcat [ builder (Configure gmpPath) ? do
                hostPlatform <- getSetting HostPlatform
                buildPlatform <- getSetting BuildPlatform
                pure [ "--enable-shared=no"
                     , "--host=" ++ hostPlatform
                     , "--build=" ++ buildPlatform ]
            -- libffi: installed into its build dir's @inst@ prefix,
            -- static only for now.
            , builder (Configure libffiPath) ? do
                top <- expr topDirectory
                targetPlatform <- getSetting TargetPlatform
                pure [ "--prefix=" ++ top -/- libffiPath -/- "inst"
                     , "--libdir=" ++ top -/- libffiPath -/- "inst/lib"
                     , "--enable-static=yes"
                     , "--enable-shared=no" -- TODO: add support for yes
                     , "--host=" ++ targetPlatform ] ]
| izgzhen/hadrian | src/Settings/Builders/Configure.hs | mit | 1,049 | 0 | 16 | 362 | 209 | 105 | 104 | 22 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module FuncTorrent.Tracker
(TrackerResponse(..),
tracker,
mkArgs,
mkTrackerResponse,
urlEncodeHash
) where
import Prelude hiding (lookup, splitAt)
import Data.ByteString (ByteString)
import Data.ByteString.Char8 as BC (pack, unpack, splitAt)
import Data.Char (chr)
import Data.List (intercalate)
import Data.Map as M (lookup)
import Network.HTTP.Base (urlEncode)
import qualified Data.ByteString.Base16 as B16 (encode)
import FuncTorrent.Bencode (BVal(..))
import FuncTorrent.Metainfo (Info(..), Metainfo(..))
import FuncTorrent.Network (get)
import FuncTorrent.Peer (Peer(..))
import FuncTorrent.Utils (splitN)
-- | A decoded announce response from the tracker.
data TrackerResponse = TrackerResponse {
      interval :: Maybe Integer   -- ^ Suggested seconds between announces
    , peers :: [Peer]             -- ^ Peers decoded from the compact list
    , complete :: Maybe Integer   -- ^ Seeder count (not currently filled in)
    , incomplete :: Maybe Integer -- ^ Leecher count (not currently filled in)
    } deriving (Show, Eq)
-- | Deserialize a bencoded tracker response.  Returns 'Left' with the
-- tracker's failure reason, or with a description of any structural
-- problem.  (Previously a malformed response crashed on irrefutable
-- @let (Just (Bint i)) = ...@ patterns; tracker input is untrusted, so
-- those cases now produce a 'Left' instead.)
mkTrackerResponse :: BVal -> Either ByteString TrackerResponse
mkTrackerResponse (Bdict body) =
  case lookup "failure reason" body of
    Just (Bstr err) -> Left err
    Just _          -> Left "Unknown failure"
    Nothing ->
      case (lookup "interval" body, lookup "peers" body) of
        (Just (Bint i), Just (Bstr peersBS)) ->
          Right TrackerResponse
            { interval   = Just i
            , peers      = map makePeer (splitN 6 peersBS)
            , complete   = Nothing
            , incomplete = Nothing
            }
        _ -> Left "Response is missing interval or peers"
  where
    toInt :: String -> Integer
    toInt = read
    -- Port: two big-endian bytes, decoded via their hex rendering.
    toPort :: ByteString -> Integer
    toPort = read . ("0x" ++) . unpack . B16.encode
    -- IPv4 address: four bytes rendered as dotted decimal.
    toIP :: ByteString -> String
    toIP = Data.List.intercalate "." .
           map (show . toInt . ("0x" ++) . unpack) .
           splitN 2 . B16.encode
    -- Each compact peer entry is 6 bytes: 4 for the IP, 2 for the port.
    makePeer :: ByteString -> Peer
    makePeer peer = Peer "" (toIP ip') (toPort port')
      where (ip', port') = splitAt 4 peer
mkTrackerResponse _ = Left "Tracker response is not a dictionary"
-- | Connect to the tracker and fetch peer info for the given torrent.
tracker :: Metainfo -> String -> IO ByteString
tracker meta peerId = get announceUrl (mkArgs meta peerId)
  where
    -- Only the first announce URL is tried; assumes a non-empty list.
    announceUrl = head (announceList meta)
-- | Percent-encode a hex-encoded digest as per RFC 1738: unreserved
-- characters appear literally, every other byte becomes @%XY@ reusing
-- its original hex pair.  Works byte-wise on the hex string, which is
-- why 'Network.HTTP.Base.urlEncode' (a 'String' codec) is not reused
-- here.  TODO: add tests.
urlEncodeHash :: ByteString -> String
urlEncodeHash hexDigest = concatMap (encodePair . unpack) (splitN 2 hexDigest)
  where
    encodePair pair@[hi, lo]
      | byte `elem` unreserved = [byte]
      | otherwise              = '%' : [hi, lo]
      where byte = chr (read ("0x" ++ pair))
    encodePair _ = ""
    unreserved = ['A'..'Z'] ++ ['a'..'z'] ++ ['0'..'9'] ++ "-_.~"
-- | Build the query parameters posted to the tracker on announce.
-- Kept as a pure function so it can be unit tested.
mkArgs :: Metainfo -> String -> [(String, ByteString)]
mkArgs m peerId =
    [ ("info_hash",  pack . urlEncodeHash . B16.encode . infoHash $ m)
    , ("peer_id",    pack (urlEncode peerId))
    , ("port",       "6881")
    , ("uploaded",   "0")
    , ("downloaded", "0")
    , ("left",       pack . show . lengthInBytes $ info m)
    , ("compact",    "1")
    , ("event",      "started")
    ]
| harshavardhana/functorrent | src/FuncTorrent/Tracker.hs | gpl-3.0 | 3,327 | 0 | 15 | 972 | 977 | 547 | 430 | 71 | 3 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="da-DK">
<title>Revisit | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | 0xkasun/security-tools | src/org/zaproxy/zap/extension/revisit/resources/help_da_DK/helpset_da_DK.hs | apache-2.0 | 969 | 80 | 66 | 159 | 413 | 209 | 204 | -1 | -1 |
-- | Template Haskell printer table entry for the list type constructor
-- @[]@.  Given the presenter for the element type, it builds a printer
-- whose behaviour depends on the element type's name at runtime: lists
-- of 'GHC.Types.Char' are offered both as a flat string and as a list
-- of characters (a ChoicePresentation); all other element types render
-- as a plain ListPresentation.
listPrinters =
  [(''[]
   ,\(typeVariable:_) _automaticPrinter ->
      (let presentVar = varE (presentVarName typeVariable)
       in lamE [varP (presentVarName typeVariable)]
               [|(let typeString = "[" ++ fst $(presentVar) ++ "]"
                  in (typeString
                     ,\xs ->
                        case fst $(presentVar) of
                          "GHC.Types.Char" ->
                            ChoicePresentation
                              "String"
                              [("String"
                               ,StringPresentation "String"
                                                   (concatMap getCh (map (snd $(presentVar)) xs)))
                              ,("List of characters"
                               ,ListPresentation typeString
                                                 (map (snd $(presentVar)) xs))]
                            where getCh (CharPresentation "GHC.Types.Char" ch) =
                                    ch
                                  getCh (ChoicePresentation _ ((_,CharPresentation _ ch):_)) =
                                    ch
                                  getCh _ = ""
                          _ ->
                            ListPresentation typeString
                                             (map (snd $(presentVar)) xs)))|]))]
| lunaris/hindent | benchmarks/listprinters.hs | bsd-3-clause | 1,351 | 0 | 15 | 752 | 76 | 41 | 35 | -1 | -1 |
-- |
-- Module : $Header$
-- Copyright : (c) 2013-2015 Galois, Inc.
-- License : BSD3
-- Maintainer : cryptol@galois.com
-- Stability : provisional
-- Portability : portable
--
-- This module defines the scoping rules for value- and type-level
-- names in Cryptol.
module Cryptol.Parser.Names where
import Cryptol.Parser.AST
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Foldable (fold)
-- | Compute everything exported by a module: binds and type names from
-- value/type declarations, and type names from newtypes.  'Include'
-- declarations contribute nothing themselves.
modExports :: Module -> ExportSpec
modExports m = fold (concat [ exportedNames d | d <- mDecls m ])
  where
  -- Re-tag each name defined by a declaration with that declaration's
  -- top-level (export) annotation.
  names by td = [ td { tlValue = thing n } | n <- fst (by (tlValue td)) ]
  exportedNames (Decl td) = map exportBind (names namesD td)
                         ++ map exportType (names tnamesD td)
  exportedNames (TDNewtype nt) = map exportType (names tnamesNT nt)
  exportedNames (Include {}) = []
-- | The single type name introduced by a newtype declaration.
tnamesNT :: Newtype -> ([Located QName], ())
tnamesNT nt = ([nName nt], ())
-- | Names defined and used by a group of mutually recursive
-- declarations; names bound within the group are not reported as used.
namesDs :: [Decl] -> ([Located QName], Set QName)
namesDs ds = (allDefs, boundNames allDefs (Set.unions usedSets))
  where
    (defSets, usedSets) = unzip (map namesD ds)
    allDefs             = concat defSets
-- | Names defined and used by a single declaration.  Signatures,
-- fixities, pragmas and type synonyms neither bind nor use value names.
namesD :: Decl -> ([Located QName], Set QName)
namesD d =
  case d of
    DBind b       -> namesB b
    DPatBind p e  -> (namesP p, namesE e)
    DLocated d' _ -> namesD d'
    DSignature {} -> none
    DFixity {}    -> none
    DPragma {}    -> none
    DType {}      -> none
  where
    none = ([], Set.empty)
-- | All names a declaration mentions in a position that may not be
-- duplicated within a file.  For example, using @x@ on the RHS of two
-- bindings is fine, but @x@ may not appear on the LHS of two type
-- signatures.
allNamesD :: Decl -> [Located QName]
allNamesD d =
  case d of
    DBind b         -> fst (namesB b)
    DPatBind p _    -> namesP p
    DSignature ns _ -> ns
    DFixity _ ns    -> ns
    DPragma ns _    -> ns
    DType ts        -> [tsName ts]
    DLocated d' _   -> allNamesD d'
-- | The name introduced by a type synonym.
tsName :: TySyn -> Located QName
tsName (TySyn name _ _) = name
-- | The name a binding defines, and the names its body uses minus the
-- ones bound by its own parameters.
namesB :: Bind -> ([Located QName], Set QName)
namesB b = ([bName b], uses)
  where
    uses = boundNames (namesPs (bParams b)) (namesDef (thing (bDef b)))
-- | Value names used by the right-hand side of a binding.
namesDef :: BindDef -> Set QName
namesDef def =
  case def of
    DPrim   -> Set.empty
    DExpr e -> namesE e
-- | The free value names used by an expression.  Binding forms
-- (comprehensions, @where@, lambdas) subtract their locally bound
-- names via 'boundNames'.
namesE :: Expr -> Set QName
namesE expr =
  case expr of
    EVar x        -> Set.singleton x
    ELit _        -> Set.empty
    ETuple es     -> Set.unions (map namesE es)
    ERecord fs    -> Set.unions (map (namesE . value) fs)
    ESel e _      -> namesE e
    EList es      -> Set.unions (map namesE es)
    EFromTo _ _ _ -> Set.empty
    EInfFrom e e' -> Set.union (namesE e) (maybe Set.empty namesE e')
    -- Arm binders are in scope in the comprehension body, not in
    -- sibling arms; uses inside the arms are collected separately.
    EComp e arms  -> let (dss,uss) = unzip (map namesArm arms)
                     in Set.union (boundNames (concat dss) (namesE e))
                                  (Set.unions uss)
    EApp e1 e2    -> Set.union (namesE e1) (namesE e2)
    EAppT e _     -> namesE e
    EIf e1 e2 e3  -> Set.union (namesE e1) (Set.union (namesE e2) (namesE e3))
    -- Local declarations shadow in the body; their own free names
    -- (xs) still escape.
    EWhere e ds   -> let (bs,xs) = namesDs ds
                     in Set.union (boundNames bs (namesE e)) xs
    ETyped e _    -> namesE e
    ETypeVal _    -> Set.empty
    EFun ps e     -> boundNames (namesPs ps) (namesE e)
    ELocated e _  -> namesE e
    EParens e     -> namesE e
    EInfix a o _ b-> Set.insert (thing o) (Set.union (namesE a) (namesE b))
-- | The names bound by a list of patterns.
namesPs :: [Pattern] -> [Located QName]
namesPs ps = concat (map namesP ps)
-- | The names bound by a pattern.  These are always unqualified.
namesP :: Pattern -> [Located QName]
namesP (PVar x)       = [fmap mkUnqual x]
namesP PWild          = []
namesP (PTuple ps)    = namesPs ps
namesP (PRecord fs)   = namesPs (map value fs)
namesP (PList ps)     = namesPs ps
namesP (PTyped p _)   = namesP p
namesP (PSplit a b)   = namesPs [a, b]
namesP (PLocated p _) = namesP p
-- | The names defined and used by one comprehension match.
namesM :: Match -> ([Located QName], Set QName)
namesM m =
  case m of
    Match p e  -> (namesP p, namesE e)
    MatchLet b -> namesB b
-- | The names defined and used by an arm of a list comprehension.
-- Folding right-to-left: a name defined by an earlier match is dropped
-- when a later match redefines it (the later binding shadows), and
-- names bound by earlier matches are removed from the uses of later
-- matches.
namesArm :: [Match] -> ([Located QName], Set QName)
namesArm = foldr combine ([],Set.empty) . map namesM
  where combine (ds1,fs1) (ds2,fs2) =
          ( filter ((`notElem` map thing ds2) . thing) ds1 ++ ds2
          , Set.union fs1 (boundNames ds1 fs2)
          )
-- | Remove locally defined names from a set of free names.
boundNames :: [Located QName] -> Set QName -> Set QName
boundNames defs free = free `Set.difference` Set.fromList (map thing defs)
-- | Given the set of type variables that are in scope,
-- compute the type synonyms used by a type.
namesT :: Set QName -> Type -> Set QName
namesT vs = go
  where
  go ty =
    case ty of
      TWild         -> Set.empty
      TFun t1 t2    -> Set.union (go t1) (go t2)
      TSeq t1 t2    -> Set.union (go t1) (go t2)
      TBit          -> Set.empty
      TNum _        -> Set.empty
      TChar _       -> Set.empty
      TInf          -> Set.empty
      TApp _ ts     -> Set.unions (map go ts)
      TTuple ts     -> Set.unions (map go ts)
      TRecord fs    -> Set.unions (map (go . value) fs)
      TLocated t _  -> go t
      -- A nullary TUser naming an in-scope type variable is a variable
      -- reference, not a synonym use; anything else counts as a use.
      TUser x [] | x `Set.member` vs
                    -> Set.empty
      TUser x ts    -> Set.insert x (Set.unions (map go ts))
      TParens t     -> namesT vs t
      TInfix a _ _ b-> Set.union (namesT vs a) (namesT vs b)
-- | Type names defined and used by a group of mutually recursive
-- declarations; names defined within the group are not reported as used.
tnamesDs :: [Decl] -> ([Located QName], Set QName)
tnamesDs ds = (allDefs, boundNames allDefs (Set.unions usedSets))
  where
    (defSets, usedSets) = unzip (map tnamesD ds)
    allDefs             = concat defSets
-- | The type names defined and used by a single declaration.
-- Only type synonyms define a type name; the other declaration forms
-- merely use type names.
tnamesD :: Decl -> ([Located QName], Set QName)
tnamesD decl =
  case decl of
    DSignature _ s       -> ([], tnamesS s)
    DFixity {}           -> ([], Set.empty)
    DPragma {}           -> ([], Set.empty)
    DBind b              -> ([], tnamesB b)
    DPatBind _ e         -> ([], tnamesE e)
    DLocated d _         -> tnamesD d
    -- The synonym's own parameters are bound, so they are subtracted
    -- from the names used on its right-hand side.
    DType (TySyn n ps t) -> ([n], Set.difference (tnamesT t) (Set.fromList (map tpQName ps)))
-- | Type names mentioned anywhere in a binding: its optional signature,
-- its parameter patterns, and its right-hand side.
tnamesB :: Bind -> Set QName
tnamesB b = Set.unions [fromSig, fromParams, fromBody]
  where
    fromSig    = maybe Set.empty tnamesS (bSignature b)
    fromParams = Set.unions (map tnamesP (bParams b))
    fromBody   = tnamesDef (thing (bDef b))
-- | Type names used by the right-hand side of a binding.
tnamesDef :: BindDef -> Set QName
tnamesDef def =
  case def of
    DPrim   -> Set.empty
    DExpr e -> tnamesE e
-- | The type names used by an expression: from annotations, type
-- applications, range bounds and nested declarations.
tnamesE :: Expr -> Set QName
tnamesE expr =
  case expr of
    EVar _        -> Set.empty
    ELit _        -> Set.empty
    ETuple es     -> Set.unions (map tnamesE es)
    ERecord fs    -> Set.unions (map (tnamesE . value) fs)
    ESel e _      -> tnamesE e
    EList es      -> Set.unions (map tnamesE es)
    -- Unlike 'namesE', ranges matter here: their bounds are types.
    EFromTo a b c -> Set.union (tnamesT a)
                     (Set.union (maybe Set.empty tnamesT b) (maybe Set.empty tnamesT c))
    EInfFrom e e' -> Set.union (tnamesE e) (maybe Set.empty tnamesE e')
    EComp e mss   -> Set.union (tnamesE e) (Set.unions (map tnamesM (concat mss)))
    EApp e1 e2    -> Set.union (tnamesE e1) (tnamesE e2)
    EAppT e fs    -> Set.union (tnamesE e) (Set.unions (map tnamesTI fs))
    EIf e1 e2 e3  -> Set.union (tnamesE e1) (Set.union (tnamesE e2) (tnamesE e3))
    -- Type names defined by the local declarations are removed from
    -- the body's uses; the declarations' own uses (xs) still escape.
    EWhere e ds   -> let (bs,xs) = tnamesDs ds
                     in Set.union (boundNames bs (tnamesE e)) xs
    ETyped e t    -> Set.union (tnamesE e) (tnamesT t)
    ETypeVal t    -> tnamesT t
    EFun ps e     -> Set.union (Set.unions (map tnamesP ps)) (tnamesE e)
    ELocated e _  -> tnamesE e
    EParens e     -> tnamesE e
    EInfix a _ _ b-> Set.union (tnamesE a) (tnamesE b)
-- | Type names used in an explicit type-application argument.
tnamesTI :: TypeInst -> Set QName
tnamesTI ti =
  case ti of
    NamedInst f -> tnamesT (value f)
    PosInst t   -> tnamesT t
-- | Type names used by a pattern (only type annotations contribute).
tnamesP :: Pattern -> Set QName
tnamesP (PVar _)       = Set.empty
tnamesP PWild          = Set.empty
tnamesP (PTuple ps)    = Set.unions (map tnamesP ps)
tnamesP (PRecord fs)   = Set.unions (map (tnamesP . value) fs)
tnamesP (PList ps)     = Set.unions (map tnamesP ps)
tnamesP (PTyped p t)   = tnamesP p `Set.union` tnamesT t
tnamesP (PSplit a b)   = tnamesP a `Set.union` tnamesP b
tnamesP (PLocated p _) = tnamesP p
-- | Type names used by a comprehension match.
tnamesM :: Match -> Set QName
tnamesM (Match p e)  = tnamesP p `Set.union` tnamesE e
tnamesM (MatchLet b) = tnamesB b
-- | The type names used by a type schema: names in the constraints and
-- the body, minus the schema's own bound type parameters.
tnamesS :: Schema -> Set QName
tnamesS (Forall params props ty _) =
    Set.difference (Set.union (Set.unions (map tnamesC props)) (tnamesT ty))
                   (Set.fromList (map tpQName params))
-- | Type names used by a single constraint (prop).
tnamesC :: Prop -> Set QName
tnamesC (CFin t)       = tnamesT t
tnamesC (CEqual t1 t2) = tnamesT t1 `Set.union` tnamesT t2
tnamesC (CGeq t1 t2)   = tnamesT t1 `Set.union` tnamesT t2
tnamesC (CArith t)     = tnamesT t
tnamesC (CCmp t)       = tnamesT t
tnamesC (CLocated p _) = tnamesC p
tnamesC (CType t)      = tnamesT t
-- | Compute the type synonyms/type variables used by a type.
-- Unlike 'namesT' there is no set of in-scope variables here, so every
-- 'TUser' head counts as a use.
tnamesT :: Type -> Set QName
tnamesT ty =
  case ty of
    TWild         -> Set.empty
    TFun t1 t2    -> Set.union (tnamesT t1) (tnamesT t2)
    TSeq t1 t2    -> Set.union (tnamesT t1) (tnamesT t2)
    TBit          -> Set.empty
    TNum _        -> Set.empty
    TChar __      -> Set.empty
    TInf          -> Set.empty
    TApp _ ts     -> Set.unions (map tnamesT ts)
    TTuple ts     -> Set.unions (map tnamesT ts)
    TRecord fs    -> Set.unions (map (tnamesT . value) fs)
    TLocated t _  -> tnamesT t
    TUser x ts    -> Set.insert x (Set.unions (map tnamesT ts))
    TParens t     -> tnamesT t
    TInfix a _ _ c-> Set.union (tnamesT a) (tnamesT c)
| iblumenfeld/cryptol | src/Cryptol/Parser/Names.hs | bsd-3-clause | 10,145 | 0 | 14 | 2,941 | 3,995 | 1,960 | 2,035 | 207 | 19 |
{- This test checks that specialisations can apply inside
wrappers. In particular, the wrapper for 'foo' should
look like
Tmpl= \ (n_aal [Occ=Once!] :: GHC.Types.Int) ->
case n_aal of _ { GHC.Types.I# ipv_smZ [Occ=Once] ->
case Roman.foo_$s$wgo ipv_smZ 6 of ww_spp { __DEFAULT ->
GHC.Types.I# ww_spp
}
}}]
Note the $s$wgo. That in turn allows $wgo to be dead code.
-}
module Roman where
-- | Drive a small strict loop whose specialisation the surrounding
-- test observes.  Every terminating run ends in the @k <= 0@ branch,
-- so the result is always 0.
foo :: Int -> Int
foo n = n `seq` loop (Just n) (Just (6 :: Int))
  where
    loop u (Just x) = x `seq`
      case u of
        Nothing -> loop (Just 10) (Just m)
        Just k
          | k <= 0    -> 0
          | k < 100   -> loop (Just (k - 2)) (Just x)
          | k < 500   -> loop (Just (k - 3)) (Just m)
          | otherwise -> loop (Just (k - 1)) (Just (m + m))
      where
        m = x + x + x + x + x + x + x
| ezyang/ghc | testsuite/tests/simplCore/should_compile/spec-inline.hs | bsd-3-clause | 924 | 0 | 16 | 374 | 266 | 135 | 131 | 13 | 2 |
module PackageTests.PathsModule.Executable.Check (suite) where
import PackageTests.PackageTester
(PackageSpec(..), SuiteConfig, assertBuildSucceeded, cabal_build)
import System.FilePath
import Test.Tasty.HUnit
-- | Build the PathsModule/Executable test package and assert success.
suite :: SuiteConfig -> Assertion
suite config = do
    result <- cabal_build config pkgSpec
    assertBuildSucceeded result
  where
    pkgSpec = PackageSpec
        { directory  = "PackageTests" </> "PathsModule" </> "Executable"
        , distPref   = Nothing
        , configOpts = []
        }
| trskop/cabal | Cabal/tests/PackageTests/PathsModule/Executable/Check.hs | bsd-3-clause | 518 | 0 | 13 | 117 | 121 | 69 | 52 | 13 | 1 |
import Break020b
line1 _ = return ()
line2 _ = return ()
-- Calls the two anchors from a separate declaration so breakpoints can
-- be placed inside another binding.
in_another_decl _ = line1 0 >> line2 0
-- Drive the breakpoint scenario: hit lines in this module, then inside
-- a locally defined helper, then inside another module (Break020b).
main = do
  line1 0
  line2 0
  in_another_decl 0
  in_another_module 0
  line2 1
  return ()
{-# LANGUAGE TupleSections, OverloadedStrings, ScopedTypeVariables #-}
module Handler.Swag
( getSwagR
, postSwagBuyR
) where
import Import
import Utils
import Handler.SwagForms
-- | Render the swag store page: every item ordered by cost then name,
-- each paired with its own freshly generated purchase form.
getSwagR :: Handler Html
getSwagR = do
    -- Only the last LDAP field is needed here; presumably the
    -- financial/"fina" flag used by the template -- confirm in swag.hamlet.
    (LdapUser _ _ _ _ fina) <- getLdapUser
    swags <- map entityVal `fmap` (runDB $ selectList [] [Asc SwagCost, Asc SwagName])
    -- One (widget, enctype) pair per item so each form posts independently.
    forms <- mapM (\_ -> generateFormPost swagBuyForm) swags
    let swagsandforms = zip swags forms
    defaultLayout $ do
        setTitle "CSH Swag"
        $(widgetFile "swag")
-- | Handle a purchase of swag item @sidi@: validate the posted form,
-- check stock, record the sale, decrement inventory, and email the
-- financial officer.  On any form failure the form is re-rendered.
postSwagBuyR :: Int -> Handler Html
postSwagBuyR sidi = do
    let sid = fromIntegral sidi
    user <- getUser `fmap` waiRequest
    (LdapUser cn mail act onfl _) <- getLdapUser
    ((result, widget), enctype) <- runFormPost $ swagBuyForm
    conf <- getExtra
    case result of
        FormSuccess (SwagBuy num) -> do
            -- The whole purchase runs in one DB transaction; 'msg' is
            -- the user-facing outcome shown via setMessage.
            msg <- runDB $ do
                swag <- get $ SwagKey sid
                -- Next sale id: one past the highest existing SaleSuid,
                -- or 0 for the very first sale.
                lastsale <- (map entityVal . (take 1)) `fmap` selectList [] [Desc SaleSuid]
                let next_suid = case lastsale of
                                    [(Sale _ suid _ _ _ _)] -> suid + 1
                                    _ -> 0
                case swag of
                    Just (Swag _ n _ _ _ c a) -> do
                        -- Stock check: requested amount must be positive
                        -- and not exceed what is available.
                        if a - num >= 0 && num > 0
                            then do
                                let sale = Sale sid
                                           next_suid
                                           user
                                           num
                                           (c * (fromIntegral num))
                                           False
                                _ <- insert sale
                                update (SwagKey sid) [SwagAmount =. (a - num)]
                                -- Notify financial with the buyer's
                                -- contact details from LDAP.
                                let to = pack $ "financial@csh.rit.edu"
                                    subject = pack $ "Order placed by " ++ (unpack cn) ++ " for " ++ (show num) ++ " of " ++ (unpack n)
                                    message = pack $ (unpack cn) ++ " should be reachable at " ++ (unpack mail) ++ " Active: " ++ (show act) ++ " On Floor: " ++ (show onfl)
                                liftBase $ sendEmail
                                    (smtpserver conf)
                                    (emailfrom conf)
                                    (emailuser conf)
                                    (emailpassword conf)
                                    to
                                    subject
                                    message
                                return "Order successful"
                            else return $ "We only have " ++ (show a) ++ " of those in stock. You asked for " ++ (show num) ++ "."
                    Nothing -> return "You seem to have requested nonexistent swag. Please try again, or email financial@csh.rit.edu with what you want."
            setMessage $ toHtml msg
            redirect SwagR
        -- Validation failed: re-present the purchase form.
        _ -> defaultLayout
                [whamlet|
                    <p>There was a problem with your input. Please try again.
                    <form method=post action=@{SwagBuyR sidi} enctype=#{enctype}>
                        ^{widget}
                        <input type="submit" value="Buy">
                |]
| dgonyeo/lambdollars | Handler/Swag.hs | mit | 3,419 | 0 | 35 | 1,707 | 768 | 377 | 391 | -1 | -1 |
module Util.Vector2 where
import Text.Printf
data Vector2 = Vector2 Float Float
-- | Unit vector pointing in the given direction (radians).
fromAngle :: Float -> Vector2
fromAngle theta = Vector2 (cos theta) (sin theta)
-- | Component-wise vector addition.
vadd :: Vector2 -> Vector2 -> Vector2
vadd (Vector2 ax ay) (Vector2 bx by) = Vector2 (ax + bx) (ay + by)
-- | Component-wise vector subtraction (first minus second).
vsub :: Vector2 -> Vector2 -> Vector2
vsub (Vector2 ax ay) (Vector2 bx by) = Vector2 (ax - bx) (ay - by)
-- | Negate both components.
vneg :: Vector2 -> Vector2
vneg (Vector2 vx vy) = Vector2 (negate vx) (negate vy)
-- | Euclidean length (magnitude) of the vector.
vlength :: Vector2 -> Float
vlength (Vector2 vx vy) = sqrt (vx * vx + vy * vy)
-- | Multiply both components by a scalar.
vscale :: Vector2 -> Float -> Vector2
vscale (Vector2 vx vy) factor = Vector2 (vx * factor) (vy * factor)
-- | Vector scaled to length 1.  (Divides by zero on the zero vector,
-- yielding NaN components, as with the original definition.)
vunit :: Vector2 -> Vector2
vunit v = let invLen = 1 / vlength v in vscale v invLen
-- | Dot (inner) product of two vectors.
vdot :: Vector2 -> Vector2 -> Float
vdot (Vector2 ax ay) (Vector2 bx by) = ax * bx + ay * by
-- | 2D cross product (z-component of the 3D cross product).
vscalar :: Vector2 -> Vector2 -> Float
vscalar (Vector2 ax ay) (Vector2 bx by) = (ax * by) - (bx * ay)
-- | Render a vector as @(x, y)@ for printf's %v directive.
instance PrintfArg Vector2 where
  formatArg (Vector2 x y) = formatArg (concat ["(", show x, ", ", show y, ")"])
| kaisellgren/ankka | src/Util/Vector2.hs | mit | 965 | 0 | 12 | 212 | 496 | 255 | 241 | 23 | 1 |
{-| Implementation of peer discovery using using Kademlia Distributed Hash Table.
For more details regarding DHT see this package on hackage:
<https://hackage.haskell.org/package/kademlia>
-}
module Pos.Infra.DHT.Real
( module Pos.Infra.DHT.Real.CLI
, module Pos.Infra.DHT.Real.Param
, module Pos.Infra.DHT.Real.Real
, module Pos.Infra.DHT.Real.Types
) where
import Pos.Infra.DHT.Real.CLI
import Pos.Infra.DHT.Real.Param
import Pos.Infra.DHT.Real.Real
import Pos.Infra.DHT.Real.Types
| input-output-hk/pos-haskell-prototype | infra/src/Pos/Infra/DHT/Real.hs | mit | 570 | 0 | 5 | 133 | 79 | 60 | 19 | 9 | 0 |
{-|
- Module : StermsParser
- Description : S-term parser
- Copyright : (c) Maciej Bendkowski
-
- Maintainer : maciej.bendkowski@gmail.com
- Stability : experimental
-}
module StermsParser where
import ParserUtils
import Sterms
-- S-term grammar
-- <combinator> := "S"
-- <subterm> := <combinator> | "(" <term> ")"
-- <term> := <subterm>+
-- | Parses the single primitive combinator \"S\".
combinatorParser :: Parser Term
combinatorParser = symb "S" >> return S
-- | Parses a subterm: a parenthesised term or the bare combinator.
subtermParser :: Parser Term
subtermParser = parenthesised `dmplus` combinatorParser
  where
    parenthesised :: Parser Term
    parenthesised = do
        _ <- symb "("
        inner <- termParser
        _ <- symb ")"
        return inner
-- | Parses a term: one or more subterms combined by left-associative
-- application.  (Assumes 'pstar' yields a non-empty list, as the
-- original head/tail-based fold did.)
termParser :: Parser Term
termParser = do
    ts <- pstar subtermParser
    return (foldl App (head ts) (tail ts))
-- | Attempts to parse an S-term from the given string; succeeds only
-- when the whole input is consumed.
parseSterm :: String -> Maybe Term
parseSterm input =
    case apply termParser input of
      ((term, rest) : _) | null rest -> Just term
      _ -> Nothing
| maciej-bendkowski/blaz | src/StermsParser.hs | mit | 1,550 | 0 | 12 | 553 | 334 | 171 | 163 | 30 | 3 |
{-# LANGUAGE TupleSections #-}
module Hashiwokakero where
import Control.Applicative
import Data.Attoparsec.ByteString
import Data.Attoparsec.ByteString.Char8
import Data.Char (ord)
import Data.SBV
import Control.Monad.Writer
import Data.Map (Map, (!))
import Data.List (transpose, findIndices)
import Data.Maybe (fromJust, catMaybes, isJust)
import Util
-- Hashiwokakero
-- Some cells have numbers. You draw "bridges" or edges horizontally and vertically
-- between numbered cells.
-- At most 2 bridges between any two cells.
-- The number is the number of edges on that cell.
-- Bridges cannot cross
-- | A puzzle grid: @Just n@ is an island that must meet exactly @n@
-- bridges, @Nothing@ is open water.
type HashiwokakeroInst = [[Maybe Integer]]
-- | A candidate bridge between two island coordinates (row, column).
type Edge = ((Int, Int), (Int, Int))
-- | Stable SBV variable name for a candidate bridge, e.g. @1-2-3-4@.
toVarName :: Edge -> String
toVarName ((r1, c1), (r2, c2)) =
    show r1 ++ "-" ++ show c1 ++ "-" ++ show r2 ++ "-" ++ show c2
-- | All unordered pairs of distinct list elements, in order of first
-- occurrence.
allPairs :: [a] -> [(a,a)]
allPairs []       = []
allPairs (y : ys) = [ (y, z) | z <- ys ] ++ allPairs ys
-- | Do two bridges cross?  True exactly when one runs along a row, the
-- other along a column, and each passes strictly between the other's
-- endpoints.
pairIntersects :: Edge -> Edge -> Bool
pairIntersects ((x1, y1), (x2, y2)) ((x3, y3), (x4, y4))
  | x1 == x2 && y3 == y4 = between x3 x1 x4 && between y1 y3 y2
  | y1 == y2 && x3 == x4 = between y3 y1 y4 && between x1 x3 x2
  | otherwise            = False
  where
    -- Strictly between, in either direction.
    between a b c = (a < b && b < c) || (c < b && b < a)
-- | All candidate bridges: horizontally adjacent pairs of islands in
-- each row, plus the same computation on the transposed grid (with
-- coordinates swapped back) for the vertical ones.
getEdges :: HashiwokakeroInst -> [Edge]
getEdges grid = rowEdges grid ++ map flipCoords (rowEdges (transpose grid))
  where
    flipCoords ((r, c), (r', c')) = ((c, r), (c', r'))
    rowEdges rows =
      [ ((r, c1), (r, c2))
      | (r, row)  <- zip [0 ..] rows
      , (c1, c2)  <- adjacent (findIndices isJust row)
      ]
    -- Consecutive pairs of island columns within one row.
    adjacent xs = zip xs (drop 1 xs)
-- | Encode the puzzle rules as SBV constraints over one integer
-- variable per candidate bridge: each count is between 0 and 2, two
-- crossing bridges cannot both be present, and each island's incident
-- counts sum to its clue.
rules :: HashiwokakeroInst -> Symbolic SBool
rules inst = do
  let width = length (head inst)
  let height = length inst
  let edges = getEdges inst
  -- One symbolic variable per candidate bridge, named after its edge.
  edgesWithVars <- forM edges $ \edge -> do
    var <- symbolic (toVarName edge)
    return (edge, var)
  addConstraints $ do
    forM_ edgesWithVars $ \(_, var) -> do
      addConstraint $ var .>= 0 &&& var .<= 2
    -- Crossing bridges are mutually exclusive.
    forM_ (filter (\((e1, _), (e2, _)) -> pairIntersects e1 e2) (allPairs edgesWithVars)) $ \((_, v1), (_, v2)) ->
      addConstraint $ v1 .== 0 ||| v2 .== 0
    -- Each numbered cell's incident bridge counts must sum to its clue.
    forM_ [0..height-1] $ \r ->
      forM_ [0..width-1] $ \c ->
        case (inst !! r) !! c
          of Nothing -> return ()
             Just i -> addConstraint $ literal i .== sum (map snd $ filter (\((p1, p2), _) -> p1 == (r,c) || p2 == (r,c)) edgesWithVars)
-- | Render one SAT model as ASCII art: clue digits for islands,
-- @-@/@=@ for single/double horizontal bridges, @|@/@‖@ for vertical
-- ones, spaces elsewhere.  Assumes a non-empty rectangular grid and
-- that each bridge variable is 0..2 (guaranteed by 'rules'; the Just
-- patterns below are otherwise non-exhaustive).
getSolution :: HashiwokakeroInst -> Map String CW -> String
getSolution inst m =
  let
    width = length (head inst)
    height = length inst
    edges = getEdges inst
    -- A cell (r, c) carries a horizontal bridge if some row-wise edge
    -- passes strictly through it; analogously for vertical.
    getHoriz = getHorizVert (\(r1, c1) -> \(r2, c2) -> \(r3, c3) -> r1 == r2 && r2 == r3 && ((c1 < c2 && c2 < c3) || (c3 < c2 && c2 < c1)))
    getVert = getHorizVert (\(r1, c1) -> \(r2, c2) -> \(r3, c3) -> c1 == c2 && c2 == c3 && ((r1 < r2 && r2 < r3) || (r3 < r2 && r2 < r1)))
    -- Look up the model value of the first edge through (r, c) with a
    -- non-zero bridge count.
    getHorizVert fn r c =
      case (
          catMaybes $
          map (\edge ->
            case (fromCW (m ! toVarName edge) :: Word32)
              of 0 -> Nothing
                 i -> Just i
          ) $ filter (\(p1, p2) -> fn p1 (r, c) p2) edges
        ) of x : _ -> Just x
             [] -> Nothing
  in
    concat (map (\r ->
        concat (map (\c ->
          case (inst !! r) !! c
            of Just i -> show i
               Nothing ->
                 case getHoriz r c
                   of Just 2 -> "="
                      Just 1 -> "-"
                      Nothing ->
                        case getVert r c
                          of Just 2 -> "‖"
                             Just 1 -> "|"
                             Nothing -> " "
        ) [0 .. width-1]) ++ "\n"
      ) [0 .. height-1])
-- | Enumerate all satisfying models of a problem and project each
-- model dictionary through the given extraction function.
solvePuzzle :: Symbolic SBool -> (Map String CW -> a) -> IO [a]
solvePuzzle problem extract = do
    result <- allSat problem
    pure (map extract (getModelDictionaries result))
-- | Solve a puzzle and print the number of solutions followed by each
-- rendered solution grid.
hashiwokakero :: HashiwokakeroInst -> IO ()
hashiwokakero puzzle = do
    solutions <- solvePuzzle (rules puzzle) (getSolution puzzle)
    putStrLn (show (length solutions) ++ " solution(s)")
    mapM_ putStrLn solutions
-- | Parse a puzzle grid: digits are islands, @#@ and @.@ are water.
-- Note that an empty line is required to signal the end of the puzzle.
hashiwokakeroParser :: Parser HashiwokakeroInst
hashiwokakeroParser = do
    rows <- many (many1 cellParser <* endOfLine)
    endOfLine
    pure rows
  where
    cellParser :: Parser (Maybe Integer)
    cellParser =
            (Just . toInteger . subtract 48 . ord <$> digit)
        <|> (Nothing <$ char '#')
        <|> (Nothing <$ char '.')
--
--
--
------------------
-- Exercise 13.17.
------------------
--
--
--
module E'13'17 where
{- GHCi>
:t 37
:t True
-}
-- 37 :: Num a => a
--
-- True :: Bool
---------------------------
-- f n = 37 + n -- 1
-- f True = 34 -- 2
---------------------------
--
-- I see an ambiguity in the argument type: in the first line the argument "n" is used in a numeric context
-- and in the second line the argument is a boolean "True".
{- GHCi>
:{
let f n = 37 + n
f True = 34
:}
-}
--
-- <interactive>:3:19:
-- No instance for (Num Bool) arising from a use of `+'
-- In the expression: 37 + n
-- In an equation for `f': f n = 37 + n
----------------------
-- g 0 = 37 -- 1
-- g n = True -- 2
----------------------
--
-- I see an ambiguity in the return type. In the first line it is numeric "37"
-- and in the second line it is a boolean "True".
{- GHCi>
:{
let g 0 = 37
g n = True
:}
-}
--
-- <interactive>:3:13:
-- Could not deduce (Num Bool) arising from the literal `37'
-- from the context (Num a, Eq a)
-- bound by the inferred type of g :: (Num a, Eq a) => a -> Bool
-- at <interactive>:(3,5)-(4,16)
-- In the expression: 37
-- In an equation for `g': g 0 = 37
--------------------------------
-- h x -- 1
-- | x > 0 = True -- 2
-- | otherwise = 37 -- 3
--------------------------------
--
-- I see an ambiguity in the clause return type: in the second line it is a boolean "True"
-- and in the third line it is a numeric "37".
{- GHCi>
:{
let h x
| x > 0 = True
| otherwise = 37
:}
-}
--
-- <interactive>:5:23:
-- Could not deduce (Num Bool) arising from the literal `37'
-- from the context (Ord a, Num a)
-- bound by the inferred type of h :: (Ord a, Num a) => a -> Bool
-- at <interactive>:(3,5)-(5,24)
-- In the expression: 37
-- In an equation for `h':
-- h x
-- | x > 0 = True
-- | otherwise = 37
--------------------
-- k x = 34 -- 1
-- k 0 = 35 -- 2
--------------------
--
-- I see an ambiguity in the pattern definitions: the first pattern will consume everything.
-- That's why the second pattern becomes useless.
{- GHCi>
:{
let k x = 34
k 0 = 35
:}
-}
--
-- <interactive>:3:5: Warning:
-- Pattern match(es) are overlapped
-- In an equation for `k': k 0 = ...
| pascal-knodel/haskell-craft | Chapter 13/E'13'17.hs | mit | 2,445 | 0 | 2 | 736 | 78 | 77 | 1 | 1 | 0 |
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE OverloadedStrings #-}
module Database.Persist.MySQL.Extra
( rawSqlSource
, selectKeysBy
, insertOrUpdateMany_
, insertOrUpdate_
, insertOrUpdateUniqueMany_
, insertOrUpdateUniqueMany
, insertOrUpdateUnique_
--, insertOrUpdateUnique
, repsertUniqueMany_
, repsertUniqueMany
, repsertUnique_
, repsertUnique
, insertMany_
, SqlWaitException
, SqlPriority (..)
, DupUpdate (..)
) where
import Control.Arrow (left)
import Control.Applicative ((<$>), (<*>))
import Control.Exception (Exception, throwIO, bracket)
import Control.Monad (forM_, when)
import Control.Monad.Catch (MonadThrow)
import Control.Monad.IO.Class (MonadIO (..), liftIO)
import Control.Monad.Trans (lift)
import Control.Monad.Trans.Reader (ReaderT)
import Control.Monad.Trans.Resource (MonadResource, MonadResourceBase)
import Control.Monad.Trans.Control (MonadBaseControl, liftBaseOp)
import Control.Monad.Reader (MonadReader, ask)
import Data.Conduit
import qualified Data.Conduit.List as CL
import Data.IORef
import Data.List hiding (insert, insertBy,
maximum, minimum)
import Data.List.Split (chunksOf)
import Data.Map (Map)
import qualified Data.Map as M
import Data.Maybe
import Data.Monoid ((<>))
import Data.Proxy
import Data.Text (Text)
import qualified Data.Text as T
import Data.Typeable (Typeable)
import Database.Persist.Class hiding (insertMany_)
import Database.Persist.Sql hiding (insertMany_)
--import Database.Persist.Types
import Control.Concurrent.MVar.Lifted
--import Control.Exception.Lifted (bracket, )
import Prelude hiding (head, init, last, tail)
import Safe
import System.IO.Unsafe (unsafePerformIO)
-- | SqlPriority is similar to LOW PRIORITY option with on myisam table, but a lock is used on the server itself
-- and will throw an exception when the number of queued queries exceeds some limit

-- | Thrown when too many low-priority statements are already queued on a
-- table; carries a human-readable description of the overflow.
data SqlWaitException = SqlWaitException Text
  deriving (Show, Typeable)

-- | Scheduling mode for inserts/updates: 'LowPriority' serialises access per
-- table through an in-process lock (see 'withPriority'); 'NormalPriority'
-- runs the statement immediately.
data SqlPriority = LowPriority | NormalPriority -- TODO: Delayed (allows the thread to continue while a new thread waits for lock access)

-- | Field to use when updating a duplicate entity
-- TODO: insertOrUpdate with [Update val / EntityField]
data DupUpdate record = forall typ. PersistField typ => DupUpdateField (EntityField record typ)

instance Exception SqlWaitException

--instance Error SqlWaitException where
--  strMsg = PersistError . pack

-- Use these semaphores to implement low priority updates in mysql with InnoDB tables
-- Global, lazily-initialised map from table name to (table lock, queued-query
-- counter).  NOINLINE is required so the 'unsafePerformIO' allocation runs
-- exactly once.
updateSems :: IORef (Map Text (MVar (), IORef Int))
{-# NOINLINE updateSems #-}
updateSems = unsafePerformIO $ newIORef M.empty

-- The maximum number of waiting updates. Whenever LowPriority inserts/updates are used, these methods will throw an exception whenever the maximum number of queries become enqueued.
-- (This could happen during periods of high traffic or in a denial-of-service attack)
maxWaitingQueries :: Int
maxWaitingQueries = 20
-- | Execute a raw SQL statement and return its results as a
-- Source.
rawSqlSource :: (MonadResource m, MonadReader SqlBackend m, RawSql a)
             => Text            -- ^ SQL statement, possibly with placeholders.
             -> [PersistValue]  -- ^ Values to fill the placeholders.
             -> Source m a
rawSqlSource stmt = run
  where
    -- Phantom value used only so 'rawSqlCols' / 'rawSqlColCountReason' can
    -- inspect the result type; it is never evaluated.
    getType :: (x -> Source m a) -> a
    getType = error "rawSqlSource.getType"
    x = getType run
    process :: (RawSql a) => [PersistValue] -> Either Text a
    process = rawSqlProcessRow
    -- Substitute each "??" placeholder in the statement with the column
    -- list supplied by 'rawSqlCols', then run the query.
    -- withStmt' :: (MonadResource m) => [Text] -> [PersistValue] -> Source m [PersistValue]
    withStmt' colSubsts params = rawQuery sql params
      where
        sql = T.concat $ makeSubsts colSubsts $ T.splitOn placeholder stmt
        placeholder = "??"
        makeSubsts (s:ss) (t:ts) = t : s : makeSubsts ss ts
        makeSubsts [] [] = []
        makeSubsts [] ts = [T.intercalate placeholder ts]
        makeSubsts ss [] = error (concat err)
          where
            err = [ "rawsql: there are still ", show (length ss)
                  , "'??' placeholder substitutions to be made "
                  , "but all '??' placeholders have already been "
                  , "consumed. Please read 'rawSql's documentation "
                  , "on how '??' placeholders work."
                  ]
    -- run :: (RawSql a, MonadSqlPersist m, MonadResource m) => [PersistValue] -> Source m a
    run params = do
        conn <- ask
        let (colCount, colSubsts) = rawSqlCols (connEscapeName conn) x
        withStmt' colSubsts params $= getRow colCount
    -- Validate the column count of every row, then decode it.
    getRow :: (RawSql a, MonadResource m) => Int -> Conduit [PersistValue] m a
    getRow colCount = awaitForever $ \row -> do
        if colCount == length row
          then getter row >>= yield
          else fail $ concat
                 [ "rawSql: wrong number of columns, got "
                 , show (length row), " but expected ", show colCount
                 , " (", rawSqlColCountReason x, ")."
                 ]
    getter :: (RawSql a, MonadResource m) => [PersistValue] -> m a
    getter row = case process row of
                   Left err -> fail (T.unpack err)
                   Right r -> return r
--selectKeysBy :: (MonadResourceBase m, PersistEntity val, PersistEntityBackend val ~ PersistMonadBackend m, MonadSqlPersist m, MonadLogger m) =>
-- [Unique val] -> [SelectOpt val] -> Source m (Key val)
--selectKeysBy [] _ = CL.sourceList []
--selectKeysBy uniqs opts = do
-- conn <- lift askSqlConn
-- if map (orderClause False conn) orders /= []
-- then error "ORDER BY clause is not supported by selectKeysBy, use selectKeysByUnordered instead"
-- else do
-- let esc = connEscapeName conn
-- cols = case entityPrimary t of
-- Just pdef -> T.intercalate "," $ map (esc . snd) $ primaryFields pdef
-- Nothing -> esc $ entityID t
-- wher uniq = " WHERE (" <> (flip T.snoc ')' . wherKey . map snd $ persistUniqueToFieldNames uniq)
-- wherKey fs = T.intercalate " AND " $ map ((<> " <=> ?") . esc) fs
-- sql = connLimitOffset conn (limit,offset) True $
-- T.intercalate "\n UNION ALL " $ map (\uniq -> "SELECT " <> cols <> " FROM " <> (esc $ entityDB t) <> wher uniq) uniqs
-- vals = concatMap persistUniqueToValues uniqs
-- rawQuery sql vals $= CL.mapM parse
-- where
-- t = entityDef $ proxyFromUniqs uniqs
-- (limit, offset, orders) = limitOffsetOrder opts
-- --parse :: [PersistValue] -> [Key val]
-- parse xs = case entityPrimary t of
-- Nothing ->
-- case xs of
-- [PersistInt64 x] -> return $ Key $ PersistInt64 x
-- [PersistDouble x] -> return $ Key $ PersistInt64 (truncate x) -- oracle returns Double
-- _ -> liftIO $ throwIO $ PersistMarshalError $ "Unexpected in selectKeysBy False: " <> T.pack (show xs)
-- Just pdef ->
-- let pks = map fst $ primaryFields pdef
-- keyvals = map snd $ filter (\(a, _) -> let ret=isJust (find (== a) pks) in ret) $ zip (map fieldHaskell $ entityFields t) xs
-- in return $ Key $ PersistList keyvals
-- | Look up the primary key for each uniqueness constraint, issuing one
-- query per constraint and yielding 'Nothing' for constraints that match
-- no row.
selectKeysBy
  :: (MonadReader SqlBackend m,ToBackendKey SqlBackend val,MonadResource m,PersistEntity val)
  => [Unique val] -> Source m (Maybe (Key val))
selectKeysBy [] = CL.sourceList []
selectKeysBy uniqs = do
    conn <- ask
    -- Select the key columns: the composite key fields when the entity has
    -- a composite primary key, otherwise the implicit id column.
    let esc = connEscapeName conn
        cols = case entityPrimary t of
          Just pdef -> T.intercalate "," $ map (esc . fieldDB) $ compositeFields pdef
          Nothing -> esc $ fieldDB $ entityId t
        -- MySQL's NULL-safe equality (<=>) is used so NULL unique fields
        -- still match.
        wher uniq = " WHERE (" <> (flip T.snoc ')' . wherKey . map snd $ persistUniqueToFieldNames uniq)
        wherKey fs = T.intercalate " AND " $ map ((<> " <=> ?") . esc) fs
    forM_ uniqs $ \uniq -> do
      let sql = "SELECT " <> cols <> " FROM " <> (esc $ entityDB t) <> wher uniq
          vals = persistUniqueToValues uniq
      r <- lift $ listToMaybe <$> (rawQuery sql vals $= CL.mapM parse $$ CL.consume)
      yield r
    --forM_ uniqs $ \uniq -> do
    --  let sql = "SELECT " <> cols <> " FROM " <> (esc $ entityDB t) <> wher uniq
    --      vals = persistUniqueToValues uniq
    --  rawQuery sql vals $= CL.mapM parse
  where
    proxy = proxyFromUniqs uniqs
    t = entityDef proxy
    -- Decode one result row to a key.  Doubles are accepted because some
    -- backends (oracle) return numeric ids as Double.
    --parse :: [PersistValue] -> [Key val]
    parse xs = case entityPrimary t of
      Nothing ->
        case xs of
          [PersistInt64 x] -> return $ toSqlKey x
          [PersistDouble x] -> return $ toSqlKey (truncate x) -- oracle returns Double
          _ -> liftIO $ throwIO $ PersistMarshalError $ "Unexpected in selectKeysBy False: " <> T.pack (show xs)
      Just pdef ->
        -- For composite keys, pick out exactly the fields that belong to
        -- the primary key, in entity-field order.
        let pks = map fieldHaskell $ compositeFields pdef
            keyvals = map snd $ filter
              (\(a, _) -> let ret = isJust (find (== a) pks) in ret) $
              zip (map fieldHaskell $ entityFields t) xs
        in case keyvals of
             [PersistInt64 x] -> return $ toSqlKey x
             [PersistDouble x] -> return $ toSqlKey (truncate x) -- oracle returns Double
             _ -> liftIO $ throwIO $ PersistMarshalError $ "Unexpected in selectKeysBy False: " <> T.pack (show xs)
-- | Insert or update values in the database (when a duplicate primary key already exists)
-- Builds one multi-row @INSERT ... ON DUPLICATE KEY UPDATE@ statement for
-- the whole batch; the listed 'FieldDef's are the columns rewritten on
-- conflict.
insertOrUpdateMany_'
  :: (MonadIO m,MonadThrow m,MonadBaseControl IO m,PersistEntity r,PersistEntityBackend r ~ SqlBackend)
  => SqlPriority
  -> [Entity r]
  -> [FieldDef]
  -> Control.Monad.Trans.Reader.ReaderT SqlBackend m ()
insertOrUpdateMany_' _ [] _ = return ()
-- With no update fields, fall back to plain keyed inserts.
insertOrUpdateMany_' priority es [] = withPriority priority (entityDB . entityDef $ proxyFromEntities es) $ mapM_ (\(Entity key val) -> insertKey key val) es
insertOrUpdateMany_' priority es ufs = withPriority priority (entityDB t) $ do
    conn <- ask
    let esc = connEscapeName conn
        -- id column first, then the ordinary columns, matching the order of
        -- the values supplied below.
        insertFields = esc (fieldDB (entityId t)) : map (esc . fieldDB) (entityFields t)
        cols = T.intercalate (T.singleton ',') insertFields
        placeholders = replicateQ $ length insertFields
        updateFields = map (esc . fieldDB) ufs
        -- col=VALUES(col) re-uses the attempted insert value on conflict.
        updateCols = (T.intercalate ", ") $ map (\name -> name <> "=VALUES(" <> name <> ")") updateFields
    rawExecute ( "INSERT INTO "
               <> esc (entityDB t)
               <> " ("
               <> cols
               <> ") VALUES ("
               <> T.intercalate "),(" (replicate (length es) placeholders)
               <> ") ON DUPLICATE KEY UPDATE "
               <> updateCols
               ) $ concatMap (\e -> keyToValues (entityKey e) ++ (map toPersistValue . toPersistFields $ entityVal e)) es
  where
    t = entityDef $ proxyFromEntities es
    -- "?,?,...,?" with one placeholder per column.
    replicateQ :: Int -> Text
    replicateQ = T.intersperse ',' . (flip T.replicate $ T.singleton '?')
-- | Chunked variant of 'insertOrUpdateMany_''.  Entities are processed in
-- groups of @chunkSize@; after each group the transaction is optionally
-- committed so very large uploads do not hold one huge transaction.
insertOrUpdateMany_
  :: (PersistEntity t,PersistEntity r,MonadBaseControl IO m,MonadThrow m,MonadIO m,PersistEntityBackend r ~ SqlBackend)
  => SqlPriority
  -> Int
  -> Bool
  -> [Entity r]
  -> [DupUpdate t]
  -> ReaderT SqlBackend m ()
insertOrUpdateMany_ priority chunkSize commitChunks entities dupFields =
    forM_ (chunksOf chunkSize entities) $ \batch -> do
      insertOrUpdateMany_' priority batch updateFieldDefs
      when commitChunks transactionSave
  where
    updateFieldDefs = map dupUpdateFieldDef dupFields
-- | Single-entity version of 'insertOrUpdateMany_'.
-- NOTE(review): the doubly-nested @ReaderT SqlBackend (ReaderT SqlBackend m)@
-- in this signature looks accidental (the many-variant returns a plain
-- @ReaderT SqlBackend m ()@); it typechecks only by instantiating the inner
-- monad to another ReaderT -- confirm intent before relying on it.
insertOrUpdate_
  :: (MonadIO m,MonadThrow m,MonadBaseControl IO m,PersistEntity r,PersistEntity t,PersistEntityBackend r ~ SqlBackend)
  => Entity r -> [DupUpdate t] -> ReaderT SqlBackend (ReaderT SqlBackend m) ()
insertOrUpdate_ r = insertOrUpdateMany_ NormalPriority 1 False [r]
-- | Insert or update values in the database (when a duplicate already exists)
-- Like 'insertOrUpdateMany_'' but keyed purely on the table's uniqueness
-- constraints: the id column is not supplied, so the database assigns it.
insertOrUpdateUniqueMany_'
  :: (MonadIO m,MonadThrow m,MonadBaseControl IO m,PersistEntity record,PersistEntityBackend record ~ SqlBackend)
  => SqlPriority -> [record] -> [FieldDef] -> ReaderT SqlBackend m ()
insertOrUpdateUniqueMany_' _ [] _ = return ()
-- No update fields: plain unique inserts (duplicates are skipped).
insertOrUpdateUniqueMany_' priority rs [] = withPriority priority (entityDB . entityDef $ proxyFromRecords rs) $ mapM_ insertUnique rs -- TODO: insertUniqueMany
insertOrUpdateUniqueMany_' priority rs ufs = withPriority priority (entityDB t) $ do
    conn <- ask
    let esc = connEscapeName conn
        insertFields = map (esc . fieldDB) $ entityFields t
        cols = T.intercalate (T.singleton ',') insertFields
        placeholders = replicateQ $ length insertFields
        updateFields = map (esc . fieldDB) ufs
        -- col=VALUES(col) re-uses the attempted insert value on conflict.
        updateCols = (T.intercalate ", ") $ map (\name -> name <> "=VALUES(" <> name <> ")") updateFields
    rawExecute ( "INSERT INTO "
               <> esc (entityDB t)
               <> " ("
               <> cols
               <> ") VALUES ("
               <> T.intercalate "),(" (replicate (length rs) placeholders)
               <> ") ON DUPLICATE KEY UPDATE "
               <> updateCols
               ) $ concatMap (map toPersistValue . toPersistFields) rs
  where
    t = entityDef $ proxyFromRecords rs
    -- "?,?,...,?" with one placeholder per column.
    replicateQ :: Int -> Text
    replicateQ = T.intersperse ',' . (flip T.replicate $ T.singleton '?')
-- | Chunked upsert keyed on uniqueness constraints; optionally commits the
-- transaction after each chunk.
insertOrUpdateUniqueMany_
  :: (MonadIO m,MonadThrow m,MonadBaseControl IO m,PersistEntity record,PersistEntity t,PersistEntityBackend record ~ SqlBackend)
  => SqlPriority
  -> Int
  -> Bool
  -> [record]
  -> [DupUpdate t]
  -> ReaderT SqlBackend m ()
insertOrUpdateUniqueMany_ priority chunkSize commitChunks records dupFields =
    mapM_ processBatch (chunksOf chunkSize records)
  where
    fieldDefs = map dupUpdateFieldDef dupFields
    processBatch batch = do
      insertOrUpdateUniqueMany_' priority batch fieldDefs
      when commitChunks transactionSave
-- | Insert a single record, updating the listed fields if a uniqueness
-- constraint already matches an existing row.
insertOrUpdateUnique_ :: (MonadIO m,MonadThrow m,MonadBaseControl IO m,PersistEntity record,PersistEntity t,PersistEntityBackend record ~ SqlBackend)
                      => SqlPriority
                      -> record
                      -> [DupUpdate t]
                      -> ReaderT SqlBackend m ()
insertOrUpdateUnique_ priority r = insertOrUpdateUniqueMany_ priority 1 False [r]
-- | Upsert a batch and yield the key of every affected row.
insertOrUpdateUniqueMany'
  :: (ToBackendKey SqlBackend val,MonadResource m,MonadBaseControl IO m)
  => SqlPriority
  -> [val]
  -> [FieldDef]
  -> ConduitM () (Key val) (ReaderT SqlBackend m) ()
insertOrUpdateUniqueMany' _ [] _ = CL.sourceList []
-- No update fields: plain unique inserts; on conflict 'insertBy' returns
-- the existing entity, whose key is yielded instead.
insertOrUpdateUniqueMany' priority rs [] = do
    es <- lift $ withPriority priority (entityDB . entityDef $ proxyFromRecords rs) $ mapM insertBy rs
    CL.sourceList $ map (fromEither . left entityKey) es -- TODO: insertUniqueMany
  where
    fromEither (Left x) = x
    fromEither (Right x) = x
insertOrUpdateUniqueMany' priority rs ufs = do
    -- headNote is partial by design: every record must declare at least one
    -- uniqueness constraint for this upsert to make sense.
    let uniqs = map (headNote "Could not find any unique keys to use with insertOrUpdate" . persistUniqueKeys) rs
    lift $ insertOrUpdateUniqueMany_' priority rs ufs
    -- NOTE(review): 'fromJust' assumes every key is findable immediately
    -- after the upsert; a concurrent delete would make this throw -- confirm.
    (selectKeysBy uniqs $= CL.map fromJust)
-- | Chunked conduit upsert: processes records in groups of @chunkSize@,
-- yielding the key of every affected row and optionally committing after
-- each group.
insertOrUpdateUniqueMany
  :: (MonadResource m,ToBackendKey SqlBackend val,MonadBaseControl IO m,PersistEntity t)
  => SqlPriority
  -> Int
  -> Bool
  -> [val]
  -> [DupUpdate t]
  -> ConduitM () (Key val) (ReaderT SqlBackend m) ()
insertOrUpdateUniqueMany priority chunkSize commitChunks records dupFields =
    forM_ (chunksOf chunkSize records) $ \batch -> do
      insertOrUpdateUniqueMany' priority batch fieldDefs
      when commitChunks (lift transactionSave)
  where
    fieldDefs = map dupUpdateFieldDef dupFields
-- | Replace or insert many records using uniqueness constraints instead of the entity key
repsertUniqueMany_ :: (MonadIO m,MonadThrow m,MonadBaseControl IO m,PersistEntity record,PersistEntityBackend record ~ SqlBackend)
                   => SqlPriority
                   -> Int
                   -> Bool
                   -> [record]
                   -> ReaderT SqlBackend m ()
repsertUniqueMany_ _ _ _ [] = return ()
repsertUniqueMany_ priority chunk commitChunks rs = mapM_ insertOrUpdateChunk $ chunksOf chunk rs
  where
    t = entityDef $ proxyFromRecords rs
    -- Updating every column on conflict makes the upsert behave like a
    -- full replace.
    fs = entityFields t --TODO: Exclude unique fields since they are unnecesary
    -- insertOrUpdateChunk :: (MonadResourceBase m, PersistEntity val, PersistUnique m, PersistStore m) =>
    --   [val] -> m ()
    insertOrUpdateChunk rs' = do
      insertOrUpdateUniqueMany_' priority rs' fs
      when commitChunks transactionSave
-- | Conduit variant of 'repsertUniqueMany_' that also yields the keys of
-- the affected rows.
repsertUniqueMany
  :: (MonadResource m,ToBackendKey SqlBackend val,MonadBaseControl IO m)
  => SqlPriority
  -> Int
  -> Bool
  -> [val]
  -> ConduitM () (Key val) (ReaderT SqlBackend m) ()
repsertUniqueMany priority chunk commitChunks rs = do
    mapM_ insertOrUpdateChunk $ chunksOf chunk rs
  where
    t = entityDef $ proxyFromRecords rs
    -- Updating every column on conflict makes the upsert behave like a
    -- full replace.
    fs = entityFields t --TODO: Exclude unique fields since they are unnecesary
    -- TODO: Use replace into in the future
    -- insertOrUpdateChunk :: (MonadResource m, MonadResourceBase m, PersistEntity val, PersistUnique m, PersistStore m) =>
    --   [val] -> Source m (Key val)
    insertOrUpdateChunk rs' = do
      insertOrUpdateUniqueMany' priority rs' fs
      when commitChunks (lift transactionSave)
-- | Replace or insert a record using uniqueness constraints instead of the entity key
-- Single-record convenience wrapper over 'repsertUniqueMany_'.
repsertUnique_
  :: (MonadIO m,MonadThrow m,MonadBaseControl IO m,PersistEntity record,PersistEntityBackend record ~ SqlBackend)
  => record -> ReaderT SqlBackend m ()
repsertUnique_ r = repsertUniqueMany_ NormalPriority 1 False [r]
-- | Like 'repsertUnique_' but also returns the key of the stored record.
--
-- The previous implementation bound @Just (Entity k _) <- getByValue r@,
-- whose failure path was an uninformative MonadFail error (and implicitly
-- required MonadFail in the stack).  The explicit case below gives a clear
-- diagnostic for the should-be-impossible miss right after the repsert.
repsertUnique
  :: (MonadIO m,MonadThrow m,MonadBaseControl IO m,PersistEntity record,PersistEntityBackend record ~ SqlBackend)
  => record -> ReaderT SqlBackend m (Key record)
repsertUnique r = do
    repsertUnique_ r
    mEntity <- getByValue r -- TODO: Speed this up (no need to return the entire entity for repsert)
    case mEntity of
      Just (Entity key _) -> return key
      Nothing -> error "repsertUnique: record not found immediately after repsert"
-- | Insert many values into the database in large chunks
-- Builds one multi-row @INSERT [IGNORE]@ statement covering every record.
insertMany_'
  :: (MonadIO m,MonadThrow m,MonadBaseControl IO m,PersistEntity record)
  => SqlPriority -> Bool -> [record] -> ReaderT SqlBackend m ()
insertMany_' _ _ [] = return ()
insertMany_' priority ignoreErrors rs = withPriority priority (entityDB t) $ do
    conn <- ask
    let esc = connEscapeName conn
        insertFields = map (esc . fieldDB) (entityFields t)
        cols = T.intercalate (T.singleton ',') insertFields
        placeholders = replicateQ $ length insertFields
    -- IGNORE silently drops rows that violate a constraint instead of
    -- aborting the whole statement.
    rawExecute ( "INSERT "
               <> (if ignoreErrors then "IGNORE " else "")
               <> "INTO "
               <> esc (entityDB t)
               <> " ("
               <> cols
               <> ") VALUES ("
               <> T.intercalate "),(" (replicate (length rs) placeholders)
               <> ")"
               ) $ concatMap (map toPersistValue . toPersistFields) rs
  where
    t = entityDef $ proxyFromRecords rs
    -- "?,?,...,?" with one placeholder per column.
    replicateQ :: Int -> Text
    replicateQ = T.intersperse ',' . (flip T.replicate $ T.singleton '?')
-- | Chunked bulk insert: one multi-row INSERT per chunk, optionally
-- committing the transaction after each chunk.
insertMany_ :: (MonadIO m,MonadThrow m,MonadBaseControl IO m,PersistEntity record)
            => SqlPriority
            -> Bool
            -> Int
            -> Bool
            -> [record]
            -> ReaderT SqlBackend m ()
insertMany_ priority ignoreErrors chunkSize commitChunks records =
    forM_ (chunksOf chunkSize records) $ \batch -> do
      insertMany_' priority ignoreErrors batch
      when commitChunks transactionSave
-- Helpers
-- See persistent/Database/Persist/Sql/Orphan/PersistQuery.hs

-- | Pin down the entity type of a list of unique constraints.
proxyFromUniqs :: PersistEntity val => [Unique val] -> Proxy val
proxyFromUniqs = const Proxy

-- | Pin down the entity type of a list of records.
proxyFromRecords :: PersistEntity val => [val] -> Proxy val
proxyFromRecords = const Proxy

-- | Pin down the entity type of a list of entities.
proxyFromEntities :: PersistEntity val => [Entity val] -> Proxy val
proxyFromEntities = const Proxy
-- keyFromProxy :: PersistEntity val => Proxy val -> m (Key val)
-- keyFromProxy _ = error "keyFromProxy must not be evaluated"

-- | Extract the persistent field definition wrapped in a 'DupUpdate'.
dupUpdateFieldDef :: PersistEntity t
                  => DupUpdate t -> FieldDef
dupUpdateFieldDef (DupUpdateField f) = persistFieldDef f
-- | Run a database action according to its 'SqlPriority'.  Normal priority
-- runs immediately; low priority serialises access to the named table
-- through an in-process lock ('updateSems') and throws 'SqlWaitException'
-- once 'maxWaitingQueries' actions are counted.
--
-- NOTE(review): the counter is incremented only after the table lock has
-- been acquired, so it appears it can never exceed 1; the intended
-- queue-length check may need the increment to happen before 'withMVar' --
-- confirm before changing.
withPriority :: (MonadResourceBase m) => SqlPriority -> DBName -> m a -> m a
withPriority NormalPriority _ op = op
withPriority LowPriority dbName op = do
    let s = unDBName dbName
    -- Make sure that the counter exists (returning the existing counter if it does)
    (lock, counter) <- liftIO $ do
      newSem <- (,) <$> newMVar () <*> newIORef 0
      atomicModifyIORef' updateSems $ \sems ->
        case s `M.lookup` sems of
          Just existingSem -> (sems, existingSem)
          Nothing -> (M.insert s newSem sems, newSem)
    -- Lock the table and increment our counter (fail if too many queries are queued up)
    withMVar lock $ \_ -> do -- use withMVar to release the lock if any exceptions occur
      -- No need to be thread-safe with with the io refs since we're protecting this whole function with a lock
      let acquire = do { c <- readIORef counter ; writeIORef counter (c + 1) ; return (c + 1) }
          release c = writeIORef counter (c - 1)
      liftBaseOp (bracket acquire release) $ \c -> do -- use bracket to reset the counter if any exceptions occur (see Control.Exception.Lifted (bracket))
        -- Fail if the maximum number of queries have been enqueued
        liftIO $ when (c >= maxWaitingQueries) . throwIO . SqlWaitException $ "Maximum " <> (T.pack . show) c <> " of " <> (T.pack . show) maxWaitingQueries <> " insert/update queries queued on table `" <> s <> "`."
        -- Safely perform the query inside of the lock (any query exceptions will release resources)
        op
| circuithub/persistent-mysql-extra | Database/Persist/MySQL/Extra.hs | mit | 22,872 | 0 | 27 | 6,441 | 5,170 | 2,659 | 2,511 | 347 | 7 |
{-# OPTIONS_GHC -Wall #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
module Numeric.Sampling (
-- * Without replacement
sample
, sampleIO
-- * With replacement
, resample
, resampleIO
-- * Unequal probability, without replacement
, psample
, psampleIO
-- * Unequal probability, with replacement
, presample
, presampleIO
-- * Re-exported
, module System.Random.MWC
) where
import qualified Control.Foldl as F
import Control.Monad.Primitive (PrimMonad, PrimState)
import qualified Data.Foldable as Foldable
#if __GLASGOW_HASKELL__ < 710
import Data.Foldable (Foldable)
#endif
import Data.Function (on)
import Data.List (sortBy)
#if __GLASGOW_HASKELL__ < 800
import Data.Monoid
#endif
import qualified Data.Sequence as S
import qualified Data.Vector as V (toList)
import Numeric.Sampling.Internal
import System.Random.MWC
-- | (/O(n)/) Sample uniformly, without replacement.
--
-- Returns Nothing if the desired sample size is larger than the collection
-- being sampled from.
sample
  :: (PrimMonad m, Foldable f)
  => Int -> f a -> Gen (PrimState m) -> m (Maybe [a])
sample size collection gen
  | size < 0  = return Nothing
  | otherwise = fmap (fmap V.toList) (F.foldM (randomN size gen) collection)
{-# INLINABLE sample #-}
-- | (/O(n)/) 'sample' specialized to IO.
sampleIO :: Foldable f => Int -> f a -> IO (Maybe [a])
sampleIO size collection = withSystemRandom (asGenIO (sample size collection))
{-# INLINABLE sampleIO #-}
-- | (/O(n log n)/) Sample uniformly with replacement (bootstrap).
resample
  :: (PrimMonad m, Foldable f)
  => Int -> f a -> Gen (PrimState m) -> m [a]
resample size collection =
    presample size (map (\el -> (uniformWeight, el)) (Foldable.toList collection))
  where
    -- Every element gets the same weight, 1/n.
    uniformWeight = recip (F.fold F.genericLength collection)
{-# INLINABLE resample #-}
-- | (/O(n log n)/) 'resample' specialized to IO.
resampleIO :: Foldable f => Int -> f a -> IO [a]
resampleIO size collection = withSystemRandom (asGenIO (resample size collection))
{-# INLINABLE resampleIO #-}
-- | (/O(n log n)/) Unequal probability sampling.
--
-- Returns Nothing if the desired sample size is larger than the collection
-- being sampled from.
psample
  :: (PrimMonad m, Foldable f)
  => Int -> f (Double, a) -> Gen (PrimState m) -> m (Maybe [a])
psample n weighted gen = do
    let sorted = sortProbs weighted
    computeSample n sorted gen
  where
    -- Draw 'size' distinct elements.  'mass' is the total probability mass
    -- still available and shrinks as chosen elements are removed.
    computeSample
      :: PrimMonad m
      => Int -> [(Double, a)] -> Gen (PrimState m) -> m (Maybe [a])
    computeSample size xs g = go 1 [] size (S.fromList xs) where
      go !mass !acc j vs
        | j < 0 = return Nothing
        | j <= 0 = return (Just acc)
        | otherwise = do
            -- Uniform draw scaled to the remaining mass.
            z <- fmap (* mass) (uniform g)
            -- Inverse-CDF lookup over the running cumulative weights.
            let cumulative = S.drop 1 $ S.scanl (\s (pr, _) -> s + pr) 0 vs
                midx = S.findIndexL (>= z) cumulative
                -- NOTE(review): partial if the draw exceeds every cumulative
                -- weight (e.g. weights not summing to the assumed mass) --
                -- the error below is that failure mode.
                idx = case midx of
                  Nothing -> error "psample: no index found"
                  Just x -> x
                (p, val) = S.index vs idx
                -- Remove the chosen element so it cannot be drawn again.
                (l, r) = S.splitAt idx vs
                deleted = l <> S.drop 1 r
            go (mass - p) (val:acc) (pred j) deleted
{-# INLINABLE psample #-}
-- | (/O(n log n)/) 'psample' specialized to IO.
psampleIO :: Foldable f => Int -> f (Double, a) -> IO (Maybe [a])
psampleIO size weighted = withSystemRandom (asGenIO (psample size weighted))
{-# INLINABLE psampleIO #-}
-- | (/O(n log n)/) Unequal probability resampling.
presample
  :: (PrimMonad m, Foldable f)
  => Int -> f (Double, a) -> Gen (PrimState m) -> m [a]
presample n weighted gen
    | n <= 0 = return []
    | otherwise = do
        -- Sort by descending weight, then build the cumulative distribution
        -- (dropping the leading zero from the scan).
        let (bprobs, vals) = unzip $ sortProbs weighted
            probs = drop 1 (F.scan F.sum bprobs)
            cumulative = zip probs vals
        computeSample n cumulative gen
  where
    computeSample
      :: PrimMonad m => Int -> [(Double, a)] -> Gen (PrimState m) -> m [a]
    computeSample size xs g = go [] size where
      go !acc s
        | s <= 0 = return acc
        | otherwise = do
            z <- uniform g
            -- Inverse-CDF lookup: first entry whose cumulative weight >= z.
            case F.fold (F.find ((>= z) . fst)) xs of
              Just (_, val) -> go (val:acc) (pred s)
              Nothing -> return acc
{-# INLINABLE presample #-}
-- | (/O(n log n)/) 'presample' specialized to IO.
presampleIO :: (Foldable f) => Int -> f (Double, a) -> IO [a]
presampleIO size weighted = withSystemRandom (asGenIO (presample size weighted))
{-# INLINABLE presampleIO #-}
-- | Sort weighted pairs into descending order of weight (stable).
sortProbs :: (Foldable f, Ord a) => f (a, b) -> [(a, b)]
sortProbs collection = sortBy descendingOnWeight (Foldable.toList collection)
  where descendingOnWeight (w1, _) (w2, _) = compare w2 w1
{-# INLINABLE sortProbs #-}
| jtobin/sampling | lib/Numeric/Sampling.hs | mit | 4,691 | 0 | 19 | 1,249 | 1,502 | 783 | 719 | 94 | 2 |
module Job.Common
( mailchimpPostRequest
, mailchimpPatchRequest
) where
import Import
import qualified Crypto.Hash as CH
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Network.HTTP.Simple as HTTP
-- | Hex-encode the MD5 digest of a UTF-8-encoded text value.
hexMD5 :: Text -> String
hexMD5 input = show digest
  where
    digest :: CH.Digest CH.MD5
    digest = CH.hash (T.encodeUtf8 input)
-- | Build an authenticated POST request against the list's members endpoint.
mailchimpPostRequest :: ToJSON a => App -> Language -> a -> Request
mailchimpPostRequest master lang body =
    mailchimpRequest master body (mailchimpEndpoint master lang "POST" "")
-- | Build an authenticated PATCH request for an existing subscriber,
-- addressed by the MD5 hash of the e-mail address (Mailchimp's member id).
mailchimpPatchRequest :: ToJSON a => App -> Language -> a -> Text -> Request
mailchimpPatchRequest master lang body mail =
    mailchimpRequest master body (mailchimpEndpoint master lang "PATCH" (hexMD5 mail))
-- | Assemble a request for the list-members endpoint of the Mailchimp v3
-- API, choosing the list id by language.  @extra@ is appended to the URL
-- (e.g. a member hash for PATCH).
-- NOTE(review): the endpoint is built with plain http:// -- confirm whether
-- this should be https.
mailchimpEndpoint :: App -> Language -> String -> String -> Request
mailchimpEndpoint master lang httpType extra = do
  let mailchimpApiLocation = mcApiLocation . appMailchimp $ appSettings master
  -- One Mailchimp audience (list) per supported language.
  let mailchimpListId = case lang of
        Danish -> mcListIdDanish . mcListId . appMailchimp $ appSettings master
        Swedish -> mcListIdSwedish . mcListId . appMailchimp $ appSettings master
        Norwegian -> mcListIdNorwegian . mcListId . appMailchimp $ appSettings master
  let mailchimpApiEndpoint = T.unpack $ "http://" <> mailchimpApiLocation <> ".api.mailchimp.com/3.0/lists/" <> mailchimpListId <> "/members/"
  -- parseRequest_ accepts a leading method, e.g. "POST http://...".
  parseRequest_ $ httpType <> " " <> mailchimpApiEndpoint <> extra
-- | Attach HTTP basic auth (API user/key from settings), a JSON body and
-- status-code tolerance to the given request.
mailchimpRequest :: ToJSON a => App -> a -> Request -> Request
mailchimpRequest master body url =
    HTTP.setRequestBasicAuth apiUser apiKey
      . HTTP.setRequestIgnoreStatus
      $ HTTP.setRequestBodyJSON body url
  where
    mailchimpSettings = appMailchimp (appSettings master)
    apiUser = T.encodeUtf8 (mcApiUser mailchimpSettings)
    apiKey = T.encodeUtf8 (mcApiKey mailchimpSettings)
| Tehnix/campaigns | Job/Common.hs | mit | 1,886 | 0 | 15 | 339 | 532 | 266 | 266 | 32 | 3 |
-- a block is just an integer
type Block = Int
-- a pile is a list of blocks from top to bottom
type Pile = [Block]
-- a configuration is a list of piles
type Config = [Pile]
-- a board gives you the next block to discard plus a configuration
-- (Eq is needed so visited boards can be recognised during the search)
data Board = B (Int,Config)
  deriving Eq
-- a state is just a board
type State = Board
-- a history gives you all the states in the path so far from current to initial state
type History = [State]

-- initial state
-- NOTE(review): the -1 entries appear to act as bottom-of-pile sentinels so
-- that 'head' on a pile is always defined -- confirm before changing.
initialState :: State
--initialState = B (7,[[-1],[-1],[2,1,4,3,6,5,7,-1]])
--initialState = B (7,[[-1],[-1],[1,2,3,4,5,6,7,-1]])
--initialState = B (7,[[-1],[-1],[7,6,5,4,3,2,1,-1]])
--initialState = B (5,[[-1],[-1],[4,3,2,1,5,-1]])
--initialState = B (6,[[-1],[-1],[5,4,3,2,1,6,-1]])
initialState = B (6,[[-1],[-1],[5,4,3,2,1,6,-1]])
--initialState = B (3,[[2,-1],[1,3,-1]])

-- empty list if there is no solution
loser :: History
loser = []
-- | The search succeeds once every block has been discarded, i.e. the
-- next block to discard has counted down to zero.
goalTest :: State -> Bool
goalTest (B (nextBlock, _)) = nextBlock == 0

-- | Admissible heuristic: the number of the next block to discard.  It is
-- always the biggest remaining block and, in the worst case, sits at the
-- bottom of a stack of all the other blocks.
h :: State -> Int
h (B (nextBlock, _)) = nextBlock
-- gives list of all states resulting from a legal move of a
-- block from one pile to another
-- A move is legal when the moved block is bigger than the destination top.
-- NOTE(review): 'head' assumes every pile is non-empty -- the -1 sentinels
-- in 'initialState' appear to guarantee this; confirm before changing.
moveall :: Board -> [Board]
moveall (B (x,c)) = removedup [B (x, move (p1,p2) c) | p1 <- c, p2 <- c, head p1 > head p2]
-- Removes duplicate boards in a list
-- | Remove duplicates, keeping the /last/ occurrence of each repeated
-- element (relative order of survivors is preserved).  Quadratic, which is
-- acceptable for the small move lists produced by 'moveall'.
--
-- Generalised from @[Board] -> [Board]@ to any 'Eq' type; existing callers
-- are unaffected.
removedup :: Eq a => [a] -> [a]
removedup [] = []
removedup (b:bs)
  | elem b bs = removedup bs
  | otherwise = b : removedup bs
-- move block from one pile to another
-- as a result those two piles are placed in beginning of the config
-- Partial on an empty source pile; callers ('moveall') only pass piles
-- drawn from the configuration, which carry a bottom sentinel.
move :: (Pile,Pile) -> Config -> Config
move ((p1:p1s),p2) c = (p1:p2) : (p1s : cminus)
  -- Drop both original piles from the config before re-adding the new ones.
  where cminus = remove (p1:p1s) (remove p2 c)
-- remove an element from a list
-- | Remove the first occurrence of an element from a list.
--
-- The original version had no empty-list case and crashed when the element
-- was absent; it now returns the list unchanged, which is compatible with
-- every existing call site (they only remove elements known to be present).
remove :: Eq a => a -> [a] -> [a]
remove _ [] = []
remove x (y:ys)
  | x == y = ys
  | otherwise = y : remove x ys
-- discard next block if possible
-- if it is possible, a singleton new state is given, otherwise empty
-- The lazy where-pattern rotates the pile exposing x (if any) to the front;
-- when no pile has x on top, 'makenfirst' leaves the config unchanged, the
-- guard @b == x@ fails, and [] is returned.  Assumes a non-empty config of
-- non-empty piles (guaranteed by the -1 sentinels, it appears -- confirm).
discard :: Board -> [Board]
discard (B (x,c))
  | b == x = [B (x-1, (bs:ps))]
  | otherwise = []
  where ((b:bs):ps) = makenfirst x c
-- | Rotate the configuration so the (unique, if any) pile whose top block
-- is @n@ comes first; the remaining piles keep their relative order.
makenfirst :: Int -> Config -> Config
makenfirst n c = matching ++ removeall matching c
  where matching = filter (\pile -> head pile == n) c

-- | Delete each of the given piles (one occurrence apiece) from the
-- configuration.
removeall :: [Pile] -> Config -> Config
removeall piles c = foldl (flip remove) c piles
-- the operator either puts a block on top of a smaller one
-- or it discards a block if it matches the number
-- Each successor state is prepended to the incoming history.  Partial on
-- the empty history, but 'aStarSearch' only ever passes non-empty ones.
operator :: History -> [History]
operator (b:bs) = [newboard:(b:bs) | newboard <- moveall b ++ discard b]

-- a node stores a state, its g value, and its h value
type Node = (History, Int, Int)

-- prints out list so it is easier to read
-- (one board per line when showing search results)
instance Show Board where
  show (B x) = show x ++ "\n"
-- lets you do IO
-- x = do
--       print a
--       putStr "It took this many steps: "
--       print b
--     where (a,b) = astar

-- this is the function you call to run everything
-- Starts the search from 'initialState' with g = 0 and h taken from the
-- heuristic; returns the goal node and the number of nodes expanded.
astar :: (Node,Int)
astar = aStarSearch [([initialState],0,h initialState)] [] 0
-- this is the search function
--
-- it takes
--   1. a sorted list of nodes to be processed
--   2. the list of states that have already been processed
--   3. the number of nodes that have been processed so far
--
-- it returns a pair containing a goal node and the number of nodes processed
--
-- if the set of nodes to be processed is empty you lose
-- if the first node to be processed is a goal node then we are done
-- if the first node to be processed has already been processed it ignores it
-- otherwise it applies the operator to the first node to be processed
-- and inserts all the new nodes into the sorted list
aStarSearch :: [Node] -> [State] -> Int -> (Node,Int)
-- Frontier exhausted: report failure via the 'loser' history.
aStarSearch [] used counter = ((loser,0,0),counter)
aStarSearch (((x:xs),gVal,hVal):ss) used counter
  | goalTest x = (((x:xs),gVal,hVal),counter)
  -- Already-expanded states are skipped (graph search).
  | elem x used = aStarSearch ss used counter
  | otherwise = aStarSearch (insertAll newNodes ss) (x:used) (counter + 1)
  -- Successors cost one more step; h is recomputed for each new state.
  where newNodes = [((n:ns),gVal+1,h n) | (n:ns) <- (operator (x:xs))]
-- | Insert each node of the first list into the sorted list, preserving
-- the ascending f = g + h ordering invariant.
insertAll :: [Node] -> [Node] -> [Node]
insertAll newNodes sorted = foldl (flip insert) sorted newNodes

-- | Insert a single node into a list kept sorted by ascending f = g + h.
insert :: Node -> [Node] -> [Node]
insert node [] = [node]
insert node@(_, g1, h1) (front@(_, g2, h2) : rest)
  | g1 + h1 <= g2 + h2 = node : front : rest
  | otherwise          = front : insert node rest
| fultonms/artificial-intelligence | a2/blockA.hs | mit | 4,775 | 0 | 13 | 997 | 1,398 | 786 | 612 | 62 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Stackage.Database.Query
(
-- * Snapshot
newestSnapshot
, newestLTS
, newestLTSMajor
, newestNightly
, getSnapshots
, countSnapshots
, ltsMajorVersions
, snapshotBefore
, lookupSnapshot
, snapshotTitle
, snapshotsJSON
, getLatestLtsByGhc
, getLatestLtsNameWithHoogle
, getSnapshotModules
, getSnapshotPackageModules
-- * Package
, getAllPackages
, getPackagesForSnapshot
, getPackagesForSnapshotDiff
, getPackageVersionForSnapshot
, getLatests
, getHackageLatestVersion
, getSnapshotPackageInfo
, getSnapshotPackageInfoQuery
, getSnapshotPackageLatestVersion
, getSnapshotPackageLatestVersionQuery
, getSnapshotPackagePageInfo
, getSnapshotPackagePageInfoQuery
, getPackageInfo
, getPackageInfoQuery
, getSnapshotsForPackage
-- ** Dependencies
, getForwardDeps
, getReverseDeps
, getDepsCount
-- ** Deprecations
, getDeprecated
, getDeprecatedQuery
, setDeprecations
-- * Needed for Cron Job
-- ** Re-exports from Pantry
, loadBlobById
, getTreeForKey
, treeCabal
, getVersionId
-- ** Stackage server
, CabalFileIds
, addCabalFile
, getCabalFileIds
, addSnapshotPackage
, getHackageCabalByRev0
, getHackageCabalByKey
, snapshotMarkUpdated
, insertSnapshotName
, markModuleHasDocs
, insertDeps
-- ** For Hoogle db creation
, lastLtsNightlyWithoutHoogleDb
, getSnapshotPackageCabalBlob
, checkInsertSnapshotHoogleDb
) where
import qualified Data.Aeson as A
import qualified Data.List as L
import Database.Esqueleto
import Database.Esqueleto.Internal.Language (FromPreprocess)
import Database.Esqueleto.Internal.Sql
import Distribution.Types.PackageId (PackageIdentifier(PackageIdentifier))
import Distribution.PackageDescription (packageDescription)
import Distribution.Types.PackageDescription (PackageDescription(package))
import qualified Database.Persist as P
import Pantry.Internal.Stackage (EntityField(..), PackageName,
Version, getBlobKey, getPackageNameById,
getPackageNameId, getTreeForKey, getVersionId,
loadBlobById, storeBlob, mkSafeFilePath, versionVersion)
import RIO hiding (on, (^.))
import qualified RIO.Map as Map
import qualified RIO.Set as Set
import qualified RIO.Text as T
import RIO.Time (Day, UTCTime)
import Stackage.Database.PackageInfo
import Stackage.Database.Schema
import Stackage.Database.Types
-- | Construct a pretty, human-readable title for the snapshot from its
-- name and compiler.
snapshotTitle :: Snapshot -> Text
snapshotTitle snap = snapshotPrettyName (snapshotName snap) (snapshotCompiler snap)
-- | Fetch the snapshot entity with the given name from the database, if any.
lookupSnapshot :: GetStackageDatabase env m => SnapName -> m (Maybe (Entity Snapshot))
lookupSnapshot = run . getBy . UniqueSnapshot
-- | Look up the name of the newest snapshot per branch: 'lts', 'lts-x' or
-- 'nightly'. This is used for resolving a snapshot reference.
newestSnapshot :: GetStackageDatabase env m => SnapshotBranch -> m (Maybe SnapName)
newestSnapshot = \case
    LtsBranch -> fmap (uncurry SNLts) <$> newestLTS
    NightlyBranch -> fmap SNNightly <$> newestNightly
    LtsMajorBranch x -> fmap (SNLts x) <$> newestLTSMajor x
-- | Get the latest known LTS snapshot as a @(major, minor)@ version pair,
-- e.g. @(18, 28)@ for @lts-18.28@.
newestLTS :: GetStackageDatabase env m => m (Maybe (Int, Int))
newestLTS =
    -- Sorting by major then minor, both descending, puts the newest LTS first.
    run $ fmap (fmap go) $ selectFirst [] [P.Desc LtsMajor, P.Desc LtsMinor]
  where
    go (Entity _ lts) = (ltsMajor lts, ltsMinor lts)
-- | Get the minor version 'y' of the latest known LTS snapshot for the major
-- version 'x' in 'lts-x.y'.
newestLTSMajor :: GetStackageDatabase env m => Int -> m (Maybe Int)
newestLTSMajor x =
    run $ fmap (fmap $ ltsMinor . entityVal) $ P.selectFirst [LtsMajor P.==. x] [P.Desc LtsMinor]
-- | List the newest @(major, minor)@ pair for every known LTS major version,
-- newest major first.
ltsMajorVersions :: GetStackageDatabase env m => m [(Int, Int)]
ltsMajorVersions =
    run $ fmap (dropOldMinors . map (toPair . entityVal))
        $ P.selectList [] [P.Desc LtsMajor, P.Desc LtsMinor]
  where
    toPair (Lts _ x y) = (x, y)
    -- Rows arrive sorted by (major desc, minor desc), so the first row of a
    -- major run carries its newest minor; drop the rest of that run.
    -- (Helper was misleadingly called 'sameMinor' before: it compares majors.)
    dropOldMinors [] = []
    dropOldMinors (l@(x, _):rest) =
        l : dropOldMinors (dropWhile sameMajor rest)
      where
        sameMajor (y, _) = x == y
-- | Look up the date of the newest known nightly snapshot.
newestNightly :: GetStackageDatabase env m => m (Maybe Day)
newestNightly = run $ fmap (fmap $ nightlyDay . entityVal) $ selectFirst [] [P.Desc NightlyDay]
-- | Get the snapshot which precedes the given one within its own branch
-- (nightly/lts).
snapshotBefore :: GetStackageDatabase env m => SnapName -> m (Maybe (SnapshotId, SnapName))
snapshotBefore = \case
    SNLts x y -> ltsBefore x y
    SNNightly day -> nightlyBefore day
-- | Find the newest nightly snapshot strictly before the given day.
nightlyBefore :: GetStackageDatabase env m => Day -> m (Maybe (SnapshotId, SnapName))
nightlyBefore day =
    run $ fmap (fmap go) $ P.selectFirst [NightlyDay P.<. day] [P.Desc NightlyDay]
  where
    go (Entity _ nightly) = (nightlySnap nightly, SNNightly $ nightlyDay nightly)
-- | Find the newest LTS snapshot strictly before @lts-x.y@: either an earlier
-- minor of the same major version, or any release of an earlier major.
ltsBefore :: GetStackageDatabase env m => Int -> Int -> m (Maybe (SnapshotId, SnapName))
ltsBefore x y =
    run $ fmap (fmap go) $ selectFirst
        ( [LtsMajor P.<=. x, LtsMinor P.<. y] P.||.
          [LtsMajor P.<. x]
        )
        [P.Desc LtsMajor, P.Desc LtsMinor]
  where
    go (Entity _ lts) = (ltsSnap lts, SNLts (ltsMajor lts) (ltsMinor lts))
-- | Find the newest @ltsCount@ LTS and @nightlyCount@ nightly snapshots that
-- do not yet have a Hoogle database for the currently configured Hoogle
-- version. Used by the cron job to decide which databases still need building.
lastLtsNightlyWithoutHoogleDb :: Int -> Int -> RIO StackageCron [(SnapshotId, SnapName)]
lastLtsNightlyWithoutHoogleDb ltsCount nightlyCount = do
    currentHoogleVersionId <- scHoogleVersionId <$> ask
    -- Shared query over either branch table: @snapId@ selects the foreign-key
    -- field (LtsSnap or NightlySnap) used to join onto Snapshot. A snapshot
    -- qualifies when no SnapshotHoogleDb row exists for the current version.
    let getSnapshotsWithoutHoogeDb snapId snapCount =
            map (unValue *** unValue) <$>
            select
                (from $ \(snap `InnerJoin` snapshot) -> do
                     on $ snap ^. snapId ==. snapshot ^. SnapshotId
                     where_ $
                         notExists $
                         from $ \snapshotHoogleDb ->
                             where_ $
                             (snapshotHoogleDb ^. SnapshotHoogleDbSnapshot ==. snapshot ^.
                              SnapshotId) &&.
                             (snapshotHoogleDb ^. SnapshotHoogleDbVersion ==.
                              val currentHoogleVersionId)
                     -- Newest snapshots first, limited per branch.
                     orderBy [desc (snapshot ^. SnapshotCreated)]
                     limit $ fromIntegral snapCount
                     pure (snapshot ^. SnapshotId, snapshot ^. SnapshotName))
    run $ do
        lts <- getSnapshotsWithoutHoogeDb LtsSnap ltsCount
        nightly <- getSnapshotsWithoutHoogeDb NightlySnap nightlyCount
        pure $ lts ++ nightly
-- | Build the snapshots JSON object: one key per LTS major version, plus
-- \"lts\" for the newest LTS and \"nightly\" for the newest nightly snapshot.
snapshotsJSON :: GetStackageDatabase env m => m A.Value
snapshotsJSON = do
    mlatestNightly <- newestNightly
    ltses <- ltsMajorVersions
    let ltsPairs =
            case ltses of
                [] -> []
                majorVersions@(newest:_) ->
                    ("lts" A..= printLts newest) : map toObj majorVersions
        allPairs =
            case mlatestNightly of
                Nothing -> ltsPairs
                Just n -> ("nightly" A..= printNightly n) : ltsPairs
    return $ A.object allPairs
  where
    toObj lts@(major, _) = T.pack ("lts-" <> show major) A..= printLts lts
    printLts (major, minor) = "lts-" <> show major <> "." <> show minor
    printNightly day = "nightly-" <> T.pack (show day)
-- | For each GHC version, report the newest LTS snapshot built with it as
-- (major, minor, compiler text, creation day), newest LTS first.
getLatestLtsByGhc :: GetStackageDatabase env m => m [(Int, Int, Text, Day)]
getLatestLtsByGhc =
    run $ fmap (dedupe . map toTuple) $ do
        select $
            from $ \(lts `InnerJoin` snapshot) -> do
                on $ lts ^. LtsSnap ==. snapshot ^. SnapshotId
                orderBy [desc (lts ^. LtsMajor), desc (lts ^. LtsMinor)]
                -- NOTE(review): the grouping includes LtsId/SnapshotId, so
                -- every row survives the GROUP BY; the actual per-compiler
                -- deduplication happens in Haskell via 'dedupe' below.
                groupBy
                    ( snapshot ^. SnapshotCompiler
                    , lts ^. LtsId
                    , lts ^. LtsMajor
                    , lts ^. LtsMinor
                    , snapshot ^. SnapshotId)
                return (lts, snapshot)
  where
    toTuple (Entity _ lts, Entity _ snapshot) =
        ( ltsMajor lts
        , ltsMinor lts
        , textDisplay (snapshotCompiler snapshot)
        , snapshotCreated snapshot)
    -- Keep only the first (i.e. newest) row of each consecutive run sharing
    -- the same compiler (the third tuple element).
    dedupe [] = []
    dedupe (x:xs) = x : dedupe (dropWhile (\y -> thd x == thd y) xs)
    thd (_, _, x, _) = x
-- | Name of the newest LTS snapshot that has a Hoogle database for the
-- current Hoogle version; falls back to the literal \"lts\" when none exists.
getLatestLtsNameWithHoogle :: GetStackageDatabase env m => m Text
getLatestLtsNameWithHoogle =
    run $ do
        currentHoogleVersionId <- getCurrentHoogleVersionId
        maybe "lts" (textDisplay . unValue) . listToMaybe <$>
            select
                (from $ \(lts `InnerJoin` snapshot `InnerJoin` snapshotHoogleDb) -> do
                     -- esqueleto convention: 'on' clauses are supplied in
                     -- reverse order of the joins in the tuple above.
                     on $ snapshotHoogleDb ^. SnapshotHoogleDbSnapshot ==. snapshot ^. SnapshotId
                     on $ lts ^. LtsSnap ==. snapshot ^. SnapshotId
                     where_ $
                         snapshotHoogleDb ^. SnapshotHoogleDbVersion ==. val currentHoogleVersionId
                     orderBy [desc (lts ^. LtsMajor), desc (lts ^. LtsMinor)]
                     limit 1
                     return (snapshot ^. SnapshotName))
-- | Count the snapshots belonging to the given 'SnapshotBranch'
-- (all snapshots when 'Nothing').
countSnapshots :: (GetStackageDatabase env m) => Maybe SnapshotBranch -> m Int
countSnapshots mBranch =
    run $
    case mBranch of
        Nothing -> P.count ([] :: [P.Filter Snapshot])
        Just NightlyBranch -> P.count ([] :: [P.Filter Nightly])
        Just LtsBranch -> P.count ([] :: [P.Filter Lts])
        Just (LtsMajorBranch x) -> P.count [LtsMajor P.==. x]
-- | Get a page of snapshots belonging to a specific SnapshotBranch, newest
-- first within each branch's natural ordering.
getSnapshots :: (GetStackageDatabase env m)
             => Maybe SnapshotBranch
             -> Int -- ^ limit
             -> Int -- ^ offset
             -> m [Entity Snapshot]
getSnapshots mBranch l o =
    run $
    case mBranch of
        -- No branch restriction: plain persistent query ordered by creation.
        Nothing -> P.selectList [] [P.LimitTo l, P.OffsetBy o, P.Desc SnapshotCreated]
        Just NightlyBranch ->
            select $
            from $ \(nightly `InnerJoin` snapshot) -> do
                on $ nightly ^. NightlySnap ==. snapshot ^. SnapshotId
                orderBy [desc (nightly ^. NightlyDay)]
                limit $ fromIntegral l
                offset $ fromIntegral o
                pure snapshot
        Just LtsBranch -> do
            select $
            from $ \(lts `InnerJoin` snapshot) -> do
                on $ lts ^. LtsSnap ==. snapshot ^. SnapshotId
                orderBy [desc (lts ^. LtsMajor), desc (lts ^. LtsMinor)]
                limit $ fromIntegral l
                offset $ fromIntegral o
                pure snapshot
        -- Restrict to one LTS major version, newest minor first.
        Just (LtsMajorBranch v) -> do
            select $
            from $ \(lts `InnerJoin` snapshot) -> do
                on $ lts ^. LtsSnap ==. snapshot ^. SnapshotId
                orderBy [desc (lts ^. LtsMinor)]
                where_ ((lts ^. LtsMajor) ==. (val v))
                limit $ fromIntegral l
                offset $ fromIntegral o
                pure snapshot
-- | List every module with documentation in the given snapshot, together with
-- the package/version that provides it, ordered by module then package name.
getSnapshotModules :: GetStackageDatabase env m => SnapshotId -> m [ModuleListingInfo]
getSnapshotModules sid =
    run $ do
        map toModuleListingInfo <$>
            select
                (from $ \(spm `InnerJoin` m `InnerJoin` sp `InnerJoin` pn `InnerJoin` v) -> do
                     -- esqueleto convention: 'on' clauses appear in reverse
                     -- order of the joins in the tuple above.
                     on $ sp ^. SnapshotPackageVersion ==. v ^. VersionId
                     on $ sp ^. SnapshotPackagePackageName ==. pn ^. PackageNameId
                     on $ spm ^. SnapshotPackageModuleSnapshotPackage ==. sp ^. SnapshotPackageId
                     on $ spm ^. SnapshotPackageModuleModule ==. m ^. ModuleNameId
                     where_ $
                         (sp ^. SnapshotPackageSnapshot ==. val sid) &&.
                         (spm ^. SnapshotPackageModuleHasDocs ==. val True)
                     orderBy [asc (m ^. ModuleNameName), asc (pn ^. PackageNameName)]
                     pure (m ^. ModuleNameName, pn ^. PackageNameName, v ^. VersionVersion))
  where
    toModuleListingInfo (Value moduleName, Value packageName, Value version) =
        ModuleListingInfo
            { mliModuleName = moduleName
            , mliPackageIdentifier = PackageIdentifierP packageName version
            }
-- | List module names of a single snapshot package, filtered by whether they
-- have documentation, sorted alphabetically.
getSnapshotPackageModules
    :: SnapshotPackageId
    -> Bool -- ^ select modules whose has-docs flag equals this value
    -> ReaderT SqlBackend (RIO env) [ModuleNameP]
getSnapshotPackageModules snapshotPackageId hasDocs =
    map unValue <$>
    select
        (from $ \(spm `InnerJoin` m) -> do
             on $ spm ^. SnapshotPackageModuleModule ==. m ^. ModuleNameId
             where_ $
                 (spm ^. SnapshotPackageModuleSnapshotPackage ==. val snapshotPackageId) &&.
                 (spm ^. SnapshotPackageModuleHasDocs ==. val hasDocs)
             orderBy [asc (m ^. ModuleNameName)]
             pure (m ^. ModuleNameName))
-- | List every known package exactly once, paired with the snapshot in which
-- it appears with its newest version/revision. The @distinctOn@ keeps the
-- first row per package name, which the ordering makes the newest one.
getAllPackages :: GetStackageDatabase env m => m [(SnapName, PackageListingInfo)]
getAllPackages =
    run (map toPackageListingInfo <$>
         select
             (from $ \(sp `InnerJoin` snap `InnerJoin` pn `InnerJoin` v) ->
                  distinctOn [don (pn ^. PackageNameName)] $ do
                      on (sp ^. SnapshotPackageVersion ==. v ^. VersionId)
                      on (sp ^. SnapshotPackagePackageName ==. pn ^. PackageNameId)
                      on (sp ^. SnapshotPackageSnapshot ==. snap ^. SnapshotId)
                      -- 'versionArray' sorts versions numerically, not
                      -- lexicographically (so 0.10 > 0.9).
                      orderBy
                          [ asc (pn ^. PackageNameName)
                          , desc (versionArray v)
                          , desc (sp ^. SnapshotPackageRevision)
                          , desc (snap ^. SnapshotCreated)
                          ]
                      pure
                          ( snap ^. SnapshotName
                          , pn ^. PackageNameName
                          , v ^. VersionVersion
                          , sp ^. SnapshotPackageSynopsis
                          , sp ^. SnapshotPackageOrigin)))
  where
    toPackageListingInfo (Value snapName, name, version, synopsis, origin) =
        ( snapName
        , PackageListingInfo
              { pliName = unValue name
              , pliVersion = unValue version
              , pliSynopsis = unValue synopsis
              , pliOrigin = unValue origin
              })
-- | List all packages contained in the given snapshot, ordered by package
-- name, with version, synopsis and origin for display.
getPackagesForSnapshot :: GetStackageDatabase env m => SnapshotId -> m [PackageListingInfo]
getPackagesForSnapshot snapshotId =
    run (map toPackageListingInfo <$>
         select
             (from $ \(sp `InnerJoin` pn `InnerJoin` v) -> do
                  on (sp ^. SnapshotPackageVersion ==. v ^. VersionId)
                  on (sp ^. SnapshotPackagePackageName ==. pn ^. PackageNameId)
                  where_ (sp ^. SnapshotPackageSnapshot ==. val snapshotId)
                  orderBy [asc (pn ^. PackageNameName)]
                  pure
                      ( pn ^. PackageNameName
                      , v ^. VersionVersion
                      , sp ^. SnapshotPackageSynopsis
                      , sp ^. SnapshotPackageOrigin)))
  where
    toPackageListingInfo (Value pliName, Value pliVersion, Value pliSynopsis, Value pliOrigin) =
        PackageListingInfo {pliName, pliVersion, pliSynopsis, pliOrigin}
-- | List every (package name, version) pair in the given snapshot, ordered by
-- package name. Used when computing the diff between two snapshots.
getPackagesForSnapshotDiff :: GetStackageDatabase env m => SnapshotId -> m [(PackageNameP, VersionP)]
getPackagesForSnapshotDiff snapshotId =
    run (map toNameVersion <$>
         select
             (from $ \(sp `InnerJoin` pn `InnerJoin` v) -> do
                  on (sp ^. SnapshotPackageVersion ==. v ^. VersionId)
                  on (sp ^. SnapshotPackagePackageName ==. pn ^. PackageNameId)
                  where_ (sp ^. SnapshotPackageSnapshot ==. val snapshotId)
                  orderBy [asc (pn ^. PackageNameName)]
                  pure
                      ( pn ^. PackageNameName
                      , v ^. VersionVersion
                      )))
  where
    -- Helper renamed from the copy-pasted @toPackageListingInfo@: it merely
    -- unwraps a name/version pair and builds no PackageListingInfo.
    toNameVersion (Value name, Value version) = (name, version)
-- | Look up the version at which a package is present in the given snapshot,
-- if it is present at all.
getPackageVersionForSnapshot
  :: GetStackageDatabase env m
  => SnapshotId -> PackageNameP -> m (Maybe VersionP)
getPackageVersionForSnapshot snapshotId pname =
    run $
    selectApplyMaybe
        unValue
        (from $ \(sp `InnerJoin` pn `InnerJoin` v) -> do
             on (sp ^. SnapshotPackageVersion ==. v ^. VersionId)
             on (sp ^. SnapshotPackagePackageName ==. pn ^. PackageNameId)
             where_
                 ((sp ^. SnapshotPackageSnapshot ==. val snapshotId) &&.
                  (pn ^. PackageNameName ==. val pname))
             pure (v ^. VersionVersion))
-- | Generic helper: find the SnapshotPackage row for a package in the newest
-- snapshot of one branch. @onWhich@ joins the branch table (Lts or Nightly)
-- to Snapshot, and @orderWhich@ orders that branch newest-first.
getLatest ::
       FromPreprocess t
    => PackageNameId
    -> (t -> SqlExpr (Value SnapshotId))
    -> (t -> SqlQuery ())
    -> ReaderT SqlBackend (RIO env) (Maybe SnapshotPackageId)
getLatest pnameid onWhich orderWhich =
    selectApplyMaybe
        unValue
        (from $ \(which `InnerJoin` snap `InnerJoin` sp) -> do
             on (sp ^. SnapshotPackageSnapshot ==. snap ^. SnapshotId)
             on (snap ^. SnapshotId ==. onWhich which)
             where_ (sp ^. SnapshotPackagePackageName ==. val pnameid)
             orderWhich which
             limit 1
             pure (sp ^. SnapshotPackageId))
-- | For a package name, report the newest LTS and newest nightly snapshot
-- that contain it, each with the package version (and revision) it carries.
getLatests :: PackageNameP -> ReaderT SqlBackend (RIO env) [LatestInfo]
getLatests pname = do
    pid <- getPackageNameId $ unPackageNameP pname
    mlts <-
        getLatest
            pid
            (^. LtsSnap)
            (\lts -> orderBy [desc (lts ^. LtsMajor), desc (lts ^. LtsMinor)])
    mnightly <-
        getLatest
            pid
            (^. NightlySnap)
            (\nightly -> orderBy [desc (nightly ^. NightlyDay)])
    for (catMaybes [mlts, mnightly]) $ \spid -> do
        -- These lookups cannot fail: each id was just selected from the
        -- corresponding table, hence the "impossible" error messages.
        sp <- fromMaybe (error "impossible") <$> get spid
        snap <- fromMaybe (error "impossible") <$> get (snapshotPackageSnapshot sp)
        version <- fromMaybe (error "impossible") <$> get (snapshotPackageVersion sp)
        pure LatestInfo
            { liSnapName = snapshotName snap
            , liVersionRev = toVersionMRev (versionVersion version) (snapshotPackageRevision sp)
            }
-- | Looks up in pantry the latest information about the package on Hackage:
-- newest version (numerically ordered) and, within it, newest revision.
getHackageLatestVersion ::
       PackageNameP -> ReaderT SqlBackend (RIO env) (Maybe HackageCabalInfo)
getHackageLatestVersion pname =
    selectApplyMaybe toHackageCabalInfo $
    from
        (\(hc `InnerJoin` pn `InnerJoin` v) -> do
             on (hc ^. HackageCabalVersion ==. v ^. VersionId)
             on (hc ^. HackageCabalName ==. pn ^. PackageNameId)
             where_ (pn ^. PackageNameName ==. val pname)
             orderBy [desc (versionArray v), desc (hc ^. HackageCabalRevision)]
             limit 1
             pure
                 ( hc ^. HackageCabalId
                 , hc ^. HackageCabalCabal
                 , v ^. VersionVersion
                 , hc ^. HackageCabalRevision))
  where
    toHackageCabalInfo (cid, cbid, v, rev) =
        HackageCabalInfo
            { hciCabalId = unValue cid
            , hciCabalBlobId = unValue cbid
            , hciPackageName = pname
            , hciVersionRev = toVersionRev (unValue v) (unValue rev)
            }
-- | Run 'getSnapshotPackageInfoQuery' against the database.
getSnapshotPackageInfo ::
       GetStackageDatabase env m => SnapName -> PackageNameP -> m (Maybe SnapshotPackageInfo)
getSnapshotPackageInfo snapName = run . getSnapshotPackageInfoQuery snapName
-- | Query the package info for one package within one named snapshot,
-- returning at most a single match.
getSnapshotPackageInfoQuery ::
       SnapName -> PackageNameP -> ReaderT SqlBackend (RIO env) (Maybe SnapshotPackageInfo)
getSnapshotPackageInfoQuery snapName pname =
    fmap snd . listToMaybe <$>
    snapshotPackageInfoQuery
        (\_sp s pn _v spiQ -> do
             where_
                 ((s ^. SnapshotName ==. val snapName) &&.
                  (pn ^. PackageNameName ==. val pname))
             pure ((), spiQ))
-- | Gather everything the package page needs for one snapshot package:
-- latest Hackage info, dependency lists (capped at @maxDisplayedDeps@ each),
-- latest snapshot occurrences, module names and the pantry cabal key.
getSnapshotPackagePageInfoQuery :: SnapshotPackageInfo -> Int -> ReaderT SqlBackend (RIO env) SnapshotPackagePageInfo
getSnapshotPackagePageInfoQuery spi maxDisplayedDeps = do
    mhciLatest <- getHackageLatestVersion $ spiPackageName spi
    -- TODO: check for `spiOrigin spi` once other than `Hackage` are implemented
    forwardDepsCount <- getForwardDepsCount spi
    reverseDepsCount <- getReverseDepsCount spi
    -- Only fetch the (limited) dependency lists when there is anything to show.
    forwardDeps <-
        if forwardDepsCount > 0
            then getForwardDeps spi (Just maxDisplayedDeps)
            else pure []
    reverseDeps <-
        if reverseDepsCount > 0
            then getReverseDeps spi (Just maxDisplayedDeps)
            else pure []
    latestInfo <- getLatests (spiPackageName spi)
    moduleNames <- getModuleNames (spiSnapshotPackageId spi)
    mcabalBlobKey <- traverse getBlobKey $ spiCabalBlobId spi
    pure
        SnapshotPackagePageInfo
            { sppiSnapshotPackageInfo = spi
            , sppiLatestHackageCabalInfo = mhciLatest
            , sppiForwardDeps = map (first dropVersionRev) forwardDeps
            , sppiForwardDepsCount = forwardDepsCount
            , sppiReverseDeps = map (first dropVersionRev) reverseDeps
            , sppiReverseDepsCount = reverseDepsCount
            , sppiLatestInfo = latestInfo
            , sppiModuleNames = moduleNames
            , sppiPantryCabal =
                  mcabalBlobKey RIO.<&> \cabalBlobKey ->
                      PantryCabal
                          { pcPackageName = spiPackageName spi
                          , pcVersion = spiVersion spi
                          , pcCabalKey = cabalBlobKey
                          }
            -- Just the current version/revision when some candidate (latest
            -- Hackage release or a latest-snapshot occurrence) is strictly
            -- newer than this snapshot's; Nothing when already up to date.
            , sppiVersion =
                  listToMaybe
                      [ spiVersionRev spi
                      | VersionRev ver mrev <-
                            maybe [] (pure . hciVersionRev) mhciLatest ++
                            map liVersionRev latestInfo
                      , ver > curVer ||
                            (ver == curVer &&
                             fromMaybe (Revision 0) mrev > fromMaybe (Revision 0) mcurRev)
                      ]
            }
  where
    VersionRev curVer mcurRev = spiVersionRev spi
-- | Run 'getSnapshotPackagePageInfoQuery' against the database.
getSnapshotPackagePageInfo ::
       GetStackageDatabase env m => SnapshotPackageInfo -> Int -> m SnapshotPackagePageInfo
getSnapshotPackagePageInfo spi = run . getSnapshotPackagePageInfoQuery spi
-- | SQL expressions for the columns backing 'SnapshotPackageInfo', in the
-- field order expected by 'snapshotPackageInfoQuery'.
type SqlExprSPI
     = ( SqlExpr (Value SnapshotPackageId)
       , SqlExpr (Value SnapshotId)
       , SqlExpr (Value SnapName)
       , SqlExpr (Value PackageNameP)
       , SqlExpr (Value (Maybe BlobId))
       , SqlExpr (Value VersionP)
       , SqlExpr (Value (Maybe Revision))
       , SqlExpr (Value Origin)
       , SqlExpr (Value (Maybe TreeEntryId))
       , SqlExpr (Value (Maybe TreeEntryId))
       )
-- | Shared skeleton for all SnapshotPackageInfo queries: joins
-- SnapshotPackage/Snapshot/PackageName/Version and lets the caller add
-- filters/ordering plus an extra result via the @customize@ callback.
snapshotPackageInfoQuery ::
       (SqlSelect a b)
    => ( SqlExpr (Entity SnapshotPackage)
       -> SqlExpr (Entity Snapshot)
       -> SqlExpr (Entity PackageName)
       -> SqlExpr (Entity Version)
       -> SqlExprSPI
       -> SqlQuery (a, SqlExprSPI)
     )
    -> ReaderT SqlBackend (RIO env) [(b, SnapshotPackageInfo)]
snapshotPackageInfoQuery customize =
    fmap (\(extraValue, spiValues) -> (extraValue, toSnapshotPackageInfo spiValues)) <$>
    select
        (from $ \(sp `InnerJoin` s `InnerJoin` pn `InnerJoin` v) -> do
             on (sp ^. SnapshotPackageVersion ==. v ^. VersionId)
             on (sp ^. SnapshotPackagePackageName ==. pn ^. PackageNameId)
             on (sp ^. SnapshotPackageSnapshot ==. s ^. SnapshotId)
             customize sp s pn v $
                 ( sp ^. SnapshotPackageId
                 , s ^. SnapshotId
                 , s ^. SnapshotName
                 , pn ^. PackageNameName
                 , sp ^. SnapshotPackageCabal
                 , v ^. VersionVersion
                 , sp ^. SnapshotPackageRevision
                 , sp ^. SnapshotPackageOrigin
                 , sp ^. SnapshotPackageReadme
                 , sp ^. SnapshotPackageChangelog))
  where
    -- Unwrap the raw Values into the record, in the same field order as the
    -- tuple above.
    toSnapshotPackageInfo (spid, sid, sn, pn, spc, v, spr, spo, rm, cl) =
        SnapshotPackageInfo
            { spiSnapshotPackageId = unValue spid
            , spiSnapshotId = unValue sid
            , spiCabalBlobId = unValue spc
            , spiSnapName = unValue sn
            , spiPackageName = unValue pn
            , spiVersion = unValue v
            , spiRevision = unValue spr
            , spiOrigin = unValue spo
            , spiReadme = unValue rm
            , spiChangelog = unValue cl
            }
-- | Find the newest snapshot containing the given package and return that
-- snapshot's package info, if the package exists anywhere.
getSnapshotPackageLatestVersionQuery ::
       PackageNameP -> ReaderT SqlBackend (RIO env) (Maybe SnapshotPackageInfo)
getSnapshotPackageLatestVersionQuery pname =
    fmap snd . listToMaybe <$>
    snapshotPackageInfoQuery
        (\_sp s pn _v spiQ -> do
             where_ (pn ^. PackageNameName ==. val pname)
             orderBy [desc (s ^. SnapshotId)]
             limit 1
             pure ((), spiQ))
-- | Run 'getSnapshotPackageLatestVersionQuery' against the database.
getSnapshotPackageLatestVersion ::
       GetStackageDatabase env m
    => PackageNameP
    -> m (Maybe SnapshotPackageInfo)
getSnapshotPackageLatestVersion = run . getSnapshotPackageLatestVersionQuery
-- | A helper that runs a @select@ expected to return at most one row and
-- applies a conversion function to that row, if present.
selectApplyMaybe ::
       (SqlSelect a b, MonadIO m) => (b -> r) -> SqlQuery a -> ReaderT SqlBackend m (Maybe r)
selectApplyMaybe f query = fmap f . listToMaybe <$> select query
-- | Convert a string representation of a version to an array so it can be used for sorting.
-- Splits on \".\" in SQL and casts each component to an integer, which gives
-- numerically correct ordering (e.g. 0.10 sorts after 0.9).
versionArray :: SqlExpr (Entity Version) -> SqlExpr (Value [Int64])
versionArray v = stringsToInts (stringToArray (v ^. VersionVersion) (val ("." :: String)))
-- | Cast a SQL text array to @INTEGER[]@ (PostgreSQL-specific cast).
stringsToInts :: SqlExpr (Value [String]) -> SqlExpr (Value [Int64])
stringsToInts = unsafeSqlCastAs "INTEGER[]"
-- | Define postgresql native function in Haskell with Esqueleto:
-- @string_to_array(s1, s2)@ splits string @s1@ on delimiter @s2@.
stringToArray ::
       (SqlString s1, SqlString s2)
    => SqlExpr (Value s1)
    -> SqlExpr (Value s2)
    -> SqlExpr (Value [String])
stringToArray s1 s2 = unsafeSqlFunction "string_to_array" (s1, s2)
-- | List all snapshots containing the given package (optionally limited),
-- newest snapshot first, each paired with the compiler it was built with.
getSnapshotsForPackage
    :: GetStackageDatabase env m
    => PackageNameP
    -> Maybe Int -- ^ optional limit on the number of snapshots returned
    -> m [(CompilerP, SnapshotPackageInfo)]
getSnapshotsForPackage pname mlimit =
    fmap (first unValue) <$>
    run (snapshotPackageInfoQuery $ \_sp s pn _v spiQ -> do
            where_ (pn ^. PackageNameName ==. val pname)
            orderBy [desc (s ^. SnapshotCreated)]
            forM_ mlimit (limit . fromIntegral)
            pure (s ^. SnapshotCompiler, spiQ))
-- | Load the full package description: either from a bare Hackage cabal blob
-- (no readme/changelog available) or from a snapshot package, whose readme
-- and changelog files are fetched alongside the cabal file.
getPackageInfoQuery :: Either HackageCabalInfo SnapshotPackageInfo -> ReaderT SqlBackend (RIO env) PackageInfo
getPackageInfoQuery (Left hci) = do
    cabalBlob <- loadBlobById (hciCabalBlobId hci)
    pure $ toPackageInfo (parseCabalBlob cabalBlob) Nothing Nothing
getPackageInfoQuery (Right spi) =
    case spiCabalBlobId spi of
        Just cabalBlobId -> do
            gpd <- parseCabalBlob <$> loadBlobById cabalBlobId
            mreadme <- maybe (pure Nothing) getFileByTreeEntryId (spiReadme spi)
            mchangelog <- maybe (pure Nothing) getFileByTreeEntryId (spiChangelog spi)
            pure $
                toPackageInfo
                    gpd
                    (toContentFile Readme <$> mreadme)
                    (toContentFile Changelog <$> mchangelog)
        Nothing -> error "FIXME: handle a case when cabal file isn't available but package.yaml is"
  where
    -- Wrap raw file bytes in the content constructor, flagging whether the
    -- path looks like Markdown so the page can render it accordingly.
    toContentFile :: (ByteString -> Bool -> a) -> (SafeFilePath, ByteString) -> a
    toContentFile con (path, bs) = con bs (isMarkdownFilePath path)
-- | Run 'getPackageInfoQuery' against the database.
getPackageInfo ::
       GetStackageDatabase env m => Either HackageCabalInfo SnapshotPackageInfo -> m PackageInfo
getPackageInfo = run . getPackageInfoQuery
-- | Load the path and raw contents of a file stored in pantry, addressed by
-- its tree entry id.
getFileByTreeEntryId ::
       TreeEntryId
    -> ReaderT SqlBackend (RIO env) (Maybe (SafeFilePath, ByteString))
getFileByTreeEntryId teid =
    selectApplyMaybe (bimap unValue unValue) $
    from $ \(te `InnerJoin` fp `InnerJoin` b) -> do
        on $ te ^. TreeEntryBlob ==. b ^. BlobId
        on $ te ^. TreeEntryPath ==. fp ^. FilePathId
        where_ $ te ^. TreeEntryId ==. val teid
        pure (fp ^. FilePathPath, b ^. BlobContents)
-- | Map each module of a snapshot package to its has-docs flag.
-- The 'orderBy' has no effect on the resulting 'Map'.
getModuleNames :: SnapshotPackageId -> ReaderT SqlBackend (RIO env) (Map ModuleNameP Bool)
getModuleNames spid =
    Map.fromList . map (\(md, hs) -> (unValue md, unValue hs)) <$>
    select
        (from $ \(spm `InnerJoin` pm) -> do
             on (spm ^. SnapshotPackageModuleModule ==. pm ^. ModuleNameId)
             where_ (spm ^. SnapshotPackageModuleSnapshotPackage ==. val spid)
             orderBy [desc (pm ^. ModuleNameName)]
             pure (pm ^. ModuleNameName, spm ^. SnapshotPackageModuleHasDocs))
------ Dependencies
-- | List the packages this snapshot package depends on, resolved within the
-- same snapshot, with the version range it declares for each, ordered by
-- dependency name and optionally limited.
getForwardDeps ::
       SnapshotPackageInfo
    -> Maybe Int -- ^ Optionally limit the number of dependencies
    -> ReaderT SqlBackend (RIO env) [(PackageVersionRev, VersionRangeP)]
getForwardDeps spi mlimit =
    fmap toDepRange <$>
    select
        (from $ \(user `InnerJoin` uses `InnerJoin` pn `InnerJoin` v) -> do
             on (uses ^. SnapshotPackageVersion ==. v ^. VersionId)
             on (uses ^. SnapshotPackagePackageName ==. pn ^. PackageNameId)
             on (user ^. DepUses ==. uses ^. SnapshotPackagePackageName)
             where_ $
                 (user ^. DepUser ==. val (spiSnapshotPackageId spi)) &&.
                 (uses ^. SnapshotPackageSnapshot ==. val (spiSnapshotId spi))
             orderBy [asc (pn ^. PackageNameName)]
             maybe (pure ()) (limit . fromIntegral) mlimit
             pure
                 ( pn ^. PackageNameName
                 , v ^. VersionVersion
                 , uses ^. SnapshotPackageRevision
                 , user ^. DepRange))
  where
    toDepRange (pn, v, rev, range) =
        (PackageVersionRev (unValue pn) (toVersionMRev (unValue v) (unValue rev)), unValue range)
-- | Count the direct dependencies recorded for this snapshot package.
getForwardDepsCount :: SnapshotPackageInfo -> ReaderT SqlBackend (RIO env) Int
getForwardDepsCount spi =
    let spid = spiSnapshotPackageId spi
     in P.count [DepUser P.==. spid]
-- | Count packages in the same snapshot that depend on this package,
-- matching by package name within the snapshot.
getReverseDepsCount :: SnapshotPackageInfo -> ReaderT SqlBackend (RIO env) Int
getReverseDepsCount spi =
    fromMaybe 0 <$>
    selectApplyMaybe unValue
        (from $ \(sp `InnerJoin` dep `InnerJoin` curPn) -> do
             on (dep ^. DepUses ==. curPn ^. PackageNameId)
             on (sp ^. SnapshotPackageId ==. dep ^. DepUser)
             where_ $
                 (curPn ^. PackageNameName ==. val (spiPackageName spi)) &&.
                 (sp ^. SnapshotPackageSnapshot ==. val (spiSnapshotId spi))
             pure countRows)
-- | Fetch the (forward, reverse) dependency counts for a snapshot package in
-- a single database transaction.
getDepsCount :: GetStackageDatabase env m => SnapshotPackageInfo -> m (Int, Int)
getDepsCount spi =
    run $ do
        forward <- getForwardDepsCount spi
        reverse' <- getReverseDepsCount spi
        pure (forward, reverse')
-- | List the packages in the same snapshot that depend on this package, with
-- the version range each declares, ordered by name and optionally limited.
getReverseDeps ::
       SnapshotPackageInfo
    -> Maybe Int -- ^ Optionally limit number of dependencies
    -> ReaderT SqlBackend (RIO env) [(PackageVersionRev, VersionRangeP)]
getReverseDeps spi mlimit =
    fmap toDepRange <$>
    select
        (from $ \(sp `InnerJoin` dep `InnerJoin` pn `InnerJoin` v `InnerJoin` curPn) -> do
             on (dep ^. DepUses ==. curPn ^. PackageNameId)
             on (sp ^. SnapshotPackageVersion ==. v ^. VersionId)
             on (sp ^. SnapshotPackagePackageName ==. pn ^. PackageNameId)
             on (sp ^. SnapshotPackageId ==. dep ^. DepUser)
             where_ $
                 (curPn ^. PackageNameName ==. val (spiPackageName spi)) &&.
                 (sp ^. SnapshotPackageSnapshot ==. val (spiSnapshotId spi))
             orderBy [asc (pn ^. PackageNameName)]
             maybe (pure ()) (limit . fromIntegral) mlimit
             pure
                 ( pn ^. PackageNameName
                 , v ^. VersionVersion
                 , sp ^. SnapshotPackageRevision
                 , dep ^. DepRange))
  where
    toDepRange (pn, v, rev, range) =
        (PackageVersionRev (unValue pn) (toVersionMRev (unValue v) (unValue rev)), unValue range)
----- Deprecated
-- | Check whether a package is marked deprecated and, if so, which packages
-- it is deprecated in favour of. Unknown packages count as not deprecated.
getDeprecatedQuery :: PackageNameP -> ReaderT SqlBackend (RIO env) (Bool, [PackageNameP])
getDeprecatedQuery pname = do
    mpnid <- lookupPackageNameId pname
    case mpnid of
        Nothing -> pure notDeprecated
        Just pnid -> do
            mdep <- P.getBy (UniqueDeprecated pnid)
            case mdep of
                Nothing -> pure notDeprecated
                Just (Entity _ (Deprecated _ inFavourOfIds)) -> do
                    names <- mapM lookupPackageNameById inFavourOfIds
                    pure (True, catMaybes names)
  where
    notDeprecated = (False, [])
-- | See if a package is deprecated on hackage and in favour of which
-- packages.
getDeprecated :: GetStackageDatabase env m => PackageNameP -> m (Bool, [PackageNameP])
getDeprecated = run . getDeprecatedQuery
--------------------------
-- Cron related queries --
--------------------------
-- | Record when the cron job last refreshed this snapshot.
snapshotMarkUpdated :: GetStackageDatabase env m => SnapshotId -> UTCTime -> m ()
snapshotMarkUpdated sid time = run $ P.update sid [SnapshotUpdatedOn P.=. Just time]
-- | Insert the branch-specific row ('Lts' or 'Nightly') for a snapshot,
-- silently skipping duplicates.
insertSnapshotName :: GetStackageDatabase env m => SnapshotId -> SnapName -> m ()
insertSnapshotName snapKey = \case
    SNLts major minor -> run $ void $ insertUnique $ Lts snapKey major minor
    SNNightly day -> run $ void $ insertUnique $ Nightly snapKey day
-- | Add a map of all dependencies for the package together with version bounds. Returns a set of
-- all dependencies that could not be found in pantry
insertDeps ::
       HasLogFunc env
    => PackageIdentifierP -- ^ For error reporting only.
    -> SnapshotPackageId
    -> Map PackageNameP VersionRangeP
    -> ReaderT SqlBackend (RIO env) (Set PackageNameP)
insertDeps pid snapshotPackageId dependencies =
    -- 'traverseMaybeWithKey' keeps only the entries for which 'insertDep'
    -- returns Just — i.e. the unresolved dependencies — so the keys of the
    -- surviving map are exactly the names that could not be found.
    Map.keysSet <$> Map.traverseMaybeWithKey insertDep dependencies
  where
    insertDep dep range =
        lookupPackageNameId dep >>= \case
            Just packageNameId -> do
                -- 'insertBy' ignores an already-present Dep row.
                void $ insertBy (Dep snapshotPackageId packageNameId range)
                return Nothing
            Nothing -> do
                lift $
                    logWarn $
                    "Couldn't find a dependency of " <> display pid <> " in Pantry with name: " <>
                    display dep
                return $ Just dep
-- | Database ids resolved from a package's cabal file (or tree).
data CabalFileIds = CabalFileIds
    { cfiPackageNameId :: !PackageNameId -- ^ id of the package's name
    , cfiVersionId :: !VersionId -- ^ id of the package's version
    , cfiCabalBlobId :: !(Maybe BlobId) -- ^ blob of the cabal file, when one exists
    , cfiModuleNameIds :: ![ModuleNameId] -- ^ ids of the exposed module names
    }
-- | Resolve the database ids (package name, version, cabal blob and exposed
-- modules) for an already-stored cabal blob and its parsed description.
getCabalFileIds ::
       HasLogFunc env
    => BlobId
    -> GenericPackageDescription
    -> ReaderT SqlBackend (RIO env) CabalFileIds
getCabalFileIds cabalBlobId gpd = do
    let PackageIdentifier pkgName pkgVersion = package (packageDescription gpd)
    nameId <- getPackageNameId pkgName
    versionId <- getVersionId pkgVersion
    moduleIds <- mapM insertModuleSafe (extractModuleNames gpd)
    pure
        CabalFileIds
            { cfiPackageNameId = nameId
            , cfiVersionId = versionId
            , cfiCabalBlobId = Just cabalBlobId
            , cfiModuleNameIds = moduleIds
            }
-- | Parse and store a raw cabal file, returning the parsed description
-- together with its resolved ids, or 'Nothing' if parsing fails.
addCabalFile ::
       HasLogFunc env
    => PackageIdentifierP
    -> ByteString
    -> ReaderT SqlBackend (RIO env) (Maybe (GenericPackageDescription, CabalFileIds))
addCabalFile pid cabalBlob = do
    mgpd <- lift $ parseCabalBlobMaybe pid cabalBlob
    forM mgpd $ \gpd -> do
        (blobId, _blobKey) <- storeBlob cabalBlob
        ids <- getCabalFileIds blobId gpd
        pure (gpd, ids)
-- | Resolve database ids for a package, either from pre-resolved cabal file
-- ids or from a pantry tree (whose tree/blob key pair is also returned).
getPackageIds ::
       GenericPackageDescription
    -> Either CabalFileIds (Entity Tree)
    -> ReaderT SqlBackend (RIO env) (CabalFileIds, Maybe (TreeId, BlobId))
getPackageIds gpd =
    \case
        Left cabalFileIds -> pure (cabalFileIds, Nothing)
        Right (Entity treeId tree)
        -- -- TODO: Remove Maybe from cfiCabalBlobId and
        -- -- Generate cabal file from package.yaml:
        -- case treeCabal tree of
        --   Just cabalBlobId -> pure cabalBlobId
        --   Nothing -> do
        --     let rawMetaData = RawPackageMetadata {
        --           rpmName = Just pname
        --         , rpmVersion = Just pver
        --         , rpmTreeKey = treeKey tree
        --         }
        --         rpli = ... get
        --     generateHPack (RPLIArchive / RPLIRepo ..) treeId treeVersion tree
        --     ...
         -> do
            moduleNameIds <- mapM insertModuleSafe (extractModuleNames gpd)
            -- Name/version/cabal blob come from the tree itself; the cabal
            -- blob may be absent (package.yaml-only packages, see TODO above).
            let cabalFileIds =
                    CabalFileIds
                        { cfiPackageNameId = treeName tree
                        , cfiVersionId = treeVersion tree
                        , cfiCabalBlobId = treeCabal tree
                        , cfiModuleNameIds = moduleNameIds
                        }
            pure (cabalFileIds, Just (treeId, treeKey tree))
-- TODO: Optimize, whenever package is already in one snapshot only create the modules and new
-- SnapshotPackage
-- | Insert (or, when the origin matches, update) a SnapshotPackage row and
-- its dependency and module records.
addSnapshotPackage ::
       HasLogFunc env
    => SnapshotId
    -> CompilerP
    -> Origin
    -> Either CabalFileIds (Entity Tree) -- ^ source of the package's ids
    -> Maybe HackageCabalId -- ^ for resolving the Hackage revision, if any
    -> Bool -- ^ is the package hidden in this snapshot
    -> Map FlagNameP Bool -- ^ cabal flag settings used for this snapshot
    -> PackageIdentifierP
    -> GenericPackageDescription
    -> ReaderT SqlBackend (RIO env) ()
addSnapshotPackage snapshotId compiler origin eCabalTree mHackageCabalId isHidden flags pid gpd = do
    (CabalFileIds {..}, mTree) <- getPackageIds gpd eCabalTree
    let mTreeId = fst <$> mTree
    mrevision <- maybe (pure Nothing) getHackageRevision mHackageCabalId
    -- Locate readme/changelog tree entries when both a tree and a filename
    -- pattern are available.
    mreadme <- fromMaybe (pure Nothing) $ getContentTreeEntryId <$> mTreeId <*> mreadmeQuery
    mchangelog <- fromMaybe (pure Nothing) $ getContentTreeEntryId <$> mTreeId <*> mchangelogQuery
    let snapshotPackage =
            SnapshotPackage
                { snapshotPackageSnapshot = snapshotId
                , snapshotPackagePackageName = cfiPackageNameId
                , snapshotPackageVersion = cfiVersionId
                , snapshotPackageRevision = mrevision
                , snapshotPackageCabal = cfiCabalBlobId
                , snapshotPackageTreeBlob = snd <$> mTree
                , snapshotPackageOrigin = origin
                , snapshotPackageOriginUrl = "" -- TODO: add
                , snapshotPackageSynopsis = getSynopsis gpd
                , snapshotPackageReadme = mreadme
                , snapshotPackageChangelog = mchangelog
                , snapshotPackageIsHidden = isHidden
                , snapshotPackageFlags = flags
                }
        checkForDuplicate =
            \case
                Right key -> pure $ Just key
                Left entity
                    -- Make sure this package still comes from the same place and update
                    -- all the fields to newest values. Necessary for making sure global
                    -- hints do not overwrite hackage packages, but still allows for
                    -- updating package info in case of a forceful update.
                    | snapshotPackageOrigin (entityVal entity) == origin -> do
                        P.replace (entityKey entity) snapshotPackage
                        pure $ Just (entityKey entity)
                _ -> pure Nothing
    msnapshotPackageId <- checkForDuplicate =<< P.insertBy snapshotPackage
    forM_ msnapshotPackageId $ \snapshotPackageId -> do
        _ <- insertDeps pid snapshotPackageId (extractDependencies compiler flags gpd)
        -- TODO: collect all missing dependencies and make a report
        -- Modules start with has-docs = False; docs are marked later by the
        -- cron job (see 'markModuleHasDocs').
        forM_ cfiModuleNameIds $ \modNameId -> do
            void $ P.insertBy (SnapshotPackageModule snapshotPackageId modNameId False)
-- | Find the tree entry (e.g. readme or changelog) whose path satisfies the
-- supplied predicate, preferring a markdown file over any other match.
getContentTreeEntryId ::
     TreeId
  -> (SqlExpr (Value SafeFilePath) -> SqlExpr (Value Bool))
  -> ReaderT SqlBackend (RIO env) (Maybe TreeEntryId)
getContentTreeEntryId treeId filePathQuery = do
  (mteid, _isMarkdown) <- foldl' preferMarkdown (Nothing, False) <$>
    select
      (from $ \(te `InnerJoin` p) -> do
         on $ te ^. TreeEntryPath ==. p ^. FilePathId
         where_ $ (te ^. TreeEntryTree ==. val treeId) &&. filePathQuery (p ^. FilePathPath)
         pure (p ^. FilePathPath, te ^. TreeEntryId))
  pure mteid
  -- Keep replacing the candidate until a markdown file is seen; once the flag
  -- is True the accumulator is frozen.
  where preferMarkdown (_, False) (Value path, Value teid) = (Just teid, isMarkdownFilePath path)
        preferMarkdown pref@(_, True) _ = pref
-- | SQL predicate matching paths that start with @changelog.@ or @changes.@
-- (case-insensitive); 'Nothing' when either prefix is not a valid safe path.
mchangelogQuery :: Maybe (SqlExpr (Value SafeFilePath) -> SqlExpr (Value Bool))
mchangelogQuery =
  matcher <$> mkSafeFilePath "changelog." <*> mkSafeFilePath "changes."
  where
    matcher changelog changes path =
      (path `ilike` val changelog ++. (%)) ||. (path `ilike` val changes ++. (%))
-- | SQL predicate matching paths that start with @readme.@ (case-insensitive).
mreadmeQuery :: Maybe (SqlExpr (Value SafeFilePath) -> SqlExpr (Value Bool))
mreadmeQuery =
  fmap (\readme path -> path `ilike` val readme ++. (%)) (mkSafeFilePath "readme.")
-- | Look up the revision number recorded for a Hackage cabal file.
getHackageRevision :: MonadIO m => HackageCabalId -> ReaderT SqlBackend m (Maybe Revision)
getHackageRevision hcid =
  selectApplyMaybe unValue $
  from $ \hc -> do
    where_ (hc ^. HackageCabalId ==. val hcid)
    pure (hc ^. HackageCabalRevision)
-- | Resolve a package name to its database key, if the package is known.
lookupPackageNameId :: PackageNameP -> ReaderT SqlBackend (RIO env) (Maybe PackageNameId)
lookupPackageNameId pname = do
  mentity <- getBy (UniquePackageName pname)
  pure (entityKey <$> mentity)
-- | Inverse of 'lookupPackageNameId': resolve a database key to a name.
lookupPackageNameById :: PackageNameId -> ReaderT SqlBackend (RIO env) (Maybe PackageNameP)
lookupPackageNameById pnid = do
  mname <- getPackageNameById pnid
  pure (PackageNameP <$> mname)
-- | Add or update a package deprecation and its "in favor" list.  Problems
-- (the deprecated package, or members of the in-favour-of list, missing from
-- pantry) are logged rather than raised; nothing is returned.
addDeprecated :: HasLogFunc env => Deprecation -> ReaderT SqlBackend (RIO env) ()
addDeprecated (Deprecation pname inFavourOfNameSet) = do
  mPackageNameId <- lookupPackageNameId pname
  case mPackageNameId of
    Just packageNameId -> do
      let inFavourOfNames = Set.toList inFavourOfNameSet
      inFavourOfAllIds <- mapM lookupPackageNameId inFavourOfNames
      -- Split "in favour of" names into unknown ones (Left) and resolved
      -- ids (Right).
      let (badNames, inFavourOfIds) =
            partitionEithers $
            L.zipWith
              (\name mid -> maybe (Left name) Right mid)
              inFavourOfNames
              inFavourOfAllIds
      -- Insert-or-replace keyed on the deprecated package.
      void $
        upsertBy
          (UniqueDeprecated packageNameId)
          (Deprecated packageNameId inFavourOfIds)
          [DeprecatedInFavourOf P.=. inFavourOfIds]
      unless (null badNames) $
        lift $
        logError $
        mconcat
          ("Couldn't find in Pantry names of packages in deprecation list: " :
           L.intersperse ", " (map display badNames))
    Nothing ->
      lift $
      logError $
      "Package name: " <> display pname <> " from deprecation list was not found in Pantry."
-- | In a single transaction drop every stored deprecation and insert the
-- supplied ones in its place.
setDeprecations :: GetStackageDatabase env m => [Deprecation] -> m ()
setDeprecations deprecations = run $ do
  -- Unconditional delete of the whole table.
  delete $ from $ \(_old :: SqlExpr (Entity Deprecated)) -> pure ()
  forM_ deprecations addDeprecated
-- | Specialisation of 'getHackageCabalByRev' for revision 0.
getHackageCabalByRev0 ::
     PackageIdentifierP
  -> ReaderT SqlBackend (RIO env) (Maybe (HackageCabalId, BlobId, Maybe TreeId))
getHackageCabalByRev0 = flip getHackageCabalByRev Nothing
-- | Look up the Hackage cabal record for a package at a given revision
-- (revision 0 when 'Nothing' is supplied), returning its id, cabal blob id
-- and, if the tarball has been loaded, its tree id.
getHackageCabalByRev ::
     PackageIdentifierP
  -> Maybe Revision
  -> ReaderT SqlBackend (RIO env) (Maybe (HackageCabalId, BlobId, Maybe TreeId))
getHackageCabalByRev (PackageIdentifierP pname ver) mrev =
  selectApplyMaybe (\(Value hcid, Value bid, Value mtid) -> (hcid, bid, mtid)) $
  from $ \(hc `InnerJoin` pn `InnerJoin` v) -> do
    on (hc ^. HackageCabalVersion ==. v ^. VersionId)
    on (hc ^. HackageCabalName ==. pn ^. PackageNameId)
    where_
      ((pn ^. PackageNameName ==. val pname) &&. (v ^. VersionVersion ==. val ver) &&.
       (hc ^. HackageCabalRevision ==. val (fromMaybe (Revision 0) mrev)))
    return (hc ^. HackageCabalId, hc ^. HackageCabalCabal, hc ^. HackageCabalTree)
-- | Like 'getHackageCabalByRev', but identifies the cabal file by its blob
-- key (sha + size) instead of a revision number.  The tree id component is
-- `Nothing` if the tarball for the hackage cabal file hasn't been loaded yet.
getHackageCabalByKey ::
     PackageIdentifierP
  -> BlobKey
  -> ReaderT SqlBackend (RIO env) (Maybe (HackageCabalId, Maybe TreeId))
getHackageCabalByKey (PackageIdentifierP pname ver) (BlobKey sha size) =
  selectApplyMaybe (\(Value hcid, Value mtid) -> (hcid, mtid)) $
  from $ \(hc `InnerJoin` pn `InnerJoin` v `InnerJoin` b) -> do
    on (hc ^. HackageCabalCabal ==. b ^. BlobId)
    on (hc ^. HackageCabalVersion ==. v ^. VersionId)
    on (hc ^. HackageCabalName ==. pn ^. PackageNameId)
    where_
      ((pn ^. PackageNameName ==. val pname) &&. (v ^. VersionVersion ==. val ver) &&.
       (b ^. BlobSha ==. val sha) &&.
       (b ^. BlobSize ==. val size))
    return (hc ^. HackageCabalId, hc ^. HackageCabalTree)
-- | Find the id of the row pairing a snapshot with a specific package
-- name/version.
getSnapshotPackageId ::
     SnapshotId
  -> PackageIdentifierP
  -> ReaderT SqlBackend (RIO env) (Maybe SnapshotPackageId)
getSnapshotPackageId snapshotId (PackageIdentifierP pname ver) =
  selectApplyMaybe unValue $
  from $ \(sp `InnerJoin` pn `InnerJoin` v) -> do
    on (sp ^. SnapshotPackageVersion ==. v ^. VersionId)
    on (sp ^. SnapshotPackagePackageName ==. pn ^. PackageNameId)
    where_
      ((sp ^. SnapshotPackageSnapshot ==. val snapshotId) &&.
       (pn ^. PackageNameName ==. val pname) &&.
       (v ^. VersionVersion ==. val ver))
    return (sp ^. SnapshotPackageId)
-- | Fetch the raw cabal file contents recorded for a package inside a
-- snapshot, when the snapshot package has a cabal blob at all.
getSnapshotPackageCabalBlob ::
     GetStackageDatabase env m => SnapshotId -> PackageNameP -> m (Maybe ByteString)
getSnapshotPackageCabalBlob snapshotId pname =
  run $ selectApplyMaybe unValue $
  from $ \(blob `InnerJoin` sp `InnerJoin` pn) -> do
    on (sp ^. SnapshotPackagePackageName ==. pn ^. PackageNameId)
    -- SnapshotPackageCabal is nullable, hence the 'just' wrapper on BlobId.
    on (just (blob ^. BlobId) ==. sp ^. SnapshotPackageCabal)
    where_
      ((sp ^. SnapshotPackageSnapshot ==. val snapshotId) &&.
       (pn ^. PackageNameName ==. val pname))
    return (blob ^. BlobContents)
-- | Idempotent and thread safe way of adding a new module.  Relies on
-- PostgreSQL's @ON CONFLICT DO NOTHING@ so concurrent inserts of the same
-- name cannot fail, then reads the id back.
insertModuleSafe :: ModuleNameP -> ReaderT SqlBackend (RIO env) ModuleNameId
insertModuleSafe modName = do
  rawExecute
    "INSERT INTO module_name(name) VALUES (?) ON CONFLICT DO NOTHING"
    [toPersistValue modName]
  mModId <-
    select $
    from $ \m -> do
      where_ (m ^. ModuleNameName ==. val modName)
      return (m ^. ModuleNameId)
  case mModId of
    [Value modId] -> return modId
    -- Unreachable unless the insert above failed silently.
    _ -> error $ "Module name: " ++ show modName ++ " should have been inserted by now"
-- | Flag one module of a snapshot package as having documentation.  Returns
-- the snapshot package id that was used (so callers can reuse it for the
-- remaining modules of the same package), or 'Nothing' when the package is
-- not part of the snapshot.
markModuleHasDocs ::
     SnapshotId
  -> PackageIdentifierP
  -> Maybe SnapshotPackageId
  -- ^ If we know ahead of time the SnapshotPackageId it will speed things up, since we don't have
  -- to look it up in the database.
  -> ModuleNameP
  -> ReaderT SqlBackend (RIO env) (Maybe SnapshotPackageId)
markModuleHasDocs snapshotId pid mSnapshotPackageId modName =
  maybe (getSnapshotPackageId snapshotId pid) (pure . Just) mSnapshotPackageId >>= \case
    Just snapshotPackageId -> do
      rawExecute
        "UPDATE snapshot_package_module \
        \SET has_docs = true \
        \FROM module_name \
        \WHERE module_name.id = snapshot_package_module.module \
        \AND module_name.name = ? \
        \AND snapshot_package_module.snapshot_package = ?"
        [toPersistValue modName, toPersistValue snapshotPackageId]
      return $ Just snapshotPackageId
    Nothing -> return Nothing
-- | We can either check or insert hoogle db for current hoogle version for current
-- snapshot. Returns True if current hoogle version was not in the database.
checkInsertSnapshotHoogleDb :: Bool -> SnapshotId -> RIO StackageCron Bool
checkInsertSnapshotHoogleDb shouldInsert snapshotId = do
  hoogleVersionId <- scHoogleVersionId <$> ask
  let sh = SnapshotHoogleDb snapshotId hoogleVersionId
  run $
    if shouldInsert
      then do
        -- Look up the printable version string purely for the log message.
        mhver <-
          (fmap unValue . listToMaybe) <$>
          select
            (from
               (\v -> do
                  where_ $ v ^. VersionId ==. val hoogleVersionId
                  pure (v ^. VersionVersion)))
        forM_ mhver $ \hver ->
          lift $
          logInfo $
          "Marking hoogle database for version " <> display hver <> " as available."
        -- insertUniqueEntity yields Nothing when the row already existed.
        isJust <$> P.insertUniqueEntity sh
      else isJust <$> P.checkUnique sh
| fpco/stackage-server | src/Stackage/Database/Query.hs | mit | 49,026 | 0 | 24 | 14,874 | 12,709 | 6,466 | 6,243 | 958 | 4 |
Right
(fromList
[((νx. x) + (μy. (νx. x) + y),fromList [
(κ₁ 〈〉,κ₁ 〈〉),
(κ₂ (α (κ₁ 〈〉)),κ₂ (α (κ₁ 〈〉)))
]
),
(μy. (νx. x) + y,fromList [
(α (κ₁ 〈〉),α (κ₁ 〈〉)),
(α (κ₂ (α (κ₁ 〈〉))),α (κ₂ (α (κ₁ 〈〉))))
]
),
((μy. (νx. x) + y) × (νy. (μy. (νx. x) + y) × y),fromList [
(o1Out,o2Out),
(o1Out,o3Out)
]
),
(νx. x,fromList [
(〈〉,〈〉)
]
),
(νy. (μy. (νx. x) + y) × y,fromList [
(o1,o2),
(o1,o3)
]
)
])
Just
(Left
(fromList
[(Sum oneT nat,
fromList
[(inj1 (f),inj1 (f)),
(inj2 (in (inj1 (f))),inj2 (in (inj1 (f))))
]),
(nat,
fromList
[(in (inj1 (f)),in (inj1 (f))),
(in (inj2 (in (inj1 (f)))),in (inj2 (in (inj1 (f)))))
]),
(Prod nat (stream nat),
fromList [(o1Out,o1Out)]),
(Gfp "x" (PT "x"),
fromList [(f,f)]),
(stream nat,
fromList [(o1,o1)])
]
)
)
Left
(fromList
[(nat × (stream nat),
fromList [ξ funky,ξ (π₂ f)]),
stream nat,
fromList [π₂ f,π₂ (ξ (π₂ f))])
]
)
Left
(fromList
[(nat × stream nat,
fromList [ξ (π₂ f)]),
(stream nat,
fromList [π₂ f,π₂ (ξ (π₂ f))])])
| hbasold/Sandbox | OTTTests/Tmp.hs | mit | 1,397 | 43 | 21 | 484 | 852 | 459 | 393 | -1 | -1 |
module Ch20.IdentityFold where
-- | A trivial one-value container, used to study 'Foldable' on the smallest
-- possible structure.
data Identity a =
  Identity a
  deriving (Eq, Show)
-- Folding an Identity never combines anything: each method simply feeds the
-- single wrapped value to the supplied function, making folding here about
-- consuming the contained value rather than summarising many.
instance Foldable Identity where
  foldMap f (Identity v) = f v
  foldr step acc (Identity v) = step v acc
  foldl step acc (Identity v) = step acc v
| andrewMacmurray/haskell-book-solutions | src/ch20/IdentityFold.hs | mit | 352 | 0 | 8 | 74 | 102 | 53 | 49 | 8 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-networkaclentry-portrange.html
module Stratosphere.ResourceProperties.EC2NetworkAclEntryPortRange where
import Stratosphere.ResourceImports
-- | Full data type definition for EC2NetworkAclEntryPortRange. See
-- 'ec2NetworkAclEntryPortRange' for a more convenient constructor.
data EC2NetworkAclEntryPortRange =
  EC2NetworkAclEntryPortRange
  { _eC2NetworkAclEntryPortRangeFrom :: Maybe (Val Integer)
  , _eC2NetworkAclEntryPortRangeTo :: Maybe (Val Integer)
  } deriving (Show, Eq)

-- Serialize to JSON; unset ('Nothing') fields are omitted entirely instead of
-- being emitted as null.
instance ToJSON EC2NetworkAclEntryPortRange where
  toJSON EC2NetworkAclEntryPortRange{..} =
    object $
    catMaybes
    [ fmap (("From",) . toJSON) _eC2NetworkAclEntryPortRangeFrom
    , fmap (("To",) . toJSON) _eC2NetworkAclEntryPortRangeTo
    ]

-- | Constructor for 'EC2NetworkAclEntryPortRange' containing required fields
-- as arguments.  Both ends of the range start out unset.
ec2NetworkAclEntryPortRange
  :: EC2NetworkAclEntryPortRange
ec2NetworkAclEntryPortRange =
  EC2NetworkAclEntryPortRange
  { _eC2NetworkAclEntryPortRangeFrom = Nothing
  , _eC2NetworkAclEntryPortRangeTo = Nothing
  }

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-networkaclentry-portrange.html#cfn-ec2-networkaclentry-portrange-from
ecnaeprFrom :: Lens' EC2NetworkAclEntryPortRange (Maybe (Val Integer))
ecnaeprFrom = lens _eC2NetworkAclEntryPortRangeFrom (\s a -> s { _eC2NetworkAclEntryPortRangeFrom = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-networkaclentry-portrange.html#cfn-ec2-networkaclentry-portrange-to
ecnaeprTo :: Lens' EC2NetworkAclEntryPortRange (Maybe (Val Integer))
ecnaeprTo = lens _eC2NetworkAclEntryPortRangeTo (\s a -> s { _eC2NetworkAclEntryPortRangeTo = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/EC2NetworkAclEntryPortRange.hs | mit | 1,924 | 0 | 12 | 205 | 264 | 151 | 113 | 27 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-budgets-budget-budgetdata.html
module Stratosphere.ResourceProperties.BudgetsBudgetBudgetData where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.BudgetsBudgetSpend
import Stratosphere.ResourceProperties.BudgetsBudgetCostTypes
import Stratosphere.ResourceProperties.BudgetsBudgetTimePeriod
-- | Full data type definition for BudgetsBudgetBudgetData. See
-- 'budgetsBudgetBudgetData' for a more convenient constructor.
data BudgetsBudgetBudgetData =
  BudgetsBudgetBudgetData
  { _budgetsBudgetBudgetDataBudgetLimit :: Maybe BudgetsBudgetSpend
  , _budgetsBudgetBudgetDataBudgetName :: Maybe (Val Text)
  , _budgetsBudgetBudgetDataBudgetType :: Val Text
  , _budgetsBudgetBudgetDataCostFilters :: Maybe Object
  , _budgetsBudgetBudgetDataCostTypes :: Maybe BudgetsBudgetCostTypes
  , _budgetsBudgetBudgetDataTimePeriod :: Maybe BudgetsBudgetTimePeriod
  , _budgetsBudgetBudgetDataTimeUnit :: Val Text
  } deriving (Show, Eq)

-- Serialize to JSON; optional ('Maybe') fields are dropped when unset while
-- the two required fields are always present.
instance ToJSON BudgetsBudgetBudgetData where
  toJSON BudgetsBudgetBudgetData{..} =
    object $
    catMaybes
    [ fmap (("BudgetLimit",) . toJSON) _budgetsBudgetBudgetDataBudgetLimit
    , fmap (("BudgetName",) . toJSON) _budgetsBudgetBudgetDataBudgetName
    , (Just . ("BudgetType",) . toJSON) _budgetsBudgetBudgetDataBudgetType
    , fmap (("CostFilters",) . toJSON) _budgetsBudgetBudgetDataCostFilters
    , fmap (("CostTypes",) . toJSON) _budgetsBudgetBudgetDataCostTypes
    , fmap (("TimePeriod",) . toJSON) _budgetsBudgetBudgetDataTimePeriod
    , (Just . ("TimeUnit",) . toJSON) _budgetsBudgetBudgetDataTimeUnit
    ]

-- | Constructor for 'BudgetsBudgetBudgetData' containing required fields as
-- arguments.  All optional fields start out unset.
budgetsBudgetBudgetData
  :: Val Text -- ^ 'bbbdBudgetType'
  -> Val Text -- ^ 'bbbdTimeUnit'
  -> BudgetsBudgetBudgetData
budgetsBudgetBudgetData budgetTypearg timeUnitarg =
  BudgetsBudgetBudgetData
  { _budgetsBudgetBudgetDataBudgetLimit = Nothing
  , _budgetsBudgetBudgetDataBudgetName = Nothing
  , _budgetsBudgetBudgetDataBudgetType = budgetTypearg
  , _budgetsBudgetBudgetDataCostFilters = Nothing
  , _budgetsBudgetBudgetDataCostTypes = Nothing
  , _budgetsBudgetBudgetDataTimePeriod = Nothing
  , _budgetsBudgetBudgetDataTimeUnit = timeUnitarg
  }

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-budgets-budget-budgetdata.html#cfn-budgets-budget-budgetdata-budgetlimit
bbbdBudgetLimit :: Lens' BudgetsBudgetBudgetData (Maybe BudgetsBudgetSpend)
bbbdBudgetLimit = lens _budgetsBudgetBudgetDataBudgetLimit (\s a -> s { _budgetsBudgetBudgetDataBudgetLimit = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-budgets-budget-budgetdata.html#cfn-budgets-budget-budgetdata-budgetname
bbbdBudgetName :: Lens' BudgetsBudgetBudgetData (Maybe (Val Text))
bbbdBudgetName = lens _budgetsBudgetBudgetDataBudgetName (\s a -> s { _budgetsBudgetBudgetDataBudgetName = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-budgets-budget-budgetdata.html#cfn-budgets-budget-budgetdata-budgettype
bbbdBudgetType :: Lens' BudgetsBudgetBudgetData (Val Text)
bbbdBudgetType = lens _budgetsBudgetBudgetDataBudgetType (\s a -> s { _budgetsBudgetBudgetDataBudgetType = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-budgets-budget-budgetdata.html#cfn-budgets-budget-budgetdata-costfilters
bbbdCostFilters :: Lens' BudgetsBudgetBudgetData (Maybe Object)
bbbdCostFilters = lens _budgetsBudgetBudgetDataCostFilters (\s a -> s { _budgetsBudgetBudgetDataCostFilters = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-budgets-budget-budgetdata.html#cfn-budgets-budget-budgetdata-costtypes
bbbdCostTypes :: Lens' BudgetsBudgetBudgetData (Maybe BudgetsBudgetCostTypes)
bbbdCostTypes = lens _budgetsBudgetBudgetDataCostTypes (\s a -> s { _budgetsBudgetBudgetDataCostTypes = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-budgets-budget-budgetdata.html#cfn-budgets-budget-budgetdata-timeperiod
bbbdTimePeriod :: Lens' BudgetsBudgetBudgetData (Maybe BudgetsBudgetTimePeriod)
bbbdTimePeriod = lens _budgetsBudgetBudgetDataTimePeriod (\s a -> s { _budgetsBudgetBudgetDataTimePeriod = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-budgets-budget-budgetdata.html#cfn-budgets-budget-budgetdata-timeunit
bbbdTimeUnit :: Lens' BudgetsBudgetBudgetData (Val Text)
bbbdTimeUnit = lens _budgetsBudgetBudgetDataTimeUnit (\s a -> s { _budgetsBudgetBudgetDataTimeUnit = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/BudgetsBudgetBudgetData.hs | mit | 4,793 | 0 | 13 | 457 | 690 | 394 | 296 | 57 | 1 |
module Main where
import NLP.GenI.Test
main = runTests
| kowey/GenI | geni-test/MainTest.hs | gpl-2.0 | 57 | 0 | 4 | 10 | 15 | 10 | 5 | 3 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module H7 where
import Test.QuickCheck
import Test.QuickCheck.All
-- | A list whose elements are either plain values or arbitrarily nested
-- sublists (ninety-nine Haskell problems, problem 7).
data NestedList a = Elem a | List [NestedList a]
  deriving (Show, Eq)
-- Random generator: with equal probability produce a leaf, an empty list, or
-- a list grown by prepending to a recursively generated nested list.
instance Arbitrary a => Arbitrary (NestedList a) where
  arbitrary = do
    n <- choose (1, 3) :: Gen Int
    case n of
      1 -> do x <- arbitrary
              return (Elem x)
      2 -> return (List [])
      3 -> do x <- arbitrary
              case x of
                Elem _  -> return (List [x])
                List xs -> do x' <- arbitrary
                              return (List (x':xs))
-- | Collapse a 'NestedList' into a flat list, preserving left-to-right order.
flatten :: NestedList a -> [a]
flatten (Elem x)  = [x]
flatten (List xs) = concatMap flatten xs
prop_flatten xs = flatten (List (map Elem $ flatten xs)) == flatten xs
-- Template Haskell marker so that 'quickCheckAll' below can see the prop_*
-- definitions declared above it in this module.
return []
runTests = $quickCheckAll

main = do
  runTests
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
module Property
(Property(..),
showPropertyName,
renameProperty,
PropertyType(..),
PropertyContent(..),
Formula(..),
Op(..),
Term(..),
PropResult(..),
resultAnd,
resultOr,
resultNot,
resultsAnd,
resultsOr)
where
import PetriNet
import Structure
-- | Arithmetic terms over variables of type @a@ with integer constants.
data Term a =
          Var a
        | Const Integer
        | Minus (Term a)
        | Term a :+: Term a
        | Term a :-: Term a
        | Term a :*: Term a
        deriving (Eq)
-- Render terms in infix notation; sums and differences are parenthesised,
-- products are not.
instance (Show a) => Show (Term a) where
        show (Var v) = show v
        show (Const n) = show n
        show (Minus t) = '-' : show t
        show (l :+: r) = concat ["(", show l, " + ", show r, ")"]
        show (l :-: r) = concat ["(", show l, " - ", show r, ")"]
        show (l :*: r) = concat [show l, " * ", show r]
-- Map a function over every variable occurring in a term.
instance Functor Term where
        fmap g (Var v) = Var (g v)
        fmap _ (Const n) = Const n
        fmap g (Minus t) = Minus (fmap g t)
        fmap g (l :+: r) = fmap g l :+: fmap g r
        fmap g (l :-: r) = fmap g l :-: fmap g r
        fmap g (l :*: r) = fmap g l :*: fmap g r
data Op = Gt | Ge | Eq | Ne | Le | Lt deriving (Eq)
-- Render operators with their usual mathematical (Unicode) symbols.
instance Show Op where
        show op = case op of
                Gt -> ">"
                Ge -> "≥"
                Eq -> "="
                Ne -> "≠"
                Le -> "≤"
                Lt -> "<"
-- | Propositional formulas whose atoms are linear inequations over 'Term's.
data Formula a =
          FTrue | FFalse
        | LinearInequation (Term a) Op (Term a)
        | Neg (Formula a)
        | Formula a :&: Formula a
        | Formula a :|: Formula a
        deriving (Eq)

-- Conjunction binds tighter than disjunction, mirroring the usual ∧/∨
-- precedence.
infixr 3 :&:
infixr 2 :|:
-- Render formulas with Unicode connectives, fully parenthesised.
instance (Show a) => Show (Formula a) where
        show FTrue = "true"
        show FFalse = "false"
        show (LinearInequation lhs op rhs) =
            concat [show lhs, " ", show op, " ", show rhs]
        show (Neg p) = concat ["¬", "(", show p, ")"]
        show (p :&: q) = concat ["(", show p, " ∧ ", show q, ")"]
        show (p :|: q) = concat ["(", show p, " ∨ ", show q, ")"]
-- Map a function over every variable occurring in a formula's atoms.
instance Functor Formula where
        fmap _ FTrue = FTrue
        fmap _ FFalse = FFalse
        fmap g (LinearInequation lhs op rhs) =
                LinearInequation (fmap g lhs) op (fmap g rhs)
        fmap g (Neg p) = Neg (fmap g p)
        fmap g (p :&: q) = fmap g p :&: fmap g q
        fmap g (p :|: q) = fmap g p :|: fmap g q
-- TODO: add functions to transform formula to CNF/DNF
-- | The three kinds of properties that can be checked.
data PropertyType = SafetyType
                  | LivenessType
                  | StructuralType
-- | A property's payload: a formula over places, a formula over transitions,
-- or a structural condition on the net.
data PropertyContent = Safety (Formula Place)
                     | Liveness (Formula Transition)
                     | Structural Structure
-- | Human-readable name of a property's kind.
showPropertyType :: PropertyContent -> String
showPropertyType content = case content of
        Safety _ -> "safety"
        Liveness _ -> "liveness"
        Structural _ -> "structural"
-- | Render the payload of a property.
showPropertyContent :: PropertyContent -> String
showPropertyContent content = case content of
        Safety f -> show f
        Liveness f -> show f
        Structural s -> show s
-- | A named property to be verified against a net.
data Property = Property {
        pname :: String, -- ^ user-supplied name (may be empty)
        pcont :: PropertyContent -- ^ what is actually checked
    }
-- Render as "<kind> property <name> { <content> }".
instance Show Property where
        show p = concat
                [ showPropertyName p
                , " { ", showPropertyContent (pcont p), " }" ]
-- | Apply a renaming function to every place/transition mentioned in a
-- property; structural properties are left untouched.
renameProperty :: (String -> String) -> Property -> Property
renameProperty rename (Property n content) = Property n (renameContent content)
    where
        renameContent (Safety f) = Safety (fmap (renamePlace rename) f)
        renameContent (Liveness f) = Liveness (fmap (renameTransition rename) f)
        renameContent (Structural s) = Structural s
-- | Kind of the property followed by its quoted name, when one was given.
showPropertyName :: Property -> String
showPropertyName p
        | null (pname p) = prefix
        | otherwise = prefix ++ " " ++ show (pname p)
    where prefix = showPropertyType (pcont p) ++ " property"
data PropResult = Satisfied | Unsatisfied | Unknown deriving (Show,Read,Eq)
-- | Three-valued conjunction: any 'Unsatisfied' operand wins, otherwise an
-- 'Unknown' operand taints the result.
resultAnd :: PropResult -> PropResult -> PropResult
resultAnd a b = case (a, b) of
        (Satisfied, _) -> b
        (Unsatisfied, _) -> Unsatisfied
        (_, Unsatisfied) -> Unsatisfied
        (Unknown, _) -> Unknown
-- | Three-valued disjunction: any 'Satisfied' operand wins, otherwise an
-- 'Unknown' operand taints the result.
resultOr :: PropResult -> PropResult -> PropResult
resultOr a b = case (a, b) of
        (Satisfied, _) -> Satisfied
        (_, Satisfied) -> Satisfied
        (Unsatisfied, _) -> b
        (Unknown, _) -> Unknown
-- | Negate a result.
--
-- NOTE(review): @resultNot Unsatisfied@ yields 'Unsatisfied', not
-- 'Satisfied'.  This may be deliberate (a refutation by a semi-decision
-- procedure need not prove the negation), but it cannot be confirmed from
-- this file alone — verify against the callers before changing it.
resultNot :: PropResult -> PropResult
resultNot Satisfied = Unsatisfied
resultNot Unsatisfied = Unsatisfied
resultNot Unknown = Unknown
-- | Conjunction over a list; the empty list is 'Satisfied'.
resultsAnd :: [PropResult] -> PropResult
resultsAnd rs = foldr resultAnd Satisfied rs
-- | Disjunction over a list; the empty list is 'Unsatisfied'.
resultsOr :: [PropResult] -> PropResult
resultsOr rs = foldr resultOr Unsatisfied rs
| cryptica/slapnet | src/Property.hs | gpl-3.0 | 4,478 | 0 | 11 | 1,342 | 1,670 | 860 | 810 | 122 | 2 |
module FPIS where
-- | Zero-based Fibonacci: @fib 1 == 0@, @fib 2 == 1@ and each later value is
-- the sum of its two predecessors.  Runs in O(n) via an accumulator loop.
--
-- The previous version looped forever for @n < 1@ (the counter could never
-- reach @n@); that is now an explicit error.
fib :: Int -> Int
fib n
  | n < 1     = error "fib: argument must be >= 1"
  | n == 1    = 0
  | n == 2    = 1
  | otherwise = go 3 0 1
  where
    -- i: index being computed, l/m: values at i-2 and i-1
    go i l m = if i == n then m' else go (succ i) m m'
      where m' = l + m
-- | True when every adjacent pair of elements satisfies the given relation.
-- Empty and singleton lists are trivially sorted.
isSorted :: [a] -> (a -> a -> Bool) -> Bool
isSorted xs rel = and (zipWith rel xs (drop 1 xs))
-- | Demo driver: print a Fibonacci number and the result of a sortedness
-- check.
main :: IO ()
main = do
  print (fib 9)
  print (isSorted [1,2,3,5,4] (<))
| Szczyp/fpis | Main.hs | gpl-3.0 | 379 | 0 | 10 | 114 | 248 | 130 | 118 | 15 | 2 |
import System.Console.GetOpt
import Control.Monad
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import qualified Data.ByteString.Lazy.UTF8 as BUL
import Hellnet
import Hellnet.Crypto
import Hellnet.ExternalChunks
import Hellnet.Meta
import Hellnet.Network
import Hellnet.Storage
import Hellnet.Utils
import Data.Char
import Data.Maybe
import Data.Digest.SHA512 as SHA512
import Data.List
import System.Directory
import System.Environment
import System.FilePath
import System.IO
import Text.HJson as JSON
import Text.Printf
-- | Command-line options: whether to delete bad entries, and which of the
-- three storage areas (chunk store, external chunks, metas) to check.
data Opts = Opts {
        fixErrors :: Bool
        , checkStore :: Bool
        , checkExternal :: Bool
        , checkMeta :: Bool
        }
-- | Defaults: check everything, fix nothing.  (Type signature added; the
-- binding previously relied on inference.)
defaultOptions :: Opts
defaultOptions = Opts {
        fixErrors = False
        , checkStore = True
        , checkExternal = True
        , checkMeta = True
        }
-- | Turn off every check, so a selective flag (-s/-e/-m) can re-enable
-- exactly one area.  (Type signature added.)
disableAllChecks :: Opts -> Opts
disableAllChecks o = o {
        checkStore = False
        , checkExternal = False
        , checkMeta = False
        }
-- | getopt descriptors.  Each selective flag first disables all checks and
-- then re-enables its own area, so which areas end up enabled when several
-- selective flags are combined depends on the fold order inside
-- processOptions (defined elsewhere) — presumably the last flag wins; verify
-- there.
options = [
        Option ['f'] ["fix-errors"]
                (NoArg (\o -> o {fixErrors = True})) "Fix errors",
        Option ['s'] ["store"]
                (NoArg (\o -> (disableAllChecks o) {checkStore = True})) "Check only store",
        Option ['e'] ["external"]
                (NoArg (\o -> (disableAllChecks o) {checkExternal = True})) "Check only external chunks",
        Option ['m'] ["meta"]
                (NoArg (\o -> (disableAllChecks o) {checkMeta = True})) "Check only metas"
        ]
filterDots = return . filter (not . (`elem` [".", ".."]))
nprintfn s = printf (concat ["\n", s, "\n"])
-- | Verify every chunk file inside one store subdirectory @d@.
checkHashesInDirectory opts d = do
        putStr $ d ++ "..."
        dfp <- (toFullPath $ "store" </> d)
        dirConts <- getDirectoryContents dfp >>= filterDots
        mapM (checkChunk opts) $ zip (repeat d) dirConts
-- | Check a single stored chunk: it must not exceed 'chunkSize' and its
-- content hash must equal the hash encoded in its path (directory name ++
-- file name, crockford-decoded).  Offending files are removed when
-- --fix-errors is on; unreadable files are skipped.
checkChunk opts (d, c) = do
        let hsh = decrockford (d++c)
        fp <- toFullPath $ joinPath ["store", d, c]
        datM <- getFile fp
        case datM of
                Just dat -> if BSL.length dat > fromInteger chunkSize then do
                                nprintfn "Chunk %s%s is too big, removing" d c
                                when (fixErrors opts) $ removeFile fp
                        else if Hellnet.Crypto.hash dat == hsh then
                                return ()
                        else do
                                nprintfn "Hash mismatch in %s%s, removing" d c
                                when (fixErrors opts) $ removeFile fp
                Nothing -> return ()
-- | Verify every external-chunk mapping inside one chunkmap subdirectory @d@.
checkMappingsInDirectory opts d = do
        putStr $ d ++ "..."
        dfp <- (toFullPath $ "chunkmap" </> d)
        dirConts <- getDirectoryContents dfp >>= filterDots
        mapM (checkMapping opts) $ zip (repeat d) dirConts
-- | Check one external-chunk mapping: parse the stored JSON reference, fetch
-- the external chunk and compare its hash with the hash encoded in the path.
-- Broken references are removed when --fix-errors is on.
checkMapping opts (d, c) = do
        let hsh = decrockford (d++c)
        fp <- toFullPath $ joinPath ["chunkmap", d, c]
        datM <- getFile fp
        case datM of
                Just dat -> case fmap (fromJson) $ JSON.fromString $ BUL.toString dat of
                        Right (Just fl) -> do
                                ch <- getExternalChunk fl
                                case ch of
                                        Just ch -> do
                                                when (Hellnet.Crypto.hash ch /= hsh) $ do
                                                        nprintfn "Hash mismatch in %s%s, removing" d c
                                                        when (fixErrors opts) $ removeFile fp
                                        Nothing -> do
                                                nprintfn "Failed to fetch external chunk %s%s, removing" d c
                                                when (fixErrors opts) $ removeFile fp
                        -- NB: 'otherwise' here is just a wildcard pattern
                        -- binding, not Prelude.otherwise.
                        otherwise -> do
                                nprintfn "Failed to read external chunk reference %s%s, removing" d c
                                when (fixErrors opts) $ removeFile fp
                Nothing -> return ()
-- | Verify every meta inside one meta subdirectory @d@.
checkMetasInDirectory opts d = do
        putStr $ d ++ "..."
        dfp <- (toFullPath $ "meta" </> d)
        dirConts <- getDirectoryContents dfp >>= filterDots
        mapM (checkMetas opts) $ zip (repeat d) dirConts
-- | Check one stored meta: it must parse and its signature must verify.
-- When the key needed for verification cannot be fetched the meta is merely
-- skipped; unparsable or badly-signed metas are removed under --fix-errors.
checkMetas opts (d, c) = do
        fp <- toFullPath $ joinPath ["meta", d, c]
        datM <- getFile fp
        case datM of
                Just dat -> do
                        case Hellnet.Meta.fromByteString dat of
                                Just meta -> do
                                        result <- verifyMeta meta
                                        case result of
                                                Just True -> return ()
                                                Just False -> do
                                                        nprintfn "Meta %s/%s signature check failed, removing" d c
                                                        when (fixErrors opts) $ removeFile fp
                                                Nothing -> do
                                                        nprintfn "Failed to get key for %s, skipping" d
                                                        return ()
                                Nothing -> do
                                        -- NOTE(review): plain printf here (no surrounding
                                        -- newlines) is inconsistent with nprintfn used above.
                                        printf "Couldn't parse meta %s/%s, removing" d c
                                        when (fixErrors opts) $ removeFile fp
                Nothing -> return ()
-- | Walk the hellnet store, external chunk map and metas, reporting (and,
-- with -f, deleting) corrupt entries.
main = do
        -- Unbuffered so the per-directory progress dots appear immediately.
        hSetBuffering stdout NoBuffering
        args <- getArgs
        -- NOTE(review): getopt parse errors (errs) and positional arguments
        -- (argz) are silently ignored.
        let (optz, argz, errs) = getOpt Permute options args
        let opts = processOptions defaultOptions optz
        when (checkStore opts) $ do
                putStr "Checking store: "
                storePath <- toFullPath "store"
                storeConts <- getDirectoryContents storePath >>= filterDots
                mapM (checkHashesInDirectory opts) $ sort storeConts
                putStrLn "done"
        when (checkExternal opts) $ do
                putStr "Checking external chunks: "
                chunkMapPath <- toFullPath "chunkmap"
                cmConts <- getDirectoryContents chunkMapPath >>= filterDots
                mapM (checkMappingsInDirectory opts) $ sort cmConts
                putStrLn "done"
        when (checkMeta opts) $ do
                putStr "Checking meta: "
                metasPath <- toFullPath "meta"
                mConts <- getDirectoryContents metasPath >>= filterDots
                mapM (checkMetasInDirectory opts) $ sort mConts
                putStrLn "done"
-- | Umbrella module that simply re-exports the mapper submodules.
module Mudblood.Mapper
    ( module Mudblood.Mapper.Map
    , module Mudblood.Mapper.Walk
    ) where

import Mudblood.Mapper.Map
import Mudblood.Mapper.Walk
| talanis85/mudblood | src/Mudblood/Mapper.hs | gpl-3.0 | 158 | 0 | 5 | 27 | 34 | 23 | 11 | 5 | 0 |
-- |
-- Module : BlastItWithPiss.CaptchaServer
-- Copyright : 2013 kudah
-- License : GPL-3
-- Maintainer : kudah <kudahkukarek@gmail.com>
-- Stability : experimental
-- Portability : ghc-only
--
-- 1. Solve one captcha per agent.
-- 2. Measure the solving speed.
-- 3. Keep the speed measurement continuously updated.
-- 4. Adjust the captcha limit of the store according to the speed:
--    (solvingSpeed / timeoutSeconds) * numProxies
-- 5. The limit may not be < numProxies.
-- 6. If fewer captchas are solved than the limit, start more solver threads;
--    if more, wait.
{-# OPTIONS -Wall #-}
module BlastItWithPiss.CaptchaServer
( primitiveCaptchaServer
, presolvingCaptchaServer
, CaptchaCache
, newCaptchaCache
, CaptchaKeysStore
, newCaptchaKeysStore
) where
import Import
import BlastItWithPiss.Types
import BlastItWithPiss.Captcha
import BlastItWithPiss.Image
import BlastItWithPiss.Blast
import BlastItWithPiss.Board
import BlastItWithPiss.MonadChoice
import Control.Concurrent (forkIO)
import Control.Concurrent.Lifted
import Control.Concurrent.STM
import Control.Concurrent.STM.TLQueue
import Control.Monad.Trans.Resource
import Control.Monad.Trans.Reader
-- | Old captcha solver with a minor difference: it will use whatever captchas
-- are stored in the 'CaptchaCache' first, before solving anything by itself,
-- so that you can switch freely between presolving and primitive captcha
-- solvers without fear that your previously solved captcha will be wasted.
--
-- WARNING:
-- All the quirks of the old solver are intact, e.g. it won't bypass cloudflare
--
-- NOTE FIXME TODO HACK WARNING: finalization of the old captcha server is done
-- AFTER this captcha server is constructed. If the old captcha server sets
-- startSignal to False, AFTER this server is constructed, then startSignal will
-- never be set to True, and the wipe won't start. Thankly, captcha servers and
-- GtkBlast currently only set startsignal to True; it's set to False only
-- on start and end of the wipe, captcha servers are restarted afterwards.
primitiveCaptchaServer
    :: (MonadChoice m, MonadResource m')
    => CaptchaCache m m'
    -> TVar Bool
    -> IO (CaptchaServer m m')
primitiveCaptchaServer tlq start = do
    -- This server needs no warm-up: signal readiness immediately.
    atomically $ writeTVar start True
    return $ CaptchaServer captchaServer
  where
    captchaServer thread = do
        BlastLogData { bBoard } <- ask
        fix $ \recurse -> do
            -- Drain captchas left in the shared cache before solving afresh,
            -- so presolved answers from a previous server are not wasted.
            mx <- liftIO $ atomically $ Just <$> readTLQueue tlq <|> pure Nothing
            case mx of
              Just x -> return $ Just x
              Nothing -> do
                blastLog "Fetching challenge"
                mbbytes <- blast $ getNewCaptcha bBoard thread ""
                case mbbytes of
                  -- Left: the server accepted without a challenge.
                  Left f -> do
                    blastLog $ "Got presolved captcha " ++ show f
                        ++ ". Either server said \"OK\" or we're \"VIP\"."
                    return (Just (CAWR f (\_ -> return ())))
                  Right (chKey :: CurrentSsachCaptchaType) -> do
                    blastLog "Downloading captcha"
                    (bytes, ct) <- blast $ getCaptchaImage chKey
                    cconf <- blast $ getCaptchaConf chKey
                    fname <- mkImageFileName ct
                    blastLog "Got captcha image, sending captcha mvar"
                    -- Hand the image to the external solver and block until
                    -- it replies through the MVar.
                    m <- newEmptyMVar
                    blastCaptcha $
                        CaptchaRequest
                        {captchaType = CaptchaPosting
                        ,captchaBytes = bytes
                        ,captchaSend = (putMVar m $!!)
                        ,captchaConf = cconf
                        ,captchaFilename = fname
                        }
                    blastLog "blocking on captcha mvar"
                    answer <- takeMVar m
                    blastLog $ "got captcha mvar, answer is... " ++ show answer
                    case answer of
                      Answer string report -> do
                        f <- blast $ applyCaptcha chKey string
                        return $ Just $ CAWR f $ liftIO . report
                      ReloadCaptcha -> recurse
                      AbortCaptcha -> return Nothing -- no reason to keep this
-- | Running arithmetic mean together with the number of samples folded in.
data Mean a
    = Mean
    {getMean :: a
    ,meanWeight :: a
    }
-- | Fold one more sample into a running mean using the incremental update
-- m' = m + (x - m)/n'.
addMean :: (Fractional a, Ord a) => a -> Mean a -> Mean a
addMean sample (Mean avg count) = Mean newAvg newCount
  where
    newCount = count + 1
    newAvg = avg + ((sample - avg) / newCount)
-- | Queue of captchas that are already solved and ready for use.
type CaptchaCache m m'
    = TLQueue (CaptchaAnswerWithReport m m')

{-# INLINABLE newCaptchaCache #-}
newCaptchaCache :: MonadIO m => m (CaptchaCache m0 m1)
newCaptchaCache = liftIO $ atomically newTLQueue
-- | Queue of deferred captcha-key fetch actions.
type CaptchaKeysStore m m'
    -- yandex captchas timeout within an hour or so.
    = TLQueue (IO (Maybe (CaptchaAnswerWithReport m m')))

{-# INLINABLE newCaptchaKeysStore #-}
newCaptchaKeysStore :: MonadIO m => m (CaptchaKeysStore m0 m1)
newCaptchaKeysStore = liftIO $ atomically newTLQueue
-- | Presolves captcha to sustain maximum posting speed.
-- Stores solved captcha in the supplied 'CaptchaCache'.
--
-- WARNING:
-- Solver threads created by the server won't get killed in the finalizer!
--
-- WARNING:
-- Better not keep it running while nothing happens.
-- (Will spawn more solvers when unneeded?)
-- Recreate captcha server each time you restart wipe.
presolvingCaptchaServer
    :: (MonadChoice m, MonadResource m')
    => CaptchaCache m m'
    -> CaptchaKeysStore m m'
    -- | 'writeLog'
    -> (Text -> IO ())
    -- | [(board, activeAgents, getCurrentPostTimeout)]
    -> [(Board, TVar Int, STM NominalDiffTime)]
    -- | Switch to activate wipe.
    --
    -- Will flip when CaptchaCache gets filled up with enough captcha to
    -- sustain maximum posting speed.
    -> TVar Bool
    -> IO (CaptchaServer m m', IO (), STM PresolverState)
presolvingCaptchaServer tlq ckstore logIO boardsWithData start = do
    aLog "[presolve] Starting presolvingCaptchaServer"
    -- number of solver threads currently in flight
    solversOnline <- newTVarIO 0
    -- running estimate of how many captchas we should keep in stock
    maxLimit <- newTVarIO $ Mean 0 0
    let accessPresolverState = do
          presolverStored <- lengthTLQueue tlq
          presolverEnRoute <- readTVar solversOnline
          presolverMaxLimit <- getMean <$> readTVar maxLimit
          presolverKeysIn <- lengthTLQueue ckstore
          return PresolverState{..}
    -- flipped when the background 'go' loop dies, so waiters don't block forever
    deadswitch <- newTVarIO False
    tid <- forkIO $ go solversOnline maxLimit accessPresolverState
             `finally` atomically (writeTVar deadswitch True)
    return
      ( CaptchaServer $ \mthread -> do
          blastLogControl $ \runInIO -> do
            -- while we wait for captcha, grab some captcha keys.
            -- we flip a switch instead of throwing an exception
            -- so that at least one captcha key is acquired every time
            liftIO $ bracket
              (do dswitch <- newTVarIO False
                  _ <- forkIO $ runInIO $
                    (fix $ \recurse -> do
                        timeToDie <- liftIO $ readTVarIO dswitch
                        started <- liftIO $ readTVarIO start
                        if timeToDie && started
                          then return ()
                          else do
                            (do ck <- getCaptchaKey mthread accessPresolverState
                                liftIO $ atomically $ writeTLQueue ckstore ck
                              ) `catch` (\(e::HttpException) -> do
                                blastLog $ "[presolve] http exception: " ++ show e
                                case e of
                                  StatusCodeException Status{statusCode = 503} _ _
                                    -> do blastLog $
                                            "[presolve] 503. Waiting 5 seconds."
                                          liftIO $ threadDelay (5 & millions)
                                  StatusCodeException Status{statusCode = 403} _ _
                                    -> do blastLog $
                                            "[presolve] 403. Waiting 20 seconds."
                                          liftIO $ threadDelay (20 & millions)
                                  _ -> do blastLog $
                                            "[presolve] other. Waiting 2 seconds."
                                          liftIO $ threadDelay (2 & millions)
                              )
                            recurse
                    ) `catch` \(e::SomeException) ->
                        blastLog $ "[presolve] killed. exception was: " ++ show e
                  return dswitch
              )
              (\dswitch -> atomically $ writeTVar dswitch True)
              -- wait for a solved captcha, or give up when the server is dead
              (\_ -> atomically $ do
                  (Just <$> readTLQueue tlq)
                    <|> (Nothing <$ (check =<< readTVar deadswitch)))
      ,do killThread tid
          aLog "[presolve] shutting DOWN"
      , accessPresolverState
      )
  where
    -- Fetch a fresh captcha challenge; returns an IO action that, when run,
    -- dispatches the challenge to the solver and blocks until answered.
    getCaptchaKey
        :: (MonadChoice m, MonadResource m')
        => Maybe Int
        -> STM PresolverState
        -> BlastLog (IO (Maybe (CaptchaAnswerWithReport m m')))
    getCaptchaKey mthread accessPresolverState = do
        BlastLogData { bBoard, bCaptchaOut } <- ask
        blastLog "[presolve] Fetching challenge"
        mbbytes <- blast $ getNewCaptcha bBoard mthread ""
        case mbbytes of
          Left f -> do
            blastLog $ "[presolve] Got presolved captcha " ++ show f
                ++ ". Either server said \"OK\" or we're \"VIP\"."
            return $
              return $ Just $ CAWR f $ \_ -> return ()
          Right (chKey :: CurrentSsachCaptchaType) -> do
            blastLog "[presolve] Downloading captcha"
            (bytes, ct) <- blast $ getCaptchaImage chKey
            cconf <- blast $ getCaptchaConf chKey
            fname <- mkImageFileName ct
            blastLog "[presolve] Got captcha image"
            blastLogControl $ \liftToIO -> return $ liftToIO $ do
              m <- newEmptyMVar
              liftIO $ bCaptchaOut PresolverCaptcha $
                CaptchaRequest
                  {captchaType = CaptchaPosting
                  ,captchaBytes = bytes
                  ,captchaSend = (putMVar m $!!)
                  ,captchaConf = cconf
                  ,captchaFilename = fname
                  }
              blastLog "[presolve] blocking on captcha mvar"
              answer <- takeMVar m
              blastLog $
                "[presolve] got captcha mvar, answer is... " ++ show answer
              case answer of
                Answer string report -> do
                  f <- blast $ applyCaptcha chKey string
                  return $ Just $ CAWR f $ liftIO . report
                ReloadCaptcha ->
                  liftIO =<< getCaptchaKey mthread accessPresolverState
                AbortCaptcha ->
                  return Nothing
    -- | num agents + average post timeout
    getBoardData :: STM (Int, NominalDiffTime)
    getBoardData = second getMean <$> foldM aux (0, Mean 0 0) boardsWithData
      where
        aux :: (Int, Mean NominalDiffTime)
            -> (Board, TVar Int, STM NominalDiffTime)
            -> STM (Int, Mean NominalDiffTime)
        aux (!agents, !ptimeout) (_, active, getPostTimeout) = do
            a <- readTVar active
            t <- getPostTimeout
            return
              ( agents + a
              , foldl' (flip addMean) ptimeout (replicate a t) )
    -- Control loop: decides how many solver threads to spawn and keeps the
    -- stock-limit estimate up to date after every solved captcha.
    go :: TVar Int -> TVar (Mean Rational) -> STM PresolverState -> IO ()
    go solversOnline tmaxLimit accessPresolverState =
        forever $ do
          md <- atomically (do
              p@PresolverState{..} <- do
                  p <- accessPresolverState
                  (allAgents, _) <- getBoardData
                  when (allAgents <= 0) retry
                  if presolverMaxLimit p < fromIntegral allAgents
                    then do
                      let nagents = realToFrac allAgents
                      writeTVar tmaxLimit (Mean nagents 0)
                      return p{presolverMaxLimit = nagents}
                    else
                      return p
              let rgot = fromIntegral presolverStored
                       + fromIntegral presolverEnRoute
              if rgot < presolverMaxLimit
                then do
                  let need = ceiling $ presolverMaxLimit - rgot
                  return $ Just (need, p)
                else do
                  started <- readTVar start
                  -- we've reached hard limit, time to start wipe
                  if not started && presolverEnRoute == 0
                    then do
                      writeTVar start True
                      return Nothing
                    else retry
              )
          whenJust md $ \(need, ps) -> do
            aLog $
              "[presolve] Launching threads to solve " ++ show need
              ++ " captchas while there are " ++ show (presolverStored ps)
              ++ " captchas stored and " ++ show (presolverEnRoute ps)
              ++ " captchas currently in solving."
              ++ " Current limit is "
              ++ show (realToFrac (presolverMaxLimit ps) :: Double)
              ++ ". There are " ++ show (presolverKeysIn ps)
              ++ " captcha keys left."
            replicateM_ need $ forkIO $ bracket_
              (atomically $ modifyTVar' solversOnline (\x -> x + 1))
              (atomically $ modifyTVar' solversOnline (\x -> x - 1))
              $ handle (\(ex::SomeException) ->
                  aLog $ "[presolve] solver thread died due to uncaught exception: "
                         ++ show ex) $ do
                before <- getCurrentTime
                -- x <- _retrieveAndSolve_'presolve' accessPresolverState
                x <- join $ atomically $ readTLQueue ckstore
                after <- getCurrentTime
                let timeTook = diffUTCTime after before
                -- We don't discard refusals
                -- (we should probably sum time for failed
                -- captchas with the next succesful captcha)
                whenJust x $ atomically . writeTLQueue tlq
                atomically $ do
                  (allAgents, averagePostTimeout) <- getBoardData
                  modifyTVar' tmaxLimit $ \oldMean ->
                    let
                      nagents :: Fractional a => a
                      nagents = realToFrac allAgents
                      -- limit weight to 1.5 num agents
                      lmean =
                        if meanWeight oldMean >= nagents * 1.5
                          then oldMean{meanWeight = nagents * 1.5 - 1}
                          else oldMean
                      nmean =
                        let rounds = if averagePostTimeout /= 0
                                       then timeTook / averagePostTimeout
                                       else 0.000001
                        in addMean
                             (realToFrac $ rounds * nagents)
                             lmean
                    in
                      if getMean nmean < nagents
                        then
                          -- make sure there at least as many captchas as the agents
                          nmean{getMean = nagents}
                        else
                          nmean
    {-# INLINE aLog #-}
    aLog :: MonadIO m => Text -> m ()
    aLog = liftIO . logIO
| exbb2/BlastItWithPiss | src/BlastItWithPiss/CaptchaServer.hs | gpl-3.0 | 15,055 | 0 | 46 | 5,455 | 2,997 | 1,496 | 1,501 | -1 | -1 |
module Test.Parser where
import Text.ParserCombinators.Parsec as P
import Text.ParserCombinators.Parsec.Char as Ch
import Text.ParserCombinators.Parsec.Combinator as Co
import Text.ParserCombinators.Parsec.Prim as Pr
import Text.ParserCombinators.Parsec.Token as T
import Parsecy
import Types
{-
cmdMap = [("comm", pzComm)]
pzComm :: Parser Record
pzComm = do
let c = Types.Comm "Sym" True "Type" "Unit" "Epic" "Name" Nothing Nothing
return $ RecComm c
-}
-- | Toy sum type used to experiment with lists of mixed constructors.
data Spam = Hammy Int | Jammy String deriving Show

-- | Sample list holding one value of each 'Spam' constructor.
s1 = [Hammy 2, Jammy "hi"]
-- | Toy type mixing nullary constructors with a unary one.
data Goop = Comm | Return | Gloop Bool deriving Show

-- | Sample 'Goop' value.
g1 = Gloop True
-- Run 'dataParser' on an inline sample (exercises comment handling after '#').
p1 = parse dataParser "" " foo \n bar # fudge guz \nyum"
-- | Parse the saved data file with 'dataParser' and print the result.
--
-- NOTE(review): the input path is hard-coded to one user's home
-- directory; parameterise it or read it from configuration before
-- reusing this outside local testing.
p2 :: IO ()
p2 = do
  text <- readFile "/home/mcarter/.sifi/data.txt"
  let res = parse dataParser "data.txt" text
  print res
-- parse
| blippy/sifi | test/Parsecy.hs | gpl-3.0 | 808 | 0 | 10 | 148 | 167 | 100 | 67 | 17 | 1 |
import SimpleGraphics
-- | Classic binary fractal tree of depth @n@: a trunk whose stroke width
-- and length grow with @n@, topped by two recursive subtrees rotated by
-- ±@angle@.  Depth 0 draws a short green "leaf" segment.
tree1 :: Int -> Graphic
tree1 0 = withStroke Green 1 $ line (0,0) (0,3)
tree1 n = withStroke Black (fromIntegral n) (line (0,0) (0,len))
    `overGraphic` translate (0,len) (rotate angle subTree)
    `overGraphic` translate (0,len) (rotate (-angle) subTree) where
  angle = 3 + 6 * fromIntegral n  -- branch spread widens with depth
  len = 7 * fromIntegral n        -- trunk segment length scales with depth
  subTree = tree1 (n-1)
-- | Organic-looking tree built from two mutually recursive branch
-- drawers: @t1@ leans one way, @t2@ mirrors it.  @telo@ ("body") is the
-- quadrilateral trunk segment; each level shrinks and rotates its children.
tree2 :: Int -> Graphic
tree2 = t1 where
  t1 0 = emptyGraphic
  t1 n = telo
      `overGraphic` (translate (-1,30) . scale 0.7 . rotate 45 $ t1 (n-1))
      `overGraphic` (translate (0,33) . scale 0.55 . rotate (-50) $ t2 (n-1))
    where
      telo = polygon [(0,0),(0,40),(9,45),(10,10)]
  t2 0 = emptyGraphic
  t2 n = telo
      `overGraphic` (translate (0,30) . scale 0.6 . rotate 50 $ t1 (n-1))
      `overGraphic` (translate (1,32) . scale 0.7 . rotate (-45) $ t2 (n-1))
    where
      -- mirrored trunk shape (x-coordinates negated relative to t1's telo)
      telo = polygon [(0,0),(0,40),(-9,45),(-10,10)]
-- | A little flower ("kvietok"): a pale-yellow centre drawn over five
-- pink petals.  The petal positions are precomputed points evenly spaced
-- on a circle of radius 25 (72° apart, i.e. 25·(cos k·72°, sin k·72°)).
kvietok :: Graphic
kvietok = withFill (RGB 255 255 210) (circle 15)
    `overGraphic` withFill (RGB 255 200 200) (overGraphics lupene) where
  lupene = zipWith translate pozicie . repeat $ circle 20
  pozicie =
    [ (25,0)
    , (7.72542485937369,23.7764129073788)
    , (-20.2254248593737,14.6946313073118)
    , (-20.2254248593737,-14.6946313073118)
    , (7.72542485937368,-23.7764129073788)
    ]
-- | Flowering tree: the recursion descends by 1 on one branch and by 2
-- on the other, so the @n-2@ branch can bottom out at depth @-1@, which
-- draws a 'kvietok' flower; depth 0 draws nothing.
tree3 :: Int -> Graphic
tree3 (-1) = kvietok
tree3 0 = emptyGraphic
tree3 n = (translate (-1,40) . scale 0.7 . rotate 45 . translate (-10,-10) $ tree3 (n-1))
    `overGraphic` (translate (0,40) . scale 0.6 . rotate (-90) $ tree3 (n-2))
    `overGraphic` telo
  where
    telo = polygon [(0,0),(0,40),(7,36),(10,10)]
-- | Render the three fractal trees to SVG files, announcing each file
-- on stdout before it is written.
main :: IO ()
main = mapM_ renderOne images
  where
    -- announce then write a single named graphic
    renderOne (path, g) = do
      putStrLn ("Writing " ++ path)
      writeImage path g
    images =
      [ ("frac-tree1.svg", translate (300,400) . scale (-2) . withStroke Black 1 $ tree1 7)
      , ("frac-tree2.svg", translate (200,200) . scale (-2) $ tree2 8)
      , ("frac-tree3.svg", translate (300,450) . scale (-5) . withFill (RGB 70 30 10) $ tree3 11)
      ]
| xkollar/handy-haskell | svg-graphic-teach/examples/frac-tree.hs | gpl-3.0 | 2,127 | 0 | 15 | 507 | 1,043 | 560 | 483 | 46 | 3 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ParallelListComp #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# OPTIONS_HADDOCK prune #-}
{- |
Module : Tct.Method.Poly.NaturalPI
Copyright : (c) Martin Avanzini <martin.avanzini@uibk.ac.at>,
Georg Moser <georg.moser@uibk.ac.at>,
Andreas Schnabl <andreas.schnabl@uibk.ac.at>
License : LGPL (see COPYING)
Maintainer : Andreas Schnabl <andreas.schnabl@uibk.ac.at>
Stability : unstable
Portability : unportable
This module defines the processor for polynomial interpretations
with natural coefficients.
-}
module Tct.Method.Poly.NaturalPI
(
PolynomialOrder (..)
, NaturalPI
, polyProcessor
, poly
, simplePolynomial
, linearPolynomial
, stronglyLinearPolynomial
, simpleMixedPolynomial
, quadraticPolynomial
, customPolynomial
)
where
import Prelude hiding ((&&),not)
import Control.Monad (liftM,foldM)
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Typeable
import Text.PrettyPrint.HughesPJ
import Qlogic.Boolean
import Qlogic.Diophantine
import qualified Qlogic.NatSat as N
import qualified Qlogic.SatSolver as SatSolver
import Qlogic.Semiring
import qualified Qlogic.Semiring as SR
import Qlogic.PropositionalFormula
import Qlogic.SatSolver ((:&:)(..), SatSolver (..))
import qualified Qlogic.MemoizedFormula as MFormula
import qualified Termlib.FunctionSymbol as F
import qualified Termlib.Problem as Prob
import qualified Termlib.Rule as R
import qualified Termlib.Trs as Trs
import qualified Termlib.Term as T
import qualified Termlib.Types as Tpe
import Termlib.Types ((:::)(..))
import Termlib.Utils
import qualified Termlib.Variable as V
import qualified Termlib.ArgumentFiltering as AF
import qualified Tct.Method.RuleSelector as RS
import qualified Tct.Certificate as C
import Tct.Encoding.AbstractInterpretation
import Tct.Encoding.Natring ()
import Tct.Encoding.Polynomial as Poly
import Tct.Encoding.UsablePositions hiding (empty, toXml)
import qualified Tct.Encoding.UsableRules as UREnc
import qualified Tct.Encoding.ArgumentFiltering as AFEnc
import Tct.Method.Poly.PolynomialInterpretation
import Tct.Processor (Answer(..))
import Tct.Processor.Args hiding (toXml)
import qualified Tct.Processor.Args as A
import Tct.Processor.Args.Instances
import Tct.Processor.Orderings
import Tct.Utils.PPrint (indent)
import qualified Tct.Utils.Xml as Xml
import qualified Tct.Processor as P
import qualified Tct.Processor.Standard as S
-- | Proof object produced by the 'NaturalPI' processor: a concrete
-- polynomial interpretation together with everything needed to print
-- and certify it.
data PolynomialOrder =
  PolynomialOrder { ordInter :: PolyInter Int              -- ^ the computed interpretation
                  , param :: PIKind                        -- ^ kind\/shape restriction used in the search
                  , uargs :: UsablePositions               -- ^ usable argument positions
                  , input :: Prob.Problem                  -- ^ the input problem
                  , argFilter :: Maybe AF.ArgumentFiltering -- ^ argument filtering, if usable rules were applied
                  , usymbols :: [F.Symbol]                 -- ^ usable symbols
                  }
-- | The processor itself; carries no configuration in the type.
data NaturalPI = NaturalPI deriving (Typeable, Show)
instance P.ComplexityProof PolynomialOrder where
  -- Pretty-print the proof: optional typing info, usable positions
  -- (only when not all positions are usable), the interpretation itself,
  -- and the orientation of the usable rules.
  pprintProof order _ =
      (case knd of
         TypeBased {} ->
           paragraph "We consider the following typing:"
           $+$ text "" $+$ indent (pprint (typing knd, sig)) $+$ text ""
         _ -> empty)
      $+$ (if uargs order == fullWithSignature sig
            then empty
            else (paragraph "The following argument positions are considered usable:"
                  $+$ text "" $+$ indent (pprint (uargs order, sig))) $+$ text "")
      $+$ paragraph ("TcT has computed the following " ++ ppknd)
      $+$ text ""
      $+$ pprint inter
      $+$ text ""
      $+$ paragraph "This order satisfies the following ordering constraints."
      $+$ text ""
      $+$ indent (pprintOrientRules inter sig vars rs)
    where
      ppknd =
        case knd of
          UnrestrictedPoly {} -> ppshp
          ConstructorBased {} -> "constructor-restricted " ++ ppshp
          TypeBased {} -> "constructor-restricted typed" ++ ppshp
      ppshp =
        case shape knd of
          SimpleShape s -> show s ++ " polynomial interpretation."
          CustomShape {} -> "polynomial interpretation."
      inter = ordInter order
      prob = input order
      knd = param order
      sig = Prob.signature prob
      vars = Prob.variables prob
      -- only print rules whose left-hand-side root is a usable symbol
      rs = [ rl | rl <- Trs.rules $ Prob.allComponents prob
           , let rt = T.root $ R.lhs rl
             in or [ rt == Right f | f <- us ] ]
      us = usymbols order
  answer order = CertAnswer $ C.certified (C.unknown, degree order)
  toXml order =
    Xml.elt "interpretation" [] (toXml ord par ua)
    where ord = ordInter order
          par = param order
          ua = uargs order
instance S.Processor NaturalPI where
  name NaturalPI = "poly"
  description NaturalPI = [ "This processor orients the problem using polynomial interpretation over natural numbers." ]
  -- Nine arguments: shape, bound, bits, cbits, uargs, urules,
  -- type-based, constructor-kind, degree (see 'kind', 'bound', etc.).
  type ArgumentsOf NaturalPI = (Arg PolyShape) :+: (Arg Nat) :+: (Arg (Maybe Nat)) :+: (Arg (Maybe Nat)) :+: (Arg Bool) :+: (Arg Bool) :+: (Arg Bool) :+: (Arg PolyShape) :+: (Arg (Maybe Nat))
  arguments NaturalPI = opt { A.name = "kind"
                            , A.description = unwords [ "This argument specifies the shape of the polynomials used in the interpretation."
                                                      , "Allowed values are 'stronglylinear', 'linear', 'simple', 'simplemixed', 'quadratic',"
                                                      , "and 'upto <nat>'"
                                                      , "referring to the respective shapes of the abstract polynomials used." ]
                            , A.defaultValue = SimpleShape Linear }
                        :+:
                        opt { A.name = "bound"
                            , A.description = unwords [ "This argument specifies an upper-bound on coefficients appearing in the interpretation."
                                                      , "Such an upper-bound is necessary as we employ bit-blasting to SAT internally"
                                                      , "when searching for compatible matrix interpretations."]
                            , A.defaultValue = Nat 3 }
                        :+:
                        opt { A.name = "bits"
                            , A.description = unwords [ "This argument plays the same role as 'bound',"
                                                      , "but instead of an upper-bound the number of bits is specified."
                                                      , "This argument overrides the argument 'bound'."]
                            , A.defaultValue = Nothing }
                        :+:
                        opt { A.name = "cbits"
                            , A.description = unwords [ "This argument specifies the number of bits used for intermediate results, "
                                                      , "as for instance coefficients of matrices obtained by interpreting"
                                                      , "left- and right-hand sides."]
                            , A.defaultValue = Nothing }
                        :+:
                        opt { A.name = "uargs"
                            , A.description = unwords [ "This argument specifies whether usable arguments are computed (if applicable)"
                                                      , "in order to relax the monotonicity constraints on the interpretation."]
                            , A.defaultValue = True }
                        :+:
                        opt { A.name = "urules"
                            , A.description = unwords [ "This argument specifies whether usable rules modulo argument filtering is applied"
                                                      , "in order to decrease the number of rules that have to be orient. "]
                            , A.defaultValue = True }
                        :+:
                        opt { A.name = "type-based"
                            , A.description = unwords [ "If set, type-based constructor restricted interpretations are used for runtime complexity analysis."
                                                      , "See flag 'constructor-kind' to specify the interpretation shape of constructor symbols, "
                                                      , "and the flag 'degree'." ]
                            , A.defaultValue = True }
                        :+:
                        opt { A.name = "constructor-kind"
                            , A.description = unwords [ "Specifies the shape of interpretations of constructor symbols."
                                                      , "The given shape is automatically restricted so that polynomial bounds can be inferred."
                                                      , "This argument is ignored if the flag 'type-based' is not set."]
                            , A.defaultValue = SimpleShape Linear }
                        :+:
                        opt { A.name = "degree"
                            , A.description = unwords [ "Specifies an induced upper bound for type-based constructor restricted interpretations."
                                                      , "This argument is ignored if the flag 'type-based' is not set."]
                            , A.defaultValue = Nothing }
  instanceName _ = "polynomial interpretation" -- TODO: show kind (shape $ S.processorArgs inst) ++
  type ProofOf NaturalPI = OrientationProof PolynomialOrder
  -- Full solve: try to orient all strict rules at once.
  solve inst prob = orient rs prob (S.processorArgs inst)
      where rs = RS.rsSelect (RS.selAllOf RS.selStricts) prob
  -- Partial solve: report which DP/TRS rules become removable under the
  -- found order (empty lists when the search was unsuccessful).
  solvePartial inst rs prob = mkProof `liftM` orient rs prob (S.processorArgs inst)
      where
        mkProof res@(Order ord@(PolynomialOrder {})) =
          P.PartialProof { P.ppInputProblem = prob
                         , P.ppResult = res
                         , P.ppRemovableDPs = Trs.toRules $ strictRules inter $ Prob.dpComponents prob
                         , P.ppRemovableTrs = Trs.toRules $ strictRules inter $ Prob.trsComponents prob }
          where inter = ordInter ord
        mkProof res =
          P.PartialProof { P.ppInputProblem = prob
                         , P.ppResult = res
                         , P.ppRemovableDPs = []
                         , P.ppRemovableTrs = [] }
-- | The (unconfigured) standard processor for polynomial interpretations.
polyProcessor :: S.StdProcessor NaturalPI
polyProcessor = S.StdProcessor NaturalPI
-- argument accessors

-- | Derive the 'PIKind' (interpretation restriction) from the processor
-- arguments and the problem's start terms: type-based or constructor-based
-- for basic start terms (depending on the \"type-based\" flag), otherwise
-- unrestricted.
kind :: Domains (S.ArgumentsOf NaturalPI) -> Prob.Problem -> PIKind
kind (shp :+: _ :+: _ :+: _ :+: _ :+: _ :+: True :+: cshp :+: mdeg) prob =
    case Prob.startTerms prob of
      (Prob.BasicTerms _ cs) ->
        let types = Tpe.infer (Prob.signature prob) (Trs.toRules (Prob.allComponents prob))
            (constrTypes, defTypes) = Tpe.partition (\f _ -> f `Set.member` cs) types
            equivs = Tpe.equivs constrTypes
            -- two types are equivalent if they share an equivalence class
            a1 `equiv` a2 = any (\ es -> a1 `elem` es && a2 `elem` es) equivs
        in TypeBased { shape = shp
                     , shapeConstructors = cshp
                     , equivType = equiv
                     , constructorTyping = constrTypes
                     , definedsTyping = defTypes
                     , typing = types
                     , enforcedDegree = mdeg}
      _ -> UnrestrictedPoly shp
kind (shp :+: _ :+: _ :+: _ :+: _ :+: _ :+: False :+: _) prob =
    case Prob.startTerms prob of
      (Prob.BasicTerms _ cs) ->
        ConstructorBased { constructors = cs, shape = shp}
      _ -> UnrestrictedPoly shp
-- | Coefficient bound for the SAT encoding; the \"bits\" argument,
-- when supplied, overrides the plain \"bound\" argument.
bound :: Domains (S.ArgumentsOf NaturalPI) -> N.Size
bound (_ :+: Nat bnd :+: mbits :+: _ :+: _) = case mbits of
                                                Just (Nat b) -> N.Bits b
                                                Nothing -> N.Bound bnd
-- | Number of bits used for intermediate results, when the \"cbits\"
-- argument was supplied.
cbits :: Domains (S.ArgumentsOf NaturalPI) -> Maybe N.Size
cbits (_ :+: _ :+: _ :+: b :+: _) = fmap (\(Nat n) -> N.Bits n) b
-- | Whether usable-argument computation is enabled.
isUargsOn :: Domains (S.ArgumentsOf NaturalPI) -> Bool
isUargsOn (_ :+: _ :+: _ :+: _ :+: ua :+: _) = ua

-- | Whether usable rules (modulo argument filtering) are enabled.
isUrulesOn :: Domains (S.ArgumentsOf NaturalPI) -> Bool
isUrulesOn (_ :+: _ :+: _ :+: _ :+: _ :+: ur :+: _) = ur
-- | Whether the problem is handled in dependency-pair mode.
data PolyDP = PWithDP | PNoDP deriving (Show, Eq)

-- | Propositional atom marking a rule as strictly oriented.
data Strict = Strict R.Rule deriving (Eq, Ord, Show, Typeable)
instance PropAtom Strict

-- | Propositional atom carrying the degree value assigned to a type.
data ValDeg = DegVal (Tpe.Type String) deriving (Eq, Ord, Show, Typeable)
instance PropAtom ValDeg
-- | Core search: encode monotonicity, rule-orientation, argument-filtering,
-- usable-rules and typing constraints as a SAT problem, hand it to minisat,
-- and decode a satisfying assignment into a 'PolynomialOrder'.
orient :: P.SolverM m => P.SelectorExpression -> Prob.Problem -> Domains (S.ArgumentsOf NaturalPI) -> m (S.ProofOf NaturalPI)
orient rs prob args = catchException $ do
  case pdp of
    -- DP mode additionally searches for an argument filtering and usable symbols
    PWithDP -> solve initial mkOrder
      where
        mkOrder (pv :&: af :&: us) =
          PolynomialOrder { ordInter = mkInter pv
                          , param = pk
                          , uargs = ua
                          , input = prob
                          , argFilter = if allowAF then Just af else Nothing
                          , usymbols = us }
        initial = abspi :&: AFEnc.initial sig :&: UREnc.initialUsables prob
    PNoDP -> solve abspi mkOrder
      where
        mkOrder pv =
          PolynomialOrder { ordInter = mkInter pv
                          , param = pk
                          , uargs = ua
                          , input = prob
                          , argFilter = Nothing
                          , usymbols = Set.toList $ Trs.definedSymbols $ Prob.trsComponents prob }
  where
    -- Build the conjunction of all constraint formulas and run the SAT solver.
    solve :: (SatSolver.Decoder e a, P.SolverM m) => e -> ( e -> PolynomialOrder) -> m (OrientationProof PolynomialOrder)
    solve initial mkOrder = do
      let pform = do
            pform1 <- MFormula.toFormula usableConstraints
            pform2 <- runDio orientConstraints
            pform3 <- N.toFormula typingConstraints
            SatSolver.addFormula (pform1 && pform2 && pform3)
      mpi <- P.minisatValue pform initial
      return $ case mpi of
                 Nothing -> Incompatible
                 Just o -> Order $ mkOrder o
    -- Translate diophantine constraints to propositional formulas,
    -- bit-blasting with the configured bounds.
    runDio :: (Ord l, SatSolver.Solver s l) => DioFormula l DioVar Int -> SatSolver s l (PropFormula l)
    runDio = toFormula (N.bound `liftM` cbits args) (N.bound $ bound args)
    -- Instantiate the abstract interpretation at the coefficient bound
    -- and simplify the resulting polynomials.
    mkInter pv = pint {interpretations = Map.map (unEmpty . shallowSimp) $ interpretations pint}
      where
        pint = fmap (\x -> x n) pv
        n = bound args
    abspi = abstractInterpretation pk sig :: PolyInter (N.Size -> Int)
    -- Monotonicity + weak orientation of all (usable) rules + strict
    -- orientation of the selected rules + argument-filtering consistency.
    orientConstraints =
        monotonicity pdp st uaOn absi
        && bigAnd [ usable r --> interpretTerm absi (R.lhs r) .>=. (modify r $ interpretTerm absi (R.rhs r)) | r <- Trs.rules $ trsrules]
        && bigAnd [ interpretTerm absi (R.lhs r) .>=. (modify r $ interpretTerm absi (R.rhs r)) | r <- Trs.rules $ dprules]
        && orientSelected rs
        && filteringConstraints
        -- && typingConstraints
      where
        usable
          | allowUR = UREnc.usable prob
          | otherwise = const top
        strictVar = restrictvar . Strict
        -- adding the strictness variable as an extra constant turns
        -- "lhs >= rhs + strict(r)" into strict orientation when strict(r) > 0
        modify r (Poly monos) = Poly $ Mono (strictVar r) [] : monos
        orientSelected (P.SelectDP r) = strictVar r .>. SR.zero
        orientSelected (P.SelectTrs r) = strictVar r .>. SR.zero
        orientSelected (P.BigAnd es) = bigAnd [ orientSelected e | e <- es]
        orientSelected (P.BigOr es) = bigOr [ orientSelected e | e <- es]
        monotonicity PWithDP _ u = safeRedpairConstraints ua u
        monotonicity PNoDP Prob.TermAlgebra {} _ = monotoneConstraints
        monotonicity PNoDP (Prob.BasicTerms _ _) True = uargMonotoneConstraints ua
        monotonicity PNoDP (Prob.BasicTerms _ _) False = monotoneConstraints
    -- A variable may only contribute a nonzero coefficient if its
    -- argument position survives the argument filtering.
    filteringConstraints :: (Eq l, Ord l) => DioFormula l DioVar Int
    filteringConstraints
      | not allowAF = top
      | otherwise =
          bigAnd [ bigAnd [ c .>. SR.zero --> bigAnd [ atom (AFEnc.InFilter f i) | Poly.Pow (V.Canon i) _ <- powers ]
                          | Poly.Mono c powers <- monos ]
                 | (f,Poly.Poly monos) <- Map.toList $ interpretations absi ]
    usableConstraints = MFormula.liftSat $ MFormula.toFormula $ UREnc.validUsableRulesEncoding prob isUnfiltered
      where
        isUnfiltered f i | allowAF = AFEnc.isInFilter f i
                         | otherwise = top
    -- For type-based kinds with an enforced degree, bound the degree
    -- contributed by defined symbols and by constructor output types.
    typingConstraints :: (Eq l, Ord l, Monad s, SatSolver.Solver s l) => N.NatMonad s l (PropFormula l)
    typingConstraints =
      case pk of
        UnrestrictedPoly {} -> top
        ConstructorBased {} -> top
        TypeBased {} -> maybe top enforceDegree (enforcedDegree pk)
      where
        enforceDegree :: (Eq l, Ord l, SatSolver.Solver s l) => Nat -> N.NatMonad s l (PropFormula l)
        enforceDegree (Nat deg) =
          bigAnd [ (liftDio $ i .>. SR.zero) -->
                   (sumPowers powers `mleq` return (natConst deg))
                 | (f ::: decl) <- Tpe.decls (definedsTyping pk)
                 , let Poly monos = typedInterpretation absi (f ::: decl)
                 , Mono i powers <- monos]
          &&
          bigAnd [ (liftDio $ i .>. SR.zero) -->
                   sumPowers powers `mleq` (return (degValOf otype))
                 | (c ::: decl) <- Tpe.decls (constructorTyping pk)
                 , let otype = Tpe.outputType decl
                 , let Poly monos = typedInterpretation absi (c ::: decl)
                 , Mono i powers <- monos
                 ]
          where
            ma `mleq` mb = do {a <- ma; b <- mb; b `N.mGeq` a}
            degValOf t = N.natAtom (N.Bound $ max 1 deg) (DegVal t)
            sumPowers powers = sumM [natConst k `N.mTimesNO` degValOf tv | Pow (_:::tv) k <- powers ]
              where
                sumM ms = sequence ms >>= foldM (\ n a -> n `N.mAdd` a) (natConst 0)
            natConst = N.natToFormula
        liftDio :: Ord l => SatSolver.Solver s l => DioFormula l DioVar Int -> N.NatMonad s l (PropFormula l)
        liftDio dio = N.liftN (runDio dio)
    ua = usableArgsWhereApplicable (pdp == PWithDP) sig st uaOn strat allrules
    absi = abstractInterpretation pk sig :: PolyInter (DioPoly DioVar Int)
    pdp = if Trs.isEmpty (Prob.strictTrs prob) && Prob.isDPProblem prob
          then PWithDP
          else PNoDP
    allowUR = isUrulesOn args && Prob.isDPProblem prob
    allowAF = pdp == PWithDP && allowUR
    pk = kind args prob
    uaOn = isUargsOn args
    sig = Prob.signature prob
    st = Prob.startTerms prob
    strat = Prob.strategy prob
    allrules = Prob.allComponents prob
    dprules = Prob.dpComponents prob
    trsrules = Prob.trsComponents prob
-- handlefun in the next two functions could be replaced by something else,
-- e.g. a criterion by Friedl

-- | Require a coefficient of at least one for every usable argument
-- position of every interpreted symbol.
uargMonotoneConstraints :: AbstrOrdSemiring a b => UsablePositions -> PolyInter a -> b
uargMonotoneConstraints uarg i = bigAnd $ Map.mapWithKey handlefun $ interpretations i
  where handlefun f p = bigAnd $ map (\n -> getCoeff [Pow (V.Canon n) 1] p .>=. SR.one) $ usablePositions f uarg
-- | Require a coefficient of at least one for every argument position
-- of every interpreted symbol (full monotonicity).
monotoneConstraints :: AbstrOrdSemiring a b => PolyInter a -> b
monotoneConstraints i = bigAnd $ Map.mapWithKey handlefun $ interpretations i
  where sig = signature i
        handlefun f p = bigAnd $ map (\n -> getCoeff [Pow (V.Canon n) 1] p .>=. SR.one) [1..F.arity sig f]
-- | Monotonicity restricted to compound symbols only (reduction-pair
-- setting); honours usable positions when the @uargs@ flag is on.
safeRedpairConstraints :: AbstrOrdSemiring a b => UsablePositions -> Bool -> PolyInter a -> b
safeRedpairConstraints uarg uaOn i = bigAnd $ Map.mapWithKey handlefun $ compInterpretations i
  where sig = signature i
        compInterpretations = Map.filterWithKey isCompound . interpretations
        isCompound f _ = F.isCompound sig f
        handlefun f p = bigAnd $ map (\n -> getCoeff [Pow (V.Canon n) 1] p .>=. SR.one) $ fposs f
        fposs f = if uaOn then usablePositions f uarg else [1..F.arity sig f]
-- | Keep only the rules that the given interpretation orients strictly.
strictRules :: PolyInter Int -> Trs.Trs -> Trs.Trs
strictRules i = Trs.filterRules $ strictRuleConstraints i
-- instance declarations

-- | Marker class for types that may occur as polynomial-interpretation
-- entries (concrete and symbolic coefficient domains).
class PIEntry a

instance PIEntry Int
instance PIEntry (DioPoly DioVar Int)
instance PIEntry (DioFormula l DioVar Int)
instance PIEntry a => PIEntry (Polynomial V.Variable a)
-- Polynomial equality, decided monomial-by-monomial: compare the
-- coefficient of the first monomial's power vector on both sides,
-- then recurse on the remainders.
instance (AbstrEq a b, Semiring a, PIEntry a) => AbstrEq (Polynomial V.Variable a) b where
  (Poly []) .==. (Poly []) = top
  p@(Poly []) .==. q = q .==. p
  p@(Poly (Mono _ vs:_)) .==. q@(Poly _) = (getCoeff vs p .==. getCoeff vs q) && (deleteCoeff vs p .==. deleteCoeff vs q)
-- Polynomial ordering: strict comparison on the constant part combined
-- with weak coefficient-wise comparison on the remaining monomials.
instance (AbstrOrd a b, Semiring a, PIEntry a) => AbstrOrd (Polynomial V.Variable a) b where
  p .<. q = (getCoeff [] p .<. getCoeff [] q) && (deleteCoeff [] p .<=. deleteCoeff [] q)
  (Poly []) .<=. (Poly _) = top
  p@(Poly (Mono _ vs:_)) .<=. q@(Poly _) = (getCoeff vs p .<=. getCoeff vs q) && (deleteCoeff vs p .<=. deleteCoeff vs q)
-- Semiring operations on bit-blasted natural-number formulas, delegating
-- to the Qlogic NatSat primitives.
instance (Ord l, SatSolver.Solver s l) => MSemiring s l (N.NatFormula l) DioVar Int where
  plus = N.mAddNO
  prod = N.mTimesNO
  zero = N.natToFormula 0
  one = N.natToFormula 1
  geq = N.mGeq
  grt = N.mGrt
  equ = N.mEqu
  constToFormula = N.natToFormula
  formAtom = N.natAtomM . N.Bound
  truncFormTo = N.mTruncTo
  -- pad with low bits so the formula reaches at least n bits
  padFormTo n f = N.padBots (max n l - l) f
    where l = length f
-- Decode a satisfying assignment: each set propositional bit adds its
-- positional value 2^(bits-k) to the coefficient of the corresponding
-- monomial of the corresponding function symbol.
instance SatSolver.Decoder (PolyInter (N.Size -> Int)) (N.PLVec DioVar) where
  add (N.PLVec (DioVar y) k) i = case cast y of
                                   Nothing -> i
                                   Just x -> i{interpretations = Map.adjust newint fun (interpretations i)}
                                     where newint p = case splitFirstCoeff vs p of
                                                        (Nothing, Poly p') -> Poly $ Mono (newval $ const 0) vs:p'
                                                        (Just ov, Poly p') -> Poly $ Mono (newval ov) vs:p'
                                           -- restricted variables use a single bit
                                           newval old n = old n + (2 ^ ((if r then 1 else N.bits n) - k))
                                           r = restrict x
                                           fun = varfun x
                                           vs = argpos x
-- | This processor implements polynomial interpretations.
-- Instantiates 'polyProcessor' with default bounds (coefficient bound 3,
-- 2 bits, 3 cbits) and the given shape.
polynomialInterpretation :: PolyShape -> S.ProcessorInstance NaturalPI
polynomialInterpretation k = polyProcessor `S.withArgs` (k :+: nat 3 :+: Just (nat 2) :+: Just (nat 3) :+: True :+: True :+: True :+: SimpleShape Linear :+: Nothing)
-- | Default polynomial-interpretation instance; alias for 'simplePolynomial'.
poly :: S.ProcessorInstance NaturalPI
poly = simplePolynomial

-- | Options for @simple@ polynomial interpretations.
simplePolynomial :: S.ProcessorInstance NaturalPI
simplePolynomial = polynomialInterpretation $ SimpleShape Simple

-- | Options for @linear@ polynomial interpretations.
linearPolynomial :: S.ProcessorInstance NaturalPI
linearPolynomial = polynomialInterpretation $ SimpleShape Linear

-- | Options for @strongly linear@ polynomial interpretations.
stronglyLinearPolynomial :: S.ProcessorInstance NaturalPI
stronglyLinearPolynomial = polynomialInterpretation $ SimpleShape StronglyLinear

-- | Options for @simple mixed@ polynomial interpretations.
simpleMixedPolynomial :: S.ProcessorInstance NaturalPI
simpleMixedPolynomial = polynomialInterpretation $ SimpleShape SimpleMixed

-- | Options for @quadratic mixed@ polynomial interpretations.
quadraticPolynomial :: S.ProcessorInstance NaturalPI
quadraticPolynomial = polynomialInterpretation $ SimpleShape Quadratic
-- | Option for polynomials of custom shape, as defined by the first argument.
-- This function receives a list of variables
-- denoting the @n@ arguments of the interpretation function. The return value of type ['SimpleMonomial']
-- corresponds to the list of monomials of the constructed interpretation function.
-- A polynomial is a list of unique 'SimpleMonomial', where 'SimpleMonomial' are
-- considered equal if the set of variables together with powers match.
-- 'SimpleMonomial' can be built using '^^^', 'constant' and 'mono'.
-- For instance, linear interpretations are constructed using the function
-- @
-- \vs -> [constant] ++ [ v^^^1 | v <- vs]
-- @
-- .
customPolynomial :: ([V.Variable] -> [SimpleMonomial]) -> S.ProcessorInstance NaturalPI
customPolynomial mk = polynomialInterpretation $ CustomShape mk
-- | Extract the complexity certificate from a computed order:
-- polynomial of the maximal non-constructor degree for constructor-based
-- kinds, (double) exponential bounds for unrestricted kinds, and the
-- type-weighted degree for type-based kinds.
degree :: PolynomialOrder -> C.Complexity
degree order =
  case knd of
    ConstructorBased cs _ -> C.poly (Just deg)
      where deg = max' [ d | (f, d) <- degrees pint, not $ f `Set.member` cs]
    UnrestrictedPoly {}
      | isStrong && deg <= 1 -> C.poly (Just 1)
      | deg <= 1 -> C.expo (Just 1)
      | otherwise -> C.expo (Just 2)
      where
        deg = max' [ d | (_, d) <- degrees pint ]
        -- strongly linear: all coefficients are at most one
        isStrong = all (all (\ (Mono n _) -> n <= 1)) [ monos | (_,Poly monos) <- inters]
    TypeBased {} -> C.poly (Just deg)
      where
        -- degree contributed by constructors producing (a type equivalent
        -- to) the given type; a monomial mentioning the type itself counts 1
        degValue tpe =
          max' [ degMono monomial
               | (c ::: decl) <- Tpe.decls (constructorTyping knd)
               , Tpe.outputType decl `equiv` tpe
               , let Poly monos = typedInterpretation pint (c ::: decl)
               , Mono i monomial <- monos, i > 0]
          where
            degMono monomial
              | any ofTpe monomial = 1
              | otherwise = sum [ k * degValue tv | Pow (_::: tv) k <- monomial]
            ofTpe (Pow (_::: tv) _) = tv `equiv` tpe
        equiv = equivType knd
        deg =
          max' [ sum [ k * degValue tv | Pow (_::: tv) k <- monomial]
               | (f ::: decl) <- Tpe.decls (definedsTyping knd)
               , let Poly monos = typedInterpretation pint (f ::: decl)
               , Mono i monomial <- monos, i > 0]
  where
    knd = param order
    pint = ordInter order
    inters = Map.toList $ interpretations $ pint
    max' = foldl max 0
| mzini/TcT | source/Tct/Method/Poly/NaturalPI.hs | gpl-3.0 | 26,850 | 0 | 24 | 9,357 | 6,943 | 3,617 | 3,326 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CloudResourceManager.Folders.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a folder identified by the supplied resource name. Valid
-- folder resource names have the format \`folders\/{folder_id}\` (for
-- example, \`folders\/1234\`). The caller must have
-- \`resourcemanager.folders.get\` permission on the identified folder.
--
-- /See:/ <https://cloud.google.com/resource-manager Cloud Resource Manager API Reference> for @cloudresourcemanager.folders.get@.
module Network.Google.Resource.CloudResourceManager.Folders.Get
(
-- * REST Resource
FoldersGetResource
-- * Creating a Request
, foldersGet
, FoldersGet
-- * Request Lenses
, fgXgafv
, fgUploadProtocol
, fgAccessToken
, fgUploadType
, fgName
, fgCallback
) where
import Network.Google.Prelude
import Network.Google.ResourceManager.Types
-- | A resource alias for @cloudresourcemanager.folders.get@ method which the
-- 'FoldersGet' request conforms to.
--
-- NOTE: auto-generated servant-style route; each ':>' adds one path
-- capture or query parameter to the GET request.
type FoldersGetResource =
     "v3" :>
       Capture "name" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "access_token" Text :>
               QueryParam "uploadType" Text :>
                 QueryParam "callback" Text :>
                   QueryParam "alt" AltJSON :> Get '[JSON] Folder
-- | Retrieves a folder identified by the supplied resource name. Valid
-- folder resource names have the format \`folders\/{folder_id}\` (for
-- example, \`folders\/1234\`). The caller must have
-- \`resourcemanager.folders.get\` permission on the identified folder.
--
-- Only '_fgName' is required; all other fields are optional request
-- parameters (see the corresponding lenses for their meaning).
--
-- /See:/ 'foldersGet' smart constructor.
data FoldersGet =
  FoldersGet'
    { _fgXgafv :: !(Maybe Xgafv)
    , _fgUploadProtocol :: !(Maybe Text)
    , _fgAccessToken :: !(Maybe Text)
    , _fgUploadType :: !(Maybe Text)
    , _fgName :: !Text
    , _fgCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'FoldersGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'fgXgafv'
--
-- * 'fgUploadProtocol'
--
-- * 'fgAccessToken'
--
-- * 'fgUploadType'
--
-- * 'fgName'
--
-- * 'fgCallback'
--
-- All optional parameters start out as 'Nothing'.
foldersGet
    :: Text -- ^ 'fgName'
    -> FoldersGet
foldersGet pFgName_ =
  FoldersGet'
    { _fgXgafv = Nothing
    , _fgUploadProtocol = Nothing
    , _fgAccessToken = Nothing
    , _fgUploadType = Nothing
    , _fgName = pFgName_
    , _fgCallback = Nothing
    }
-- | V1 error format.
fgXgafv :: Lens' FoldersGet (Maybe Xgafv)
fgXgafv = lens _fgXgafv setter
  where setter s a = s {_fgXgafv = a}
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
fgUploadProtocol :: Lens' FoldersGet (Maybe Text)
fgUploadProtocol = lens _fgUploadProtocol setter
  where setter s a = s {_fgUploadProtocol = a}
-- | OAuth access token.
fgAccessToken :: Lens' FoldersGet (Maybe Text)
fgAccessToken = lens _fgAccessToken setter
  where setter s a = s {_fgAccessToken = a}
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
fgUploadType :: Lens' FoldersGet (Maybe Text)
fgUploadType = lens _fgUploadType setter
  where setter s a = s {_fgUploadType = a}
-- | Required. The resource name of the folder to retrieve. Must be of the
-- form \`folders\/{folder_id}\`.
fgName :: Lens' FoldersGet Text
fgName = lens _fgName setter
  where setter s a = s {_fgName = a}
-- | JSONP
fgCallback :: Lens' FoldersGet (Maybe Text)
fgCallback = lens _fgCallback setter
  where setter s a = s {_fgCallback = a}
instance GoogleRequest FoldersGet where
        type Rs FoldersGet = Folder
        type Scopes FoldersGet =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/cloud-platform.read-only"]
        -- Positional arguments follow the order of 'FoldersGetResource':
        -- the captured name first, then the query parameters.
        requestClient FoldersGet'{..}
          = go _fgName _fgXgafv _fgUploadProtocol
              _fgAccessToken
              _fgUploadType
              _fgCallback
              (Just AltJSON)
              resourceManagerService
          where go
                  = buildClient (Proxy :: Proxy FoldersGetResource)
                      mempty
| brendanhay/gogol | gogol-resourcemanager/gen/Network/Google/Resource/CloudResourceManager/Folders/Get.hs | mpl-2.0 | 4,789 | 0 | 15 | 1,077 | 704 | 414 | 290 | 99 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.TagManager.Accounts.Containers.Folders.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets a GTM Folder.
--
-- /See:/ <https://developers.google.com/tag-manager/api/v1/ Tag Manager API Reference> for @tagmanager.accounts.containers.folders.get@.
module Network.Google.Resource.TagManager.Accounts.Containers.Folders.Get
(
-- * REST Resource
AccountsContainersFoldersGetResource
-- * Creating a Request
, accountsContainersFoldersGet
, AccountsContainersFoldersGet
-- * Request Lenses
, acfgContainerId
, acfgFolderId
, acfgAccountId
) where
import Network.Google.Prelude
import Network.Google.TagManager.Types
-- | A resource alias for @tagmanager.accounts.containers.folders.get@ method which the
-- 'AccountsContainersFoldersGet' request conforms to.
--
-- Path shape: @GET tagmanager/v1/accounts/{accountId}/containers/{containerId}/folders/{folderId}@.
type AccountsContainersFoldersGetResource =
     "tagmanager" :>
       "v1" :>
         "accounts" :>
           Capture "accountId" Text :>
             "containers" :>
               Capture "containerId" Text :>
                 "folders" :>
                   Capture "folderId" Text :>
                     QueryParam "alt" AltJSON :> Get '[JSON] Folder
-- | Gets a GTM Folder.
--
-- All three identifiers are required; see the lenses for their meaning.
--
-- /See:/ 'accountsContainersFoldersGet' smart constructor.
data AccountsContainersFoldersGet = AccountsContainersFoldersGet'
    { _acfgContainerId :: !Text
    , _acfgFolderId :: !Text
    , _acfgAccountId :: !Text
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AccountsContainersFoldersGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'acfgContainerId'
--
-- * 'acfgFolderId'
--
-- * 'acfgAccountId'
accountsContainersFoldersGet
    :: Text -- ^ 'acfgContainerId'
    -> Text -- ^ 'acfgFolderId'
    -> Text -- ^ 'acfgAccountId'
    -> AccountsContainersFoldersGet
accountsContainersFoldersGet pAcfgContainerId_ pAcfgFolderId_ pAcfgAccountId_ =
  AccountsContainersFoldersGet'
    { _acfgContainerId = pAcfgContainerId_
    , _acfgFolderId = pAcfgFolderId_
    , _acfgAccountId = pAcfgAccountId_
    }
-- | The GTM Container ID.
acfgContainerId :: Lens' AccountsContainersFoldersGet Text
acfgContainerId = lens _acfgContainerId setter
  where setter s a = s {_acfgContainerId = a}
-- | The GTM Folder ID.
acfgFolderId :: Lens' AccountsContainersFoldersGet Text
acfgFolderId = lens _acfgFolderId setter
  where setter s a = s {_acfgFolderId = a}
-- | The GTM Account ID.
acfgAccountId :: Lens' AccountsContainersFoldersGet Text
acfgAccountId = lens _acfgAccountId setter
  where setter s a = s {_acfgAccountId = a}
instance GoogleRequest AccountsContainersFoldersGet
         where
        type Rs AccountsContainersFoldersGet = Folder
        type Scopes AccountsContainersFoldersGet =
             '["https://www.googleapis.com/auth/tagmanager.edit.containers",
               "https://www.googleapis.com/auth/tagmanager.readonly"]
        -- Positional arguments follow the capture order of the
        -- 'AccountsContainersFoldersGetResource' path.
        requestClient AccountsContainersFoldersGet'{..}
          = go _acfgAccountId _acfgContainerId _acfgFolderId
              (Just AltJSON)
              tagManagerService
          where go
                  = buildClient
                      (Proxy :: Proxy AccountsContainersFoldersGetResource)
                      mempty
| rueshyna/gogol | gogol-tagmanager/gen/Network/Google/Resource/TagManager/Accounts/Containers/Folders/Get.hs | mpl-2.0 | 3,976 | 0 | 16 | 885 | 465 | 278 | 187 | 77 | 1 |
module Main
(main)
where
import Test.Erd.Render (testRender)
import Test.Tasty
import Test.Text.Parsec.Erd.Parser (testEr)
-- | Entry point: run every Erd test group under Tasty.
main :: IO ()
main = defaultMain (testGroup "Erd Tests" tests)
-- | All top-level test trees: parser tests plus rendering tests.
tests :: [TestTree]
tests =
  [ testEr
  , testRender
  ]
| BurntSushi/erd | test/Spec.hs | unlicense | 291 | 0 | 6 | 87 | 81 | 49 | 32 | 10 | 1 |
import Data.Monoid
-- | Zip two lists, padding the shorter list with 'mempty' so the
-- result is always as long as the longer input.
mzip :: (Monoid a, Monoid b) => [a] -> [b] -> [(a, b)]
mzip [] [] = []
mzip xs ys = (headOr xs, headOr ys) : mzip (drop 1 xs) (drop 1 ys)
  where
    headOr []      = mempty
    headOr (z : _) = z
| seckcoder/lang-learn | haskell/test.hs | unlicense | 236 | 0 | 9 | 73 | 172 | 93 | 79 | 6 | 1 |
-- import Char (toLower)
import Data.Char (toLower)
import Control.Monad (filterM, forM, liftM)
import System.Directory (doesDirectoryExist, getDirectoryContents, Permissions(..), getModificationTime, getPermissions)
-- import System.Time (ClockTime(..)) -- deprecated
import Data.Time.Clock
import System.FilePath (takeExtension, takeFileName, (</>))
import Control.Exception (bracket, handle, SomeException)
import System.IO (IOMode(..), hClose, hFileSize, openFile)
import ControlledVisit
-- p. 230/270
-- Think of filesystem traversal as a fold over the directory hierarchy.
-- Iterate controls our fold: each constructor wraps the current seed
-- and tells the driver how to proceed.
data Iterate seed = Done { unwrap :: seed } -- cease traversal and return unwrap
                  | Skip { unwrap :: seed } -- do not recurse into this directory
                  | Continue { unwrap :: seed } -- use "unwrap" as input to the next call of fold function
                    deriving (Show)
-- alias for the function that we fold with.
type Iterator seed = seed -> Info -> Iterate seed
-- Logically a left fold.
-- The seed for each step is the result of the prior step.
--
-- BUG FIX: 'walk' is now nested inside 'fold' so that it closes over
-- the directory currently being traversed (curDir). The previous
-- sibling version computed @path' = path </> name@ using the
-- *top-level* path, which produced wrong paths for every entry below
-- the first level of subdirectories.
foldTree :: Iterator a -> a -> FilePath -> IO a
foldTree iter initSeed path = do
    endSeed <- fold initSeed path
    return (unwrap endSeed)
  where
    -- both walk and fold return a seed wrapped in an Iterate.
    fold seed curDir = getUsefulContents curDir >>= walk seed
      where
        -- walk is tail recursive (instead of calling forM) so we can stop anytime.
        walk seed' (name:names) = do
          let path' = curDir </> name
          info <- getInfo path'
          case iter seed' info of
            done@(Done _) -> return done
            Skip seed''   -> walk seed'' names
            Continue seed''
              | isDirectory info -> do
                  next <- fold seed'' path'
                  case next of
                    done@(Done _) -> return done
                    other         -> walk (unwrap other) names
              | otherwise -> walk seed'' names
        walk seed' _ = return (Continue seed')
-- p. 231/271
-- | Iterator collecting picture paths (.jpg/.png): stops as soon as
-- three have been found and skips .svn directories entirely.
atMostThreePictures :: Iterator [FilePath]
atMostThreePictures paths info
  | length paths == 3 = Done paths
  | isDirectory info && takeFileName picPath == ".svn" = Skip paths
  | ext `elem` [".jpg", ".png"] = Continue (picPath : paths)
  | otherwise = Continue paths
  where
    picPath = infoPath info
    ext = map toLower (takeExtension picPath)
-- | Iterator that counts how many directories a traversal encounters.
-- The count is threaded through as the fold's seed; traversal always
-- continues. A type signature is added for consistency with the rest
-- of the file; the 'Num' constraint keeps the original polymorphism.
countDirectories :: Num count => Iterator count
countDirectories count info =
  Continue (if isDirectory info
            then count + 1
            else count)
| haroldcarr/learn-haskell-coq-ml-etc | haskell/book/2009-Real_World_Haskell/FoldDir.hs | unlicense | 2,557 | 0 | 20 | 756 | 629 | 330 | 299 | 48 | 5 |
module Kornel.LineHandler.Clojure
( setup
) where
import Data.Aeson
import qualified Data.Attoparsec.Text as P
import Kornel.Common
import Kornel.LineHandler
import qualified Network.HTTP.Client.TLS as HTTPS
import Network.HTTP.Simple
import Prelude hiding (Handler, handle)
-- | Handler wiring: expose the command help together with a raw line
-- handler that evaluates Clojure expressions asynchronously.
setup :: (Help, HandlerRaw)
setup =
  (cmdHelp, ) . onlySimple . pure $ \respond _ request ->
    case parseMaybe cmdParser request of
      Nothing -> pure ()
      Just sexpr ->
        -- Evaluation runs on a separate thread; any reply goes back as
        -- a PRIVMSG.
        asyncWithLog "Clojure" $ eval sexpr >>= mapM_ (respond . Privmsg)
-- | Accept @\@clojure \<expr\>@ or @\@clj \<expr\>@ (case-insensitive
-- command word) and yield the raw expression text.
cmdParser :: P.Parser Text
cmdParser = do
  P.skipSpace
  _ <- P.asciiCI "@clojure" <|> P.asciiCI "@clj"
  skipSpace1
  P.takeText
-- | Help entry: both command aliases take a single s-expression argument.
cmdHelp :: Help
cmdHelp = Help [(["clojure", "clj"], "<sexpr>")]
-- | Shape of a tryclj.com evaluation reply: a successful 'result'
-- and/or an error 'message', decoded generically from JSON.
data TryCljResponse = TryCljResponse
  { result :: Maybe Text
  , message :: Maybe Text
  } deriving (Show, Generic)
instance FromJSON TryCljResponse
-- | Evaluate a Clojure expression remotely via the tryclj.com JSON API.
-- NOTE(review): the 'Maybe' in the result appears to stem from the JSON
-- decoding of the response body — confirm against the 'httpJSON' usage.
eval :: Text -> IO (Maybe Text)
eval sexpr = do
  manager <- HTTPS.newTlsManager
  let request =
        setRequestManager manager .
        setRequestQueryString [("expr", Just $ encodeUtf8 sexpr)] $
        "http://www.tryclj.com/eval.json"
  response <- getResponseBody <$> httpJSON request
  return (render <$> response)
  where
    render :: TryCljResponse -> Text
    -- Prefer the error 'message' when present, then the 'result', and
    -- finally a fixed error string if the reply carries neither.
    render r =
      flip fromMaybe (message r) $
      fromMaybe "Error: got neither a ‘result’ nor a ‘message’." (result r)
| michalrus/kornel | src/Kornel/LineHandler/Clojure.hs | apache-2.0 | 1,458 | 0 | 16 | 348 | 425 | 227 | 198 | -1 | -1 |
module Main where
import CodeJam
import System.Environment
import Data.List
import QR.Beam
-- | Read a Code Jam input file: the first line holds the number of
-- test cases, the remaining lines hold the case data.
--
-- Fixes over the previous version: the file-name argument is no longer
-- shadowed by the first input line, an empty file or a non-numeric
-- first line raises a descriptive 'IOError' instead of crashing on an
-- irrefutable pattern / 'read' failure.
readFileToCases :: String -> IO (Int, [String])
readFileToCases fileName = do
  contents <- readFile fileName
  case lines contents of
    [] -> ioError (userError (fileName ++ ": empty input file"))
    (countLine : caseLines) ->
      case reads countLine of
        [(caseCount, _)] -> return (caseCount, caseLines)
        _ -> ioError (userError (fileName ++ ": bad case count: " ++ countLine))
-- | Run one numbered test case through the solver @f@ and format the
-- result as a Code Jam output line ("Case #N: result"). The raw case
-- data and the result are also echoed to stdout for debugging.
readCaseAndToOutputV2 :: (CodeJam a) => (a -> String) -> (Int, a) -> IO String
readCaseAndToOutputV2 f (x, y) = do
  --putStrLn ("Case #" ++ (show x) ++ ": " ++ y)
  --putStrLn ("Case #" ++ (show x) ++ ":" ++ (head (getData y)))
  putStrLn ("Case #" ++ (show x) ++ ":" ++ (concat (intersperse "," (getData y))))
  let ret = f y
  putStrLn ("ret:" ++ ret)
  return ("Case #" ++ (show x) ++ ": " ++ ret)
-- "resources/codejam.in" "resources/codejam.out"
main = do
(arg1:argRest) <- getArgs
--caseDatas <- readFileToCases arg1
print "hello"
--outputs <- mapM (readCaseAndToOutput getBathroomStalls) caseDatas
--mapM_ putStrLn $ fmap snd caseDatas
-- (datCount, datas) <- readFileToCases arg1
-- let caseDatas = zip [1..] $ ((splitDatas datas getMinChange))
-- outputs <- mapM (readCaseAndToOutputV2 codeJamCalc) caseDatas
-- --mapM_ putStrLn $ fmap outputs
-- writeFile (head argRest) $ unlines outputs
| lihlcnkr/codejam | app/Main.hs | apache-2.0 | 1,185 | 0 | 15 | 229 | 293 | 155 | 138 | 20 | 1 |
{-# LANGUAGE GADTs, TypeFamilies, TypeOperators, FlexibleInstances,
ScopedTypeVariables, FlexibleContexts, MultiParamTypeClasses,
BangPatterns #-}
module Flow.Builder
(
-- * Abstract data flow
Flow, flow
-- * Strategy
, Strategy
, runStrategy
, uniq, implementing, calculate
-- * Kernel binding
, IsReprs(..), IsReprKern(..), IsKernelDef(..)
, kernel, Kernel
, bind, rebind, bindRule, bindNew
, recover, hints
) where
import Control.Monad
import Control.Monad.State.Strict
import qualified Data.HashMap.Strict as HM
import Data.Maybe
import Data.Typeable
import DNA (ProfileHint)
import Flow.Internal
-- | Class for reasoning about (type-level) lists of flows, built from
-- 'Z' (nil) and '(:.)' (cons).
class IsFlows fs where
  type Pars fs
  -- | Erase the typed list into a plain list of flow descriptors.
  toList :: fs -> [FlowI]
  -- | Produce a list of wildcard flows, numbered from the given index.
  wilds :: Int -> fs
  -- | Rebuild the typed list from a plain list; the length must match.
  fromList :: [FlowI] -> fs -- unsafe!
instance IsFlows Z where
  type Pars Z = Z
  toList Z = []
  fromList [] = Z
  fromList _ = error "Internal error: fromList expected empty list!"
  wilds _ = Z
instance IsFlows fs => IsFlows (Flow a :. fs) where
  type Pars (Flow a :. fs) = a :. Pars fs
  toList (Flow f :. fs) = f : toList fs
  fromList (f:fs) = Flow f :. fromList fs
  fromList _ = error "Internal error: fromList expected non-empty list!"
  wilds i = wildFlow i :. wilds (i+1)
-- | Support class for allowing to pass lists of flows using curried
-- parameters. Converts between the curried form @Flow a -> ... -> Flow r@
-- and the list form @Flows fs -> Flow (FlowsRet fs)@.
class IsFlows (Flows fs) => IsCurriedFlows fs where
  type Flows fs
  type FlowsRet fs
  type FlowsKernFun fs
  curryFlow :: (Flows fs -> Flow (FlowsRet fs)) -> fs
  uncurryFlow :: fs -> Flows fs -> Flow (FlowsRet fs)
  uncurryKernFun :: fs -> FlowsKernFun fs -> Flows fs -> Kernel (FlowsRet fs)
-- Base case: a bare 'Flow' takes no further parameters.
instance IsCurriedFlows (Flow a) where
  type Flows (Flow a) = Z
  type FlowsRet (Flow a) = a
  type FlowsKernFun (Flow a) = Kernel a
  curryFlow f = f Z
  uncurryFlow fl _ = fl
  uncurryKernFun _ kfl _ = kfl
-- Inductive case: peel one 'Flow' parameter at a time.
instance IsCurriedFlows fs => IsCurriedFlows (Flow f -> fs) where
  type Flows (Flow f -> fs) = Flow f :. Flows fs
  type FlowsRet (Flow f -> fs) = FlowsRet fs
  type FlowsKernFun (Flow f -> fs) = Flow f -> FlowsKernFun fs
  curryFlow f fl = curryFlow (f . (fl :.))
  uncurryFlow f (fl :. fls) = uncurryFlow (f fl) fls
  uncurryKernFun _ f (fl :. fls) = uncurryKernFun (undefined :: fs) (f fl) fls
-- | Class for reasoning about lists of data representations, mirroring
-- 'IsFlows' at the representation level.
class (IsFlows (ReprFlows rs), Pars (ReprFlows rs) ~ ReprTypes rs) => IsReprs rs where
  type ReprTypes rs
  type ReprFlows rs
  -- | Erase to a plain list of wrapped representations.
  toReprsI :: rs -> [ReprI]
instance IsReprs Z where
  type ReprTypes Z = Z
  type ReprFlows Z = Z
  toReprsI _ = []
instance (DataRepr r, IsReprs rs) => IsReprs (r :. rs) where
  type ReprTypes (r :. rs) = ReprType r :. ReprTypes rs
  type ReprFlows (r :. rs) = Flow (ReprType r) :. ReprFlows rs
  toReprsI (r:.rs) = ReprI r : toReprsI rs
-- | Class for reasoning about producing kernels from curried lists of flows
class IsReprs rs => IsReprKern a rs where
  type ReprKernFun a rs
  -- | Turn a list-consuming kernel builder into its curried form.
  curryReprs :: rs -> (ReprFlows rs -> Kernel a) -> ReprKernFun a rs
instance IsReprKern a Z where
  type ReprKernFun a Z = Kernel a
  curryReprs _ f = f Z
instance (DataRepr r, IsReprKern a rs) => IsReprKern a (r :. rs) where
  type ReprKernFun a (r :. rs) = Flow (ReprType r) -> ReprKernFun a rs
  curryReprs _ f fl = curryReprs (undefined :: rs) (f . (fl :.))
-- | Functions going from "Flow"s to a "Kernel". Useful for modifying
-- kernel code: 'mapKernelDef' applies a transformation to the 'Kernel'
-- at the end of an arbitrarily long chain of 'Flow' parameters.
class IsKernelDef kf where
  type KernelDefRet kf
  mapKernelDef :: (Kernel (KernelDefRet kf) -> Kernel (KernelDefRet kf)) -> kf -> kf
instance IsKernelDef (Kernel r) where
  type KernelDefRet (Kernel r) = r
  mapKernelDef = id
instance IsKernelDef kf => IsKernelDef (Flow f -> kf) where
  type KernelDefRet (Flow f -> kf) = KernelDefRet kf
  mapKernelDef f kf = \x -> mapKernelDef f (kf x)
-- | Create a "Flow" function. This can take an arbitrary
-- number of input "Flow"s to produce a new output "Flow". So for
-- example:
--
-- > constantFlow :: Flow X
-- > constantFlow = flow "constant X data"
--
-- is a constant flow, which should correspond to constant
-- data. However we can create a "Flow" function as follows:
--
-- > flowFunction :: Flow X -> Flow Y
-- > flowFunction = flow "perform F to make Y from X"
--
-- The name is an implicit and un-enforced specification that the
-- given functionality is going to get implemented. Using such "flow"
-- functions arbitrary data flow networks can be built, which can be
-- bound to kernels by "Strategy"s.
flow :: IsCurriedFlows fs => String -> fs
flow name = curryFlow (mkFlow name . toList)
-- | Makes a data flow unique. This means that the flow will get a new
-- identity, and anything bound to the old flow will no longer apply
-- to the new flow. No rule will ever match a unique flow.
uniq :: Flow a -> Strategy (Flow a)
-- The fresh name is derived from the kernel-ID counter, which is then
-- bumped so the next 'uniq' (or kernel) gets a different number.
uniq (Flow fi) = Strategy $ state $ \ss ->
  (mkFlow (flName fi ++ "." ++ show (ssKernelId ss)) (flDepends fi),
   ss {ssKernelId = 1 + ssKernelId ss})
-- | Creates a new kernel using the given data representations for
-- input values. Needs to be bound to input flows. The result is
-- curried, so callers supply one 'Flow' per input representation.
kernel :: forall r rs. (DataRepr r, IsReprs rs, IsReprKern (ReprType r) rs)
       => String -> rs -> r -> KernelCode -> ReprKernFun (ReprType r) rs
kernel name parReprs retRep code
  = curryReprs (undefined :: rs) $ \fs ->
    Kernel name [] code (zip (toList fs) (toReprsI parReprs)) retRep
-- | Prepares the given kernel. This means checking its parameters and
-- adding it to the kernel list. However, it will not automatically be
-- added to the current scope.
prepareKernel :: Kernel r -> Flow r -> Strategy KernelBind
prepareKernel (Kernel kname khints kcode pars retRep) (Flow fi) = do
  -- Look up dependencies (parameters without a representation are
  -- ignored; parameters are numbered from 1 for error messages)
  kis <- mapM (uncurry (prepareDependency kname fi)) $
         zip [1..] $ filter (not . isNoReprI . snd) pars
  -- Make kernel, add to kernel list
  i <- freshKernelId
  let typeCheck (ReprI inR) = maybe False (reprCompatible retRep) (cast inR)
      kern = KernelBind { kernId = i
                        , kernFlow = fi
                        , kernName = kname
                        , kernRepr = ReprI retRep
                        , kernDeps = kis
                        , kernCode = kcode
                        , kernReprCheck = typeCheck
                        , kernHints = khints
                        }
  return kern
-- | Prepares a concrete data dependency for a kernel implementing the
-- flow. This means finding the kernel that produces the result,
-- possibly applying a rule, and finally doing a type-check to ensure
-- that data representations match.
prepareDependency :: String -> FlowI -> Int -> (FlowI, ReprI)
                  -> Strategy KernelDep
prepareDependency kname fi parn (p, prep) = do
  -- Parameter flows must all be in the dependency tree of the flow
  -- to calculate. Yes, this means that theoretically flows are
  -- allowed to depend on very early flows. This is most likely not
  -- a good idea though.
  let hasDepend f
        | p == f = True
        | otherwise = any hasDepend $ flDepends f
  when (not $ hasDepend fi) $
    fail $ "Parameter " ++ show p ++ " not a dependency of " ++ show fi ++ "!"
  -- Look up latest kernel ID
  ss <- Strategy get
  let check kern
        | kernReprCheck kern prep = return $ KernelDep (kernId kern) prep
        | otherwise = fail $ concat
            [ "Data representation mismatch when binding kernel "
            , kname, " to implement "
            , flName fi, ": Expected ", show prep, " for parameter "
            , show parn, ", but kernel ", kernName kern, " produced "
            , show (kernRepr kern), "!"
            ]
  case HM.lookup p (ssMap ss) of
    Just sme -> check sme
    Nothing -> do
      -- Not defined yet? Attempt to match a rule to produce it
      m_strat <- findRule (Flow p)
      case m_strat of
        Nothing -> fail $ "When binding kernel " ++ kname ++ " to implement " ++
                   flName fi ++ ": Could not find a kernel calculating flow " ++
                   show p ++ "!"
        Just strat -> do
          -- Execute rule
          strat
          -- Lookup again. The rule should normally guarantee that
          -- this doesn't fail any more.
          ss' <- Strategy get
          case HM.lookup p (ssMap ss') of
            Just krn -> check krn
            Nothing -> fail $ "When binding kernel " ++ kname ++ " to implement " ++
                       flName fi ++ ": Failed to apply rule to calculate " ++ show p ++ "! " ++
                       "This should be impossible!"
-- | Bind the given flow to a kernel. For this to succeed, three
-- conditions must be met:
--
-- 1. All input flows of the 'Kernel' must be direct or indirect data
-- dependencies of the given flow. If this flow was bound before, this
-- includes the flow itself (see also 'rebind').
--
-- 2. All input flows have either been bound, or can be bound
-- automatically using rules registered by 'bindRule'.
--
-- 3. The bound kernels produce data representations that match
-- ('reprCompatible') the expected input data representations.
bind :: Flow r -> Kernel r -> Strategy ()
bind fl kfl = do
  kern <- prepareKernel kfl fl
  addStep $ KernelStep kern
  -- Record that this flow is now produced by the new kernel binding.
  let fi = kernFlow kern
  Strategy $ modify $ \ss -> ss{ ssMap = HM.insert fi kern (ssMap ss)}
-- | Attach profiling hints to a kernel definition. This enables 'DNA'
-- profiling for the kernel in question so that suitable data appears
-- in the profiling report. The new hints are appended after any hints
-- the kernel already carries.
hints :: IsKernelDef kd => [ProfileHint] -> kd -> kd
hints newHints = mapKernelDef addHints
  where
    addHints (Kernel nm oldHints code pars ret) =
      Kernel nm (oldHints ++ newHints) code pars ret
-- | Re-bind an already-bound flow. This is a special case of 'bind'
-- for kernels that modify data in place - for example to change data
-- representations (say, sorting).
--
-- By the conditions of 'bind', @rebind f k@ is exactly @bind f (k f)@;
-- this function exists as a short-cut and to make it visually obvious
-- when a strategy re-binds a flow.
rebind :: Flow a -> (Flow a -> Kernel a) -> Strategy ()
rebind fl mkKernel = bind fl (mkKernel fl)
-- | Recover from crashes while calculating the given flow using fail-out.
-- This works exactly the same as 'bind', with the difference that the
-- 'Kernel' is only going to get called for regions that got lost due
-- to a previous crash. If there is no crash, the given kernel will not
-- get called at all.
--
-- This is only allowed when the flow has already been bound, and the
-- output data representation matches the previous kernel
-- binding. Furthermore, in contrast to 'bind' it makes no sense for
-- the kernel to depend on the recovered 'Flow'.
recover :: Flow r -> Kernel r -> Strategy ()
recover fl@(Flow fi) kfl = do
  -- Look up the flow binding, calculating it if required
  m_kb <- HM.lookup fi . ssMap <$> Strategy get
  when (isNothing m_kb) $ calculate fl
  Just kb <- HM.lookup fi . ssMap <$> Strategy get
  -- Now prepare recovery kernel
  kern <- prepareKernel kfl fl
  -- Check data representation compatibility with the previous binding
  let typeCheck (ReprI r0) (ReprI r1) = maybe False (reprCompatible r0) (cast r1)
  when (not $ kernRepr kb `typeCheck` kernRepr kern) $
    fail $ "recover: " ++ show kern ++ " cannot recover regions of " ++ show kb ++
           " because of data representation mismatch!"
  -- Add step and new flow binding
  addStep $ RecoverStep kern (kernId kb)
-- | Registers a new rule for automatically binding kernels given a
-- certain data flow pattern. This is used by "calculate" to figure
-- out what to do. Furthermore, "bind" and friends will use them in
-- order to materialise missing data dependencies.
rule :: IsCurriedFlows fs
     => fs -- ^ Abstract data flow to match
     -> (Flows fs -> Strategy ()) -- ^ Code for binding the data flow
     -> Strategy ()
rule flf strat = do
  -- Pass wildcard flows to function to get pattern
  let (Flow pat) = uncurryFlow flf (wilds 0)
  -- Rule is now to match the given pattern, and if successful
  -- execute strategy and check that it actually implements the
  -- node.
  --
  -- TODO: We probably want to make a closure of the binds
      stratRule = StratRule $ \fi ->
        matchPattern fi pat >>=
        return . void . implementing (Flow fi) . strat
  Strategy $ modify $ \ss -> ss{ ssRules = stratRule : ssRules ss }
-- | Registers a new "rule" that automatically binds a kernel whenever a "Flow"
-- of the given shape is required as a data dependency. The kernel
-- input types must exactly match the flow inputs for this to work.
bindRule :: forall fs. IsCurriedFlows fs
=> fs
-> FlowsKernFun fs
-> Strategy ()
bindRule flf kern =
rule flf $ \inp ->
bind (uncurryFlow flf inp) (uncurryKernFun (undefined :: fs) kern inp)
-- | Check whether a flow matches a pattern. On success, returns the
-- wildcard assignments implied by the match; sub-flow matches are
-- combined with 'mergeMatches'.
matchPattern :: forall fs. IsFlows fs => FlowI -> FlowI -> Maybe fs
matchPattern fi pat
  | fi == pat = Just (wilds 0)
  | Just i <- flWildcard pat
  = Just $ fromList $ set i fi $ toList (wilds 0 :: fs)
  | flName fi == flName pat,
    length (flDepends fi) == length (flDepends pat),
    Just matches <- zipWithM matchPattern (flDepends fi) (flDepends pat)
  = mergeMatches matches
  | otherwise
  = Nothing
 where -- Sets n-th element in list. Edward Kmett is probably hating me
       -- now.
       set :: Int -> b -> [b] -> [b]
       set 0 x (_:ys) = x:ys
       set i x (y:ys) = y:set (i-1) x ys
       set _ _ [] = error "internal error: matchPattern/set used wrong list index!"
-- | Merges data flow pattern match results. Wildcards yield to
-- concrete flows; two concrete flows must be equal to merge.
mergeMatches :: IsFlows fs => [fs] -> Maybe fs
mergeMatches [] = Just (wilds 0)
mergeMatches [fs] = Just fs
mergeMatches (fs0:fs1:rest) = do
  let merge :: FlowI -> FlowI -> Maybe FlowI
      merge f0 f1
        | Just{} <- flWildcard f0 = Just f1
        | Just{} <- flWildcard f1 = Just f0
        | f0 == f1 = Just f0
        | otherwise = Nothing
  fs' <- fromList <$> zipWithM merge (toList fs0) (toList fs1)
  mergeMatches (fs':rest)
-- | Calculate a flow. This only succeeds when a matching rule is in
-- scope (see 'bindRule'); otherwise the strategy fails.
calculate :: Flow a -> Strategy ()
calculate fl = findRule fl >>= maybe noRule id
  where
    noRule = fail $ "calculate: Could not find a rule matching " ++ show fl ++ "!"
-- | Look up a rule that can calculate the given flow. Returns the
-- rule's strategy if one of the registered rules matches.
findRule :: Flow a -> Strategy (Maybe (Strategy ()))
findRule (Flow fi) = do
  -- Find a matching rule (the first registered match wins)
  rules <- ssRules <$> Strategy get
  let apply (StratRule r) = r fi
  return $ listToMaybe $ mapMaybe apply rules
  -- TODO: Warn about rule overlap?
-- | Documents that the given strategy code is meant to bind the
-- indicated flow. An error will be raised if it fails to do so.
implementing :: Flow a -> Strategy () -> Strategy ()
implementing (Flow fi) strat = do
  -- Execute strategy
  strat
  -- Now verify that given flow was actually implemented
  ss <- Strategy get
  case HM.lookup fi (ssMap ss) of
    Just{} -> return ()
    Nothing -> fail $ "Flow " ++ show fi ++ " was not implemented!"
-- | Binds a 'Kernel' to a new unique 'Flow' for the result. This is
-- useful both for input streams (the roots of the data flow graph) as
-- well as output 'Kernel's that do not actually produce any data.
--
-- Note that 'bindNew' is basically just a combination of 'flow',
-- 'uniq' and 'bind'. The only magic bit is that the 'Flow' name gets
-- chosen automatically from the 'Kernel' name.
bindNew :: Kernel r -> Strategy (Flow r)
bindNew kern@(Kernel name _ _ inps _) = do
  fl <- uniq (mkFlow (name ++ "-call") (map fst inps))
  bind fl kern
  return fl
| SKA-ScienceDataProcessor/RC | MS5/dna/flow/Flow/Builder.hs | apache-2.0 | 15,761 | 0 | 27 | 3,927 | 3,993 | 2,035 | 1,958 | 236 | 4 |
{-# LANGUAGE TemplateHaskell, DeriveFunctor #-}
{-| Some common Ganeti types.
This holds types common to both core work, and to htools. Types that
are very core specific (e.g. configuration objects) should go in
'Ganeti.Objects', while types that are specific to htools in-memory
representation should go into 'Ganeti.HTools.Types'.
-}
{-
Copyright (C) 2012, 2013, 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Types
( AllocPolicy(..)
, allocPolicyFromRaw
, allocPolicyToRaw
, InstanceStatus(..)
, instanceStatusFromRaw
, instanceStatusToRaw
, DiskTemplate(..)
, diskTemplateToRaw
, diskTemplateFromRaw
, diskTemplateMovable
, TagKind(..)
, tagKindToRaw
, tagKindFromRaw
, NonNegative
, fromNonNegative
, mkNonNegative
, Positive
, fromPositive
, mkPositive
, Negative
, fromNegative
, mkNegative
, NonEmpty
, fromNonEmpty
, mkNonEmpty
, NonEmptyString
, QueryResultCode
, IPv4Address
, mkIPv4Address
, IPv4Network
, mkIPv4Network
, IPv6Address
, mkIPv6Address
, IPv6Network
, mkIPv6Network
, MigrationMode(..)
, migrationModeToRaw
, VerifyOptionalChecks(..)
, verifyOptionalChecksToRaw
, DdmSimple(..)
, DdmFull(..)
, ddmFullToRaw
, CVErrorCode(..)
, cVErrorCodeToRaw
, Hypervisor(..)
, hypervisorFromRaw
, hypervisorToRaw
, OobCommand(..)
, oobCommandToRaw
, OobStatus(..)
, oobStatusToRaw
, StorageType(..)
, storageTypeToRaw
, EvacMode(..)
, evacModeToRaw
, FileDriver(..)
, fileDriverToRaw
, InstCreateMode(..)
, instCreateModeToRaw
, RebootType(..)
, rebootTypeToRaw
, ExportMode(..)
, exportModeToRaw
, IAllocatorTestDir(..)
, iAllocatorTestDirToRaw
, IAllocatorMode(..)
, iAllocatorModeToRaw
, NICMode(..)
, nICModeToRaw
, JobStatus(..)
, jobStatusToRaw
, jobStatusFromRaw
, FinalizedJobStatus(..)
, finalizedJobStatusToRaw
, JobId
, fromJobId
, makeJobId
, makeJobIdS
, RelativeJobId
, JobIdDep(..)
, JobDependency(..)
, absoluteJobDependency
, getJobIdFromDependency
, OpSubmitPriority(..)
, opSubmitPriorityToRaw
, parseSubmitPriority
, fmtSubmitPriority
, OpStatus(..)
, opStatusToRaw
, opStatusFromRaw
, ELogType(..)
, eLogTypeToRaw
, ReasonElem
, ReasonTrail
, StorageUnit(..)
, StorageUnitRaw(..)
, StorageKey
, addParamsToStorageUnit
, diskTemplateToStorageType
, VType(..)
, vTypeFromRaw
, vTypeToRaw
, NodeRole(..)
, nodeRoleToRaw
, roleDescription
, DiskMode(..)
, diskModeToRaw
, BlockDriver(..)
, blockDriverToRaw
, AdminState(..)
, adminStateFromRaw
, adminStateToRaw
, AdminStateSource(..)
, adminStateSourceFromRaw
, adminStateSourceToRaw
, StorageField(..)
, storageFieldToRaw
, DiskAccessMode(..)
, diskAccessModeToRaw
, LocalDiskStatus(..)
, localDiskStatusFromRaw
, localDiskStatusToRaw
, localDiskStatusName
, ReplaceDisksMode(..)
, replaceDisksModeToRaw
, RpcTimeout(..)
, rpcTimeoutFromRaw -- FIXME: no used anywhere
, rpcTimeoutToRaw
, HotplugTarget(..)
, hotplugTargetToRaw
, HotplugAction(..)
, hotplugActionToRaw
, SshKeyType(..)
, sshKeyTypeToRaw
, Private(..)
, showPrivateJSObject
, Secret(..)
, showSecretJSObject
, revealValInJSObject
, redacted
, HvParams
, OsParams
, OsParamsPrivate
, TimeStampObject(..)
, UuidObject(..)
, ForthcomingObject(..)
, SerialNoObject(..)
, TagsObject(..)
) where
import Control.Monad (liftM)
import Control.Monad.Fail (MonadFail)
import qualified Text.JSON as JSON
import Text.JSON (JSON, readJSON, showJSON)
import Data.Ratio (numerator, denominator)
import System.Time (ClockTime)
import qualified Ganeti.ConstantUtils as ConstantUtils
import Ganeti.JSON (Container, HasStringRepr(..))
import qualified Ganeti.THH as THH
import Ganeti.THH.Field (TagSet)
import Ganeti.Utils
-- * Generic types
-- | Type that holds a non-negative value.
newtype NonNegative a = NonNegative { fromNonNegative :: a }
  deriving (Show, Eq, Ord)
-- | Smart constructor for 'NonNegative'; fails (in the given
-- 'MonadFail') when the argument is negative.
mkNonNegative :: (MonadFail m, Num a, Ord a, Show a) => a -> m (NonNegative a)
mkNonNegative value =
  if value >= 0
    then return (NonNegative value)
    else fail ("Invalid value for non-negative type '" ++ show value ++ "'")
-- | Serialise as the bare wrapped value; deserialisation goes through
-- 'mkNonNegative' so the non-negativity invariant is re-checked on read.
instance (JSON.JSON a, Num a, Ord a, Show a) => JSON.JSON (NonNegative a) where
  showJSON = JSON.showJSON . fromNonNegative
  readJSON v = JSON.readJSON v >>= mkNonNegative
-- | Type that holds a positive value.
newtype Positive a = Positive { fromPositive :: a }
  deriving (Show, Eq, Ord)
-- | Smart constructor for 'Positive'; fails (in the given 'MonadFail')
-- unless the argument is strictly greater than zero.
mkPositive :: (MonadFail m, Num a, Ord a, Show a) => a -> m (Positive a)
mkPositive value =
  if value > 0
    then return (Positive value)
    else fail ("Invalid value for positive type '" ++ show value ++ "'")
-- | Serialise as the bare wrapped value; deserialisation re-validates
-- positivity via 'mkPositive'.
instance (JSON.JSON a, Num a, Ord a, Show a) => JSON.JSON (Positive a) where
  showJSON = JSON.showJSON . fromPositive
  readJSON v = JSON.readJSON v >>= mkPositive
-- | Type that holds a negative value.
newtype Negative a = Negative { fromNegative :: a }
  deriving (Show, Eq, Ord)
-- | Smart constructor for 'Negative'; fails (in the given 'MonadFail')
-- unless the argument is strictly below zero.
mkNegative :: (MonadFail m, Num a, Ord a, Show a) => a -> m (Negative a)
mkNegative value =
  if value < 0
    then return (Negative value)
    else fail ("Invalid value for negative type '" ++ show value ++ "'")
-- | Serialise as the bare wrapped value; deserialisation re-validates
-- negativity via 'mkNegative'.
instance (JSON.JSON a, Num a, Ord a, Show a) => JSON.JSON (Negative a) where
  showJSON = JSON.showJSON . fromNegative
  readJSON v = JSON.readJSON v >>= mkNegative
-- | Type that holds a non-null list.
newtype NonEmpty a = NonEmpty { fromNonEmpty :: [a] }
  deriving (Show, Eq, Ord)
-- | Smart constructor for 'NonEmpty'; fails on the empty list.
mkNonEmpty :: (MonadFail m) => [a] -> m (NonEmpty a)
mkNonEmpty xs =
  case xs of
    [] -> fail "Received empty value for non-empty list"
    _  -> return (NonEmpty xs)
-- | Serialise as a plain JSON list; deserialisation rejects empty lists
-- via 'mkNonEmpty'.
instance (JSON.JSON a) => JSON.JSON (NonEmpty a) where
  showJSON = JSON.showJSON . fromNonEmpty
  readJSON v = JSON.readJSON v >>= mkNonEmpty
-- | A simple type alias for non-empty strings.
type NonEmptyString = NonEmpty Char
-- | Numeric code attached to query results.
type QueryResultCode = Int
-- | Newtype wrapper marking a string as an IPv4 address.
newtype IPv4Address = IPv4Address { fromIPv4Address :: String }
  deriving (Show, Eq, Ord)
-- FIXME: this should check that 'address' is a valid ip
mkIPv4Address :: Monad m => String -> m IPv4Address
mkIPv4Address addr = return (IPv4Address addr)
-- | Serialise as the raw address string; reads go through
-- 'mkIPv4Address' (currently no validation, see FIXME above).
instance JSON.JSON IPv4Address where
  showJSON = JSON.showJSON . fromIPv4Address
  readJSON v = JSON.readJSON v >>= mkIPv4Address
-- | Newtype wrapper marking a string as an IPv4 network (CIDR-style);
-- mirrors 'IPv4Address' above, including the missing validation.
newtype IPv4Network = IPv4Network { fromIPv4Network :: String }
  deriving (Show, Eq, Ord)
-- FIXME: this should check that 'address' is a valid ip
mkIPv4Network :: Monad m => String -> m IPv4Network
mkIPv4Network address =
  return IPv4Network { fromIPv4Network = address }
instance JSON.JSON IPv4Network where
  showJSON = JSON.showJSON . fromIPv4Network
  readJSON v = JSON.readJSON v >>= mkIPv4Network
-- | Newtype wrapper marking a string as an IPv6 address.
newtype IPv6Address = IPv6Address { fromIPv6Address :: String }
  deriving (Show, Eq, Ord)
-- FIXME: this should check that 'address' is a valid ip
mkIPv6Address :: Monad m => String -> m IPv6Address
mkIPv6Address address =
  return IPv6Address { fromIPv6Address = address }
instance JSON.JSON IPv6Address where
  showJSON = JSON.showJSON . fromIPv6Address
  readJSON v = JSON.readJSON v >>= mkIPv6Address
-- | Newtype wrapper marking a string as an IPv6 network.
newtype IPv6Network = IPv6Network { fromIPv6Network :: String }
  deriving (Show, Eq, Ord)
-- FIXME: this should check that 'address' is a valid ip
mkIPv6Network :: Monad m => String -> m IPv6Network
mkIPv6Network address =
  return IPv6Network { fromIPv6Network = address }
instance JSON.JSON IPv6Network where
  showJSON = JSON.showJSON . fromIPv6Network
  readJSON v = JSON.readJSON v >>= mkIPv6Network
-- * Ganeti types
-- | Instance disk template type. The disk template is a name for the
-- constructor of the disk configuration 'DiskLogicalId' used for
-- serialization, configuration values, etc.
--
-- Template Haskell splice: 'THH.declareLADT' generates the ADT plus the
-- 'diskTemplateToRaw'/'diskTemplateFromRaw' string conversions.
$(THH.declareLADT ''String "DiskTemplate"
  [ ("DTDiskless", "diskless")
  , ("DTFile", "file")
  , ("DTSharedFile", "sharedfile")
  , ("DTPlain", "plain")
  , ("DTBlock", "blockdev")
  , ("DTDrbd8", "drbd")
  , ("DTRbd", "rbd")
  , ("DTExt", "ext")
  , ("DTGluster", "gluster")
  ])
$(THH.makeJSONInstance ''DiskTemplate)
-- | Python-side representation uses the raw (lowercase) name.
instance THH.PyValue DiskTemplate where
  showValue = show . diskTemplateToRaw
instance HasStringRepr DiskTemplate where
  fromStringRepr = diskTemplateFromRaw
  toStringRepr = diskTemplateToRaw
-- | Predicate on disk templates indicating if instances based on this
-- disk template can freely be moved (to any node in the node group).
diskTemplateMovable :: DiskTemplate -> Bool
-- Note: we deliberately do not use wildcard pattern to force an
-- update of this function whenever a new disk template is added.
diskTemplateMovable template =
  case template of
    DTDiskless   -> True
    DTFile       -> False
    DTSharedFile -> True
    DTPlain      -> False
    DTBlock      -> False
    DTDrbd8      -> False
    DTRbd        -> True
    DTExt        -> True
    DTGluster    -> True
-- | Data type representing what items the tag operations apply to.
$(THH.declareLADT ''String "TagKind"
  [ ("TagKindInstance", "instance")
  , ("TagKindNode", "node")
  , ("TagKindGroup", "nodegroup")
  , ("TagKindCluster", "cluster")
  , ("TagKindNetwork", "network")
  ])
$(THH.makeJSONInstance ''TagKind)
-- | The Group allocation policy type.
--
-- Note that the order of constructors is important as the automatic
-- Ord instance will order them in the order they are defined, so when
-- changing this data type be careful about the interaction with the
-- desired sorting order.
$(THH.declareLADT ''String "AllocPolicy"
  [ ("AllocPreferred", "preferred")
  , ("AllocLastResort", "last_resort")
  , ("AllocUnallocable", "unallocable")
  ])
$(THH.makeJSONInstance ''AllocPolicy)
-- | The Instance real state type.
-- Raw names keep the historical ADMIN_/ERROR_/USER_ prefixes used on
-- the wire.
$(THH.declareLADT ''String "InstanceStatus"
  [ ("StatusDown", "ADMIN_down")
  , ("StatusOffline", "ADMIN_offline")
  , ("ErrorDown", "ERROR_down")
  , ("ErrorUp", "ERROR_up")
  , ("NodeDown", "ERROR_nodedown")
  , ("NodeOffline", "ERROR_nodeoffline")
  , ("Running", "running")
  , ("UserDown", "USER_down")
  , ("WrongNode", "ERROR_wrongnode")
  ])
$(THH.makeJSONInstance ''InstanceStatus)
-- | Migration mode.
$(THH.declareLADT ''String "MigrationMode"
  [ ("MigrationLive", "live")
  , ("MigrationNonLive", "non-live")
  ])
$(THH.makeJSONInstance ''MigrationMode)
-- | Verify optional checks.
$(THH.declareLADT ''String "VerifyOptionalChecks"
  [ ("VerifyNPlusOneMem", "nplusone_mem")
  ])
$(THH.makeJSONInstance ''VerifyOptionalChecks)
-- | Cluster verify error codes.
-- Constructor names are the raw codes prefixed with @Cv@; the raw side
-- is what cluster-verify emits.
$(THH.declareLADT ''String "CVErrorCode"
  [ ("CvECLUSTERCFG", "ECLUSTERCFG")
  , ("CvECLUSTERCERT", "ECLUSTERCERT")
  , ("CvECLUSTERCLIENTCERT", "ECLUSTERCLIENTCERT")
  , ("CvECLUSTERFILECHECK", "ECLUSTERFILECHECK")
  , ("CvECLUSTERDANGLINGNODES", "ECLUSTERDANGLINGNODES")
  , ("CvECLUSTERDANGLINGINST", "ECLUSTERDANGLINGINST")
  , ("CvEINSTANCEBADNODE", "EINSTANCEBADNODE")
  , ("CvEINSTANCEDOWN", "EINSTANCEDOWN")
  , ("CvEINSTANCELAYOUT", "EINSTANCELAYOUT")
  , ("CvEINSTANCEMISSINGDISK", "EINSTANCEMISSINGDISK")
  , ("CvEINSTANCEFAULTYDISK", "EINSTANCEFAULTYDISK")
  , ("CvEINSTANCEWRONGNODE", "EINSTANCEWRONGNODE")
  , ("CvEINSTANCESPLITGROUPS", "EINSTANCESPLITGROUPS")
  , ("CvEINSTANCEPOLICY", "EINSTANCEPOLICY")
  , ("CvEINSTANCEUNSUITABLENODE", "EINSTANCEUNSUITABLENODE")
  , ("CvEINSTANCEMISSINGCFGPARAMETER", "EINSTANCEMISSINGCFGPARAMETER")
  , ("CvENODEDRBD", "ENODEDRBD")
  , ("CvENODEDRBDVERSION", "ENODEDRBDVERSION")
  , ("CvENODEDRBDHELPER", "ENODEDRBDHELPER")
  , ("CvENODEFILECHECK", "ENODEFILECHECK")
  , ("CvENODEHOOKS", "ENODEHOOKS")
  , ("CvENODEHV", "ENODEHV")
  , ("CvENODELVM", "ENODELVM")
  , ("CvENODEN1", "ENODEN1")
  , ("CvENODENET", "ENODENET")
  , ("CvENODEOS", "ENODEOS")
  , ("CvENODEORPHANINSTANCE", "ENODEORPHANINSTANCE")
  , ("CvENODEORPHANLV", "ENODEORPHANLV")
  , ("CvENODERPC", "ENODERPC")
  , ("CvENODESSH", "ENODESSH")
  , ("CvENODEVERSION", "ENODEVERSION")
  , ("CvENODESETUP", "ENODESETUP")
  , ("CvENODETIME", "ENODETIME")
  , ("CvENODEOOBPATH", "ENODEOOBPATH")
  , ("CvENODEUSERSCRIPTS", "ENODEUSERSCRIPTS")
  , ("CvENODEFILESTORAGEPATHS", "ENODEFILESTORAGEPATHS")
  , ("CvENODEFILESTORAGEPATHUNUSABLE", "ENODEFILESTORAGEPATHUNUSABLE")
  , ("CvENODESHAREDFILESTORAGEPATHUNUSABLE",
     "ENODESHAREDFILESTORAGEPATHUNUSABLE")
  , ("CvENODEGLUSTERSTORAGEPATHUNUSABLE",
     "ENODEGLUSTERSTORAGEPATHUNUSABLE")
  , ("CvEGROUPDIFFERENTPVSIZE", "EGROUPDIFFERENTPVSIZE")
  , ("CvEEXTAGS", "EEXTAGS")
  ])
$(THH.makeJSONInstance ''CVErrorCode)
-- | Dynamic device modification, just add/remove version.
$(THH.declareLADT ''String "DdmSimple"
  [ ("DdmSimpleAdd", "add")
  , ("DdmSimpleAttach", "attach")
  , ("DdmSimpleRemove", "remove")
  , ("DdmSimpleDetach", "detach")
  ])
$(THH.makeJSONInstance ''DdmSimple)
-- | Dynamic device modification, all operations version.
--
-- TODO: DDM_SWAP, DDM_MOVE?
$(THH.declareLADT ''String "DdmFull"
  [ ("DdmFullAdd", "add")
  , ("DdmFullAttach", "attach")
  , ("DdmFullRemove", "remove")
  , ("DdmFullDetach", "detach")
  , ("DdmFullModify", "modify")
  ])
$(THH.makeJSONInstance ''DdmFull)
-- | Hypervisor type definitions.
$(THH.declareLADT ''String "Hypervisor"
  [ ("Kvm", "kvm")
  , ("XenPvm", "xen-pvm")
  , ("Chroot", "chroot")
  , ("XenHvm", "xen-hvm")
  , ("Lxc", "lxc")
  , ("Fake", "fake")
  ])
$(THH.makeJSONInstance ''Hypervisor)
-- | Python-side representation uses the raw hypervisor name.
instance THH.PyValue Hypervisor where
  showValue = show . hypervisorToRaw
instance HasStringRepr Hypervisor where
  fromStringRepr = hypervisorFromRaw
  toStringRepr = hypervisorToRaw
-- | Oob command type.
$(THH.declareLADT ''String "OobCommand"
  [ ("OobHealth", "health")
  , ("OobPowerCycle", "power-cycle")
  , ("OobPowerOff", "power-off")
  , ("OobPowerOn", "power-on")
  , ("OobPowerStatus", "power-status")
  ])
$(THH.makeJSONInstance ''OobCommand)
-- | Oob command status
$(THH.declareLADT ''String "OobStatus"
  [ ("OobStatusCritical", "CRITICAL")
  , ("OobStatusOk", "OK")
  , ("OobStatusUnknown", "UNKNOWN")
  , ("OobStatusWarning", "WARNING")
  ])
$(THH.makeJSONInstance ''OobStatus)
-- | Storage type.
$(THH.declareLADT ''String "StorageType"
  [ ("StorageFile", "file")
  , ("StorageSharedFile", "sharedfile")
  , ("StorageGluster", "gluster")
  , ("StorageLvmPv", "lvm-pv")
  , ("StorageLvmVg", "lvm-vg")
  , ("StorageDiskless", "diskless")
  , ("StorageBlock", "blockdev")
  , ("StorageRados", "rados")
  , ("StorageExt", "ext")
  ])
$(THH.makeJSONInstance ''StorageType)
-- | Storage keys are identifiers for storage units. Their content varies
-- depending on the storage type, for example a storage key for LVM storage
-- is the volume group name.
type StorageKey = String
-- | Storage parameters
type SPExclusiveStorage = Bool
-- | Storage units without storage-type-specific parameters
data StorageUnitRaw = SURaw StorageType StorageKey
-- | Full storage unit with storage-type-specific parameters.
-- Only the LVM variants carry the exclusive-storage flag; see
-- 'addParamsToStorageUnit' for how raw units are promoted.
data StorageUnit = SUFile StorageKey
                 | SUSharedFile StorageKey
                 | SUGluster StorageKey
                 | SULvmPv StorageKey SPExclusiveStorage
                 | SULvmVg StorageKey SPExclusiveStorage
                 | SUDiskless StorageKey
                 | SUBlock StorageKey
                 | SURados StorageKey
                 | SUExt StorageKey
                 deriving (Eq)
-- | Textual form delegates to 'showSUSimple' / 'showSULvm', so it is
-- the 'show' of a (raw-type, key, params) triple.
instance Show StorageUnit where
  show (SUFile key) = showSUSimple StorageFile key
  show (SUSharedFile key) = showSUSimple StorageSharedFile key
  show (SUGluster key) = showSUSimple StorageGluster key
  show (SULvmPv key es) = showSULvm StorageLvmPv key es
  show (SULvmVg key es) = showSULvm StorageLvmVg key es
  show (SUDiskless key) = showSUSimple StorageDiskless key
  show (SUBlock key) = showSUSimple StorageBlock key
  show (SURados key) = showSUSimple StorageRados key
  show (SUExt key) = showSUSimple StorageExt key
-- | JSON form is a (type, key, params) triple; non-LVM units carry an
-- empty parameter list.
instance JSON StorageUnit where
  showJSON (SUFile key) = showJSON (StorageFile, key, []::[String])
  showJSON (SUSharedFile key) = showJSON (StorageSharedFile, key, []::[String])
  showJSON (SUGluster key) = showJSON (StorageGluster, key, []::[String])
  showJSON (SULvmPv key es) = showJSON (StorageLvmPv, key, [es])
  showJSON (SULvmVg key es) = showJSON (StorageLvmVg, key, [es])
  showJSON (SUDiskless key) = showJSON (StorageDiskless, key, []::[String])
  showJSON (SUBlock key) = showJSON (StorageBlock, key, []::[String])
  showJSON (SURados key) = showJSON (StorageRados, key, []::[String])
  showJSON (SUExt key) = showJSON (StorageExt, key, []::[String])
-- FIXME: add readJSON implementation
  readJSON _ = fail "Not implemented"
-- | Composes a string representation of storage types without
-- storage parameters
showSUSimple :: StorageType -> StorageKey -> String
showSUSimple st sk = show (storageTypeToRaw st, sk, []::[String])
-- | Composes a string representation of the LVM storage types
showSULvm :: StorageType -> StorageKey -> SPExclusiveStorage -> String
showSULvm st sk es = show (storageTypeToRaw st, sk, [es])
-- | Mapping from disk templates to storage types.
diskTemplateToStorageType :: DiskTemplate -> StorageType
diskTemplateToStorageType template =
  case template of
    DTExt        -> StorageExt
    DTFile       -> StorageFile
    DTSharedFile -> StorageSharedFile
    DTDrbd8      -> StorageLvmVg
    DTPlain      -> StorageLvmVg
    DTRbd        -> StorageRados
    DTDiskless   -> StorageDiskless
    DTBlock      -> StorageBlock
    DTGluster    -> StorageGluster
-- | Equips a raw storage unit with its parameters.
-- Only the LVM variants consume the exclusive-storage flag; all other
-- storage types ignore it.
addParamsToStorageUnit :: SPExclusiveStorage -> StorageUnitRaw -> StorageUnit
addParamsToStorageUnit _ (SURaw StorageBlock key) = SUBlock key
addParamsToStorageUnit _ (SURaw StorageDiskless key) = SUDiskless key
addParamsToStorageUnit _ (SURaw StorageExt key) = SUExt key
addParamsToStorageUnit _ (SURaw StorageFile key) = SUFile key
addParamsToStorageUnit _ (SURaw StorageSharedFile key) = SUSharedFile key
addParamsToStorageUnit _ (SURaw StorageGluster key) = SUGluster key
addParamsToStorageUnit es (SURaw StorageLvmPv key) = SULvmPv key es
addParamsToStorageUnit es (SURaw StorageLvmVg key) = SULvmVg key es
addParamsToStorageUnit _ (SURaw StorageRados key) = SURados key
-- | Node evac modes.
--
-- This is part of the 'IAllocator' interface and it is used, for
-- example, in 'Ganeti.HTools.Loader.RqType'. However, it must reside
-- in this module, and not in 'Ganeti.HTools.Types', because it is
-- also used by 'Ganeti.Constants'.
$(THH.declareLADT ''String "EvacMode"
  [ ("ChangePrimary", "primary-only")
  , ("ChangeSecondary", "secondary-only")
  , ("ChangeAll", "all")
  ])
$(THH.makeJSONInstance ''EvacMode)
-- | The file driver type.
$(THH.declareLADT ''String "FileDriver"
  [ ("FileLoop", "loop")
  , ("FileBlktap", "blktap")
  , ("FileBlktap2", "blktap2")
  ])
$(THH.makeJSONInstance ''FileDriver)
-- | The instance create mode.
$(THH.declareLADT ''String "InstCreateMode"
  [ ("InstCreate", "create")
  , ("InstImport", "import")
  , ("InstRemoteImport", "remote-import")
  ])
$(THH.makeJSONInstance ''InstCreateMode)
-- | Reboot type.
$(THH.declareLADT ''String "RebootType"
  [ ("RebootSoft", "soft")
  , ("RebootHard", "hard")
  , ("RebootFull", "full")
  ])
$(THH.makeJSONInstance ''RebootType)
-- | Export modes.
$(THH.declareLADT ''String "ExportMode"
  [ ("ExportModeLocal", "local")
  , ("ExportModeRemote", "remote")
  ])
$(THH.makeJSONInstance ''ExportMode)
-- | IAllocator run types (OpTestIAllocator).
$(THH.declareLADT ''String "IAllocatorTestDir"
  [ ("IAllocatorDirIn", "in")
  , ("IAllocatorDirOut", "out")
  ])
$(THH.makeJSONInstance ''IAllocatorTestDir)
-- | IAllocator mode. FIXME: use this in "HTools.Backend.IAlloc".
$(THH.declareLADT ''String "IAllocatorMode"
  [ ("IAllocatorAlloc", "allocate")
  , ("IAllocatorAllocateSecondary", "allocate-secondary")
  , ("IAllocatorMultiAlloc", "multi-allocate")
  , ("IAllocatorReloc", "relocate")
  , ("IAllocatorNodeEvac", "node-evacuate")
  , ("IAllocatorChangeGroup", "change-group")
  ])
$(THH.makeJSONInstance ''IAllocatorMode)
-- | Network mode.
$(THH.declareLADT ''String "NICMode"
  [ ("NMBridged", "bridged")
  , ("NMRouted", "routed")
  , ("NMOvs", "openvswitch")
  , ("NMPool", "pool")
  ])
$(THH.makeJSONInstance ''NICMode)
-- | The JobStatus data type. Note that this is ordered especially
-- such that greater\/lesser comparison on values of this type makes
-- sense.
$(THH.declareLADT ''String "JobStatus"
  [ ("JOB_STATUS_QUEUED", "queued")
  , ("JOB_STATUS_WAITING", "waiting")
  , ("JOB_STATUS_CANCELING", "canceling")
  , ("JOB_STATUS_RUNNING", "running")
  , ("JOB_STATUS_CANCELED", "canceled")
  , ("JOB_STATUS_SUCCESS", "success")
  , ("JOB_STATUS_ERROR", "error")
  ])
$(THH.makeJSONInstance ''JobStatus)
-- | Finalized job status.
$(THH.declareLADT ''String "FinalizedJobStatus"
  [ ("JobStatusCanceled", "canceled")
  , ("JobStatusSuccessful", "success")
  , ("JobStatusFailed", "error")
  ])
$(THH.makeJSONInstance ''FinalizedJobStatus)
-- | The Ganeti job type.
newtype JobId = JobId { fromJobId :: Int }
  deriving (Show, Eq, Ord)
-- | Builds a job ID; fails (in the given 'MonadFail') for negative
-- values.
makeJobId :: (MonadFail m) => Int -> m JobId
makeJobId i | i >= 0 = return $ JobId i
            -- Fixed a stray space in the error literal: the message used
            -- to render as e.g. "job ID ' -1'" instead of "job ID '-1'".
            | otherwise = fail $ "Invalid value for job ID '" ++ show i ++ "'"
-- | Builds a job ID from a string, via 'tryRead' and 'makeJobId'.
makeJobIdS :: (MonadFail m) => String -> m JobId
makeJobIdS s = tryRead "parsing job id" s >>= makeJobId
-- | Parses a job ID from a JSON value; accepts either a string or an
-- integral (denominator 1) rational, rejecting everything else.
parseJobId :: (MonadFail m) => JSON.JSValue -> m JobId
parseJobId (JSON.JSString x) = makeJobIdS $ JSON.fromJSString x
parseJobId (JSON.JSRational _ x) =
  if denominator x /= 1
    then fail $ "Got fractional job ID from master daemon?! Value:" ++ show x
    -- FIXME: potential integer overflow here on 32-bit platforms
    else makeJobId . fromIntegral . numerator $ x
parseJobId x = fail $ "Wrong type/value for job id: " ++ show x
instance JSON.JSON JobId where
  showJSON = JSON.showJSON . fromJobId
  readJSON = parseJobId
-- | Relative job ID type alias (negative offsets from the current job).
type RelativeJobId = Negative Int
-- | Job ID dependency.
data JobIdDep = JobDepRelative RelativeJobId
              | JobDepAbsolute JobId
                deriving (Show, Eq, Ord)
instance JSON.JSON JobIdDep where
  showJSON (JobDepRelative i) = showJSON i
  showJSON (JobDepAbsolute i) = showJSON i
  readJSON v =
    case JSON.readJSON v::JSON.Result (Negative Int) of
      -- first try relative dependency, usually most common
      JSON.Ok r -> return $ JobDepRelative r
      JSON.Error _ -> liftM JobDepAbsolute (parseJobId v)
-- | From job ID dependency and job ID, compute the absolute dependency.
-- Relative offsets are added to the given job ID; the result is
-- re-validated by 'makeJobId' (so it fails if the sum is negative).
absoluteJobIdDep :: (MonadFail m) => JobIdDep -> JobId -> m JobIdDep
absoluteJobIdDep (JobDepAbsolute jid) _ = return $ JobDepAbsolute jid
absoluteJobIdDep (JobDepRelative rjid) jid =
  liftM JobDepAbsolute . makeJobId $ fromJobId jid + fromNegative rjid
-- | Job Dependency type: a dependency target plus the finalized
-- statuses it is allowed to end in.
data JobDependency = JobDependency JobIdDep [FinalizedJobStatus]
                     deriving (Show, Eq, Ord)
instance JSON JobDependency where
  showJSON (JobDependency dep status) = showJSON (dep, status)
  readJSON = liftM (uncurry JobDependency) . readJSON
-- | From job dependency and job id compute an absolute job dependency.
-- Only the dependency target is rewritten; the status list is kept.
absoluteJobDependency :: (MonadFail m) =>
                         JobDependency -> JobId -> m JobDependency
absoluteJobDependency (JobDependency jdep fstats) jid =
  liftM (flip JobDependency fstats) $ absoluteJobIdDep jdep jid
-- | From a job dependency get the absolute job id it depends on,
-- if given absolutely.
getJobIdFromDependency :: JobDependency -> [JobId]
getJobIdFromDependency (JobDependency dep _) =
  case dep of
    JobDepAbsolute jid -> [jid]
    JobDepRelative _   -> []
-- | Valid opcode priorities for submit.
-- Integer-backed ADT; the raw values come from 'ConstantUtils'.
$(THH.declareIADT "OpSubmitPriority"
  [ ("OpPrioLow", 'ConstantUtils.priorityLow)
  , ("OpPrioNormal", 'ConstantUtils.priorityNormal)
  , ("OpPrioHigh", 'ConstantUtils.priorityHigh)
  ])
$(THH.makeJSONInstance ''OpSubmitPriority)
-- | Parse submit priorities from a string. Inverse of
-- 'fmtSubmitPriority'; anything else fails.
parseSubmitPriority :: (MonadFail m) => String -> m OpSubmitPriority
parseSubmitPriority str =
  case str of
    "low"    -> return OpPrioLow
    "normal" -> return OpPrioNormal
    "high"   -> return OpPrioHigh
    _        -> fail ("Unknown priority '" ++ str ++ "'")
-- | Format a submit priority as string. Inverse of
-- 'parseSubmitPriority'.
fmtSubmitPriority :: OpSubmitPriority -> String
fmtSubmitPriority prio =
  case prio of
    OpPrioLow    -> "low"
    OpPrioNormal -> "normal"
    OpPrioHigh   -> "high"
-- | Our ADT for the OpCode status at runtime (while in a job).
$(THH.declareLADT ''String "OpStatus"
  [ ("OP_STATUS_QUEUED", "queued")
  , ("OP_STATUS_WAITING", "waiting")
  , ("OP_STATUS_CANCELING", "canceling")
  , ("OP_STATUS_RUNNING", "running")
  , ("OP_STATUS_CANCELED", "canceled")
  , ("OP_STATUS_SUCCESS", "success")
  , ("OP_STATUS_ERROR", "error")
  ])
$(THH.makeJSONInstance ''OpStatus)
-- | Type for the job message type.
$(THH.declareLADT ''String "ELogType"
  [ ("ELogMessage", "message")
  , ("ELogMessageList", "message-list")
  , ("ELogRemoteImport", "remote-import")
  , ("ELogJqueueTest", "jqueue-test")
  , ("ELogDelayTest", "delay-test")
  ])
$(THH.makeJSONInstance ''ELogType)
-- | Type of one element of a reason trail, of form
-- @(source, reason, timestamp)@.
type ReasonElem = (String, String, Integer)
-- | Type representing a reason trail.
type ReasonTrail = [ReasonElem]
-- | The VTYPES, a mini-type system in Python.
$(THH.declareLADT ''String "VType"
  [ ("VTypeString", "string")
  , ("VTypeMaybeString", "maybe-string")
  , ("VTypeBool", "bool")
  , ("VTypeSize", "size")
  , ("VTypeInt", "int")
  , ("VTypeFloat", "float")
  ])
$(THH.makeJSONInstance ''VType)
instance THH.PyValue VType where
  showValue = THH.showValue . vTypeToRaw
-- * Node role type
-- | Raw values are the single-letter codes used in node listings.
$(THH.declareLADT ''String "NodeRole"
  [ ("NROffline", "O")
  , ("NRDrained", "D")
  , ("NRRegular", "R")
  , ("NRCandidate", "C")
  , ("NRMaster", "M")
  ])
$(THH.makeJSONInstance ''NodeRole)
-- | The description of the node role, as a human-readable string.
roleDescription :: NodeRole -> String
roleDescription role =
  case role of
    NROffline   -> "offline"
    NRDrained   -> "drained"
    NRRegular   -> "regular"
    NRCandidate -> "master candidate"
    NRMaster    -> "master"
-- * Disk types
-- | Disk access mode as seen by the instance (read-only/read-write).
$(THH.declareLADT ''String "DiskMode"
  [ ("DiskRdOnly", "ro")
  , ("DiskRdWr", "rw")
  ])
$(THH.makeJSONInstance ''DiskMode)
-- | The persistent block driver type. Currently only one type is allowed.
$(THH.declareLADT ''String "BlockDriver"
  [ ("BlockDrvManual", "manual")
  ])
$(THH.makeJSONInstance ''BlockDriver)
-- * Instance types
-- | Administrative (desired) instance state.
$(THH.declareLADT ''String "AdminState"
  [ ("AdminOffline", "offline")
  , ("AdminDown", "down")
  , ("AdminUp", "up")
  ])
$(THH.makeJSONInstance ''AdminState)
-- | Who requested the current admin state (operator vs. instance user).
$(THH.declareLADT ''String "AdminStateSource"
  [ ("AdminSource", "admin")
  , ("UserSource", "user")
  ])
$(THH.makeJSONInstance ''AdminStateSource)
instance THH.PyValue AdminStateSource where
  showValue = THH.showValue . adminStateSourceToRaw
-- * Storage field type
$(THH.declareLADT ''String "StorageField"
  [ ( "SFUsed", "used")
  , ( "SFName", "name")
  , ( "SFAllocatable", "allocatable")
  , ( "SFFree", "free")
  , ( "SFSize", "size")
  ])
$(THH.makeJSONInstance ''StorageField)
-- * Disk access protocol
$(THH.declareLADT ''String "DiskAccessMode"
  [ ( "DiskUserspace", "userspace")
  , ( "DiskKernelspace", "kernelspace")
  ])
$(THH.makeJSONInstance ''DiskAccessMode)
-- | Local disk status
--
-- Python code depends on:
--   DiskStatusOk < DiskStatusUnknown < DiskStatusFaulty
$(THH.declareILADT "LocalDiskStatus"
  [ ("DiskStatusOk", 1)
  , ("DiskStatusSync", 2)
  , ("DiskStatusUnknown", 3)
  , ("DiskStatusFaulty", 4)
  ])
-- | Human-readable name for a local disk status.
localDiskStatusName :: LocalDiskStatus -> String
localDiskStatusName status =
  case status of
    DiskStatusFaulty  -> "faulty"
    DiskStatusOk      -> "ok"
    DiskStatusSync    -> "syncing"
    DiskStatusUnknown -> "unknown"
-- | Replace disks type.
$(THH.declareLADT ''String "ReplaceDisksMode"
  [ -- Replace disks on primary
    ("ReplaceOnPrimary", "replace_on_primary")
    -- Replace disks on secondary
  , ("ReplaceOnSecondary", "replace_on_secondary")
    -- Change secondary node
  , ("ReplaceNewSecondary", "replace_new_secondary")
  , ("ReplaceAuto", "replace_auto")
  ])
$(THH.makeJSONInstance ''ReplaceDisksMode)
-- | Basic timeouts for RPC calls (values are seconds).
$(THH.declareILADT "RpcTimeout"
  [ ("Urgent", 60) -- 1 minute
  , ("Fast", 5 * 60) -- 5 minutes
  , ("Normal", 15 * 60) -- 15 minutes
  , ("Slow", 3600) -- 1 hour
  , ("FourHours", 4 * 3600) -- 4 hours
  , ("OneDay", 86400) -- 1 day
  ])
-- | Hotplug action.
$(THH.declareLADT ''String "HotplugAction"
  [ ("HAAdd", "hotadd")
  , ("HARemove", "hotremove")
  , ("HAMod", "hotmod")
  ])
$(THH.makeJSONInstance ''HotplugAction)
-- | Hotplug Device Target.
$(THH.declareLADT ''String "HotplugTarget"
  [ ("HTDisk", "disk")
  , ("HTNic", "nic")
  ])
$(THH.makeJSONInstance ''HotplugTarget)
-- | SSH key type.
$(THH.declareLADT ''String "SshKeyType"
  [ ("RSA", "rsa")
  , ("DSA", "dsa")
  , ("ECDSA", "ecdsa")
  ])
$(THH.makeJSONInstance ''SshKeyType)
-- * Private type and instances
-- | Placeholder string shown instead of a private/secret value.
redacted :: String
redacted = "<redacted>"
-- | A container for values that should be happy to be manipulated yet
-- refuses to be shown unless explicitly requested.
newtype Private a = Private { getPrivate :: a }
  deriving (Eq, Ord, Functor)
-- | Note: unlike 'Secret', 'Private' round-trips its payload through
-- JSON unredacted.
instance (Show a, JSON.JSON a) => JSON.JSON (Private a) where
  readJSON = liftM Private . JSON.readJSON
  showJSON (Private x) = JSON.showJSON x
-- | "Show" the value of the field.
--
-- It would be better not to implement this at all.
-- Alas, Show OpCode requires Show Private.
instance Show a => Show (Private a) where
  show _ = redacted
-- | Python-side representation reveals the value, wrapped in Private().
instance THH.PyValue a => THH.PyValue (Private a) where
  showValue (Private x) = "Private(" ++ THH.showValue x ++ ")"
-- | 'Private' is a trivial (Identity-like) applicative/monad over the
-- wrapped value.
instance Applicative Private where
  pure = Private
  Private f <*> Private x = Private (f x)
instance Monad Private where
  (Private x) >>= f = f x
  -- 'return' deliberately left at its default ('pure'): an explicit
  -- 'return = Private' is noncanonical and triggers GHC's
  -- -Wnoncanonical-monad-instances warning.
-- | Wraps each value of an association list in 'Private' after
-- converting it to JSON.
showPrivateJSObject :: (JSON.JSON a) =>
                       [(String, a)] -> JSON.JSObject (Private JSON.JSValue)
showPrivateJSObject value = JSON.toJSObject $ map f value
  where f (k, v) = (k, Private $ JSON.showJSON v)
-- * Secret type and instances
-- | A container for values that behaves like Private, but doesn't leak the
-- value through showJSON
newtype Secret a = Secret { getSecret :: a }
  deriving (Eq, Ord, Functor)
-- | Serialisation always emits the redacted placeholder, so secrets
-- never reach the wire; reads still decode the payload.
instance (Show a, JSON.JSON a) => JSON.JSON (Secret a) where
  readJSON = liftM Secret . JSON.readJSON
  showJSON = const . JSON.JSString $ JSON.toJSString redacted
instance Show a => Show (Secret a) where
  show _ = redacted
-- | Python-side representation reveals the value, wrapped in Secret().
instance THH.PyValue a => THH.PyValue (Secret a) where
  showValue (Secret x) = "Secret(" ++ THH.showValue x ++ ")"
-- | 'Secret' is a trivial (Identity-like) applicative/monad over the
-- wrapped value.
instance Applicative Secret where
  pure = Secret
  Secret f <*> Secret x = Secret (f x)
instance Monad Secret where
  (Secret x) >>= f = f x
  -- 'return' deliberately left at its default ('pure'): an explicit
  -- 'return = Secret' is noncanonical and triggers GHC's
  -- -Wnoncanonical-monad-instances warning.
-- | We return "\<redacted\>" here to satisfy the idempotence of serialization
-- and deserialization, although this will impact the meaningfulness of secret
-- parameters within configuration tests.
showSecretJSObject :: (JSON.JSON a) =>
                      [(String, a)] -> JSON.JSObject (Secret JSON.JSValue)
showSecretJSObject value = JSON.toJSObject $ map f value
  where f (k, _) = (k, Secret $ JSON.showJSON redacted)
-- | Unwraps 'Secret' values into 'Private' ones, making them visible
-- to JSON serialisation again.
revealValInJSObject :: JSON.JSObject (Secret JSON.JSValue)
                    -> JSON.JSObject (Private JSON.JSValue)
revealValInJSObject object = JSON.toJSObject . map f $ JSON.fromJSObject object
  where f (k, v) = (k, Private $ getSecret v)
-- | The hypervisor parameter type. This is currently a simple map,
-- without type checking on key/value pairs.
type HvParams = Container JSON.JSValue
-- | The OS parameters type. This is, and will remain, a string
-- container, since the keys are dynamically declared by the OSes, and
-- the values are always strings.
type OsParams = Container String
-- | OS parameters whose values must not be displayed.
type OsParamsPrivate = Container (Private String)
-- | Class of objects that have timestamps.
class TimeStampObject a where
  cTimeOf :: a -> ClockTime
  mTimeOf :: a -> ClockTime
-- | Class of objects that have an UUID.
class UuidObject a where
  uuidOf :: a -> String
-- | Class of objects that can be forthcoming.
class ForthcomingObject a where
  isForthcoming :: a -> Bool
-- | Class of object that have a serial number.
class SerialNoObject a where
  serialOf :: a -> Int
-- | Class of objects that have tags.
class TagsObject a where
  tagsOf :: a -> TagSet
| ganeti/ganeti | src/Ganeti/Types.hs | bsd-2-clause | 35,265 | 0 | 11 | 6,923 | 8,183 | 4,597 | 3,586 | 719 | 2 |
{-# LANGUAGE OverloadedStrings #-}
import qualified System.Environment as Env
import qualified System.Exit as Exit
import qualified System.Directory as Dir
import qualified Data.ByteString as B
import qualified Control.Monad as M
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Maybe (fromMaybe, isJust)
import Data.Monoid ((<>))
import qualified Data.DateTime as D
import qualified Geekingfrog.Constants as Constants
import qualified Geekingfrog.Parse as Parse
import qualified Geekingfrog.GhostTypes as Types --(Post, Tag, PostTag)
-- | Entry point: read the Ghost export file named on the command line,
-- parse it, and write one markdown file per post under
-- 'Constants.postsLocation'.
main :: IO ()
main = do
  filename <- getImportFile
  rawContent <- B.readFile filename
  let ghostExport = Parse.parseGhostExport rawContent
  Dir.createDirectoryIfMissing True Constants.postsLocation
  case ghostExport of
    -- Exit non-zero on a parse failure so callers can detect it
    -- (previously the error was printed and the program exited 0).
    Left parseError -> Exit.die parseError
    -- Per-item parse errors are ignored for now; the underscore makes
    -- that explicit and silences the unused-binding warning.
    Right (_parseErrors, (posts, tags, postTags)) ->
      M.mapM_ (savePost tags postTags) posts
-- | Returns the export filename given as the first command-line
-- argument, exiting with a usage message when no argument is given or
-- the file does not exist.
getImportFile :: IO String
getImportFile = do
  args <- Env.getArgs
  progName <- Env.getProgName
  if null args
    then Exit.die $ "usage: " ++ show progName ++ " <ghost-export.json>"
    else do
      let filename = head args
      fileExists <- Dir.doesFileExist filename
      if fileExists
        then return filename
        else Exit.die $ show filename ++ " not found"
-- | Writes one post to disk as a markdown file named
-- @YYYY-MM-DD-slug.md@ under 'Constants.postsLocation', with a YAML-ish
-- header (title, tags, status) followed by the post body.
savePost :: [Types.Tag] -> [Types.PostTag] -> Types.Post -> IO ()
savePost tags postTags post = do
  let (year, month, day) = D.toGregorian' $ Types.postCreatedAt post
  let ts = getTagsForPost tags postTags post
  let prefix = T.pack (show year) <> "-" <> formatDate month <> "-" <> formatDate day
  -- backticks are stripped from the slug so it is filesystem-safe
  let cleanPost = T.replace "`" "" (Types.postSlug post)
  let fileName = prefix <> "-" <> cleanPost <> ".md"
  let filePath = Constants.postsLocation ++ T.unpack fileName
  let header = T.intercalate "\n"
        [ "---"
        , "title: " <> Types.postTitle post
        , "tags: " <> T.intercalate ", " (map Types.tagSlug ts)
        -- a post with no published-at timestamp is treated as a draft
        , "status: " <> if isJust (Types.postPublishedAt post) then "published" else "draft"
        , "---"
        , "\n"
        ]
  -- fix images paths
  let content = T.replace "/content/images/" "/static/images/" (Types.postMarkdown post)
  T.writeFile filePath (header <> content)
-- | Zero-pad a date component (month/day) to at least two digits.
formatDate :: Int -> T.Text
formatDate d = T.pack (if d < 10 then '0' : show d else show d)
-- | Selects, from the full tag list, the tags attached to the given
-- post according to the post/tag join table. Tag order follows the
-- order of the input tag list.
getTagsForPost :: [Types.Tag] -> [Types.PostTag] -> Types.Post -> [Types.Tag]
getTagsForPost tags postTags post =
  [ t | t <- tags, Types.tagId t `elem` matchingIds ]
  where
    matchingIds =
      [ Types.postTagTagId pt
      | pt <- postTags
      , Types.postTagPostId pt == Types.postId post
      ]
| geekingfrog/geekingfrog.com | src/FileImport.hs | bsd-3-clause | 2,618 | 0 | 17 | 508 | 850 | 445 | 405 | 60 | 3 |
module Language.DemonL.AST where
import Data.List (intercalate, find)
import qualified Data.Map as M
import Language.DemonL.Types
-- | A typed declaration: a name together with its demonL type.
data Decl = Decl
    { declName :: String,
      declType :: Type
    } deriving (Eq, Ord)

-- | Builds a name-to-type map from a list of declarations.
-- Implemented with 'foldr' + 'M.insert', so on duplicate names the
-- /leftmost/ declaration in the list wins (it is inserted last).
declsToMap :: [Decl] -> M.Map String Type
declsToMap = foldr (\d -> M.insert (declName d) (declType d)) M.empty
-- | Rendered as @name: type@.
instance Show Decl where
  show (Decl name typ) = name ++ ": " ++ show typ
-- | A named assertion clause (used for pre- and postconditions).
data Clause a = Clause
  { clauseName :: String
  , clauseExpr :: a
  } deriving Show
-- | Procedure over untyped expressions.
type ProcedureU = Procedure Expr
-- | A procedure signature plus its require/ensure clauses,
-- parameterised by the expression representation.
data Procedure exp =
  Procedure
  {
    prcdName :: String,
    prcdArgs :: [Decl],
    prcdResult :: Type,
    prcdReq :: [Clause exp],
    prcdEns :: [Clause exp]
  } deriving Show
-- | Domain over untyped expressions.
type DomainU = Domain Expr
-- | A demonL domain: its structures, procedures and functions.
data Domain e =
  Domain
  { domStructs :: [Struct]
  , domProcs :: [Procedure e]
  , domFuncs :: [Procedure e]
  } deriving Show
-- | Looks up a procedure by name in a domain's procedure list.
findProc :: Domain e -> String -> Maybe (Procedure e)
findProc dom name = find ((== name) . prcdName) (domProcs dom)

-- | Partial variant of 'findProc'. Calls 'error' with a descriptive
-- message when the procedure is missing (previously this was an
-- irrefutable @let Just p = ...@, which crashed with an opaque
-- pattern-match failure).
findProcUnsafe :: Domain e -> String -> Procedure e
findProcUnsafe dom name =
  case findProc dom name of
    Just p  -> p
    Nothing -> error ("findProcUnsafe: no procedure named " ++ name)
-- | A structure (record) type: a name plus its field declarations.
data Struct =
  Struct
  { structName :: String
  , structDecls :: [Decl]
  } deriving Show
-- | Binary operators; relational operators are factored out into 'ROp'.
data BinOp = Add
           | Sub
           | Mul
           | Div
           | Or
           | And
           | Implies
           | ArrayIndex
           | RelOp ROp
             deriving (Ord, Eq)
-- | Relational (comparison) operators.
data ROp = Lte
         | Lt
         | Eq
         | Neq
         | Gt
         | Gte
           deriving (Ord, Eq)
-- | Unary operators; 'Old' refers to the pre-state value of an
-- expression in postconditions.
data UnOp = Not
          | Neg
          | Old
            deriving (Ord, Eq)
-- | Expression AST for demonL.
data Expr =
    Call String [Expr]
  | BinOpExpr BinOp Expr Expr
  | UnOpExpr UnOp Expr
  | Access Expr String
  | Var String
  | ResultVar
  | Cast Type Expr
  | ForAll [Decl] Expr
  | LitInt Integer
  | LitBool Bool
  | LitDouble Double
  | LitNull
    deriving (Ord, Eq)
-- | Surface syntax for binary operators.
instance Show BinOp where
  show Add = "+"
  show Sub = "-"
  show Mul = "*"
  show Div = "/"
  show Or = "or"
  show And = "and"
  show Implies = "implies"
  show (RelOp op) = show op
-- | Surface syntax for relational operators.
instance Show ROp where
  show Lte = "<="
  show Lt = "<"
  show Eq = "="
  show Neq = "/="
  show Gt = ">"
  show Gte = ">="
-- | Surface syntax for unary operators.
instance Show UnOp where
  show Old = "old"
  show Neg = "-"
  show Not = "not"
-- | Pretty-printer for expressions; binary sub-expressions are always
-- parenthesised, so output is unambiguous but noisy.
instance Show Expr where
  show (Call s args)
    = s ++ "(" ++ intercalate "," (map show args) ++ ")"
  show (BinOpExpr op e1 e2)
    = "(" ++ show e1 ++ ") " ++ show op ++ " (" ++ show e2 ++ ")"
  show (UnOpExpr op e) = show op ++ " (" ++ show e ++ ")"
  show (Access e f) = show e ++ "." ++ f
  show (Var s) = s
  show (ForAll decls e) =
    "forall: " ++ intercalate "," (map show decls) ++ " " ++ show e
  show ResultVar = "Result"
  show (Cast t e) = "{" ++ show t ++ "}" ++ show e
  show (LitInt i) = show i
  show (LitBool b) = show b
  show (LitDouble d) = show d
  show LitNull = "null"
| scottgw/demonL | Language/DemonL/AST.hs | bsd-3-clause | 2,840 | 0 | 12 | 976 | 1,057 | 567 | 490 | 109 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.SBV.SMT.SMT
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : erkokl@gmail.com
-- Stability : experimental
--
-- Abstraction of SMT solvers
-----------------------------------------------------------------------------
{-# LANGUAGE ScopedTypeVariables #-}
module Data.SBV.SMT.SMT where
import qualified Control.Exception as C
import Control.Concurrent (newEmptyMVar, takeMVar, putMVar, forkIO)
import Control.DeepSeq (NFData(..))
import Control.Monad (when, zipWithM)
import Data.Char (isSpace)
import Data.Int (Int8, Int16, Int32, Int64)
import Data.List (intercalate, isPrefixOf, isInfixOf)
import Data.Maybe (isNothing, fromJust)
import Data.Word (Word8, Word16, Word32, Word64)
import System.Directory (findExecutable)
import System.Process (readProcessWithExitCode, runInteractiveProcess, waitForProcess)
import System.Exit (ExitCode(..))
import System.IO (hClose, hFlush, hPutStr, hGetContents, hGetLine)
import Data.SBV.BitVectors.AlgReals
import Data.SBV.BitVectors.Data
import Data.SBV.BitVectors.PrettyNum
import Data.SBV.Utils.TDiff
-- | Solver configuration. See also 'z3', 'yices', 'cvc4', and 'boolector, which are instantiations of this type for those solvers, with
-- reasonable defaults. In particular, custom configuration can be created by varying those values. (Such as @z3{verbose=True}@.)
--
-- Most fields are self explanatory. The notion of precision for printing algebraic reals stems from the fact that such values do
-- not necessarily have finite decimal representations, and hence we have to stop printing at some depth. It is important to
-- emphasize that such values always have infinite precision internally. The issue is merely with how we print such an infinite
-- precision value on the screen. The field 'printRealPrec' controls the printing precision, by specifying the number of digits after
-- the decimal point. The default value is 16, but it can be set to any positive integer.
--
-- When printing, SBV will add the suffix @...@ at the end of a real-value, if the given bound is not sufficient to represent the real-value
-- exactly. Otherwise, the number will be written out in standard decimal notation. Note that SBV will always print the whole value if it
-- is precise (i.e., if it fits in a finite number of digits), regardless of the precision limit. The limit only applies if the representation
-- of the real value is not finite, i.e., if it is not rational.
data SMTConfig = SMTConfig {
         verbose        :: Bool           -- ^ Debug mode
       , timing         :: Bool           -- ^ Print timing information on how long different phases took (construction, solving, etc.)
       , timeOut        :: Maybe Int      -- ^ How much time to give to the solver. (In seconds)
       , printBase      :: Int            -- ^ Print integral literals in this base (2, 10, and 16 are supported; see 'shCW')
       , printRealPrec  :: Int            -- ^ Print algebraic real values with this precision. (SReal, default: 16)
       , solverTweaks   :: [String]       -- ^ Additional lines of script to give to the solver (user specified)
       , satCmd         :: String         -- ^ Usually "(check-sat)". However, users might tweak it based on solver characteristics.
       , smtFile        :: Maybe FilePath -- ^ If Just, the generated SMT script will be put in this file (for debugging purposes mostly)
       , useSMTLib2     :: Bool           -- ^ If True, we'll treat the solver as using SMTLib2 input format. Otherwise, SMTLib1
       , solver         :: SMTSolver      -- ^ The actual SMT solver.
       , roundingMode   :: RoundingMode   -- ^ Rounding mode to use for floating-point conversions
       }
-- | An SMT engine: turns a generated SMT script into an 'SMTResult'.
-- NOTE(review): the 'Bool' flag and the quantifier/uninterpreted/skolem
-- list arguments are not documented in this module -- confirm their
-- meaning against the individual solver implementations.
type SMTEngine = SMTConfig -> Bool -> [(Quantifier, NamedSymVar)] -> [(String, UnintKind)] -> [Either SW (SW, [SW])] -> String -> IO SMTResult
-- | An SMT solver
data SMTSolver = SMTSolver {
         name          :: String                -- ^ Printable name of the solver
       , executable    :: String                -- ^ The path to its executable
       , options       :: [String]              -- ^ Options to provide to the solver
       , engine        :: SMTEngine             -- ^ The solver engine, responsible for interpreting solver output
       , xformExitCode :: ExitCode -> ExitCode  -- ^ Should we re-interpret exit codes. Most solvers behave rationally, i.e., id will do. Some (like CVC4) don't.
       , capabilities  :: SolverCapabilities    -- ^ Various capabilities of the solver
       }
-- | A model, as returned by a solver
data SMTModel = SMTModel {
        modelAssocs    :: [(String, CW)]        -- ^ variable name/value bindings
     ,  modelArrays    :: [(String, [String])]  -- very crude!
     ,  modelUninterps :: [(String, [String])]  -- very crude!
     }
     deriving Show
-- | The result of an SMT solver call. Each constructor is tagged with
-- the 'SMTConfig' that created it so that further tools can inspect it
-- and build layers of results, if needed. For ordinary uses of the library,
-- this type should not be needed, instead use the accessor functions on
-- it. (Custom Show instances and model extractors.)
data SMTResult = Unsatisfiable SMTConfig            -- ^ Unsatisfiable
               | Satisfiable   SMTConfig SMTModel   -- ^ Satisfiable with model
               | Unknown       SMTConfig SMTModel   -- ^ Prover returned unknown, with a potential (possibly bogus) model
               | ProofError    SMTConfig [String]   -- ^ Prover errored out
               | TimeOut       SMTConfig            -- ^ Computation timed out (see the 'timeout' combinator)
-- | A script, to be passed to the solver.
data SMTScript = SMTScript {
        scriptBody  :: String        -- ^ Initial feed
      , scriptModel :: Maybe String  -- ^ Optional continuation script, if the result is sat
      }
-- | Extract the final configuration from a result
resultConfig :: SMTResult -> SMTConfig
resultConfig res = case res of
  Unsatisfiable c -> c
  Satisfiable c _ -> c
  Unknown c _     -> c
  ProofError c _  -> c
  TimeOut c       -> c
-- Force any embedded model or error payload; the config is left alone.
instance NFData SMTResult where
  rnf res = case res of
    Unsatisfiable _ -> ()
    Satisfiable _ m -> rnf m  `seq` ()
    Unknown _ m     -> rnf m  `seq` ()
    ProofError _ ls -> rnf ls `seq` ()
    TimeOut _       -> ()

-- Force all three association lists inside a model.
instance NFData SMTModel where
  rnf (SMTModel as ars uis) = rnf as `seq` rnf ars `seq` rnf uis `seq` ()
-- | A 'prove' call results in a 'ThmResult'
newtype ThmResult = ThmResult SMTResult
-- | A 'sat' call results in a 'SatResult'
-- The reason for having a separate 'SatResult' is to have a more meaningful 'Show' instance.
newtype SatResult = SatResult SMTResult
-- | An 'allSat' call results in a 'AllSatResult'. The boolean says whether
-- we should warn the user about prefix-existentials.
newtype AllSatResult = AllSatResult (Bool, [SMTResult])
-- | Render a proof attempt; counter-examples are shown when a model is present.
instance Show ThmResult where
  show (ThmResult res) = showSMTResult okMsg unkMsg unkModelMsg cexMsg cexModelMsg res
    where okMsg       = "Q.E.D."
          unkMsg      = "Unknown"
          unkModelMsg = "Unknown. Potential counter-example:\n"
          cexMsg      = "Falsifiable"
          cexModelMsg = "Falsifiable. Counter-example:\n"

-- | Render a satisfiability check; models are shown when present.
instance Show SatResult where
  show (SatResult res) = showSMTResult unsatMsg unkMsg unkModelMsg satMsg satModelMsg res
    where unsatMsg    = "Unsatisfiable"
          unkMsg      = "Unknown"
          unkModelMsg = "Unknown. Potential model:\n"
          satMsg      = "Satisfiable"
          satModelMsg = "Satisfiable. Model:\n"
-- NB. The Show instance of AllSatResults have to be careful in being lazy enough
-- as the typical use case is to pull results out as they become available.
instance Show AllSatResult where
  show (AllSatResult (e, xs)) = go (0::Int) xs
    -- 'go' renders results one at a time, stopping at the first
    -- non-satisfiable entry; the counter is forced with `seq` so it does
    -- not build up thunks while streaming.
    where uniqueWarn | e    = " (Unique up to prefix existentials.)"
                     | True = ""
          go c (s:ss) = let c'      = c+1
                            (ok, o) = sh c' s
                        in c' `seq` if ok then o ++ "\n" ++ go c' ss else o
          go c []     = case c of
                          0 -> "No solutions found."
                          1 -> "This is the only solution." ++ uniqueWarn
                          _ -> "Found " ++ show c ++ " different solutions." ++ uniqueWarn
          -- Render one result tagged with its solution number; 'ok' tells
          -- the caller whether to keep going (only for Satisfiable).
          sh i c = (ok, showSMTResult "Unsatisfiable"
                                      "Unknown" "Unknown. Potential model:\n"
                                      ("Solution #" ++ show i ++ ":\n[Backend solver returned no assignment to variables.]") ("Solution #" ++ show i ++ ":\n") c)
              where ok = case c of
                           Satisfiable{} -> True
                           _             -> False
-- | Instances of 'SatModel' can be automatically extracted from models returned by the
-- solvers. The idea is that the sbv infrastructure provides a stream of 'CW''s (constant-words)
-- coming from the solver, and the type @a@ is interpreted based on these constants. Many typical
-- instances are already provided, so new instances can be declared with relative ease.
--
-- Minimum complete definition: 'parseCWs'
class SatModel a where
  -- | Given a sequence of constant-words, extract one instance of the type @a@, returning
  -- the remaining elements untouched. If the next element is not what's expected for this
  -- type you should return 'Nothing'
  parseCWs  :: [CW] -> Maybe (a, [CW])
  -- | Given a parsed model instance, transform it using @f@, and return the result.
  -- The default definition for this method should be sufficient in most use cases.
  cvtModel  :: (a -> Maybe b) -> Maybe (a, [CW]) -> Maybe (b, [CW])
  -- Default: parse, then refine with @f@, short-circuiting on 'Nothing'.
  cvtModel f x = x >>= \(a, r) -> f a >>= \b -> return (b, r)
-- | Parse a signed/sized value from a sequence of CWs: succeeds only when
-- the next constant-word is an integer literal of exactly the given kind.
genParse :: Integral a => Kind -> [CW] -> Maybe (a, [CW])
genParse k cws = case cws of
  (c@(CW _ (CWInteger v)) : rest) | kindOf c == k -> Just (fromIntegral v, rest)
  _                                               -> Nothing
-- Base case: consume no constant-words; handy when there are no real variables.
instance SatModel () where
  parseCWs cws = Just ((), cws)

-- Booleans come back as 1-bit bounded values; nonzero means True.
instance SatModel Bool where
  parseCWs cws = do (v, rest) <- genParse (KBounded False 1) cws
                    return (v /= (0 :: Integer), rest)
-- Fixed-size machine types: each parses one 'CW' of exactly the matching kind.
instance SatModel Word8 where
  parseCWs = genParse (KBounded False 8)
instance SatModel Int8 where
  parseCWs = genParse (KBounded True 8)
instance SatModel Word16 where
  parseCWs = genParse (KBounded False 16)
instance SatModel Int16 where
  parseCWs = genParse (KBounded True 16)
instance SatModel Word32 where
  parseCWs = genParse (KBounded False 32)
instance SatModel Int32 where
  parseCWs = genParse (KBounded True 32)
instance SatModel Word64 where
  parseCWs = genParse (KBounded False 64)
instance SatModel Int64 where
  parseCWs = genParse (KBounded True 64)
instance SatModel Integer where
  parseCWs = genParse KUnbounded
-- Algebraic reals have their own constant representation; match 'KReal' directly.
instance SatModel AlgReal where
  parseCWs (CW KReal (CWAlgReal i) : r) = Just (i, r)
  parseCWs _                            = Nothing
-- Raw constant-words: always take the next one, if any.
instance SatModel CW where
  parseCWs (cw : r) = Just (cw, r)
  parseCWs []       = Nothing
-- when reading a list; go as long as we can (maximal-munch)
-- note that this never fails..
instance SatModel a => SatModel [a] where
  parseCWs [] = Just ([], [])
  -- The outer call recurses at list type; the inner 'Nothing' branch is
  -- unreachable since the list instance always succeeds.
  -- NOTE(review): if it were reachable it would drop the already-parsed
  -- element @a@ (returns @Just ([], ys)@ rather than @Just ([a], ys)@).
  parseCWs xs = case parseCWs xs of
                  Just (a, ys) -> case parseCWs ys of
                                    Just (as, zs) -> Just (a:as, zs)
                                    Nothing       -> Just ([], ys)
                  Nothing      -> Just ([], xs)
-- Tuple instances: parse components left-to-right, threading the leftover
-- constant-words; each arity reuses the next-smaller tuple instance.
instance (SatModel a, SatModel b) => SatModel (a, b) where
  parseCWs as = do (a, bs) <- parseCWs as
                   (b, cs) <- parseCWs bs
                   return ((a, b), cs)
instance (SatModel a, SatModel b, SatModel c) => SatModel (a, b, c) where
  parseCWs as = do (a, bs)      <- parseCWs as
                   ((b, c), ds) <- parseCWs bs
                   return ((a, b, c), ds)
instance (SatModel a, SatModel b, SatModel c, SatModel d) => SatModel (a, b, c, d) where
  parseCWs as = do (a, bs)         <- parseCWs as
                   ((b, c, d), es) <- parseCWs bs
                   return ((a, b, c, d), es)
instance (SatModel a, SatModel b, SatModel c, SatModel d, SatModel e) => SatModel (a, b, c, d, e) where
  parseCWs as = do (a, bs)            <- parseCWs as
                   ((b, c, d, e), fs) <- parseCWs bs
                   return ((a, b, c, d, e), fs)
instance (SatModel a, SatModel b, SatModel c, SatModel d, SatModel e, SatModel f) => SatModel (a, b, c, d, e, f) where
  parseCWs as = do (a, bs)               <- parseCWs as
                   ((b, c, d, e, f), gs) <- parseCWs bs
                   return ((a, b, c, d, e, f), gs)
instance (SatModel a, SatModel b, SatModel c, SatModel d, SatModel e, SatModel f, SatModel g) => SatModel (a, b, c, d, e, f, g) where
  parseCWs as = do (a, bs)                  <- parseCWs as
                   ((b, c, d, e, f, g), hs) <- parseCWs bs
                   return ((a, b, c, d, e, f, g), hs)
-- | Various SMT results that we can extract models out of.
class Modelable a where
  -- | Is there a model?
  modelExists :: a -> Bool
  -- | Extract a model, the result is a tuple where the first argument (if True)
  -- indicates whether the model was "probable". (i.e., if the solver returned unknown.)
  getModel :: SatModel b => a -> Either String (Bool, b)
  -- | A simpler variant of 'getModel' to get a model out without the fuss.
  extractModel :: SatModel b => a -> Maybe b
  extractModel a = case getModel a of
                     Right (_, b) -> Just b
                     _            -> Nothing
-- | Return all the models from an 'allSat' call, similar to 'extractModel' but
-- is suitable for the case of multiple results.  Results whose extraction
-- fails ('Left') are silently dropped.
extractModels :: SatModel a => AllSatResult -> [a]
extractModels (AllSatResult (_, xs)) = [ms | Right (_, ms) <- map getModel xs]
-- Both newtype wrappers simply delegate to the underlying 'SMTResult'.
instance Modelable ThmResult where
  getModel    (ThmResult res) = getModel    res
  modelExists (ThmResult res) = modelExists res

instance Modelable SatResult where
  getModel    (SatResult res) = getModel    res
  modelExists (SatResult res) = modelExists res
instance Modelable SMTResult where
  getModel (Unsatisfiable _) = Left "SBV.getModel: Unsatisfiable result"
  -- Unknown still carries a candidate model; True flags it as merely "probable".
  getModel (Unknown _ m)     = Right (True, parseModelOut m)
  -- NOTE(review): unlike the other failure cases, this calls 'error'
  -- instead of returning 'Left'; confirm that callers rely on the exception.
  getModel (ProofError _ s)  = error $ unlines $ "Backend solver complains: " : s
  getModel (TimeOut _)       = Left "Timeout"
  getModel (Satisfiable _ m) = Right (False, parseModelOut m)
  modelExists (Satisfiable{}) = True
  modelExists (Unknown{})     = False -- don't risk it
  modelExists _               = False
-- | Extract a model out, will throw error if parsing is unsuccessful
-- (either leftover constant-words or an outright parse failure).
parseModelOut :: SatModel a => SMTModel -> a
parseModelOut mdl =
  case parseCWs (map snd (modelAssocs mdl)) of
    Just (v, [])   -> v
    Just (_, rest) -> error $ "SBV.getModel: Partially constructed model; remaining elements: " ++ show rest
    Nothing        -> error $ "SBV.getModel: Cannot construct a model from: " ++ show mdl
-- | Given an 'allSat' call, iterate over the results calling 'disp' on each
-- in sequence. The 'Int' passed to 'disp' is the 1-based model number; the
-- 'Bool' in the pair indicates whether the model is merely alleged (i.e.,
-- the solver returned unknown). Returns how many models were displayed.
displayModels :: SatModel a => (Int -> (Bool, a) -> IO ()) -> AllSatResult -> IO Int
displayModels disp (AllSatResult (_, results)) = do
        shown <- zipWithM dispOne models [(1::Int)..]
        return $ last (0:shown)
  where models      = [a | Right a <- map (getModel . SatResult) results]
        dispOne r i = disp i r >> return i
-- | Show an SMTResult; generic version.  The five message arguments cover:
-- unsat, unknown-without-model, unknown-with-model, sat-without-model, and
-- sat-with-model; an empty 'SMTModel' counts as "without model".
showSMTResult :: String -> String -> String -> String -> String -> SMTResult -> String
showSMTResult unsatMsg unkMsg unkMsgModel satMsg satMsgModel result = render result
  where cfg = resultConfig result
        isEmpty (SMTModel [] [] []) = True
        isEmpty _                   = False
        render (Unsatisfiable _) = unsatMsg
        render (Satisfiable _ m)
          | isEmpty m = satMsg
          | True      = satMsgModel ++ showModel cfg m
        render (Unknown _ m)
          | isEmpty m = unkMsg
          | True      = unkMsgModel ++ showModel cfg m
        render (ProofError _ []) = "*** An error occurred. No additional information available. Try running in verbose mode"
        render (ProofError _ ls) = "*** An error occurred.\n" ++ intercalate "\n" (map ("*** " ++) ls)
        render (TimeOut _)       = "*** Timeout"
-- | Show a model in human readable form: variable bindings first, then
-- uninterpreted functions, then uninterpreted arrays.
showModel :: SMTConfig -> SMTModel -> String
showModel cfg mdl = intercalate "\n" rendered
  where rendered = map binding (modelAssocs mdl)
                ++ concatMap shUI (modelUninterps mdl)
                ++ concatMap shUA (modelArrays mdl)
        binding (nm, val) = "  " ++ nm ++ " = " ++ shCW cfg val
-- | Show a constant value, in the user-specified base (2, 10, or 16;
-- anything else falls back to decimal with a warning appended).
shCW :: SMTConfig -> CW -> String
shCW cfg = render (printBase cfg)
  where render 2  = binS
        render 10 = show
        render 16 = hexS
        render b  = \w -> show w ++ " -- Ignoring unsupported printBase " ++ show b ++ ", use 2, 10, or 16."
-- | Print uninterpreted function values from models. Very, very crude..
shUI :: (String, [String]) -> [String]
shUI (flong, cases) = ("  -- uninterpreted: " ++ nm) : map indent cases
  where -- Strip everything up to and including the first '_' in the name.
        nm = case dropWhile (/= '_') flong of
               ""       -> flong
               (_:rest) -> rest
        indent s = "       " ++ s
-- | Print uninterpreted array values from models. Very, very crude..
shUA :: (String, [String]) -> [String]
shUA (f, cases) = ("  -- array: " ++ f) : ["       " ++ c | c <- cases]
-- | Helper function to spin off to an SMT solver.  Locates the executable,
-- runs it over the script, and classifies the outcome: 'Right' with the
-- cleaned output lines on success, or 'Left' with a multi-line diagnostic.
pipeProcess :: SMTConfig -> String -> String -> [String] -> SMTScript -> (String -> String) -> IO (Either String [String])
pipeProcess cfg nm execName opts script cleanErrs = do
        mbExecPath <- findExecutable execName
        case mbExecPath of
          Nothing       -> return $ Left $ "Unable to locate executable for " ++ nm
                                         ++ "\nExecutable specified: " ++ show execName
          Just execPath -> do (ec, contents, allErrors) <- runSolver cfg execPath opts script
                              let errors = dropWhile isSpace (cleanErrs allErrors)
                              -- Success requires both an empty (cleaned) stderr and a
                              -- successful (possibly re-interpreted) exit code.
                              case (null errors, xformExitCode (solver cfg) ec) of
                                (True, ExitSuccess) -> return $ Right $ map clean (filter (not . null) (lines contents))
                                (_, ec')            -> -- Prefer stderr as the error text; fall back to stdout,
                                                       -- and finally to a canned message if both are blank.
                                                       let errors' = if null errors
                                                                     then (if null (dropWhile isSpace contents)
                                                                           then "(No error message printed on stderr by the executable.)"
                                                                           else contents)
                                                                     else errors
                                                           -- Report the re-interpreted exit code if it failed,
                                                           -- else the raw one; 0 can happen if ExitSuccess but
                                                           -- there is output on stderr.
                                                           finalEC = case (ec', ec) of
                                                                       (ExitFailure n, _) -> n
                                                                       (_, ExitFailure n) -> n
                                                                       _                  -> 0
                                                       in return $ Left $ "Failed to complete the call to " ++ nm
                                                                        ++ "\nExecutable : " ++ show execPath
                                                                        ++ "\nOptions : " ++ unwords opts
                                                                        ++ "\nExit code : " ++ show finalEC
                                                                        ++ "\nSolver output: "
                                                                        ++ "\n" ++ line ++ "\n"
                                                                        ++ intercalate "\n" (filter (not . null) (lines errors'))
                                                                        ++ "\n" ++ line
                                                                        ++ "\nGiving up.."
  where -- Trim leading and trailing whitespace from a line.
        clean = reverse . dropWhile isSpace . reverse . dropWhile isSpace
        line  = replicate 78 '='
-- | A standard solver interface. If the solver is SMT-Lib compliant, then this function should suffice in
-- communicating with it.  Optionally dumps the script to 'smtFile', then
-- pipes it to the solver and feeds the (line-merged) output to 'success',
-- or the error lines to 'failure'.
standardSolver :: SMTConfig -> SMTScript -> (String -> String) -> ([String] -> a) -> ([String] -> a) -> IO a
standardSolver config script cleanErrs failure success = do
    let msg      = when (verbose config) . putStrLn . ("** " ++)
        smtSolver= solver config
        exec     = executable smtSolver
        opts     = options smtSolver
        isTiming = timing config
        nmSolver = name smtSolver
    msg $ "Calling: " ++ show (unwords (exec:opts))
    -- Debug aid: save the generated script if the user asked for it.
    case smtFile config of
      Nothing -> return ()
      Just f  -> do putStrLn $ "** Saving the generated script in file: " ++ show f
                    writeFile f (scriptBody script)
    contents <- timeIf isTiming nmSolver $ pipeProcess config nmSolver exec opts script cleanErrs
    msg $ nmSolver ++ " output:\n" ++ either id (intercalate "\n") contents
    case contents of
      Left e   -> return $ failure (lines e)
      Right xs -> return $ success (mergeSExpr xs)
-- | A variant of 'readProcessWithExitCode'; except it knows about continuation strings
-- and can speak SMT-Lib2 (just a little).
runSolver :: SMTConfig -> FilePath -> [String] -> SMTScript -> IO (ExitCode, String, String)
runSolver cfg execPath opts script
 -- No model-extraction continuation: a one-shot batch invocation suffices.
 | isNothing $ scriptModel script
 = let checkCmd | useSMTLib2 cfg = '\n' : satCmd cfg
                | True           = ""
   in readProcessWithExitCode execPath opts (scriptBody script ++ checkCmd)
 -- Otherwise talk to the solver interactively: feed the script, issue the
 -- sat command, and on sat/unknown send the model-extraction commands too.
 | True
 = do (send, ask, cleanUp) <- do
                (inh, outh, errh, pid) <- runInteractiveProcess execPath opts Nothing Nothing
                let send l = hPutStr inh (l ++ "\n") >> hFlush inh
                    -- A failed read (e.g. solver died) is treated as empty output.
                    recv   = hGetLine outh `C.catch` (\(_ :: C.SomeException) -> return "")
                    ask l  = send l >> recv
                    -- Drain stdout/stderr in separate threads (to avoid deadlock
                    -- on full pipes), close handles, and reap the process.
                    cleanUp r = do outMVar <- newEmptyMVar
                                   out <- hGetContents outh
                                   _ <- forkIO $ C.evaluate (length out) >> putMVar outMVar ()
                                   err <- hGetContents errh
                                   _ <- forkIO $ C.evaluate (length err) >> putMVar outMVar ()
                                   hClose inh
                                   takeMVar outMVar
                                   takeMVar outMVar
                                   hClose outh
                                   hClose errh
                                   ex <- waitForProcess pid
                                   -- if the status is unknown, prepare for the possibility of not having a model
                                   -- TBD: This is rather crude and potentially Z3 specific
                                   return $ if "unknown" `isPrefixOf` r && "error" `isInfixOf` (out ++ err)
                                            then (ExitSuccess, r, "")
                                            else (ex, r ++ "\n" ++ out, err)
                return (send, ask, cleanUp)
      mapM_ send (lines (scriptBody script))
      r <- ask $ satCmd cfg
      when (any (`isPrefixOf` r) ["sat", "unknown"]) $ do
        let mls = lines (fromJust (scriptModel script))
        when (verbose cfg) $ do putStrLn "** Sending the following model extraction commands:"
                                mapM_ putStrLn mls
        mapM_ send mls
      cleanUp r
-- | In case the SMT-Lib solver returns a response over multiple lines,
-- compress them so each S-expression spans only a single line (joined with
-- spaces). Parentheses inside quotes are not handled specially, as that
-- should not be an issue in practice.
mergeSExpr :: [String] -> [String]
mergeSExpr [] = []
mergeSExpr (ln:lns)
  | balance == 0 = ln : mergeSExpr lns                       -- already complete
  | True         = let (body, rest) = collect balance lns    -- absorb lines until balanced
                   in unwords (ln:body) : mergeSExpr rest
  where balance = parenDiff ln
        -- Net count of '(' minus ')' in a line, accumulated strictly.
        parenDiff :: String -> Int
        parenDiff = cnt 0
          where cnt i ""       = i
                cnt i ('(':cs) = let i' = i+1 in i' `seq` cnt i' cs
                cnt i (')':cs) = let i' = i-1 in i' `seq` cnt i' cs
                cnt i (_  :cs) = cnt i cs
        -- Take lines until the running paren balance drops to zero (or input runs out).
        collect i ls
          | i <= 0 = ([], ls)
        collect _ []     = ([], [])
        collect i (l:ls) = let (a, b) = collect (i + parenDiff l) ls in (l:a, b)
| dylanmc/cryptol | sbv/Data/SBV/SMT/SMT.hs | bsd-3-clause | 24,941 | 0 | 33 | 8,262 | 5,878 | 3,117 | 2,761 | 325 | 8 |
{-# LANGUAGE NoImplicitPrelude #-}
-- |
-- Module: $HEADER$
-- Description: All test cases aggregated and exported as tests :: [Test].
-- Copyright: (c) 2013, 2014 Peter Trsko
-- License: BSD3
--
-- Maintainer: peter.trsko@gmail.com
-- Stability: stable
-- Portability: NoImplicitPrelude
--
-- All test cases aggregated and exported as @tests :: ['Test']@.
module TestCase (tests)
where
import Test.Framework (Test, testGroup)
import qualified TestCase.Data.PkgVersion.Internal.RpmVerCmp as RpmVerCmp
(tests)
-- | All test cases aggregated.
tests :: [Test]
tests =
    [ testGroup "Data.PkgVersion.Internal.RpmVerCmp" RpmVerCmp.tests
      -- BUG FIX: previously referenced 'LockFile.tests', a module that is
      -- never imported here; the import list brings in 'RpmVerCmp.tests'.
    ]
| trskop/pkg-version | test/TestCase.hs | bsd-3-clause | 638 | 0 | 7 | 116 | 73 | 51 | 22 | 8 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
import Prelude hiding ((**))
import Data.Monoid.Average
import Data.Ord (comparing)
import Music.Prelude hiding (elements, unit, (**), Note)
-- import Data.VectorSpace hiding (Sum)
import Music.Time (Note)
import Test.Tasty
import Test.Tasty.QuickCheck
import Test.QuickCheck
import Data.Typeable
import Data.Maybe
import Data.Semigroup
import Control.Monad
import Control.Applicative
import Control.Comonad
import qualified Data.Set as Set
import qualified Data.Map as Map
import qualified Data.List
-- | Constraints needed to QuickCheck a type.
type Checkable a = (Eq a, Show a, Arbitrary a)

-- | Associativity law for 'Semigroup'; the (unused-value) argument fixes
-- the type being tested via '.:'.
_Semigroup :: (Checkable a, Semigroup a) => a -> Property
_Semigroup t = property assoc
  where
    -- '.:' binds tighter than '<>', so only @c@ is annotated; unification
    -- then fixes the whole property at type @a@.
    assoc a b c = (a <> b) <> c === a <> (b <> c .: t)
-- | Laws for 'Monoid': left identity, right identity, and associativity.
_Monoid :: (Checkable a, Semigroup a, Monoid a) => a -> Property
_Monoid t = idL .&&. idR .&&. assoc
  where
    idL m = m <> mempty === m .: t
    idR m = mempty <> m === m .: t
    assoc a b c = (a <> b) <> c === a <> (b <> c .: t)
{-
prop_functor :: (Functor f, Eq (f ()), Arbitrary (f ()), Show (f ()), Eq (f c), Arbitrary (f a), Show (f a)) => f () -> (b -> c) -> (a -> b) -> Property
prop_functor typ f g = fc .&&. fi
where
fi x = x == (sameType typ x)
fc x = (fmap f . fmap g) x == (fmap (f . g) $ sameType1 typ x)
-- prop_applicative typ
unit :: Applicative f => f ()
unit = pure ()
(**) :: Applicative f => f a -> f b -> f (a, b)
a ** b = liftA2 (,) a b
-- prop_app typ = appLId .&&. appRId .&&. appAssoc
-- where
-- appLId v = property $ unit ** v == fmap ((),) (sameType typ v)
-- appRId u = property $ u ** unit == fmap (,()) (sameType typ u)
-- appAssoc u v w = property $ u ** (v ** w) == (fmap unass $ (u ** v) ** sameType typ w)
appLId :: (Eq (f ((), b)), Applicative f) => f b -> Property
appLId v = property $ unit ** v == fmap ((),) v
appRId :: (Eq (f (a, ())), Applicative f) => f a -> Property
appRId u = property $ u ** unit == fmap (,()) u
appAssoc :: (Eq (f (a, (b, c))), Applicative f) => f a -> f b -> f c -> Property
appAssoc u v w = property $ u ** (v ** w) == (fmap unass $ (u ** v) ** w)
unass :: ((a, b), c) -> (a, (b, c))
unass = \((a, b), c) -> (a, (b, c))
-}
{-
transform mempty = id
transform (s <> t) = transform s . transform t
transform (s <> negateV s) = id
-}
-- | Laws for 'Transformable': identity, composition (restricted to forward
-- spans), and inverse cancellation.
_Transformable :: (Checkable a, Transformable a) => a -> Property
_Transformable t = te .&&. tc .&&. tn
  where
    te x = True ==> transform mempty x === x .: t
    -- BUG FIX: the second span parameter was previously also named @t@,
    -- shadowing the type witness; as a result @x .: t@ pinned @x@ to Span
    -- and the composition law was never tested at the intended type @a@.
    tc s u x = isForwardSpan s && isForwardSpan u ==> transform (s <> u) x === transform s (transform u $ x .: t)
    tn s x = isForwardSpan s ==> transform (s <> negateV s) x === x .: t
{-
Duration vs. onset and offset
_duration x = (offset x .-. onset x)
Transform vs. onset and offset
_onset (delay n a) = n ^+. _onset a
_offset (delay n a) = n ^+. _offset a
_duration (stretch n a) = n ^* _duration a
More generally?
(s `transform` a) `_position` p = s `transform` (a `_position` p)
-}
-- | Law for 'HasDuration': stretching scales duration linearly (checked
-- for non-negative factors only; the stricter @n /= 0@ variant is kept
-- below, commented out).
_HasDuration :: (Checkable a, Transformable a, HasDuration a) => a -> Property
_HasDuration t = property cd
  where
    -- cd n a = n /= 0 ==> _duration (stretch (n) (a .: t)) === (n) * _duration a
    cd n a = n >= 0 ==> _duration (stretch (n) (a .: t)) === (n) * _duration a
-- | Laws for 'HasPosition': duration is offset minus onset, delay shifts
-- both endpoints, stretch scales duration, and taking a position commutes
-- with transformation.
_HasPosition :: (Checkable a, Transformable a, HasPosition a) => a -> Property
_HasPosition t = eqd .&&. ond .&&. ofd .&&. sd .&&. ass
  where
    eqd a = True ==> _duration a === _offset a .-. _onset (a .: t)
    ond n a = n /= 0 ==> _onset (delay n $ a .: t) === _onset a .+^ n
    ofd n a = n /= 0 ==> _offset (delay n $ a .: t) === _offset a .+^ n
    sd n a = n /= 0 ==> _duration (stretch n $ a .: t) === n * _duration a
    ass s a p = True ==> (s `transform` (a .: t)) `_position` p === s `transform` (a `_position` p)
    -- TODO more general
{-
_duration (beginning t x) + _duration (ending t x) = _duration x
_duration (beginning t x) = t `min` _duration x
iff t >= 0
-}
-- | Laws for 'Splittable': splitting preserves total duration.  The
-- 'minBegin' conjunct is currently a trivially-true placeholder.
_Splittable :: (Checkable a, Transformable a, Splittable a, HasDuration a) => a -> Property
_Splittable t = sameDur .&&. minBegin
  where
    -- BUG FIX: the split-point parameter was previously named @t@,
    -- shadowing the type witness; @a .: t@ then pinned @a@ to Duration and
    -- the law was never tested at the intended type.
    sameDur d a = True ==> _duration (beginning d a) ^+^ _duration (ending d a) === _duration (a .: t)
    minBegin = True ==> 1 === (1::Int)
    -- ond n a = n /= 0 ==> _onset (delay n $ a .: t) === _onset a .+^ n
    -- ofd n a = n /= 0 ==> _offset (delay n $ a .: t) === _offset a .+^ n
    -- sd n a = n /= 0 ==> _duration (stretch n $ a .: t) === n * _duration a
    -- cd n a = n /= 0 ==> _duration (stretch (1/n) $ a .: t) === (1/n) * _duration a
    -- TODO more general
-- | A deliberately law-breaking Monoid ('mappend' always yields 'BM', so
-- 'BM' is not an identity for 'BM2'); used to check that the law-testing
-- combinators can detect failures.
data BadMonoid = BM | BM2
  deriving (Eq, Ord, Show, Typeable)
instance Monoid BadMonoid where
  mempty = BM
  _ `mappend` _ = BM
instance Semigroup BadMonoid where
  (<>) = mappend
instance Arbitrary BadMonoid where
  arbitrary = elements [BM, BM2]
-- | A deliberately lawless Functor/Applicative ('fmap' swaps the
-- constructors, so @fmap id /= id@); used to check that law tests fail.
data BadFunctor a = BF1 | BF2
  deriving (Eq, Ord, Show, Typeable)
instance Functor BadFunctor where
  fmap f BF1 = BF2 -- lawless
  fmap f BF2 = BF1
instance Applicative BadFunctor where
  pure _ = BF1
  f <*> BF1 = BF2
  f <*> BF2 = BF1
instance Arbitrary (BadFunctor a) where
  arbitrary = elements [BF2, BF1]
-- | Return the second argument, unifying its type with the first
-- (used to pin property arguments to a witness type).
sameType :: a -> a -> a
sameType _ v = v

infixl 9 .:
-- | Annotate a value with the type of a witness; a tight-binding
-- spelling of 'asTypeOf'.
v .: witness = v `asTypeOf` witness

-- | Like 'sameType', but unifies only the outer type constructor.
sameType1 :: f a -> f b -> f b
sameType1 _ v = v
-- Generators for the core time types: built from random 'Double's via
-- 'realToFrac', so they exercise non-trivial rational values.
instance Arbitrary Time where
  arbitrary = fmap toTime (arbitrary::Gen Double)
    where
      toTime :: Real a => a -> Time
      toTime = realToFrac
instance Arbitrary Duration where
  arbitrary = fmap toDuration (arbitrary::Gen Double)
    where
      toDuration :: Real a => a -> Duration
      toDuration = realToFrac
instance Arbitrary Span where
  arbitrary = liftA2 (<->) arbitrary arbitrary
-- Container/wrapper generators: lift the element generator through the view.
instance Arbitrary a => Arbitrary (Placed a) where
  arbitrary = fmap (view placed) arbitrary
instance Arbitrary a => Arbitrary (Note a) where
  arbitrary = fmap (view note) arbitrary
instance Arbitrary a => Arbitrary (Event a) where
  arbitrary = fmap (view event) arbitrary
instance Arbitrary a => Arbitrary (AddMeta a) where
  arbitrary = fmap pure arbitrary
instance (Ord a, Arbitrary a) => Arbitrary (Set.Set a) where
  arbitrary = fmap Set.fromList arbitrary
instance (Ord k, Arbitrary k, Ord a, Arbitrary a) => Arbitrary (Map.Map k a) where
  arbitrary = fmap Map.fromList $ liftA2 zip arbitrary arbitrary
instance Arbitrary a => Arbitrary (Voice a) where
  arbitrary = fmap (view voice) arbitrary
-- instance Arbitrary a => Arbitrary (Chord a) where
--   arbitrary = fmap (view chord) arbitrary
instance Arbitrary a => Arbitrary (Score a) where
  arbitrary = fmap (view score) arbitrary
instance Arbitrary a => Arbitrary (Track a) where
  arbitrary = fmap (view track) arbitrary
-- instance Arbitrary a => Arbitrary (Reactive a) where
--   arbitrary = liftA2 zip arbitrary arbitrary
instance Arbitrary a => Arbitrary (Sum a) where
  arbitrary = fmap Sum arbitrary
instance Arbitrary a => Arbitrary (Product a) where
  arbitrary = fmap Product arbitrary
instance Arbitrary a => Arbitrary (Average a) where
  arbitrary = fmap Average arbitrary
-- TODO move
-- Pointwise Semigroup/Monoid instances, lifted through Applicative.
instance Semigroup a => Semigroup (Placed a) where
  (<>) = liftA2 (<>)
instance Monoid a => Monoid (Placed a) where
  mempty = pure mempty
  mappend = liftA2 mappend
instance Semigroup a => Semigroup (Note a) where
  (<>) = liftA2 (<>)
instance Monoid a => Monoid (Note a) where
  mempty = pure mempty
  mappend = liftA2 mappend
instance Semigroup a => Semigroup (Event a) where
  (<>) = liftA2 (<>)
instance Monoid a => Monoid (Event a) where
  mempty = pure mempty
  mappend = liftA2 mappend
-- instance Ord a => Ord (Event a) where
--   x `compare` y = (x^.from note) `compare` (y^.from note)
-- Score equality: compare event lists sorted by era, so ordering of
-- insertion does not matter.
instance Eq a => Eq (Score a) where
  x == y = Data.List.sortBy (comparing (^.era)) (x^.events) == Data.List.sortBy (comparing (^.era)) (y^.events)
-- NOTE(review): 'split' for Integer duplicates the value rather than
-- dividing it; presumably adequate for the laws exercised here -- confirm.
instance Splittable Integer where
  split _ x = (x,x)
instance (Transformable a, HasPosition a, Splittable a) => Splittable [a] where
  split t = unzipR . fmap (split t)
-- Unzip a functor of pairs.
unzipR f = (fmap fst f, fmap snd f)
-- main = quickCheck $ \() () -> True
-- Test-definition macros.  These rely on GHC's traditional CPP behavior,
-- where macro arguments are substituted even inside string literals.
-- NOTE(review): A_TEST appears to be unused in this file.
#define A_TEST(EXPR) (testProperty "EXPR" $ EXPR)
#define I_TEST(CLASS,TYPE) (testProperty "instance CLASS TYPE" $ (CLASS (undefined::TYPE)))
-- Run every law suite against every instance under test; 'undefined' is
-- safe here because the witness argument is used only for its type.
main = defaultMain $ testGroup "Instances" $ [
  I_TEST(_Monoid, ()),
  I_TEST(_Monoid, Sum Int),
  I_TEST(_Monoid, [Int]),
  -- SLOW I_TEST(_Monoid, Average Rational)
  I_TEST(_Monoid, Average Double),
  I_TEST(_Monoid, Time),
  I_TEST(_Monoid, Duration),
  I_TEST(_Monoid, Span),
  I_TEST(_Monoid, Event ()),
  I_TEST(_Monoid, Placed ()),
  I_TEST(_Monoid, Note ()),
  I_TEST(_Monoid, Voice Int),
  -- I_TEST(_Monoid, Chord Int),
  I_TEST(_Monoid, Score Int),
  I_TEST(_Transformable, Time),
  I_TEST(_Transformable, Duration),
  I_TEST(_Transformable, Span),
  I_TEST(_Transformable, [Time]),
  I_TEST(_Transformable, [Duration]),
  I_TEST(_Transformable, [Span]),
  I_TEST(_Transformable, Set.Set Time),
  I_TEST(_Transformable, Set.Set Duration),
  I_TEST(_Transformable, Set.Set Span),
  I_TEST(_Transformable, Map.Map Int Time),
  I_TEST(_Transformable, Map.Map Int Duration),
  I_TEST(_Transformable, Map.Map Int Span),
  I_TEST(_Transformable, Int),
  I_TEST(_Transformable, Double),
  I_TEST(_Transformable, Event Int),
  I_TEST(_Transformable, Event Double),
  I_TEST(_Transformable, Note Int),
  I_TEST(_Transformable, Note Double),
  I_TEST(_Transformable, Placed Int),
  I_TEST(_Transformable, Placed Double),
  I_TEST(_Transformable, AddMeta (Placed Double)),
  -- TODO how to test "pointwise" for Segment and Behavior
  -- I_TEST(_Transformable, Reactive Int),
  I_TEST(_Transformable, Voice Int),
  -- I_TEST(_Transformable, Chord Int),
  I_TEST(_Transformable, Score Int),
  -- SLOW I_TEST(_Transformable, [Voice Int]),
  -- SLOW I_TEST(_Transformable, [Chord Int]),
  -- SLOW I_TEST(_Transformable, [Score Int]),
  -- I_TEST(_HasDuration, Time),
  I_TEST(_HasDuration, Span),
  I_TEST(_HasDuration, Event Int),
  I_TEST(_HasDuration, Event Double),
  -- I_TEST(_HasDuration, Placed Int),
  -- I_TEST(_HasDuration, Placed Double),
  I_TEST(_HasDuration, Score Int),
  -- I_TEST(_HasDuration, Chord Int),
  -- TODO remove instance I_TEST(_HasDuration, [Score Int]),
  -- TODO remove instance I_TEST(_HasDuration, [Chord Int]),
  -- I_TEST(_HasPosition, Time),
  I_TEST(_HasPosition, Span),
  I_TEST(_HasPosition, Event Int),
  I_TEST(_HasPosition, Event Double),
  -- I_TEST(_HasPosition, Placed Int),
  -- I_TEST(_HasPosition, Placed Double),
  I_TEST(_HasPosition, Score Int),
  I_TEST(_HasPosition, Event (Event Int)),
  I_TEST(_HasPosition, Event (Score Int)),
  I_TEST(_HasPosition, Score (Placed Int)),
  -- I_TEST(_HasPosition, AddMeta (Placed Duration)),
  -- I_TEST(_HasPosition, Chord Int),
  -- TODO remove instance I_TEST(_HasPosition, [Score Int]),
  -- TODO remove instance I_TEST(_HasPosition, [Chord Int]),
  -- Test meaningless... I_TEST(_Splittable, ()),
  I_TEST(_Splittable, Duration),
  -- I_TEST(_Splittable, Span),
  -- TODO arbitrary I_TEST(_Splittable, Meta),
  -- TODO arbitrary I_TEST(_Splittable, Attribute),
  I_TEST(_Splittable, AddMeta Duration),
  -- TODO remove instance I_TEST(_Splittable, [Duration]),
  -- TODO remove instance I_TEST(_Splittable, [Span]),
  -- I_TEST(_Splittable, Set.Set Duration),
  -- I_TEST(_Splittable, Set.Set Span),
  -- I_TEST(_Splittable, Map.Map Int Duration),
  -- I_TEST(_Splittable, Map.Map Int Span),
  -- I_TEST(_Splittable, Int),
  -- I_TEST(_Splittable, Double),
  -- I_TEST(_Splittable, Event Int),
  -- I_TEST(_Splittable, Event Double),
  -- I_TEST(_Splittable, Note Int),
  -- I_TEST(_Splittable, Note Double),
  -- I_TEST(_Splittable, Placed Int),
  -- I_TEST(_Splittable, Placed Double),
  -- TODO how to test "pointwise" for Segment and Behavior
  -- I_TEST(_Splittable, Reactive Int),
  -- I_TEST(_Splittable, Voice Int),
  -- I_TEST(_Splittable, Track Int),
  -- I_TEST(_Splittable, Chord Int),
  -- I_TEST(_Splittable, Score Int),
  -- I_TEST(_Splittable, Event (Event Int)),
  I_TEST(_Transformable, Note [Event Int])
  ]
{-
FAIL
>>> let t = -8.127617881083488
>>> let s = [((5032080227011183/1125899906842624) <-> (3258010814518333/140737488355328),3)^.note,((4567817857326597/562949953421312) <-> (-372699739887573/8796093022208),-4)^.note,((6664901794497075/562949953421312) <-> (-9025068628947/1099511627776),-5)^.note,((300602057893123/8796093022208) <-> (2046761023586943/1125899906842624),-3)^.note]^.score
>>> stretch t (_duration s)
(-24057681795560885390114262061183/158456325028528675187087900672)
>>> _duration (stretch t s)
(96270418646142908887001729741/154742504910672534362390528)>>>
>>> let t = -8
>>> let s = [(5 <-> 23,3)^.note,(3 <-> (-3),-4)^.note]^.score
>>> stretch t (_duration s)
>>> _duration (stretch t s)
Caused by negative notes, should help to normalize spans before returning position/duration!
-}
| music-suite/music-preludes | tests/instances.hs | bsd-3-clause | 13,245 | 0 | 14 | 2,739 | 3,152 | 1,689 | 1,463 | 185 | 1 |
module Rules.IntegerGmp (
integerGmpRules, integerGmpObjects, integerGmpLibraryH, integerGmpDependencies
) where
import Base
import Expression
import GHC
import Oracles.Config.Setting
import Rules.Actions
import Settings.User
-- | Root of the in-tree GMP sources within the GHC source tree.
integerGmpBase :: FilePath
integerGmpBase = "libraries/integer-gmp/gmp"

-- | Directory in which the unpacked GMP tarball is configured and built.
integerGmpBuild :: FilePath
integerGmpBuild = integerGmpBase -/- "gmpbuild"

-- | Directory receiving the object files extracted from the GMP archive.
integerGmpObjects :: FilePath
integerGmpObjects = integerGmpBase -/- "objs"

-- | The static GMP library produced by the in-tree build.
integerGmpLibrary :: FilePath
integerGmpLibrary = integerGmpBase -/- "libgmp.a"

-- | Header produced by the in-tree GMP build.
integerGmpLibraryInTreeH :: FilePath
integerGmpLibraryInTreeH = integerGmpBase -/- "gmp.h"

-- | Header installed into the integer-gmp package's include directory.
integerGmpLibraryH :: FilePath
integerGmpLibraryH = pkgPath integerGmp -/- "include/ghc-gmp.h"

-- | Stand-in header used when a GMP framework is detected on the host.
integerGmpLibraryFakeH :: FilePath
integerGmpLibraryFakeH = integerGmpBase -/- "ghc-gmp.h"

-- | Files other rules must 'need' before relying on the in-tree GMP.
integerGmpDependencies :: [FilePath]
integerGmpDependencies = [integerGmpLibraryH]

-- relative to integerGmpBuild
integerGmpPatch :: FilePath
integerGmpPatch = ".." -/- "tarball" -/- "gmp-5.0.4.patch"

-- | Build target used when invoking builders for this library.
target :: PartialTarget
target = PartialTarget Stage0 integerGmp
-- TODO: See Libffi.hs about removing code duplication.

-- | Environment variables pointing GMP's configure script at the build
-- system's own C compiler, archiver and nm (CC, AR, NM respectively).
configureEnvironment :: Action [CmdOption]
configureEnvironment = mapM toolEnv [ ("CC", Gcc Stage1)
                                    , ("AR", Ar)
                                    , ("NM", Nm) ]
  where
    -- Make sure the builder exists, then expose its path in the variable.
    toolEnv (var, builder) = do
        needBuilder False builder
        path <- builderPath builder
        return $ AddEnv var path
-- | Command-line flags for GMP's configure script: static-only build,
-- cross-compilation host and build platforms taken from settings.
configureArguments :: Action [String]
configureArguments = do
    host  <- setting HostPlatform
    build <- setting BuildPlatform
    let flags = [ "--enable-shared=no"
                , "--host=" ++ host
                , "--build=" ++ build ]
    return flags
-- TODO: we rebuild integer-gmp every time.

-- | Shake rules that unpack, patch, configure and (when no system GMP
-- framework is found) build the in-tree GMP library.  The main product
-- is 'integerGmpLibraryH'; the in-tree build additionally produces
-- 'integerGmpLibrary' and extracts its objects into 'integerGmpObjects'.
integerGmpRules :: Rules ()
integerGmpRules = do
    -- TODO: split into multiple rules
    integerGmpLibraryH %> \_ -> do
        when trackBuildSystem $ need [sourcePath -/- "Rules/IntegerGmp.hs"]
        -- remove the old build folder, if it exists.
        liftIO $ removeFiles integerGmpBuild ["//*"]
        liftIO $ removeFiles (integerGmpObjects) ["//*"]
        -- unpack the gmp tarball.
        -- Note: We use a tarball like gmp-4.2.4-nodoc.tar.bz2, which is
        -- gmp-4.2.4.tar.bz2 repacked without the doc/ directory contents.
        -- That's because the doc/ directory contents are under the GFDL,
        -- which causes problems for Debian.
        tarballs <- getDirectoryFiles "" [integerGmpBase -/- "tarball/gmp*.tar.bz2"]
        when (length tarballs /= 1) $
            putError $ "integerGmpRules: exactly one tarball expected"
                     ++ "(found: " ++ show tarballs ++ ")."
        need tarballs
        build $ fullTarget target Tar tarballs [integerGmpBase]
        -- move gmp-<version> to gmpbuild
        let filename = dropExtension . dropExtension . takeFileName $ head tarballs
            suffix = "-nodoc-patched"
        unless (suffix `isSuffixOf` filename) $
            putError $ "integerGmpRules: expected suffix " ++ suffix
                     ++ " (found: " ++ filename ++ ")."
        let libname = take (length filename - length suffix) filename
        moveDirectory (integerGmpBase -/- libname) integerGmpBuild
        -- apply patches
        -- TODO: replace "patch" with PATCH_CMD
        unit . quietly $ cmd Shell (EchoStdout False) [Cwd integerGmpBase] "patch -p0 < gmpsrc.patch"
        putBuild $ "| Apply " ++ (integerGmpBase -/- "gmpsrc.patch")
        unit . quietly $ cmd Shell (EchoStdout False) [Cwd integerGmpBuild] "patch -p1 < " [integerGmpPatch]
        putBuild $ "| Apply " ++ (integerGmpBase -/- integerGmpPatch)
        -- TODO: What's `chmod +x libraries/integer-gmp/gmp/ln` for?
        envs <- configureEnvironment
        args <- configureArguments
        runConfigure integerGmpBuild envs args
        -- TODO: currently we configure integerGmp package twice -- optimise
        runConfigure (pkgPath integerGmp) [] []
        -- check whether we need to build in tree gmp
        -- this is indicated by line "HaveFrameworkGMP = YES" in `config.mk`
        configMk <- liftIO . readFile $ integerGmpBase -/- "config.mk"
        if "HaveFrameworkGMP = YES" `isInfixOf` configMk
            then do
                putBuild "| GMP framework detected and will be used"
                copyFile integerGmpLibraryFakeH integerGmpLibraryH
            else do
                putBuild "| No GMP framework detected; in tree GMP will be built"
                runMake integerGmpBuild ["MAKEFLAGS="]
                copyFile (integerGmpBuild -/- "gmp.h") integerGmpLibraryInTreeH
                copyFile (integerGmpBuild -/- "gmp.h") integerGmpLibraryH
                -- TODO: why copy library, can we move it instead?
                copyFile (integerGmpBuild -/- ".libs/libgmp.a") integerGmpLibrary
                createDirectory integerGmpObjects
                build $ fullTarget target Ar [integerGmpLibrary] [integerGmpObjects]
                runBuilder Ranlib [integerGmpLibrary]
        putSuccess "| Successfully built custom library 'integer-gmp'"

    -- The in-tree gmp.h is produced as a by-product of the rule above.
    integerGmpLibraryInTreeH %> \_ -> need [integerGmpLibraryH]
| quchen/shaking-up-ghc | src/Rules/IntegerGmp.hs | bsd-3-clause | 5,162 | 0 | 19 | 1,219 | 958 | 483 | 475 | 87 | 2 |
{-# OPTIONS_HADDOCK hide #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
--------------------------------------------------------------------------------
-- |
-- Module : Data.Generics.Internal.GenericN
-- Copyright : (C) 2020 Csongor Kiss
-- License : BSD3
-- Maintainer : Csongor Kiss <kiss.csongor.kiss@gmail.com>
-- Stability : experimental
-- Portability : non-portable
--
-- Generic representation of types with multiple parameters
--
--------------------------------------------------------------------------------
module Data.Generics.Internal.GenericN
( Param (..)
, Rec (Rec, unRec)
, GenericN (..)
) where
import Data.Kind
import GHC.Generics
import GHC.TypeLits
import Data.Coerce
-- | Type-level marker standing in for the @n@-th parameter of a type.
data family Param :: Nat -> j -> k

-- Wrapper for parameters of kind 'Type'; a plain newtype, so it is
-- 'Coercible' with the wrapped type.
newtype instance Param n (a :: Type)
  = StarParam { getStarParam :: a}

-- | Replace every type argument of @t@ with a numbered 'Param' marker,
-- counting up from @i@ (the rightmost argument gets index @i@).
type family Indexed (t :: k) (i :: Nat) :: k where
  Indexed (t a) i = Indexed t (i + 1) (Param i a)
  Indexed t _ = t

-- | Newtype around 'K1' carrying an extra phantom @p@.
newtype Rec (p :: Type) a x = Rec { unRec :: K1 R a x }
-- | Like 'Generic', but the representation 'RepN' is the 'Rep' of the
-- type with every parameter replaced by an indexed 'Param' marker (via
-- 'Indexed').  The superclass constraint records that the two
-- representations are 'Coercible', which 'toN' and 'fromN' exploit.
class
  ( Coercible (Rep a) (RepN a)
  , Generic a
  ) => GenericN (a :: Type) where
  type family RepN (a :: Type) :: Type -> Type
  type instance RepN a = Rep (Indexed a 0)
  toN :: RepN a x -> a
  fromN :: a -> RepN a x
-- | The single, universal instance: conversion is just the stock
-- 'to'/'from' transported along the representation coercion, so it
-- compiles to a no-op at runtime.
instance
  ( Coercible (Rep a) (RepN a)
  , Generic a
  ) => GenericN a where
  toN :: forall x. RepN a x -> a
  toN = coerce (to :: Rep a x -> a)
  {-# INLINE toN #-}
  fromN :: forall x. a -> RepN a x
  fromN = coerce (from :: a -> Rep a x)
  {-# INLINE fromN #-}
| kcsongor/generic-lens | generic-lens-core/src/Data/Generics/Internal/GenericN.hs | bsd-3-clause | 1,809 | 0 | 9 | 423 | 450 | 267 | 183 | 46 | 0 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Distance.DE.Corpus
( corpus
) where
import Data.String
import Prelude
import Duckling.Distance.Types
import Duckling.Locale
import Duckling.Resolve
import Duckling.Testing.Types
-- | Corpus of German distance examples, evaluated in the standard test
-- context with the DE locale (no region) and default test options.
corpus :: Corpus
corpus = (germanContext, testOptions, allExamples)
  where
    germanContext = testContext {locale = makeLocale DE Nothing}
-- | Each entry pairs an expected distance value with the German surface
-- forms that must resolve to it.  Note the German decimal comma
-- ("3,0", "1,87") and umlauted forms ("fünf", "über").
allExamples :: [Example]
allExamples = concat
  [ examples (simple Kilometre 3)
             [ "3 kilometer"
             , "3 km"
             , "3km"
             , "3,0 km"
             ]
  , examples (simple Mile 8)
             [ "acht meilen"
             , "8 meilen"
             ]
  , examples (simple Metre 9)
             [ "9m"
             ]
  , examples (simple Centimetre 2)
             [ "2cm"
             , "2 zentimeter"
             ]
  , examples (simple Inch 5)
             [ "5''"
             , "fünf zoll"
             , "5\""
             ]
  , examples (simple Metre 1.87)
             [ "1,87 meter"
             ]
  -- Interval phrasings ("zwischen ... und ...", "von ... bis ...").
  , examples (between Kilometre (3, 5))
             [ "zwischen 3 und 5 kilometern"
             , "von 3km bis 5km"
             , "um die 3-5 kilometer"
             , "etwa 3km-5km"
             , "3-5 kilometer"
             ]
  , examples (under Mile 3.5)
             [ "unter 3,5 meilen"
             , "weniger als 3,5meilen"
             --, "niedriger als dreikommafünf meilen"
             ]
  , examples (above Inch 5)
             [ "mehr als fünf zoll"
             , "mindestens 5''"
             , "über 5\""
             ]
  , examples (between Millimetre (5, 6))
             [ "zwischen 5 und sechs Millimetern"
             , "zwischen 5 und sechs millimeter"
             , "5-6 mm"
             ]
  ]
| facebookincubator/duckling | Duckling/Distance/DE/Corpus.hs | bsd-3-clause | 1,917 | 0 | 10 | 748 | 348 | 203 | 145 | 49 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE NoRebindableSyntax #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Api.Tests
( tests
) where
import Data.List (sortOn)
import Data.Text (Text)
import Prelude
import Test.Tasty
import Test.Tasty.HUnit
import qualified Data.HashMap.Strict as HashMap
import qualified Data.HashSet as HashSet
import Duckling.Api
import Duckling.Dimensions.Types
import Duckling.Locale
import Duckling.Testing.Asserts
import Duckling.Testing.Types
import Duckling.Types
import qualified Duckling.Numeral.Types as TNumeral
-- | All API-level test groups.
tests :: TestTree
tests = testGroup "API Tests"
  [ parseTest
  , rankTest
  , rangeTest
  , supportedDimensionsTest
  ]
-- | End-to-end check of 'parse': the numeral "42" embedded in a
-- sentence must come back with the expected dimension name, body text,
-- serialized value and character range (4-6 covers "42" in
-- "hey 42 there", so the end offset appears to be exclusive).
parseTest :: TestTree
parseTest = testCase "Parse Test" $
  case parse sentence testContext testOptions [Seal Numeral] of
    [] -> assertFailure "empty result"
    (Entity dim body (RVal _ v) start end _ _:_) -> do
      assertEqual "dim" "number" dim
      assertEqual "body" "42" body
      assertEqual "value" val (toJText v)
      assertEqual "start" 4 start
      assertEqual "end" 6 end
  where
    sentence = "hey 42 there"
    -- Expected serialization of the numeral value 42.0.
    val = toJText TNumeral.NumeralValue {TNumeral.vValue = 42.0}
-- | Groups the ranking tests.
rankTest :: TestTree
rankTest = testGroup "Rank Tests"
  [ rankFilterTest
  , rankOrderTest
  ]

-- | 'analyze' must keep only the winning interpretation(s): for each
-- (sentence, targeted dimensions, expected winners) triple, the
-- dimensions of the returned tokens must match the expected list
-- exactly, in order.
rankFilterTest :: TestTree
rankFilterTest = testCase "Rank Filter Tests" $ do
  mapM_ check
    [ ( "in 2 minutes"
      , [Seal Numeral, Seal Duration, Seal Time]
      , [Seal Time]
      )
    , ( "in 2 minutes, about 42 degrees"
      , [Seal Numeral, Seal Temperature, Seal Time]
      , [Seal Time, Seal Temperature]
      )
    , ( "today works... and tomorrow at 9pm too"
      , [Seal Numeral, Seal Time]
      , [Seal Time, Seal Time]
      )
    , ( "between 9:30 and 11:00 on thursday or Saturday and Thanksgiving Day"
      , [Seal Numeral, Seal Time]
      , [Seal Time, Seal Time, Seal Time]
      )
    , ("the day after tomorrow 5pm", [Seal Time], [Seal Time])
    , ("the day after tomorrow 5pm", [Seal Time, Seal Numeral], [Seal Time])
    , ("the day after tomorrow 5pm", [], [Seal Time])
    ]
  where
    check :: (Text, [Seal Dimension], [Seal Dimension]) -> IO ()
    check (sentence, targets, expected) =
      let go = analyze sentence testContext testOptions $ HashSet.fromList targets
          -- Project each resolved token down to its dimension.
          actual = flip map go $
            \(Resolved{node=Node{token=Token d _}}) -> Seal d
      in assertEqual ("wrong winners for " ++ show sentence) expected actual

-- | Tokens returned by 'analyze' must come back sorted by their range
-- in the input string.
rankOrderTest :: TestTree
rankOrderTest = testCase "Rank Order Tests" $ do
  mapM_ check
    [ ("tomorrow at 5PM or 8PM", [Seal Time])
    , ("321 12 3456 ... 7", [Seal Numeral])
    , ("42 today 23 tomorrow", [Seal Numeral, Seal Time])
    ]
  where
    check (s, targets) =
      let tokens = analyze s testContext testOptions $ HashSet.fromList targets
      in assertEqual "wrong ordering" (sortOn range tokens) tokens
-- | The first analyzed token must cover exactly the expected character
-- range, including inputs with surrounding whitespace and punctuation.
rangeTest :: TestTree
rangeTest = testCase "Range Tests" $ do
  mapM_ (analyzedFirstTest testContext testOptions) xs
  where
    xs = map (\(input, targets, range) -> (input, targets, f range))
      [ ( "order status 3233763377", [Seal PhoneNumber], Range 13 23 )
      , ( " 3233763377 " , [Seal PhoneNumber], Range 2 12 )
      , ( " -3233763377" , [Seal PhoneNumber], Range 2 12 )
      , ( " now" , [Seal Time] , Range 2 5 )
      , ( " Monday " , [Seal Time] , Range 3 9 )
      , ( " next week " , [Seal Time] , Range 2 13 )
      , ( " 42\n\n" , [Seal Numeral] , Range 3 5 )
      ]
    -- Predicate comparing the resolved token's range against the
    -- expected one.
    f :: Range -> TestPredicate
    f expected _ (Resolved {range = actual}) = expected == actual

-- | Spot-checks 'supportedDimensions' for a couple of languages; the
-- comparison is set-based, so ordering of dimensions does not matter.
supportedDimensionsTest :: TestTree
supportedDimensionsTest = testCase "Supported Dimensions Test" $ do
  mapM_ check
    [ ( AR
      , [ Seal Email, Seal AmountOfMoney, Seal PhoneNumber, Seal Url
        , Seal Duration, Seal Numeral, Seal Ordinal, Seal Time, Seal Volume
        , Seal Temperature, Seal Quantity, Seal CreditCardNumber
        ]
      )
    , ( PL
      , [ Seal Email, Seal AmountOfMoney, Seal PhoneNumber, Seal Url
        , Seal Duration, Seal Numeral, Seal Ordinal, Seal Time
        , Seal CreditCardNumber
        ]
      )
    ]
  where
    check :: (Lang, [Seal Dimension]) -> IO ()
    check (l, expected) = case HashMap.lookup l supportedDimensions of
      Nothing -> assertFailure $ "no dimensions for " ++ show l
      Just actual ->
        assertEqual ("wrong dimensions for " ++ show l)
          (HashSet.fromList expected) (HashSet.fromList actual)
| facebookincubator/duckling | tests/Duckling/Api/Tests.hs | bsd-3-clause | 4,813 | 0 | 20 | 1,303 | 1,414 | 764 | 650 | 103 | 2 |
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE DefaultSignatures #-}
#endif
module Generics.Deriving.Traversable (
-- * GTraversable class
GTraversable(..)
-- * Default method
, gtraversedefault
) where
import Control.Applicative
import Generics.Deriving.Base
import Generics.Deriving.Foldable
import Generics.Deriving.Functor
import Generics.Deriving.Instances ()
--------------------------------------------------------------------------------
-- Generic traverse
--------------------------------------------------------------------------------
-- | Traversal over the generic representation of a type.
class GTraversable' t where
  gtraverse' :: Applicative f => (a -> f b) -> t a -> f (t b)

-- Constructors without fields: nothing to traverse.
instance GTraversable' U1 where
  gtraverse' _ U1 = pure U1

-- Occurrence of the type parameter: apply the effectful function.
instance GTraversable' Par1 where
  gtraverse' f (Par1 a) = Par1 <$> f a

-- Constant field: left untouched.
instance GTraversable' (K1 i c) where
  gtraverse' _ (K1 a) = pure (K1 a)

-- Recursive occurrence of a traversable container.
instance (GTraversable f) => GTraversable' (Rec1 f) where
  gtraverse' f (Rec1 a) = Rec1 <$> gtraverse f a

-- Meta-information wrapper: traverse underneath.
instance (GTraversable' f) => GTraversable' (M1 i c f) where
  gtraverse' f (M1 a) = M1 <$> gtraverse' f a

-- Sums: traverse whichever alternative is present.
instance (GTraversable' f, GTraversable' g) => GTraversable' (f :+: g) where
  gtraverse' f (L1 a) = L1 <$> gtraverse' f a
  gtraverse' f (R1 a) = R1 <$> gtraverse' f a

-- Products: traverse both components, left to right.
instance (GTraversable' f, GTraversable' g) => GTraversable' (f :*: g) where
  gtraverse' f (a :*: b) = (:*:) <$> gtraverse' f a <*> gtraverse' f b

-- Composition: traverse the outer container with the inner traversal.
instance (GTraversable f, GTraversable' g) => GTraversable' (f :.: g) where
  gtraverse' f (Comp1 x) = Comp1 <$> gtraverse (gtraverse' f) x
-- | Generic version of 'Traversable'.  On GHC >= 7.0.1, 'gtraverse'
-- has a default implementation for types with a 'Generic1' instance.
class (GFunctor t, GFoldable t) => GTraversable t where
  gtraverse :: Applicative f => (a -> f b) -> t a -> f (t b)
#if __GLASGOW_HASKELL__ >= 701
  default gtraverse :: (Generic1 t, GTraversable' (Rep1 t), Applicative f)
                    => (a -> f b) -> t a -> f (t b)
  gtraverse = gtraversedefault
#endif

  -- | Evaluate each action in the structure and collect the results.
  gsequenceA :: Applicative f => t (f a) -> f (t a)
  gsequenceA = gtraverse id

  -- | Monadic variant of 'gtraverse' (via 'WrapMonad').
  gmapM :: Monad m => (a -> m b) -> t a -> m (t b)
  gmapM f = unwrapMonad . gtraverse (WrapMonad . f)

  -- | Monadic variant of 'gsequenceA'.
  gsequence :: Monad m => t (m a) -> m (t a)
  gsequence = gmapM id
-- | Default 'gtraverse' for types with a 'Generic1' instance: convert
-- to the generic representation, traverse it, and convert back.
gtraversedefault :: (Generic1 t, GTraversable' (Rep1 t), Applicative f)
                 => (a -> f b) -> t a -> f (t b)
gtraversedefault f = fmap to1 . gtraverse' f . from1
-- Base types instances, all via the Generic1-based default.
instance GTraversable Maybe where
  gtraverse = gtraversedefault

instance GTraversable [] where
  gtraverse = gtraversedefault
| ekmett/generic-deriving | src/Generics/Deriving/Traversable.hs | bsd-3-clause | 2,604 | 0 | 13 | 583 | 915 | 469 | 446 | 45 | 1 |
module Library.Page.EditPatron where
import Graphics.UI.Threepenny.Core hiding (row)
import Foundation.Common
import Foundation.Input
-- import Foundation.Layout
import Library
import Library.DB.Types
import Library.Page.PatronInfo
import Library.Page.PatronSearch
import Database.SQLite.Simple
import Control.Monad
-- | Page for finding a patron and editing their information, built on
-- the generic patron search, with 'viewPatronInfo' as the per-result
-- action.
editPatronPage :: Connection -> Page
editPatronPage conn = patronSearch' "EditPatron" conn
                        viewPatronInfo

-- | Search-result callback: renders the patron-info form for the
-- chosen patron (prefilled via 'loadPatronFields'), plus a Back button
-- that returns to the search-result table.
viewPatronInfo :: PatronSearch ()
viewPatronInfo (drawArea,btnArea) conn
               pat searchResults
               _ = void $ do
  -- The Back button deletes itself and redraws the result table.
  backBtn <- toElement $
    Button (LabelStr "Back")
      (alertBtn radiusBtnStyle) $ \self -> do
        delete self
        displayPatronTable' (drawArea,btnArea) conn
          searchResults viewPatronInfo
  pInf <- patronInfo "ViewPatronInfo" conn
            editPatronAction (patronDBId pat)
            (loadPatronFields backBtn btnArea pat)
  element drawArea # set children [] #+
    [ panel #+ pInf ]
-- | Populate the patron-info form fields from an existing 'Patron'
-- record, and add the supplied Back button to the button area.
loadPatronFields :: Element -> Element -> Patron -> PatronInfoLoad
loadPatronFields btn btnArea pat _ pf = do
  void $ element btnArea #+ [ element btn ]
  setValue (fstNameFld pf) $ firstName pat
  setValue (lstNameFld pf) $ lastName pat
  setValue (phoneFld pf) $ phoneNumber pat
  setValue (emailFld pf) $ emailAddr pat
  setValue (prefFld pf) $ Just pref
  setValue (home1Fld pf) $ homeAddr1 pat
  setValue (home2Fld pf) $ homeAddr2 pat
  setValue (cityFld pf) $ viewCity $ cityStateZip pat
  setValue (stateFld pf) $ viewState $ cityStateZip pat
  setValue (zipFld pf) $ viewZipCd $ cityStateZip pat
  setValue (patNumFld pf) $ show $ patronNum pat
  where
    -- Render the preferred-contact choice as the string the form uses.
    pref = case prefContact pat of
      Email -> "Email"
      Phone -> "Phone"
-- | Validate and save edits to an existing patron.  Shows an on-page
-- alert when the patron number is missing or taken; otherwise writes
-- the updated record.
editPatronAction :: PatronInfo (Maybe Integer)
editPatronAction (alertArea,_) conn _
                 fstNm lstNm
                 phone email pref
                 home1 home2
                 csz mpn
                 mId = case (mpn,mId) of
  (Nothing,_) -> displayFailure alertArea
                   "Patron Number Missing"
  -- A missing DB id means the caller broke its contract, not user error.
  (_,Nothing) -> fail "Bug: Wasn't given a DB Id Number for Patron"
  (Just pn,Just idNo) -> do
    -- Uniqueness check; presumably the patron's own DB id is excluded
    -- so keeping the old number is allowed — confirm in patronNumberTaken.
    taken <- patronNumberTaken conn idNo pn
    if taken
      then displayFailure alertArea
             "Patron Number Is Taken"
      else do
        displaySuccess alertArea "Patron Updated!"
        let pat = Patron mId
                    fstNm lstNm
                    phone email pref
                    home1 home2
                    csz pn
        updatePatron conn pat
| kylcarte/threepenny-extras | src/Library/Page/EditPatron.hs | bsd-3-clause | 2,486 | 0 | 16 | 649 | 738 | 359 | 379 | 68 | 4 |
module Parser.Lexer where
import Parser.Data ( Parser )
import qualified Text.Megaparsec.Char.Lexer as L
import Control.Monad ( void )
import Text.Megaparsec.Char
( alphaNumChar, char, letterChar, lowerChar, upperChar, spaceChar )
import Text.Megaparsec ( (<|>), between, many, MonadParsec(try) )
import Text.Printf ( printf )
-- | Space consumer: skips whitespace, "--" line comments and
-- "{- -}" block comments.
sc :: Parser ()
sc = L.space (void spaceChar) lineComment blockComment
  where
    lineComment  = L.skipLineComment "--"
    blockComment = L.skipBlockComment "{-" "-}"
-- | Wrap a parser so it also consumes trailing whitespace/comments.
lexeme :: Parser a -> Parser a
lexeme = L.lexeme sc

-- | Parse a fixed string, consuming trailing whitespace/comments.
symbol :: String -> Parser String
symbol = L.symbol sc
-- Bracketing combinators: run the given parser between the matching
-- pair of delimiter symbols (each symbol eats trailing whitespace).
roundBr, angleBr, boxBr, curvyBr :: Parser a -> Parser a
roundBr p = between (symbol "(") (symbol ")") p
angleBr p = between (symbol "<") (symbol ">") p
boxBr   p = between (symbol "[") (symbol "]") p
curvyBr p = between (symbol "{") (symbol "}") p
-- | Characters allowed after the first character of an identifier:
-- underscore, letters/digits, and primes.
identLetters :: Parser Char
identLetters =
  char '_' <|> alphaNumChar <|> char '\''
-- | Reject @x@ (with a descriptive failure) when it is one of the
-- reserved words; otherwise return it unchanged.
notReserved :: Monad m => [String] -> String -> m String
notReserved reserved x
  | x `elem` reserved = fail $ printf "%s is reserved" (show x)
  | otherwise         = return x
-- | Identifier starting with a lower-case letter.
lIdentifier :: [String] -> Parser String
lIdentifier = identStartingWith lowerChar

-- | Identifier starting with an upper-case letter.
uIdentifier :: [String] -> Parser String
uIdentifier = identStartingWith upperChar

-- | Identifier starting with any letter.
identifier :: [String] -> Parser String
identifier = identStartingWith letterChar

-- | Shared implementation for the three identifier parsers: @start@
-- parses the first character, the rest are 'identLetters', and the
-- whole lexeme is rejected (via 'notReserved') if it is a reserved
-- word.  'try' ensures no input is consumed on failure.
identStartingWith :: Parser Char -> [String] -> Parser String
identStartingWith start reserved =
  (lexeme . try) (p >>= notReserved reserved)
  where
    p = (:) <$> start <*> many identLetters
-- | Parse a comma, consuming trailing whitespace/comments.
comma :: Parser String
comma = symbol ","
| kajigor/uKanren_transformations | src/Parser/Lexer.hs | bsd-3-clause | 1,678 | 0 | 9 | 316 | 617 | 330 | 287 | 41 | 1 |
module Language.Verilog.Parser
( parseFile
, preprocess
) where
import Language.Verilog.AST
import Language.Verilog.Parser.Lex
import Language.Verilog.Parser.Parse
import Language.Verilog.Parser.Preprocess
import Language.Verilog.Parser.Tokens
-- | Parses a file given a table of predefined macros, the file name,
-- and the file contents.  The preprocessed text is lexed, every token's
-- position is re-anchored to the given file name, and the token stream
-- is parsed into modules.
parseFile :: [(String, String)] -> FilePath -> String -> [Module]
parseFile env file content = modules (map relocate rawTokens)
  where
    rawTokens = alexScanTokens (preprocess env file content)

    -- Stamp the real file name into each token's position.
    relocate :: Token -> Token
    relocate (Token t s (Position _ l c)) = Token t s (Position file l c)
| tomahawkins/verilog | Language/Verilog/Parser.hs | bsd-3-clause | 636 | 0 | 11 | 104 | 172 | 97 | 75 | 13 | 1 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- | A nonoptimising interpreter for Futhark. It makes no assumptions of
-- the form of the input program, and in particular permits shadowing.
-- This interpreter should be considered the primary benchmark for
-- judging the correctness of a program, but note that it is not by
-- itself sufficient. The interpreter does not perform in-place
-- updates like the native code generator, and bugs related to
-- uniqueness will therefore not be detected. Of course, the type
-- checker should catch such error.
--
-- To run an Futhark program, you would normally run the interpreter as
-- @'runFun' 'defaultEntryPoint' args prog@.
module Futhark.Interpreter
( runFun
, runFunWithShapes
, InterpreterError(..) )
where
import Control.Applicative
import Control.Monad.Reader
import Control.Monad.Writer
import Control.Monad.Except
import Data.Array
import Data.List
import Data.Loc
import qualified Data.HashMap.Strict as HM
import Data.Maybe
import Prelude
import Futhark.Representation.SOACS
import Futhark.Util
-- | An error happened during execution, and this is why.
data InterpreterError =
      MissingEntryPoint Name
      -- ^ The specified start function does not exist.
    | InvalidFunctionArguments Name (Maybe [TypeBase Rank NoUniqueness]) [TypeBase Rank NoUniqueness]
      -- ^ The arguments given to a function were mistyped.
    | IndexOutOfBounds String [Int] [Int]
      -- ^ First @[Int]@ is the array shape, second is the attempted index.
    | SplitOutOfBounds String [Int] [Int]
      -- ^ First @[Int]@ is array shape, second is attempted split
      -- sizes.
    | NegativeIota Int
      -- ^ Called @iota(n)@ where @n@ was negative.
    | NegativeReplicate Int
      -- ^ Called @replicate(n, x)@ where @n@ was negative.
    | InvalidArrayShape Exp [Int] [Int]
      -- ^ First @[Int]@ is the old shape, second is the attempted new shape.
    | ZipError [Int]
      -- ^ The arguments to @zip@ were of different lengths.
    | AssertFailed SrcLoc
      -- ^ Assertion failed at this location.
    | TypeError String
      -- ^ Some value was of an unexpected type.
    | DivisionByZero
      -- ^ Attempted to divide by zero.
-- Human-readable rendering of interpreter errors (one equation per
-- constructor).
instance Show InterpreterError where
  show (MissingEntryPoint fname) =
    "Program entry point '" ++ nameToString fname ++ "' not defined."
  show (InvalidFunctionArguments fname Nothing got) =
    "Function '" ++ nameToString fname ++ "' did not expect argument(s) of type " ++
    intercalate ", " (map pretty got) ++ "."
  show (InvalidFunctionArguments fname (Just expected) got) =
    "Function '" ++ nameToString fname ++ "' expected argument(s) of type " ++
    intercalate ", " (map pretty expected) ++
    " but got argument(s) of type " ++
    intercalate ", " (map pretty got) ++ "."
  show (IndexOutOfBounds var arrsz i) =
    "Array index " ++ show i ++ " out of bounds in array '" ++
    var ++ "', of size " ++ show arrsz ++ "."
  show (SplitOutOfBounds var arrsz sizes) =
    "Split not valid for sizes " ++ show sizes ++
    " on array '" ++ var ++ "', with shape " ++ show arrsz ++ "."
  show (NegativeIota n) =
    "Argument " ++ show n ++ " to iota at is negative."
  show (NegativeReplicate n) =
    "Argument " ++ show n ++ " to replicate is negative."
  show (TypeError s) =
    "Type error during interpretation: " ++ s
  show (InvalidArrayShape e shape newshape) =
    "Invalid array reshaping " ++ pretty e ++
    ", from " ++ show shape ++ " to " ++ show newshape
  show (ZipError lengths) =
    "Array arguments to zip must have same length, but arguments have lengths " ++ intercalate ", " (map show lengths) ++ "."
  show (AssertFailed loc) =
    "Assertion failed at " ++ locStr loc ++ "."
  show DivisionByZero =
    "Division by zero."
-- | Maps function names to their interpreted implementations.
type FunTable = HM.HashMap Name ([Value] -> FutharkM [Value])

-- | Maps variable names to their current values.
type VTable = HM.HashMap VName Value

-- | The (read-only) interpreter environment.
data FutharkEnv = FutharkEnv { envVtable :: VTable
                             , envFtable :: FunTable
                             }

-- | The interpreter monad: a reader over the environment that can
-- abort with an 'InterpreterError'.
newtype FutharkM a = FutharkM (ReaderT FutharkEnv
                               (Except InterpreterError) a)
  deriving (Monad, Applicative, Functor,
            MonadReader FutharkEnv)

-- | Run an interpreter computation in the given environment.
runFutharkM :: FutharkM a -> FutharkEnv
            -> Either InterpreterError a
runFutharkM (FutharkM m) env = runExcept $ runReaderT m env

-- | Abort interpretation with the given error.
bad :: InterpreterError -> FutharkM a
bad = FutharkM . throwError
-- | Expect a primitive value; the 'String' names the caller for the
-- 'TypeError' message.
asPrimitive :: String -> Value -> FutharkM PrimValue
asPrimitive _ (PrimVal v) = return v
asPrimitive w _ = bad $ TypeError $ w ++ " asPrimitive"

-- | Expect a 32-bit integer value.
asInt32 :: String -> Value -> FutharkM Int32
asInt32 _ (PrimVal (IntValue (Int32Value v))) = return v
asInt32 w _ = bad $ TypeError $ w ++ " asInt32"

-- | Expect a 32-bit integer value, converted to a host 'Int'.
asInt :: String -> Value -> FutharkM Int
asInt _ (PrimVal (IntValue (Int32Value v))) = return $ fromIntegral v
asInt w _ = bad $ TypeError $ w ++ " asInt"
-- | Compute the value a pattern element should be bound to.  A plain
-- 'BindVar' binds the value as-is; 'BindInPlace' writes the value into
-- (a copy of) the source array at the given indices.
bindVar :: Bindage -> Value
        -> FutharkM Value
bindVar BindVar val =
  return val
bindVar (BindInPlace _ src is) val = do
  srcv <- lookupVar src
  is' <- mapM (asInt "bindInPlace" <=< evalSubExp) is
  case srcv of
    ArrayVal arr bt shape -> do
      flatidx <- indexArray (textual src) shape is'
      if length is' == length shape then
        -- Fully indexed: replace a single element.
        case val of
          PrimVal bv ->
            return $ ArrayVal (arr // [(flatidx, bv)]) bt shape
          _ ->
            bad $ TypeError "bindVar BindInPlace, full indices given, but replacement value is not a prim value"
      else
        -- Partially indexed: overwrite the contiguous slice starting
        -- at the computed flat index with the replacement array.
        case val of
          ArrayVal valarr _ valshape ->
            let updates =
                  [ (flatidx + i, valarr ! i) | i <- [0..product valshape-1] ]
            in return $ ArrayVal (arr // updates) bt shape
          PrimVal _ ->
            bad $ TypeError "bindVar BindInPlace, incomplete indices given, but replacement value is not array"
    _ ->
      bad $ TypeError "bindVar BindInPlace, source is not array"
-- | Resolve every (ident, bindage, value) triple with 'bindVar' and
-- assemble the results into a fresh variable table.
bindVars :: [(Ident, Bindage, Value)]
         -> FutharkM VTable
bindVars bnds = do
  let (idents, bindages, vals) = unzip3 bnds
  boundVals <- zipWithM bindVar bindages vals
  return $ HM.fromList $ zip (map identName idents) boundVals
-- | Run a computation with the given bindings in scope, first checking
-- that each bound value's shape matches its identifier's declared
-- shape (evaluated in the extended environment, so shapes may refer to
-- the new bindings).
binding :: [(Ident, Bindage, Value)]
        -> FutharkM a
        -> FutharkM a
binding bnds m = do
  vtable <- bindVars bnds
  local (extendVtable vtable) $ do
    checkBoundShapes bnds
    m
  where extendVtable vtable env = env { envVtable = vtable <> envVtable env }

        checkBoundShapes = mapM_ checkShape
        -- Only plain 'BindVar' bindings carry a checkable shape
        -- annotation; in-place updates keep the source array's shape.
        checkShape (ident, BindVar, val) = do
          let valshape = map (PrimVal . value) $ valueShape val
              vardims = arrayDims $ identType ident
          varshape <- mapM evalSubExp vardims
          when (varshape /= valshape) $
            bad $ TypeError $
            "checkPatSizes:\n" ++
            pretty ident ++ " is specified to have shape [" ++
            intercalate "," (zipWith ppDim vardims varshape) ++
            "], but is being bound to value " ++ pretty val ++
            " of shape [" ++ intercalate "," (map pretty valshape) ++ "]."
        checkShape _ = return ()

        ppDim (Constant v) _ = pretty v
        ppDim e v = pretty e ++ "=" ++ pretty v
-- | Look up a variable in the environment; a miss is a 'TypeError'
-- (scoping was already validated by the type checker).
lookupVar :: VName -> FutharkM Value
lookupVar vname =
  asks (HM.lookup vname . envVtable) >>=
  maybe (bad $ TypeError $ "lookupVar " ++ pretty vname) return

-- | Look up a function in the environment; a miss is a 'TypeError'.
lookupFun :: Name -> FutharkM ([Value] -> FutharkM [Value])
lookupFun fname =
  asks (HM.lookup fname . envFtable) >>=
  maybe (bad $ TypeError $ "lookupFun " ++ pretty fname) return
-- | Split an array value into the list of its rows (primitive values
-- for rank 1, sub-arrays for higher ranks).
arrToList :: Value -> FutharkM [Value]
arrToList (ArrayVal l _ [_]) =
  return $ map PrimVal $ elems l
arrToList (ArrayVal l bt (_:rowshape)) =
  return [ ArrayVal (listArray (0,rowsize-1) vs) bt rowshape
         | vs <- chunk rowsize $ elems l ]
  where rowsize = product rowshape
arrToList _ = bad $ TypeError "arrToList"

-- | Build a flat array value of the given element type and shape from
-- a list of element values (primitive or array; arrays are flattened).
arrayVal :: [Value] -> PrimType -> [Int] -> Value
arrayVal vs bt shape =
  ArrayVal (listArray (0,product shape-1) vs') bt shape
  where vs' = concatMap flatten vs
        flatten (PrimVal bv) = [bv]
        flatten (ArrayVal arr _ _) = elems arr
-- | Transpose a list of result rows into one array value per result
-- type, taking the row shapes from the types' dimensions.
arrays :: [Type] -> [[Value]] -> FutharkM [Value]
arrays ts vs = zipWithM arrays' ts vs'
  where vs' = case vs of
          -- No rows at all: one empty column per result type.
          [] -> replicate (length ts) []
          _ -> transpose vs
        arrays' rt r = do
          rowshape <- mapM (asInt32 "arrays" <=< evalSubExp) $ arrayDims rt
          return $ arrayVal r (elemType rt) $ length r : map fromIntegral rowshape

-- | Fetch the per-iteration argument lists for a SOAC over width @w@:
-- with no input arrays, produce @w@ empty lists; otherwise transpose
-- the rows of the named arrays.
soacArrays :: SubExp -> [VName] -> FutharkM [[Value]]
soacArrays w [] = do
  w' <- asInt32 "soacArrays" =<< evalSubExp w
  return $ genericReplicate w' []
soacArrays _ names = transpose <$> mapM (arrToList <=< lookupVar) names
-- | Compute the flat (row-major) index into an array of the given
-- shape.  The index may be partial (a prefix of the dimensions), in
-- which case the result is the flat offset of the selected slice.
-- Fails with 'IndexOutOfBounds' when any index is negative or not
-- strictly smaller than its dimension.
--
-- Note: the bound check must be strict ('<').  The previous '(<=)'
-- allowed an index equal to the dimension, which then caused an
-- uncatchable "Data.Array" exception in the subsequent '(//)' update
-- instead of this interpreter's own error.
indexArray :: String -> [Int] -> [Int]
           -> FutharkM Int
indexArray name shape is
  | and (zipWith (<) is shape),
    all (0<=) is,
    length is <= length shape =
      -- Row-major flattening: each index is scaled by the product of
      -- the dimensions to its right.
      let slicesizes = map product $ drop 1 $ tails shape
      in return $ sum $ zipWith (*) is slicesizes
  | otherwise =
      bad $ IndexOutOfBounds name shape is
--------------------------------------------------
------- Interpreting an arbitrary function -------
--------------------------------------------------
-- | @runFun name args prog@ invokes the @name@ function of program
-- @prog@, with the parameters bound in order to the values in @args@.
-- Returns either an error or the return value of @fun@.
--
-- Note that if 'prog' is not type-correct, you cannot be sure that
-- you'll get an error from the interpreter - it may just as well
-- silently return a wrong value.  You are, however, guaranteed that
-- the initial call to 'prog' is properly checked.
runFun :: Name -> [Value] -> Prog
       -> Either InterpreterError [Value]
runFun fname mainargs prog = do
  let ftable = buildFunTable prog
      futharkenv = FutharkEnv { envVtable = HM.empty
                              , envFtable = ftable
                              }
  -- Prefer a user-defined function of that name; otherwise fall back
  -- to a builtin of the same name.
  case (funDefByName fname prog, HM.lookup fname ftable) of
    (Nothing, Nothing) -> Left $ MissingEntryPoint fname
    (Just fundec, _) ->
      runThisFun fundec mainargs ftable
    (_ , Just fun) -> -- It's a builtin function, it'll do its own
                      -- error checking.
      runFutharkM (fun mainargs) futharkenv
-- | Like 'runFun', but prepends parameters corresponding to the
-- required shape context of the function being called.
runFunWithShapes :: Name -> [Value] -> Prog
                 -> Either InterpreterError [Value]
runFunWithShapes fname valargs prog = do
  let ftable = buildFunTable prog
      futharkenv = FutharkEnv { envVtable = HM.empty
                              , envFtable = ftable
                              }
  case (funDefByName fname prog, HM.lookup fname ftable) of
    (Nothing, Nothing) -> Left $ MissingEntryPoint fname
    (Just fundec, _) ->
      let args' = shapes (funDefParams fundec) ++ valargs
      in runThisFun fundec args' ftable
    (_ , Just fun) -> -- It's a builtin function, it'll do its own
                      -- error checking.
      runFutharkM (fun valargs) futharkenv
  -- The leading parameters are the shape context; infer their values
  -- from the shapes of the value arguments, defaulting to 0 when a
  -- dimension cannot be inferred from any argument.
  where shapes params =
          let (shapeparams, valparams) =
                splitAt (length params - length valargs) params
              shapemap = shapeMapping'
                         (map paramType valparams)
                         (map valueShape valargs)
          in map (PrimVal . IntValue . Int32Value . fromIntegral . fromMaybe 0 .
                  flip HM.lookup shapemap .
                  paramName)
             shapeparams
-- | Check the argument types against the function's parameter types
-- (compared up to rank, ignoring concrete sizes) and run the call.
runThisFun :: FunDef -> [Value] -> FunTable
           -> Either InterpreterError [Value]
runThisFun (FunDef _ fname _ fparams _) args ftable
  | argtypes == paramtypes =
      runFutharkM (evalFuncall fname args) futharkenv
  | otherwise =
      Left $ InvalidFunctionArguments fname
      (Just paramtypes)
      argtypes
  where argtypes = map (rankShaped . valueType) args
        paramtypes = map (rankShaped . paramType) fparams
        futharkenv = FutharkEnv { envVtable = HM.empty
                                , envFtable = ftable
                                }
-- | Build the function table for a program: the builtins extended with
-- an interpreted closure for every function defined in the program.
buildFunTable :: Prog -> FunTable
buildFunTable = foldl expand builtins . progFunctions
  where -- We assume that the program already passed the type checker, so
        -- we don't check for duplicate definitions.
        expand ftable' (FunDef _ name _ params body) =
          -- Each closure binds the formal parameters to the actual
          -- arguments and evaluates the body in that scope.
          let fun funargs = binding (zip3 (map paramIdent params) (repeat BindVar) funargs) $
                            evalBody body
          in HM.insert name fun ftable'
--------------------------------------------
--------------------------------------------
------------- BUILTIN FUNCTIONS ------------
--------------------------------------------
--------------------------------------------
-- | Table of built-in functions, keyed by name.  Every entry defers to
-- 'builtin', which performs its own argument checking.
builtins :: HM.HashMap Name ([Value] -> FutharkM [Value])
builtins = HM.fromList $ map namify
           [("sqrt32", builtin "sqrt32")
           ,("log32", builtin "log32")
           ,("exp32", builtin "exp32")
           ,("cos32", builtin "cos32")
           ,("sin32", builtin "sin32")
           ,("atan2_32", builtin "atan2_32")
           ,("isinf32", builtin "isinf32")
           ,("isnan32", builtin "isnan32")
           ,("sqrt64", builtin "sqrt64")
           ,("log64", builtin "log64")
           ,("exp64", builtin "exp64")
           ,("cos64", builtin "cos64")
           ,("sin64", builtin "sin64")
           ,("atan2_64", builtin "atan2_64")
           ,("isinf64", builtin "isinf64")
           ,("isnan64", builtin "isnan64")
           ]
  -- Convert the string keys to the interpreter's 'Name' type.
  where namify (k,v) = (nameFromString k, v)
-- | Apply a built-in function to already-evaluated arguments.  Any
-- name\/argument combination not matched by a clause below falls
-- through to the final clause and is reported as
-- 'InvalidFunctionArguments'.
builtin :: String -> [Value] -> FutharkM [Value]
-- 32-bit floating-point builtins.
builtin "sqrt32" [PrimVal (FloatValue (Float32Value x))] =
  return [PrimVal $ FloatValue $ Float32Value $ sqrt x]
builtin "log32" [PrimVal (FloatValue (Float32Value x))] =
  return [PrimVal $ FloatValue $ Float32Value $ log x]
builtin "exp32" [PrimVal (FloatValue (Float32Value x))] =
  return [PrimVal $ FloatValue $ Float32Value $ exp x]
builtin "cos32" [PrimVal (FloatValue (Float32Value x))] =
  return [PrimVal $ FloatValue $ Float32Value $ cos x]
builtin "sin32" [PrimVal (FloatValue (Float32Value x))] =
  return [PrimVal $ FloatValue $ Float32Value $ sin x]
builtin "atan2_32" [PrimVal (FloatValue (Float32Value x)),
                    PrimVal (FloatValue (Float32Value y))] =
  return [PrimVal $ FloatValue $ Float32Value $ atan2 x y]
builtin "isnan32" [PrimVal (FloatValue (Float32Value x))] =
  return [PrimVal $ BoolValue $ isNaN x]
builtin "isinf32" [PrimVal (FloatValue (Float32Value x))] =
  return [PrimVal $ BoolValue $ isInfinite x]
-- 64-bit floating-point builtins.
builtin "sqrt64" [PrimVal (FloatValue (Float64Value x))] =
  return [PrimVal $ FloatValue $ Float64Value $ sqrt x]
builtin "log64" [PrimVal (FloatValue (Float64Value x))] =
  return [PrimVal $ FloatValue $ Float64Value $ log x]
builtin "exp64" [PrimVal (FloatValue (Float64Value x))] =
  return [PrimVal $ FloatValue $ Float64Value $ exp x]
builtin "cos64" [PrimVal (FloatValue (Float64Value x))] =
  return [PrimVal $ FloatValue $ Float64Value $ cos x]
builtin "sin64" [PrimVal (FloatValue (Float64Value x))] =
  return [PrimVal $ FloatValue $ Float64Value $ sin x]
builtin "atan2_64" [PrimVal (FloatValue (Float64Value x)),
                    PrimVal (FloatValue (Float64Value y))] =
  return [PrimVal $ FloatValue $ Float64Value $ atan2 x y]
builtin "isnan64" [PrimVal (FloatValue (Float64Value x))] =
  return [PrimVal $ BoolValue $ isNaN x]
builtin "isinf64" [PrimVal (FloatValue (Float64Value x))] =
  return [PrimVal $ BoolValue $ isInfinite x]
-- Fallback: unknown builtin, or arguments of the wrong type/arity.
builtin fname args =
  bad $ InvalidFunctionArguments (nameFromString fname) Nothing $
  map (rankShaped . valueType) args
-- | Wrap one 'Value' in a singleton result list.
single :: Value -> [Value]
single = (:[])
-- | Evaluate a subexpression: a variable is looked up in the current
-- environment, while a constant is returned directly as a 'PrimVal'.
evalSubExp :: SubExp -> FutharkM Value
evalSubExp (Var ident) = lookupVar ident
evalSubExp (Constant v) = return $ PrimVal v
-- | Evaluate a body: evaluate each binding in order, extending the
-- environment, then evaluate the result subexpressions.
evalBody :: Body -> FutharkM [Value]
evalBody (Body _ [] es) =
  mapM evalSubExp es
evalBody (Body () (Let pat _ e:bnds) res) = do
  v <- evalExp e
  -- Bind every pattern element to the corresponding component of the
  -- expression's (multi-)value before evaluating the remaining bindings.
  binding (zip3
           (map patElemIdent patElems)
           (map patElemBindage patElems)
           v) $
    evalBody $ Body () bnds res
  where patElems = patternElements pat
-- | Evaluate a single expression to its (multi-)value result.
evalExp :: Exp -> FutharkM [Value]
-- Branch on a boolean condition; the result is prefixed with its
-- existential shape context.
evalExp (If e1 e2 e3 rettype) = do
  v <- evalSubExp e1
  vs <- case v of PrimVal (BoolValue True) -> evalBody e2
                  PrimVal (BoolValue False) -> evalBody e3
                  _ -> bad $ TypeError "evalExp If"
  return $ valueShapeContext rettype vs ++ vs
-- Function application: evaluate arguments, then call.
evalExp (Apply fname args rettype) = do
  args' <- mapM (evalSubExp . fst) args
  vs <- evalFuncall fname args'
  return $ valueShapeContext (retTypeValues rettype) vs ++ vs
evalExp (PrimOp op) = evalPrimOp op
-- For-loop: run the body 'bound' times, threading the merge values.
evalExp (DoLoop ctxmerge valmerge (ForLoop loopvar boundexp) loopbody) = do
  bound <- evalSubExp boundexp
  mergestart <- mapM evalSubExp mergeexp
  case bound of
    PrimVal (IntValue (Int32Value n)) -> do
      vs <- foldM iteration mergestart [0..n-1]
      binding (zip3 (map paramIdent mergepat) (repeat BindVar) vs) $
        mapM (lookupVar . paramName) $
        loopResultContext (map fst ctxmerge) (map fst valmerge) ++ map fst valmerge
    _ -> bad $ TypeError "evalBody DoLoop for"
  where merge = ctxmerge ++ valmerge
        (mergepat, mergeexp) = unzip merge
        -- One iteration: bind the loop counter and the current merge
        -- values, then evaluate the loop body.
        iteration mergeval i =
          binding [(Ident loopvar $ Prim int32, BindVar, PrimVal $ value i)] $
          binding (zip3 (map paramIdent mergepat) (repeat BindVar) mergeval) $
          evalBody loopbody
-- While-loop: repeat until the condition variable evaluates to False.
evalExp (DoLoop ctxmerge valmerge (WhileLoop cond) loopbody) = do
  mergestart <- mapM evalSubExp mergeexp
  iteration mergestart
  where merge = ctxmerge ++ valmerge
        (mergepat, mergeexp) = unzip merge
        iteration mergeval =
          binding (zip3 (map paramIdent mergepat) (repeat BindVar) mergeval) $ do
            condv <- lookupVar cond
            case condv of
              PrimVal (BoolValue False) ->
                mapM (lookupVar . paramName) $
                loopResultContext (map fst ctxmerge) (map fst valmerge) ++ map fst valmerge
              PrimVal (BoolValue True) ->
                iteration =<< evalBody loopbody
              _ ->
                bad $ TypeError "evalBody DoLoop while"
evalExp (Op op) = evalSOAC op
-- | Evaluate a primitive operation.  Scalar operators delegate to the
-- pure 'doBinOp'\/'doCmpOp'\/'doConvOp'\/'doUnOp' helpers, which return
-- 'Nothing' on ill-typed operands.
evalPrimOp :: PrimOp -> FutharkM [Value]
evalPrimOp (SubExp se) =
  single <$> evalSubExp se
-- Array literal: the row shape comes from the declared row type.
evalPrimOp (ArrayLit es rt) = do
  rowshape <- mapM (asInt "evalPrimOp ArrayLit" <=< evalSubExp) $ arrayDims rt
  single <$> (arrayVal <$>
              mapM evalSubExp es <*>
              pure (elemType rt) <*>
              pure (length es : rowshape))
evalPrimOp binop@(BinOp op e1 e2) = do
  v1 <- asPrimitive "BinOp" =<< evalSubExp e1
  v2 <- asPrimitive "BinOp" =<< evalSubExp e2
  case doBinOp op v1 v2 of
    Just v -> return [PrimVal v]
    Nothing -> bad $ TypeError $ "Cannot BinOp: " ++ unwords [pretty binop, pretty v1, pretty v2]
evalPrimOp e@(CmpOp cmp e1 e2) = do
  v1 <- asPrimitive "CmpOp" =<< evalSubExp e1
  v2 <- asPrimitive "CmpOp" =<< evalSubExp e2
  case doCmpOp cmp v1 v2 of
    Just b -> return [PrimVal $ BoolValue b]
    Nothing -> bad $ TypeError $ "Cannot compare: " ++ unwords [pretty e, pretty v1, pretty v2]
evalPrimOp e@(ConvOp op x) = do
  v <- asPrimitive "ConvOp" =<< evalSubExp x
  case doConvOp op v of
    Just v' -> return [PrimVal v']
    Nothing -> bad $ TypeError $ "Cannot convert: " ++ unwords [pretty e, pretty v]
evalPrimOp unop@(UnOp op e) = do
  v <- asPrimitive "UnOp" =<< evalSubExp e
  case doUnOp op v of
    Just v' -> return [PrimVal v']
    Nothing -> bad $ TypeError $ "Cannot UnOp: " ++ unwords [pretty unop, pretty v]
-- Array indexing; the variable name is passed along for error messages.
evalPrimOp (Index _ ident idxs) = do
  v <- lookupVar ident
  idxs' <- mapM (asInt "Index" <=< evalSubExp) idxs
  pure <$> indexArrayValue (textual ident) v idxs'
-- Iota count start stride: produce the arithmetic sequence
-- [start, start+stride, ...] with 'count' elements.  A negative count
-- is a run-time error.
evalPrimOp (Iota e x s) = do
  v1 <- evalSubExp e
  v2 <- evalSubExp x
  v3 <- evalSubExp s
  case (v1, v2, v3) of
    (PrimVal (IntValue (Int32Value e')),
     PrimVal (IntValue (Int32Value x')),
     PrimVal (IntValue (Int32Value s')))
      | e' >= 0 ->
        return [ArrayVal (listArray (0,fromIntegral e'-1) $ map value [x',x'+s'..x'+(e'-1)*s'])
                int32 [fromIntegral e']]
      | otherwise ->
        -- BUG FIX: report the offending negative element count (e'),
        -- not the start value (x'), which is what the guard rejects.
        bad $ NegativeIota $ fromIntegral e'
    _ -> bad $ TypeError "evalPrimOp Iota"
-- Replicate n v: an array of n copies of v.  Replicating an array
-- flattens the copies into one array with an extra leading dimension.
evalPrimOp (Replicate e1 e2) = do
  v1 <- evalSubExp e1
  v2 <- evalSubExp e2
  case v1 of
    PrimVal (IntValue (Int32Value x))
      | x >= 0 ->
        case v2 of
          PrimVal bv ->
            return [ArrayVal (listArray (0,fromIntegral x-1) (genericReplicate x bv))
                    (primValueType bv)
                    [fromIntegral x]]
          ArrayVal arr bt shape ->
            return [ArrayVal (listArray (0,fromIntegral x*product shape-1)
                              (concat $ genericReplicate x $ elems arr))
                    bt (fromIntegral x:shape)]
      | otherwise -> bad $ NegativeReplicate $ fromIntegral x
    _ -> bad $ TypeError "evalPrimOp Replicate"
-- Scratch: an array of the requested shape filled with blank values.
evalPrimOp (Scratch bt shape) = do
  shape' <- mapM (asInt "evalPrimOp Scratch" <=< evalSubExp) shape
  let nelems = product shape'
      vals = genericReplicate nelems v
  return [ArrayVal (listArray (0,fromIntegral nelems-1) vals) bt shape']
  where v = blankPrimValue bt
-- Reshape is free: only the shape annotation changes, provided the
-- total element count is preserved.
evalPrimOp e@(Reshape _ shapeexp arrexp) = do
  shape <- mapM (asInt "evalPrimOp Reshape" <=< evalSubExp) $ newDims shapeexp
  arr <- lookupVar arrexp
  case arr of
    ArrayVal vs bt oldshape
      | product oldshape == product shape ->
          return $ single $ ArrayVal vs bt shape
      | otherwise ->
          bad $ InvalidArrayShape (PrimOp e) oldshape shape
    _ ->
      bad $ TypeError "Reshape given a non-array argument"
evalPrimOp (Rearrange _ perm arrexp) =
  single . permuteArray perm <$> lookupVar arrexp
evalPrimOp (Rotate _ offsets arrexp) = do
  offsets' <- mapM (asInt "evalPrimOp rotate" <=< evalSubExp) offsets
  single . rotateArray offsets' <$> lookupVar arrexp
evalPrimOp (Split _ i sizeexps arrexp) = do
  sizes <- mapM (asInt "evalPrimOp Split" <=< evalSubExp) sizeexps
  arr <- lookupVar arrexp
  return $ splitArray i sizes arr
evalPrimOp (Concat _ i arr1exp arr2exps _) = do
  arr1 <- lookupVar arr1exp
  arr2s <- mapM lookupVar arr2exps
  return [foldl (concatArrays i) arr1 arr2s]
evalPrimOp (Copy v) = single <$> lookupVar v
-- Assert: succeeds with the distinguished 'Checked' value, or aborts.
evalPrimOp (Assert e loc) = do
  v <- evalSubExp e
  case v of PrimVal (BoolValue True) ->
              return [PrimVal Checked]
            _ ->
              bad $ AssertFailed loc
-- Partition: split the element lists of all arrays into n equivalence
-- classes according to the integer flags array.  The result is the n
-- class sizes followed by the concatenated per-array partitions.
evalPrimOp (Partition _ n flags arrs) = do
  flags_elems <- arrToList =<< lookupVar flags
  arrvs <- mapM lookupVar arrs
  let ets = map (elemType . valueType) arrvs
  arrs_elems <- mapM arrToList arrvs
  partitions <- mapM (partitionArray flags_elems) arrs_elems
  return $
    case partitions of
      [] ->
        -- No arrays: every class trivially has size 0.
        replicate n $ PrimVal $ IntValue $ Int32Value 0
      first_part:_ ->
        map (PrimVal . IntValue . Int32Value . genericLength) first_part ++
        [arrayVal (concat part) et (valueShape arrv) |
         (part,et,arrv) <- zip3 partitions ets arrvs]
  where partitionArray flagsv arrv =
          -- Elements are prepended per class, so reverse at the end to
          -- restore the original relative order.
          map reverse <$>
          foldM divide (replicate n []) (zip flagsv arrv)
        divide partitions (PrimVal (IntValue (Int32Value i)), v)
          | i' < 0 =
            bad $ TypeError $ "Partition key " ++ show i ++ " is negative"
          | i' < n =
            return $ genericTake i partitions ++ [v : (partitions!!i')] ++ genericDrop (i'+1) partitions
          | otherwise =
            return partitions
          where i' = fromIntegral i
        divide _ (i,_) =
          bad $ TypeError $ "Partition key " ++ pretty i ++ " is not an integer."
-- | Evaluate a second-order array combinator (SOAC).
evalSOAC :: SOAC SOACS -> FutharkM [Value]
-- Stream: run the chunk lambda once over the whole input, using the
-- full outer size as the chunk size.
evalSOAC (Stream _ w form elam arrs) = do
  let accs = getStreamAccums form
  accvals <- mapM evalSubExp accs
  arrvals <- mapM lookupVar arrs
  let ExtLambda elam_params elam_body elam_rtp = elam
  let fun funargs = binding (zip3 (map paramIdent elam_params)
                                  (repeat BindVar)
                                  funargs) $
                    evalBody elam_body
  -- get the outersize of the input array(s), and use it as chunk!
  chunkval <- evalSubExp w
  vs <- fun (chunkval:accvals++arrvals)
  return $ valueShapeContext elam_rtp vs ++ vs
-- Map: apply the lambda to each row, then re-assemble result arrays.
evalSOAC (Map _ w fun arrexps) = do
  vss' <- mapM (applyLambda fun) =<< soacArrays w arrexps
  arrays (lambdaReturnType fun) vss'
-- Reduce: left fold of the lambda over the rows, seeded with the
-- neutral elements.
evalSOAC (Reduce _ w _ fun inputs) = do
  let (accexps, arrexps) = unzip inputs
  startaccs <- mapM evalSubExp accexps
  let foldfun acc x = applyLambda fun $ acc ++ x
  foldM foldfun startaccs =<< soacArrays w arrexps
-- Scan: like Reduce, but also collect every intermediate accumulator.
evalSOAC (Scan _ w fun inputs) = do
  let (accexps, arrexps) = unzip inputs
  startvals <- mapM evalSubExp accexps
  (acc, vals') <- foldM scanfun (startvals, []) =<<
                  soacArrays w arrexps
  -- Intermediates were accumulated in reverse order.
  arrays (map valueType acc) $ reverse vals'
  where scanfun (acc, l) x = do
          acc' <- applyLambda fun $ acc ++ x
          return (acc', acc' : l)
-- Redomap is evaluated lazily as the corresponding Scanomap, after
-- which the last element of each accumulator array is the reduction
-- result.
evalSOAC (Redomap cs w _ redfun foldfun accexp arrexps) = do
  -- BUG FIX: the error label previously said "evalPrimOp Split"
  -- (copy-pasted from the Split clause); name this clause instead.
  w' <- asInt "evalSOAC Redomap" =<< evalSubExp w
  vs <- evalSOAC $ Scanomap cs w redfun foldfun accexp arrexps
  let (acc_arrs, arrs) = splitAt (length accexp) vs
  accs <- forM acc_arrs $ \acc_arr ->
    indexArrayValue "<redomap result>" acc_arr [w' - 1]
  return $ accs++arrs
-- Scanomap: a fused map+scan.  When the lambda returns more values
-- than there are accumulators, the extra values form mapped-out arrays
-- that are collected separately.
evalSOAC (Scanomap _ w _ innerfun accexp arrexps) = do
  startaccs <- mapM evalSubExp accexp
  if res_len == acc_len
  then do (acc, vals) <- foldM foldfun (startaccs, []) =<< soacArrays w arrexps
          arrays (map valueType acc) $ reverse vals
  else do let startaccs'= (startaccs, [], replicate (res_len - acc_len) [])
          (acc_res, vals, arr_res) <- foldM foldfun' startaccs' =<<
                                      soacArrays w arrexps
          vals' <- arrays (map valueType acc_res) $ reverse vals
          -- Mapped-out elements were prepended per output; undo both
          -- the per-output reversal and the element/output transposition.
          arr_res_fut <- arrays lam_ret_arr_tp $ transpose $ map reverse arr_res
          return $ vals' ++ arr_res_fut
  where
    lam_ret_tp = lambdaReturnType innerfun
    res_len = length lam_ret_tp
    acc_len = length accexp
    lam_ret_arr_tp = drop acc_len lam_ret_tp
    -- Plain scan step: every lambda result is an accumulator.
    foldfun (acc, l) x = do
      acc' <- applyLambda innerfun $ acc ++ x
      return (acc', acc':l)
    -- Scan step with mapped-out results: the first acc_len results are
    -- accumulators, the rest are appended to the mapped-out streams.
    foldfun' (acc, l, arr) x = do
      res_lam <- applyLambda innerfun $ acc ++ x
      let res_acc = take acc_len res_lam
          res_arr = drop acc_len res_lam
          acc_arr = zipWith (:) res_arr arr
      return (res_acc, res_acc:l, acc_arr)
-- Write (scatter): the lambda produces index/value pairs that are
-- written into copies of the destination arrays; out-of-bounds indexes
-- are silently ignored.
evalSOAC (Write _cs len lam ivs as) = do
  let valInt :: Value -> FutharkM Int
      valInt (PrimVal (IntValue (Int32Value l))) = return $ fromIntegral l
      valInt _ = bad $ TypeError "evalSOAC Write: Wrong type for length"
  len' <- valInt =<< evalSubExp len
  as' <- mapM (lookupVar . snd) as
  -- Calculate all indexes and values.
  ivs' <- soacArrays len ivs
  ivs'' <- mapM (applyLambda lam) ivs'
  -- The lambda returns ivsLen indexes followed by ivsLen values.
  let ivsLen = length (lambdaReturnType lam) `div` 2
      is = transpose $ map (take ivsLen) ivs''
      vs = transpose $ map (drop ivsLen) ivs''
  is' <- mapM (mapM valInt) is
  (aArrs, aPrimTypes, aShapes) <-
    unzip3 <$> mapM (toArrayVal "evalSOAC Write: Wrong type for 'array' array") as'
  let handleIteration :: [Array Int PrimValue] -> Int -> FutharkM [Array Int PrimValue]
      handleIteration arrs iter = do
        let updatess =
              [ if idx < 0 || idx >= length (elems a)
                then []
                else case val of
                  PrimVal pval -> [(idx, pval)]
                  ArrayVal arr _ _ ->
                    zip [idx * fromIntegral (length (elems arr))..] (elems arr)
              | (i, v, a) <- zip3 is' vs arrs,
                let idx = i !! iter
                    val = v !! iter
              ]
        return [ arr // updates
               | (arr, updates) <- zip arrs updatess
               ]
  ress <- foldM handleIteration aArrs [0..fromIntegral len' - 1]
  return $ zipWith3 ArrayVal ress aPrimTypes aShapes
-- | Deconstruct an 'ArrayVal' into its element array, element type and
-- shape; any other 'Value' aborts with a 'TypeError' carrying the
-- supplied message.
toArrayVal :: String -> Value -> FutharkM (Array Int PrimValue, PrimType, [Int])
toArrayVal _ (ArrayVal arr et shape) = return (arr, et, shape)
toArrayVal err _ = bad $ TypeError err
-- | Index an array value with a (possibly partial) list of indexes.
-- A full index yields the scalar element; a partial index yields the
-- contiguous slice with the remaining dimensions.  The string is used
-- in error messages only.
indexArrayValue :: String -> Value -> [Int] -> FutharkM Value
indexArrayValue ident (ArrayVal arr bt shape) idxs = do
  flatidx <- indexArray ident shape idxs
  if length idxs == length shape
    then return $ PrimVal $ arr ! flatidx
    else let resshape = drop (length idxs) shape
             ressize = product resshape
         in return $ ArrayVal (listArray (0,ressize-1)
                               [ arr ! (flatidx+i) | i <- [0..ressize-1] ])
            bt resshape
indexArrayValue _ _ _ = bad $ TypeError "indexArrayValue: ident is not an array"
-- | Call a function by name with already-evaluated arguments.
evalFuncall :: Name -> [Value] -> FutharkM [Value]
evalFuncall fname args = do
  fun <- lookupFun fname
  fun args

-- | Apply a lambda: bind its parameters to the arguments, evaluate the
-- body, and check the result against the declared return shapes.
applyLambda :: Lambda -> [Value] -> FutharkM [Value]
applyLambda (Lambda params body rettype) args = do
  v <- binding (zip3 (map paramIdent params) (repeat BindVar) args) $
       evalBody body
  checkReturnShapes (staticShapes rettype) v
  return v
-- | Verify that each returned value's shape matches the corresponding
-- declared return type.  Existential ('Ext') dimensions match any
-- size; concrete ('Free') dimensions are evaluated and compared.
checkReturnShapes :: [TypeBase ExtShape u] -> [Value] -> FutharkM ()
checkReturnShapes = zipWithM_ checkShape
  where checkShape t val = do
          let valshape = map (PrimVal . IntValue . Int32Value . fromIntegral) $ valueShape val
              retdims = extShapeDims $ arrayShape t
              evalExtDim (Free se) = do v <- evalSubExp se
                                        return $ Just (se, v)
              evalExtDim (Ext _) = return Nothing
              matches (Just (_, v1), v2) = v1 == v2
              matches (Nothing, _) = True
          retshape <- mapM evalExtDim retdims
          unless (all matches $ zip retshape valshape) $
            bad $ TypeError $
            "checkReturnShapes:\n" ++
            "Return type specifies shape [" ++
            intercalate "," (map ppDim retshape) ++
            "], but returned value is of shape [" ++
            intercalate "," (map pretty valshape) ++ "]."
        -- Pretty-print one declared dimension for the error message.
        ppDim (Just (Constant v, _)) = pretty v
        ppDim (Just (Var e, v)) = pretty e ++ "=" ++ pretty v
        ppDim Nothing = "?"
| mrakgr/futhark | src/Futhark/Interpreter.hs | bsd-3-clause | 30,501 | 0 | 27 | 8,309 | 9,978 | 4,901 | 5,077 | -1 | -1 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.SGIS.PointLineTexgen
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.SGIS.PointLineTexgen (
-- * Extension Support
glGetSGISPointLineTexgen,
gl_SGIS_point_line_texgen,
-- * Enums
pattern GL_EYE_DISTANCE_TO_LINE_SGIS,
pattern GL_EYE_DISTANCE_TO_POINT_SGIS,
pattern GL_EYE_LINE_SGIS,
pattern GL_EYE_POINT_SGIS,
pattern GL_OBJECT_DISTANCE_TO_LINE_SGIS,
pattern GL_OBJECT_DISTANCE_TO_POINT_SGIS,
pattern GL_OBJECT_LINE_SGIS,
pattern GL_OBJECT_POINT_SGIS
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
| haskell-opengl/OpenGLRaw | src/Graphics/GL/SGIS/PointLineTexgen.hs | bsd-3-clause | 920 | 0 | 5 | 119 | 82 | 57 | 25 | 14 | 0 |
module Cauterize.Specification ( module X ) where
import Cauterize.Specification.Types as X
import Cauterize.Specification.Parser as X
import Cauterize.Specification.Compile as X
| cauterize-tools/cauterize | src/Cauterize/Specification.hs | bsd-3-clause | 180 | 0 | 4 | 20 | 36 | 26 | 10 | 4 | 0 |
{-# LANGUAGE LiberalTypeSynonyms, ImplicitParams, TypeOperators, CPP #-}
module Data.TrieMap.TrieKey.SetOp (
IsectM, UnionM, DiffM,
isectM, unionM, diffM,
Isect, Union, Diff,
SetOp(..)) where
import Data.TrieMap.Sized
import Data.TrieMap.TrieKey.Subset
-- Partial combining functions: 'Nothing' signals an empty result.
type IsectM f a b c = f a -> f b -> Maybe (f c)
type UnionM f a = f a -> f a -> Maybe (f a)
type DiffM f a b = f a -> f b -> Maybe (f a)
-- Total combining functions.
type Isect f a b c = f a -> f b -> f c
type Union f a = f a -> f a -> f a
type Diff f a b = f a -> f b -> f a
-- Identity functor, so the synonyms above can be instantiated at the
-- raw element level (e.g. @IsectM Id a b c = a -> b -> Maybe c@).
type Id a = a
-- | Containers supporting set operations: each method lifts a partial
-- element-level combining function (on 'Id') to a total operation on
-- the container.
class SetOp f where
  isect :: Sized c => IsectM Id a b c -> Isect f a b c
  union :: Sized a => UnionM Id a -> Union f a
  diff :: Sized a => DiffM Id a b -> Diff f a b
-- | 'Maybe' models a container holding zero or one element.
-- 'Nothing' is absorbing for 'isect', the identity for 'union', and
-- for 'diff' it removes nothing from the left operand.
instance SetOp Maybe where
  {-# INLINE isect #-}
  {-# INLINE union #-}
  {-# INLINE diff #-}
  isect f (Just a) (Just b) = f a b
  isect _ _ _ = Nothing
  union f (Just a) (Just b) = f a b
  union _ (Just a) Nothing = Just a
  union _ Nothing (Just b) = Just b
  union _ Nothing Nothing = Nothing
  diff f (Just a) (Just b) = f a b
  diff _ (Just a) Nothing = Just a
  diff _ Nothing _ = Nothing
-- The three wrappers below turn a total container-level operation back
-- into a partial one by post-processing with 'guardNull' (from
-- "Data.TrieMap.TrieKey.Subset"), which presumably maps empty results
-- to 'Nothing' -- TODO confirm against that module.
{-# INLINE isectM #-}
isectM :: (Nullable f, SetOp f, Sized c) => IsectM Id a b c -> IsectM f a b c
isectM f a b = guardNull (isect f a b)
{-# INLINE diffM #-}
diffM :: (Nullable f, SetOp f, Sized a) => DiffM Id a b -> DiffM f a b
diffM f a b = guardNull (diff f a b)
{-# INLINE unionM #-}
unionM :: (Nullable f, SetOp f, Sized a) => UnionM Id a -> UnionM f a
unionM f a b = guardNull (union f a b)
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# OPTIONS_GHC -Wno-missing-signatures #-}
module Nix.Utils (module Nix.Utils, module X) where
import Control.Arrow ( (&&&) )
import Control.Monad
import Control.Monad.Fix
import Control.Monad.Free
import Control.Monad.Trans.Control ( MonadTransControl(..) )
import qualified Data.Aeson as A
import qualified Data.Aeson.Encoding as A
import Data.Fix
import Data.Hashable
import Data.HashMap.Lazy ( HashMap )
import qualified Data.HashMap.Lazy as M
import Data.List ( sortOn )
import Data.Monoid ( Endo
, (<>)
)
import Data.Text ( Text )
import qualified Data.Text as Text
import qualified Data.Vector as V
import Lens.Family2 as X
import Lens.Family2.Stock ( _1
, _2
)
import Lens.Family2.TH
#if ENABLE_TRACING
import Debug.Trace as X
#else
import Prelude as X
hiding ( putStr
, putStrLn
, print
)
-- No-op stand-ins used when tracing is disabled at build time
-- (the ENABLE_TRACING CPP flag is off): the message is discarded.
trace :: String -> a -> a
trace = const id
traceM :: Monad m => String -> m ()
traceM = const (return ())
#endif
$(makeLensesBy (\n -> Just ("_" ++ n)) ''Fix)
-- | Difference list: a list with O(1) append, represented as 'Endo'.
type DList a = Endo [a]
-- | Attribute sets are hash maps keyed by 'Text'.
type AttrSet = HashMap Text
-- | An f-algebra defines how to reduce the fixed-point of a functor to a
-- value.
type Alg f a = f a -> a
-- | A monadic f-algebra.
type AlgM f m a = f a -> m a
-- | A "transform" here is a modification of a catamorphism.
type Transform f a = (Fix f -> a) -> Fix f -> a
-- | Flipped 'fmap': pipe a functorial value into a pure function.
(<&>) :: Functor f => f a -> (a -> c) -> f c
(<&>) = flip fmap

-- | Apply every function inside a functor to the same argument.
(??) :: Functor f => f (a -> b) -> a -> f b
(??) fs a = ($ a) <$> fs
-- | Löb's theorem as a combinator: every function in the container is
-- applied to the container's own final result, tying the knot lazily.
loeb :: Functor f => f (f a -> a) -> f a
loeb fs = knot
  where knot = fmap ($ knot) fs
-- | Monadic 'loeb', using 'mfix' for the value recursion: each
-- effectful function receives the final traversed result.
loebM :: (MonadFix m, Traversable t) => t (t a -> m a) -> m (t a)
loebM f = mfix $ \a -> mapM ($ a) f
-- | Paramorphism: like a catamorphism, but each layer also sees the
-- original (unreduced) subterm alongside its reduced value.
para :: Functor f => (f (Fix f, a) -> a) -> Fix f -> a
para f = f . fmap (id &&& para f) . unFix
-- | Monadic paramorphism.
paraM :: (Traversable f, Monad m) => (f (Fix f, a) -> m a) -> Fix f -> m a
paraM f = f <=< traverse (\x -> (x, ) <$> paraM f x) . unFix
-- | Catamorphism whose algebra also receives the current whole term.
cataP :: Functor f => (Fix f -> f a -> a) -> Fix f -> a
cataP f x = f x . fmap (cataP f) . unFix $ x
-- | Monadic variant of 'cataP'.
cataPM :: (Traversable f, Monad m) => (Fix f -> f a -> m a) -> Fix f -> m a
cataPM f x = f x <=< traverse (cataPM f) . unFix $ x
-- | Map a natural transformation over every layer of a fixed point.
transport :: Functor g => (forall x . f x -> g x) -> Fix f -> Fix g
transport f (Fix x) = Fix $ fmap (transport f) (f x)
-- | Lift a continuation-consuming operation through a monad
-- transformer using 'MonadTransControl', restoring the captured
-- transformer state afterwards.
lifted
  :: (MonadTransControl u, Monad (u m), Monad m)
  => ((a -> m (StT u b)) -> m (StT u b))
  -> (a -> u m b)
  -> u m b
lifted f k = liftWith (\run -> f (run . k)) >>= restoreT . return
-- | Collapse a 'Free' structure to a 'Fix' by converting the 'Pure'
-- leaves with the supplied function.
freeToFix :: Functor f => (a -> Fix f) -> Free f a -> Fix f
freeToFix f = go
 where
  go (Pure a) = f a
  go (Free v) = Fix (fmap go v)
-- | Embed a 'Fix' into 'Free'; the result has no 'Pure' leaves, so it
-- is polymorphic in the leaf type.
fixToFree :: Functor f => Fix f -> Free f a
fixToFree = Free . go where go (Fix f) = fmap (Free . go) f
-- | adi is Abstracting Definitional Interpreters:
--
--     https://arxiv.org/abs/1707.04755
--
--     Essentially, it does for evaluation what recursion schemes do for
--     representation: allows threading layers through existing structure, only
--     in this case through behavior.
--
-- @f@ is the base algebra; @g@ wraps each recursive step and may
-- intercept, modify or skip it.
adi :: Functor f => (f a -> a) -> ((Fix f -> a) -> Fix f -> a) -> Fix f -> a
adi f g = g (f . fmap (adi f g) . unFix)
-- | Monadic variant of 'adi'.
adiM
  :: (Traversable t, Monad m)
  => (t a -> m a)
  -> ((Fix t -> m a) -> Fix t -> m a)
  -> Fix t
  -> m a
adiM f g = g ((f <=< traverse (adiM f g)) . unFix)
-- | @Has a b@ means a value of type @b@ can be focused inside @a@
-- through a lens, e.g. a field inside an environment record.
class Has a b where
  hasLens :: Lens' a b
-- | Every type trivially contains itself.
instance Has a a where
  hasLens f = f
-- | Pairs expose their components.
instance Has (a, b) a where
  hasLens = _1
instance Has (a, b) b where
  hasLens = _2
-- | Render an Aeson 'A.Value' with object keys emitted in sorted
-- order, recursively, for deterministic output.
toEncodingSorted :: A.Value -> A.Encoding
toEncodingSorted = \case
  A.Object m ->
    A.pairs
      . mconcat
      . fmap (\(k, v) -> A.pair k $ toEncodingSorted v)
      . sortOn fst
      $ M.toList m
  A.Array l -> A.list toEncodingSorted $ V.toList l
  -- Scalars need no special treatment.
  v -> A.toEncoding v
-- | Classifies each component of a @NIX_PATH@-style string as a plain
-- path or a URI.
data NixPathEntryType = PathEntryPath | PathEntryURI deriving (Show, Eq)
-- | @NIX_PATH@ is colon-separated, but can also contain URLs, which have a colon
-- (i.e. @https://...@)
uriAwareSplit :: Text -> [(Text, NixPathEntryType)]
uriAwareSplit = go where
  go str = case Text.break (== ':') str of
    (e1, e2)
      | Text.null e2
      -> [(e1, PathEntryPath)]
        -- The break left the ':' at the head of e2, so a "://" prefix
        -- means e1 is a URI scheme: keep the URI together up to the
        -- *next* colon.  'go' never returns [], so this match is total.
      | Text.pack "://" `Text.isPrefixOf` e2
      -> let ((suffix, _) : path) = go (Text.drop 3 e2)
         in (e1 <> Text.pack "://" <> suffix, PathEntryURI) : path
      | otherwise
      -> (e1, PathEntryPath) : go (Text.drop 1 e2)
-- | @Data.Map@-style @alterF@ for 'HashMap': run an effectful update
-- on the (possibly absent) entry at @k@; a 'Nothing' result deletes
-- the key and a 'Just' result inserts\/replaces it.
alterF
  :: (Eq k, Hashable k, Functor f)
  => (Maybe v -> f (Maybe v))
  -> k
  -> HashMap k v
  -> f (HashMap k v)
alterF f k m = f (M.lookup k m) <&> \case
  Nothing -> M.delete k m
  Just v -> M.insert k v m
| jwiegley/hnix | src/Nix/Utils.hs | bsd-3-clause | 5,544 | 0 | 18 | 1,898 | 2,033 | 1,069 | 964 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
module Data.Diverse.ManySpec (main, spec) where
import Data.Diverse
import Data.Int
import Data.Tagged
import Data.Typeable
import Test.Hspec
-- `main` is here so that this module can be run from GHCi on its own. It is
-- not needed for automatic spec dicovery.
main :: IO ()
main = hspec spec
--------------------------------
-- Empty phantom types, used below only as 'Tagged' labels
-- (e.g. @Tagged \@Foo 'X'@).
data Foo
data Bar
spec :: Spec
spec = do
describe "Many" $ do
-- it "Test user friendly compile errors" $ do
-- let y = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
-- -- ghc 8.0.2: IndexOf error: ‘Maybe Bool’ is not a member of ...
-- -- ghc 8.0.1 has terrible error message: "No instance for NatToInt"
-- grab @(Maybe Bool) y `shouldBe` (Just False)
-- -- Not unique error: ‘Maybe Char’ is a duplicate in ...
-- grab @(Maybe Bool) y `shouldBe` (Just False)
it "is a Typeable" $ do
let x = (5 :: Int) ./ False ./ nil
y = cast x :: Maybe (Many '[Int, String])
z = cast x :: Maybe (Many '[Int, Bool])
y `shouldBe` Nothing
z `shouldBe` Just x
#if __GLASGOW_HASKELL__ >= 802
let expected = "Many (': * Int (': * Bool ('[] *)))"
#else
let expected = "Many (': * Int (': * Bool '[]))"
#endif
(show . typeRep . (pure @Proxy) $ x) `shouldBe` expected
it "is a Read and Show" $ do
let s = "5 ./ False ./ 'X' ./ Just 'O' ./ nil"
s' = "5 ./ False ./ 'X' ./ (Just 'O' ./ (nil))"
x = read s :: Many '[Int, Bool, Char, Maybe Char]
x' = read s' :: Many '[Int, Bool, Char, Maybe Char]
show x `shouldBe` s
show x' `shouldBe` s
it "is a Eq" $ do
let s = "5 ./ False ./ 'X' ./ Just 'O' ./ nil"
x = read s :: Many '[Int, Bool, Char, Maybe Char]
y = 5 ./ False ./ 'X' ./ Just 'O' ./ nil
x `shouldBe` y
it "is an Ord" $ do
let s = "5 ./ False ./ 'X' ./ Just 'O' ./ nil"
x = read s :: Many '[Int, Bool, Char, Maybe Char]
y5o = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ nil
y4o = (4 :: Int) ./ False ./ 'X' ./ Just 'O' ./ nil
y5p = (5 :: Int) ./ False ./ 'X' ./ Just 'P' ./ nil
compare x y5o `shouldBe` EQ
compare y4o y5o `shouldBe` LT
compare y5o y4o `shouldBe` GT
compare y5o y5p `shouldBe` LT
compare y5p y5o `shouldBe` GT
it "can converted to and from a tuple" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ nil
t = ((5 :: Int), False, 'X', Just 'O')
x `shouldBe` toMany' t
t `shouldBe` fromMany' x
it "can construct using 'single', 'nil', 'consMany', 'snocMany', 'append'" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ nil
x' = (5 :: Int) `consMany` False `consMany` 'X' `consMany` Just 'O' `consMany` nil
y = single (5 :: Int) \. False \. 'X' \. Just 'O'
y' = single (5 :: Int) `snocMany` False `snocMany` 'X' `snocMany` Just 'O'
a = single (5 :: Int) `snocMany` False
b = single 'X' `snocMany` Just 'O'
x `shouldBe` y
x `shouldBe` x'
y `shouldBe` y'
a /./ b `shouldBe` x
a `append` b `shouldBe` x
it "can 'snocMany'' a value only if that type doesn't already exist" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ nil
y = x `snocMany'` True
y `shouldBe` x
-- it "can 'append'' the unique types from another Many" $ do
-- let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ nil
-- y = (5 :: Int) ./ Just True ./ 'X' ./ Just False ./ Just (6 :: Int) ./ Just 'O' ./ nil
-- (x `append'` y) `shouldBe` (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ Just True ./ Just (6 :: Int) ./ nil
it "can contain multiple fields of the same type" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ nil
y = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
(x /./ (6 :: Int) ./ Just 'A' ./ nil) `shouldBe` y
it "can destruct using 'front', 'back', 'aft', 'fore'" $ do
let a = (x ./ y) \. z
x = 5 :: Int
y = single False ./ 'X' ./ nil
z = Just 'O'
front a `shouldBe` x
back a `shouldBe` z
aft a `shouldBe` (y \. z)
fore a `shouldBe` x ./ y
it "has getter for unique fields using 'grab'" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ nil
grab @Int x `shouldBe` 5
grab @Bool x `shouldBe` False
grab @Char x `shouldBe` 'X'
grab @(Maybe Char) x `shouldBe` Just 'O'
it "has getter for for unique fields using 'grabN'" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ nil
grabN @0 x `shouldBe` 5
grabN @1 x `shouldBe` False
grabN @2 x `shouldBe` 'X'
grabN @3 x `shouldBe` Just 'O'
it "has getter for duplicate fields using 'grabN'" $ do
let y = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
grabN @0 y `shouldBe` 5
grabN @1 y `shouldBe` False
grabN @2 y `shouldBe` 'X'
grabN @3 y `shouldBe` Just 'O'
grabN @4 y `shouldBe` 6
grabN @5 y `shouldBe` Just 'A'
it "with duplicate fields can still use 'grab' for unique fields" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
grab @Bool x `shouldBe` False
grab @Char x `shouldBe` 'X'
it "has getter for unique labels using 'grabL'" $ do
let y = (5 :: Int) ./ False ./ Tagged @Foo 'X' ./ Tagged @"Hello" (6 :: Int) ./ nil
grab @(Tagged Foo _) y `shouldBe` Tagged @Foo 'X'
grabL @Foo y `shouldBe` Tagged @Foo 'X'
grabL @"Hello" y `shouldBe` Tagged @"Hello" (6 :: Int)
it "has setter for unique fields using 'replace''" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ nil
replace' @Int x 6 `shouldBe` (6 :: Int) ./ False ./ 'X' ./ Just 'O' ./ nil
replace' x True `shouldBe` (5 :: Int) ./ True ./ 'X' ./ Just 'O' ./ nil
replace' x 'O' `shouldBe` (5 :: Int) ./ False ./ 'O' ./ Just 'O' ./ nil
replace' x (Just 'P') `shouldBe` (5 :: Int) ./ False ./ 'X' ./ Just 'P' ./ nil
it "has polymorphic setter for unique fields using 'replace'" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ nil
replace @Int x 'Z' `shouldBe` 'Z' ./ False ./ 'X' ./ Just 'O' ./ nil
replace @Bool x 'Z' `shouldBe` (5 :: Int) ./ 'Z' ./ 'X' ./ Just 'O' ./ nil
replace @(Maybe Char) x 'Z' `shouldBe` (5 :: Int) ./ False ./ 'X' ./ 'Z' ./ nil
it "has setter for unique labels using 'replaceL''" $ do
let y = (5 :: Int) ./ False ./ Tagged @Foo 'X' ./ Tagged @"Hello" (6 :: Int) ./ nil
replace' @(Tagged Foo _) y (Tagged @Foo 'Y') `shouldBe`
(5 :: Int) ./ False ./ Tagged @Foo 'Y' ./ Tagged @"Hello" (6 :: Int) ./ nil
replaceL' @Foo y (Tagged @Foo 'Y') `shouldBe`
(5 :: Int) ./ False ./ Tagged @Foo 'Y' ./ Tagged @"Hello" (6 :: Int) ./ nil
replaceL' @"Hello" y (Tagged @"Hello" 7) `shouldBe`
(5 :: Int) ./ False ./ Tagged @Foo 'X' ./ Tagged @"Hello" (7 :: Int) ./ nil
it "has polymorphic setter for unique labels using 'replaceL'" $ do
let y = (5 :: Int) ./ False ./ Tagged @Foo 'X' ./ Tagged @"Hello" (6 :: Int) ./ nil
replace @(Tagged Foo Char) y (Tagged @Bar 'Y') `shouldBe`
(5 :: Int) ./ False ./ Tagged @Bar 'Y' ./ Tagged @"Hello" (6 :: Int) ./ nil
replaceL @Foo y (Tagged @Bar 'Y') `shouldBe`
(5 :: Int) ./ False ./ Tagged @Bar 'Y' ./ Tagged @"Hello" (6 :: Int) ./ nil
replaceL @"Hello" y (Tagged @"Hello" False) `shouldBe`
(5 :: Int) ./ False ./ Tagged @Foo 'X' ./ Tagged @"Hello" False ./ nil
it "has setter for unique fields using 'replaceN''" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ nil
replaceN' @0 x (7 :: Int) `shouldBe`
(7 :: Int) ./ False ./ 'X' ./ Just 'O' ./ nil
replaceN' @1 x True `shouldBe`
(5 :: Int) ./ True ./ 'X' ./ Just 'O' ./ nil
replaceN' @2 x 'Y' `shouldBe`
(5 :: Int) ./ False ./ 'Y' ./ Just 'O' ./ nil
replaceN' @3 x (Just 'P') `shouldBe`
(5 :: Int) ./ False ./ 'X' ./ Just 'P' ./ nil
it "has polymorphic setter using 'replaceN''" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ nil
replaceN @0 x True `shouldBe`
True ./ False ./ 'X' ./ Just 'O' ./ nil
let y = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
replaceN @1 y 'Y' `shouldBe`
(5 :: Int) ./ 'Y' ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
replaceN @5 y 'Y' `shouldBe`
(5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ 'Y' ./ nil
it "has setter for duplicate fields using 'replaceN''" $ do
let y = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
replaceN' @0 y (7 :: Int) `shouldBe`
(7 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
replaceN' @1 y True `shouldBe`
(5 :: Int) ./ True ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
replaceN' @2 y 'Y' `shouldBe`
(5 :: Int) ./ False ./ 'Y' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
replaceN' @3 y (Just 'P') `shouldBe`
(5 :: Int) ./ False ./ 'X' ./ Just 'P' ./ (6 :: Int) ./ Just 'A' ./ nil
replaceN' @4 y (8 :: Int) `shouldBe`
(5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (8 :: Int) ./ Just 'A' ./ nil
replaceN' @5 y (Just 'B') `shouldBe`
(5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'B' ./ nil
it "has setter for unique fields using 'replace'' (even if there are other duplicate fields)" $ do
let y = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
replace' @Bool y True `shouldBe`
(5 :: Int) ./ True ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
replace' @Char y 'Y' `shouldBe`
(5 :: Int) ./ False ./ 'Y' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
it "has getter for multiple fields using 'select'" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ nil
select @'[Int, Maybe Char] x `shouldBe` (5 :: Int) ./ Just 'O' ./ nil
it "has getter for multiple labelled fields using 'selectL'" $ do
let x = False ./ Tagged @"Hi" (5 :: Int) ./ Tagged @Foo False ./ Tagged @Bar 'X' ./ Tagged @"Bye" 'O' ./ nil
selectL @'[Foo, Bar] x `shouldBe` Tagged @Foo False ./ Tagged @Bar 'X' ./ nil
selectL @'["Hi", "Bye"] x `shouldBe` Tagged @"Hi" (5 :: Int) ./ Tagged @"Bye" 'O' ./ nil
-- below won't compile because the type of labels must match
-- selectL @'["Hi", 'Foo, "Bye"] x `shouldBe` Tagged @"Hi" (5 :: Int) ./ Tagged @Foo False ./ Tagged @"Bye" 'O' ./ nil
it "can reorder fields using 'select' or 'selectN'" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ nil
select @'[Bool, Int, Maybe Char] x `shouldBe` False ./ (5 :: Int) ./ Just 'O' ./ nil
let y = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
selectN @'[5, 4, 0, 1, 3, 2] y `shouldBe`
Just 'A' ./ (6 :: Int) ./ (5 ::Int) ./ False ./ Just 'O' ./ 'X' ./ nil
it "has getter for multiple fields with duplicates using 'selectN'" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
selectN @'[5, 4, 0] x `shouldBe` Just 'A' ./ (6 :: Int) ./ (5 ::Int) ./ nil
it "can't select into types from indistinct fields" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
-- Compile error: Int is a duplicate
-- select @[Bool, Char, Int] x `shouldBe` False ./ 'X' ./ (5 :: Int) ./ nil
x `shouldBe` x
it "with duplicate fields has getter for multiple unique fields 'select'" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
select @'[Bool, Char] x `shouldBe` False ./ 'X' ./ nil
it "has setter for multiple fields using 'amend''" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ nil
amend' @'[Int, Maybe Char] x ((6 :: Int) ./ Just 'P' ./ nil) `shouldBe` (6 :: Int) ./ False ./ 'X' ./ Just 'P' ./ nil
it "has polymorphc setter for multiple fields using 'amend'" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ nil
amend @'[Int, Maybe Char] x ("Foo" ./ "Bar" ./ nil) `shouldBe` "Foo" ./ False ./ 'X' ./ "Bar" ./ nil
it "has setter for multiple labelled fields using 'amendL''" $ do
let x = False ./ Tagged @"Hi" (5 :: Int) ./ Tagged @Foo False ./ Tagged @Bar 'X' ./ Tagged @"Bye" 'O' ./ nil
amendL' @'[Foo, Bar] x (Tagged @Foo True ./ Tagged @Bar 'Y' ./ nil) `shouldBe`
False ./ Tagged @"Hi" (5 :: Int) ./ Tagged @Foo True ./ Tagged @Bar 'Y' ./ Tagged @"Bye" 'O' ./ nil
amendL' @'["Hi", "Bye"] x (Tagged @"Hi" (6 :: Int) ./ Tagged @"Bye" 'P' ./ nil) `shouldBe`
False ./ Tagged @"Hi" (6 :: Int) ./ Tagged @Foo False ./ Tagged @Bar 'X' ./ Tagged @"Bye" 'P' ./ nil
it "has polymorphic setter for multiple labelled fields using 'amendL'" $ do
let x = False ./ Tagged @"Hi" (5 :: Int) ./ Tagged @Foo False ./ Tagged @Bar 'X' ./ Tagged @"Bye" 'O' ./ nil
amendL @'[Foo, Bar] x ('Y' ./ True ./ nil) `shouldBe`
False ./ Tagged @"Hi" (5 :: Int) ./ 'Y' ./ True ./ Tagged @"Bye" 'O' ./ nil
amendL @'["Hi", "Bye"] x (True ./ Tagged @"Changed" True ./ nil) `shouldBe`
False ./ True ./ Tagged @Foo False ./ Tagged @Bar 'X' ./ Tagged @"Changed" True ./ nil
it "has setter for multiple fields with duplicates using 'amendN''" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
amendN' @'[5, 4, 0] x (Just 'B' ./ (8 :: Int) ./ (4 ::Int) ./ nil) `shouldBe`
(4 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (8 :: Int) ./ Just 'B' ./ nil
it "has polymorphic setter for multiple fields with duplicates using 'amendN''" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
amendN @'[5, 4, 0] x ("Foo" ./ Just 'B' ./ 'Z' ./ nil) `shouldBe`
'Z' ./ False ./ 'X' ./ Just 'O' ./ Just 'B' ./ "Foo" ./ nil
it "can't amend into types from indistinct fields" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
-- Compile error: Int is a duplicate
-- amend' @ '[Bool, Char, Int] x (True ./ 'B' ./ (8 :: Int) ./ nil) `shouldBe`
-- (5 :: Int) ./ True ./ 'B' ./ Just 'O' ./ (8 :: Int) ./ Just 'A' ./ nil
x `shouldBe` x
it "with duplicate fields has setter for unique fields 'amend''" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
amend' @ '[Bool, Char] x (True ./ 'B' ./ nil) `shouldBe`
(5 :: Int) ./ True ./ 'B' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
it "can be folded with 'Many' handlers using 'forMany' or 'collect'" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
y = show @Int ./ show @Char ./ show @(Maybe Char) ./ show @Bool ./ nil
ret = ["5", "False", "'X'", "Just 'O'", "6", "Just 'A'"]
afoldr (:) [] (collect x (cases y)) `shouldBe` ret
afoldr (:) [] (forMany (cases y) x) `shouldBe` ret
afoldr (:) [] (forMany (cases y) x) `shouldBe` ret
it "can be folded with polymorphic 'CaseFunc' handlers using 'forMany' or 'collect'" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
afoldr (:) [] (forMany (CaseFunc @Typeable (show . typeRep . (pure @Proxy))) x) `shouldBe` ["Int", "Bool", "Char", "Maybe Char", "Int", "Maybe Char"]
it "can be folded with 'Many' handlers in index order using 'forManyN' or 'collectN'" $ do
let x = (5 :: Int) ./ False ./ 'X' ./ Just 'O' ./ (6 :: Int) ./ Just 'A' ./ nil
y = show @Int ./ show @Bool ./ show @Char ./ show @(Maybe Char) ./ show @Int ./ show @(Maybe Char) ./ nil
ret = ["5", "False", "'X'", "Just 'O'", "6", "Just 'A'"]
afoldr (:) [] (collectN x (casesN y)) `shouldBe` ret
afoldr (:) [] (forManyN (casesN y) x) `shouldBe` ret
it "every hasLens can be mapped into a different type in a Functor-like fashion with using 'afmap'" $ do
let x = (5 :: Int) ./ (6 :: Int8) ./ (7 :: Int16) ./ (8 :: Int32) ./ nil
y = (15 :: Int) ./ (16 :: Int8) ./ (17 :: Int16) ./ (18 :: Int32) ./ nil
z = ("5" :: String) ./ ("6" :: String) ./ ("7" :: String) ./ ("8" :: String) ./ nil
mx = (Just 5 :: Maybe Int) ./ ([6] :: [Int8]) ./ nil
my = (Just 15 :: Maybe Int) ./ ([16] :: [Int8]) ./ nil
mz = (Just "5" :: Maybe String) ./ (["6"] :: [String]) ./ nil
afmap (CaseFunc' @Num (+10)) x `shouldBe` y
afmap (CaseFunc @Show @String show) x `shouldBe` z
afmap (CaseFunc1' @C0 @Functor @Num (fmap (+10))) mx `shouldBe` my
afmap (CaseFunc1 @C0 @Functor @(C2 Show Read) @String (fmap show)) mx `shouldBe` mz
| louispan/data-diverse | test/Data/Diverse/ManySpec.hs | bsd-3-clause | 18,822 | 0 | 25 | 6,558 | 6,989 | 3,598 | 3,391 | -1 | -1 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE InstanceSigs #-}
module PFDS.Sec9.Ex13_2 where
-- リストを使ってTreeの数に関する明示的な制約を犠牲に実装を簡素にしたバージョン
import PFDS.Commons.RandomAccessList
import Prelude hiding (head, tail, lookup)
-- | Complete binary leaf tree; the 'Int' in a 'Node' caches the number of
-- leaves below it (see 'size').
data Tree a = Leaf a | Node Int (Tree a) (Tree a) deriving (Show, Eq)

-- | One digit of the numerical representation.  The colours follow the
-- recursive-slowdown discipline: 'Zero' and 'Four' are red (must be fixed
-- up before the next operation), 'Two' is green (always safe), and the
-- yellow digits ('Ones', 'Threes') presumably bundle consecutive runs of
-- equal digits as a list of tree groups -- TODO confirm the grouping
-- invariant against the exercise text.
data Digit a =
    Zero -- Red
  | Ones [[Tree a]] -- Yellow
  | Two [Tree a] -- Green
  | Threes [[Tree a]] -- Yellow
  | Four [Tree a] -- Red
  deriving (Show, Eq)

-- | A random-access list is a sequence of digits.
type RList a = [Digit a]

type instance Elem (RList a) = a
instance RandomAccessList (RList a) where
  empty :: RList a
  empty = []

  isEmpty :: RList a -> Bool
  isEmpty [] = True
  isEmpty _ = False

  -- 'fixup' restores the digit invariant after the raw cons.
  cons :: a -> RList a -> RList a
  cons x ds = fixup (consTree (Leaf x) ds)

  -- Partial: the let pattern assumes 'unconsTree' yields a 'Leaf', which
  -- holds because the first tree of the first digit has size 1.
  head :: RList a -> a
  head ds = let (Leaf x, _) = unconsTree ds in x

  tail :: RList a -> RList a
  tail ds = let (_, ds') = unconsTree ds in fixup ds'

  -- Out-of-range indices (including any index into the empty list) fail
  -- with the "Subscript" error.
  lookup :: Int -> RList a -> a
  lookup i [] = error "Subscript"
  lookup i (d : ds) = lookupTrees i (unwrapTrees d) ds

  -- Not implemented in this exercise version.
  update :: Int -> a -> RList a -> RList a
  update = undefined
-- helper functions
-- | Number of elements stored in a tree; 'Node's carry a cached count.
size :: Tree a -> Int
size t =
  case t of
    Leaf _     -> 1
    Node w _ _ -> w
-- | Join two trees under a fresh 'Node' whose cached size is the sum of
-- its children's sizes.
link :: Tree a -> Tree a -> Tree a
link left right = Node (size left + size right) left right
-- | Push one tree onto the digit sequence.  Note the match is not
-- exhaustive: 'Zero' and 'Four' heads are absent, presumably because
-- 'fixup' removes those red digits before the next cons -- TODO confirm
-- that invariant.
consTree :: Tree a -> RList a -> RList a
consTree t [] = [Ones [[t]]]
consTree t (Ones (ts : tss) : ds) = Two (t : ts) : ones tss ds
consTree t (Two ts : ds) = threes [t : ts] ds
consTree t (Threes (ts : tss) : ds) = Four (t : ts) : threes tss ds
-- | Pop the first tree off the digit sequence, returning it with the
-- remaining sequence.  Partial: the empty list raises "Empty", and head
-- digits other than 'Ones', 'Two' and 'Threes' are not matched --
-- presumably excluded by the fixup invariant (TODO confirm).
unconsTree :: RList a -> (Tree a, RList a)
unconsTree [] = error "Empty"
unconsTree [Ones [[t]]] = (t, [])
unconsTree (Ones ([t] : tss) : ds) = (t, Zero : ones tss ds)
unconsTree (Two (t : ts) : ds) = (t, ones [ts] ds)
unconsTree (Threes ((t : ts) : tss) : ds) = (t, Two ts : threes tss ds)
-- | Flatten one digit into the list of trees it carries.
unwrapTrees :: Digit a -> [Tree a]
unwrapTrees d =
  case d of
    Zero       -> []
    Ones tss   -> concat tss
    Two ts     -> ts
    Threes tss -> concat tss
    Four ts    -> ts
-- | Look for index @i@ among the given trees first, then fall back to the
-- rest of the digit sequence, subtracting the sizes already skipped.
lookupTrees :: Int -> [Tree a] -> RList a -> a
lookupTrees i [] ds = lookup i ds
lookupTrees i (t : ts) ds
  | i < size t = lookupTree i t
  | otherwise  = lookupTrees (i - size t) ts ds
-- | Index into one complete binary leaf tree; out-of-range indices fail
-- with the "Subscript" error.
lookupTree :: Int -> Tree a -> a
lookupTree 0 (Leaf x) = x
lookupTree _ (Leaf _) = error "Subscript"
lookupTree i (Node w t1 t2)
  | i < half  = lookupTree i t1
  | otherwise = lookupTree (i - half) t2
  where half = w `div` 2
-- digit helpers
-- | Prepend a run of 'Ones' groups, merging with an adjacent 'Ones' digit
-- so runs never appear twice in a row; an empty run is a no-op.
ones :: [[Tree a]] -> RList a -> RList a
ones [] ds = ds
ones tss1 ds =
  case ds of
    Ones tss2 : rest -> Ones (tss1 ++ tss2) : rest
    _                -> Ones tss1 : ds
-- | Prepend a run of 'Threes' groups, merging with an adjacent 'Threes'
-- digit so runs never appear twice in a row; an empty run is a no-op.
threes :: [[Tree a]] -> RList a -> RList a
threes [] ds = ds
threes tss1 ds =
  case ds of
    Threes tss2 : rest -> Threes (tss1 ++ tss2) : rest
    _                  -> Threes tss1 : ds
-- | Restore the recursive-slowdown invariant: a leading red digit
-- ('Zero' / 'Four') becomes a green 'Two' by borrowing from or carrying
-- into the next position, and a yellow digit forwards the fixup one
-- position down.  The 'Four' case matches only a group of exactly four
-- trees -- presumably an invariant of how 'Four' digits are built in
-- 'consTree' (TODO confirm).  Anything else is already legal.
fixup :: RList a -> RList a
fixup (Zero : ds) = let (Node _ t1 t2, ds') = unconsTree ds in Two [t1, t2] : ds'
fixup (Ones ts : Zero : ds) = Ones ts : fixup (Zero : ds)
fixup (Threes ts : Four t : ds) = Threes ts : fixup (Four t : ds)
fixup (Four [t1, t2, t3, t4] : ds) = Two [t1, t2] : consTree (link t3 t4) ds
fixup ds = ds
| matonix/pfds | src/PFDS/Sec9/Ex13_2.hs | bsd-3-clause | 3,150 | 0 | 12 | 737 | 1,632 | 835 | 797 | 81 | 2 |
import Hsbot
import Hsbot.Config
-- | Entry point: run the bot with the library's default configuration.
main :: IO ()
main = hsbot defaultConfig
| adyxax/hsbot | Main.hs | bsd-3-clause | 76 | 0 | 6 | 14 | 27 | 14 | 13 | 4 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE TemplateHaskell #-}
module Application
( makeApplication
, getApplicationDev
, makeFoundation
) where
import Control.Monad.Logger (runLoggingT)
import Data.Default (def)
import qualified Database.Persist
import Database.Persist.Sql (runMigration)
import Import
import Network.HTTP.Client.Conduit (newManager)
import Network.Wai.Logger (clockDateCacher)
import Network.Wai.Middleware.RequestLogger (IPAddrSource (..),
OutputFormat (..),
destination,
mkRequestLogger,
outputFormat)
import qualified Network.Wai.Middleware.RequestLogger as RequestLogger
import Settings
import System.Log.FastLogger (defaultBufSize,
newStdoutLoggerSet)
import Yesod.Auth
import Yesod.Core.Types (Logger (Logger),
loggerSet)
import Yesod.Default.Config
import Yesod.Default.Handlers
import Yesod.Default.Main
-- Import all relevant handler modules here.
-- Don't forget to add new modules to your cabal file!
import Handler.Home
-- This line actually creates our YesodDispatch instance. It is the second half
-- of the call to mkYesodData which occurs in Foundation.hs. Please see the
-- comments there for more details.
mkYesodDispatch "App" resourcesApp
-- This function allocates resources (such as a database connection pool),
-- performs initialization and creates a WAI application. This is also the
-- place to put your migrate statements to have automatic database
-- migrations handled by Yesod.
makeApplication :: AppConfig DefaultEnv Extra -> IO (Application, LogFunc)
makeApplication conf = do
    foundation <- makeFoundation conf

    -- Initialize the logging middleware: verbose per-request output in
    -- development, Apache combined-log style in production, both written
    -- through the foundation's logger.
    logWare <- mkRequestLogger def
        { outputFormat =
            if development
                then Detailed True
                else Apache FromSocket
        , destination = RequestLogger.Logger $ loggerSet $ appLogger foundation
        }

    -- Create the WAI application and apply middlewares
    app <- toWaiAppPlain foundation
    let logFunc = messageLoggerSource foundation (appLogger foundation)
    return (logWare $ defaultMiddlewaresNoLogging app, logFunc)
-- | Loads up any necessary settings, creates your foundation datatype, and
-- performs some initialization (pool creation, logger setup, migrations).
makeFoundation :: AppConfig DefaultEnv Extra -> IO App
makeFoundation conf = do
    manager <- newManager
    s <- staticSite
    -- Database settings come from config/sqlite.yml, selected by the
    -- current environment and overridable via environment variables.
    dbconf <- withYamlEnvironment "config/sqlite.yml" (appEnv conf)
              Database.Persist.loadConfig >>=
              Database.Persist.applyEnv
    p <- Database.Persist.createPoolConfig (dbconf :: Settings.PersistConf)
    loggerSet' <- newStdoutLoggerSet defaultBufSize
    (getter, _) <- clockDateCacher
    -- Broadcast channel stored as 'socketEvents'; presumably consumed by
    -- websocket handlers -- confirm against the Handler modules.
    eventChannel <- atomically newBroadcastTChan

    let logger = Yesod.Core.Types.Logger loggerSet' getter
        foundation = App { settings = conf
                         , getStatic = s
                         , connPool = p
                         , httpManager = manager
                         , persistConfig = dbconf
                         , appLogger = logger
                         , socketEvents = eventChannel}

    -- Perform database migration using our application's logging settings.
    runLoggingT
        (Database.Persist.runPool dbconf (runMigration migrateAll) p)
        (messageLoggerSource foundation logger)

    return foundation
-- | For yesod devel: build the (port, application) pair using the
-- Development configuration with this site's extra-settings parser.
getApplicationDev :: IO (Int, Application)
getApplicationDev =
    defaultDevelApp loader (fmap fst . makeApp)
  where
    loader = Yesod.Default.Config.loadConfig (configSettings Development)
        { csParseExtra = parseExtra
        }
-- | Build the application for 'getApplicationDev'.  This is exactly
-- 'makeApplication'; the previous definition re-bound and re-returned the
-- result tuple (@do (app, logFunc) <- m; return (app, logFunc)@), which
-- is the identity on the result.
makeApp :: AppConfig DefaultEnv Extra -> IO (Application, LogFunc)
makeApp = makeApplication
| Codas/campaign-next | Application.hs | bsd-3-clause | 4,371 | 0 | 13 | 1,428 | 702 | 393 | 309 | 73 | 2 |
module Settings where
{-
The definitions below configure the rules of the classic Game of Life.
-}
-- Minimum number of live neighbours a cell needs to stay alive.
aliveCellsMin :: Int
aliveCellsMin = 2

-- Maximum number of live neighbours a cell may have and stay alive.
aliveCellsMax :: Int
aliveCellsMax = 3

-- Exact number of live neighbours required to spawn a new cell.
spawnCellsNum :: Int
spawnCellsNum = 3
| wowofbob/gol | src/Settings.hs | bsd-3-clause | 349 | 0 | 4 | 67 | 38 | 25 | 13 | 7 | 1 |
{- |
Module : ./GUI/hets_cgi.hs
Copyright : (c) Uni Magdeburg 2004-2017
License : GPLv2 or higher, see LICENSE.txt
Maintainer : till@iks.cs.ovgu.de
Stability : provisional
Portability : non-portable(imports Logic.Logic)
Interface for web page with WASH/CGI
-}
module Main where
import WASH.CGI.CGI as CGI
import Driver.Options
import Driver.WriteLibDefn
import Driver.ReadLibDefn
import Driver.Version
import qualified Data.Set as Set
import qualified Common.Result as CRes
import Common.Doc (renderText)
import Common.GlobalAnnotations
import Common.LibName
import Common.ResultT
import Common.PrintLaTeX
import Comorphisms.LogicGraph
import Static.AnalysisLibrary
import Static.DevGraph
import Syntax.AS_Library
import Syntax.Print_AS_Structured
import Syntax.ToXml
import Text.XML.Light
import System.Random
import System.IO
import System.Time
import System.Process
import System.Posix.IO
import System.Posix.Types
import System.Posix.Files
import System.Posix.Process
import Control.Monad
-- ---- Configuration section -------------------------
-- - site specific configuration
-- a valid email address for the contact field / link
contactUrl :: String
contactUrl = "mailto:" ++ contactText

-- the text displayed with the previous link
contactText :: String
contactText = "hets-devel@informatik.uni-bremen.de"

{- a directory which must be accessible and exposed by the web server,
where all the generated files are stored. This string must end with
a slash! -}
baseDirGenerated :: String
baseDirGenerated = "/home/www.informatik.uni-bremen.de/cofi/hets-tmp/"

-- path to the log file; submitted inputs are appended here (see 'saveLog')
logFile :: String
logFile = baseDirGenerated ++ "hets.log"

-- the url pointing to the above specified directory
baseUrlGenerated :: String
baseUrlGenerated = "http://www.informatik.uni-bremen.de/cofi/hets-tmp/"

{- the directory where the Hets-lib repository is checked out. Must be
accessible by the cgi script -}
caslLibDir :: String
caslLibDir = "/home/cofi/Hets-lib"

-- where is the pdflatex command for the generation of PDF-files
pdflatexCmd :: String
pdflatexCmd = "/opt/csw/bin/pdflatex"

-- - site independent configuration

cofiUrl :: String
cofiUrl =
  "http://www.cofi.info/"

-- link to the homepage of hetcasl
hetcaslUrl :: String
hetcaslUrl = "http://dol-omg.org/"

hetsUrl :: String
hetsUrl = "http://hets.eu/"

-- link to the manual of Hets
hetsManualUrl :: String
hetsManualUrl = hetsUrl ++ "UserGuide.pdf"

-- link to the hetcasl.sty file
hetcaslStyUrl :: String
hetcaslStyUrl = hetsUrl ++ "hetcasl.sty"
-- ---- End of Configuration section ------------------
-- | Options for every analysis triggered through the web interface:
-- quiet, nothing on stdout, Hets-lib as the library search path.
webOpts :: HetcatsOpts
webOpts = defaultHetcatsOpts
  { outputToStdout = False
  , libdirs = [caslLibDir]
  , verbose = 0 }

-- | Which output formats the user ticked on the form, plus whether the
-- submitted input may be appended to the log file.
data SelectedBoxes = SB
  { outputTree :: Bool
  , outputTxt :: Bool
  , outputTex :: Bool
  , archive :: Bool
  } deriving Show

-- | The textual results rendered inline into the answer page.
data Output = OP
  { asciiTxt :: String
  , parseTree :: String }

-- | Both result texts empty; fields are filled per selected box.
defaultOutput :: Output
defaultOutput = OP "" ""
-- | Wrapper used to pass a 'CRes.Result' through WASH's 'io' action,
-- which presumably demands 'Read' and 'Show' instances -- both are
-- deliberate stubs and must never actually be evaluated.
newtype RESL = RESL (CRes.Result Output)

instance Read RESL where
  readsPrec _ _ = error "Read for \"RESL\" not implemented!!"

instance Show RESL where
  showsPrec _ _ _ = error "Show for \"RESL\" not implemented!!"
main :: IO ()
main = run mainCGI

-- | Top-level CGI page: an HTML document holding the input form.
mainCGI :: CGI ()
mainCGI = ask $ html $ do
  CGI.head $ title $ text "Hets Web Interface"
  CGI.body $ makeForm $ page1 hetsVersion
-- | Render the main form: a text area for the CASL/DOL input, check
-- boxes selecting the output formats, and submit/reset buttons.
--
-- Fix: the format check boxes used @attr "valus" "yes"@, emitting the
-- misspelled (invalid) attribute @valus@ instead of @value@.
page1 :: String -> WithHTML x CGI ()
page1 title1 = do
  h1 $ text title1
  p $ do
    text "You may also want to try out our experimental "
    hlink (URL "http://rest.hets.eu/")
      $ text "Hets Server"
  p $ do
    text "Enter a "
    hlink (URL hetcaslUrl) $ text "CASL or DOL"
    text
      " specification or library in the input zone, then press SUBMIT:"
  -- Input field
  input <- p $ makeTextarea "" (attr "rows" "22" ## attr "cols" "68")
  -- Check boxes for the three output formats
  selectTxt <- checkboxInputField (attr "value" "yes")
  text "output pretty print ASCII"
  selectTex <- checkboxInputField (attr "value" "yes")
  text "output pretty print LaTeX"
  selectTree <- checkboxInputField (attr "value" "yes")
  text "output xml tree"
  selectAchiv <- p $ b $ checkboxInputField (attr "checked" "checked") ##
    text "If this checkbox is selected, your input will be logged!"
  -- Submit / reset buttons
  p $ do
    submit (F5 input selectTree selectTxt selectTex selectAchiv)
      handle (fieldVALUE "Submit")
    submit0 mainCGI (fieldVALUE "reset")
  hr_S CGI.empty
  p $ do
    text "Contact address: "
    hlink (URL contactUrl) $ text contactText
-- | Handle the submit button: derive a fresh output file name from the
-- process id plus a six-digit random number, run the analysis, and
-- render the result page.
handle :: HasValue i => F5 (i String) (i Bool) (i Bool) (i Bool) (i Bool) VALID
       -> CGI ()
handle (F5 input box1 box2 box3 box4) = do
  random1 <- io $ getStdRandom $ randomR (100000, 999999)
  processID <- io getProcessID
  let outputfile = baseDirGenerated ++ "result" ++ show processID
                   ++ show (random1 :: Int)
      str = CGI.value input
      selectedBoxes = SB
        { outputTree = CGI.value box1
        , outputTxt = CGI.value box2
        , outputTex = CGI.value box3
        , archive = CGI.value box4 }
  -- 'RESL' wraps the result so it can travel through WASH's 'io'.
  RESL res <- io $ fmap RESL $ anaInput str selectedBoxes outputfile
  ask $ html $ do
    CGI.head $ title $ text "HETS results"
    CGI.body $ printR str res selectedBoxes outputfile
-- | Parse and statically analyse the submitted text; hints and debug
-- messages are dropped from the diagnostics before reporting.
--
-- NOTE(review): the irrefutable pattern on the first 'runResultT' crashes
-- if 'readLibDefn' produces no library definition at all -- consider
-- matching 'Nothing' / '[]' explicitly.
anaInput :: String -> SelectedBoxes -> FilePath
         -> IO (CRes.Result Output)
anaInput contents selectedBoxes outputfiles = do
  CRes.Result _ (Just (ast : _)) <- runResultT
    $ readLibDefn logicGraph webOpts Nothing "<stdin>" "<stdin>" contents
  CRes.Result ds mres <- runResultT
    $ anaLibDefn logicGraph webOpts Set.empty emptyLibEnv emptyDG ast ""
  let ds1 = filter diagFilter ds
  if CRes.hasErrors ds1 then return $ CRes.Result ds1 Nothing else
      maybe (return $ CRes.Result ds1 Nothing)
        (\ res -> do
           -- log the submitted input (if permitted), then write files
           saveLog (archive selectedBoxes)
           process_result ds1 res outputfiles selectedBoxes)
        mres
  where
    diagFilter d = case CRes.diagKind d of
      CRes.Hint -> False
      CRes.Debug -> False
      _ -> True
-- | Write the requested output files (LaTeX/PDF, plain text, XML parse
-- tree) next to the generated result page, made group-writable and
-- world-readable, and assemble the inline page texts.
process_result :: [CRes.Diagnosis]
               -> (LibName, LIB_DEFN, GlobalAnnos, LibEnv)
               -> FilePath
               -> SelectedBoxes
               -> IO (CRes.Result Output)
process_result ds (_, libDefn, gannos, _) outputfile conf = do
  -- rw-rw-r-- for every generated file
  let fMode = foldl unionFileModes nullFileMode
              [ownerReadMode, ownerWriteMode,
               groupReadMode, groupWriteMode,
               otherReadMode]
  when (outputTex conf) $ do
    let pptexFile = outputfile ++ ".pp.tex"
        latexFile = outputfile ++ ".tex"
        pdfFile = outputfile ++ ".pdf"
        tmpFile = outputfile ++ ".tmp"
    writeLibDefnLatex logicGraph False gannos pptexFile libDefn
    -- wrap the pretty-printed LaTeX in a complete document
    writeFile latexFile (latexHeader ++
                         "\\input{" ++ pptexFile ++
                         "}\n \\end{document}\n")
    setFileMode pptexFile fMode
    setFileMode latexFile fMode
    -- NOTE(review): the exit code of the pdflatex pipeline is discarded;
    -- if no PDF was produced, the following setFileMode throws.
    system ("(cd " ++ baseDirGenerated ++ " ; ls -lh " ++
            pdflatexCmd ++ " ; " ++ pdflatexCmd ++ " " ++
            latexFile ++ ") > " ++ tmpFile)
    setFileMode pdfFile fMode
  when (outputTxt conf) $ do
    let txtFile = outputfile ++ ".txt"
    writeFile txtFile $ show (renderText gannos $
                              prettyLG logicGraph libDefn) ++ "\n"
    setFileMode txtFile fMode
  when (outputTree conf) $ do
    let txtFile = outputfile ++ ".pp.xml"
    writeFile txtFile . ppTopElement
      $ xmlLibDefn logicGraph gannos libDefn
    setFileMode txtFile fMode
  return (CRes.Result ds $ Just $ selectOut conf libDefn gannos)
-- | Assemble the inline page texts according to the ticked boxes; the
-- field of a format that was not requested stays empty.
selectOut :: SelectedBoxes -> LIB_DEFN -> GlobalAnnos -> Output
selectOut conf ld ga = defaultOutput { asciiTxt = txt, parseTree = tree }
  where
    txt | outputTxt conf = show $ renderText ga $ prettyLG logicGraph ld
        | otherwise = ""
    tree | outputTree conf = ppElement $ xmlLibDefn logicGraph ga ld
         | otherwise = ""
-- log file
-- | Append a time-stamped entry to the log file, holding a POSIX write
-- lock over the existing contents while writing.
--
-- NOTE(review): 'contents' is a free variable here -- as written this
-- cannot compile at the top level; presumably 'saveLog' was originally a
-- local definition in the scope of 'anaInput' (whose first parameter is
-- named 'contents').  TODO confirm against the original source layout.
saveLog :: Bool -> IO ()
saveLog willSave = when willSave $ do
  fd <- openFd logFile ReadWrite Nothing
        defaultFileFlags {append = True}
  fSize <- sizeof fd
  let filelock = (WriteLock, AbsoluteSeek, 0, fSize)
      fileunlock = (Unlock, AbsoluteSeek, 0, fSize)
  aktTime <- timeStamp
  setLock fd filelock
  fdWrite fd (aktTime ++ "\n" ++ contents ++ "\n\n")
  setLock fd fileunlock
  closeFd fd
-- | The current wall-clock time rendered via 'calendarTimeToString'.
timeStamp :: IO String
timeStamp = fmap calendarTimeToString (getClockTime >>= toCalendarTime)
-- | Size in bytes of the file behind an open descriptor.
sizeof :: Fd -> IO FileOffset
sizeof = fmap fileSize . getFdStatus
-- Print the result
-- | Render the result page: the echoed input, all diagnostics, and --
-- per selected output format -- the inline result plus download links
-- pointing into 'baseUrlGenerated'.
printR :: String -> CRes.Result Output -> SelectedBoxes
       -> FilePath
       -> WithHTML x CGI ()
printR str (CRes.Result ds mres) conf outputFile =
  do h3 $ text "You have submitted the CASL or DOL library:"
     mapM_ (\ l -> text l >> br CGI.empty) $ lines str
     h3 $ text "Diagnostic messages of parsing and static analysis:"
     mapM_ (\ l -> text (show l) >> br CGI.empty) ds
     maybe CGI.empty printRes mres
     hr_S CGI.empty
     p $ do
       text "Not the result you expected? Please check the "
       hlink (read hetsManualUrl) $ text "Hets Manual"
       text "."
     hr_S CGI.empty
     p $ do
       text "Contact address: "
       hlink (read contactUrl) $ text contactText
  where
    -- map a generated file path to the matching download URL
    adjustOutfile ext = baseUrlGenerated ++
      drop (length baseDirGenerated) (outputFile ++ ext)
    printRes res = do
      when (outputTxt conf) $ do
        heading3 "Pretty printed text:"
        formatTxt (asciiTxt res)
        p $ i $ do
          text "You can download the "
          hlink (read $ adjustOutfile ".txt") $ text "text file"
          text " here. The file will be deleted after 30 minutes.\n"
      when (outputTex conf) $ do
        heading3 "LaTeX code:"
        p $ i $ do
          text "You can download the "
          hlink (read $ adjustOutfile ".pp.tex") $ text "LaTeX file"
          text " here. For compiling the LaTeX output, you need "
          hlink (read hetcaslStyUrl) $ text "hetcasl.sty"
          text "."
        p $ i $ do
          text "You can also download the "
          hlink (read $ adjustOutfile ".pdf") $ text "PDF file"
          text ". All files will be deleted after 30 minutes.\n"
      when (outputTree conf) $ do
        heading3 "XML parse tree:"
        formatTxt (parseTree res)
        p $ i $ do
          text "You can download the "
          hlink (read $ adjustOutfile ".pp.xml") $ text "XML file"
          text " here. The file will be deleted after 30 minutes.\n"
    -- emit a multi-line string with explicit <br> line breaks
    formatTxt = p . mapM_ (\ l -> text l >> br CGI.empty) . lines
    heading3 = h3 . text
| spechub/Hets | GUI/hets_cgi.hs | gpl-2.0 | 11,754 | 0 | 22 | 3,696 | 2,812 | 1,375 | 1,437 | 257 | 6 |
{-# LANGUAGE ExistentialQuantification #-}
-- |This module provides wrapper widgets for fixing the size of child
-- widgets in one or more dimensions in rows or columns, respectively.
-- This differs from the ''limit'' widgets in the Limits module in
-- that Limits enforce an upper bound on size.
module Graphics.Vty.Widgets.Fixed
( VFixed
, HFixed
, hFixed
, vFixed
, boxFixed
, setVFixed
, setHFixed
, addToVFixed
, addToHFixed
, getVFixedSize
, getHFixedSize
)
where
import Control.Monad
import Graphics.Vty
import Graphics.Vty.Widgets.Core
import Graphics.Vty.Widgets.Util
-- | Wrapper state imposing a fixed width (in columns) on a child widget.
data HFixed a = (Show a) => HFixed Int (Widget a)

instance Show (HFixed a) where
  show (HFixed i _) = "HFixed { width = " ++ show i ++ ", ... }"
-- |Impose a fixed horizontal size, in columns, on a 'Widget'.  The child
-- renders into a region clamped to that width and the image is padded
-- with blanks on the right when it comes out narrower.
hFixed :: (Show a) => Int -> Widget a -> IO (Widget (HFixed a))
hFixed fixedWidth child = do
  let initSt = HFixed fixedWidth child
  wRef <- newWidget initSt $ \w ->
      w { render_ = \this s ctx -> do
            HFixed width ch <- getState this
            -- clamp the available region to the fixed width
            let region = s `withWidth` fromIntegral (min (toEnum width) (regionWidth s))
            img <- render ch region ctx
            -- Pad the image if it's smaller than the region.
            let img' = if imageWidth img < regionWidth region
                       then img <|> (charFill (getNormalAttr ctx) ' '
                                     (toEnum width - imageWidth img)
                                     1)
                       else img
            return img'
        , setCurrentPosition_ =
            \this pos -> do
              HFixed _ ch <- getState this
              setCurrentPosition ch pos
        , getCursorPosition_ = \this -> do
            HFixed _ ch <- getState this
            getCursorPosition ch
        }
  -- key and focus events pass straight through to the child
  wRef `relayKeyEvents` child
  wRef `relayFocusEvents` child
  return wRef
-- | Wrapper state imposing a fixed height (in rows) on a child widget.
data VFixed a = (Show a) => VFixed Int (Widget a)

instance Show (VFixed a) where
  show (VFixed i _) = "VFixed { height = " ++ show i ++ ", ... }"
-- |Impose a fixed vertical size, in rows, on a 'Widget'.  (The original
-- comment said "in columns", but the code clamps 'regionHeight'.)  The
-- child renders into a region clamped to that height and the image is
-- padded with blank rows when it comes out shorter.
vFixed :: (Show a) => Int -> Widget a -> IO (Widget (VFixed a))
vFixed maxHeight child = do
  let initSt = VFixed maxHeight child
  wRef <- newWidget initSt $ \w ->
      -- NOTE(review): this relays growHorizontal_ to the child, but
      -- 'hFixed' does not relay growVertical_ symmetrically -- confirm
      -- whether that asymmetry is intentional.
      w { growHorizontal_ = const $ growHorizontal child
        , render_ = \this s ctx -> do
            VFixed height ch <- getState this
            -- clamp the available region to the fixed height
            let region = s `withHeight` fromIntegral (min (toEnum height) (regionHeight s))
            img <- render ch region ctx
            -- Pad the image if it's smaller than the region.
            let img' = if imageHeight img < regionHeight region
                       then img <-> (charFill (getNormalAttr ctx) ' '
                                     1
                                     (toEnum height - imageHeight img))
                       else img
            return img'
        , setCurrentPosition_ =
            \this pos -> do
              VFixed _ ch <- getState this
              setCurrentPosition ch pos
        , getCursorPosition_ = \this -> do
            VFixed _ ch <- getState this
            getCursorPosition ch
        }
  -- key and focus events pass straight through to the child
  wRef `relayKeyEvents` child
  wRef `relayFocusEvents` child
  return wRef
-- |Set the vertical fixed size of a child widget; values below 1 are
-- ignored.
setVFixed :: Widget (VFixed a) -> Int -> IO ()
setVFixed wRef newSize
  | newSize < 1 = return ()
  | otherwise =
      updateWidgetState wRef $ \(VFixed _ ch) -> VFixed newSize ch
-- |Set the horizontal fixed size of a child widget; values below 1 are
-- ignored.
setHFixed :: Widget (HFixed a) -> Int -> IO ()
setHFixed wRef newSize
  | newSize < 1 = return ()
  | otherwise =
      updateWidgetState wRef $ \(HFixed _ ch) -> HFixed newSize ch
-- |Add to the vertical fixed size of a child widget.
addToVFixed :: Widget (VFixed a) -> Int -> IO ()
addToVFixed wRef delta =
  getVFixedSize wRef >>= \current -> setVFixed wRef (current + delta)
-- |Add to the horizontal fixed size of a child widget.
addToHFixed :: Widget (HFixed a) -> Int -> IO ()
addToHFixed wRef delta =
  getHFixedSize wRef >>= \current -> setHFixed wRef (current + delta)
-- |Get the vertical fixed size of a child widget.
getVFixedSize :: Widget (VFixed a) -> IO Int
getVFixedSize wRef = do
  st <- state <~ wRef
  case st of
    VFixed lim _ -> return lim
-- |Get the horizontal fixed size of a child widget.
getHFixedSize :: Widget (HFixed a) -> IO Int
getHFixedSize wRef = do
  st <- state <~ wRef
  case st of
    HFixed lim _ -> return lim
-- |Impose a fixed horizontal and vertical size on a widget by nesting an
-- 'hFixed' wrapper inside a 'vFixed' one.
boxFixed :: (Show a) =>
            Int -- ^Width in columns
         -> Int -- ^Height in rows
         -> Widget a
         -> IO (Widget (VFixed (HFixed a)))
boxFixed maxWidth maxHeight w = hFixed maxWidth w >>= vFixed maxHeight
| KommuSoft/vty-ui | src/Graphics/Vty/Widgets/Fixed.hs | bsd-3-clause | 4,885 | 0 | 24 | 1,629 | 1,352 | 673 | 679 | 104 | 2 |
{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable, StandaloneDeriving #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Server.Packages.Types
-- Copyright : (c) David Himmelstrup 2005
-- License : BSD-like
--
-- Maintainer : lemmih@gmail.com
-- Stability : provisional
-- Portability : portable
--
-- All data types for the entire cabal-install system gathered here to avoid some .hs-boot files.
-----------------------------------------------------------------------------
module Distribution.Server.Packages.Types where
import Distribution.Server.Users.Types (UserId)
import Distribution.Server.Framework.BlobStorage (BlobId)
import Distribution.Server.Framework.Instances ()
import Distribution.Server.Util.Parse (unpackUTF8)
import Distribution.Package
( PackageIdentifier(..), Package(..) )
import Distribution.PackageDescription
( GenericPackageDescription(..))
import Distribution.PackageDescription.Parse
( parsePackageDescription, ParseResult(..) )
import qualified Data.Serialize as Serialize
import Data.Serialize (Serialize)
import qualified Data.ByteString.Lazy.Char8 as BS (unpack)
import Data.ByteString.Lazy (ByteString)
import Data.Time.Clock (UTCTime)
import Data.Typeable (Typeable)
import Data.List (sortBy)
import Data.Ord (comparing)
-- | The raw bytes of a .cabal file, decoded as UTF-8 when read as text.
newtype CabalFileText = CabalFileText { cabalFileByteString :: ByteString }
  deriving (Eq, Serialize)

cabalFileString :: CabalFileText -> String
cabalFileString = unpackUTF8 . cabalFileByteString

-- 'show' emits a Haskell expression that rebuilds the value, so dumps
-- remain readable and re-evaluable.
instance Show CabalFileText where
  show cft = "CabalFileText (Data.ByteString.Lazy.Char8.pack (Distribution.Simple.Utils.toUTF8 " ++ show (cabalFileString cft) ++ "))"
-- | The information we keep about a particular version of a package.
--
-- Previous versions of this package name and version may exist as well.
-- We normally disallow re-uploading but may make occasional exceptions.
data PkgInfo = PkgInfo {
    pkgInfoId :: !PackageIdentifier,

    -- | The information held in a parsed .cabal file (used by cabal-install)
    -- If this takes up too much space, it might be possible to wrap it in
    -- Maybe and let the server parse the data on the fly.
    -- Note: this field is re-derived from 'pkgData' on deserialization
    -- rather than stored (see the 'Serialize' instance).
    pkgDesc :: !GenericPackageDescription,

    -- | The .cabal file text.
    pkgData :: !CabalFileText,

    -- | The actual package .tar.gz file. It is optional for making an incomplete
    -- mirror, e.g. using archives of just the latest packages, or perhaps for a
    -- multipart upload process.
    --
    -- The canonical tarball URL points to the most recently uploaded package.
    pkgTarball :: ![(PkgTarball, UploadInfo)],

    -- | Previous data. The UploadInfo does *not* indicate when the ByteString was
    -- uploaded, but rather when it was replaced. This way, pkgUploadData won't change
    -- even if a cabal file is changed.
    -- Should be updated whenever a tarball is uploaded (see mergePkg state function)
    pkgDataOld :: ![(CabalFileText, UploadInfo)],

    -- | When the package was created. Imports will override this with time in their logs.
    pkgUploadData :: !UploadInfo
} deriving (Eq, Typeable, Show)
-- | A stored package tarball, kept as blob-store references both gzipped
-- and uncompressed.
data PkgTarball = PkgTarball {
    pkgTarballGz :: !BlobId,
    pkgTarballNoGz :: !BlobId
} deriving (Eq, Typeable, Show)

-- | When and by whom an upload (or replacement) happened.
type UploadInfo = (UTCTime, UserId)

pkgUploadTime :: PkgInfo -> UTCTime
pkgUploadTime = fst . pkgUploadData

pkgUploadUser :: PkgInfo -> UserId
pkgUploadUser = snd . pkgUploadData
-- | A small utility: order records newest-first by their upload time.
descendUploadTimes :: [(a, UploadInfo)] -> [(a, UploadInfo)]
descendUploadTimes = sortBy newerFirst
  where newerFirst (_, (t1, _)) (_, (t2, _)) = compare t2 t1
instance Package PkgInfo where packageId = pkgInfoId

instance Serialize PkgInfo where
  put pkgInfo = do
    Serialize.put (pkgInfoId pkgInfo)
    Serialize.put (pkgData pkgInfo)
    Serialize.put (pkgTarball pkgInfo)
    Serialize.put (pkgDataOld pkgInfo)
    Serialize.put (pkgUploadData pkgInfo)
  -- 'pkgDesc' is not serialized: it is re-parsed from the stored cabal
  -- text here, so 'get' fails if that parse fails.  The read order must
  -- mirror the 'put' order above.
  get = do
    infoId <- Serialize.get
    cabal <- Serialize.get
    desc <- case parsePackageDescription . cabalFileString $ cabal of
      ParseFailed e -> fail $ "Internal error: " ++ show e
      ParseOk _ x -> return x
    tarball <- Serialize.get
    old <- Serialize.get
    updata <- Serialize.get
    return PkgInfo {
      pkgInfoId = infoId,
      pkgDesc = desc,
      pkgUploadData = updata,
      pkgDataOld = old,
      pkgTarball = tarball,
      pkgData = cabal
    }
instance Serialize PkgTarball where
  put tb = do
    Serialize.put (pkgTarballGz tb)
    Serialize.put (pkgTarballNoGz tb)
  -- read order must mirror 'put'
  get = do
    gz <- Serialize.get
    noGz <- Serialize.get
    return PkgTarball {
      pkgTarballGz = gz,
      pkgTarballNoGz = noGz
    }
------------------------------------------------------
-- | The information we keep about a candidate package.
--
-- It's currently possible to have candidates for packages which don't exist yet.
--
data CandPkgInfo = CandPkgInfo {
    candInfoId :: !PackageIdentifier,

    -- there should be one ByteString and one BlobId per candidate.
    -- this was enforced in the types.. but it's easier to just
    -- reuse PkgInfo for the task.
    candPkgInfo :: !PkgInfo,

    -- | Warnings to display at the top of the package page.
    candWarnings :: ![String],

    -- | Whether to allow non-maintainers to view the page or not.
    candPublic :: !Bool
} deriving (Show, Typeable)
instance Package CandPkgInfo where packageId = candInfoId

instance Serialize CandPkgInfo where
  put pkgInfo = do
    Serialize.put (candInfoId pkgInfo)
    Serialize.put (candPkgInfo pkgInfo)
    Serialize.put (candWarnings pkgInfo)
    Serialize.put (candPublic pkgInfo)
  -- read order must mirror 'put'
  get = do
    infoId <- Serialize.get
    pkgInfo <- Serialize.get
    warning <- Serialize.get
    public <- Serialize.get
    return CandPkgInfo {
      candInfoId = infoId,
      candPkgInfo = pkgInfo,
      candWarnings = warning,
      candPublic = public
    }
| isomorphism/hackage2 | Distribution/Server/Packages/Types.hs | bsd-3-clause | 5,987 | 0 | 13 | 1,217 | 1,073 | 605 | 468 | 126 | 1 |
{-# LANGUAGE CPP, ScopedTypeVariables #-}
module TcErrors(
reportUnsolved, reportAllUnsolved, warnAllUnsolved,
warnDefaulting,
solverDepthErrorTcS
) where
#include "HsVersions.h"
import TcRnTypes
import TcRnMonad
import TcMType
import TcType
import RnEnv( unknownNameSuggestions )
import TypeRep
import Type
import Kind ( isKind )
import Unify ( tcMatchTys )
import Module
import FamInst
import Inst
import InstEnv
import TyCon
import DataCon
import TcEvidence
import Name
import RdrName ( lookupGRE_Name, GlobalRdrEnv, mkRdrUnqual )
import Class( className )
import PrelNames( typeableClassName )
import Id
import Var
import VarSet
import VarEnv
import Bag
import ErrUtils ( ErrMsg, pprLocErrMsg )
import BasicTypes
import Util
import FastString
import Outputable
import SrcLoc
import DynFlags
import StaticFlags ( opt_PprStyle_Debug )
import ListSetOps ( equivClasses )
import Control.Monad ( when )
import Data.Maybe
import Data.List ( partition, mapAccumL, nub, sortBy )
{-
************************************************************************
* *
\section{Errors and contexts}
* *
************************************************************************
ToDo: for these error messages, should we note the location as coming
from the insts, or just whatever seems to be around in the monad just
now?
Note [Deferring coercion errors to runtime]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
While developing, sometimes it is desirable to allow compilation to succeed even
if there are type errors in the code. Consider the following case:
module Main where
a :: Int
a = 'a'
main = print "b"
Even though `a` is ill-typed, it is not used in the end, so if all that we're
interested in is `main` it is handy to be able to ignore the problems in `a`.
Since we treat type equalities as evidence, this is relatively simple. Whenever
we run into a type mismatch in TcUnify, we normally just emit an error. But it
is always safe to defer the mismatch to the main constraint solver. If we do
that, `a` will get transformed into
co :: Int ~ Char
co = ...
a :: Int
a = 'a' `cast` co
The constraint solver would realize that `co` is an insoluble constraint, and
emit an error with `reportUnsolved`. But we can also replace the right-hand side
of `co` with `error "Deferred type error: Int ~ Char"`. This allows the program
to compile, and it will run fine unless we evaluate `a`. This is what
`deferErrorsToRuntime` does.
It does this by keeping track of which errors correspond to which coercion
in TcErrors. TcErrors.reportTidyWanteds does not print the errors
and does not fail if -fdefer-type-errors is on, so that we can continue
compilation. The errors are turned into warnings in `reportUnsolved`.
-}
-- | Report unsolved goals as errors or warnings.  We may also turn some into
-- deferred run-time errors if `-fdefer-type-errors` is on.  Returns the
-- evidence bindings created for any deferred errors.
reportUnsolved :: WantedConstraints -> TcM (Bag EvBind)
reportUnsolved wanted
  = do { binds_var <- newTcEvBinds

         -- Plain type errors: defer and/or warn per the flags (see #10283)
       ; defer_tyerrs <- goptM Opt_DeferTypeErrors
       ; warn_tyerrs  <- woptM Opt_WarnDeferredTypeErrors
       ; let type_errors
               | not defer_tyerrs = TypeError
               | warn_tyerrs      = TypeWarn
               | otherwise        = TypeDefer

         -- Typed holes in expressions
       ; defer_exprs <- goptM Opt_DeferTypedHoles
       ; warn_exprs  <- woptM Opt_WarnTypedHoles
       ; let expr_holes
               | not defer_exprs = HoleError
               | warn_exprs      = HoleWarn
               | otherwise       = HoleDefer

         -- Wildcards in partial type signatures
       ; wildcards      <- xoptM Opt_PartialTypeSignatures
       ; warn_wildcards <- woptM Opt_WarnPartialTypeSignatures
       ; let type_holes
               | not wildcards  = HoleError
               | warn_wildcards = HoleWarn
               | otherwise      = HoleDefer

       ; report_unsolved (Just binds_var) False type_errors expr_holes type_holes wanted
       ; getTcEvBinds binds_var }
-- | Report *all* unsolved goals as errors, even if -fdefer-type-errors is on
-- See Note [Deferring coercion errors to runtime]
reportAllUnsolved :: WantedConstraints -> TcM ()
reportAllUnsolved wanted
  = report_unsolved Nothing False TypeError HoleError HoleError wanted
-- | Report all unsolved goals as warnings (but without deferring any errors to
-- run-time). See Note [Safe Haskell Overlapping Instances Implementation] in
-- TcSimplify
warnAllUnsolved :: WantedConstraints -> TcM ()
warnAllUnsolved wanted
  = report_unsolved Nothing True TypeWarn HoleWarn HoleWarn wanted
-- | Report unsolved goals as errors or warnings.
-- Worker shared by 'reportUnsolved', 'reportAllUnsolved' and
-- 'warnAllUnsolved': zonks and tidies the constraints, builds the
-- reporting context, and hands off to 'reportWanteds'.
report_unsolved :: Maybe EvBindsVar  -- cec_binds
                -> Bool              -- Errors as warnings
                -> TypeErrorChoice   -- Deferred type errors
                -> HoleChoice        -- Expression holes
                -> HoleChoice        -- Type holes
                -> WantedConstraints -> TcM ()
report_unsolved mb_binds_var err_as_warn type_errors expr_holes type_holes wanted
  | isEmptyWC wanted
  = return ()
  | otherwise
  = do { traceTc "reportUnsolved (before zonking and tidying)" (ppr wanted)

       ; wanted <- zonkWC wanted   -- Zonk to reveal all information
       ; env0 <- tcInitTidyEnv
            -- If we are deferring we are going to need /all/ evidence around,
            -- including the evidence produced by unflattening (zonkWC)
       ; let tidy_env = tidyFreeTyVars env0 free_tvs
             free_tvs = tyVarsOfWC wanted

       ; traceTc "reportUnsolved (after zonking and tidying):" $
         vcat [ pprTvBndrs (varSetElems free_tvs)
              , ppr wanted ]

       ; warn_redundant <- woptM Opt_WarnRedundantConstraints
       ; let err_ctxt = CEC { cec_encl  = []
                            , cec_tidy  = tidy_env
                            , cec_defer_type_errors = type_errors
                            , cec_errors_as_warns = err_as_warn
                            , cec_expr_holes = expr_holes
                            , cec_type_holes = type_holes
                            , cec_suppress = False -- See Note [Suppressing error messages]
                            , cec_warn_redundant = warn_redundant
                            , cec_binds = mb_binds_var }

       ; tc_lvl <- getTcLevel
       ; reportWanteds err_ctxt tc_lvl wanted }
--------------------------------------------
--      Internal functions
--------------------------------------------

-- | What to do for type errors found by the type checker.
data TypeErrorChoice
  = TypeError  -- ^ A type error aborts compilation with an error message
  | TypeWarn   -- ^ A type error is deferred to runtime, plus a compile-time warning
  | TypeDefer  -- ^ A type error is deferred to runtime; no error or warning at compile time

-- | What to do for holes, in expressions or in partial type signatures.
data HoleChoice
  = HoleError  -- ^ A hole is a compile-time error
  | HoleWarn   -- ^ Defer to runtime, emit a compile-time warning
  | HoleDefer  -- ^ Defer to runtime, no warning
-- | Context threaded through error reporting: the enclosing implications,
-- tidying environment, and all the choices about what to defer or warn.
data ReportErrCtxt
  = CEC { cec_encl :: [Implication]  -- Enclosing implications
                                     --   (innermost first)
                                     -- ic_skols and givens are tidied, rest are not
        , cec_tidy :: TidyEnv
        , cec_binds :: Maybe EvBindsVar
                         -- Nothing <=> Report all errors, including holes; no bindings
                         -- Just ev <=> make some errors (depending on cec_defer)
                         --             into warnings, and emit evidence bindings
                         --             into 'ev' for unsolved constraints

        , cec_errors_as_warns :: Bool  -- Turn all errors into warnings
                                       -- (except for Holes, which are
                                       -- controlled by cec_type_holes and
                                       -- cec_expr_holes)

        , cec_defer_type_errors :: TypeErrorChoice  -- Defer type errors until runtime
                                                    -- Irrelevant if cec_binds = Nothing

        , cec_expr_holes :: HoleChoice  -- Holes in expressions
        , cec_type_holes :: HoleChoice  -- Holes in types

        , cec_warn_redundant :: Bool  -- True <=> -fwarn-redundant-constraints

        , cec_suppress :: Bool  -- True <=> More important errors have occurred,
                                --          so create bindings if need be, but
                                --          don't issue any more errors/warnings
                                -- See Note [Suppressing error messages]
        }
{-
Note [Suppressing error messages]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The cec_suppress flag says "don't report any errors". Instead, just create
evidence bindings (as usual). It's used when more important errors have occurred.
Specifically (see reportWanteds)
* If there are insoluble Givens, then we are in unreachable code and all bets
are off. So don't report any further errors.
* If there are any insolubles (eg Int~Bool), here or in a nested implication,
then suppress errors from the simple constraints here. Sometimes the
simple-constraint errors are a knock-on effect of the insolubles.
-}
-- | Report the unsolved constraints of one implication, after tidying its
-- skolems and givens into the context.  Template Haskell brackets are
-- special-cased: only definite (insoluble) errors are reported for them.
reportImplic :: ReportErrCtxt -> Implication -> TcM ()
reportImplic ctxt implic@(Implic { ic_skols = tvs, ic_given = given
                                 , ic_wanted = wanted, ic_binds = evb
                                 , ic_status = status, ic_info = info
                                 , ic_env = tcl_env, ic_tclvl = tc_lvl })
  | BracketSkol <- info
  , not insoluble
  = return ()     -- For Template Haskell brackets report only
                  -- definite errors. The whole thing will be re-checked
                  -- later when we plug it in, and meanwhile there may
                  -- certainly be un-satisfied constraints

  | otherwise
  = do { reportWanteds ctxt' tc_lvl wanted
       ; traceTc "reportImplic" (ppr implic)
       ; when (cec_warn_redundant ctxt) $
         warnRedundantConstraints ctxt' tcl_env info' dead_givens }
  where
    insoluble     = isInsolubleStatus status
    (env1, tvs')  = mapAccumL tidyTyVarBndr (cec_tidy ctxt) tvs
    (env2, info') = tidySkolemInfo env1 info
    implic' = implic { ic_skols = tvs'
                     , ic_given = map (tidyEvVar env2) given
                     , ic_info  = info' }
    ctxt' = ctxt { cec_tidy     = env2
                 , cec_encl     = implic' : cec_encl ctxt
                 , cec_suppress = insoluble  -- Suppress inessential errors if there
                                             -- are insolubles anywhere in the
                                             -- tree rooted here
                 , cec_binds    = case cec_binds ctxt of
                                    Nothing -> Nothing
                                    Just {} -> Just evb }
    dead_givens = case status of
                    IC_Solved { ics_dead = dead } -> dead
                    _ -> []
-- | Warn about redundant (unused) given constraints.
-- See Note [Redundant constraints in instance decls] for why some givens
-- in instance declarations are filtered out before warning.
warnRedundantConstraints :: ReportErrCtxt -> TcLclEnv -> SkolemInfo -> [EvVar] -> TcM ()
warnRedundantConstraints ctxt env info ev_vars
 | null redundant_evs
 = return ()

 | SigSkol {} <- info
 = setLclEnv env $  -- We want to add "In the type signature for f"
                    -- to the error context, which is a bit tiresome
   addErrCtxt (ptext (sLit "In") <+> ppr info) $
   do { env <- getLclEnv
      ; msg <- mkErrorMsg ctxt env doc
      ; reportWarning msg }

 | otherwise  -- But for InstSkol there already *is* a surrounding
              -- "In the instance declaration for Eq [a]" context
              -- and we don't want to say it twice. Seems a bit ad-hoc
 = do { msg <- mkErrorMsg ctxt env doc
      ; reportWarning msg }
 where
   doc = ptext (sLit "Redundant constraint") <> plural redundant_evs <> colon
         <+> pprEvVarTheta redundant_evs

   redundant_evs = case info of -- See Note [Redundant constraints in instance decls]
                     InstSkol -> filterOut improving ev_vars
                     _        -> ev_vars

   improving ev_var = any isImprovementPred $
                      transSuperClassesPred (idType ev_var)
{- Note [Redundant constraints in instance decls]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For instance declarations, we don't report unused givens if
they can give rise to improvement. Example (Trac #10100):
class Add a b ab | a b -> ab, a ab -> b
instance Add Zero b b
instance Add a b ab => Add (Succ a) b (Succ ab)
The context (Add a b ab) for the instance is clearly unused in terms
of evidence, since the dictionary has no fields. But it is still
needed! With the context, a wanted constraint
Add (Succ Zero) beta (Succ Zero)
we will reduce to (Add Zero beta Zero), and thence we get beta := Zero.
But without the context we won't find beta := Zero.
This only matters in instance declarations.
-}
-- | Report the constraints of one 'WantedConstraints': first the
-- must-report insolubles (report1), then the rest (report2, suppressed
-- if anything in report1 fired), then recurse into nested implications.
reportWanteds :: ReportErrCtxt -> TcLevel -> WantedConstraints -> TcM ()
reportWanteds ctxt tc_lvl (WC { wc_simple = simples, wc_insol = insols, wc_impl = implics })
  = do { traceTc "reportWanteds" (vcat [ ptext (sLit "Simples =") <+> ppr simples
                                       , ptext (sLit "Suppress =") <+> ppr (cec_suppress ctxt)])
       ; let tidy_cts = bagToList (mapBag (tidyCt env) (insols `unionBags` simples))

         -- First deal with things that are utterly wrong
         -- Like Int ~ Bool (incl nullary TyCons)
         -- or  Int ~ t a (AppTy on one side)
         -- These ones are not suppressed by the incoming context
       ; let ctxt_for_insols = ctxt { cec_suppress = False }
       ; (ctxt1, cts1) <- tryReporters ctxt_for_insols report1 tidy_cts

         -- Now all the other constraints.  We suppress errors here if
         -- any of the first batch failed, or if the enclosing context
         -- says to suppress
       ; let ctxt2 = ctxt { cec_suppress = cec_suppress ctxt || cec_suppress ctxt1 }
       ; (_, leftovers) <- tryReporters ctxt2 report2 cts1
       ; MASSERT2( null leftovers, ppr leftovers )

            -- All the Derived ones have been filtered out of simples
            -- by the constraint solver. This is ok; we don't want
            -- to report unsolved Derived goals as errors
            -- See Note [Do not report derived but soluble errors]

       ; mapBagM_ (reportImplic ctxt2) implics }
            -- NB ctxt1: don't suppress inner insolubles if there's only a
            -- wanted insoluble here; but do suppress inner insolubles
            -- if there's a *given* insoluble here (= inaccessible code)
 where
    env = cec_tidy ctxt

    -- report1: ones that should *not* be suppressed by
    --          an insoluble somewhere else in the tree
    -- It's crucial that anything that is considered insoluble
    -- (see TcRnTypes.trulyInsoluble) is caught here, otherwise
    -- we might suppress its error message, and proceed on past
    -- type checking to get a Lint error later
    report1 = [ ("insoluble1",   is_given,        True, mkGroupReporter mkEqErr)
              , ("insoluble2",   utterly_wrong,   True, mkGroupReporter mkEqErr)
              , ("insoluble3",   rigid_nom_tv_eq, True, mkSkolReporter)
              , ("insoluble4",   rigid_nom_eq,    True, mkGroupReporter mkEqErr)
              , ("Out of scope", is_out_of_scope, True, mkHoleReporter)
              , ("Holes",        is_hole,         False, mkHoleReporter)

                -- The only remaining equalities are alpha ~ ty,
                -- where alpha is untouchable; and representational equalities
              , ("Other eqs",    is_equality, False, mkGroupReporter mkEqErr) ]

    -- report2: we suppress these if there are insolubles elsewhere in the tree
    report2 = [ ("Implicit params", is_ip,    False, mkGroupReporter mkIPErr)
              , ("Irreds",          is_irred, False, mkGroupReporter mkIrredErr)
              , ("Dicts",           is_dict,  False, mkGroupReporter mkDictErr) ]

    rigid_nom_eq, rigid_nom_tv_eq, is_hole, is_dict,
      is_equality, is_ip, is_irred :: Ct -> PredTree -> Bool

    utterly_wrong _ (EqPred NomEq ty1 ty2) = isRigidTy ty1 && isRigidTy ty2
    utterly_wrong _ _                      = False

    is_out_of_scope ct _ = isOutOfScopeCt ct
    is_hole         ct _ = isHoleCt ct

    is_given ct _ = not (isWantedCt ct)  -- The Derived ones are actually all from Givens

    -- Skolem (i.e. non-meta) type variable on the left
    rigid_nom_eq _ pred = isRigidEqPred tc_lvl pred

    rigid_nom_tv_eq _ pred
      | EqPred _ ty1 _ <- pred = isRigidEqPred tc_lvl pred && isTyVarTy ty1
      | otherwise              = False

    is_equality _ (EqPred {}) = True
    is_equality _ _           = False

    is_dict _ (ClassPred {}) = True
    is_dict _ _              = False

    is_ip _ (ClassPred cls _) = isIPClass cls
    is_ip _ _                 = False

    is_irred _ (IrredPred {}) = True
    is_irred _ _              = False
---------------
-- | If the type is headed by a type-family type constructor, return it.
isTyFun_maybe :: Type -> Maybe TyCon
isTyFun_maybe ty
  | Just (tc, _) <- tcSplitTyConApp_maybe ty
  , isTypeFamilyTyCon tc
  = Just tc
  | otherwise
  = Nothing
--------------------------------------------
--      Reporters
--------------------------------------------

-- | A reporter consumes a batch of constraints and emits messages for them.
type Reporter
  = ReportErrCtxt -> [Ct] -> TcM ()

-- | A reporter plus its name, the predicate selecting the constraints
-- it handles, and whether it suppresses subsequent reporters.
type ReporterSpec
  = ( String                    -- Name
    , Ct -> PredTree -> Bool    -- Pick these ones
    , Bool                      -- True <=> suppress subsequent reporters
    , Reporter)                 -- The reporter itself
mkSkolReporter :: Reporter
-- Suppress duplicates with the same LHS: constraints are grouped into
-- equivalence classes by (equality relation, LHS type) and one message
-- is emitted per class.
mkSkolReporter ctxt cts
  = mapM_ (reportGroup mkEqErr ctxt) (equivClasses cmp_lhs_type cts)
  where
    -- Only equality predicates can reach this reporter; anything else
    -- is a panic.
    cmp_lhs_type ct1 ct2
      = case (classifyPredType (ctPred ct1), classifyPredType (ctPred ct2)) of
          (EqPred eq_rel1 ty1 _, EqPred eq_rel2 ty2 _) ->
            (eq_rel1 `compare` eq_rel2) `thenCmp` (ty1 `cmpType` ty2)
          _ -> pprPanic "mkSkolReporter" (ppr ct1 $$ ppr ct2)
mkHoleReporter :: Reporter
-- Reports errors one at a time
mkHoleReporter ctxt cts = mapM_ report_one cts
  where
    -- Build the hole message, emit it according to the hole flags, then
    -- possibly create a deferred binding for it.
    report_one ct = do { err <- mkHoleError ctxt ct
                       ; maybeReportHoleError ctxt ct err
                       ; maybeAddDeferredHoleBinding ctxt err ct }
mkGroupReporter :: (ReportErrCtxt -> [Ct] -> TcM ErrMsg)
                   -- Make error message for a group
                -> Reporter  -- Deal with lots of constraints
-- Group together errors from same location,
-- and report only the first (to avoid a cascade)
mkGroupReporter mk_err ctxt cts
  = mapM_ (reportGroup mk_err ctxt) (equivClasses cmp_loc cts)
  where
    -- Constraints with the same source span end up in the same group.
    cmp_loc ct1 ct2 = ctLocSpan (ctLoc ct1) `compare` ctLocSpan (ctLoc ct2)
reportGroup :: (ReportErrCtxt -> [Ct] -> TcM ErrMsg) -> ReportErrCtxt
            -> [Ct] -> TcM ()
-- Emit one message for a whole group of constraints, then add a deferred
-- binding for every member of the group.
reportGroup mk_err ctxt cts
  = do { err <- mk_err ctxt cts
       ; maybeReportError ctxt err
       ; mapM_ (maybeAddDeferredBinding ctxt err) cts }
         -- Add deferred bindings for all
         -- But see Note [Always warn with -fdefer-type-errors]
maybeReportHoleError :: ReportErrCtxt -> Ct -> ErrMsg -> TcM ()
-- Emit a hole message as an error, a warning, or not at all, depending on
-- the relevant HoleChoice.  Wildcards in partial type signatures are
-- governed by cec_type_holes (warnings only unless
-- -fwarn-partial-type-signatures demands otherwise); typed holes in
-- expressions are governed by cec_expr_holes (-fwarn-typed-holes).
maybeReportHoleError ctxt ct err
  = case hole_choice of
      HoleError -> reportError err
      HoleWarn  -> reportWarning err
      HoleDefer -> return ()
  where
    hole_choice | isTypeHoleCt ct = cec_type_holes ctxt  -- wildcard in a type signature
                | otherwise       = cec_expr_holes ctxt  -- hole in an expression
maybeReportError :: ReportErrCtxt -> ErrMsg -> TcM ()
-- Report the message as an error or a warning, or swallow it entirely,
-- according to the context.  Deferred bindings are made separately by
-- the caller (see maybeAddDeferredBinding).
maybeReportError ctxt err
  | cec_errors_as_warns ctxt
  = reportWarning err
  | otherwise
  = case cec_defer_type_errors ctxt of
      TypeDefer -> return ()           -- deferred silently to runtime
      TypeWarn  -> reportWarning err   -- deferred, but warn at compile time
      TypeError -> when (not (cec_suppress ctxt)) $
                   reportError err     -- suppressed when a worse error exists
addDeferredBinding :: ReportErrCtxt -> ErrMsg -> Ct -> TcM ()
-- See Note [Deferring coercion errors to runtime]
-- Binds the constraint's evidence variable to a runtime error carrying
-- the rendered message, so the program compiles but fails if evaluated.
addDeferredBinding ctxt err ct
  | CtWanted { ctev_pred = pred, ctev_evar = ev_id } <- ctEvidence ct
    -- Only add deferred bindings for Wanted constraints
  , Just ev_binds_var <- cec_binds ctxt  -- We have somewhere to put the bindings
  = do { dflags <- getDynFlags
       ; let err_msg = pprLocErrMsg err
             err_fs  = mkFastString $ showSDoc dflags $
                       err_msg $$ text "(deferred type error)"

         -- Create the binding
       ; addTcEvBind ev_binds_var (mkWantedEvBind ev_id (EvDelayedError pred err_fs)) }

  | otherwise   -- Do not set any evidence for Given/Derived
  = return ()
maybeAddDeferredHoleBinding :: ReportErrCtxt -> ErrMsg -> Ct -> TcM ()
-- Add a deferred binding only for holes in expressions — never for
-- wildcards in partial type signatures — and only when expression holes
-- are being deferred or warned about rather than treated as errors.
maybeAddDeferredHoleBinding ctxt err ct
  | isExprHoleCt ct
  , deferrable (cec_expr_holes ctxt)
  = addDeferredBinding ctxt err ct
  | otherwise
  = return ()
  where
    deferrable HoleDefer = True
    deferrable HoleWarn  = True
    deferrable HoleError = False
maybeAddDeferredBinding :: ReportErrCtxt -> ErrMsg -> Ct -> TcM ()
-- With -fdefer-type-errors (TypeDefer or TypeWarn) create a deferred
-- binding for the constraint; with ordinary errors (TypeError) there is
-- nothing to defer.
maybeAddDeferredBinding ctxt err ct =
  case cec_defer_type_errors ctxt of
    TypeError -> return ()
    _deferred -> addDeferredBinding ctxt err ct
tryReporters :: ReportErrCtxt -> [ReporterSpec] -> [Ct] -> TcM (ReportErrCtxt, [Ct])
-- Use the first reporter in the list whose predicate says True
-- Threads the (possibly suppression-updated) context through the
-- reporters and returns the constraints nobody claimed.
tryReporters ctxt reporters cts
  = do { traceTc "tryReporters {" (ppr cts)
       ; (ctxt', cts') <- go ctxt reporters cts
       ; traceTc "tryReporters }" (ppr cts')
       ; return (ctxt', cts') }
  where
    go ctxt [] cts
      = return (ctxt, cts)
    go ctxt (r : rs) cts
      = do { (ctxt', cts') <- tryReporter ctxt r cts
           ; go ctxt' rs cts' }
        -- Carry on with the rest, because we must make
        -- deferred bindings for them if we have -fdefer-type-errors
        -- But suppress their error messages
tryReporter :: ReportErrCtxt -> ReporterSpec -> [Ct] -> TcM (ReportErrCtxt, [Ct])
-- Run a single reporter on the constraints its predicate picks out,
-- returning the unclaimed constraints and a context whose suppression
-- flag reflects whether this reporter silences later ones.
tryReporter ctxt (str, keep_me, suppress_after, reporter) cts
  | null yeses = return (ctxt, cts)
  | otherwise  = do { traceTc "tryReporter:" (text str <+> ppr yeses)
                    ; reporter ctxt yeses
                    ; let ctxt' = ctxt { cec_suppress = suppress_after || cec_suppress ctxt }
                    ; return (ctxt', nos) }
  where
    (yeses, nos) = partition (\ct -> keep_me ct (classifyPredType (ctPred ct))) cts
pprArising :: CtOrigin -> SDoc
-- Used for the main, top-level error message
-- We've done special processing for TypeEq, KindEq, Given, so those
-- origins print nothing here.
pprArising orig = case orig of
  TypeEqOrigin {} -> empty
  KindEqOrigin {} -> empty
  GivenOrigin {}  -> empty
  _               -> pprCtOrigin orig
-- Add the "arising from..." part to a message about bunch of dicts
addArising :: CtOrigin -> SDoc -> SDoc
addArising orig msg = hang msg 2 origin_doc
  where
    origin_doc = pprArising orig
pprWithArising :: [Ct] -> (CtLoc, SDoc)
-- Print something like
--   (Eq a) arising from a use of x at y
--   (Show a) arising from a use of p at q
-- Also return a location for the error message
-- Works for Wanted/Derived only
-- Callers must pass a non-empty list; the empty case is a panic.
pprWithArising []
  = panic "pprWithArising"
pprWithArising (ct:cts)
  | null cts
  = (loc, addArising (ctLocOrigin loc)
                     (pprTheta [ctPred ct]))
  | otherwise
  = (loc, vcat (map ppr_one (ct:cts)))
  where
    loc = ctLoc ct
    ppr_one ct' = hang (parens (pprType (ctPred ct')))
                     2 (pprCtLoc (ctLoc ct'))
-- | Build an error message located at the constraint's own source location.
mkErrorMsgFromCt :: ReportErrCtxt -> Ct -> SDoc -> TcM ErrMsg
mkErrorMsgFromCt ctxt ct msg = mkErrorMsg ctxt lcl_env msg
  where
    lcl_env = ctLocEnv (ctLoc ct)
mkErrorMsg :: ReportErrCtxt -> TcLclEnv -> SDoc -> TcM ErrMsg
-- Build a long error message at the local environment's source location,
-- attaching the tidied error context as supplementary information.
mkErrorMsg ctxt tcl_env msg
  = do { err_info <- mkErrInfo (cec_tidy ctxt) (tcl_ctxt tcl_env)
       ; mkLongErrAt (RealSrcSpan (tcl_loc tcl_env)) msg err_info }
-- | The user-written givens of one enclosing implication:
-- (givens, skolem info, no-equalities flag, source location).
type UserGiven = ([EvVar], SkolemInfo, Bool, RealSrcSpan)

getUserGivens :: ReportErrCtxt -> [UserGiven]
-- One item for each enclosing implication
-- (outermost first, hence the reverse); implications with no givens
-- are dropped.
getUserGivens (CEC {cec_encl = ctxt})
  = reverse $
    [ (givens, info, no_eqs, tcl_loc env)
    | Implic { ic_given = givens, ic_env = env
             , ic_no_eqs = no_eqs, ic_info = info } <- ctxt
    , not (null givens) ]
{-
Note [Always warn with -fdefer-type-errors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When -fdefer-type-errors is on we warn about *all* type errors, even
if cec_suppress is on. This can lead to a lot more warnings than you
would get errors without -fdefer-type-errors, but if we suppress any of
them you might get a runtime error that wasn't warned about at compile
time.
This is an easy design choice to change; just flip the order of the
first two equations for maybeReportError
To be consistent, we should also report multiple warnings from a single
location in mkGroupReporter, when -fdefer-type-errors is on. But that
is perhaps a bit *over*-consistent! Again, an easy choice to change.
With #10283, you can now opt out of deferred type error warnings.
Note [Do not report derived but soluble errors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The wc_simples include Derived constraints that have not been solved, but are
not insoluble (in that case they'd be in wc_insols). We do not want to report
these as errors:
* Superclass constraints. If we have an unsolved [W] Ord a, we'll also have
an unsolved [D] Eq a, and we do not want to report that; it's just noise.
* Functional dependencies. For givens, consider
class C a b | a -> b
data T a where
MkT :: C a d => [d] -> T a
f :: C a b => T a -> F Int
f (MkT xs) = length xs
Then we get a [D] b~d. But there *is* a legitimate call to
f, namely f (MkT [True]) :: T Bool, in which b=d. So we should
not reject the program.
For wanteds, something similar
data T a where
MkT :: C Int b => a -> b -> T a
g :: C Int c => c -> ()
f :: T a -> ()
f (MkT x y) = g x
Here we get [G] C Int b, [W] C Int a, hence [D] a~b.
But again f (MkT True True) is a legitimate call.
(We leave the Deriveds in wc_simple until reportErrors, so that we don't lose
derived superclasses between iterations of the solver.)
For functional dependencies, here is a real example,
stripped off from libraries/utf8-string/Codec/Binary/UTF8/Generic.hs
class C a b | a -> b
g :: C a b => a -> b -> ()
f :: C a b => a -> b -> ()
f xa xb =
let loop = g xa
in loop xb
We will first try to infer a type for loop, and we will succeed:
C a b' => b' -> ()
Subsequently, we will type check (loop xb) and all is good. But,
recall that we have to solve a final implication constraint:
C a b => (C a b' => .... cts from body of loop .... ))
And now we have a problem as we will generate an equality b ~ b' and fail to
solve it.
************************************************************************
* *
Irreducible predicate errors
* *
************************************************************************
-}
mkIrredErr :: ReportErrCtxt -> [Ct] -> TcM ErrMsg
-- Error message for a group of irreducible predicates.  Callers
-- (mkGroupReporter) guarantee a non-empty group, so the head pattern
-- in the where clause is safe.
mkIrredErr ctxt cts
  = do { (ctxt, binds_msg, ct1) <- relevantBindings True ctxt ct1
       ; let orig = ctOrigin ct1
             msg  = couldNotDeduce (getUserGivens ctxt) (map ctPred cts, orig)
       ; mkErrorMsgFromCt ctxt ct1 (msg $$ binds_msg) }
  where
    (ct1:_) = cts
----------------
-- | Build the message for a hole constraint: either an out-of-scope
-- variable (with in-scope suggestions) or an explicit hole like "_"
-- or a type wildcard.  Any non-CHoleCan constraint is a panic.
mkHoleError :: ReportErrCtxt -> Ct -> TcM ErrMsg
mkHoleError ctxt ct@(CHoleCan { cc_occ = occ, cc_hole = hole_sort })
  | isOutOfScopeCt ct  -- Out of scope variables, like 'a', where 'a' isn't bound
                       -- Suggest possible in-scope variables in the message
  = do { dflags  <- getDynFlags
       ; rdr_env <- getGlobalRdrEnv
       ; mkLongErrAt (RealSrcSpan (tcl_loc lcl_env)) out_of_scope_msg
                     (unknownNameSuggestions dflags rdr_env
                                             (tcl_rdr lcl_env) (mkRdrUnqual occ)) }

  | otherwise  -- Explicit holes, like "_" or "_f"
  = do { (ctxt, binds_doc, ct) <- relevantBindings False ctxt ct
               -- The 'False' means "don't filter the bindings"; see Trac #8191
       ; mkErrorMsgFromCt ctxt ct (hole_msg $$ binds_doc) }

  where
    ct_loc      = ctLoc ct
    lcl_env     = ctLocEnv ct_loc
    hole_ty     = ctEvPred (ctEvidence ct)
    tyvars      = varSetElems (tyVarsOfType hole_ty)
    boring_type = isTyVarTy hole_ty

    out_of_scope_msg -- Print v :: ty only if the type has structure
      | boring_type = hang herald 2 (ppr occ)
      | otherwise   = hang herald 2 pp_with_type

    pp_with_type = hang (pprPrefixOcc occ) 2 (dcolon <+> pprType hole_ty)
    herald | isDataOcc occ = ptext (sLit "Data constructor not in scope:")
           | otherwise     = ptext (sLit "Variable not in scope:")

    hole_msg = case hole_sort of
      ExprHole -> vcat [ hang (ptext (sLit "Found hole:"))
                            2 pp_with_type
                       , tyvars_msg, expr_hole_hint ]
      TypeHole -> vcat [ hang (ptext (sLit "Found type wildcard") <+> quotes (ppr occ))
                            2 (ptext (sLit "standing for") <+> quotes (pprType hole_ty))
                       , tyvars_msg, type_hole_hint ]

    tyvars_msg = ppUnless (null tyvars) $
                 ptext (sLit "Where:") <+> vcat (map loc_msg tyvars)

    type_hole_hint
      | HoleError <- cec_type_holes ctxt
      = ptext (sLit "To use the inferred type, enable PartialTypeSignatures")
      | otherwise
      = empty

    expr_hole_hint  -- Give hint for, say, f x = _x
      | lengthFS (occNameFS occ) > 1  -- Don't give this hint for plain "_"
      = ptext (sLit "Or perhaps") <+> quotes (ppr occ)
        <+> ptext (sLit "is mis-spelled, or not in scope")
      | otherwise
      = empty

    -- Describe where each free type variable of the hole type came from.
    loc_msg tv
      = case tcTyVarDetails tv of
          SkolemTv {} -> quotes (ppr tv) <+> skol_msg
          MetaTv {}   -> quotes (ppr tv) <+> ptext (sLit "is an ambiguous type variable")
          det         -> pprTcTyVarDetails det
      where
        skol_msg = pprSkol (getSkolemInfo (cec_encl ctxt) tv) (getSrcLoc tv)

mkHoleError _ ct = pprPanic "mkHoleError" (ppr ct)
----------------
-- | Error message for unbound implicit parameters.  Callers
-- (mkGroupReporter) guarantee a non-empty group, so the head pattern
-- in the where clause is safe.
mkIPErr :: ReportErrCtxt -> [Ct] -> TcM ErrMsg
mkIPErr ctxt cts
  = do { (ctxt, bind_msg, ct1) <- relevantBindings True ctxt ct1
       ; let orig   = ctOrigin ct1
             preds  = map ctPred cts
             givens = getUserGivens ctxt
             msg | null givens
                 = addArising orig $
                   sep [ ptext (sLit "Unbound implicit parameter") <> plural cts
                       , nest 2 (pprTheta preds) ]
                 | otherwise
                 = couldNotDeduce givens (preds, orig)
       ; mkErrorMsgFromCt ctxt ct1 (msg $$ bind_msg) }
  where
    (ct1:_) = cts
{-
************************************************************************
* *
Equality errors
* *
************************************************************************
Note [Inaccessible code]
~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data T a where
T1 :: T a
T2 :: T Bool
f :: (a ~ Int) => T a -> Int
f T1 = 3
f T2 = 4 -- Unreachable code
Here the second equation is unreachable. The original constraint
(a~Int) from the signature gets rewritten by the pattern-match to
(Bool~Int), so the danger is that we report the error as coming from
the *signature* (Trac #7293). So, for Given errors we replace the
env (and hence src-loc) on its CtLoc with that from the immediately
enclosing implication.
-}
mkEqErr :: ReportErrCtxt -> [Ct] -> TcM ErrMsg
-- Don't have multiple equality errors from the same location
-- E.g.   (Int,Bool) ~ (Bool,Int)   one error will do!
-- An empty group is a panic; reporters never produce one.
mkEqErr _    []       = panic "mkEqErr"
mkEqErr ctxt (ct : _) = mkEqErr1 ctxt ct
mkEqErr1 :: ReportErrCtxt -> Ct -> TcM ErrMsg
-- Wanted constraints only!
-- For Given constraints the message is relocated to the enclosing
-- implication; see Note [Inaccessible code].
mkEqErr1 ctxt ct
  | isGivenCt ct
  = do { (ctxt, binds_msg, ct) <- relevantBindings True ctxt ct
       ; let (given_loc, given_msg) = mk_given (ctLoc ct) (cec_encl ctxt)
       ; dflags <- getDynFlags
       ; mkEqErr_help dflags ctxt (given_msg $$ binds_msg)
                      (setCtLoc ct given_loc) -- Note [Inaccessible code]
                      Nothing ty1 ty2 }

  | otherwise   -- Wanted or derived
  = do { (ctxt, binds_msg, ct) <- relevantBindings True ctxt ct
       ; rdr_env  <- getGlobalRdrEnv
       ; fam_envs <- tcGetFamInstEnvs
       ; exp_syns <- goptM Opt_PrintExpandedSynonyms
       ; let (is_oriented, wanted_msg) = mk_wanted_extra (ctOrigin ct) exp_syns
             coercible_msg = case ctEqRel ct of
               NomEq  -> empty
               ReprEq -> mkCoercibleExplanation rdr_env fam_envs ty1 ty2
       ; dflags <- getDynFlags
       ; traceTc "mkEqErr1" (ppr ct $$ pprCtOrigin (ctOrigin ct))
       ; mkEqErr_help dflags ctxt (wanted_msg $$ coercible_msg $$ binds_msg)
                      ct is_oriented ty1 ty2 }
  where
    (ty1, ty2) = getEqPredTys (ctPred ct)

    mk_given :: CtLoc -> [Implication] -> (CtLoc, SDoc)
    -- For given constraints we overwrite the env (and hence src-loc)
    -- with one from the implication. See Note [Inaccessible code]
    mk_given loc []           = (loc, empty)
    mk_given loc (implic : _) = (setCtLocEnv loc (ic_env implic)
                                , hang (ptext (sLit "Inaccessible code in"))
                                     2 (ppr (ic_info implic)))

    -- If the types in the error message are the same as the types
    -- we are unifying, don't add the extra expected/actual message
    mk_wanted_extra :: CtOrigin -> Bool -> (Maybe SwapFlag, SDoc)
    mk_wanted_extra orig@(TypeEqOrigin {}) expandSyns
      = mkExpectedActualMsg ty1 ty2 orig expandSyns

    mk_wanted_extra (KindEqOrigin cty1 cty2 sub_o) expandSyns
      = (Nothing, msg1 $$ msg2)
      where
        msg1 = hang (ptext (sLit "When matching types"))
                  2 (vcat [ ppr cty1 <+> dcolon <+> ppr (typeKind cty1)
                          , ppr cty2 <+> dcolon <+> ppr (typeKind cty2) ])
        msg2 = case sub_o of
          TypeEqOrigin {} ->
            snd (mkExpectedActualMsg cty1 cty2 sub_o expandSyns)
          _ ->
            empty

    mk_wanted_extra _ _ = (Nothing, empty)
-- | This function tries to reconstruct why a "Coercible ty1 ty2" constraint
-- is left over: an abstract tycon, an out-of-scope newtype constructor,
-- or parameters whose roles cannot be known.
mkCoercibleExplanation :: GlobalRdrEnv -> FamInstEnvs
                       -> TcType -> TcType -> SDoc
mkCoercibleExplanation rdr_env fam_envs ty1 ty2
  | Just (tc, tys) <- tcSplitTyConApp_maybe ty1
  , (rep_tc, _, _) <- tcLookupDataFamInst fam_envs tc tys
  , Just msg <- coercible_msg_for_tycon rep_tc
  = msg
  | Just (tc, tys) <- splitTyConApp_maybe ty2
  , (rep_tc, _, _) <- tcLookupDataFamInst fam_envs tc tys
  , Just msg <- coercible_msg_for_tycon rep_tc
  = msg
  | Just (s1, _) <- tcSplitAppTy_maybe ty1
  , Just (s2, _) <- tcSplitAppTy_maybe ty2
  , s1 `eqType` s2
  , has_unknown_roles s1
  = hang (text "NB: We cannot know what roles the parameters to" <+>
          quotes (ppr s1) <+> text "have;")
       2 (text "we must assume that the role is nominal")
  | otherwise
  = empty
  where
    coercible_msg_for_tycon tc
      | isAbstractTyCon tc
      = Just $ hsep [ text "NB: The type constructor"
                    , quotes (pprSourceTyCon tc)
                    , text "is abstract" ]
      | isNewTyCon tc
      , [data_con] <- tyConDataCons tc
      , let dc_name = dataConName data_con
      , null (lookupGRE_Name rdr_env dc_name)
      = Just $ hang (text "The data constructor" <+> quotes (ppr dc_name))
                  2 (sep [ text "of newtype" <+> quotes (pprSourceTyCon tc)
                         , text "is not in scope" ])
      | otherwise = Nothing

    has_unknown_roles ty
      | Just (tc, tys) <- tcSplitTyConApp_maybe ty
      = length tys >= tyConArity tc  -- oversaturated tycon
      | Just (s, _) <- tcSplitAppTy_maybe ty
      = has_unknown_roles s
      | isTyVarTy ty
      = True
      | otherwise
      = False
{-
-- | Make a listing of role signatures for all the parameterised tycons
-- used in the provided types
-- SLPJ Jun 15: I could not convince myself that these hints were really
-- useful. Maybe they are, but I think we need more work to make them
-- actually helpful.
mkRoleSigs :: Type -> Type -> SDoc
mkRoleSigs ty1 ty2
= ppUnless (null role_sigs) $
hang (text "Relevant role signatures:")
2 (vcat role_sigs)
where
tcs = nameEnvElts $ tyConsOfType ty1 `plusNameEnv` tyConsOfType ty2
role_sigs = mapMaybe ppr_role_sig tcs
ppr_role_sig tc
| null roles -- if there are no parameters, don't bother printing
= Nothing
| otherwise
= Just $ hsep $ [text "type role", ppr tc] ++ map ppr roles
where
roles = tyConRoles tc
-}
-- | Dispatch an unsolved equality to the right reporting function:
-- if either side is a type variable we use the specialised tyvar
-- report (flipping the orientation when the variable is on the right),
-- otherwise we fall back to the generic mismatch report.
mkEqErr_help :: DynFlags -> ReportErrCtxt -> SDoc
             -> Ct
             -> Maybe SwapFlag   -- Nothing <=> not sure
             -> TcType -> TcType -> TcM ErrMsg
mkEqErr_help dflags ctxt extra ct oriented ty1 ty2
  = case (tcGetTyVar_maybe ty1, tcGetTyVar_maybe ty2) of
      (Just tv1, _) -> mkTyVarEqErr dflags ctxt extra ct oriented tv1 ty2
      (_, Just tv2) -> mkTyVarEqErr dflags ctxt extra ct flipped tv2 ty1
      _             -> reportEqErr ctxt extra ct oriented ty1 ty2
  where
    -- The tyvar moved from the right to the left, so the
    -- actual/expected orientation (if known) reverses too
    flipped = fmap flipSwap oriented
-- | Generic report for an unsolved equality between two non-variable
-- types: the mismatch itself, an injectivity/ambiguity note when type
-- functions are involved, and any caller-supplied extra text.
reportEqErr :: ReportErrCtxt -> SDoc
            -> Ct
            -> Maybe SwapFlag   -- Nothing <=> not sure
            -> TcType -> TcType -> TcM ErrMsg
reportEqErr ctxt extra1 ct oriented ty1 ty2
  = mkErrorMsgFromCt ctxt ct $
    vcat [ misMatchOrCND ctxt ct oriented ty1 ty2
         , mkEqInfoMsg ct ty1 ty2   -- type-function notes, if any
         , extra1 ]
mkTyVarEqErr :: DynFlags -> ReportErrCtxt -> SDoc -> Ct
             -> Maybe SwapFlag -> TcTyVar -> TcType -> TcM ErrMsg
-- tv1 and ty2 are already tidied
-- Report an unsolved equality "tv1 ~ ty2" whose left side is a type
-- variable.  The guards run from most specific diagnosis to least, so
-- their order is significant: user skolem / kind error / occurs check /
-- impredicativity / inferred-signature skolem / skolem escape /
-- untouchable variable / generic fallback.
mkTyVarEqErr dflags ctxt extra ct oriented tv1 ty2
  -- Plain mismatch cases where the tyvar-specific analyses below
  -- would not help
  | isUserSkolem ctxt tv1   -- ty2 won't be a meta-tyvar, or else the thing would
                            --   be oriented the other way round;
                            --   see TcCanonical.canEqTyVarTyVar
    || isSigTyVar tv1 && not (isTyVarTy ty2)
    || ctEqRel ct == ReprEq   -- the cases below don't really apply to ReprEq
  = mkErrorMsgFromCt ctxt ct (vcat [ misMatchOrCND ctxt ct oriented ty1 ty2
                                   , extraTyVarInfo ctxt tv1 ty2
                                   , extra ])

  -- So tv is a meta tyvar (or started that way before we
  -- generalised it).  So presumably it is an *untouchable*
  -- meta tyvar or a SigTv, else it'd have been unified
  | not (k2 `tcIsSubKind` k1)   -- Kind error
  = mkErrorMsgFromCt ctxt ct $ (kindErrorMsg (mkTyVarTy tv1) ty2 $$ extra)

  -- Occurs check: tv1 appears inside ty2, e.g. (a ~ [a])
  | OC_Occurs <- occ_check_expand
  , NomEq <- ctEqRel ct   -- reporting occurs check for Coercible is strange
  = do { let occCheckMsg = addArising (ctOrigin ct) $
               hang (text "Occurs check: cannot construct the infinite type:")
                  2 (sep [ppr ty1, char '~', ppr ty2])
             extra2 = mkEqInfoMsg ct ty1 ty2
       ; mkErrorMsgFromCt ctxt ct (occCheckMsg $$ extra2 $$ extra) }

  -- ty2 contains a forall: instantiating tv1 with it would require
  -- impredicative polymorphism
  | OC_Forall <- occ_check_expand
  = do { let msg = vcat [ ptext (sLit "Cannot instantiate unification variable")
                          <+> quotes (ppr tv1)
                        , hang (ptext (sLit "with a type involving foralls:")) 2 (ppr ty2)
                        , nest 2 (ptext (sLit "GHC doesn't yet support impredicative polymorphism")) ]
       ; mkErrorMsgFromCt ctxt ct msg }

  -- If the immediately-enclosing implication has 'tv' a skolem, and
  -- we know by now its an InferSkol kind of skolem, then presumably
  -- it started life as a SigTv, else it'd have been unified, given
  -- that there's no occurs-check or forall problem
  | (implic:_) <- cec_encl ctxt
  , Implic { ic_skols = skols } <- implic
  , tv1 `elem` skols
  = mkErrorMsgFromCt ctxt ct (vcat [ misMatchMsg ct oriented ty1 ty2
                                   , extraTyVarInfo ctxt tv1 ty2
                                   , extra ])

  -- Check for skolem escape: ty2 mentions skolems bound by the
  -- innermost implication, which would leak out of their scope
  | (implic:_) <- cec_encl ctxt   -- Get the innermost context
  , Implic { ic_env = env, ic_skols = skols, ic_info = skol_info } <- implic
  , let esc_skols = filter (`elemVarSet` (tyVarsOfType ty2)) skols
  , not (null esc_skols)
  = do { let msg = misMatchMsg ct oriented ty1 ty2
             esc_doc = sep [ ptext (sLit "because type variable") <> plural esc_skols
                             <+> pprQuotedList esc_skols
                           , ptext (sLit "would escape") <+>
                             if isSingleton esc_skols then ptext (sLit "its scope")
                                                      else ptext (sLit "their scope") ]
             tv_extra = vcat [ nest 2 $ esc_doc
                             , sep [ (if isSingleton esc_skols
                                      then ptext (sLit "This (rigid, skolem) type variable is")
                                      else ptext (sLit "These (rigid, skolem) type variables are"))
                                     <+> ptext (sLit "bound by")
                                   , nest 2 $ ppr skol_info
                                   , nest 2 $ ptext (sLit "at") <+> ppr (tcl_loc env) ] ]
       ; mkErrorMsgFromCt ctxt ct (msg $$ tv_extra $$ extra) }

  -- Nastiest case: attempt to unify an untouchable variable
  | (implic:_) <- cec_encl ctxt   -- Get the innermost context
  , Implic { ic_env = env, ic_given = given, ic_info = skol_info } <- implic
  = do { let msg = misMatchMsg ct oriented ty1 ty2
             tclvl_extra
                = nest 2 $
                  sep [ quotes (ppr tv1) <+> ptext (sLit "is untouchable")
                      , nest 2 $ ptext (sLit "inside the constraints:") <+> pprEvVarTheta given
                      , nest 2 $ ptext (sLit "bound by") <+> ppr skol_info
                      , nest 2 $ ptext (sLit "at") <+> ppr (tcl_loc env) ]
             tv_extra = extraTyVarInfo ctxt tv1 ty2
             add_sig  = suggestAddSig ctxt ty1 ty2
       ; mkErrorMsgFromCt ctxt ct (vcat [msg, tclvl_extra, tv_extra, add_sig, extra]) }

  | otherwise
  = reportEqErr ctxt extra ct oriented (mkTyVarTy tv1) ty2
        -- This *can* happen (Trac #6123, and test T2627b)
        -- Consider an ambiguous top-level constraint (a ~ F a)
        -- Not an occurs check, because F is a type function.
  where
    occ_check_expand = occurCheckExpand dflags tv1 ty2
    k1  = tyVarKind tv1
    k2  = typeKind ty2
    ty1 = mkTyVarTy tv1
mkEqInfoMsg :: Ct -> TcType -> TcType -> SDoc
-- Report (a) ambiguity if either side is a type function application
--            e.g. F a0 ~ Int
--        (b) warning about injectivity if both sides are the same
--            type function application   F a ~ F b
--            See Note [Non-injective type functions]
mkEqInfoMsg ct ty1 ty2
  = tyfun_msg $$ ambig_msg
  where
    mb_fun1 = isTyFun_maybe ty1
    mb_fun2 = isTyFun_maybe ty2

    -- Ambiguity note is worth emitting only when a type function is
    -- involved; mkAmbigMsg supplies the text (or empty)
    ambig_msg | isJust mb_fun1 || isJust mb_fun2
              = snd (mkAmbigMsg ct)
              | otherwise = empty

    -- Both sides are applications of the *same* type function, so
    -- "F a ~ F b" does not let us conclude "a ~ b"
    tyfun_msg | Just tc1 <- mb_fun1
              , Just tc2 <- mb_fun2
              , tc1 == tc2
              = ptext (sLit "NB:") <+> quotes (ppr tc1)
                <+> ptext (sLit "is a type function, and may not be injective")
              | otherwise = empty
-- | Is this a skolem bound by something the *user* wrote (a declared
-- type signature, an instance head, ...), as opposed to a skolem GHC
-- invented while generalising an inferred type?
-- See Note [Reporting occurs-check errors]
isUserSkolem :: ReportErrCtxt -> TcTyVar -> Bool
isUserSkolem ctxt tv
  | isSkolemTyVar tv = any binds_tv_as_user_skol (cec_encl ctxt)
  | otherwise        = False
  where
    binds_tv_as_user_skol (Implic { ic_skols = sks, ic_info = skol_info })
      = tv `elem` sks && from_user skol_info
    -- Inferred-signature skolems are the one non-user kind
    from_user (InferSkol {}) = False
    from_user _              = True
misMatchOrCND :: ReportErrCtxt -> Ct -> Maybe SwapFlag -> TcType -> TcType -> SDoc
-- If oriented then ty1 is actual, ty2 is expected
-- Choose between a plain "Couldn't match" message and a
-- "Could not deduce ... from the context" message, depending on whether
-- the enclosing givens could conceivably have helped.
misMatchOrCND ctxt ct oriented ty1 ty2
  | null givens ||
    (isRigidTy ty1 && isRigidTy ty2) ||
    isGivenCt ct
       -- If the equality is unconditionally insoluble
       -- or there is no context, don't report the context
  = misMatchMsg ct oriented ty1 ty2
  | otherwise
  = couldNotDeduce givens ([eq_pred], orig)
  where
    ev      = ctEvidence ct
    eq_pred = ctEvPred ev
    orig    = ctEvOrigin ev
    -- Keep only UserGivens that have some equalities;
    -- givens without equalities cannot help solve an equality
    givens  = [ given | given@(_, _, no_eqs, _) <- getUserGivens ctxt, not no_eqs]
-- | Render a "Could not deduce <wanteds>" message followed by a listing
-- of the given contexts that were available but insufficient.
couldNotDeduce :: [UserGiven] -> (ThetaType, CtOrigin) -> SDoc
couldNotDeduce givens (wanteds, orig)
  = vcat [ header, vcat (pp_givens givens) ]
  where
    header = addArising orig (ptext (sLit "Could not deduce:") <+> pprTheta wanteds)
-- | One doc per given context: the first is introduced with
-- "from the context:", the rest with "or from:".  Empty input yields
-- an empty list.
pp_givens :: [UserGiven] -> [SDoc]
pp_givens givens
  = zipWith ppr_given heralds givens
  where
    heralds = ptext (sLit "from the context:")
            : repeat (ptext (sLit "or from:"))

    ppr_given herald (gs, skol_info, _, loc)
      = hang (herald <+> pprEvVarTheta gs)
           2 (sep [ ptext (sLit "bound by") <+> ppr skol_info
                  , ptext (sLit "at") <+> ppr loc ])
extraTyVarInfo :: ReportErrCtxt -> TcTyVar -> TcType -> SDoc
-- Add on extra info about skolem constants
-- NB: The types themselves are already tidied
extraTyVarInfo ctxt tv1 ty2
  = tv_extra tv1 $$ ty_extra ty2
  where
    implics = cec_encl ctxt

    -- If ty2 is itself a bare type variable, describe it as well
    ty_extra ty = case tcGetTyVar_maybe ty of
                    Just tv -> tv_extra tv
                    Nothing -> empty

    -- One line saying what kind of skolem tv is and where it is bound;
    -- meta variables and non-TcTyVars get nothing
    tv_extra tv | isTcTyVar tv, isSkolemTyVar tv
                , let pp_tv = quotes (ppr tv)
                = case tcTyVarDetails tv of
                    SkolemTv {}   -> pp_tv <+> pprSkol (getSkolemInfo implics tv) (getSrcLoc tv)
                    FlatSkol {}   -> pp_tv <+> ptext (sLit "is a flattening type variable")
                    RuntimeUnk {} -> pp_tv <+> ptext (sLit "is an interactive-debugger skolem")
                    MetaTv {}     -> empty
                | otherwise             -- Normal case
                = empty
suggestAddSig :: ReportErrCtxt -> TcType -> TcType -> SDoc
-- See Note [Suggest adding a type signature]
-- If either offending type is a skolem arising from an *inferred*
-- signature, suggest writing a declared signature for the binder(s)
-- involved; otherwise say nothing.
suggestAddSig ctxt ty1 ty2
  | null inferred_bndrs
  = empty
  | [bndr] <- inferred_bndrs
  = ptext (sLit "Possible fix: add a type signature for") <+> quotes (ppr bndr)
  | otherwise
  = ptext (sLit "Possible fix: add type signatures for some or all of") <+> (ppr inferred_bndrs)
  where
    -- nub: both types may lead back to the same binders
    inferred_bndrs = nub (get_inf ty1 ++ get_inf ty2)
    -- Binders whose inferred signature bound this skolem tyvar
    get_inf ty | Just tv <- tcGetTyVar_maybe ty
               , isTcTyVar tv, isSkolemTyVar tv
               , InferSkol prs <- getSkolemInfo (cec_encl ctxt) tv
               = map fst prs
               | otherwise
               = []
-- | Render a kind-mismatch message, showing each type annotated with
-- its kind.
kindErrorMsg :: TcType -> TcType -> SDoc  -- Types are already tidy
kindErrorMsg ty1 ty2
  = vcat [ ptext (sLit "Kind incompatibility when matching types:")
         , nest 2 (vcat (map with_kind [ty1, ty2])) ]
  where
    -- "ty :: kind" for one type
    with_kind ty = ppr ty <+> dcolon <+> ppr (typeKind ty)
--------------------
misMatchMsg :: Ct -> Maybe SwapFlag -> TcType -> TcType -> SDoc
-- Types are already tidy
-- If oriented then ty1 is actual, ty2 is expected
-- Produce the main "Couldn't match ..." message, assembling the two
-- herald phrases word-by-word so that wording adapts to the equality
-- relation (nominal vs representational) and to whether we know which
-- side is expected.
misMatchMsg ct oriented ty1 ty2
  | Just NotSwapped <- oriented
  = misMatchMsg ct (Just IsSwapped) ty2 ty1   -- normalise so ty1 is expected

  | otherwise  -- So now we have Nothing or (Just IsSwapped)
               -- For some reason we treat Nothing like IsSwapped
  = addArising orig $
    sep [ text herald1 <+> quotes (ppr ty1)
        , nest padding $
          text herald2 <+> quotes (ppr ty2)
        , sameOccExtra ty2 ty1 ]
  where
    herald1 = conc [ "Couldn't match"
                   , if is_repr     then "representation of" else ""
                   , if is_oriented then "expected"          else ""
                   , what ]
    herald2 = conc [ "with"
                   , if is_repr     then "that of"           else ""
                   , if is_oriented then ("actual " ++ what) else "" ]
    -- Indent the second line so the two quoted types line up
    padding = length herald1 - length herald2

    is_repr = case ctEqRel ct of { ReprEq -> True; NomEq -> False }
    is_oriented = isJust oriented

    orig = ctOrigin ct
    what | isKind ty1 = "kind"
         | otherwise  = "type"

    -- Join the non-empty phrases with single spaces
    conc :: [String] -> String
    conc = foldr1 add_space

    add_space :: String -> String -> String
    add_space s1 s2 | null s1   = s2
                    | null s2   = s1
                    | otherwise = s1 ++ (' ' : s2)
mkExpectedActualMsg :: Type -> Type -> CtOrigin -> Bool
                    -> (Maybe SwapFlag, SDoc)
-- NotSwapped means (actual, expected), IsSwapped is the reverse
-- The Bool says whether to also print synonym-expanded versions of the
-- types (-fprint-expanded-types); that only matters when the
-- constraint's types cannot be lined up with the origin's pair.
-- Only defined for TypeEqOrigin; any other origin is a panic.
mkExpectedActualMsg ty1 ty2
  (TypeEqOrigin { uo_actual = act, uo_expected = exp }) printExpanded
  | act `pickyEqType` ty1, exp `pickyEqType` ty2 = (Just NotSwapped,  empty)
  | exp `pickyEqType` ty1, act `pickyEqType` ty2 = (Just IsSwapped, empty)
  | otherwise = (Nothing, msg)
  where
    msg = vcat
      [ text "Expected type:" <+> ppr exp
      , text "  Actual type:" <+> ppr act
      , if printExpanded then expandedTys else empty
      ]

    -- Suppressed when expansion would not change either type
    expandedTys =
      ppUnless (expTy1 `pickyEqType` exp && expTy2 `pickyEqType` act) $ vcat
        [ text "Type synonyms expanded:"
        , text "Expected type:" <+> ppr expTy1
        , text "  Actual type:" <+> ppr expTy2
        ]

    (expTy1, expTy2) = expandSynonymsToMatch exp act

-- (panic string spelling is pre-existing; left untouched)
mkExpectedActualMsg _ _ _ _ = panic "mkExpectedAcutalMsg"
pickyEqType :: TcType -> TcType -> Bool
-- ^ Check when two types _look_ the same, _including_ synonyms.
-- So (pickyEqType String [Char]) returns False
pickyEqType ty1 ty2
  = eq env0 ty1 ty2
  where
    -- Rename-environment seeded with every free variable of both types,
    -- so bound variables can be compared up to alpha-equivalence
    env0 =
      mkRnEnv2 (mkInScopeSet (tyVarsOfType ty1 `unionVarSet` tyVarsOfType ty2))

    -- Structural equality; no synonym expansion anywhere
    eq env (TyVarTy a)       (TyVarTy b)       = rnOccL env a == rnOccR env b
    eq _   (LitTy l1)        (LitTy l2)        = l1 == l2
    eq env (ForAllTy a t1)   (ForAllTy b t2)   =
      eq env (tyVarKind a) (tyVarKind b) && eq (rnBndr2 env a b) t1 t2
    eq env (AppTy f1 a1)     (AppTy f2 a2)     = eq env f1 f2 && eq env a1 a2
    eq env (FunTy f1 a1)     (FunTy f2 a2)     = eq env f1 f2 && eq env a1 a2
    eq env (TyConApp c1 as1) (TyConApp c2 as2) = c1 == c2 && eqs env as1 as2
    eq _ _ _                                   = False

    -- Argument lists must match element-wise AND in length
    eqs env (a:as) (b:bs) = eq env a b && eqs env as bs
    eqs _   []     []     = True
    eqs _   _      _      = False
{-
Note [Expanding type synonyms to make types similar]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In type error messages, if -fprint-expanded-types is used, we want to expand
type synonyms to make expected and found types as similar as possible, but we
shouldn't expand types too much to make type messages even more verbose and
harder to understand. The whole point here is to make the difference in expected
and found types clearer.
`expandSynonymsToMatch` does this, it takes two types, and expands type synonyms
only as much as necessary. It should work like this:
Given two types t1 and t2:
* If they're already same, it shouldn't expand any type synonyms and
just return.
* If they're in form `C1 t1_1 .. t1_n` and `C2 t2_1 .. t2_m` (C1 and C2 are
type constructors), it should expand C1 and C2 if they're different type
synonyms. Then it should continue doing the same thing on the expanded types. If
C1 and C2 are the same, then we should apply the same procedure to the arguments
of C1 and the arguments of C2 to make them as similar as possible.
Most important thing here is to keep number of synonym expansions at
minimum. For example, if t1 is `T (T3, T5, Int)` and t2 is
`T (T5, T3, Bool)` where T5 = T4, T4 = T3, ..., T1 = X, we should return
`T (T3, T3, Int)` and `T (T3, T3, Bool)`.
In the implementation, we just search in all possible solutions for a solution
that does minimum amount of expansions. This leads to a complex algorithm: If
we have two synonyms like X_m = X_{m-1} = .. X and Y_n = Y_{n-1} = .. Y, where
X and Y are rigid types, we expand m * n times. But in practice it's not a
problem because deeply nested synonyms with no intervening rigid type
constructors are vanishingly rare.
-}
-- | Expand type synonyms in given types only enough to make them as equal as
-- possible. Returned types are the same in terms of used type synonyms.
--
-- To expand all synonyms, see 'Type.expandTypeSynonyms'.
--
-- See Note [Expanding type synonyms to make types similar] for the search
-- strategy: we look for the solution with the minimum number of expansions.
expandSynonymsToMatch :: Type -> Type -> (Type, Type)
expandSynonymsToMatch ty1 ty2 = (ty1_ret, ty2_ret)
  where
    (_, ty1_ret, ty2_ret) = go 0 ty1 ty2

    -- | Returns (number of synonym expansions done to make types similar,
    --            type synonym expanded version of first type,
    --            type synonym expanded version of second type)
    --
    -- Int argument is number of synonym expansions done so far.
    --
    -- NB: the guard of the first equation can fail, in which case
    -- matching falls through to the remaining equations.
    go :: Int -> Type -> Type -> (Int, Type, Type)
    go exps t1 t2
      | t1 `pickyEqType` t2 =
        -- Types are same, nothing to do
        (exps, t1, t2)

    go exps t1@(TyConApp tc1 tys1) t2@(TyConApp tc2 tys2)
      | tc1 == tc2 =
        -- Type constructors are same. They may be synonyms, but we don't
        -- expand further.  Each argument pair is minimised independently.
        let (exps', tys1', tys2') = unzip3 $ zipWith (go 0) tys1 tys2
         in (exps + sum exps', TyConApp tc1 tys1', TyConApp tc2 tys2')
      | otherwise =
        -- Try to expand type constructors
        case (tcView t1, tcView t2) of
          -- When only one of the constructors is a synonym, we just
          -- expand it and continue search
          (Just t1', Nothing) ->
            go (exps + 1) t1' t2
          (Nothing, Just t2') ->
            go (exps + 1) t1 t2'
          (Just t1', Just t2') ->
            -- Both constructors are synonyms, but they may be synonyms of
            -- each other. We just search for minimally expanded solution.
            -- See Note [Expanding type synonyms to make types similar].
            let sol1@(exp1, _, _) = go (exps + 1) t1' t2
                sol2@(exp2, _, _) = go (exps + 1) t1 t2'
             in if exp1 < exp2 then sol1 else sol2
          (Nothing, Nothing) ->
            -- None of the constructors are synonyms, nothing to do
            (exps, t1, t2)

    -- TyConApp on one side only: expand it if it is a synonym
    go exps t1@TyConApp{} t2
      | Just t1' <- tcView t1 = go (exps + 1) t1' t2
      | otherwise             = (exps, t1, t2)

    go exps t1 t2@TyConApp{}
      | Just t2' <- tcView t2 = go (exps + 1) t1 t2'
      | otherwise             = (exps, t1, t2)

    -- Matching structure: recurse on the components
    go exps (AppTy t1_1 t1_2) (AppTy t2_1 t2_2) =
      let (exps1, t1_1', t2_1') = go 0 t1_1 t2_1
          (exps2, t1_2', t2_2') = go 0 t1_2 t2_2
       in (exps + exps1 + exps2, mkAppTy t1_1' t1_2', mkAppTy t2_1' t2_2')

    go exps (FunTy t1_1 t1_2) (FunTy t2_1 t2_2) =
      let (exps1, t1_1', t2_1') = go 0 t1_1 t2_1
          (exps2, t1_2', t2_2') = go 0 t1_2 t2_2
       in (exps + exps1 + exps2, FunTy t1_1' t1_2', FunTy t2_1' t2_2')

    go exps (ForAllTy tv1 t1) (ForAllTy tv2 t2) =
      -- NOTE: We may have a bug here, but we just can't reproduce it easily.
      -- See D1016 comments for details and our attempts at producing a test
      -- case.
      let (exps1, t1', t2') = go exps t1 t2
       in (exps1, ForAllTy tv1 t1', ForAllTy tv2 t2')

    -- Mismatching structure: give up on this pair
    go exps t1 t2 = (exps, t1, t2)
sameOccExtra :: TcType -> TcType -> SDoc
-- See Note [Disambiguating (X ~ X) errors]
-- If the two types are headed by *different* type constructors that
-- nevertheless print with the same name (so the error would read
-- "T ~ T"), say where each one is defined.
sameOccExtra ty1 ty2
  | Just (tc1, _) <- tcSplitTyConApp_maybe ty1
  , Just (tc2, _) <- tcSplitTyConApp_maybe ty2
  , let n1 = tyConName tc1
        n2 = tyConName tc2
        same_occ = nameOccName n1                   == nameOccName n2
        same_pkg = modulePackageKey (nameModule n1) == modulePackageKey (nameModule n2)
  , n1 /= n2   -- Different Names
  , same_occ   -- but same OccName
  = ptext (sLit "NB:") <+> (ppr_from same_pkg n1 $$ ppr_from same_pkg n2)
  | otherwise
  = empty
  where
    -- Describe where nm comes from: an exact source location when we
    -- have one, otherwise its module (and package when that helps to
    -- disambiguate)
    ppr_from same_pkg nm
      | isGoodSrcSpan loc
      = hang (quotes (ppr nm) <+> ptext (sLit "is defined at"))
           2 (ppr loc)
      | otherwise  -- Imported things have an UnhelpfulSrcSpan
      = hang (quotes (ppr nm))
           2 (sep [ ptext (sLit "is defined in") <+> quotes (ppr (moduleName mod))
                  , ppUnless (same_pkg || pkg == mainPackageKey) $
                    nest 4 $ ptext (sLit "in package") <+> quotes (ppr pkg) ])
      where
        pkg = modulePackageKey mod
        mod = nameModule nm
        loc = nameSrcSpan nm
{-
Note [Suggest adding a type signature]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The OutsideIn algorithm rejects GADT programs that don't have a principal
type, and indeed some that do. Example:
data T a where
MkT :: Int -> T Int
f (MkT n) = n
Does this have type f :: T a -> a, or f :: T a -> Int?
The error that shows up tends to be an attempt to unify an
untouchable type variable. So suggestAddSig sees if the offending
type variable is bound by an *inferred* signature, and suggests
adding a declared signature instead.
This initially came up in Trac #8968, concerning pattern synonyms.
Note [Disambiguating (X ~ X) errors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
See Trac #8278
Note [Reporting occurs-check errors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Given (a ~ [a]), if 'a' is a rigid type variable bound by a user-supplied
type signature, then the best thing is to report that we can't unify
a with [a], because a is a skolem variable. That avoids the confusing
"occur-check" error message.
But nowadays when inferring the type of a function with no type signature,
even if there are errors inside, we still generalise its signature and
carry on. For example
f x = x:x
Here we will infer something like
f :: forall a. a -> [a]
with a suspended error of (a ~ [a]). So 'a' is now a skolem, but not
one bound by the programmer! Here we really should report an occurs check.
So isUserSkolem distinguishes the two.
Note [Non-injective type functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's very confusing to get a message like
Couldn't match expected type `Depend s'
against inferred type `Depend s1'
so mkTyFunInfoMsg adds:
NB: `Depend' is a type function, and may not be injective
Warn of loopy local equalities that were dropped.
************************************************************************
* *
Type-class errors
* *
************************************************************************
-}
mkDictErr :: ReportErrCtxt -> [Ct] -> TcM ErrMsg
-- Report unsolved class constraints, choosing between a no-instance
-- error and an overlapping-instances error; only one message is
-- produced per call because all the cts share a source location.
mkDictErr ctxt cts
  = ASSERT( not (null cts) )
    do { inst_envs <- tcGetInstEnvs
       ; let (ct1:_) = cts  -- ct1 just for its location
             min_cts = elim_superclasses cts
       ; lookups   <- mapM (lookup_cls_inst inst_envs) min_cts
       ; let (no_inst_cts, overlap_cts) = partition is_no_inst lookups

       -- Report definite no-instance errors,
       -- or (iff there are none) overlap errors
       -- But we report only one of them (hence 'head') because they all
       -- have the same source-location origin, to try avoid a cascade
       -- of error from one location
       ; (ctxt, err) <- mk_dict_err ctxt (head (no_inst_cts ++ overlap_cts))
       ; mkErrorMsgFromCt ctxt ct1 err }
  where
    no_givens = null (getUserGivens ctxt)

    -- "Definitely no instance": no givens, no matches, and no chance
    -- that unifying an ambiguous variable would turn up a match
    is_no_inst (ct, (matches, unifiers, _))
      = no_givens
        && null matches
        && (null unifiers || all (not . isAmbiguousTyVar) (varSetElems (tyVarsOfCt ct)))

    lookup_cls_inst inst_envs ct
      = do { tys_flat <- mapM quickFlattenTy tys
             -- Note [Flattening in error message generation]
           ; return (ct, lookupInstEnv True inst_envs clas tys_flat) }
      where
        (clas, tys) = getClassPredTys (ctPred ct)

    -- When simplifying [W] Ord (Set a), we need
    --    [W] Eq a, [W] Ord a
    -- but we really only want to report the latter
    elim_superclasses cts
      = filter (\ct -> any (eqPred (ctPred ct)) min_preds) cts
      where
        min_preds = mkMinimalBySCs (map ctPred cts)
mk_dict_err :: ReportErrCtxt -> (Ct, ClsInstLookupResult)
            -> TcM (ReportErrCtxt, SDoc)
-- Report an overlap error if this class constraint results
-- from an overlap (returning Left clas), otherwise return (Right pred)
-- Three outcomes: no matching instance at all; ordinary overlapping
-- instances; or a Safe-Haskell-only overlap.
mk_dict_err ctxt (ct, (matches, unifiers, unsafe_overlapped))
  | null matches  -- No matches but perhaps several unifiers
  = do { (ctxt, binds_msg, ct) <- relevantBindings True ctxt ct
       ; return (ctxt, cannot_resolve_msg ct binds_msg) }

  | null unsafe_overlapped   -- Some matches => overlap errors
  = return (ctxt, overlap_msg)

  | otherwise
  = return (ctxt, safe_haskell_msg)
  where
    orig          = ctOrigin ct
    pred          = ctPred ct
    (clas, tys)   = getClassPredTys pred
    ispecs        = [ispec | (ispec, _) <- matches]
    unsafe_ispecs = [ispec | (ispec, _) <- unsafe_overlapped]
    givens        = getUserGivens ctxt
    all_tyvars    = all isTyVarTy tys

    -- No instance found: report the missing instance, then pile on any
    -- useful hints (ambiguity, relevant bindings, potential instances,
    -- possible fixes)
    cannot_resolve_msg :: Ct -> SDoc -> SDoc
    cannot_resolve_msg ct binds_msg
      = vcat [ addArising orig no_inst_msg
             , nest 2 extra_note
             , vcat (pp_givens givens)
             , ppWhen (has_ambig_tvs && not (null unifiers && null givens))
               (vcat [ ambig_msg, binds_msg, potential_msg ])
             , show_fixes (add_to_ctxt_fixes has_ambig_tvs ++ drv_fixes) ]
      where
        (has_ambig_tvs, ambig_msg) = mkAmbigMsg ct
        orig = ctOrigin ct

        -- List the instances that *could* match if ambiguous variables
        -- were instantiated differently
        potential_msg
          = ppWhen (not (null unifiers) && want_potential orig) $
            hang (if isSingleton unifiers
                  then ptext (sLit "Note: there is a potential instance available:")
                  else ptext (sLit "Note: there are several potential instances:"))
               2 (ppr_insts (sortBy fuzzyClsInstCmp unifiers))

        -- Report "potential instances" only when the constraint arises
        -- directly from the user's use of an overloaded function
        want_potential (TypeEqOrigin {}) = False
        want_potential _                 = True

        -- Suggest adding the predicate to an enclosing context, when
        -- that could plausibly help
        add_to_ctxt_fixes has_ambig_tvs
          | not has_ambig_tvs && all_tyvars
          , (orig:origs) <- usefulContext ctxt pred
          = [sep [ ptext (sLit "add") <+> pprParendType pred
                   <+> ptext (sLit "to the context of")
                 , nest 2 $ ppr_skol orig $$
                            vcat [ ptext (sLit "or") <+> ppr_skol orig
                                 | orig <- origs ] ] ]
          | otherwise = []

        ppr_skol (PatSkol dc _) = ptext (sLit "the data constructor") <+> quotes (ppr dc)
        ppr_skol skol_info      = ppr skol_info

        no_inst_msg
          | null givens && null matches = ptext (sLit "No instance for")  <+> pprParendType pred
          | otherwise                   = ptext (sLit "Could not deduce") <+> pprParendType pred

        extra_note | any isFunTy (filterOut isKind tys)
                   = ptext (sLit "(maybe you haven't applied a function to enough arguments?)")
                   | className clas == typeableClassName  -- Avoid mysterious "No instance for (Typeable T)
                   , [_,ty] <- tys                        -- Look for (Typeable (k->*) (T k))
                   , Just (tc,_) <- tcSplitTyConApp_maybe ty
                   , not (isTypeFamilyTyCon tc)
                   = hang (ptext (sLit "GHC can't yet do polykinded"))
                        2 (ptext (sLit "Typeable") <+>
                           parens (ppr ty <+> dcolon <+> ppr (typeKind ty)))
                   | otherwise
                   = empty

        -- Deriving-related origins get a standalone-deriving hint
        drv_fixes = case orig of
                       DerivOrigin          -> [drv_fix]
                       DerivOriginDC {}     -> [drv_fix]
                       DerivOriginCoerce {} -> [drv_fix]
                       _                    -> []

        drv_fix = hang (ptext (sLit "use a standalone 'deriving instance' declaration,"))
                     2 (ptext (sLit "so you can specify the instance context yourself"))

    -- Normal overlap error
    overlap_msg
      = ASSERT( not (null matches) )
        vcat [ addArising orig (ptext (sLit "Overlapping instances for")
                                <+> pprType (mkClassPred clas tys))

             , ppUnless (null matching_givens) $
               sep [ptext (sLit "Matching givens (or their superclasses):")
                   , nest 2 (vcat matching_givens)]

             , sep [ptext (sLit "Matching instances:"),
                    nest 2 (vcat [pprInstances ispecs, pprInstances unifiers])]

             , ppWhen (null matching_givens && isSingleton matches && null unifiers) $
               -- Intuitively, some given matched the wanted in their
               -- flattened or rewritten (from given equalities) form
               -- but the matcher can't figure that out because the
               -- constraints are non-flat and non-rewritten so we
               -- simply report back the whole given
               -- context. Accelerate Smart.hs showed this problem.
               sep [ ptext (sLit "There exists a (perhaps superclass) match:")
                   , nest 2 (vcat (pp_givens givens))]

             , ppWhen (isSingleton matches) $
               parens (vcat [ ptext (sLit "The choice depends on the instantiation of") <+>
                              quotes (pprWithCommas ppr (varSetElems (tyVarsOfTypes tys)))
                            , ppWhen (null (matching_givens)) $
                              vcat [ ptext (sLit "To pick the first instance above, use IncoherentInstances")
                                   , ptext (sLit "when compiling the other instance declarations")]
                            ])]
      where
        givens = getUserGivens ctxt
        matching_givens = mapMaybe matchable givens

        -- Does this given (or a superclass of it) match the wanted
        -- class at these types?
        matchable (evvars,skol_info,_,loc)
          = case ev_vars_matching of
                 [] -> Nothing
                 _  -> Just $ hang (pprTheta ev_vars_matching)
                                 2 (sep [ ptext (sLit "bound by") <+> ppr skol_info
                                        , ptext (sLit "at") <+> ppr loc])
          where ev_vars_matching = filter ev_var_matches (map evVarPred evvars)
                ev_var_matches ty = case getClassPredTys_maybe ty of
                   Just (clas', tys')
                     | clas' == clas
                     , Just _ <- tcMatchTys (tyVarsOfTypes tys) tys tys'
                     -> True
                     | otherwise
                     -> any ev_var_matches (immSuperClasses clas' tys')
                   Nothing -> False

    -- Overlap error because of Safe Haskell (first
    -- match should be the most specific match)
    safe_haskell_msg
      = ASSERT( length matches == 1 && not (null unsafe_ispecs) )
        vcat [ addArising orig (ptext (sLit "Unsafe overlapping instances for")
                                <+> pprType (mkClassPred clas tys))
             , sep [ptext (sLit "The matching instance is:"),
                    nest 2 (pprInstance $ head ispecs)]
             , vcat [ ptext $ sLit "It is compiled in a Safe module and as such can only"
                    , ptext $ sLit "overlap instances from the same module, however it"
                    , ptext $ sLit "overlaps the following instances from different modules:"
                    , nest 2 (vcat [pprInstances $ unsafe_ispecs])
                    ]
             ]
-- | Walk the enclosing implications from the innermost outwards,
-- collecting the SkolemInfos of contexts to which the unsolved
-- predicate could plausibly be added.  We stop as soon as a context
-- binds a variable free in the predicate, and we skip (but continue
-- past) contexts where adding a constraint makes no sense.
usefulContext :: ReportErrCtxt -> TcPredType -> [SkolemInfo]
usefulContext ctxt pred
  = pick (cec_encl ctxt)
  where
    pred_tvs = tyVarsOfType pred

    pick [] = []
    pick (ic : ics)
      | implausible ic = deeper
      | otherwise      = ic_info ic : deeper
      where
        -- Stop when the context binds a variable free in the predicate
        deeper | any (`elemVarSet` pred_tvs) (ic_skols ic) = []
               | otherwise                                 = pick ics

    -- Never suggest a context with no skolems, nor an *inferred* type
    -- signature (we must not suggest adding constraints to those)
    implausible ic = null (ic_skols ic) || inferred_sig (ic_info ic)

    inferred_sig (SigSkol (InfSigCtxt {}) _) = True
    inferred_sig _                           = False
-- | Render a list of suggested fixes under a single "Possible fix:"
-- banner, joining alternatives with "or"; empty input yields no output.
show_fixes :: [SDoc] -> SDoc
show_fixes fixes = case fixes of
  []          -> empty
  first : alts -> sep [ ptext (sLit "Possible fix:")
                      , nest 2 (vcat (first : map (ptext (sLit "or") <+>) alts)) ]
-- | Show at most three instances, followed by a "...plus N others"
-- line when some were elided.
ppr_insts :: [ClsInst] -> SDoc
ppr_insts insts
  = pprInstances shown $$ elided_note
  where
    (shown, elided) = splitAt 3 insts
    elided_note
      | null elided = empty
      | otherwise   = ptext (sLit "...plus")
                      <+> speakNOf (length elided) (ptext (sLit "other"))
----------------------
quickFlattenTy :: TcType -> TcM TcType
-- See Note [Flattening in error message generation]
-- Replace saturated type-family applications with fresh meta type
-- variables, so that instance lookup during error reporting behaves
-- like the (flattening) constraint solver did.
quickFlattenTy ty | Just ty' <- tcView ty = quickFlattenTy ty'
quickFlattenTy ty@(TyVarTy {})  = return ty
quickFlattenTy ty@(ForAllTy {}) = return ty
     -- See Note [Quick-flatten polytypes]:
     -- don't flatten because of the danger of removing a bound variable
quickFlattenTy ty@(LitTy {})    = return ty
quickFlattenTy (AppTy ty1 ty2) = do { fy1 <- quickFlattenTy ty1
                                    ; fy2 <- quickFlattenTy ty2
                                    ; return (AppTy fy1 fy2) }
quickFlattenTy (FunTy ty1 ty2) = do { fy1 <- quickFlattenTy ty1
                                    ; fy2 <- quickFlattenTy ty2
                                    ; return (FunTy fy1 fy2) }
quickFlattenTy (TyConApp tc tys)
    | not (isTypeFamilyTyCon tc)
    = do { fys <- mapM quickFlattenTy tys
         ; return (TyConApp tc fys) }
    | otherwise
    = do { let (funtys,resttys) = splitAt (tyConArity tc) tys
                 -- Ignore the arguments of the type family funtys
         ; v <- newMetaTyVar TauTv (typeKind (TyConApp tc funtys))
         ; flat_resttys <- mapM quickFlattenTy resttys
         ; return (foldl AppTy (mkTyVarTy v) flat_resttys) }
{-
Note [Flattening in error message generation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider (C (Maybe (F x))), where F is a type function, and we have
instances
C (Maybe Int) and C (Maybe a)
Since (F x) might turn into Int, this is an overlap situation, and
indeed (because of flattening) the main solver will have refrained
from solving. But by the time we get to error message generation, we've
un-flattened the constraint. So we must *re*-flatten it before looking
up in the instance environment, lest we only report one matching
instance when in fact there are two.
Re-flattening is pretty easy, because we don't need to keep track of
evidence. We don't re-use the code in TcCanonical because that's in
the TcS monad, and we are in TcM here.
Note [Quick-flatten polytypes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we see C (Ix a => blah) or C (forall a. blah) we simply refrain from
flattening any further. After all, there can be no instance declarations
that match such things. And flattening under a for-all is problematic
anyway; consider C (forall a. F a)
Note [Suggest -fprint-explicit-kinds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It can be terribly confusing to get an error message like (Trac #9171)
Couldn't match expected type ‘GetParam Base (GetParam Base Int)’
with actual type ‘GetParam Base (GetParam Base Int)’
The reason may be that the kinds don't match up. Typically you'll get
more useful information, but not when it's as a result of ambiguity.
This test suggests -fprint-explicit-kinds when all the ambiguous type
variables are kind variables.
-}
-- | Decide whether the constraint mentions ambiguous type/kind
-- variables; if so, return True plus a message describing them
-- (suggesting -fprint-explicit-kinds when only kind variables are
-- ambiguous).
mkAmbigMsg :: Ct -> (Bool, SDoc)
mkAmbigMsg ct
  | null ambig_tkvs = (False, empty)
  | otherwise = (True, msg)
  where
    ambig_tkv_set = filterVarSet isAmbiguousTyVar (tyVarsOfCt ct)
    ambig_tkvs = varSetElems ambig_tkv_set
    (ambig_kvs, ambig_tvs) = partition isKindVar ambig_tkvs

    msg | any isRuntimeUnkSkol ambig_tkvs -- See Note [Runtime skolems]
        = vcat [ ptext (sLit "Cannot resolve unknown runtime type") <> plural ambig_tvs
                 <+> pprQuotedList ambig_tvs
               , ptext (sLit "Use :print or :force to determine these types")]
        | not (null ambig_tvs)
        = pp_ambig (ptext (sLit "type")) ambig_tvs
        | otherwise -- All ambiguous kind variables; suggest -fprint-explicit-kinds
        = vcat [ pp_ambig (ptext (sLit "kind")) ambig_kvs
               , sdocWithDynFlags suggest_explicit_kinds ]

    pp_ambig what tkvs
      = ptext (sLit "The") <+> what <+> ptext (sLit "variable") <> plural tkvs
        <+> pprQuotedList tkvs <+> is_or_are tkvs <+> ptext (sLit "ambiguous")

    is_or_are [_] = text "is"
    is_or_are _ = text "are"

    suggest_explicit_kinds dflags -- See Note [Suggest -fprint-explicit-kinds]
      | gopt Opt_PrintExplicitKinds dflags = empty
      | otherwise = ptext (sLit "Use -fprint-explicit-kinds to see the kind arguments")
-- | Describe how/where a skolem type variable was bound, for error
-- messages (the text continues a sentence naming the variable).
pprSkol :: SkolemInfo -> SrcLoc -> SDoc
pprSkol UnkSkol _
  = ptext (sLit "is an unknown type variable")
pprSkol skol_info tv_loc
  = sep [ ptext (sLit "is a rigid type variable bound by"),
          sep [ppr skol_info, ptext (sLit "at") <+> ppr tv_loc]]
-- | Get the skolem info for a type variable from the (innermost)
-- implication constraint that binds it; panics when no enclosing
-- implication lists the variable among its skolems.
getSkolemInfo :: [Implication] -> TcTyVar -> SkolemInfo
getSkolemInfo implications tv = search implications
  where
    search [] = pprPanic "No skolem info:" (ppr tv)
    search (implic : outer)
      | tv `elem` ic_skols implic = ic_info implic
      | otherwise = search outer
-----------------------
-- relevantBindings looks at the value environment and finds values whose
-- types mention any of the offending type variables. It has to be
-- careful to zonk the Id's type first, so it has to be in the monad.
-- We must be careful to pass it a zonked type variable, too.
--
-- We always remove closed top-level bindings, though,
-- since they are never relevant (cf Trac #8233)
relevantBindings :: Bool -- True <=> filter by tyvar; False <=> no filtering
                         -- See Trac #8191
                 -> ReportErrCtxt -> Ct
                 -> TcM (ReportErrCtxt, SDoc, Ct)
-- Also returns the zonked and tidied CtOrigin of the constraint
relevantBindings want_filtering ctxt ct
  = do { dflags <- getDynFlags
       ; (env1, tidy_orig) <- zonkTidyOrigin (cec_tidy ctxt) (ctLocOrigin loc)
       ; let ct_tvs = tyVarsOfCt ct `unionVarSet` extra_tvs
             -- For *kind* errors, report the relevant bindings of the
             -- enclosing *type* equality, because that's more useful for the programmer
             extra_tvs = case tidy_orig of
                           KindEqOrigin t1 t2 _ -> tyVarsOfTypes [t1,t2]
                           _ -> emptyVarSet
       ; traceTc "relevantBindings" $
           vcat [ ppr ct
                , pprCtOrigin (ctLocOrigin loc)
                , ppr ct_tvs
                , ppr [id | TcIdBndr id _ <- tcl_bndrs lcl_env] ]
       ; (tidy_env', docs, discards)
           <- go env1 ct_tvs (maxRelevantBinds dflags)
                 emptyVarSet [] False
                 (tcl_bndrs lcl_env)
       -- tcl_bndrs has the innermost bindings first,
       -- which are probably the most relevant ones
       ; let doc = ppUnless (null docs) $
                   hang (ptext (sLit "Relevant bindings include"))
                      2 (vcat docs $$ ppWhen discards discardMsg)
             -- Put a zonked, tidied CtOrigin into the Ct
             loc' = setCtLocOrigin loc tidy_orig
             ct' = setCtLoc ct loc'
             ctxt' = ctxt { cec_tidy = tidy_env' }
       ; return (ctxt', doc, ct') }
  where
    ev = ctEvidence ct
    loc = ctEvLoc ev
    lcl_env = ctLocEnv loc

    -- Has the -fmax-relevant-binds budget been exhausted?
    -- (Nothing means "no limit", i.e. -fno-max-relevant-binds.)
    run_out :: Maybe Int -> Bool
    run_out Nothing = False
    run_out (Just n) = n <= 0

    -- Spend one unit of fuel (no-op when unlimited).
    dec_max :: Maybe Int -> Maybe Int
    dec_max = fmap (\n -> n - 1)

    -- Walk the local binders innermost-first, collecting docs for
    -- bindings whose (zonked, tidied) types mention the offending
    -- type variables, until the fuel runs out.
    go :: TidyEnv -> TcTyVarSet -> Maybe Int -> TcTyVarSet -> [SDoc]
       -> Bool -- True <=> some filtered out due to lack of fuel
       -> [TcIdBinder]
       -> TcM (TidyEnv, [SDoc], Bool) -- The bool says if we filtered any out
                                      -- because of lack of fuel
    go tidy_env _ _ _ docs discards []
      = return (tidy_env, reverse docs, discards)
    go tidy_env ct_tvs n_left tvs_seen docs discards (TcIdBndr id top_lvl : tc_bndrs)
      = do { (tidy_env', tidy_ty) <- zonkTidyTcType tidy_env (idType id)
           ; traceTc "relevantBindings 1" (ppr id <+> dcolon <+> ppr tidy_ty)
           ; let id_tvs = tyVarsOfType tidy_ty
                 doc = sep [ pprPrefixOcc id <+> dcolon <+> ppr tidy_ty
                           , nest 2 (parens (ptext (sLit "bound at")
                                             <+> ppr (getSrcLoc id)))]
                 new_seen = tvs_seen `unionVarSet` id_tvs
           ; if (want_filtering && not opt_PprStyle_Debug
                 && id_tvs `disjointVarSet` ct_tvs)
             -- We want to filter out this binding anyway
             -- so discard it silently
             then go tidy_env ct_tvs n_left tvs_seen docs discards tc_bndrs
             else if isTopLevel top_lvl && not (isNothing n_left)
             -- It's a top-level binding and we have not specified
             -- -fno-max-relevant-bindings, so discard it silently
             then go tidy_env ct_tvs n_left tvs_seen docs discards tc_bndrs
             else if run_out n_left && id_tvs `subVarSet` tvs_seen
             -- We've run out of n_left fuel and this binding only
             -- mentions already-seen type variables, so discard it
             then go tidy_env ct_tvs n_left tvs_seen docs True tc_bndrs
             -- Keep this binding, decrement fuel
             else go tidy_env' ct_tvs (dec_max n_left) new_seen (doc:docs) discards tc_bndrs }
-- | Appended to the relevant-bindings list when some bindings were
-- suppressed by the -fmax-relevant-binds limit.
discardMsg :: SDoc
discardMsg = ptext . sLit $ "(Some bindings suppressed; use -fmax-relevant-binds=N or -fno-max-relevant-binds)"
-----------------------
-- | Warn (under -fwarn-type-defaults) that the given wanted
-- constraints were defaulted to @default_ty@, tidying them first so
-- the message uses readable variable names.
warnDefaulting :: [Ct] -> Type -> TcM ()
warnDefaulting wanteds default_ty
  = do { warn_default <- woptM Opt_WarnTypeDefaults
       ; env0 <- tcInitTidyEnv
       ; let tidy_env = tidyFreeTyVars env0 $
                        foldr (unionVarSet . tyVarsOfCt) emptyVarSet wanteds
             tidy_wanteds = map (tidyCt tidy_env) wanteds
             (loc, ppr_wanteds) = pprWithArising tidy_wanteds
             warn_msg = hang (ptext (sLit "Defaulting the following constraint(s) to type")
                              <+> quotes (ppr default_ty))
                           2 ppr_wanteds
       ; setCtLocM loc $ warnTc warn_default warn_msg }
{-
Note [Runtime skolems]
~~~~~~~~~~~~~~~~~~~~~~
We want to give a reasonably helpful error message for ambiguity
arising from *runtime* skolems in the debugger. These
are created in RtClosureInspect.zonkRTTIType.
************************************************************************
* *
Error from the canonicaliser
These ones are called *during* constraint simplification
* *
************************************************************************
-}
-- | Fail with a "Reduction stack overflow" error at the given
-- constraint location, showing the (zonked and tidied) type that was
-- being simplified when the depth limit was hit.
solverDepthErrorTcS :: CtLoc -> TcType -> TcM a
solverDepthErrorTcS loc ty
  = setCtLocM loc $
    do { ty <- zonkTcType ty
       ; env0 <- tcInitTidyEnv
       ; let tidy_env = tidyFreeTyVars env0 (tyVarsOfType ty)
             tidy_ty = tidyType tidy_env ty
             msg
               = vcat [ text "Reduction stack overflow; size =" <+> ppr depth
                      , hang (text "When simplifying the following type:")
                           2 (ppr tidy_ty)
                      , note ]
       ; failWithTcM (tidy_env, msg) }
  where
    depth = ctLocDepth loc
    note = vcat
      [ text "Use -freduction-depth=0 to disable this check"
      , text "(any upper bound you could choose might fail unpredictably with"
      , text " minor updates to GHC, so disabling the check is recommended if"
      , text " you're sure that type checking should terminate)" ]
| TomMD/ghc | compiler/typecheck/TcErrors.hs | bsd-3-clause | 81,630 | 1 | 21 | 25,035 | 16,475 | 8,415 | 8,060 | -1 | -1 |
module Tests.Arbitrary where
import Tests.Arbitrary.Xml ()
import Tests.Arbitrary.Xmpp ()
-- $derive makeArbitrary IQRequestType
| Philonous/pontarius-xmpp | tests/Tests/Arbitrary.hs | bsd-3-clause | 132 | 0 | 4 | 16 | 25 | 17 | 8 | 3 | 0 |
-- | This implements a watchdog process. It calls mueval with all the
-- user-specified arguments, sleeps, and then if mueval is still running
-- kills it.
-- Even an out-of-control mueval will have trouble avoiding 'terminateProcess'.
-- Note that it's too difficult to parse the user arguments to get the timeout,
-- so we specify it as a constant which is a little more generous than the default.
module Main where
import Control.Concurrent (forkIO, threadDelay)
import System.Environment (getArgs)
import System.Exit (exitWith, ExitCode(ExitFailure))
import System.Posix.Signals (signalProcess)
import System.Process (getProcessExitCode, runProcess, terminateProcess, waitForProcess)
import System.Process.Internals (withProcessHandle, ProcessHandle__(OpenHandle))
main :: IO ()
-- Launch mueval-core with the user's arguments, start a watchdog
-- timer, and make sure mueval-core cannot outlive it.
main = do args <- getArgs
          hdl <- runProcess "mueval-core" args Nothing Nothing Nothing Nothing Nothing
          -- Watchdog thread: after the delay, kill mueval-core if it
          -- is still running.
          -- NOTE(review): 7 * 700000 us is ~4.9s; the module header
          -- claims the constant is "a little more generous than the
          -- default" timeout -- confirm this actually exceeds
          -- mueval-core's default.
          _ <- forkIO $ do
            threadDelay (7 * 700000)
            status <- getProcessExitCode hdl
            case status of
              -- Still running: terminate, then SIGKILL the raw pid as
              -- well, since an out-of-control eval may survive
              -- terminateProcess.
              Nothing -> do terminateProcess hdl
                            _ <- withProcessHandle hdl (\x -> case x of
                                   OpenHandle pid -> signalProcess 9 pid >> return (undefined, undefined)
                                   _ -> return (undefined,undefined))
                            exitWith (ExitFailure 1)
              Just a -> exitWith a
          -- Main thread: propagate mueval-core's own exit status.
          stat <- waitForProcess hdl
          exitWith stat
| bitemyapp/mueval | watchdog.hs | bsd-3-clause | 1,633 | 0 | 25 | 559 | 301 | 158 | 143 | 22 | 3 |
-- | Utility functions
module Language.Haskell.Ghcid.Util(
dropPrefixRepeatedly,
chunksOfWord,
outWith, outStrLn, outStr,
allGoodMessage,
getModTime, getModTimeResolution
) where
import Control.Concurrent.Extra
import System.Time.Extra
import System.IO.Unsafe
import System.IO.Extra
import System.FilePath
import Data.List.Extra
import Data.Char
import Data.Time.Clock
import System.IO.Error
import System.Directory
import Control.Exception
import Control.Monad.Extra
import Control.Applicative
import Prelude
-- | Strip every leading occurrence of @pre@ from the list. An empty
-- prefix leaves the list unchanged (stripping @[]@ repeatedly would
-- never terminate).
dropPrefixRepeatedly :: Eq a => [a] -> [a] -> [a]
dropPrefixRepeatedly [] s = s
dropPrefixRepeatedly pre s =
    case stripPrefix pre s of
      Nothing -> s
      Just rest -> dropPrefixRepeatedly pre rest
{-# NOINLINE lock #-}
-- The global lock serialising console output. NOINLINE is required so
-- the unsafePerformIO runs exactly once, yielding a single shared Lock.
lock :: Lock
lock = unsafePerformIO newLock
-- | Run an output action while holding the global console 'lock', so
-- concurrent writers cannot interleave their output.
outWith :: IO a -> IO a
outWith = withLock lock
-- | Thread-safe 'putStr', serialised through 'outWith'.
outStr :: String -> IO ()
outStr = outWith . putStr
-- | Thread-safe 'putStrLn', serialised through the global output lock.
outStrLn :: String -> IO ()
outStrLn = outStr . (++ "\n")
-- | The message to show when no errors have been reported.
allGoodMessage :: String
allGoodMessage = "All good"
-- | Like chunksOf, but deal with words up to some gap: each chunk is
-- at most @mx@ characters, and if the trailing partial word of a
-- chunk is no longer than @gap@ characters it flows onto the next
-- chunk instead of being split.
chunksOfWord :: Int -> Int -> String -> [String]
chunksOfWord mx gap = repeatedly splitChunk
  where
    splitChunk str
      | null rest = (chunk, [])
      | length lastWord <= gap = (body, lastWord ++ rest)
      | otherwise = (chunk, dropWhile isSpace rest)
      where
        (chunk, rest) = splitAt mx str
        (body, lastWord) = breakEnd isSpace chunk
-- | Given a 'FilePath' return either 'Nothing' (file does not exist)
-- or 'Just' (the modification time).
-- Querying and handling does-not-exist in one step avoids racing a
-- concurrent delete; any other IOError still propagates.
getModTime :: FilePath -> IO (Maybe UTCTime)
getModTime file = handleJust
    (\e -> if isDoesNotExistError e then Just () else Nothing)
    (\_ -> return Nothing)
    (Just <$> getModificationTime file)
-- | Get the smallest difference that can be reported by two modification
-- times. The value is computed once per run; see
-- 'getModTimeResolutionCache'.
getModTimeResolution :: IO Seconds
getModTimeResolution = return getModTimeResolutionCache
{-# NOINLINE getModTimeResolutionCache #-}
-- Cache the result so it is only computed once per run (NOINLINE keeps
-- the unsafePerformIO from being duplicated).
getModTimeResolutionCache :: Seconds
getModTimeResolutionCache = unsafePerformIO $ withTempDir $ \dir -> do
    let file = dir </> "calibrate.txt"
    -- Calibrate by rewriting a temp file until its recorded
    -- modification time changes, and timing how long that takes.
    -- With 10 measurements this can get a bit slow, see Shake issue tracker #451;
    -- if times round to a second, the 1st measurement will be a fraction but
    -- the 2nd will be a full second.
    mtime <- fmap maximum $ forM [1..3] $ \i -> fmap fst $ duration $ do
        writeFile file $ show i
        t1 <- getModificationTime file
        -- Keep rewriting until the observed modification time moves.
        flip loopM 0 $ \j -> do
            writeFile file $ show (i,j)
            t2 <- getModificationTime file
            return $ if t1 == t2 then Left $ j+1 else Right ()
    putStrLn $ "Longest file modification time lag was " ++ show (ceiling (mtime * 1000)) ++ "ms"
    -- Add a little bit of safety, but if it's really quick, don't make it much slower.
    return $ mtime + min 0.1 mtime
| JPMoresmau/ghcid | src/Language/Haskell/Ghcid/Util.hs | bsd-3-clause | 2,978 | 0 | 22 | 631 | 753 | 401 | 352 | 60 | 3 |
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "src/System/IO/Unsafe/Compat.hs" #-}
{-# LANGUAGE CPP, NoImplicitPrelude #-}
module System.IO.Unsafe.Compat (
module Base
, unsafeFixIO
, unsafeDupablePerformIO
) where
import System.IO.Unsafe as Base
| phischu/fragnix | tests/packages/scotty/System.IO.Unsafe.Compat.hs | bsd-3-clause | 292 | 0 | 4 | 82 | 31 | 23 | 8 | 8 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Hpc
-- Copyright : Thomas Tuegel 2011
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This module provides functions for locating various HPC-related paths and
-- a function for adding the necessary options to a PackageDescription to
-- build test suites with HPC enabled.
{- All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Simple.Hpc
( enableCoverage
, htmlDir
, tixDir
, tixFilePath
, markupPackage
, markupTest
) where
import Control.Monad ( when )
import Distribution.Compiler ( CompilerFlavor(..) )
import Distribution.ModuleName ( main )
import Distribution.PackageDescription
( BuildInfo(..)
, Library(..)
, PackageDescription(..)
, TestSuite(..)
, testModules
)
import Distribution.Simple.LocalBuildInfo ( LocalBuildInfo(..) )
import Distribution.Simple.Program
( hpcProgram
, requireProgramVersion
)
import Distribution.Simple.Program.Hpc ( markup, union )
import Distribution.Simple.Utils ( notice )
import Distribution.Version ( anyVersion )
import Distribution.Text
import Distribution.Verbosity ( Verbosity() )
import System.Directory ( createDirectoryIfMissing, doesFileExist )
import System.FilePath
-- -------------------------------------------------------------------------
-- Haskell Program Coverage
-- | Conditionally enable Haskell Program Coverage by adding the necessary
-- GHC options to a PackageDescription.
--
-- TODO: do this differently in the build stage by constructing local build
-- info, not by modifying the original PackageDescription.
enableCoverage :: Bool -- ^ Enable coverage?
               -> String -- ^ \"dist/\" prefix
               -> PackageDescription
               -> PackageDescription
enableCoverage False _ x = x
enableCoverage True distPref p =
    p { library = fmap enableLibCoverage (library p)
      , testSuites = map enableTestCoverage (testSuites p)
      }
  where
    -- Prepend the HPC flags to a component's existing GHC options,
    -- leaving options for other compilers untouched.
    enableBICoverage name oldBI =
        let oldOptions = options oldBI
            oldGHCOpts = lookup GHC oldOptions
            newGHCOpts = case oldGHCOpts of
                           Just xs -> (GHC, hpcOpts ++ xs)
                           _ -> (GHC, hpcOpts)
            -- Drop the old GHC entry: its options are already merged
            -- into newGHCOpts. (Filtering with (== GHC) here kept the
            -- old entry -- duplicating the GHC flags, since option
            -- lookups concatenate all matching entries -- and silently
            -- discarded every other compiler's options.)
            newOptions = newGHCOpts : filter ((/= GHC) . fst) oldOptions
            hpcOpts = ["-fhpc", "-hpcdir", mixDir distPref name]
        in oldBI { options = newOptions }
    enableLibCoverage l =
        l { libBuildInfo = enableBICoverage (display $ package p)
                                            (libBuildInfo l)
          }
    enableTestCoverage t =
        t { testBuildInfo = enableBICoverage (testName t) (testBuildInfo t) }
-- | Directory under the dist prefix that holds all HPC output.
hpcDir :: FilePath -- ^ \"dist/\" prefix
       -> FilePath -- ^ Directory containing component's HPC .mix files
hpcDir = (</> "hpc")
-- | Directory holding a named component's HPC .mix files
-- (@\<dist\>\/hpc\/mix\/\<name\>@).
mixDir :: FilePath -- ^ \"dist/\" prefix
       -> FilePath -- ^ Component name
       -> FilePath -- ^ Directory containing test suite's .mix files
mixDir distPref name = distPref </> "hpc" </> "mix" </> name
-- | Directory holding a named component's HPC .tix files
-- (@\<dist\>\/hpc\/tix\/\<name\>@).
tixDir :: FilePath -- ^ \"dist/\" prefix
       -> FilePath -- ^ Component name
       -> FilePath -- ^ Directory containing test suite's .tix files
tixDir distPref name = distPref </> "hpc" </> "tix" </> name
-- | Path to the .tix file containing a test suite's sum statistics
-- (@\<dist\>\/hpc\/tix\/\<name\>\/\<name\>.tix@).
tixFilePath :: FilePath -- ^ \"dist/\" prefix
            -> FilePath -- ^ Component name
            -> FilePath -- ^ Path to test suite's .tix file
tixFilePath distPref name = distPref </> "hpc" </> "tix" </> name </> (name <.> "tix")
-- | Directory holding a named component's HTML coverage markup
-- (@\<dist\>\/hpc\/html\/\<name\>@).
htmlDir :: FilePath -- ^ \"dist/\" prefix
        -> FilePath -- ^ Component name
        -> FilePath -- ^ Path to test suite's HTML markup directory
htmlDir distPref name = distPref </> "hpc" </> "html" </> name
-- | Generate the HTML markup for a test suite. A no-op if the suite's
-- .tix file does not exist (e.g. the suite was never run).
markupTest :: Verbosity
           -> LocalBuildInfo
           -> FilePath -- ^ \"dist/\" prefix
           -> String -- ^ Library name
           -> TestSuite
           -> IO ()
markupTest verbosity lbi distPref libName suite = do
    tixFileExists <- doesFileExist $ tixFilePath distPref $ testName suite
    when tixFileExists $ do
        -- behaviour of 'markup' depends on version, so we need *a* version
        -- but no particular one
        (hpc, hpcVer, _) <- requireProgramVersion verbosity
            hpcProgram anyVersion (withPrograms lbi)
        -- 'main' is excluded from the report along with the suite's
        -- own modules.
        markup hpc hpcVer verbosity
            (tixFilePath distPref $ testName suite) mixDirs
            (htmlDir distPref $ testName suite)
            (testModules suite ++ [ main ])
        notice verbosity $ "Test coverage report written to "
                           ++ htmlDir distPref (testName suite)
                           </> "hpc_index" <.> "html"
  where
    -- .mix files may live under either the suite's or the library's dir
    mixDirs = map (mixDir distPref) [ testName suite, libName ]
-- | Generate the HTML markup for all of a package's test suites,
-- combining their .tix files into one report for the library. A no-op
-- unless every suite's .tix file exists.
markupPackage :: Verbosity
              -> LocalBuildInfo
              -> FilePath -- ^ \"dist/\" prefix
              -> String -- ^ Library name
              -> [TestSuite]
              -> IO ()
markupPackage verbosity lbi distPref libName suites = do
    let tixFiles = map (tixFilePath distPref . testName) suites
    tixFilesExist <- mapM doesFileExist tixFiles
    when (and tixFilesExist) $ do
        -- behaviour of 'markup' depends on version, so we need *a* version
        -- but no particular one
        (hpc, hpcVer, _) <- requireProgramVersion verbosity
            hpcProgram anyVersion (withPrograms lbi)
        let outFile = tixFilePath distPref libName
            htmlDir' = htmlDir distPref libName
            -- test-suite modules (and 'main') are excluded from the
            -- library's combined report
            excluded = concatMap testModules suites ++ [ main ]
        createDirectoryIfMissing True $ takeDirectory outFile
        -- Union all suites' .tix files into one before rendering.
        union hpc verbosity tixFiles outFile excluded
        markup hpc hpcVer verbosity outFile mixDirs htmlDir' excluded
        notice verbosity $ "Package coverage report written to "
                           ++ htmlDir' </> "hpc_index.html"
  where
    mixDirs = map (mixDir distPref) $ libName : map testName suites
| jwiegley/ghc-release | libraries/Cabal/cabal/Distribution/Simple/Hpc.hs | gpl-3.0 | 7,693 | 0 | 15 | 1,986 | 1,176 | 637 | 539 | 108 | 2 |
module ModuleRecordClash.R where
import Prelude
-- | Trivial exported constant (this module appears to be a test
-- fixture for cross-module record-name resolution).
i :: Double
i = 1.0
| beni55/fay | tests/ModuleRecordClash/R.hs | bsd-3-clause | 68 | 0 | 4 | 13 | 19 | 12 | 7 | 4 | 1 |
{-# LANGUAGE ScopedTypeVariables, ExistentialQuantification #-}
-- |The primary entrypoint to the ROS client library portion of
-- roshask. This module defines the actions used to configure a ROS
-- Node.
module Ros.Node (Node, runNode, advertise, advertiseBuffered,
subscribe, getShutdownAction, runHandler, getParam,
getParamOpt, getName, getNamespace,
module Ros.Internal.RosTypes, Topic(..), topicRate,
module Ros.Internal.RosTime, liftIO) where
import Control.Applicative ((<$>))
import Control.Concurrent (newEmptyMVar, readMVar, putMVar)
import Control.Concurrent.BoundedChan
import Control.Concurrent.STM (newTVarIO)
import Control.Monad (when)
import Control.Monad.State (liftIO, get, put, execStateT)
import Control.Monad.Reader (ask, asks, runReaderT)
import qualified Data.Map as M
import qualified Data.Set as S
import Control.Concurrent (forkIO, ThreadId)
import Data.Dynamic
import System.Environment (getEnvironment, getArgs)
import Network.XmlRpc.Internals (XmlRpcType)
import Ros.Internal.Msg.MsgInfo
import Ros.Internal.RosBinary (RosBinary)
import Ros.Internal.RosTypes
import Ros.Internal.RosTime
import Ros.Internal.Util.AppConfig (Config, parseAppConfig, forkConfig, configured)
import Ros.Internal.Util.ArgRemapping
import Ros.Node.Type
import qualified Ros.Graph.ParameterServer as P
import Ros.Node.RosTcp (subStream, runServer)
import qualified Ros.Node.RunNode as RN
import Ros.Topic
import Ros.Topic.Stats (recvMessageStat, sendMessageStat)
import Ros.Topic.Util (topicRate, share)
-- |Maximum number of items to buffer for a subscriber. Used as the
-- capacity of each subscriber's BoundedChan (whose writers block when
-- the buffer is full).
recvBufferSize :: Int
recvBufferSize = 10
-- |Spark a thread that funnels a Stream from a URI into the given
-- Chan, reporting per-message statistics for that URI.
-- Note: 'forever' and 'join' here are the Topic combinators from
-- "Ros.Topic" (only 'when' is imported from Control.Monad).
addSource :: (RosBinary a, MsgInfo a) =>
             String -> (URI -> Int -> IO ()) -> BoundedChan a -> URI ->
             Config ThreadId
addSource tname updateStats c uri =
    forkConfig $ subStream uri tname (updateStats uri) >>=
                 liftIO . forever . join . fmap (writeChan c)
-- Create a new Subscription value that will act as a named input
-- channel with zero or more connected publishers. All publishers feed
-- the same bounded channel, which the returned Topic drains.
mkSub :: forall a. (RosBinary a, MsgInfo a) =>
         String -> Config (Topic IO a, Subscription)
mkSub tname = do c <- liftIO $ newBoundedChan recvBufferSize
                 -- The stream repeatedly pulls the next message off
                 -- the shared channel.
                 let stream = Topic $ do x <- readChan c
                                         return (x, stream)
                 known <- liftIO $ newTVarIO S.empty -- presumably known publisher URIs; confirm in Subscription
                 stats <- liftIO $ newTVarIO M.empty -- per-connection receive statistics
                 r <- ask
                 let topicType = msgTypeName (undefined::a)
                     updateStats = recvMessageStat stats
                     -- Hook for attaching a new publisher URI later.
                     addSource' = flip runReaderT r . addSource tname updateStats c
                     sub = Subscription known addSource' topicType stats
                 return (stream, sub)
-- Create a Publication for a Topic: share the stream so multiple
-- clients can consume it, then serve it via the TCP server.
mkPub :: forall a. (RosBinary a, MsgInfo a, Typeable a) =>
         Topic IO a -> Int -> Config Publication
mkPub t n = do t' <- liftIO $ share t
               mkPubAux (msgTypeName (undefined::a)) t' (runServer t') n
-- Shared plumbing for building a Publication: start the serving
-- action, record the port it listens on, and register cleanup and
-- statistics state.
mkPubAux :: Typeable a =>
            String -> Topic IO a ->
            ((URI -> Int -> IO ()) -> Int -> Config (Config (), Int)) ->
            Int -> Config Publication
mkPubAux trep t runServer' bufferSize =
    do stats <- liftIO $ newTVarIO M.empty -- per-connection send statistics
       (cleanup, port) <- runServer' (sendMessageStat stats) bufferSize
       known <- liftIO $ newTVarIO S.empty -- presumably known subscriber URIs; confirm in Publication
       cleanup' <- configured cleanup
       return $ Publication known trep port cleanup' (DynTopic t) stats
-- |Subscribe to the given Topic. Returns a 'Ros.TopicUtil.share'd 'Topic'.
-- Subscribing twice to the same (resolved) name is an error; if this
-- node also publishes the topic, the locally published stream is
-- reused (with a runtime type check via 'fromDynTopic').
subscribe :: (RosBinary a, MsgInfo a, Typeable a) =>
             TopicName -> Node (Topic IO a)
subscribe name = do n <- get
                    name' <- canonicalizeName =<< remapName name
                    r <- nodeAppConfig <$> ask
                    let subs = subscriptions n
                    when (M.member name' subs)
                         (error $ "Already subscribed to "++name')
                    let pubs = publications n
                    if M.member name' pubs
                      then return . fromDynErr . pubTopic $ pubs M.! name'
                      else do (stream, sub) <- liftIO $
                                                 runReaderT (mkSub name') r
                              put n { subscriptions = M.insert name' sub subs }
                              liftIO $ share stream
  where fromDynErr = maybe (error msg) id . fromDynTopic
        msg = "Subscription to "++name++" at a different type than "++
              "what that Topic was already advertised at by this Node."
-- |Spin up a thread within a Node. This is typically used for message
-- handlers. Note that the supplied 'Topic' is traversed solely for
-- any side effects of its steps; the produced values are ignored.
runHandler :: (a -> IO b) -> Topic IO a -> Node ThreadId
runHandler handler topic =
    liftIO . forkIO . forever . join $ fmap handler topic
-- Shared implementation of advertising: resolve the topic name,
-- refuse duplicates, and record the new Publication in the node state.
advertiseAux :: (Int -> Config Publication) -> Int -> TopicName -> Node ()
advertiseAux mkPub' bufferSize name =
    do n <- get
       -- NOTE(review): name resolution here is remapName AFTER
       -- canonicalizeName, whereas 'subscribe' applies them in the
       -- opposite order -- confirm which order is intended.
       name' <- remapName =<< canonicalizeName name
       r <- nodeAppConfig <$> ask
       let pubs = publications n
       if M.member name' pubs
         then error $ "Already advertised " ++ name'
         else do pub <- liftIO $ runReaderT (mkPub' bufferSize) r
                 put n { publications = M.insert name' pub pubs }
-- |Advertise a 'Topic' publishing a stream of 'IO' values with a
-- per-client transmit buffer of the specified size.
advertiseBuffered :: (RosBinary a, MsgInfo a, Typeable a) =>
                     Int -> TopicName -> Topic IO a -> Node ()
advertiseBuffered bufferSize name s = advertiseAux (mkPub s) bufferSize name
-- |Advertise a 'Topic' publishing a stream of values produced in the
-- 'IO' monad, using the default per-client transmit buffer size of 1.
advertise :: (RosBinary a, MsgInfo a, Typeable a) =>
             TopicName -> Topic IO a -> Node ()
advertise name stream = advertiseBuffered 1 name stream
-- -- |Existentially quantified message type that roshask can
-- -- serialize. This type provides a way to work with collections of
-- -- differently typed 'Topic's.
-- data SomeMsg = forall a. (RosBinary a, MsgInfo a, Typeable a) => SomeMsg a
-- -- |Advertise projections of a 'Topic' as discrete 'Topic's.
-- advertiseSplit :: [(TopicName, a -> SomeMsg)] -> Topic IO a -> Node ()
-- advertiseSplit = undefined
-- |Get an action that will shutdown this Node.
-- ('readMVar' blocks until the shutdown action has been installed in
-- 'signalShutdown' -- presumably by the runtime in RunNode; confirm.)
getShutdownAction :: Node (IO ())
getShutdownAction = get >>= liftIO . readMVar . signalShutdown
-- |Apply any matching renames to a given name; names without a
-- remapping entry are returned unchanged.
remapName :: String -> Node String
remapName name = do
    remaps <- asks nodeRemaps
    return $ case lookup name remaps of
               Just name' -> name'
               Nothing -> name
-- |Convert relative names to absolute names. Leaves absolute names
-- unchanged.
canonicalizeName :: String -> Node String
canonicalizeName n@('/':_) = return n -- already absolute
canonicalizeName ('~':n) = do state <- get
                              -- '~' names are private to this node:
                              -- prefix with the node's own name.
                              let node = nodeName state
                              return $ node ++ "/" ++ n
-- Plain relative names are prefixed with the current namespace.
canonicalizeName n = do (++n) . namespace <$> get
-- |Get a parameter value from the Parameter Server; 'Nothing' when
-- the parameter is unset (or the hasParam call fails).
getServerParam :: XmlRpcType a => String -> Node (Maybe a)
getServerParam var = do state <- get
                        let masterUri = master state
                            myName = nodeName state
                        -- Call hasParam first because getParam only returns
                        -- a partial result (just the return code) in failure.
                        hasParam <- liftIO $ P.hasParam masterUri myName var
                        case hasParam of
                          Right True -> liftIO $ P.getParam masterUri myName var
                          _ -> return Nothing
-- |Get the value associated with the given parameter name. If the
-- parameter is not set, then 'Nothing' is returned; if the parameter
-- is set to @x@, then @Just x@ is returned.
getParamOpt :: (XmlRpcType a, FromParam a) => String -> Node (Maybe a)
getParamOpt var = do var' <- remapName =<< canonicalizeName var
                     -- Command-line parameters shadow the parameter server.
                     params <- nodeParams <$> ask
                     case lookup var' params of
                       Just val -> return . Just $ fromParam val
                       Nothing -> getServerParam var'
-- |Get the value associated with the given parameter name, falling
-- back to the supplied default when the parameter is not set.
getParam :: (XmlRpcType a, FromParam a) => String -> a -> Node a
getParam var def = do
    mval <- getParamOpt var
    case mval of
      Just val -> return val
      Nothing -> return def
-- |Get the current node's name.
getName :: Node String
getName = fmap nodeName get
-- |Get the current namespace (as recorded in the NodeState; see
-- 'runNode' for how it is derived from ROS_NAMESPACE).
getNamespace :: Node String
getNamespace = namespace <$> get
-- |Run a ROS Node: read configuration from the environment
-- (ROS_MASTER_URI, ROS_NAMESPACE, ROS_IP/ROS_HOSTNAME) and the
-- command line (remappings, parameters, "__name"), then execute the
-- node's configuration and hand off to the runtime.
runNode :: NodeName -> Node a -> IO ()
runNode name (Node nConf) =
    do myURI <- newEmptyMVar
       sigStop <- newEmptyMVar
       env <- liftIO getEnvironment
       (conf, args) <- parseAppConfig <$> liftIO getArgs
       let getConfig' var def = maybe def id $ lookup var env
           getConfig = flip lookup env
           masterConf = getConfig' "ROS_MASTER_URI" "http://localhost:11311"
           -- Normalise the namespace to end with '/'.
           -- NOTE(review): 'last ns' is partial and would crash on an
           -- empty ROS_NAMESPACE value -- confirm that cannot occur.
           namespaceConf = let ns = getConfig' "ROS_NAMESPACE" "/"
                           in if last ns == '/' then ns else ns ++ "/"
           (nameMap, params) = parseRemappings args
           -- A "__name" argument overrides the node's given name;
           -- otherwise relative names are qualified by the namespace.
           name' = case lookup "__name" params of
                     Just x -> fromParam x
                     Nothing -> case name of
                                  '/':_ -> name
                                  _ -> namespaceConf ++ name
           -- Name remappings apply to exact strings and resolved names.
           resolve p@(('/':_),_) = [p]
           resolve (('_':n),v) = [(name'++"/"++n, v)]
           resolve (('~':n),v) = [(name'++"/"++ n, v)] --, ('_':n,v)]
           resolve (n,v) = [(namespaceConf ++ n,v), (n,v)]
           nameMap' = concatMap resolve nameMap
           params' = concatMap resolve params
       when (not $ null nameMap')
            (putStrLn $ "Remapping name(s) "++show nameMap')
       when (not $ null params')
            (putStrLn $ "Setting parameter(s) "++show params')
       -- ROS_IP takes precedence over ROS_HOSTNAME for our own URI;
       -- the MVar stays empty when neither is set.
       case getConfig "ROS_IP" of
         Nothing -> case getConfig "ROS_HOSTNAME" of
                      Nothing -> return ()
                      Just n -> putMVar myURI $! "http://"++n
         Just ip -> putMVar myURI $! "http://"++ip
       let configuredNode = runReaderT nConf (NodeConfig params' nameMap' conf)
           initialState = NodeState name' namespaceConf masterConf myURI
                                    sigStop M.empty M.empty
           statefulNode = execStateT configuredNode initialState
       statefulNode >>= flip runReaderT conf . RN.runNode name'
| bitemyapp/roshask | src/Ros/Node.hs | bsd-3-clause | 10,821 | 0 | 16 | 3,180 | 2,731 | 1,416 | 1,315 | 170 | 9 |
{- |
Module : GUI.Glade.%s
Description : Glade xmlstring for %s
Copyright : (c) Thiemo Wiedemeyer, Uni Bremen 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : raider@informatik.uni-bremen.de
Stability : provisional
Portability : portable
This module provides a string containing the xml data of the glade file for:
%s
This module is automatically created.
-}
module GUI.Glade.%s (get)
where
-- | The glade file's name and its XML content. (This file is a
-- template: the %s placeholders are filled in by the generator
-- mentioned in the module header.)
get :: (String, String)
get = ("%s", xmlString)
xmlString :: String
xmlString =
| mariefarrell/Hets | GUI/Glade/Template.append.hs | gpl-2.0 | 510 | 1 | 6 | 100 | 51 | 30 | 21 | -1 | -1 |
module Lamdu.GUI.ExpressionEdit.HoleEdit.Open.ShownResult
( ShownResult(..)
, srPick
) where
import Data.Store.Guid (Guid)
import qualified Data.Store.Transaction as Transaction
import qualified Graphics.UI.Bottle.Widget as Widget
import qualified Lamdu.GUI.ExpressionEdit.HoleEdit.Results as HoleResults
import qualified Lamdu.Sugar.Types as Sugar
type T = Transaction.Transaction
-- | A hole result as currently shown in the hole editor.
data ShownResult m = ShownResult
  { srEventMap :: Widget.EventHandlers (T m)
    -- ^ Event handlers associated with this result (presumably active
    -- while it is shown -- confirm at use sites)
  , srHoleResult :: Sugar.HoleResult Sugar.Name m HoleResults.SugarExprPl
    -- ^ The underlying sugar hole result
  , srPickTo ::
      T m
      ( Maybe Guid -- Hole target guid
      , Widget.EventResult
      )
    -- ^ Pick this result, yielding the next hole's target guid (if
    -- any) and the widget event result
  }
-- | Run a shown result's pick action, keeping only the
-- 'Widget.EventResult' and discarding the hole target guid.
srPick :: Functor m => ShownResult m -> T m Widget.EventResult
srPick shownResult = fmap snd (srPickTo shownResult)
| schell/lamdu | Lamdu/GUI/ExpressionEdit/HoleEdit/Open/ShownResult.hs | gpl-3.0 | 732 | 0 | 11 | 120 | 187 | 115 | 72 | 18 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
-- | Compile a 'GPUMem' program to imperative code with kernels.
-- This is mostly (but not entirely) the same process no matter if we
-- are targeting OpenCL or CUDA. The important distinctions (the host
-- level code) are introduced later.
module Futhark.CodeGen.ImpGen.GPU
( compileProgOpenCL,
compileProgCUDA,
Warnings,
)
where
import Control.Monad.Except
import Data.Bifunctor (second)
import Data.List (foldl')
import qualified Data.Map as M
import Data.Maybe
import Futhark.CodeGen.ImpCode.GPU (bytes)
import qualified Futhark.CodeGen.ImpCode.GPU as Imp
import Futhark.CodeGen.ImpGen hiding (compileProg)
import qualified Futhark.CodeGen.ImpGen
import Futhark.CodeGen.ImpGen.GPU.Base
import Futhark.CodeGen.ImpGen.GPU.SegHist
import Futhark.CodeGen.ImpGen.GPU.SegMap
import Futhark.CodeGen.ImpGen.GPU.SegRed
import Futhark.CodeGen.ImpGen.GPU.SegScan
import Futhark.CodeGen.ImpGen.GPU.Transpose
import Futhark.CodeGen.SetDefaultSpace
import Futhark.Error
import Futhark.IR.GPUMem
import qualified Futhark.IR.Mem.IxFun as IxFun
import Futhark.MonadFreshNames
import Futhark.Util.IntegralExp (IntegralExp, divUp, quot, rem)
import Prelude hiding (quot, rem)
-- | The operation set used when compiling host-level (kernel-invoking)
-- code; kernels themselves are compiled with a different set.
callKernelOperations :: Operations GPUMem HostEnv Imp.HostOp
callKernelOperations =
  Operations
    { opsExpCompiler = expCompiler,
      opsCopyCompiler = callKernelCopy,
      opsOpCompiler = opCompiler,
      opsStmsCompiler = defCompileStms,
      opsAllocCompilers = mempty
    }
-- | Per-backend lookup tables from binary operators to atomic
-- operations.  The CUDA table is a superset of the OpenCL one: it
-- additionally supports atomic floating-point addition.
openclAtomics, cudaAtomics :: AtomicBinOp
(openclAtomics, cudaAtomics) = (flip lookup opencl, flip lookup cuda)
  where
    opencl64 =
      [ (Add Int64 OverflowUndef, Imp.AtomicAdd Int64),
        (SMax Int64, Imp.AtomicSMax Int64),
        (SMin Int64, Imp.AtomicSMin Int64),
        (UMax Int64, Imp.AtomicUMax Int64),
        (UMin Int64, Imp.AtomicUMin Int64),
        (And Int64, Imp.AtomicAnd Int64),
        (Or Int64, Imp.AtomicOr Int64),
        (Xor Int64, Imp.AtomicXor Int64)
      ]
    opencl32 =
      [ (Add Int32 OverflowUndef, Imp.AtomicAdd Int32),
        (SMax Int32, Imp.AtomicSMax Int32),
        (SMin Int32, Imp.AtomicSMin Int32),
        (UMax Int32, Imp.AtomicUMax Int32),
        (UMin Int32, Imp.AtomicUMin Int32),
        (And Int32, Imp.AtomicAnd Int32),
        (Or Int32, Imp.AtomicOr Int32),
        (Xor Int32, Imp.AtomicXor Int32)
      ]
    opencl = opencl32 ++ opencl64
    cuda =
      opencl
        ++ [ (FAdd Float32, Imp.AtomicFAdd Float32),
             (FAdd Float64, Imp.AtomicFAdd Float64)
           ]
-- | Compile a 'GPUMem' program to an imperative program, given the
-- host environment (which fixes the CUDA/OpenCL specifics).
compileProg ::
  MonadFreshNames m =>
  HostEnv ->
  Prog GPUMem ->
  m (Warnings, Imp.Program)
compileProg env prog =
  second (fmap setOpSpace . setDefsSpace)
    <$> Futhark.CodeGen.ImpGen.compileProg env callKernelOperations device_space prog
  where
    -- Host-side memory defaults to the "device" space; memory inside
    -- kernel bodies defaults to the "global" space.
    device_space = Imp.Space "device"
    global_space = Imp.Space "global"
    setDefsSpace = setDefaultSpace device_space
    setOpSpace (Imp.CallKernel kernel) =
      Imp.CallKernel
        kernel
          { Imp.kernelBody =
              setDefaultCodeSpace global_space $ Imp.kernelBody kernel
          }
    setOpSpace op = op
-- | Compile a 'GPUMem' program to low-level parallel code, with
-- either CUDA or OpenCL characteristics.  The two entry points differ
-- only in the atomic-operation table and the target tag passed in the
-- 'HostEnv'.
compileProgOpenCL,
  compileProgCUDA ::
    MonadFreshNames m => Prog GPUMem -> m (Warnings, Imp.Program)
compileProgOpenCL = compileProg $ HostEnv openclAtomics OpenCL mempty
compileProgCUDA = compileProg $ HostEnv cudaAtomics CUDA mempty
-- | Compile a single host-level operation: allocations, size queries,
-- and 'SegOp's (which become kernels via 'segOpCompiler').
opCompiler ::
  Pat GPUMem ->
  Op GPUMem ->
  CallKernelGen ()
opCompiler dest (Alloc e space) =
  compileAlloc dest e space
opCompiler (Pat [pe]) (Inner (SizeOp (GetSize key size_class))) = do
  fname <- askFunction
  sOp $
    Imp.GetSize (patElemName pe) (keyWithEntryPoint fname key) $
      sizeClassWithEntryPoint fname size_class
opCompiler (Pat [pe]) (Inner (SizeOp (CmpSizeLe key size_class x))) = do
  fname <- askFunction
  let size_class' = sizeClassWithEntryPoint fname size_class
  sOp . Imp.CmpSizeLe (patElemName pe) (keyWithEntryPoint fname key) size_class'
    =<< toExp x
opCompiler (Pat [pe]) (Inner (SizeOp (GetSizeMax size_class))) =
  sOp $ Imp.GetSizeMax (patElemName pe) size_class
opCompiler (Pat [pe]) (Inner (SizeOp (CalcNumGroups w64 max_num_groups_key group_size))) = do
  fname <- askFunction
  max_num_groups :: TV Int32 <- dPrim "max_num_groups" int32
  sOp $
    Imp.GetSize (tvVar max_num_groups) (keyWithEntryPoint fname max_num_groups_key) $
      sizeClassWithEntryPoint fname SizeNumGroups
  -- If 'w' is small, we launch fewer groups than we normally would.
  -- We don't want any idle groups.
  --
  -- The calculations are done with 64-bit integers to avoid overflow
  -- issues.
  let num_groups_maybe_zero =
        sMin64 (toInt64Exp w64 `divUp` toInt64Exp group_size) $
          sExt64 (tvExp max_num_groups)
  -- We also don't want zero groups.
  let num_groups = sMax64 1 num_groups_maybe_zero
  mkTV (patElemName pe) int32 <-- sExt32 num_groups
opCompiler dest (Inner (SegOp op)) =
  segOpCompiler dest op
-- Any other pattern/operation combination is a compiler bug, not a
-- user error.
opCompiler pat e =
  compilerBugS $
    "opCompiler: Invalid pattern\n "
      ++ pretty pat
      ++ "\nfor expression\n "
      ++ pretty e
-- | Tag every name on a threshold path with the current entry point;
-- all other size classes pass through unchanged.
sizeClassWithEntryPoint :: Maybe Name -> Imp.SizeClass -> Imp.SizeClass
sizeClassWithEntryPoint fname (Imp.SizeThreshold path def) =
  Imp.SizeThreshold [(keyWithEntryPoint fname name, x) | (name, x) <- path] def
sizeClassWithEntryPoint _ size_class = size_class
-- | Dispatch a 'SegOp' to the code generator for its operator kind.
-- Reductions, scans and histograms are only expected at thread level
-- here; anything else is a compiler bug.
segOpCompiler ::
  Pat GPUMem ->
  SegOp SegLevel GPUMem ->
  CallKernelGen ()
segOpCompiler pat (SegMap lvl space _ kbody) =
  compileSegMap pat lvl space kbody
segOpCompiler pat (SegRed lvl@SegThread {} space reds _ kbody) =
  compileSegRed pat lvl space reds kbody
segOpCompiler pat (SegScan lvl@SegThread {} space scans _ kbody) =
  compileSegScan pat lvl space scans kbody
segOpCompiler pat (SegHist (SegThread num_groups group_size _) space ops _ kbody) =
  compileSegHist pat num_groups group_size space ops kbody
segOpCompiler pat segop =
  compilerBugS $ "segOpCompiler: unexpected " ++ pretty (segLevel segop) ++ " for rhs of pattern " ++ pretty pat
-- Create boolean expression that checks whether all kernels in the
-- enclosed code do not use more local memory than we have available.
-- We look at *all* the kernels here, even those that might be
-- otherwise protected by their own multi-versioning branches deeper
-- down. Currently the compiler will not generate multi-versioning
-- that makes this a problem, but it might in the future.
--
-- Returns 'Nothing' when the requirement cannot be checked statically
-- (some allocation size refers to a variable not yet in scope).
checkLocalMemoryReqs :: Imp.Code -> CallKernelGen (Maybe (Imp.TExp Bool))
checkLocalMemoryReqs code = do
  scope <- askScope
  let alloc_sizes = map (sum . map alignedSize . localAllocSizes . Imp.kernelBody) $ getGPU code
  -- If any of the sizes involve a variable that is not known at this
  -- point, then we cannot check the requirements.
  if any (`M.notMember` scope) (namesToList $ freeIn alloc_sizes)
    then return Nothing
    else do
      local_memory_capacity :: TV Int32 <- dPrim "local_memory_capacity" int32
      sOp $ Imp.GetSizeMax (tvVar local_memory_capacity) SizeLocalMemory
      let local_memory_capacity_64 =
            sExt64 $ tvExp local_memory_capacity
          fits size =
            unCount size .<=. local_memory_capacity_64
      return $ Just $ foldl' (.&&.) true (map fits alloc_sizes)
  where
    getGPU = foldMap getKernel
    getKernel (Imp.CallKernel k) = [k]
    getKernel _ = []
    localAllocSizes = foldMap localAllocSize
    localAllocSize (Imp.LocalAlloc _ size) = [size]
    localAllocSize _ = []
    -- These allocations will actually be padded to an 8-byte aligned
    -- size, so we should take that into account when checking whether
    -- they fit.
    alignedSize x = x + ((8 - (x `rem` 8)) `rem` 8)
-- | Compile a 'WithAcc' expression.  For every accumulator input whose
-- update operator needs locking, allocate a static lock array on the
-- device and record it in the environment before delegating to the
-- default expression compiler.
withAcc ::
  Pat GPUMem ->
  [(Shape, [VName], Maybe (Lambda GPUMem, [SubExp]))] ->
  Lambda GPUMem ->
  CallKernelGen ()
withAcc pat inputs lam = do
  atomics <- hostAtomics <$> askEnv
  locksForInputs atomics $ zip accs inputs
  where
    accs = map paramName $ lambdaParams lam
    locksForInputs _ [] =
      defCompileExp pat $ WithAcc inputs lam
    locksForInputs atomics ((c, (_, _, op)) : inputs')
      | Just (op_lam, _) <- op,
        AtomicLocking _ <- atomicUpdateLocking atomics op_lam = do
          -- Size of the spin-lock table for this accumulator.
          let num_locks = 100151
          locks_arr <-
            sStaticArray "withacc_locks" (Space "device") int32 $
              Imp.ArrayZeros num_locks
          let locks = Locks locks_arr num_locks
              extend env = env {hostLocks = M.insert c locks $ hostLocks env}
          localEnv extend $ locksForInputs atomics inputs'
      | otherwise =
          locksForInputs atomics inputs'
expCompiler :: ExpCompiler GPUMem HostEnv Imp.HostOp
-- We generate a simple kernel for iota and replicate.
expCompiler (Pat [pe]) (BasicOp (Iota n x s et)) = do
  x' <- toExp x
  s' <- toExp s
  sIota (patElemName pe) (toInt64Exp n) x' s' et
expCompiler (Pat [pe]) (BasicOp (Replicate _ se))
  -- Replicating into an accumulator is a no-op.
  | Acc {} <- patElemType pe = pure ()
  | otherwise =
      sReplicate (patElemName pe) se
-- Allocation in the "local" space is just a placeholder.
expCompiler _ (Op (Alloc _ (Space "local"))) =
  return ()
expCompiler pat (WithAcc inputs lam) =
  withAcc pat inputs lam
-- This is a multi-versioning If created by incremental flattening.
-- We need to augment the conditional with a check that any local
-- memory requirements in tbranch are compatible with the hardware.
-- We do not check anything for fbranch, as we assume that it will
-- always be safe (and what would we do if none of the branches would
-- work?).
expCompiler dest (If cond tbranch fbranch (IfDec _ IfEquiv)) = do
  tcode <- collect $ compileBody dest tbranch
  fcode <- collect $ compileBody dest fbranch
  check <- checkLocalMemoryReqs tcode
  emit $ case check of
    Nothing -> fcode
    Just ok -> Imp.If (ok .&&. toBoolExp cond) tcode fcode
-- Everything else gets the default treatment.
expCompiler dest e =
  defCompileExp dest e
-- | Copy compiler for host-level code.  Tries, in order: a call to the
-- builtin map-transpose function, a flat device-to-device 'Imp.Copy'
-- when both index functions are linear, and finally the generic
-- element-wise 'sCopy'.
callKernelCopy :: CopyCompiler GPUMem HostEnv Imp.HostOp
callKernelCopy bt destloc@(MemLoc destmem _ destIxFun) srcloc@(MemLoc srcmem srcshape srcIxFun)
  | Just (destoffset, srcoffset, num_arrays, size_x, size_y) <-
      isMapTransposeCopy bt destloc srcloc = do
      fname <- mapTransposeForType bt
      emit $
        Imp.Call
          []
          fname
          [ Imp.MemArg destmem,
            Imp.ExpArg $ untyped destoffset,
            Imp.MemArg srcmem,
            Imp.ExpArg $ untyped srcoffset,
            Imp.ExpArg $ untyped num_arrays,
            Imp.ExpArg $ untyped size_x,
            Imp.ExpArg $ untyped size_y
          ]
  | bt_size <- primByteSize bt,
    Just destoffset <- IxFun.linearWithOffset destIxFun bt_size,
    Just srcoffset <- IxFun.linearWithOffset srcIxFun bt_size = do
      let num_elems = Imp.elements $ product $ map toInt64Exp srcshape
      srcspace <- entryMemSpace <$> lookupMemory srcmem
      destspace <- entryMemSpace <$> lookupMemory destmem
      emit $
        Imp.Copy
          destmem
          (bytes $ sExt64 destoffset)
          destspace
          srcmem
          (bytes $ sExt64 srcoffset)
          srcspace
          $ num_elems `Imp.withElemType` bt
  | otherwise = sCopy bt destloc srcloc
-- | The name of the builtin map-transpose function for a primitive
-- type, emitting its definition the first time it is requested.
mapTransposeForType :: PrimType -> CallKernelGen Name
mapTransposeForType bt = do
  let fname = nameFromString ("builtin#" <> mapTransposeName bt)
  known <- hasFunction fname
  if known
    then return fname
    else do
      emitFunction fname (mapTransposeFunction bt)
      return fname
-- | The name of the builtin GPU map-transpose function for a given
-- primitive type, e.g. @gpu_map_transpose_f32@.
mapTransposeName :: PrimType -> String
mapTransposeName bt = "gpu_map_transpose_" ++ pretty bt
mapTransposeFunction :: PrimType -> Imp.Function
mapTransposeFunction bt =
Imp.Function Nothing [] params transpose_code [] []
where
params =
[ memparam destmem,
intparam destoffset,
memparam srcmem,
intparam srcoffset,
intparam num_arrays,
intparam x,
intparam y
]
space = Space "device"
memparam v = Imp.MemParam v space
intparam v = Imp.ScalarParam v $ IntType Int32
[ destmem,
destoffset,
srcmem,
srcoffset,
num_arrays,
x,
y,
mulx,
muly,
block
] =
zipWith
(VName . nameFromString)
[ "destmem",
"destoffset",
"srcmem",
"srcoffset",
"num_arrays",
"x_elems",
"y_elems",
-- The following is only used for low width/height
-- transpose kernels
"mulx",
"muly",
"block"
]
[0 ..]
block_dim_int = 16
block_dim :: IntegralExp a => a
block_dim = 16
-- When an input array has either width==1 or height==1, performing a
-- transpose will be the same as performing a copy.
can_use_copy =
let onearr = Imp.le32 num_arrays .==. 1
height_is_one = Imp.le32 y .==. 1
width_is_one = Imp.le32 x .==. 1
in onearr .&&. (width_is_one .||. height_is_one)
transpose_code =
Imp.If input_is_empty mempty $
mconcat
[ Imp.DeclareScalar muly Imp.Nonvolatile (IntType Int32),
Imp.SetScalar muly $ untyped $ block_dim `quot` Imp.le32 x,
Imp.DeclareScalar mulx Imp.Nonvolatile (IntType Int32),
Imp.SetScalar mulx $ untyped $ block_dim `quot` Imp.le32 y,
Imp.If can_use_copy copy_code $
Imp.If should_use_lowwidth (callTransposeKernel TransposeLowWidth) $
Imp.If should_use_lowheight (callTransposeKernel TransposeLowHeight) $
Imp.If should_use_small (callTransposeKernel TransposeSmall) $
callTransposeKernel TransposeNormal
]
input_is_empty =
Imp.le32 num_arrays .==. 0 .||. Imp.le32 x .==. 0 .||. Imp.le32 y .==. 0
should_use_small =
Imp.le32 x .<=. (block_dim `quot` 2)
.&&. Imp.le32 y .<=. (block_dim `quot` 2)
should_use_lowwidth =
Imp.le32 x .<=. (block_dim `quot` 2)
.&&. block_dim .<. Imp.le32 y
should_use_lowheight =
Imp.le32 y .<=. (block_dim `quot` 2)
.&&. block_dim .<. Imp.le32 x
copy_code =
let num_bytes = sExt64 $ Imp.le32 x * Imp.le32 y * primByteSize bt
in Imp.Copy
destmem
(Imp.Count $ sExt64 $ Imp.le32 destoffset)
space
srcmem
(Imp.Count $ sExt64 $ Imp.le32 srcoffset)
space
(Imp.Count num_bytes)
callTransposeKernel =
Imp.Op . Imp.CallKernel
. mapTransposeKernel
(mapTransposeName bt)
block_dim_int
( destmem,
Imp.le32 destoffset,
srcmem,
Imp.le32 srcoffset,
Imp.le32 x,
Imp.le32 y,
Imp.le32 mulx,
Imp.le32 muly,
Imp.le32 num_arrays,
block
)
bt
| HIPERFIT/futhark | src/Futhark/CodeGen/ImpGen/GPU.hs | isc | 14,947 | 0 | 17 | 3,676 | 4,052 | 2,074 | 1,978 | 342 | 4 |
{-# LANGUAGE OverloadedStrings, RankNTypes, PatternSynonyms, ViewPatterns,
BangPatterns #-}
module Data.CSS.Syntax.Tokens
( Token(..)
, NumericValue(..)
, HashFlag(..)
, Unit
, tokenize
, serialize
) where
import Control.Applicative
import Control.Monad
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Builder as TLB
import Data.Monoid
import Data.Char
import Data.Scientific
import Numeric
import Prelude
import Data.Text.Internal (Text(..))
import Data.Text.Unsafe (inlineInterleaveST)
import qualified Data.Text.Array as A
import Control.Monad.ST (ST)
import GHC.Base (unsafeChr)
import Data.Word (Word16)
import Data.Char (ord)
import Data.Bits
data Token
= Whitespace
| CDO -- CommentDelimiterOpen
| CDC -- CommentDelimiterClose
| Comma
| Colon
| Semicolon
| LeftParen
| RightParen
| LeftSquareBracket
| RightSquareBracket
| LeftCurlyBracket
| RightCurlyBracket
| SuffixMatch
| SubstringMatch
| PrefixMatch
| DashMatch
| IncludeMatch
| Column
| String !Text
| BadString
| Number !Text !NumericValue
| Percentage !Text !NumericValue
| Dimension !Text !NumericValue !Unit
| Url !Text
| BadUrl
| Ident !Text
| AtKeyword !Text
| Function !Text
| Hash !HashFlag !Text
| Delim !Char
deriving (Show, Eq)
data NumericValue
= NVInteger !Integer -- ^ number without dot '.' or exponent 'e'
| NVNumber !Scientific -- ^ number with dot '.' or exponent 'e'
deriving (Show, Eq)
data HashFlag = HId | HUnrestricted
deriving (Show, Eq)
type Unit = Text
-- Tokenization
-------------------------------------------------------------------------------
-- | Parse a 'Text' into a list of 'Token's: first normalise the input
-- stream, then run the tokenizer proper.
--
-- https://drafts.csswg.org/css-syntax/#tokenization
tokenize :: Text -> [Token]
tokenize input = parseTokens (preprocessInputStream input)
-- | Before sending the input stream to the tokenizer, implementations must
-- make the following code point substitutions: (see spec)
--
-- https://drafts.csswg.org/css-syntax/#input-preprocessing
preprocessInputStream :: Text -> Text
preprocessInputStream t0@(Text _ _ len) = withNewA len $ \ dst -> do
let go t d = case t of
'\x0D' :. '\x0A' :. t' ->
put '\x0A' t'
'\x0D' :. t' ->
put '\x0A' t'
'\x0C' :. t' ->
put '\x0A' t'
'\x00' :. t' ->
put '\xFFFD' t'
c :. t' ->
put c t'
_ ->
return d
where put x t' = do
write dst d x
go t' (d + 1)
go t0 0
-- Low level utilities
-------------------------------------------------------------------------------
pattern (:.) :: Char -> Text -> Text
pattern x :. xs <- (uncons -> Just (x, xs))
infixr 5 :.
-- | Pull the first 'Word16' code unit off a 'Text' without attempting
-- to decode a UTF-16 surrogate pair; the returned 'Char' may therefore
-- be half of such a pair.
uncons :: Text -> Maybe (Char, Text)
uncons (Text src offs len) =
  if len <= 0
    then Nothing
    else Just (w2c (A.unsafeIndex src offs), Text src (offs + 1) (len - 1))
{-# INLINE uncons #-}
-- | Store a single 16-bit character at the given offset of the
-- destination array.
write :: A.MArray s -> Int -> Char -> ST s ()
write dst d = A.unsafeWrite dst d . c2w
{-# INLINE write #-}
-- | Store a character that may need a UTF-16 surrogate pair, returning
-- the offset just past what was written.
-- Adapted from Data.Text.Internal.Unsafe.Char.unsafeWrite.
writeChar :: A.MArray s -> Int -> Char -> ST s Int
writeChar dst d c
  | cp < 0x10000 = do
      A.unsafeWrite dst d (fromIntegral cp)
      return (d + 1)
  | otherwise = do
      -- High surrogate is written first, then the low surrogate.
      A.unsafeWrite dst d hiSurrogate
      A.unsafeWrite dst (d + 1) loSurrogate
      return (d + 2)
  where
    cp = ord c
    m = cp - 0x10000
    hiSurrogate = fromIntegral $ (m `shiftR` 10) + 0xD800
    loSurrogate = fromIntegral $ (m .&. 0x3FF) + 0xDC00
{-# INLINE writeChar #-}
type Writer' s = (A.MArray s -> Int -> ST s Int, Text)
type Writer s = A.MArray s -> Int -> ST s (Int, Text)
-- | Treat a UTF-16 code unit as a 'Char' without any decoding; used
-- only for convenient pattern matching on code units.
w2c :: Word16 -> Char
w2c w = unsafeChr (fromIntegral w)
{-# INLINE w2c #-}
-- | Narrow a 'Char' to a UTF-16 code unit (caller must ensure the
-- code point fits in 16 bits).
c2w :: Char -> Word16
c2w c = fromIntegral (ord c)
{-# INLINE c2w #-}
-- | Allocate a mutable array of the given capacity, let the action
-- fill it (returning how much it used), and freeze the used prefix
-- into a 'Text'.
withNewA :: Int -> (forall s . A.MArray s -> ST s Int) -> Text
withNewA len act = Text frozen 0 used
  where
    (frozen, used) = A.run2 $ do
      buf <- A.new len
      n <- act buf
      return (buf, n)
-- Serialization
-------------------------------------------------------------------------------
-- | Serialize a list of 'Token's back into 'Text'.
--
-- Serialization "round-trips" with parsing:
--
-- tokenize (serialize (tokenize s)) == tokenize s
--
-- https://drafts.csswg.org/css-syntax/#serialization
serialize :: [Token] -> Text
serialize = TL.toStrict . TLB.toLazyText . go
where go [] = ""
go [Delim '\\'] = "\\" -- do not add newline in last token
go [x] = renderToken x
go (x:xs@(y:_))
| needComment x y = renderToken x <> "/**/" <> go xs
| otherwise = renderToken x <> go xs
{-# INLINE renderToken #-}
{-# INLINE needComment #-}
needComment :: Token -> Token -> Bool
needComment a CDC = case a of
-- Can't be parsed that way but may exists in generated `Token` list.
-- It's also possible to make Delim 'a' which will be parsed as Ident
-- but we can't do much in this case since it's impossible to
-- create Delim 'a' tokens in parser.
Delim '!' -> True
Delim '@' -> True
Delim '#' -> True
Delim '-' -> True
Number {} -> True
Dimension {} -> True
Ident _ -> True
AtKeyword _ -> True
Function _ -> True
Hash {} -> True
_ -> False
needComment a b = case a of
Whitespace -> b == Whitespace
Ident _ -> idn || b == CDC || b == LeftParen
AtKeyword _ -> idn || b == CDC
Hash {} -> idn || b == CDC
Dimension {} -> idn || b == CDC
Delim '#' -> idn
Delim '-' -> idn
Number {} -> i || num || b == Delim '%'
Delim '@' -> i || b == Delim '-'
Delim '.' -> num
Delim '+' -> num
Delim '/' -> b == Delim '*' || b == SubstringMatch
Delim '|' -> b == Delim '='
|| b == Delim '|' || b == Column || b == DashMatch
Delim '$' -> b == Delim '='
Delim '*' -> b == Delim '='
Delim '^' -> b == Delim '='
Delim '~' -> b == Delim '='
_ -> False
where idn = i || b == Delim '-' || num
i = case b of
Ident _ -> True
Function _ -> True
Url _ -> True
BadUrl -> True
_ -> False
num = case b of
Number {} -> True
Percentage {} -> True
Dimension {} -> True
_ -> False
renderToken :: Token -> TLB.Builder
renderToken token = case token of
Whitespace -> c ' '
CDO -> "<!--"
CDC -> "-->"
Comma -> c ','
Colon -> c ':'
Semicolon -> c ';'
LeftParen -> c '('
RightParen -> c ')'
LeftSquareBracket -> c '['
RightSquareBracket -> c ']'
LeftCurlyBracket -> c '{'
RightCurlyBracket -> c '}'
SuffixMatch -> "$="
SubstringMatch -> "*="
PrefixMatch -> "^="
DashMatch -> "|="
IncludeMatch -> "~="
Column -> "||"
String x -> string x
BadString -> "\"\n"
Number x _ -> t x
Percentage x _ -> t x <> c '%'
Dimension x _ u -> t x <> t (renderDimensionUnit x u)
Url x -> "url(" <> t (renderUrl x) <> c ')'
BadUrl -> "url(()"
Ident x -> ident x
AtKeyword x -> c '@' <> ident x
Function x -> ident x <> c '('
Hash HId x -> c '#' <> ident x
Hash HUnrestricted x -> c '#' <> t (renderUnrestrictedHash x)
Delim '\\' -> "\\\n"
Delim x -> c x
where c = TLB.singleton
t = TLB.fromText
q = c '"'
string x = q <> t (renderString x) <> q
ident = t . renderIdent
-- https://www.w3.org/TR/cssom-1/#serialize-a-string
renderString :: Text -> Text
renderString t0@(Text _ _ l)
| T.any needEscape t0 = withNewA (l*8) $ go t0 0
| otherwise = t0
where
needEscape c = c <= '\x1F' || c == '\x7F' || c == '"' || c == '\\'
go t d dst = case T.uncons t of
Nothing -> return d
Just (c, t')
| c == '\x0' -> do
write dst d '\xFFFD'
-- spec says it should be escaped, but we loose
-- serialize->tokenize->serialize roundtrip that way
go t' (d+1) dst
| (c >= '\x1' && c <= '\x1F') || c == '\x7F' -> do
d' <- escapeAsCodePoint dst d c
go t' d' dst
| c == '"' || c == '\\' -> do
-- strings are always in double quotes, so '\'' aren't escaped
write dst d '\\'
write dst (d+1) c
go t' (d+2) dst
| otherwise -> do
d' <- writeChar dst d c
go t' d' dst
renderUrl :: Text -> Text
renderUrl t0@(Text _ _ l)
| T.any needEscape t0 = withNewA (l*8) $ go t0 0
| otherwise = t0
where
needEscape c = c <= '\x1F' || c == '\x7F' || isWhitespace c
|| c == '\\' || c == ')' || c == '"' || c == '\'' || c == '('
go t d dst = case T.uncons t of
Nothing -> return d
Just (c, t')
| c == '\x0' -> do
write dst d '\xFFFD'
go t' (d+1) dst
| needEscape c -> do
d' <- escapeAsCodePoint dst d c
go t' d' dst
| otherwise -> do
d' <- writeChar dst d c
go t' d' dst
renderDimensionUnit :: Text -> Text -> Text
renderDimensionUnit num t0@(Text _ _ l)
| not (T.any isExponent num)
, c :. t' <- t0
, isExponent c && validExp t' =
withNewA (l*8) $ \ dst -> do
d' <- escapeAsCodePoint dst 0 c
renderUnrestrictedHash' t' d' dst
| otherwise =
renderIdent t0
where validExp (s :. d :. _) | (s == '+' || s == '-') = isDigit d
validExp (d :. _) = isDigit d
validExp _ = False
renderIdent :: Text -> Text
renderIdent "-" = "\\-"
renderIdent t0@(Text _ _ l) = case t0 of
c :. t'
| isDigit c -> withNewA (l*8) $ \ dst -> do
d' <- escapeAsCodePoint dst 0 c
renderUnrestrictedHash' t' d' dst
'-' :. c :. t'
| isDigit c -> withNewA (l*8) $ \ dst -> do
write dst 0 '-'
d' <- escapeAsCodePoint dst 1 c
renderUnrestrictedHash' t' d' dst
_ -> renderUnrestrictedHash t0
renderUnrestrictedHash :: Text -> Text
renderUnrestrictedHash t0@(Text _ _ l)
| T.any (not . nameCodePoint) t0 =
withNewA (l*8) $ renderUnrestrictedHash' t0 0
| otherwise = t0
renderUnrestrictedHash' :: Text -> Int -> A.MArray s -> ST s Int
renderUnrestrictedHash' = go
where go t d dst = case T.uncons t of
Nothing -> return d
Just (c, t')
| c == '\x0' -> do
write dst d '\xFFFD'
go t' (d+1) dst
| (c >= '\x1' && c <= '\x1F') || c == '\x7F' -> do
d' <- escapeAsCodePoint dst d c
go t' d' dst
| nameCodePoint c -> do
d' <- writeChar dst d c
go t' d' dst
| otherwise -> do
write dst d '\\'
d' <- writeChar dst (d+1) c
go t' d' dst
-- | Write a CSS code-point escape: backslash, the hex value of the
-- character, and a terminating space.  Returns the offset just past
-- the space.
escapeAsCodePoint :: A.MArray s -> Int -> Char -> ST s Int
escapeAsCodePoint dst d c = do
  write dst d '\\'
  end <- foldM putHexDigit (d + 1) (showHex (ord c) [])
  write dst end ' '
  return (end + 1)
  where
    putHexDigit pos ch = do
      write dst pos ch
      return (pos + 1)
-- | verify valid escape and consume escaped code point
escapedCodePoint :: Text -> Maybe (Writer' s)
escapedCodePoint t = case t of
(hex -> Just d) :. ts -> go 5 d ts
'\n' :. _ -> Nothing
c :. ts -> Just (\ dst d -> write dst d c >> return (d+1), ts)
_ -> Nothing
where go :: Int -> Int -> Text -> Maybe (Writer' s)
go 0 acc ts = ret acc ts
go n acc ts = case ts of
(hex -> Just d) :. ts' -> go (n-1) (acc*16 + d) ts'
c :. ts' | isWhitespace c -> ret acc ts'
_ -> ret acc ts
ret (safe -> c) ts
| c < 0x10000 = Just
(\ dst d -> write dst d (unsafeChr c) >> return (d+1), ts)
| otherwise = Just
(\ dst d -> write dst d lo >> write dst (d+1) hi >> return (d+2)
,ts)
where m = c - 0x10000
lo = unsafeChr $ (m `shiftR` 10) + 0xD800
hi = unsafeChr $ (m .&. 0x3FF) + 0xDC00
safe :: Int -> Int
safe x
| x == 0 || x > 0x10FFFF = 0xFFFD
| x .&. 0x1ff800 /= 0xd800 = x
| otherwise = 0xFFFD -- UTF16 surrogate code point
hex :: Char -> Maybe Int
hex c
| c >= '0' && c <= '9' = Just (ord c - ord '0')
| c >= 'a' && c <= 'f' = Just (ord c - ord 'a' + 10)
| c >= 'A' && c <= 'F' = Just (ord c - ord 'A' + 10)
| otherwise = Nothing
{-# INLINE safe #-}
{-# INLINE hex #-}
escapedCodePoint' :: Text -> Maybe (Writer' s)
escapedCodePoint' ('\\' :. ts) = escapedCodePoint ts
escapedCodePoint' _ = Nothing
-- | Whether a code point may start a CSS name: an ASCII letter, an
-- underscore, or any non-ASCII code point (>= U+0080).
nameStartCodePoint :: Char -> Bool
nameStartCodePoint c =
  isAsciiLower c
    || isAsciiUpper c
    || c == '_'
    || c >= '\x0080'
-- | Whether a code point may continue a CSS name: a name-start code
-- point, an ASCII digit, or a hyphen.
nameCodePoint :: Char -> Bool
nameCodePoint c
  | nameStartCodePoint c = True
  | otherwise = isDigit c || c == '-'
satisfyOrEscaped :: (Char -> Bool) -> Text -> Maybe (Writer' s)
satisfyOrEscaped p (c :. ts)
| p c = Just (\ dst d -> write dst d c >> return (d+1), ts)
| c == '\\' = escapedCodePoint ts
satisfyOrEscaped _ _ = Nothing
-- | Check if three code points would start an identifier and consume name
parseName :: Text -> Maybe (Writer s)
parseName t = case t of
'-' :. ts -> consumeName' <$> satisfyOrEscaped (\ c -> nameStartCodePoint c || c == '-') ts
ts -> consumeName <$> satisfyOrEscaped nameStartCodePoint ts
where consumeName' n dst d = do
write dst d '-'
consumeName n dst (d + 1)
consumeName :: Writer' s -> Writer s
consumeName (w0, ts0) dst d0 = do
d' <- w0 dst d0
loop ts0 d'
where loop ts d = case satisfyOrEscaped nameCodePoint ts of
Just (w, ts') -> do
d' <- w dst d
loop ts' d'
Nothing -> return (d, ts)
{-# INLINE parseName #-}
{-# INLINE consumeName #-}
{-# INLINE satisfyOrEscaped #-}
{-# INLINE escapedCodePoint #-}
{-# INLINE escapedCodePoint' #-}
parseNumericValue :: Text -> Maybe (Text, NumericValue, Text)
parseNumericValue t0@(Text a offs1 _) = case withSign start t0 of
Just (nv, ts@(Text _ offs2 _)) ->
Just (Text a offs1 (offs2 - offs1), nv, ts)
Nothing -> Nothing
where start sign t = case t of
'.' :. (digit -> Just d) :. ts -> dot sign (startIR d) (-1) ts
(digit -> Just d) :. ts -> digits sign (startIR d) ts
_ -> Nothing
digits sign !c t = case t of
'.' :. (digit -> Just d) :. ts -> dot sign (accIR c d) (-1) ts
(digit -> Just d) :. ts -> digits sign (accIR c d) ts
_ -> Just $ expn True (sign $ readIR c) 0 t
dot sign !c !e t = case t of
(digit -> Just d) :. ts -> dot sign (accIR c d) (e-1) ts
_ -> Just $ expn False (sign $ readIR c) e t
expn int c e0 t = case t of
x :. ts
| isExponent x
, Just r <- withSign (expStart c e0 0) ts -> r
_ | int -> (NVInteger c, t)
| otherwise -> (NVNumber $ scientific c e0, t)
expStart c e0 e sign t = case t of
(digit -> Just d) :. ts -> expDigits c e0 (e*10 + d) sign ts
_ -> Nothing
expDigits c e0 !e sign t = case t of
(digit -> Just d) :. ts -> expDigits c e0 (e*10 + d) sign ts
_ -> Just (NVNumber $ scientific c (sign e + e0), t)
-- | Convert an ASCII decimal digit to its numeric value, or 'Nothing'
-- for any other character.
digit :: Enum a => Char -> Maybe a
digit c =
  if isDigit c
    then Just (toEnum (digitToInt c))
    else Nothing
withSign :: Num a => ((a -> a) -> Text -> Maybe (b, Text))
-> Text -> Maybe (b, Text)
withSign f t = case t of
'+' :. ts -> f id ts
'-' :. ts -> f negate ts
_ -> f id t
-- Idea stolen from GHC implementation of `instance Read Integer`
-- http://hackage.haskell.org/package/base-4.11.1.0/docs/src/Text.Read.Lex.html#valInteger
-- A sub-quadratic algorithm for converting digits to Integer.
-- First we collect blocks of `blockDigits`-digit Integers
-- (so we don't do anything besides simple (acc*10+digit) on most inputs).
-- Then we combine them:
-- Pairs of adjacent radix b digits are combined into a single radix b^2 digit.
-- This process is repeated until we are left with a single digit.
-- | Number of decimal digits accumulated per block before starting a
-- new one (see the sub-quadratic conversion comment above).
blockDigits :: Int
blockDigits = 40
-- | The radix of one block: @10 ^ blockDigits@.
startBase :: Integer
startBase = 10^blockDigits
-- | (num digits in current block, completed blocks (least significant
-- first), current block's value)
type IntegerReader = (Int, [Integer], Integer)
-- | Begin reading from a single leading digit.
startIR :: Integer -> IntegerReader
startIR d = (1, [], d)
{-# INLINE startIR #-}
{-# INLINE accIR #-}
{-# INLINE readIR #-}
-- | Fold one more decimal digit into the reader, pushing the current
-- block and starting a new one once it holds 'blockDigits' digits.
accIR :: IntegerReader -> Integer -> IntegerReader
accIR (n, blocks, !cur) d =
  if n < blockDigits
    then (n + 1, blocks, cur * 10 + d)
    else (1, cur : blocks, d)
-- | Combine the accumulated blocks into a single 'Integer'.  Adjacent
-- radix-@b@ blocks are repeatedly merged into radix-@b^2@ blocks until
-- one remains, giving sub-quadratic behaviour on long digit strings.
readIR :: IntegerReader -> Integer
readIR (_, [], cur) = cur
readIR (nDigits, blocks, cur) =
  combineAll startBase ((cur * pad) : blocks) `div` pad
  where
    -- Pad the partial block up to a full block, undone by the final div.
    pad = 10 ^ (blockDigits - nDigits)
    combineAll :: Integer -> [Integer] -> Integer
    combineAll _ [] = 0
    combineAll _ [x] = x
    combineAll b xs = combineAll (b * b) (pairUp b xs)
    pairUp :: Integer -> [Integer] -> [Integer]
    pairUp _ [] = []
    pairUp _ [x] = [x]
    pairUp b (lo : hi : rest) = merged : pairUp b rest
      where !merged = lo + hi * b
-- | Skip to just past the closing @*/@ of a comment, or to the end of
-- input if the comment is unterminated.
skipComment :: Text -> Text
skipComment input = case input of
  '*' :. '/' :. rest -> rest
  _ :. rest -> skipComment rest
  done -> done
-- | Drop leading CSS whitespace code points.
skipWhitespace :: Text -> Text
skipWhitespace input = case input of
  c :. rest | isWhitespace c -> skipWhitespace rest
  _ -> input
parseTokens :: Text -> [Token]
parseTokens t0@(Text _ _ len) = snd $ A.run2 $ do
dst <- A.new len
dsta <- A.unsafeFreeze dst
let go' !t d tgo = do
ts <- inlineInterleaveST $ go d tgo
return (t : ts)
go d tgo = case tgo of
c :. ts | isWhitespace c ->
go' Whitespace d (skipWhitespace ts)
'/' :. '*' :. ts -> go d (skipComment ts)
'<' :. '!' :. '-' :. '-' :. ts -> token CDO ts
'-' :. '-' :. '>' :. ts -> token CDC ts
',' :. ts -> token Comma ts
':' :. ts -> token Colon ts
';' :. ts -> token Semicolon ts
'(' :. ts -> token LeftParen ts
')' :. ts -> token RightParen ts
'[' :. ts -> token LeftSquareBracket ts
']' :. ts -> token RightSquareBracket ts
'{' :. ts -> token LeftCurlyBracket ts
'}' :. ts -> token RightCurlyBracket ts
'$' :. '=' :. ts -> token SuffixMatch ts
'*' :. '=' :. ts -> token SubstringMatch ts
'^' :. '=' :. ts -> token PrefixMatch ts
'|' :. '=' :. ts -> token DashMatch ts
'~' :. '=' :. ts -> token IncludeMatch ts
'|' :. '|' :. ts -> token Column ts
(parseNumericValue -> Just (repr, nv, ts))
| '%' :. ts' <- ts ->
go' (Percentage repr nv) d ts'
| Just u <- parseName ts -> do
(unit, d', ts') <- mkText dst d u
go' (Dimension repr nv unit) d' ts'
| otherwise ->
go' (Number repr nv) d ts
-- ident like
(parseName -> Just n) -> do
(name, d', ts) <- mkText dst d n
if isUrl name then
-- Special handling of url() functions (they are not really
-- functions, they have their own Token type).
case ts of
'(' :. (skipWhitespace -> ts') ->
case ts' of
'"' :. _ -> go' (Function name) d' ts'
'\'' :. _ -> go' (Function name) d' ts'
_ -> parseUrl d' ts'
_ -> go' (Ident name) d' ts
else
case ts of
'(' :. ts' -> go' (Function name) d' ts'
_ -> go' (Ident name) d' ts
'"' :. ts -> parseString '"' d ts
'\'' :. ts -> parseString '\'' d ts
'@' :. (parseName -> Just n) -> do
(name, d', ts) <- mkText dst d n
go' (AtKeyword name) d' ts
'#' :. (parseName -> Just n) -> do
(name, d', ts) <- mkText dst d n
go' (Hash HId name) d' ts
'#' :. (satisfyOrEscaped nameCodePoint -> Just n) -> do
(name, d', ts) <- mkText dst d (consumeName n)
go' (Hash HUnrestricted name) d' ts
c :. ts ->
token (Delim c) ts
_ -> return []
where token t ts = go' t d ts
isUrl t@(Text _ _ 3)
| u :. r :. l :. _ <- t =
(u == 'u' || u == 'U') &&
(r == 'r' || r == 'R') &&
(l == 'l' || l == 'L')
isUrl _ = False
-- https://drafts.csswg.org/css-syntax-3/#consume-string-token
parseString endingCodePoint d0 = string d0
where string d t = case t of
c :. ts | c == endingCodePoint -> ret d ts
'\\' :. ts
| Just (p, ts') <- escapedCodePoint ts -> do
d' <- p dst d
string d' ts'
| '\n' :. ts' <- ts ->
string d ts'
| Text _ _ 0 <- ts ->
string d ts
'\n' :. _ -> go' BadString d t
c :. ts -> do
write dst d c
string (d+1) ts
_ -> ret d t
ret d t = go' (String $ Text dsta d0 (d-d0)) d t
-- https://drafts.csswg.org/css-syntax/#consume-url-token
parseUrl d0 tUrl = url d0 (skipWhitespace tUrl)
where ret d ts = go' (Url (Text dsta d0 (d-d0))) d ts
url d t = case t of
')' :. ts -> ret d ts
c :. ts
| c == '"' || c == '\'' || c == '('
|| nonPrintableCodePoint c -> do
badUrl d ts
| isWhitespace c ->
whitespace d ts
'\\' :. ts
| Just (p, ts') <- escapedCodePoint ts -> do
d' <- p dst d
url d' ts'
| otherwise ->
badUrl d ts
c :. ts -> do
write dst d c
url (d+1) ts
_ ->
ret d t
whitespace d t = case t of
c :. ts -> do
if isWhitespace c then
whitespace d ts
else if c == ')' then
ret d ts
else
badUrl d ts
_ ->
ret d t
badUrl d t = case t of
')' :. ts -> go' BadUrl d ts
(escapedCodePoint' -> Just (_, ts)) -> do
badUrl d ts
_ :. ts ->
badUrl d ts
_ -> go' BadUrl d t
mkText :: A.MArray s -> Int -> Writer s -> ST s (Text, Int, Text)
mkText dest d w = do
(d', ts) <- w dest d
return (Text dsta d (d' - d), d', ts)
r <- go 0 t0
return (dst, r)
-- | CSS whitespace: tab, newline or space.  Carriage return and form
-- feed never reach the tokenizer; preprocessing replaces them with
-- newlines.
isWhitespace :: Char -> Bool
isWhitespace c = c == '\x0009' || c == '\x000A' || c == '\x0020'
-- | Non-printable code points as defined by the CSS syntax spec;
-- their appearance in an unquoted url() makes it a bad-url token.
nonPrintableCodePoint :: Char -> Bool
nonPrintableCodePoint c =
  c <= '\x0008' -- NULL through BACKSPACE
    || c == '\x000B' -- LINE TABULATION
    || (c >= '\x000E' && c <= '\x001F') -- SHIFT OUT through INFORMATION SEPARATOR ONE
    || c == '\x007F' -- DELETE
-- | Whether the character introduces a numeric exponent part.
isExponent :: Char -> Bool
isExponent c = c `elem` ['e', 'E']
| wereHamster/haskell-css-syntax | src/Data/CSS/Syntax/Tokens.hs | mit | 25,606 | 0 | 25 | 10,149 | 8,659 | 4,278 | 4,381 | 639 | 48 |
{-# LANGUAGE OverloadedStrings, CPP #-}
{-# LANGUAGE NamedFieldPuns, RecordWildCards #-}
{-# LANGUAGE BangPatterns #-}
module Network.Wai.Handler.Warp.HTTP2.Types where
import Control.Concurrent (forkIO)
import Control.Concurrent.STM
import Control.Exception (SomeException, bracket)
import qualified Data.ByteString as BS
import Data.ByteString.Builder (Builder)
import Data.IORef
import Data.IntMap.Strict (IntMap, IntMap)
import qualified Data.IntMap.Strict as M
import Network.HPACK hiding (Buffer)
import qualified Network.HTTP.Types as H
import Network.HTTP2
import Network.HTTP2.Priority
import Network.Wai (Request, FilePart)
import Network.Wai.Handler.Warp.HTTP2.Manager
import Network.Wai.Handler.Warp.Imports
import Network.Wai.Handler.Warp.Types
----------------------------------------------------------------
-- | The HTTP version value used for HTTP/2 connections (2.0).
http2ver :: H.HttpVersion
http2ver = H.HttpVersion 2 0
-- | Does this transport speak HTTP/2?  Plain TCP never does; for a TLS
-- transport we inspect the ALPN-negotiated protocol name.
-- NOTE(review): only draft identifiers ("h2-14", "h2-16", ...) match
-- the "h2-" prefix; the final ALPN token is plain "h2" -- confirm this
-- is intended for the HTTP/2 versions this code targets.
isHTTP2 :: Transport -> Bool
isHTTP2 TCP = False
isHTTP2 tls = useHTTP2
  where
    useHTTP2 = case tlsNegotiatedProtocol tls of
        Nothing -> False
        Just proto -> "h2-" `BS.isPrefixOf` proto
----------------------------------------------------------------
-- | A parsed incoming request together with the HTTP/2 stream it
-- arrived on, its decoded header value table, and per-connection info.
data Input = Input Stream Request ValueTable InternalInfo
----------------------------------------------------------------
-- | A continuation that fills a buffer with the next chunk of a
-- dynamically produced response body.
type DynaNext = Buffer -> BufSize -> WindowSize -> IO Next
-- | Number of bytes a 'DynaNext' step wrote into the buffer.
type BytesFilled = Int
-- | Result of one body-production step; 'Nothing' means the body is done.
data Next = Next !BytesFilled (Maybe DynaNext)
-- | A response, classified by how its body is produced:
-- no body, a streaming queue, a 'Builder', or a file (range).
data Rspn = RspnNobody H.Status (TokenHeaderList, ValueTable)
          | RspnStreaming H.Status (TokenHeaderList, ValueTable) (TBQueue Sequence)
          | RspnBuilder H.Status (TokenHeaderList, ValueTable) Builder
          | RspnFile H.Status (TokenHeaderList, ValueTable) FilePath (Maybe FilePart)
-- | The HTTP status carried by a response, regardless of body kind.
rspnStatus :: Rspn -> H.Status
rspnStatus rspn = case rspn of
    RspnNobody    s _     -> s
    RspnStreaming s _ _   -> s
    RspnBuilder   s _ _   -> s
    RspnFile      s _ _ _ -> s

-- | The token header list and value table of a response.
rspnHeaders :: Rspn -> (TokenHeaderList, ValueTable)
rspnHeaders rspn = case rspn of
    RspnNobody    _ t     -> t
    RspnStreaming _ t _   -> t
    RspnBuilder   _ t _   -> t
    RspnFile      _ t _ _ -> t
-- | A response queued for the sender loop, with its stream and hooks.
data Output = Output {
    outputStream :: !Stream
  , outputRspn :: !Rspn
  , outputII :: !InternalInfo
  , outputHook :: IO () -- OPush: wait for done, O*: telling done
  , outputH2Data :: IO (Maybe HTTP2Data)
  , outputType :: !OutputType
  }
-- | Kind of output entry: plain response, wait marker, server push,
-- or continuation of a dynamically produced body.
data OutputType = ORspn
                | OWait
                | OPush !TokenHeaderList !StreamId -- associated stream id from client
                | ONext !DynaNext
-- | The streaming queue of an output, when its response is streaming;
-- 'Nothing' for all other response kinds.
outputMaybeTBQueue :: Output -> Maybe (TBQueue Sequence)
outputMaybeTBQueue (Output _ (RspnStreaming _ _ tbq) _ _ _ _) = Just tbq
outputMaybeTBQueue _ = Nothing
-- | Connection-level control messages consumed by the sender loop.
data Control = CFinish
             | CGoaway !ByteString
             | CFrame !ByteString
             | CSettings !ByteString !SettingsList
             | CSettings0 !ByteString !ByteString !SettingsList
----------------------------------------------------------------
-- | Items flowing through a streaming body queue: end marker, flush
-- request, or a chunk of body bytes.
data Sequence = SFinish
              | SFlush
              | SBuilder Builder
-- | The context for HTTP/2 connection.
data Context = Context {
  -- HTTP/2 settings received from a browser
    http2settings :: !(IORef Settings)
  , firstSettings :: !(IORef Bool)
  , streamTable :: !StreamTable
  , concurrency :: !(IORef Int)
  , priorityTreeSize :: !(IORef Int)
  -- | RFC 7540 says "Other frames (from any stream) MUST NOT
  -- occur between the HEADERS frame and any CONTINUATION
  -- frames that might follow". This field is used to implement
  -- this requirement.
  , continued :: !(IORef (Maybe StreamId))
  , clientStreamId :: !(IORef StreamId)
  , serverStreamId :: !(IORef StreamId)
  , inputQ :: !(TQueue Input)
  , outputQ :: !(PriorityTree Output)
  , controlQ :: !(TQueue Control)
  , encodeDynamicTable :: !DynamicTable
  , decodeDynamicTable :: !DynamicTable
  -- the connection window for data from a server to a browser.
  , connectionWindow :: !(TVar WindowSize)
  }
----------------------------------------------------------------
-- | Allocate a fresh per-connection context with default settings and
-- empty tables/queues.  The applicative chain must stay in the same
-- order as the fields of 'Context'.
newContext :: IO Context
newContext = Context <$> newIORef defaultSettings
                     <*> newIORef False -- firstSettings
                     <*> newStreamTable
                     <*> newIORef 0 -- concurrency
                     <*> newIORef 0 -- priorityTreeSize
                     <*> newIORef Nothing -- continued
                     <*> newIORef 0 -- clientStreamId
                     <*> newIORef 0 -- serverStreamId
                     <*> newTQueueIO -- inputQ
                     <*> newPriorityTree -- outputQ
                     <*> newTQueueIO -- controlQ
                     <*> newDynamicTableForEncoding defaultDynamicTableSize
                     <*> newDynamicTableForDecoding defaultDynamicTableSize 4096
                     <*> newTVarIO defaultInitialWindowSize
-- | Release context resources; currently nothing needs explicit cleanup.
clearContext :: Context -> IO ()
clearContext _ctx = return ()
----------------------------------------------------------------
-- | Sub-state of an open stream: header assembly (JustOpened /
-- Continued), headers complete (NoBody / HasBody), or body transfer.
data OpenState =
    JustOpened
  | Continued [HeaderBlockFragment]
              !Int -- Total size
              !Int -- The number of continuation frames
              !Bool -- End of stream
              !Priority
  | NoBody (TokenHeaderList,ValueTable) !Priority
  | HasBody (TokenHeaderList,ValueTable) !Priority
  | Body !(TQueue ByteString)
         !(Maybe Int) -- received Content-Length
         -- compared the body length for error checking
         !(IORef Int) -- actual body length
-- | Why a stream reached the Closed state.
data ClosedCode = Finished
                | Killed
                | Reset !ErrorCodeId
                | ResetByMe SomeException
                deriving Show
-- | RFC 7540 stream lifecycle states.
data StreamState =
    Idle
  | Open !OpenState
  | HalfClosed
  | Closed !ClosedCode
  | Reserved
-- | True iff the stream has not been used yet.
isIdle :: StreamState -> Bool
isIdle st = case st of
    Idle -> True
    _    -> False

-- | True iff the stream is in any 'Open' sub-state.
isOpen :: StreamState -> Bool
isOpen st = case st of
    Open _ -> True
    _      -> False

-- | True iff the remote side has finished sending.
isHalfClosed :: StreamState -> Bool
isHalfClosed st = case st of
    HalfClosed -> True
    _          -> False

-- | True iff the stream is closed (for any 'ClosedCode').
isClosed :: StreamState -> Bool
isClosed st = case st of
    Closed _ -> True
    _        -> False
-- | State names for logging; Open and Closed payloads are summarized.
instance Show StreamState where
  show Idle = "Idle"
  show Open{} = "Open"
  show HalfClosed = "HalfClosed"
  show (Closed e) = "Closed: " ++ show e
  show Reserved = "Reserved"
----------------------------------------------------------------
-- | Per-stream mutable state: stream number, lifecycle state,
-- flow-control window, and scheduling precedence.
data Stream = Stream {
    streamNumber :: !StreamId
  , streamState :: !(IORef StreamState)
  , streamWindow :: !(TVar WindowSize)
  , streamPrecedence :: !(IORef Precedence)
  }
instance Show Stream where
  show s = show (streamNumber s)
-- | A client-initiated stream: starts 'Idle' with the given window and
-- the default precedence.
newStream :: StreamId -> WindowSize -> IO Stream
newStream sid win = Stream sid <$> newIORef Idle
                               <*> newTVarIO win
                               <*> newIORef defaultPrecedence
-- | A server-initiated (push) stream: allocates the next even stream
-- id from the context and starts in 'Reserved'.
newPushStream :: Context -> WindowSize -> Precedence -> IO Stream
newPushStream Context{serverStreamId} win pre = do
    sid <- atomicModifyIORef' serverStreamId inc2
    Stream sid <$> newIORef Reserved
               <*> newTVarIO win
               <*> newIORef pre
  where
    inc2 x = let !x' = x + 2 in (x', x')
----------------------------------------------------------------
-- | Mark a stream opened: bump the concurrency counter and move the
-- stream state to @Open JustOpened@.
opened :: Context -> Stream -> IO ()
opened Context{concurrency} Stream{streamState} = do
    atomicModifyIORef' concurrency (\x -> (x+1,()))
    writeIORef streamState (Open JustOpened)
-- | Mark a stream closed: drop it from the stream table, decrement the
-- concurrency counter, and record why it closed.
closed :: Context -> Stream -> ClosedCode -> IO ()
closed Context{concurrency,streamTable} Stream{streamState,streamNumber} cc = do
    remove streamTable streamNumber
    atomicModifyIORef' concurrency (\x -> (x-1,()))
    writeIORef streamState (Closed cc) -- anyway
----------------------------------------------------------------
-- | Map from stream id to 'Stream', shared by connection threads.
newtype StreamTable = StreamTable (IORef (IntMap Stream))
newStreamTable :: IO StreamTable
newStreamTable = StreamTable <$> newIORef M.empty
-- | Insert a stream; the new map is forced ('!') before publication to
-- avoid a thunk chain inside the IORef.
insert :: StreamTable -> M.Key -> Stream -> IO ()
insert (StreamTable ref) k v = atomicModifyIORef' ref $ \m ->
    let !m' = M.insert k v m
    in (m', ())
-- | Delete a stream by id (no-op when absent).
remove :: StreamTable -> M.Key -> IO ()
remove (StreamTable ref) k = atomicModifyIORef' ref $ \m ->
    let !m' = M.delete k m
    in (m', ())
-- | Look up a stream by id.
search :: StreamTable -> M.Key -> IO (Maybe Stream)
search (StreamTable ref) k = M.lookup k <$> readIORef ref
-- | Apply a window-size adjustment to every stream in the table (used
-- when the peer changes its initial window size via SETTINGS).
--
-- Uses the strict 'modifyTVar'': the lazy 'modifyTVar' would leave an
-- unevaluated @adst@ thunk in every stream's 'TVar', and repeated
-- updates would build a thunk chain (space leak) that is only forced
-- when the window is finally read.
updateAllStreamWindow :: (WindowSize -> WindowSize) -> StreamTable -> IO ()
updateAllStreamWindow adst (StreamTable ref) = do
    strms <- M.elems <$> readIORef ref
    forM_ strms $ \strm -> atomically $ modifyTVar' (streamWindow strm) adst
{-# INLINE forkAndEnqueueWhenReady #-}
-- | Fork a thread that blocks on @wait@ and then enqueues @out@.
-- The thread id is registered with the 'Manager' while forking.
-- NOTE(review): 'bracket' runs 'deleteMyId' as soon as the fork call
-- returns, not when the forked thread finishes -- confirm this matches
-- the intended 'Manager' registration semantics.
forkAndEnqueueWhenReady :: IO () -> PriorityTree Output -> Output -> Manager -> IO ()
forkAndEnqueueWhenReady wait outQ out mgr = bracket setup teardown $ \_ ->
    void . forkIO $ do
        wait
        enqueueOutput outQ out
  where
    setup = addMyId mgr
    teardown _ = deleteMyId mgr
{-# INLINE enqueueOutput #-}
-- | Enqueue an output into the priority tree at its stream's current
-- precedence.
enqueueOutput :: PriorityTree Output -> Output -> IO ()
enqueueOutput outQ out = do
    let Stream{..} = outputStream out
    pre <- readIORef streamPrecedence
    enqueue outQ streamNumber pre out
{-# INLINE enqueueControl #-}
-- | Enqueue a connection-level control message for the sender loop.
enqueueControl :: TQueue Control -> Control -> IO ()
enqueueControl ctlQ ctl = atomically $ writeTQueue ctlQ ctl
----------------------------------------------------------------
-- | HTTP/2 specific data.
--
-- Since: 3.2.7
data HTTP2Data = HTTP2Data {
    -- | Accessor for 'PushPromise' in 'HTTP2Data'.
    --
    -- Since: 3.2.7
    http2dataPushPromise :: [PushPromise]
    -- | Accessor for trailers sent after the response body.
    -- Since: 3.2.8
  , http2dataTrailers :: H.ResponseHeaders
  } deriving (Eq,Show)
-- | Default HTTP/2 specific data.
--
-- No push promises and no trailers.
--
-- Since: 3.2.7
defaultHTTP2Data :: HTTP2Data
defaultHTTP2Data = HTTP2Data [] []
-- | HTTP/2 push promise or sever push.
--
-- Since: 3.2.7
data PushPromise = PushPromise {
    -- | Accessor for a URL path in 'PushPromise'.
    -- E.g. \"\/style\/default.css\".
    --
    -- Since: 3.2.7
    promisedPath :: ByteString
    -- | Accessor for 'FilePath' in 'PushPromise'.
    -- E.g. \"FILE_PATH/default.css\".
    --
    -- Since: 3.2.7
  , promisedFile :: FilePath
    -- | Accessor for 'H.ResponseHeaders' in 'PushPromise'
    -- \"content-type\" must be specified.
    -- Default value: [].
    --
    --
    -- Since: 3.2.7
  , promisedResponseHeaders :: H.ResponseHeaders
    -- | Accessor for 'Weight' in 'PushPromise'.
    -- Default value: 16.
    --
    -- Since: 3.2.7
  , promisedWeight :: Weight
  } deriving (Eq,Ord,Show)
-- | Default push promise.
--
-- Since: 3.2.7
defaultPushPromise :: PushPromise
defaultPushPromise = PushPromise "" "" [] 16
| creichert/wai | warp/Network/Wai/Handler/Warp/HTTP2/Types.hs | mit | 10,741 | 0 | 19 | 2,691 | 2,543 | 1,351 | 1,192 | 306 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeFamilies #-} -- For type-level functions
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleInstances #-} -- For HList instances
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE ScopedTypeVariables #-}
module DependentTypes where
--
-- Sized Vectors
--
-- | Peano naturals, promoted with DataKinds to index 'Vector'.
data Nat = Zero | Succ Nat
  deriving Show
-- | A list whose length @n@ is tracked in the type.
data Vector (n :: Nat) (a :: *) where
  VNil :: Vector Zero a
  VCons :: a -> Vector n a -> Vector ('Succ n) a
-- | Render as e.g. @1<:>2<:><>@.
instance Show a => Show (Vector n a) where
  show VNil = "<>"
  show (VCons a as) = show a ++ "<:>" ++ show as
-- | Element-wise equality; vectors of the same type have equal length,
-- so the mismatched-constructor case is unreachable.
instance Eq a => Eq (Vector n a) where
  VNil == VNil = True
  (a `VCons` as) == (b `VCons` bs) = a == b && as == bs
  _ == _ = False
-- | Value-level Peano addition, accumulating into the second argument.
add :: Nat -> Nat -> Nat
add n Zero = n -- shortcut
add Zero n = n
add (Succ n) m = add n (Succ m)
-- | Convert a machine 'Int' to a Peano 'Nat'.
-- Negative inputs are clamped to 'Zero'; the previous guard
-- (@n == 0@) made the recursion diverge for any negative argument.
fromInt :: Int -> Nat
fromInt n
  | n <= 0 = Zero
  | otherwise = Succ $ fromInt (n - 1)
-- | Convert a Peano 'Nat' back to a machine 'Int'.
toInt :: Nat -> Int
toInt Zero = 0
toInt (Succ n) = 1 + (toInt n)
-- | Type-level addition of Peano naturals.
type family n :+ m where
  'Zero :+ n = n
  'Succ n :+ m = 'Succ (n :+ m)
-- | Type-level multiplication of Peano naturals.
-- BUG FIX: the previous final clause was @'Succ n :* m = n :* (m :+ m)@,
-- which doubles @m@ on every step and computes the wrong product
-- (e.g. 3 :* 2 reduced to 8).  Multiplication is repeated addition:
type family n :* m where
  'Zero :* m = 'Zero
  n :* 'Zero = 'Zero
  'Succ 'Zero :* m = m
  'Succ n :* m = m :+ (n :* m)
-- | Concatenate two vectors; the result length is @n :+ m@ by
-- construction, mirroring the recursion of ':+'.
append :: Vector n a -> Vector m a -> Vector (n :+ m) a
append VNil as = as
append (VCons a as) bs = VCons a (append as bs)
-- | Forget the length index and return an ordinary list.
toList :: Vector n a -> [a]
toList VNil = []
toList (VCons a as) = a : toList as
{-
fromList :: [a] -> Vector n a
fromList xs = let
n = fromInt $ length xs
in go n xs
where
go :: Nat -> [a] -> Vector n a
go Zero _ = VNil
go (Succ n) (x:xs) = append (x `VCons` VNil) (go n xs)
-}
-- | Length-preserving map over a vector.
vmap :: (a -> b) -> Vector n a -> Vector n b
vmap _ VNil = VNil
vmap f (a `VCons` as) = f a `VCons` (vmap f as)
-- | All elements but the last; the non-empty index makes this total.
vinit :: Vector ('Succ n) a -> Vector n a
vinit (a `VCons` as) = case as of
  VNil -> VNil
  _ `VCons` _ -> a `VCons` vinit as
-- | The last element.
-- NOTE(review): the signature admits @Vector 'Zero a@, for which there
-- is no matching clause (runtime pattern failure); a non-empty index
-- like vinit's would make this total -- confirm before tightening.
vlast :: Vector n a -> a
vlast (a `VCons` as) =
  case as of
    VNil -> a
    _ `VCons` _ -> vlast as
-- | Split a non-empty vector into head and tail; total by the index.
vuncons :: Vector ('Succ n) a -> (a, Vector n a)
vuncons (a `VCons` as) = (a, as)
-- | Zip two vectors of identical length; mismatched constructors are
-- impossible because both share the same index @n@.
zipWithSame :: (a -> b -> c) -> Vector n a -> Vector n b -> Vector n c
zipWithSame _ VNil _ = VNil
zipWithSame f (a `VCons` as) (b `VCons` bs) = f a b `VCons` zipWithSame f as bs
-- | Type-level minimum of two Peano naturals.
type family Min n m where
  Min 'Zero m = 'Zero
  Min n 'Zero = 'Zero
  Min ('Succ n) ('Succ m) = 'Succ (Min n m)
-- | Zip vectors of possibly different lengths; the result is as long
-- as the shorter input, tracked by 'Min'.
vZipWith :: (a -> b -> c) -> Vector n a -> Vector m b -> Vector (Min n m) c
vZipWith _ VNil VNil = VNil
vZipWith _ VNil (_ `VCons` _) = VNil
vZipWith _ (_ `VCons` _) VNil = VNil
vZipWith f (a `VCons` as) (b `VCons` bs) = f a b `VCons` vZipWith f as bs
-- | Right fold over a vector, analogous to 'foldr' on lists.
vfoldr :: (a -> b -> b) -> b -> Vector n a -> b
vfoldr _ seed VNil = seed
vfoldr f seed (a `VCons` as) = f a (vfoldr f seed as)
--
-- HLists
--
-- | Heterogeneous list: the type-level list @xs@ records the type of
-- every element.
data HList xs where
  HNil :: HList '[]
  (:::) :: a -> HList as -> HList (a ': as)
infixr 6 :::
-- | Show instances built by induction on the element-type list.
instance Show (HList '[]) where
  show HNil = "'[]"
instance (Show (HList rest), Show a) => Show (HList (a ': rest)) where
  show (a:::rest) = show a ++ " ::: " ++ show rest
--
-- Extensible Records
--
-- | A value tagged with a type-level name @s@ (a field of a record).
newtype s >> a = Named a
-- This is an hlist with named elements rather than just ordinal elements
data HRec xs where
  HEmpty :: HRec '[]
  HCons :: (s >> a) -> HRec xs -> HRec (s >> a ': xs)
-- | Base case of the inductive Show for records.
instance Show (HRec '[]) where
  show HEmpty = "HEmpty"
instance (Show a, KnownSymbol s) => Show (HRec (s >> a ': xs)) where
show (HCons (Named a) rest) =
let val = show a
key = symbolVal (undefined :: x s)
| ChrisCoffey/haskell_sandbox | DependentTypes.hs | mit | 3,635 | 1 | 12 | 1,004 | 1,661 | 878 | 783 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Control.Applicative
import Control.Monad (when)
import Options.Applicative
import qualified Data.ByteString as B
import qualified Data.ByteString.UTF8 as U
import Control.Exception (try, IOException)
import Data.Either (either)
import System.FilePath (replaceExtension)
import qualified Litany as L
-- | Parsed command-line arguments: the input file and the two
-- independent output modes (markdown emission, file extraction).
data Args = Args { infile :: String
                 , markdown :: Bool
                 , extract :: Bool } deriving (Eq, Show)
-- | optparse-applicative parser: one positional input file plus the
-- @-m@/@--markdown@ and @-e@/@--extract@ switches.
args :: Parser Args
args = Args <$> ( argument str (metavar "<infile>") )
            <*> switch
                ( long "markdown"
                <> short 'm'
                <> help "emit a markdown file basename.md" )
            <*> switch
                ( long "extract"
                <> short 'e'
                <> help "extract files from basename.lit" )
-- | Parser wrapped with @--help@ support and program description.
argsInfo :: ParserInfo Args
argsInfo = info ( helper <*> args )
                ( fullDesc <> progDesc "process a .lit file" <> header "litany" )
readErr = "readFile error"
-- | Read the input file as UTF-8, then run the requested actions:
-- write a markdown rendering (@-m@) and/or extract embedded files
-- (@-e@).  Read failures and check failures are printed line by line.
runMain :: Args -> IO ()
runMain (Args i m e) = do
  res <- (try $ B.readFile i >>= (return . U.toString) :: IO (Either IOException String))
  -- NOTE(review): the actual IOException 'err' is discarded here and
  -- replaced by the generic 'readErr' sentinel -- confirm losing the
  -- OS error detail is intended.
  let s = case res of
            Left err -> readErr
            Right s0 -> s0
  if not m && not e
    then putStrLn "error: must choose at least one of {-m,-e}"
    else either
         (mapM_ putStrLn)
         -- Success branch: run whichever outputs were requested.
         (\s' -> sequence_ [when m (L.writeMarkdown (replaceExtension i "md") s'),
                            when e (L.writeEmbeddedFiles s')])
         -- A read failure is routed through the Left (error) branch.
         (if s == readErr then Left [s] else L.check s)
-- | Entry point: parse command-line arguments and dispatch to 'runMain'.
main :: IO ()
main = execParser argsInfo >>= runMain
| sshastry/litany | Main.hs | mit | 1,681 | 0 | 17 | 541 | 498 | 263 | 235 | 43 | 4 |
-- | True when the queue's elements are in non-decreasing order
-- (each element <= the element enqueued after it).
-- FIX: the constraint was written @(ord a)@ with a lowercase class
-- name, which does not compile; the class is 'Ord'.
ordenadacola :: Ord a => Cola a -> Bool
ordenadacola CV = True
ordenadacola (CV :. _) = True
-- For two or more elements: the newest pair must be ordered and the
-- rest of the queue must itself be ordered.
ordenadacola (c :. y :. x) = y <= x && ordenadacola (c :. y)
| josegury/HaskellFuntions | Colas/Boolean-Ordenada.hs | mit | 171 | 0 | 8 | 35 | 87 | 46 | 41 | -1 | -1 |
{-# LANGUAGE RankNTypes #-}
module Firestone.Database ( lookupMinion
, lookupMinionM
, lookupMinions
, lookupCard
, lookupCards
) where
import Firestone.Types
import Firestone.Game
import Firestone.Player
import Firestone.IdGenerator
import Control.Lens
import Control.Monad.State
-- | Run a single-item lookup over a list of names, threading the id
-- generator through the calls.  Because this is a 'foldr', the
-- generator visits the names from last to first, while the results
-- keep the input order.
--
-- The lookup parameter was previously named @lookup@, shadowing
-- 'Prelude.lookup'; it is renamed, and the lens pipeline is replaced
-- by a plain let-binding with identical behaviour.
lookupMultiple :: (IdGenerator -> String -> (a, IdGenerator))
               -> IdGenerator -> [String] -> ([a], IdGenerator)
lookupMultiple lookupOne gen0 = foldr go ([], gen0)
  where
    go name (rest, gen) =
      let (x, gen') = lookupOne gen name
      in (x : rest, gen')
-- | Look up several minion templates by name (see 'lookupMultiple'
-- for how the generator is threaded).
lookupMinions :: IdGenerator -> [String] -> ([Minion], IdGenerator)
lookupMinions = lookupMultiple lookupMinion
-- | Look up several card templates by name.
lookupCards :: IdGenerator -> [String] -> ([Card], IdGenerator)
lookupCards = lookupMultiple lookupCard
-- | State-monad wrapper around 'lookupMinion': reads the id generator
-- from the game state and writes the advanced generator back.
lookupMinionM :: String -> State Game Minion
lookupMinionM name = do
  gen1 <- use idGen
  let (minion, gen2) = lookupMinion gen1 name
  idGen .= gen2
  return minion
-- | Look up a minion template by name, minting a fresh id and creation
-- timestamp from the generator.
-- NOTE(review): unknown names have no clause and raise a runtime
-- pattern-match failure -- confirm callers only pass known names.
--
-- FIX: the clauses of this function were previously interrupted by the
-- definition of 'summonImp' (between the \"Imp Gang Boss\" and \"Imp\"
-- equations); GHC requires all equations of a binding to be
-- contiguous, so 'summonImp' now follows the last clause.
lookupMinion :: IdGenerator -> String -> (Minion, IdGenerator)
lookupMinion gen name@"Oasis Snapjaw" = (minion, newGen)
  where
    (mId, mTime, newGen) = create gen name
    minion = makeMinion mId name 2 7 None [] True mTime []
lookupMinion gen name@"Murloc Raider" = (minion, newGen)
  where
    (mId, mTime, newGen) = create gen name
    minion = makeMinion mId name 2 1 Murloc [] True mTime []
lookupMinion gen name@"Magma Rager" = (minion, newGen)
  where
    (mId, mTime, newGen) = create gen name
    minion = makeMinion mId name 5 1 None [] True mTime []
lookupMinion gen name@"Imp Gang Boss" = (minion, newGen)
  where
    (mId, mTime, newGen) = create gen name
    minion = makeMinion mId name 2 4 Demon [] True mTime [Trigger MinionDamaged summonImp]
lookupMinion gen name@"Imp" = (minion, newGen)
  where
    (mId, mTime, newGen) = create gen name
    minion = makeMinion mId name 1 1 Demon [] True mTime []
lookupMinion gen name@"Murloc Tinyfin" = (minion, newGen)
  where
    (mId, mTime, newGen) = create gen name
    minion = makeMinion mId name 1 1 Murloc [] True mTime []

-- | Trigger for \"Imp Gang Boss\": when the boss itself is damaged
-- (@isMe@), summon an Imp just after its board position; otherwise do
-- nothing.
summonImp :: Bool -> MinionLens -> State Game ()
summonImp isMe m
  | isMe = do
      imp <- lookupMinionM "Imp"
      me <- prerror m "Invalid minion sent to summonImp"
      position <- positionOf me
      zoom (ownerOf me) $ summonMinionAt (position + 1) imp
  | otherwise = return ()
-- | Look up a card template by name, minting a fresh id from the
-- generator (the creation timestamp from 'create' is unused for cards).
-- NOTE(review): unknown names have no clause and raise a runtime
-- pattern-match failure -- confirm callers only pass known names.
lookupCard :: IdGenerator -> String -> (Card, IdGenerator)
lookupCard gen name@"Oasis Snapjaw" = (card, newGen)
  where
    (cId, _, newGen) = create gen name
    card = makeCard cId name 4 (Just 2) (Just 7) MinionCard "" False
lookupCard gen name@"Murloc Raider" = (card, newGen)
  where
    (cId, _, newGen) = create gen name
    card = makeCard cId name 1 (Just 2) (Just 1) MinionCard "" False
lookupCard gen name@"Magma Rager" = (card, newGen)
  where
    (cId, _, newGen) = create gen name
    card = makeCard cId name 3 (Just 5) (Just 1) MinionCard "" False
lookupCard gen name@"Imp Gang Boss" = (card, newGen)
  where
    (cId, _, newGen) = create gen name
    card = makeCard cId name 3 (Just 2) (Just 4) MinionCard "" False
lookupCard gen name@"Imp" = (card, newGen)
  where
    (cId, _, newGen) = create gen name
    card = makeCard cId name 1 (Just 1) (Just 1) MinionCard "" False
lookupCard gen name@"Murloc Tinyfin" = (card, newGen)
  where
    (cId, _, newGen) = create gen name
    card = makeCard cId name 0 (Just 1) (Just 1) MinionCard "" False
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Models where
import Control.Applicative
import Data.Aeson
import Data.Validation.Aeson
import Data.Validation.Historical
import Validation
-- # Models
-- | A parent record: a bounded-length name, an optional single child,
-- and a list of children.
data Parent = Parent { parentName :: String32
                     , parentChild :: Maybe Child
                     , parentChildren :: [Child]
                     } deriving (Eq, Show)
-- | A child record with just a bounded-length name.
data Child = Child { childName :: String32
                   } deriving (Eq, Show)
-- # Smart constructors
-- | Smart constructor for 'Parent'; each argument's validation history
-- is annotated with its field name via @>:@.
parent :: V String32 -> V (Maybe Child) -> V [Child] -> V Parent
parent pName pChild pChildren = Parent
  <$> pName >: "name"
  <*> pChild >: "child"
  <*> pChildren >: "children"
-- | Smart constructor for 'Child' with the same field annotation.
child :: V String32 -> V Child
child cName = Child
  <$> cName >: "name"
-- # Aeson instances
-- | JSON decoding that accumulates validation failures: required
-- fields use @.::@, the optional child uses @.::?@, and both feed the
-- smart constructors above.
instance FromJSON (VA Child) where
  parseJSON = withObjectV parse
    where parse o = child
                    <$> o .:: "name"
instance FromJSON (VA Parent) where
  parseJSON = withObjectV parse
    where parse o = parent
                    <$> o .:: "name"
                    <*> o .::? "child"
                    <*> o .:: "children"
module GCD where
-- | Greatest common divisor via Euclid's algorithm.
-- If `b > a` we'll just swap them on the first run.
-- FIX: @myGCD a 0@ previously evaluated @a `mod` 0@ and raised a
-- divide-by-zero exception; by convention gcd a 0 = a.
myGCD :: Integral a => a -> a -> a
myGCD a 0 = a
myGCD a b = if remainder == 0
            then b
            else myGCD b remainder
  where remainder = a `mod` b
-- | GCD with an explicit helper that carries the previous remainder;
-- the third argument reaching 0 means the previous divisor divides
-- evenly and is the answer.
myRecGCD :: Integral a => a -> a -> a
myRecGCD a b = goMyRecGCD a b 1

-- | Worker: @a@ is the current dividend, @b@ the divisor, and the
-- third argument the remainder from the previous round.
-- FIX: a clause for @b == 0@ guards the @a `mod` 0@ divide-by-zero
-- that occurred when the original second argument was 0.
goMyRecGCD :: Integral a => a -> a -> a -> a
goMyRecGCD a _ 0 = a
goMyRecGCD a 0 _ = a
goMyRecGCD a b _ = goMyRecGCD b remainder remainder
  where
    remainder = a `mod` b
| raventid/coursera_learning | haskell/will_kurt/7.1_myGCD.hs | mit | 338 | 0 | 7 | 103 | 106 | 57 | 49 | 9 | 2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.