code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE DeriveDataTypeable #-}
module DrupalNodeTaxonomy
where
import qualified Text.JSON.Generic as JSON
-- | One taxonomy term reference as Drupal serializes it: only the term
-- id ("tid"), kept as a String to mirror the JSON field verbatim.
data DrupalNodeTaxonomy = DrupalNodeTaxonomy
    { tid :: String
    } deriving (Show, JSON.Data, JSON.Typeable)
-- | Wrapper matching Drupal's field layout: the "und" (undefined
-- language) key holds the list of term references attached to a node.
data DrupalNodeTaxonomyRelation = DrupalNodeTaxonomyRelation
    { und :: [DrupalNodeTaxonomy]
    } deriving (Show, JSON.Data, JSON.Typeable)
| shaan7/ghost-drupal-import | DrupalNodeTaxonomy.hs | gpl-2.0 | 375 | 0 | 9 | 62 | 85 | 51 | 34 | 9 | 0 |
{-
teafree, a Haskell utility for tea addicts
Copyright (C) 2013 Fabien Dubosson <fabien.dubosson@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
{-# LANGUAGE OverloadedStrings #-}
module Teafree.Interaction.PPrint
( Colorized
, PPrint(..)
) where
import Text.PrettyPrint.ANSI.Leijen
-- | Whether output should use ANSI colors.
type Colorized = Bool
-- | Placeholder text rendered for absent ('Nothing') values.
undef :: String
undef = "Not defined"
-- | Pretty-printable values at three levels of detail; the 'Colorized'
-- argument selects whether ANSI colors are used.
class PPrint a where
    ppName :: Colorized -> a -> Doc
    ppDetails :: Colorized -> a -> Doc
    ppSummary :: Colorized -> a -> Doc
    pprint :: Colorized -> a -> Doc
    -- Default layout: the name, then the details indented four columns
    -- beneath it ((<$>) here is Leijen's line-break combinator, not fmap).
    pprint c a = (ppName c a) <$> indent 4 (ppDetails c a)
-- | Lifts pretty-printing through 'Maybe': a missing value renders as
-- the "Not defined" placeholder (yellow when colorized); a present
-- value delegates to the underlying instance.
instance (PPrint a) => PPrint (Maybe a) where
    ppName True Nothing = yellow . text $ undef
    ppName False Nothing = text undef
    -- BUG FIX: this clause previously delegated to 'ppDetails', so the
    -- *name* of a wrapped value rendered as its full details.
    ppName c (Just v) = ppName c v
    ppDetails True Nothing = yellow . text $ undef
    ppDetails False Nothing = text undef
    ppDetails c (Just v) = ppDetails c v
    ppSummary True Nothing = yellow . text $ undef
    ppSummary False Nothing = text undef
    ppSummary c (Just v) = ppSummary c v
| StreakyCobra/teafree | src/Teafree/Interaction/PPrint.hs | gpl-3.0 | 1,665 | 0 | 10 | 397 | 319 | 166 | 153 | 24 | 1 |
module Infsabot.Board.Logic (
Board(Board),
boardContents, boardRobots, boardSize, boardTime,
(!!!), setRobot, robotAt, updateSpot, robotAlongPath, inBoard,
listOfRobots,
startingBoard,
GameSpot(GameSpot), toSeenSpot
) where
import Data.Vector(Vector, (!), (//), fromList)
import Infsabot.Tools.Interface(isPrime, unNatural)
import Infsabot.Base.Interface
import Infsabot.Robot.Interface
import Infsabot.RobotAction.Interface
import Infsabot.Parameters
import qualified Data.Map as M
-- Represents a Spot on the Board as seen by a robot.
-- This contains a Board Spot, which the Robot can always see, and a
-- robot's appearance iff there is a robot at that spot.
data GameSpot = GameSpot BoardSpot (Maybe Robot) deriving (Show)
-- Converts a GameSpot to a SeenSpot by reducing the full Robot (if
-- any) to just its externally visible appearance.
toSeenSpot :: GameSpot -> SeenSpot
toSeenSpot (GameSpot s Nothing) = SeenSpot s Nothing
toSeenSpot (GameSpot s (Just rob)) = SeenSpot s $ Just $ robotAppearance rob
-- Represents a board.
data Board = Board {
        -- The contents of the board
        -- In the form of a RAL of RALs of GameSpots, forming a Matrix
        -- (indexed as boardContents .!. x .!. y; see (!!!) below)
        boardContents :: RAL (RAL GameSpot),
        -- The robots on the Board, keyed by their (x, y) position
        boardRobots :: M.Map (Int, Int) Robot,
        -- The size of the Board
        boardSize :: Int,
        -- The Current Time of the Board
        boardTime :: Int
    } deriving (Show)
-- Gets the game spot at the given board location, or Nothing when the
-- coordinates fall outside the board.
(!!!) :: Board -> (Int, Int) -> Maybe GameSpot
b !!! (x, y)
    | inBoard b (x, y) = Just $ boardContents b .!. x .!. y
    | otherwise = Nothing
-- Sets the game spot at the given board location to the given value.
-- Out-of-bounds writes are silently ignored: the board is returned
-- unchanged.
(!->) :: Board -> (Int, Int) -> GameSpot -> Board
(b !-> (x, y)) gs
    | inBoard b (x, y) = b {boardContents = newcontents}
    | otherwise = b
    where
        -- The old column x
        oldx = boardContents b .!. x
        -- The updated column x with the new value of y
        newx = update y gs oldx
        -- The updated board with the element at (x, y)
        newcontents = update x newx $ boardContents b
-- Creates a starting square board with a given size
-- This board contains one robot from each team, placed at opposite
-- corners: team A at (size-1, 0) and team B at (0, size-1).
startingBoard :: Parameters -> (Team -> RobotProgram) -> Board
startingBoard p programOf
    = setRobot (0, unNatural (paramBoardSize p) - 1) (bot B) $
        setRobot (unNatural (paramBoardSize p) - 1, 0) (bot A)
        Board {
            boardContents = startingSpots,
            boardRobots = M.fromList [],
            boardSize = unNatural $ paramBoardSize p,
            boardTime = 0
        }
    where
    startingSpots :: (RAL (RAL GameSpot))
    -- NOTE(review): [0..n] yields n+1 entries for a board of size n;
    -- the extra row/column is unreachable through inBoard, but confirm
    -- the off-by-one is intentional.
    startingSpots = ys <$> fromList [0..unNatural (paramBoardSize p)]
        where
        ys x = initialColor <$> fromList [0..unNatural (paramBoardSize p)]
            where
            initialColor :: Int -> GameSpot
            -- Material is seeded where x*x + y*y is prime; the pattern
            -- is symmetric in x and y, so neither team is favored.
            initialColor y =
                if isPrime (x * x + y * y)
                    then GameSpot SpotMaterial Nothing
                    else GameSpot SpotEmpty Nothing
    bot team = Just $ defaultRobot p team (programOf team)
-- Sets the robot at the given spot to the given value, or deletes it.
-- 1. places the robot at the gamespot at the given coordinates
-- 2. Adds the robot to (or removes it from) the map of robots
-- Out-of-bounds coordinates leave the board unchanged.
setRobot :: (Int, Int) -> Maybe Robot -> Board -> Board
setRobot (x, y) rob b = delRobot $ b !!! (x, y)
    where
        delRobot Nothing = b
        delRobot (Just (GameSpot oldMaterial _))
            = newB {boardRobots = newRobots rob}
            where
            -- Keep the spot's material, replace only the robot slot.
            newB = b !-> (x, y) $ GameSpot oldMaterial rob
            newRobots Nothing = M.delete (x, y) oldRobots
            newRobots (Just robot) = M.insert (x, y) robot oldRobots
            oldRobots = boardRobots newB
--Updates the given spot to the new value, preserving any robot there
updateSpot :: (Int, Int) -> BoardSpot -> Board -> Board
updateSpot (x, y) spot b = b !-> (x, y) $ GameSpot spot (robotAt b (x, y))
-- Gets the robot at the given position, if it exists
robotAt :: Board -> (Int, Int) -> Maybe Robot
robotAt b pos = (b !!! pos) >>= (\(GameSpot _ rob) -> rob)
-- Finds the first robot along the given direction from the given
-- position, which may be up to n paces away.
-- NOTE(review): despite the original comment "but not the robot at the
-- given position", the first iteration *does* inspect the starting
-- square; callers presumably pass an already-advanced position --
-- confirm before relying on either reading.
robotAlongPath :: Team -> Board -> (Int, Int) -> RDirection -> Int -> Maybe (Int, Int, Robot)
robotAlongPath _ _ _ _ 0 = Nothing
robotAlongPath team b (x, y) dir n
    = case robotAt b (x, y) of
        Nothing -> robotAlongPath team b (applyDirection team dir (x, y)) dir (n-1)
        Just rob -> Just (x, y, rob)
-- | Bounds check: a coordinate pair is on the board when both
-- components lie in the half-open interval [0, boardSize).
inBoard :: Board -> (Int, Int) -> Bool
inBoard b (x, y) = withinBounds x && withinBounds y
    where withinBounds c = 0 <= c && c < boardSize b
-- All robots on the board, paired with their positions.
listOfRobots :: Board -> [PositionedRobot]
listOfRobots b = map PositionedRobot $ M.toList $ boardRobots b
-- RAL ("random access list") is currently backed by Data.Vector.
type RAL = Vector
-- 0-based indexing into a RAL.
(.!.) :: RAL a -> Int -> a
(.!.) = (!)
-- Functional update: a copy of the vector with index n replaced by x.
update :: Int -> a -> RAL a -> RAL a
update n x v = v // [(n, x)]
| kavigupta/Infsabot | Infsabot/Board/Logic.hs | gpl-3.0 | 4,988 | 0 | 16 | 1,316 | 1,469 | 808 | 661 | -1 | -1 |
module Main where
import System.Exit
import System.Console.GetOpt
import Data.Time.Clock.POSIX
import Data.List
import Network
import Verbosity
import HostPortStatus
import CheckOpen
import Flags
-- | Entry point: parse command-line options, optionally print usage
-- and exit, then scan the requested subnets/ports and report results.
main :: IO ()
main = do
    whenLoud $ putStrLn "Hello world."
    (flags, _, _) <- processOptions
    let timeout = flagTimeout flags
        subnets = flagSubnets flags
        ports = map (PortNumber . fromIntegral) $ flagPorts flags
    -- Help requested: show usage and stop before doing any scanning.
    if flagHelp flags
        then do
            putStrLn $ flip usageInfo options $
                "Usage: port-scan [options]\n" ++
                "Scan open ports on specified subnets and ports.\n"
            exitSuccess
        else return ()
    startTime <- getPOSIXTime
    hostPorts <- filterOpenPortsMany timeout (parseHostsPortsIPv4 subnets ports)
    -- Two report formats: hosts with every requested port open, or the
    -- full list of open (host, port) pairs.
    putStrLn $ if flagWithAllPorts flags
                   then showHostNames $ filterAllPorts (length ports) hostPorts
                   else showHostPorts hostPorts
    whenLoud $ do
        endTime <- getPOSIXTime
        putStrLn $ "Total time used: " ++ show (endTime - startTime)
    whenLoud $ putStrLn "Goodbye world."
-- | Render the open (host, port) pairs, one per line, under a fixed
-- header. Uses 'intercalate' (already in scope from Data.List) instead
-- of the hand-rolled @concat . intersperse@.
showHostPorts :: [HostPort] -> String
showHostPorts xs =
    "Open HostPorts:\n" ++ intercalate "\n" (map show xs)
-- | Render the hosts with all requested ports open, one per line,
-- under a fixed header. Uses 'intercalate' (already in scope from
-- Data.List) instead of the hand-rolled @concat . intersperse@.
showHostNames :: [HostName] -> String
showHostNames xs =
    "Open HostNames:\n" ++ intercalate "\n" xs
| waterret/PortScan-haskell | src/Main.hs | gpl-3.0 | 1,366 | 0 | 13 | 341 | 369 | 186 | 183 | 41 | 3 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeSynonymInstances #-}
module SgFrontendTest where
import Control.Monad
import Control.Monad.State
import Data.HList
-- | Marker type for draw commands (placeholder in this experiment).
data Draw = Draw
-- | Tags a value as a renderer resource.
newtype Resource a = Resource a
-- | Things that may appear as scene-graph attributes.
class Attrib a
instance Attrib (Resource a)
instance Attrib a => Attrib [a]
-- | Placeholder render-state type.
data RenderState = A
{-
- myStyle = style $
- module Default
- {
- RenderState = do diffuseColorTexture $= texA
- shader $= shaderA
- module House
- {
- module Window
- {
- RenderState = do shader $= mirror
- }
- module Wall
- {
- RenderState = do shader $= dirt
- }
- }
- }
- sg = use (myStyle.Default) $ do
-
-}
-- | 3-component vector used for transforms.
type Vec = (Float,Float,Float)
type Name = String
-- | Placeholder for the module/style concept sketched above.
data Module = Module
-- | Minimal scene-graph AST; a GADT so 'SetAttribute' can constrain
-- its payload to 'Attrib' instances.
data Sg where
    DrawIndexed :: Int -> Sg
    Group :: [Sg] -> Sg
    Transform :: Vec -> Sg -> Sg
    Using :: Module -> Sg -> Sg
    SetAttribute :: Attrib a => Name -> a -> Sg
data PrimitiveGeometry = Cube | Sphere
-- | Stub: converting a primitive into scene-graph form is not yet
-- implemented (evaluating the result diverges via 'undefined').
primitive :: PrimitiveGeometry -> Sg
primitive = undefined
-- Example scene used while experimenting with the frontend.
testSg = Group [
    Transform (0,0,0) (DrawIndexed 10),
    Transform (10,10,10) (primitive Cube)
    ]
{-
data FootNMouth = FootNMouth
key = firstLabel FootNMouth "key"
name = nextLabel key "name"
breed = nextLabel name "breed"
price = nextLabel breed "price"
-}
-- Example HList record with a single field "jac" holding 42.
unpricedAngus = "jac" .=. (42::Integer)
    .*. emptyRecord
-- An empty HList record.
blubber = emptyRecord
| haraldsteinlechner/lambdaWolf | SgFrontendTest.hs | gpl-3.0 | 1,572 | 0 | 9 | 542 | 285 | 163 | 122 | -1 | -1 |
module Model.Types( TransformationMatrix
, TransformationVector
, Translation
, Rotation
, Scale
, ColorRGB
, GLfloat
, GLint
, GLuint
) where
import Graphics.Rendering.OpenGL
import qualified Linear as L
-- | 4x4 transformation matrix with GLfloat entries.
type TransformationMatrix = L.M44 GLfloat
-- | 3-component vector used when building transformations.
type TransformationVector = L.V3 GLfloat
-- | Translation offset as a 3-vector.
type Translation = L.V3 GLfloat
-- | Scale factors as a 3-vector.
type Scale = L.V3 GLfloat
-- | Rotation represented as a 3-vector.
type Rotation = L.V3 GLfloat
-- | RGB color with GLfloat channels.
type ColorRGB = L.V3 GLfloat
| halvorgb/AO2D | src/Model/Types.hs | gpl-3.0 | 610 | 0 | 6 | 264 | 113 | 68 | 45 | 17 | 0 |
module Pictikz.Output.Tikz where
import Data.Maybe
-- | Things that can be rendered to a TikZ fragment.
class Drawable a where
  draw :: a -> String
-- Render one TikZ \node: style options, node name, position, text
-- alignment, and label.
node style (x,y) name alignment label = concat $
  [ "\\node"
  , "[" ++ csl style ++ "]"
  , "(" ++ name ++ ")"
  , " at "
  , "(" ++ show x ++ ", " ++ show y ++ ") "
  , "[ align=" ++ alignment ++"]"
  , "{" ++ label ++ "};"
  ]
-- Render one TikZ \draw edge between two named nodes, with style
-- options.
edge style from to = concat $
  [ "\\draw"
  , "[" ++ csl style ++ "]"
  , "(" ++ from ++ ")"
  , " edge "
  , "(" ++ to ++ ");"
  ]
-- Wrap an already-rendered element in a beamer \uncover overlay
-- specification; 'Nothing' bounds render as empty, giving open ranges
-- such as <2-> or <->.
uncover begin end element =
  "\\uncover" ++ "<" ++ lo ++ "-" ++ hi ++ ">" ++ "{" ++ element ++ "}"
  where
    lo = fromMaybe "" begin
    hi = fromMaybe "" end
-- | Comma-separate a list of strings ("comma-separated list"), for use
-- inside TikZ option brackets.
--
-- BUG FIX: the previous version emitted a duplicated separator between
-- the first and second items (csl ["a","b"] produced "a, , b").
csl :: [String] -> String
csl [] = []
csl (x:xs) = x ++ concatMap (", " ++) xs
-- Wrap a drawable's rendering in a tikzpicture environment.
tikzpicture d =
  "\\begin{tikzpicture}\n" ++ draw d ++ "\\end{tikzpicture}\n"
| mgmillani/pictikz | src/Pictikz/Output/Tikz.hs | gpl-3.0 | 792 | 0 | 12 | 222 | 345 | 182 | 163 | 29 | 1 |
{-# LANGUAGE OverloadedStrings
, LambdaCase
, RecordWildCards
#-}
module Glyph
( Glyph(..)
, parseKvg
, renderXhtml
, glyphName
) where
import Control.Applicative ((<$>))
import Control.DeepSeq (force, NFData(..))
import Data.Generics.Aliases (orElse)
import Data.List (isPrefixOf)
import Data.Maybe (catMaybes)
import Text.XML.Light as X
import Text.XML.Light.Input as X
import Text.XML.Light.Output as X
import Text.XML.Light.Proc as X
-- | A KanjiVG glyph: either a single SVG path (one stroke) or a named
-- group of sub-glyphs.
data Glyph
  = Path { pathData :: String }
  | Group
    { groupName :: Maybe String
    , groupSubGlyphs :: [Glyph]
    }
  deriving (Show, Eq)
instance NFData Glyph where
  rnf = \case
    Path{..} -> rnf pathData
    Group{..} -> rnf groupName `seq` rnf groupSubGlyphs
-- | The character a group represents: the first character of its
-- kvg:element name, when present; bare paths have no name.
-- NOTE(review): 'head' is partial here if an element name is ever the
-- empty string -- confirm KanjiVG never emits kvg:element="".
glyphName :: Glyph -> Maybe Char
glyphName = \case
  Group{..} -> head <$> groupName
  Path{..} -> Nothing
-- Standard SVG / XLink namespace attributes and qualified-name helpers.
xmlnsAttr = uqAttr "xmlns" "http://www.w3.org/2000/svg"
xlinkAttr = X.Attr (xmlnsName "xlink") "http://www.w3.org/1999/xlink"
xmlnsName = prefixName "xmlns"
xlinkName = prefixName "xlink"
kvgName = prefixName "kvg"
-- | Build a qualified name with the given prefix and no URI.
prefixName :: String -> String -> X.QName
prefixName prefix name = X.QName{ qPrefix = Just prefix, qName = name, qURI = Nothing }
-- | Parse a KanjiVG SVG document (as a string) into a 'Glyph'.
parseKvg :: String -> Maybe Glyph
parseKvg kvg = do
  xml <- X.parseXMLDoc kvg
  parseKvg' xml
-- | Serialize a glyph back into a standalone 109x109 SVG document.
writeKvg :: Glyph -> String
writeKvg glyph = X.showTopElement $ kvg
  where
    kvg = X.unode "svg" (attrs, [writeKvg' glyph])
    attrs =
      [ xmlnsAttr
      , uqAttr "width" $ show size
      , uqAttr "height" $ show size
      , uqAttr "viewBox" "0 0 109 109"
      ]
    size = 109
-- | Locate the KanjiVG stroke-path group inside a parsed SVG document
-- and convert its first child into a 'Glyph'.
parseKvg' :: X.Element -> Maybe Glyph
parseKvg' xml = do
  svg <- filterName (("svg" ==) . X.qName) xml
  strokePaths <- X.filterElement isStrokePathGroup svg
  -- BUG FIX: was 'head (X.elChildren strokePaths)', which crashed on an
  -- empty stroke-path group; now fails gracefully with 'Nothing'.
  case X.elChildren strokePaths of
    (child : _) -> parseKvgGlyph child
    [] -> Nothing
-- | Keep the element only when its name satisfies the predicate.
filterName :: (X.QName -> Bool) -> X.Element -> Maybe X.Element
filterName pred elem = if pred $ X.elName elem then Just elem else Nothing
-- | Recognize the <g id="kvg:StrokePaths..."> container element.
isStrokePathGroup :: X.Element -> Bool
isStrokePathGroup elem = X.qName (X.elName elem) == "g" && idHasPrefix "kvg:StrokePaths" elem
-- | Does the element's "id" attribute start with the given prefix?
-- False when there is no id attribute at all.
idHasPrefix :: String -> X.Element -> Bool
idHasPrefix prefix elem =
  case X.findAttr (X.unqual "id") elem of
    Just id -> isPrefixOf prefix id
    Nothing -> False
-- | Parse either form of glyph node, trying a path first.
parseKvgGlyph :: X.Element -> Maybe Glyph
parseKvgGlyph elem = parseKvgPath elem `orElse` parseKvgGroup elem
-- | A <path> element with a "d" attribute becomes a stroke.
parseKvgPath :: X.Element -> Maybe Glyph
parseKvgPath elem = do
  path <- filterName (("path" ==) . X.qName) elem
  d <- X.findAttr (X.unqual "d") path
  return $ Path{ pathData = d }
-- | A <g> element becomes a group named by its kvg:element attribute;
-- children that fail to parse are silently dropped (catMaybes).
parseKvgGroup :: X.Element -> Maybe Glyph
parseKvgGroup elem = do
  g <- filterName (("g" ==) . X.qName) elem
  let name = X.findAttr (kvgName "element") g
      subGlyphs = catMaybes $ map parseKvgGlyph $ X.elChildren g
  return $ Group{ groupName = name, groupSubGlyphs = subGlyphs }
-- | Serialize a glyph subtree back into SVG elements.
writeKvg' :: Glyph -> X.Element
writeKvg' = \case
  Path{..} -> X.unode "path" $ uqAttr "d" pathData
  Group{..} -> X.unode "g" $ (catMaybes [uqAttr "element" <$> groupName], map writeKvg' groupSubGlyphs)
-- | Render the glyph as a large SVG in which each level-0 component
-- group is wrapped in an <a xlink:href> so it highlights on hover.
renderLinkedSvg :: Glyph -> X.Element
renderLinkedSvg glyph = X.unode "svg" (attrs, [style, g])
  where
    -- Depth starts at -1 so the artificial outermost group's children
    -- are the "level 0" components that get link wrappers.
    g = X.unode "g" (uqAttr "class" "top", renderLinkedSvg' (-1) glyph)
    style = X.unode "style" (aStyle ++ pathStyle)
    aStyle = "a:hover{stroke:red;stroke-width:4;}" :: String
    pathStyle = "g.top{fill:none;stroke:black;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;}"
    attrs =
      [ xmlnsAttr
      , xlinkAttr
      , uqAttr "width" $ show size
      , uqAttr "height" $ show size
      , uqAttr "viewBox" "0 0 109 109"
      ]
    size = 4 * 109
-- Recursive worker: 'level' counts group depth; only groups reached at
-- level 0 (with a name) become <a> link wrappers.
renderLinkedSvg' :: Int -> Glyph -> X.Element
renderLinkedSvg' level = \case
  Path{..} -> X.unode "path" $ uqAttr "d" pathData
  Group{..} ->
    let content = X.unode "g" $ map (renderLinkedSvg' $ level+1) groupSubGlyphs
        subName = if level == 0 then groupName else Nothing
    in case subName of
      Just element -> X.unode "a" ([X.Attr (xlinkName "href") element], [content])
      Nothing -> content
-- | Render the glyph as a small static SVG wrapped in a <div>.
renderSvg :: Glyph -> X.Element
renderSvg glyph = X.unode "div" ([uqAttr "border" "1px"], [X.unode "svg" (attrs, [style, g])])
  where
    g = X.unode "g" (uqAttr "class" "top", renderSvg' glyph)
    style = X.unode "style" pathStyle
    pathStyle = "g.top{fill:none;stroke:black;stroke-width:3;stroke-linecap:round;stroke-linejoin:round;}" :: String
    attrs =
      [ xmlnsAttr
      , xlinkAttr
      , uqAttr "width" $ show size
      , uqAttr "height" $ show size
      , uqAttr "viewBox" "0 0 109 109"
      ]
    size = 0.3 * 109
-- Plain recursive serialization with no link wrappers.
renderSvg' :: Glyph -> X.Element
renderSvg' = \case
  Path{..} -> X.unode "path" $ uqAttr "d" pathData
  Group{..} -> X.unode "g" $ map renderSvg' groupSubGlyphs
-- | Nested <ul> decomposition tree: each node shows its own small SVG
-- followed by its children.
renderTree :: Glyph -> X.Element
renderTree = \case
  glyph@Group{..} -> X.unode "span" $ [renderSvg glyph, X.unode "ul" $ map (X.unode "li" . renderTree) groupSubGlyphs]
  glyph@Path{..} -> X.unode "span" $ renderSvg glyph
-- | Full page fragment: clickable image on the left, decomposition
-- tree on the right, laid out as one table row.
renderXhtml :: Glyph -> X.Element
renderXhtml glyph = X.unode "table" $ X.unode "tr" $ map (X.unode "td") [image, tree]
  where
    tree = renderTree glyph
    image = renderLinkedSvg glyph
-- | Attribute with an unqualified name.
uqAttr :: String -> String -> X.Attr
uqAttr = X.Attr . X.unqual
| mkovacs/kanjibrowse | src/main/Glyph.hs | gpl-3.0 | 5,319 | 0 | 18 | 1,181 | 1,834 | 956 | 878 | 130 | 4 |
module Lambda.Types.TestEither where
import Prelude hiding (succ, pred, sum)
import Test.HUnit
import Lambda.Variable
import Lambda.Engine
import Lambda.Types.Either
import Lambda.Types.Bool
import Lambda.Types.Int
import Lambda.Types.Pair
-- HUnit suite for the Either encoding: 'reveal' (case analysis on
-- inl/inr terms) and 'showEither' rendering with various printers.
tests :: Test
tests = test [
        assertEqual
            "Test reveal inl"
            (VarTerm (Variable "left"))
            (reduceAll (reveal
                (inl (VarTerm (Variable "left")))
                (Lambda (Variable "x") (VarTerm (Variable "x")))
                (Lambda (Variable "x") (VarTerm (Variable "wrong")))))
        ,
        assertEqual
            "Test reveal inr"
            (VarTerm (Variable "right"))
            (reduceAll (reveal
                (inr (VarTerm (Variable "right")))
                (Lambda (Variable "x") (VarTerm (Variable "wrong")))
                (Lambda (Variable "x") (VarTerm (Variable "x")))))
        ,
        assertEqual
            "Test showEither inl(zero)"
            "inl(zero)"
            (showEither showInt showInt (inl zero))
        ,
        assertEqual
            "Test showEither inr(true)"
            "inr(true)"
            (showEither show showBool (inr true))
        ,
        assertEqual
            "Test showEither inl(one)"
            "inl(succ(zero))"
            (showEither showInt show (inl one))
        ,
        assertEqual
            "Test showEither inr(<true,two>)"
            "inr(<true,succ(succ(zero))>)"
            (showEither
                showInt
                (showPair showBool showInt)
                (inr (pair true two)))
    ]
    where
        -- Church-style numerals built from the library's zero/succ.
        one = succ zero
        two = succ one
| fpoli/lambda | test/Lambda/Types/TestEither.hs | gpl-3.0 | 1,648 | 0 | 17 | 614 | 428 | 224 | 204 | 46 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Speech.Types
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.Speech.Types
(
-- * Service Configuration
speechService
-- * OAuth Scopes
, cloudPlatformScope
-- * LongRunningRecognizeMetadata
, LongRunningRecognizeMetadata
, longRunningRecognizeMetadata
, lrrmStartTime
, lrrmURI
, lrrmProgressPercent
, lrrmLastUpdateTime
-- * Status
, Status
, status
, sDetails
, sCode
, sMessage
-- * ListOperationsResponse
, ListOperationsResponse
, listOperationsResponse
, lorNextPageToken
, lorOperations
-- * Operation
, Operation
, operation
, oDone
, oError
, oResponse
, oName
, oMetadata
-- * SpeechRecognitionAlternative
, SpeechRecognitionAlternative
, speechRecognitionAlternative
, sraConfidence
, sraWords
, sraTranscript
-- * WordInfo
, WordInfo
, wordInfo
, wiStartOffSet
, wiConfidence
, wiEndOffSet
, wiWord
, wiSpeakerTag
-- * StatusDetailsItem
, StatusDetailsItem
, statusDetailsItem
, sdiAddtional
-- * SpeechRecognitionResult
, SpeechRecognitionResult
, speechRecognitionResult
, srrAlternatives
, srrLanguageCode
, srrChannelTag
-- * Xgafv
, Xgafv (..)
-- * LongRunningRecognizeResponse
, LongRunningRecognizeResponse
, longRunningRecognizeResponse
, lrrrResults
-- * OperationMetadata
, OperationMetadata
, operationMetadata
, omAddtional
-- * OperationResponse
, OperationResponse
, operationResponse
, orAddtional
) where
import Network.Google.Prelude
import Network.Google.Speech.Types.Product
import Network.Google.Speech.Types.Sum
-- | Default request referring to version 'v2beta1' of the Cloud Speech-to-Text API. This contains the host and root path used as a starting point for constructing service requests.
-- (This module appears to be generator output -- prefer regenerating
-- over hand-editing; confirm with the package's gen/ layout.)
speechService :: ServiceConfig
speechService
  = defaultService (ServiceId "speech:v2beta1")
      "speech.googleapis.com"
-- | See, edit, configure, and delete your Google Cloud Platform data
cloudPlatformScope :: Proxy '["https://www.googleapis.com/auth/cloud-platform"]
cloudPlatformScope = Proxy
| brendanhay/gogol | gogol-speech/gen/Network/Google/Speech/Types.hs | mpl-2.0 | 2,692 | 0 | 7 | 620 | 267 | 185 | 82 | 71 | 1 |
module Pr32 (g) where
-- | Greatest common divisor of two non-negative Ints, computed with
-- the subtraction form of Euclid's algorithm.
-- Examples: g 24 16 == 8, g 25 16 == 1.
g :: Int -> Int -> Int
g 0 n = n
g n 0 = n
g a b = g (larger - smaller) smaller
  where
    larger = max a b
    smaller = min a b
| ekalosak/haskell-practice | Pr32.hs | lgpl-3.0 | 163 | 0 | 9 | 79 | 91 | 47 | 44 | 9 | 1 |
module Main where
import Test.Framework (defaultMain)
-- Util tests
import qualified Network.Haskoin.Util.Tests (tests)
-- Crypto tests
import qualified Network.Haskoin.Crypto.BigWord.Tests (tests)
import qualified Network.Haskoin.Crypto.Point.Tests (tests)
import qualified Network.Haskoin.Crypto.ECDSA.Tests (tests)
import qualified Network.Haskoin.Crypto.Base58.Tests (tests)
import qualified Network.Haskoin.Crypto.Base58.Units (tests)
import qualified Network.Haskoin.Crypto.Keys.Tests (tests)
import qualified Network.Haskoin.Crypto.ExtendedKeys.Tests (tests)
import qualified Network.Haskoin.Crypto.ExtendedKeys.Units (tests)
import qualified Network.Haskoin.Crypto.Hash.Tests (tests)
import qualified Network.Haskoin.Crypto.Hash.Units (tests)
import qualified Network.Haskoin.Crypto.Mnemonic.Tests (tests)
import qualified Network.Haskoin.Crypto.Mnemonic.Units (tests)
import qualified Network.Haskoin.Crypto.Units (tests)
-- Node tests
import qualified Network.Haskoin.Node.Units (tests)
-- Script tests
import qualified Network.Haskoin.Script.Tests (tests)
import qualified Network.Haskoin.Script.Units (tests)
-- Transaction tests
import qualified Network.Haskoin.Transaction.Tests (tests)
import qualified Network.Haskoin.Transaction.Units (tests)
-- Block tests
import qualified Network.Haskoin.Block.Tests (tests)
import qualified Network.Haskoin.Block.Units (tests)
-- Stratum tests
import qualified Network.Haskoin.Stratum.Tests (tests)
import qualified Network.Haskoin.Stratum.Units (tests)
-- Json tests
import qualified Network.Haskoin.Json.Tests (tests)
-- Binary tests
import qualified Network.Haskoin.Binary.Tests (tests)
-- | Run every test group in the package as one flat test-framework
-- suite; the per-module lists are simply concatenated.
main :: IO ()
main = defaultMain
    ( Network.Haskoin.Json.Tests.tests
    ++ Network.Haskoin.Binary.Tests.tests
    ++ Network.Haskoin.Util.Tests.tests
    ++ Network.Haskoin.Crypto.BigWord.Tests.tests
    ++ Network.Haskoin.Crypto.Point.Tests.tests
    ++ Network.Haskoin.Crypto.ECDSA.Tests.tests
    ++ Network.Haskoin.Crypto.Base58.Tests.tests
    ++ Network.Haskoin.Crypto.Base58.Units.tests
    ++ Network.Haskoin.Crypto.Hash.Tests.tests
    ++ Network.Haskoin.Crypto.Hash.Units.tests
    ++ Network.Haskoin.Crypto.Keys.Tests.tests
    ++ Network.Haskoin.Crypto.ExtendedKeys.Tests.tests
    ++ Network.Haskoin.Crypto.ExtendedKeys.Units.tests
    ++ Network.Haskoin.Crypto.Mnemonic.Tests.tests
    ++ Network.Haskoin.Crypto.Mnemonic.Units.tests
    ++ Network.Haskoin.Crypto.Units.tests
    ++ Network.Haskoin.Node.Units.tests
    ++ Network.Haskoin.Script.Tests.tests
    ++ Network.Haskoin.Script.Units.tests
    ++ Network.Haskoin.Transaction.Tests.tests
    ++ Network.Haskoin.Transaction.Units.tests
    ++ Network.Haskoin.Block.Tests.tests
    ++ Network.Haskoin.Block.Units.tests
    ++ Network.Haskoin.Stratum.Tests.tests
    ++ Network.Haskoin.Stratum.Units.tests
    )
| nuttycom/haskoin | tests/Main.hs | unlicense | 2,854 | 0 | 31 | 320 | 614 | 426 | 188 | 54 | 1 |
module Main where
import Control.DeepSeq
import Criterion.Main
main :: IO ()
main =
    -- No benchmarks registered yet: criterion's defaultMain with an
    -- empty list only parses its command-line options.
    defaultMain [
        ]
| erochest/barth-scrape | benchmark/BenchAll.hs | apache-2.0 | 133 | 0 | 6 | 49 | 34 | 19 | 15 | 6 | 1 |
{-
Copyright 2016, Dominic Orchard, Andrew Rice, Mistral Contrastin, Matthew Danish
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE FlexibleInstances, UndecidableInstances,
DoAndIfThenElse, MultiParamTypeClasses, FlexibleContexts,
ScopedTypeVariables #-}
{- Provides support for outputting source files and analysis information -}
module Camfort.Output
(
-- * Classes
OutputFiles(..)
, Show'(..)
-- * Refactoring
, refactoring
) where
import qualified Language.Fortran.AST as F
import qualified Language.Fortran.PrettyPrint as PP
import qualified Language.Fortran.Util.Position as FU
import qualified Language.Fortran.ParserMonad as FPM
import Camfort.Analysis.Annotations
import Camfort.Reprint
import Camfort.Helpers
import Camfort.Helpers.Syntax
import System.Directory
import qualified Data.ByteString.Char8 as B
import Data.Generics
import Data.Functor.Identity
import Control.Monad
import Control.Monad.Trans.Class
import Control.Monad.Trans.State.Lazy
-- Custom 'Show' which on strings is the identity (avoiding the quoting
-- and escaping that the standard 'show' adds to String values); all
-- other types fall back to 'show' via the overlappable instance.
class Show' s where
    show' :: s -> String
instance {-# OVERLAPS #-} Show' String where
    show' = id
instance {-# OVERLAPS #-} (Show' a, Show' b) => Show' (a, b) where
    show' (a, b) = "(" ++ show' a ++ "," ++ show' b ++")"
instance {-# OVERLAPPABLE #-} (Show a) => Show' a where
    show' = show
class OutputFiles t where
  {-| Given a directory and list of triples of filenames, with their source
       text (if it exists) and their AST, write these to the directory -}
  mkOutputText :: FileOrDir -> t -> SourceText
  outputFile   :: t -> Filename
  isNewFile    :: t -> Bool

  outputFiles :: FileOrDir -> FileOrDir -> [t] -> IO ()
  outputFiles inp outp pdata = do
    outIsDir <- isDirectory outp
    if outIsDir then do
        -- Output to a directory, create if missing
        createDirectoryIfMissing True outp
        -- Report which directory the files are going to
        putStrLn $ "Writing refactored files to directory: " ++ outp ++ "/"
        -- If the input was a directory then work out the path prefix
        -- which needs to be replaced with the new directory path
        isdir <- isDirectory inp
        let inSrc = if isdir then inp else getDir inp
        forM_ pdata (\x -> let f' = changeDir outp inSrc (outputFile x)
                           in do checkDir f'
                                 putStrLn $ "Writing " ++ f'
                                 B.writeFile f' (mkOutputText outp x))
     else
        -- Single-file output: new files go to their own recorded path,
        -- everything else is written to the requested output file.
        forM_ pdata (\x -> do
          let out = if isNewFile x then outputFile x else outp
          putStrLn $ "Writing " ++ out
          B.writeFile out (mkOutputText outp x))
{-| changeDir rebases a filename from one directory prefix onto another:
    the leading prefix shared with oldDir is stripped from oldFilename
    and newDir is prepended. If the filename has no directory part, this
    simply prepends newDir. -}
changeDir newDir oldDir oldFilename =
    newDir ++ stripShared oldDir oldFilename
  where
    stripShared [] remainder = remainder
    stripShared _  []        = []
    stripShared (d:ds) (f:fs)
      | d == f    = stripShared ds fs
      | otherwise = fs
-- When the new source text is already provided, write it out verbatim.
instance OutputFiles (Filename, SourceText) where
  mkOutputText _ (_, output) = output
  outputFile (f, _) = f
  isNewFile _ = True

-- When there is a file to be reprinted (for refactoring)
instance OutputFiles (F.ProgramFile Annotation, SourceText) where
  mkOutputText _ (ast@(F.ProgramFile (F.MetaInfo version _) _), input) =
     -- If we are creating a file, call the pretty printer directly
     if B.null input
      then B.pack $ PP.pprintAndRender version ast (Just 0)
      -- Otherwise, apply the refactoring system with reprint, which
      -- splices refactored spans into the original source text
      else runIdentity $ reprint (refactoring version) ast input
  outputFile (pf, _) = F.pfGetFilename pf
  isNewFile (_, inp) = B.null inp
{- Specifies how to do specific refactorings
  (uses generic query extension - remember extQ is non-symmetric) -}
refactoring :: Typeable a
            => FPM.FortranVersion
            -> a -> SourceText -> StateT FU.Position Identity (SourceText, Bool)
refactoring v z inp = ((catchAll inp `extQ` refactoringsForProgramUnits v inp) `extQ` refactoringsForBlocks v inp) $ z
  where
    -- Fallback for node types with no special handling: emit nothing
    -- and report that no refactoring took place.
    catchAll :: SourceText -> a -> StateT FU.Position Identity (SourceText, Bool)
    catchAll _ _ = return (B.empty, False)

-- Adapter: runs the program-unit refactoring with a fresh added-lines
-- counter (the inner State Int) and discards it afterwards.
refactoringsForProgramUnits :: FPM.FortranVersion
                            -> SourceText
                            -> F.ProgramUnit Annotation
                            -> StateT FU.Position Identity (SourceText, Bool)
refactoringsForProgramUnits v inp z =
   mapStateT (\n -> Identity $ n `evalState` 0) (refactorProgramUnits v inp z)
-- Refactors program units; currently only refactored comments are
-- handled (emitted in place of the original span), everything else is
-- left untouched.
refactorProgramUnits :: FPM.FortranVersion
                     -> SourceText
                     -> F.ProgramUnit Annotation
                     -> StateT FU.Position (State Int) (SourceText, Bool)
-- Output comments
refactorProgramUnits _ inp (F.PUComment ann span (F.Comment comment)) = do
    cursor <- get
    if pRefactored ann
      then let (FU.SrcSpan lb ub) = span
               (p0, _) = takeBounds (cursor, lb) inp
               nl = if null comment then B.empty else B.pack "\n"
           in (put ub >> return (B.concat [p0, B.pack comment, nl], True))
      else return (B.empty, False)
refactorProgramUnits _ _ _ = return (B.empty, False)
-- Adapter: runs the block refactoring with a fresh added-lines counter
-- (the inner State Int) and discards it afterwards.
refactoringsForBlocks :: FPM.FortranVersion
                      -> SourceText
                      -> F.Block Annotation
                      -> StateT FU.Position Identity (SourceText, Bool)
refactoringsForBlocks v inp z =
   mapStateT (\n -> Identity $ n `evalState` 0) (refactorBlocks v inp z)

-- Refactors individual blocks. The outer StateT tracks the cursor into
-- the original source; the inner State Int counts lines added by newly
-- created nodes so later refactorings can compensate.
refactorBlocks :: FPM.FortranVersion
               -> SourceText
               -> F.Block Annotation
               -> StateT FU.Position (State Int) (SourceText, Bool)
-- Output comments
refactorBlocks _ inp (F.BlComment ann span (F.Comment comment)) = do
    cursor <- get
    if pRefactored ann
      then let (FU.SrcSpan lb ub) = span
               (p0, _) = takeBounds (cursor, lb) inp
               nl = if null comment then B.empty else B.pack "\n"
           in put ub >> return (B.concat [p0, B.pack comment, nl], True)
      else return (B.empty, False)
-- Refactor use statements
refactorBlocks v inp b@(F.BlStatement _ _ _ u@F.StUse{}) = do
    cursor <- get
    case refactored $ F.getAnnotation u of
      Just (FU.Position _ rCol _) -> do
        let (FU.SrcSpan lb _) = FU.getSpan u
        let (p0, _) = takeBounds (cursor, lb) inp
        let out = B.pack $ PP.pprintAndRender v b (Just (rCol -1))
        added <- lift get
        -- Newly created use-statements contribute extra lines; record
        -- them so refactorSyntax can remove blank lines elsewhere.
        when (newNode $ F.getAnnotation u)
             (lift $ put $ added + countLines out)
        put $ toCol0 lb
        return (p0 `B.append` out, True)
      Nothing -> return (B.empty, False)
-- Common blocks, equivalence statements, and declarations can all
-- be refactored by the default refactoring
refactorBlocks v inp (F.BlStatement _ _ _ s@F.StEquivalence{}) =
    refactorStatements v inp s
refactorBlocks v inp (F.BlStatement _ _ _ s@F.StCommon{}) =
    refactorStatements v inp s
refactorBlocks v inp (F.BlStatement _ _ _ s@F.StDeclaration{}) =
    refactorStatements v inp s
-- Arbitrary statements can be refactored *as blocks* (in order to
-- get good indenting)
refactorBlocks v inp b@F.BlStatement {} = refactorSyntax v inp b
refactorBlocks _ _ _ = return (B.empty, False)
-- Wrapper to fix the type of refactorSyntax to deal with statements
refactorStatements :: FPM.FortranVersion -> SourceText
                   -> F.Statement A -> StateT FU.Position (State Int) (SourceText, Bool)
refactorStatements = refactorSyntax

-- Generic refactoring for any annotated, pretty-printable node: skip
-- it unless its annotation marks it as refactored, otherwise emit the
-- source up to the node followed by the re-rendered (or deleted) node.
refactorSyntax ::
   (Typeable s, F.Annotated s, FU.Spanned (s A), PP.IndentablePretty (s A))
   => FPM.FortranVersion -> SourceText
   -> s A -> StateT FU.Position (State Int) (SourceText, Bool)
refactorSyntax v inp e = do
    cursor <- get
    let a = F.getAnnotation e
    case refactored a of
      Nothing -> return (B.empty, False)
      Just (FU.Position _ rCol _) -> do
        let (FU.SrcSpan lb ub) = FU.getSpan e
        let (pre, _) = takeBounds (cursor, lb) inp
        -- New nodes are re-indented to the recorded refactoring column;
        -- deleted nodes produce no output at all.
        let indent = if newNode a then Just (rCol - 1) else Nothing
        let output = if deleteNode a then B.empty
                     else B.pack $ PP.pprintAndRender v e indent
        out <- if newNode a then do
                 -- If a new node is being created then compensate for
                 -- the lines it adds by stripping blank lines from its
                 -- rendering, up to the span it covers.
                 numAdded <- lift get
                 let diff = linesCovered ub lb
                 -- remove empty newlines here if extra lines were added
                 let (out, numRemoved) = if numAdded <= diff
                                         then removeNewLines output numAdded
                                         else removeNewLines output diff
                 lift $ put (numAdded - numRemoved)
                 return out
               else return output
        put ub
        return (B.concat [pre, out], True)
-- | Count the newline characters in a ByteString.
-- Replaces a manual uncons recursion with 'B.count', which scans the
-- buffer directly. Pinned to Int, matching its use as a line counter
-- in the State Int accumulator above.
countLines :: B.ByteString -> Int
countLines = B.count '\n'
{- 'removeNewLines xs n' removes at most 'n' new lines characters from
the input string xs, returning the new string and the number of new
lines that were removed. Note that the number of new lines removed
might actually be less than 'n'- but in principle this should not
happen with the usage in 'refactorDecl'.
Blank lines are collapsed pairwise: a doubled CRLF or LF is reduced to
a single one and recursion continues on the remainder. (Note the local
'xs' bindings in each case arm shadow the outer argument.) -}
removeNewLines xs 0 = (xs, 0)
-- Deal with CR LF in the same way as just LF
removeNewLines xs n =
    case unpackFst (B.splitAt 4 xs) of
      ("\r\n\r\n", xs) -> (xs', n' + 1)
          where (xs', n') = removeNewLines (B.pack "\r\n" `B.append` xs) (n - 1)
      _ ->
        case unpackFst (B.splitAt 2 xs) of
          ("\n\n", xs)     -> (xs', n' + 1)
              where (xs', n') = removeNewLines (B.pack "\n" `B.append` xs) (n - 1)
          _ ->
            case B.uncons xs of
              Nothing -> (xs, 0)
              Just (x, xs) -> (B.cons x xs', n)
                  where (xs', _) = removeNewLines xs n

-- Convert only the first component of a pair from ByteString to String.
unpackFst (x, y) = (B.unpack x, y)
| dorchard/camfort | src/Camfort/Output.hs | apache-2.0 | 10,706 | 15 | 20 | 3,088 | 2,862 | 1,502 | 1,360 | 182 | 6 |
-- Copyright 2020 Google LLC
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module BootUser where
import {-# SOURCE #-} Boot
-- | A cons-list of 'Int's whose tail field uses the 'Boot' type brought in
-- through the hs-boot stub ({-# SOURCE #-} import above), exercising
-- mutually recursive module compilation.
data IntList = IntList Int Boot | Nil
    deriving Show
| google/hrepl | hrepl/tests/BootUser.hs | apache-2.0 | 696 | 0 | 6 | 125 | 38 | 29 | 9 | 4 | 0 |
-- Copyright 2020 Google LLC
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE TemplateHaskell #-}
module Main (main) where
import Control.Monad (guard)
import Foo (foo)
import Language.Haskell.TH
result :: Integer
-- Uses the FFI within TemplateHaskell:
-- 'foo' is a foreign call evaluated at *compile time*; its result is
-- spliced into the program as a plain integer literal.
result = $(litE $ IntegerL $ fromIntegral $ foo 3)
-- | Succeeds silently when the compile-time splice equals 15; otherwise
-- 'guard' fails in IO (via its Alternative instance), making the test fail.
-- NOTE(review): this pins @foo 3 == 15@ — confirm against Foo's definition.
main :: IO ()
main = guard (15 == result)
| google/cabal2bazel | bzl/tests/ffi/TemplateHaskellBaseTest.hs | apache-2.0 | 874 | 0 | 9 | 149 | 102 | 64 | 38 | 9 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeFamilies #-}
module HERMIT.External
( -- * Externals
External
, ExternalName
, ExternalHelp
, externName
, externDyn
, externHelp
, externTypeString
, externTypeArgResString
, splitFunTyArgs
, toHelp
, external
, Extern(..)
, matchingExternals
-- * Tags
, CmdTag(..)
, TagE
, Tag((.+),remTag,tagMatch)
, (.&)
, (.||)
, notT
, externTags
, dictionaryOfTags
-- * Boxes
-- | Boxes are used by the 'Extern' class.
, CoreString(..)
, CrumbBox(..)
, IntBox(..)
, IntListBox(..)
, PathBox(..)
, StringBox(..)
, StringListBox(..)
, TagBox(..)
-- ** LCore Boxes
, TransformLCoreStringBox(..)
, TransformLCoreUnitBox(..)
, TransformLCorePathBox(..)
, RewriteLCoreBox(..)
, BiRewriteLCoreBox(..)
, RewriteLCoreListBox(..)
-- ** LCoreTC Boxes
, TransformLCoreTCStringBox(..)
, TransformLCoreTCUnitBox(..)
, TransformLCoreTCLCoreBox(..)
, TransformLCoreTCPathBox(..)
, RewriteLCoreTCBox(..)
, BiRewriteLCoreTCBox(..)
, RewriteLCoreTCListBox(..)
) where
import Data.Map hiding (map)
import Data.Dynamic
import Data.List
import Data.Typeable.Internal (TypeRep(..), funTc)
import HERMIT.Core
import HERMIT.Context (LocalPathH)
import HERMIT.Kure
import HERMIT.Lemma
-----------------------------------------------------------------
-- | 'External' names are just strings.
type ExternalName = String
-- | Help information for 'External's is stored as a list of strings, designed for multi-line displaying.
type ExternalHelp = [String]
-- Tags --------------------------------------------------------
-- | Requirement: commands cannot have the same name as any 'CmdTag'
-- (or the help function will not find it).
-- These should be /user facing/, because they give the user
-- a way of sub-dividing our confusing array of commands.
data CmdTag = Shell -- ^ Shell-specific command.
| Eval -- ^ The arrow of evaluation (reduces a term).
| KURE -- ^ 'Language.KURE' command.
| Loop -- ^ Command may operate multiple times.
| Deep -- ^ Command may make a deep change, can be O(n).
| Shallow -- ^ Command operates on local nodes only, O(1).
| Navigation -- ^ Uses 'Path' or 'Lens' to focus onto something.
| Query -- ^ Extract information from an expression.
| Predicate -- ^ Something that passes or fails.
| Introduce -- ^ Introduce something, like a new name.
| Commute -- ^ Commute is when you swap nested terms.
| PreCondition -- ^ Operation has a (perhaps undocumented) precondition.
| Strictness -- ^ Alters the strictness of the expression.
| Debug -- ^ Commands specifically to help debugging.
| VersionControl -- ^ Version control for Core syntax.
| Context -- ^ A command that uses its context, such as inlining.
| Unsafe -- ^ Commands that are not type safe (may cause Core Lint to fail),
-- or may otherwise change the semantics of the program.
-- Only available in unsafe mode!
| Safe -- ^ Include in Strict Safety mode (currently unused)
| Proof -- ^ Commands related to proving lemmas.
| TODO -- ^ An incomplete or potentially buggy command.
| Experiment -- ^ Things we are trying out.
| Deprecated -- ^ A command that will be removed in a future release;
-- it has probably been renamed or subsumed by another command.
deriving (Eq, Show, Read, Bounded, Enum, Typeable)
-- | Lists all the tags paired with a short description of what they're about.
dictionaryOfTags :: [(CmdTag,String)]
dictionaryOfTags = notes ++ map noNote missing
  where
    -- Tags nobody has written a description for yet, in declaration order.
    missing = filter (`notElem` map fst notes) [minBound .. maxBound]
    noNote tag = (tag, "(unknown purpose)")
    -- These should give the user a clue about what the sub-commands might do
    notes =
      [ (Shell, "Shell-specific command.")
      , (Eval, "The arrow of evaluation (reduces a term).")
      , (KURE, "Direct reflection of a combinator from the KURE DSL.")
      , (Loop, "Command may operate multiple times.")
      , (Deep, "Command may make a deep change, can be O(n).")
      , (Shallow, "Command operates on local nodes only, O(1).")
      , (Navigation, "Navigate via focus, or directional command.")
      , (Query, "Extract information from an expression.")
      , (Predicate, "Something that passes or fails.")
      , (Introduce, "Introduce something, like a new name.")
      , (Commute, "Commute is when you swap nested terms.")
      , (PreCondition, "Operation has a (perhaps undocumented) precondition.")
      , (Strictness, "Alters the strictness of an expression.")
      , (Debug, "A command specifically to help debugging.")
      , (VersionControl, "Version control for Core syntax.")
      , (Context, "A command that uses its context, such as inlining.")
      , (Unsafe, "Commands that are not type safe (may cause Core Lint to fail), or may otherwise change the semantics of the program.")
      , (Proof, "Commands related to proving lemmas.")
      , (TODO, "An incomplete or potentially buggy command.")
      , (Experiment, "Things we are trying out, use at your own risk.")
      , (Deprecated, "A command that will be removed in a future release; it has probably been renamed or subsumed by another command.")
      ]
-- Unfortunately, record update syntax seems to associate to the right.
-- These operators save us some parentheses.
infixl 3 .+
infixr 4 .||
infixr 5 .&
-- | A data type of logical operations on tags.
data TagE :: * where
Tag :: Tag a => a -> TagE
NotTag :: TagE -> TagE
AndTag :: TagE -> TagE -> TagE
OrTag :: TagE -> TagE -> TagE
deriving Typeable
-- | Tags are meta-data that we add to 'External's to make them sortable and searchable.
class Tag a where
toTagE :: a -> TagE
-- | Add a 'Tag' to an 'External'.
(.+) :: External -> a -> External
-- | Remove a 'Tag' from an 'External'.
remTag :: a -> External -> External
-- | Check if an 'External' has the specified 'Tag'.
tagMatch :: a -> External -> Bool
deriving instance Typeable Tag
-- | Composite tags act on an 'External' by structural recursion.
-- Note the approximations flagged inline: adding or removing an 'OrTag'
-- is treated exactly like an 'AndTag', since a tag *list* cannot
-- represent a disjunction.  Only 'tagMatch' interprets Or properly.
instance Tag TagE where
   toTagE = id
   e .+ (Tag t) = e .+ t
   e .+ (NotTag t) = remTag t e
   e .+ (AndTag t1 t2) = e .+ t1 .+ t2
   e .+ (OrTag t1 t2) = e .+ t1 .+ t2 -- not sure what else to do
   remTag (Tag t) e = remTag t e
   remTag (NotTag t) e = e .+ t
   remTag (AndTag t1 t2) e = remTag t1 (remTag t2 e)
   remTag (OrTag t1 t2) e = remTag t1 (remTag t2 e) -- again
   tagMatch (Tag t) e = tagMatch t e
   tagMatch (NotTag t) e = not (tagMatch t e)
   tagMatch (AndTag t1 t2) e = tagMatch t1 e && tagMatch t2 e
   tagMatch (OrTag t1 t2) e = tagMatch t1 e || tagMatch t2 e
-- | Plain 'CmdTag's are stored directly in the external's tag list.
instance Tag CmdTag where
   toTagE = Tag
   ex .+ t = ex { externTags = t : externTags ex }
   remTag t ex = ex { externTags = filter (/= t) (externTags ex) }
   tagMatch t ex = t `elem` externTags ex
-- | An \"and\" on 'Tag's.
(.&) :: (Tag a, Tag b) => a -> b -> TagE
t1 .& t2 = AndTag (toTagE t1) (toTagE t2)
-- | An \"or\" on 'Tag's.
(.||) :: (Tag a, Tag b) => a -> b -> TagE
t1 .|| t2 = OrTag (toTagE t1) (toTagE t2)
-- how to make a unary operator?
-- | A \"not\" on 'Tag's.
notT :: Tag a => a -> TagE
notT = NotTag . toTagE
-----------------------------------------------------------------
-- | An 'External' is a 'Dynamic' value with some associated meta-data (name, help string and tags).
data External = External
{ externName :: ExternalName -- ^ Get the name of an 'External'.
, externDyn :: Dynamic -- ^ Get the 'Dynamic' value stored in an 'External'.
, externHelp :: ExternalHelp -- ^ Get the list of help 'String's for an 'External'.
, externTags :: [CmdTag] -- ^ List all the 'CmdTag's associated with an 'External'
}
-- | The primitive way to build an 'External'.
-- | The primitive way to build an 'External': box the value into a
-- 'Dynamic', indent each help line for display, and start with no tags
-- (tags are attached afterwards with '.+').
external :: Extern a => ExternalName -> a -> ExternalHelp -> External
external nm fn help = External
                { externName = nm
                , externDyn = toDyn (box fn)
                , externHelp = map ("  " ++) help
                , externTags = []
                }
-- | Get all the 'External's which match a given tag predicate
-- and box a Transform of the appropriate type.
-- | Get all the 'External's which match a given tag predicate
-- and box a Transform of the appropriate type.
-- The pattern guard silently drops externals whose 'Dynamic' payload
-- is not of the requested type @tr@ ('fromDynamic' yields 'Nothing').
matchingExternals :: (Extern tr, Tag t) => t -> [External] -> [(External, tr)]
matchingExternals tag exts = [ (e,tr) | e <- exts, tagMatch tag e
                                      , Just tr <- [fmap unbox $ fromDynamic $ externDyn e] ]
-- | Build a 'Data.Map' from names to help information.
-- | Build a 'Data.Map' from names to help information.
-- Duplicate names are merged with (++), concatenating their help text.
toHelp :: [External] -> Map ExternalName ExternalHelp
toHelp = fromListWith (++) . map toH
    where
      toH :: External -> (ExternalName,ExternalHelp)
      -- First help line: "name :: Type" left, tag list right-justified.
      toH e = (externName e, spaceout (externName e ++ " :: " ++ externTypeString e)
                                      (show (externTags e)) : externHelp e)
      -- Pad so that ys ends at the target column (no-op if already past it).
      spaceout xs ys = xs ++ replicate (width - (length xs + length ys)) ' ' ++ ys
      -- Display width of a help line, in characters.
      width = 78
-- | Get a string representation of the (monomorphic) type of an 'External'
externTypeString :: External -> String
externTypeString = deBoxify . show . dynTypeRep . externDyn
-- | Remove the word 'Box' from a string.
-- | Remove the word \"Box\" from a type string, so the wrapper types
-- used by 'Extern' do not leak into user-facing output.  A leading
-- @CLSBox -> @ or @PrettyPrinter -> @ argument (implicitly supplied
-- by the shell) is stripped off entirely first.
deBoxify :: String -> String
deBoxify s = go (stripImplicitArg s)
    where
      -- Drop the first matching implicit-argument prefix, if any.
      -- 'stripPrefix' keeps the prefix text and the number of dropped
      -- characters in sync (previously hard-coded as drop 10 / drop 17).
      stripImplicitArg xs =
          case [ rest | p <- ["CLSBox -> ", "PrettyPrinter -> "]
                      , Just rest <- [stripPrefix p xs] ] of
            (rest:_) -> rest
            []       -> xs
      -- Delete every occurrence of the substring "Box".
      go xs = case stripPrefix "Box" xs of
                Just rest -> go rest
                Nothing   -> case xs of
                               (c:cs) -> c : go cs
                               []     -> []
externTypeArgResString :: External -> ([String], String)
externTypeArgResString e = (map (deBoxify . show) aTys, deBoxify (show rTy))
where (aTys, rTy) = splitExternFunType e
splitExternFunType :: External -> ([TypeRep], TypeRep)
splitExternFunType = splitFunTyArgs . dynTypeRep . externDyn
-- | Decompose a (monomorphic) function 'TypeRep' into its argument
-- types and final result type; a non-function type yields no
-- arguments and itself as the result.
splitFunTyArgs :: TypeRep -> ([TypeRep], TypeRep)
splitFunTyArgs ty = maybe ([], ty) descend (splitFunTyMaybe ty)
  where descend (argTy, restTy) =
          let (argTys, resTy) = splitFunTyArgs restTy
          in (argTy : argTys, resTy)
-- | Split a single function arrow: @a -> r@ gives @Just (a, r)@,
-- anything else gives 'Nothing'.  This matches on the *internal*
-- representation of 'TypeRep', whose constructor gained a kind-rep
-- list in GHC 7.10 — hence the CPP.
splitFunTyMaybe :: TypeRep -> Maybe (TypeRep, TypeRep)
#if __GLASGOW_HASKELL__ < 710
splitFunTyMaybe (TypeRep _ tc [a,r]) | tc == funTc = Just (a,r)
#else
splitFunTyMaybe (TypeRep _ tc _krs [a,r]) | tc == funTc = Just (a,r)
#endif
splitFunTyMaybe _ = Nothing
-----------------------------------------------------------------
-- | The class of things that can be made into 'External's.
-- To be an 'Extern' there must exist an isomorphic 'Box' type that is an instance of 'Typeable'.
class Typeable (Box a) => Extern a where
-- | An isomorphic wrapper.
type Box a
-- | Wrap a value in a 'Box'.
box :: a -> Box a
-- | Unwrap a value from a 'Box'.
unbox :: Box a -> a
deriving instance Typeable Extern
-----------------------------------------------------------------
instance (Extern a, Extern b) => Extern (a -> b) where
type Box (a -> b) = Box a -> Box b
box f = box . f . unbox
unbox f = unbox . f . box
-----------------------------------------------------------------
data TagBox = TagBox TagE deriving Typeable
instance Extern TagE where
type Box TagE = TagBox
box = TagBox
unbox (TagBox t) = t
-----------------------------------------------------------------
data IntBox = IntBox Int deriving Typeable
instance Extern Int where
type Box Int = IntBox
box = IntBox
unbox (IntBox i) = i
-----------------------------------------------------------------
-- TODO: Considering unifying CrumbBox and PathBox under TransformLCoreTCPathBox.
data CrumbBox = CrumbBox Crumb deriving Typeable
instance Extern Crumb where
type Box Crumb = CrumbBox
box = CrumbBox
unbox (CrumbBox cr) = cr
-----------------------------------------------------------------
data PathBox = PathBox LocalPathH deriving Typeable
instance Extern LocalPathH where
type Box LocalPathH = PathBox
box = PathBox
unbox (PathBox p) = p
-----------------------------------------------------------------
newtype CoreString = CoreString { unCoreString :: String } deriving Typeable
instance Extern CoreString where
type Box CoreString = CoreString
box = id
unbox = id
-----------------------------------------------------------------
data StringBox = StringBox String deriving Typeable
instance Extern String where
type Box String = StringBox
box = StringBox
unbox (StringBox s) = s
-----------------------------------------------------------------
data StringListBox = StringListBox [String] deriving Typeable
instance Extern [String] where
type Box [String] = StringListBox
box = StringListBox
unbox (StringListBox l) = l
-----------------------------------------------------------------
data IntListBox = IntListBox [Int] deriving Typeable
instance Extern [Int] where
type Box [Int] = IntListBox
box = IntListBox
unbox (IntListBox l) = l
-----------------------------------------------------------------
instance Extern LemmaName where
type Box LemmaName = LemmaName
box = id
unbox = id
-----------------------------------------------------------------
data RewriteLCoreBox = RewriteLCoreBox (RewriteH LCore) deriving Typeable
instance Extern (RewriteH LCore) where
type Box (RewriteH LCore) = RewriteLCoreBox
box = RewriteLCoreBox
unbox (RewriteLCoreBox r) = r
-----------------------------------------------------------------
data TransformLCoreStringBox = TransformLCoreStringBox (TransformH LCore String) deriving Typeable
instance Extern (TransformH LCore String) where
type Box (TransformH LCore String) = TransformLCoreStringBox
box = TransformLCoreStringBox
unbox (TransformLCoreStringBox t) = t
-----------------------------------------------------------------
data TransformLCoreUnitBox = TransformLCoreUnitBox (TransformH LCore ()) deriving Typeable
instance Extern (TransformH LCore ()) where
type Box (TransformH LCore ()) = TransformLCoreUnitBox
box = TransformLCoreUnitBox
unbox (TransformLCoreUnitBox t) = t
-----------------------------------------------------------------
data TransformLCorePathBox = TransformLCorePathBox (TransformH LCore LocalPathH) deriving Typeable
instance Extern (TransformH LCore LocalPathH) where
type Box (TransformH LCore LocalPathH) = TransformLCorePathBox
box = TransformLCorePathBox
unbox (TransformLCorePathBox t) = t
-----------------------------------------------------------------
data BiRewriteLCoreBox = BiRewriteLCoreBox (BiRewriteH LCore) deriving Typeable
instance Extern (BiRewriteH LCore) where
type Box (BiRewriteH LCore) = BiRewriteLCoreBox
box = BiRewriteLCoreBox
unbox (BiRewriteLCoreBox b) = b
-----------------------------------------------------------------
data RewriteLCoreListBox = RewriteLCoreListBox [RewriteH LCore] deriving Typeable
instance Extern [RewriteH LCore] where
type Box [RewriteH LCore] = RewriteLCoreListBox
box = RewriteLCoreListBox
unbox (RewriteLCoreListBox l) = l
-----------------------------------------------------------------
data RewriteLCoreTCBox = RewriteLCoreTCBox (RewriteH LCoreTC) deriving Typeable
instance Extern (RewriteH LCoreTC) where
type Box (RewriteH LCoreTC) = RewriteLCoreTCBox
box = RewriteLCoreTCBox
unbox (RewriteLCoreTCBox r) = r
-----------------------------------------------------------------
data TransformLCoreTCStringBox = TransformLCoreTCStringBox (TransformH LCoreTC String) deriving Typeable
instance Extern (TransformH LCoreTC String) where
type Box (TransformH LCoreTC String) = TransformLCoreTCStringBox
box = TransformLCoreTCStringBox
unbox (TransformLCoreTCStringBox t) = t
-----------------------------------------------------------------
data TransformLCoreTCUnitBox = TransformLCoreTCUnitBox (TransformH LCoreTC ()) deriving Typeable
instance Extern (TransformH LCoreTC ()) where
type Box (TransformH LCoreTC ()) = TransformLCoreTCUnitBox
box = TransformLCoreTCUnitBox
unbox (TransformLCoreTCUnitBox t) = t
-----------------------------------------------------------------
data TransformLCoreTCLCoreBox = TransformLCoreTCLCoreBox (TransformH LCoreTC LCore) deriving Typeable
instance Extern (TransformH LCoreTC LCore) where
type Box (TransformH LCoreTC LCore) = TransformLCoreTCLCoreBox
box = TransformLCoreTCLCoreBox
unbox (TransformLCoreTCLCoreBox t) = t
-----------------------------------------------------------------
data TransformLCoreTCPathBox = TransformLCoreTCPathBox (TransformH LCoreTC LocalPathH) deriving Typeable
instance Extern (TransformH LCoreTC LocalPathH) where
type Box (TransformH LCoreTC LocalPathH) = TransformLCoreTCPathBox
box = TransformLCoreTCPathBox
unbox (TransformLCoreTCPathBox t) = t
-----------------------------------------------------------------
data BiRewriteLCoreTCBox = BiRewriteLCoreTCBox (BiRewriteH LCoreTC) deriving Typeable
instance Extern (BiRewriteH LCoreTC) where
type Box (BiRewriteH LCoreTC) = BiRewriteLCoreTCBox
box = BiRewriteLCoreTCBox
unbox (BiRewriteLCoreTCBox b) = b
-----------------------------------------------------------------
data RewriteLCoreTCListBox = RewriteLCoreTCListBox [RewriteH LCoreTC] deriving Typeable
instance Extern [RewriteH LCoreTC] where
type Box [RewriteH LCoreTC] = RewriteLCoreTCListBox
box = RewriteLCoreTCListBox
unbox (RewriteLCoreTCListBox l) = l
-----------------------------------------------------------------
| beni55/hermit | src/HERMIT/External.hs | bsd-2-clause | 18,532 | 39 | 22 | 4,515 | 3,635 | 2,101 | 1,534 | 315 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Harihara.Log where
import Control.Applicative ((<$>))
import Control.Exception
import Control.Monad (when)
import Data.List as L
import Data.Text as T
import Data.Typeable
-- Exceptions {{{
-- | Failures that can occur when talking to the last.fm web service.
data LastfmException
    = NoResponse            -- ^ The service sent no response at all.
    | JSONParseError String -- ^ The JSON reply could not be decoded.
    deriving (Typeable)

-- | Human-readable rendering, used when the exception escapes.
instance Show LastfmException where
  show NoResponse          = "No response from last.fm"
  show (JSONParseError js) =
    "Failed to parse last.fm's JSON response:\n" ++ show js

instance Exception LastfmException
data TagException
= FileNotFound FilePath
deriving (Show,Typeable)
instance Exception TagException
-- }}}
-- Log functions {{{
-- Log a message at a fixed severity; 'filterLog' drops the message
-- when the current log level is below that severity.
logError, logWarn, logInfo, logDebug :: (MonadLog m) => Text -> m ()
logError = filterLog LogError
logWarn  = filterLog LogWarn
logInfo  = filterLog LogInfo
logDebug = filterLog LogDebug
-- | Log a message plus an attached data payload at 'LogError' level.
logErrorData :: (MonadLog m) => Text -> String -> m ()
logErrorData = logDataWith logError

-- | Log a message plus an attached data payload at 'LogWarn' level.
logWarnData :: (MonadLog m) => Text -> String -> m ()
logWarnData = logDataWith logWarn

-- | Log a message plus an attached data payload at 'LogInfo' level.
logInfoData :: (MonadLog m) => Text -> String -> m ()
logInfoData = logDataWith logInfo

-- | Log a message plus an attached data payload at 'LogDebug' level.
logDebugData :: (MonadLog m) => Text -> String -> m ()
logDebugData = logDataWith logDebug

-- | Shared implementation for the @log*Data@ family (previously four
-- identical bodies): the payload is indented two spaces and appended
-- line-by-line after the message.
logDataWith :: (MonadLog m) => (Text -> m ()) -> Text -> String -> m ()
logDataWith logger msg dat = logger $ T.unlines (msg : indentData 2 (T.pack dat))
-- }}}
-- Logging {{{
data LogLevel
= LogSilent
| LogError
| LogWarn
| LogInfo
| LogDebug
deriving (Eq, Ord, Show)
class (Functor m, Monad m) => MonadLog m where
getLogLevel :: m LogLevel
writeLog :: Text -> m ()
header :: m Text
-- | Break a 'Text' into lines and prefix each one with @n@ spaces.
indentData :: Int -> Text -> [Text]
indentData n txt = L.map addMargin (T.lines txt)
  where addMargin = T.append (T.replicate n " ")
-- | Emit @msg@ at severity @lvl@ if the current log level admits it.
-- The bracketed header (severity + per-monad header text) is padded to
-- a fixed column so messages line up.
-- NOTE(review): 'header' runs before the level check, so its effects
-- happen even for messages that end up suppressed.
filterLog :: (MonadLog m) => LogLevel -> Text -> m ()
filterLog lvl msg = do
  shouldLog <- (lvl <=) <$> getLogLevel
  hdr <- header
  let fullMsg = T.concat
        [ justifyLeft 20 ' ' (bracketMsgs [ renderLevel lvl , "|" , hdr ])
        ,msg
        ]
  when shouldLog $ writeLog fullMsg
-- | Join the pieces with spaces and wrap the result in square brackets.
bracketMsgs :: [Text] -> Text
bracketMsgs ms = T.concat ["[ ", T.unwords ms, " ]"]
-- | Display form of a 'LogLevel' used in log-line headers.
-- LogSilent is quoted because it should never actually be printed.
renderLevel :: LogLevel -> Text
renderLevel LogSilent = "\"Silent\""
renderLevel LogError  = "Error"
renderLevel LogWarn   = "Warn"
renderLevel LogInfo   = "Info"
renderLevel LogDebug  = "Debug"
-- }}}
| kylcarte/harihara | src/Harihara/Log.hs | bsd-3-clause | 2,561 | 0 | 16 | 569 | 912 | 481 | 431 | 74 | 5 |
module Day10 where
import Test.Hspec
import qualified Text.Megaparsec.String as P
import qualified Text.Megaparsec as P
import qualified Data.Map as Map
import Data.Map (Map)
import Data.List
import Debug.Trace
import Utils
-- Parsing
parser = parseLine `P.sepBy` (P.string "\n")
parseLine = (GiveR <$> parseGive) P.<|> (ValueR <$> parseValue)
parseBot = Bot <$> (P.string "bot " *> number)
parseValue = do
P.string "value "
n <- number
P.string " goes to "
bot <- parseBot
return $ Value n bot
parseOutputO = do
P.string "output "
n <- number
return $ OutputO n
parseOutput = parseOutputO P.<|> (BotO <$> parseBot)
parseGive = do
bot <- parseBot
P.string " gives low to "
outLow <- parseOutput
P.string " and high to "
outHigh <- parseOutput
return $ Give bot outLow outHigh
number :: P.Parser Int
number = read <$> P.many (P.oneOf "0123456789")
-- Input DSL
data Value = Value Int Bot deriving (Show)
data Give = Give Bot Output Output deriving (Show)
data Rule = ValueR Value | GiveR Give deriving (Show)
data Bot = Bot Int deriving (Show, Ord, Eq)
data Output = BotO Bot | OutputO Int deriving (Show)
-- Problem DSL
type BotStatus = [Int]
-- | Partition the input rules into initial value assignments and bot
-- transfer instructions, preserving their original relative order.
splitValueGive :: [Rule] -> ([Value], [Give])
splitValueGive = foldr classify ([], [])
  where
    classify (ValueR v) (vs, gs) = (v : vs, gs)
    classify (GiveR g)  (vs, gs) = (vs, g : gs)
-- | Initial chip holdings per bot, collected from the value rules;
-- a bot given several values accumulates them all.
makeBotStatuses :: [Value] -> Map Bot BotStatus
makeBotStatuses vals =
  Map.fromListWith (++) [ (bot, [v]) | Value v bot <- vals ]

-- | Routing table: for each bot, where its low and high chips go.
makeGiveDict :: [Give] -> Map Bot (Output, Output)
makeGiveDict gs = Map.fromList [ (bot, (lo, hi)) | Give bot lo hi <- gs ]
initProblem :: [Rule] -> (Map Bot BotStatus, Map Bot (Output, Output))
initProblem rules = (botStatuses, giveDict)
where
(values, gives) = splitValueGive rules
botStatuses = makeBotStatuses values
giveDict = makeGiveDict gives
-- utils
-- FIRST problem
doit code = go botStatuses [] []
where
(botStatuses, giveDict) = initProblem code
go statuses finalBots outputs = let currentBot = find withTwo (Map.toList statuses)
in case currentBot of
Nothing -> (finalBots, outputs)
Just (bot, status) -> let (newStatuses, updatedOutputs) = transfertBot statuses bot (giveDict Map.! bot)
newFinalBots = if checkStatus status then bot:finalBots else finalBots
in go newStatuses newFinalBots (outputs ++ updatedOutputs)
transfertBot :: Map Bot BotStatus -> Bot -> (Output, Output) -> (Map Bot BotStatus, [(Int, Int)])
transfertBot statuses bot (lowTo, highTo) = let [lowVal, highVal] = sort (statuses Map.! bot)
-- reset currentBot
statuses' = Map.insert bot [] statuses
-- add to the other bots
(statuses'', o) = insertChip lowVal lowTo statuses'
(statuses''', o') = insertChip highVal highTo statuses''
in (statuses''', o ++ o')
insertChip :: Int -> Output -> Map Bot BotStatus -> (Map Bot BotStatus, [(Int, Int)])
insertChip val (OutputO o) m = (m, [(o, val)])
insertChip val (BotO b) m = (Map.insertWith (++) b [val] m, [])
withTwo (bot, status) = length status == 2
checkStatus status = sort status == [17, 61]
day code = fst (doit code)
-- SECOND problem
day' code = let outputs = take 3 (sortOn fst (snd (doit code)))
in product (map snd outputs)
-- tests and data
-- comment out and add tests
test = hspec $ it "works" $ do
day <$> content `shouldReturn` [Bot 141]
day' <$> content `shouldReturn` 1209
fileContent = readFile "content/day10"
content = parse parser <$> fileContent
exampleSimple = "value 5 goes to bot 2\n\
\bot 2 gives low to bot 1 and high to bot 0\n\
\value 3 goes to bot 1\n\
\bot 1 gives low to output 1 and high to bot 0\n\
\bot 0 gives low to output 2 and high to output 0\n\
\value 2 goes to bot 2"
| guibou/AdventOfCode2016 | src/Day10.hs | bsd-3-clause | 4,346 | 0 | 18 | 1,327 | 1,409 | 753 | 656 | 81 | 3 |
module Adt03
(
)
where
data Expression =
Var String -- Variable
| Val Int -- Integer literal
| Op Expression Bop Expression -- Operation
deriving (Show, Eq)
-- Binary (2-input) operators
data Bop =
Plus
| Minus
| Times
| Divide
| Gt
| Ge
| Lt
| Le
| Eql
deriving (Show, Eq)
data Statement =
Assign String Expression
| Incr String
| If Expression Statement Statement
| While Expression Statement
| For Statement Expression Statement Statement
| Sequence Statement Statement
| Skip
deriving (Show, Eq)
type State = String -> Int
-- Exercise 1 -----------------------------------------
-- | Update a 'State' so that variable @s@ maps to @i@; every other
-- variable keeps its previous value.
extend :: State -> String -> Int -> State
extend st s i = \name -> if name == s then i else st name

-- | The initial state: every variable reads as 0.
empty :: State
empty = const 0
-- Exercise 2 -----------------------------------------
-- | Evaluate an expression under a state.  Comparison operators use
-- the language's 0/1 encoding of booleans.
evalE :: State -> Expression -> Int
evalE st expr =
    case expr of
      Val i        -> i
      Var a        -> st a
      Op el bop er -> apply bop (evalE st el) (evalE st er)
  where
    apply Plus   = (+)
    apply Minus  = (-)
    apply Times  = (*)
    apply Divide = div
    apply Gt     = cmp (>)
    apply Ge     = cmp (>=)
    apply Lt     = cmp (<)
    apply Le     = cmp (<=)
    apply Eql    = cmp (==)
    -- Encode a comparison result as 1 (true) or 0 (false).
    cmp f a b = if f a b then 1 else 0
-- Exercise 3 -----------------------------------------
data DietStatement = DAssign String Expression
| DIf Expression DietStatement DietStatement
| DWhile Expression DietStatement
| DSequence DietStatement DietStatement
| DSkip
deriving (Show, Eq)
desugar :: Statement -> DietStatement
desugar (Assign s e) = DAssign s e
desugar (If e thenSm elseSm) = DIf e (desugar thenSm) (desugar elseSm)
desugar (While e sm) = DWhile e (desugar sm)
desugar (Sequence sm1 sm2) = DSequence (desugar sm1) (desugar sm2)
desugar Skip = DSkip
desugar (Incr var) = DAssign var (Op (Var var) Plus (Val 1))
desugar (For initSm predEx update body) = DSequence
(desugar initSm)
(DWhile predEx
(DSequence
(desugar body)
(desugar update)))
-- Exercise 4 -----------------------------------------
-- | Big-step evaluator for the desugared language.
-- A condition is "true" only when it evaluates to exactly 1, matching
-- the 0/1 encoding produced by the comparison operators in 'evalE'.
evalSimple :: State -> DietStatement -> State
evalSimple st DSkip = st
evalSimple st (DAssign s e) = extend st s (evalE st e)
evalSimple st (DIf e thenDs elseDs) =
  if evalE st e == 1 then evalSimple st thenDs else evalSimple st elseDs
-- Loop: run the body once, then re-examine the same DWhile node.
evalSimple st (DWhile e ds) =
  if evalE st e == 1 then evalSimple (evalSimple st ds) (DWhile e ds) else st
evalSimple st (DSequence ds1 ds2) =
  evalSimple (evalSimple st ds1) ds2
-- | Run a full-featured 'Statement' by desugaring it first and then
-- evaluating the simplified form.
run :: State -> Statement -> State
run st stmt = evalSimple st (desugar stmt)
-- Programs -------------------------------------------
-- | Chain a list of statements into nested 'Sequence's.
-- 'foldr1' (not @foldr Sequence Skip@) keeps a singleton list as the
-- bare statement instead of adding a trailing 'Skip'.
slist :: [Statement] -> Statement
slist [] = Skip
slist l = foldr1 Sequence l
{- Calculate the factorial of the input
for (Out := 1; In > 0; In := In - 1) {
Out := In * Out
}
-}
factorial :: Statement
factorial = For (Assign "Out" (Val 1))
(Op (Var "In") Gt (Val 0))
(Assign "In" (Op (Var "In") Minus (Val 1)))
(Assign "Out" (Op (Var "In") Times (Var "Out")))
{- Calculate the floor of the square root of the input
B := 0;
while (A >= B * B) {
B++
};
B := B - 1
-}
squareRoot :: Statement
squareRoot = slist [ Assign "B" (Val 0)
, While (Op (Var "A") Ge (Op (Var "B") Times (Var "B")))
(Incr "B")
, Assign "B" (Op (Var "B") Minus (Val 1))
]
{- Calculate the nth Fibonacci number
F0 := 1;
F1 := 1;
if (In == 0) {
Out := F0
} else {
if (In == 1) {
Out := F1
} else {
for (C := 2; C <= In; C++) {
T := F0 + F1;
F0 := F1;
F1 := T;
Out := T
}
}
}
-}
fibonacci :: Statement
fibonacci = slist [ Assign "F0" (Val 1)
, Assign "F1" (Val 1)
, If (Op (Var "In") Eql (Val 0))
(Assign "Out" (Var "F0"))
(If (Op (Var "In") Eql (Val 1))
(Assign "Out" (Var "F1"))
(For (Assign "C" (Val 2))
(Op (Var "C") Le (Var "In"))
(Incr "C")
(slist
[ Assign "T" (Op (Var "F0") Plus (Var "F1"))
, Assign "F0" (Var "F1")
, Assign "F1" (Var "T")
, Assign "Out" (Var "T")
])
)
)
]
| Enzo-Liu/cis194 | src/Adt03.hs | bsd-3-clause | 4,975 | 0 | 19 | 1,974 | 1,505 | 785 | 720 | 105 | 10 |
module Math.Modular where
import Data.Ratio
import GHC.Real
class Num a => Euclidean a where
eDivMod :: a -> a -> (a, a)
eDiv :: a -> a -> a
eMod :: a -> a -> a
eDiv x y = fst (eDivMod x y)
eMod x y = snd (eDivMod x y)
eDivMod x y = (eDiv x y, eMod x y)
instance Euclidean Word where
eDivMod = divMod
instance Euclidean Int where
eDivMod = divMod
instance Euclidean Integer where
eDivMod = divMod
-- | Modular exponentiation by repeated squaring:
-- @modPow a e n@ computes @(a ^ e) \`eMod\` n@ in O(log e) multiplications.
-- The exponent must be non-negative; previously a negative exponent
-- made the recursion diverge (@e \`div\` 2@ floors toward negative
-- infinity and never reaches 0), so it is now rejected explicitly.
modPow :: Euclidean a => a -> Integer -> a -> a
modPow a e n
  | e < 0     = error "Math.Modular.modPow: negative exponent"
  | e == 0    = 1
  | otherwise =
      let x = modPow a (e `div` 2) n
      in
        if even e
          then (x * x) `eMod` n
          else (a * x * x) `eMod` n
-- | Modular inverse: @invMod a n@ is @Just x@ with @a*x = 1 (mod n)@
-- when @gcd a n@ is a unit, 'Nothing' otherwise.  Here @g@ is the gcd
-- reconstructed from the Bezout coefficients; the test
-- @1 `eMod` g == 0@ means "g divides 1", i.e. g is 1 or -1.
invMod :: (Eq a, Euclidean a) => a -> a -> Maybe a
invMod a n =
  let (x, y) = bezout a n
      g = a * x + n * y in
  if g /= 0 && 1 `eMod` g == 0
  then Just (x `eMod` n)
  else Nothing
-- bezout a b = (x,y) where a*x + b*y = gcd(a, b)
bezout :: (Eq a, Euclidean a) => a -> a -> (a, a)
bezout _ 0 = (1, 0)
bezout a b =
-- r = a - q * b
-- x' * b + y' * r = gcd(a, b)
-- x' * b + y' * (a - q * b) = gcd(a, b)
-- y' * a + (x' - q * y') b = gcd(a, b)
let (q, r) = a `eDivMod` b in
if r == 0
then (0, 1)
else let (x', y') = bezout b r in (y', x' - q * y')
-- | Euclid's algorithm over any 'Euclidean' domain.
eGcd :: (Eq a, Euclidean a) => a -> a -> a
eGcd a b
  | b == 0    = a
  | otherwise = eGcd b (a `eMod` b)
modForm :: (Eq a, Euclidean a) => Ratio a -> a -> Maybe a
modForm (a :% b) n = fmap (\x -> (a * x) `eMod` n) (invMod b n)
-- TODO: Investigate creating Num + Fractional instances at runtime via reflection
-- package.
| bhamrick/hsmath | Math/Modular.hs | bsd-3-clause | 1,535 | 0 | 12 | 493 | 711 | 380 | 331 | 42 | 2 |
{-# LANGUAGE OverloadedStrings, DeriveDataTypeable, FlexibleContexts,
GeneralizedNewtypeDeriving, RecordWildCards #-}
module Test.WebDriver.Session(
-- * WDSessionState class
WDSessionState(..), modifySession
-- ** WebDriver sessions
, WDSession(..), lastHTTPRequest, SessionId(..)
) where
import Data.Aeson
import Data.ByteString as BS(ByteString)
import Data.ByteString.Lazy as LBS(ByteString)
import Data.Text (Text)
import Data.Maybe
import Control.Monad.Trans.Control
import Control.Monad.Trans.Maybe
import Control.Monad.Trans.Identity
import Control.Monad.List
import Control.Monad.Reader
import Control.Monad.Error
--import Control.Monad.Cont
import Control.Monad.Writer.Strict as SW
import Control.Monad.Writer.Lazy as LW
import Control.Monad.State.Strict as SS
import Control.Monad.State.Lazy as LS
import Control.Monad.RWS.Strict as SRWS
import Control.Monad.RWS.Lazy as LRWS
import Network.HTTP.Client (Manager, Request, Response)
{- |An opaque identifier for a WebDriver session. These handles are produced by
the server on session creation, and act to identify a session in progress. -}
newtype SessionId = SessionId Text
deriving (Eq, Ord, Show, Read, FromJSON, ToJSON)
{- |The local state of a WebDriver session. This structure is passed
implicitly through all 'WD' computations -}
data WDSession = WDSession {
-- server hostname
wdSessHost :: BS.ByteString
-- server port
, wdSessPort :: Int
-- Base path for API requests
, wdSessBasePath :: BS.ByteString
-- |An opaque reference identifying the session to
-- use with 'WD' commands.
-- A value of Nothing indicates that a session
-- hasn't been created yet.
-- Sessions can be created within 'WD' via
-- 'Test.WebDriver.createSession', or created
-- automatically with 'Test.WebDriver.runSession'
, wdSessId :: Maybe SessionId
-- |The complete history of HTTP requests and
-- responses, most recent first.
, wdSessHist :: [(Request, Response LBS.ByteString)]
-- Update function used to append new entries to session history
, wdSessHistUpdate :: (Request, Response LBS.ByteString)
-> [(Request, Response LBS.ByteString)]
-> [(Request, Response LBS.ByteString)]
-- |HTTP 'Manager' used for connection pooling by the http-client library.
, wdSessHTTPManager :: Manager
}
-- |The last HTTP request issued by this session, if any (history is
-- stored most recent first).
lastHTTPRequest :: WDSession -> Maybe Request
lastHTTPRequest sess =
  case wdSessHist sess of
    ((req, _) : _) -> Just req
    []             -> Nothing
-- |A class for monads that carry a WebDriver session with them. The
-- MonadBaseControl superclass is used for exception handling through
-- the lifted-base package.
class MonadBaseControl IO s => WDSessionState s where
  -- | Retrieve the current session state.
  getSession :: s WDSession
  -- | Replace the current session state.
  putSession :: WDSession -> s ()

-- | Apply a pure update function to the stored session state.
modifySession :: WDSessionState s => (WDSession -> WDSession) -> s ()
modifySession f = getSession >>= putSession . f
-- Boilerplate pass-through instances: each monad transformer delegates
-- session access to the underlying monad via 'lift'.
instance WDSessionState m => WDSessionState (LS.StateT s m) where
  getSession = lift getSession
  putSession = lift . putSession
instance WDSessionState m => WDSessionState (SS.StateT s m) where
  getSession = lift getSession
  putSession = lift . putSession
instance WDSessionState m => WDSessionState (MaybeT m) where
  getSession = lift getSession
  putSession = lift . putSession
instance WDSessionState m => WDSessionState (IdentityT m) where
  getSession = lift getSession
  putSession = lift . putSession
instance (Monoid w, WDSessionState m) => WDSessionState (LW.WriterT w m) where
  getSession = lift getSession
  putSession = lift . putSession
instance WDSessionState m => WDSessionState (ReaderT r m) where
  getSession = lift getSession
  putSession = lift . putSession
instance (Error e, WDSessionState m) => WDSessionState (ErrorT e m) where
  getSession = lift getSession
  putSession = lift . putSession
instance (Monoid w, WDSessionState m) => WDSessionState (SRWS.RWST r w s m) where
  getSession = lift getSession
  putSession = lift . putSession
instance (Monoid w, WDSessionState wd) => WDSessionState (LRWS.RWST r w s wd) where
  getSession = lift getSession
  putSession = lift . putSession
| begriffs/hs-webdriver | src/Test/WebDriver/Session.hs | bsd-3-clause | 4,876 | 0 | 14 | 1,441 | 887 | 509 | 378 | 69 | 1 |
{-# LANGUAGE ForeignFunctionInterface, CPP #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.ARB.ShaderAtomicCounters
-- Copyright : (c) Sven Panne 2013
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- All raw functions and tokens from the ARB_shader_atomic_counters extension,
-- see <http://www.opengl.org/registry/specs/ARB/shader_atomic_counters.txt>.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.ARB.ShaderAtomicCounters (
-- * Functions
glGetActiveAtomicCounterBufferiv,
-- * Tokens
gl_ATOMIC_COUNTER_BUFFER,
gl_ATOMIC_COUNTER_BUFFER_BINDING,
gl_ATOMIC_COUNTER_BUFFER_START,
gl_ATOMIC_COUNTER_BUFFER_SIZE,
gl_ATOMIC_COUNTER_BUFFER_DATA_SIZE,
gl_ATOMIC_COUNTER_BUFFER_ACTIVE_ATOMIC_COUNTERS,
gl_ATOMIC_COUNTER_BUFFER_ACTIVE_ATOMIC_COUNTER_INDICES,
gl_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_VERTEX_SHADER,
gl_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_TESS_CONTROL_SHADER,
gl_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_TESS_EVALUATION_SHADER,
gl_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_GEOMETRY_SHADER,
gl_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_FRAGMENT_SHADER,
gl_MAX_VERTEX_ATOMIC_COUNTER_BUFFERS,
gl_MAX_TESS_CONTROL_ATOMIC_COUNTER_BUFFERS,
gl_MAX_TESS_EVALUATION_ATOMIC_COUNTER_BUFFERS,
gl_MAX_GEOMETRY_ATOMIC_COUNTER_BUFFERS,
gl_MAX_FRAGMENT_ATOMIC_COUNTER_BUFFERS,
gl_MAX_COMBINED_ATOMIC_COUNTER_BUFFERS,
gl_MAX_VERTEX_ATOMIC_COUNTERS,
gl_MAX_TESS_CONTROL_ATOMIC_COUNTERS,
gl_MAX_TESS_EVALUATION_ATOMIC_COUNTERS,
gl_MAX_GEOMETRY_ATOMIC_COUNTERS,
gl_MAX_FRAGMENT_ATOMIC_COUNTERS,
gl_MAX_COMBINED_ATOMIC_COUNTERS,
gl_MAX_ATOMIC_COUNTER_BUFFER_SIZE,
gl_MAX_ATOMIC_COUNTER_BUFFER_BINDINGS,
gl_ACTIVE_ATOMIC_COUNTER_BUFFERS,
gl_UNIFORM_ATOMIC_COUNTER_BUFFER_INDEX,
gl_UNSIGNED_INT_ATOMIC_COUNTER
) where
import Foreign.C.Types
import Foreign.Ptr
import Graphics.Rendering.OpenGL.Raw.Extensions
import Graphics.Rendering.OpenGL.Raw.Core31.Types
#include "HsOpenGLRaw.h"
-- | Registry name of this extension. This must match the name in the
-- OpenGL extension registry exactly — "GL_ARB_shader_atomic_counters"
-- (plural), as in the spec referenced in the module header — otherwise
-- extension-presence lookups against the driver's extension string would
-- never match. The previous value was missing the trailing 's'.
extensionNameString :: String
extensionNameString = "GL_ARB_shader_atomic_counters"
EXTENSION_ENTRY(dyn_glGetActiveAtomicCounterBufferiv,ptr_glGetActiveAtomicCounterBufferiv,"glGetActiveAtomicCounterBufferiv",glGetActiveAtomicCounterBufferiv,GLuint -> GLuint -> GLenum -> Ptr GLint -> IO ())
-- Token values (GLenum constants) as assigned by the
-- ARB_shader_atomic_counters extension specification.
gl_ATOMIC_COUNTER_BUFFER :: GLenum
gl_ATOMIC_COUNTER_BUFFER = 0x92C0
gl_ATOMIC_COUNTER_BUFFER_BINDING :: GLenum
gl_ATOMIC_COUNTER_BUFFER_BINDING = 0x92C1
gl_ATOMIC_COUNTER_BUFFER_START :: GLenum
gl_ATOMIC_COUNTER_BUFFER_START = 0x92C2
gl_ATOMIC_COUNTER_BUFFER_SIZE :: GLenum
gl_ATOMIC_COUNTER_BUFFER_SIZE = 0x92C3
gl_ATOMIC_COUNTER_BUFFER_DATA_SIZE :: GLenum
gl_ATOMIC_COUNTER_BUFFER_DATA_SIZE = 0x92C4
gl_ATOMIC_COUNTER_BUFFER_ACTIVE_ATOMIC_COUNTERS :: GLenum
gl_ATOMIC_COUNTER_BUFFER_ACTIVE_ATOMIC_COUNTERS = 0x92C5
gl_ATOMIC_COUNTER_BUFFER_ACTIVE_ATOMIC_COUNTER_INDICES :: GLenum
gl_ATOMIC_COUNTER_BUFFER_ACTIVE_ATOMIC_COUNTER_INDICES = 0x92C6
gl_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_VERTEX_SHADER :: GLenum
gl_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_VERTEX_SHADER = 0x92C7
gl_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_TESS_CONTROL_SHADER :: GLenum
gl_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_TESS_CONTROL_SHADER = 0x92C8
gl_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_TESS_EVALUATION_SHADER :: GLenum
gl_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_TESS_EVALUATION_SHADER = 0x92C9
gl_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_GEOMETRY_SHADER :: GLenum
gl_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_GEOMETRY_SHADER = 0x92CA
gl_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_FRAGMENT_SHADER :: GLenum
gl_ATOMIC_COUNTER_BUFFER_REFERENCED_BY_FRAGMENT_SHADER = 0x92CB
gl_MAX_VERTEX_ATOMIC_COUNTER_BUFFERS :: GLenum
gl_MAX_VERTEX_ATOMIC_COUNTER_BUFFERS = 0x92CC
gl_MAX_TESS_CONTROL_ATOMIC_COUNTER_BUFFERS :: GLenum
gl_MAX_TESS_CONTROL_ATOMIC_COUNTER_BUFFERS = 0x92CD
gl_MAX_TESS_EVALUATION_ATOMIC_COUNTER_BUFFERS :: GLenum
gl_MAX_TESS_EVALUATION_ATOMIC_COUNTER_BUFFERS = 0x92CE
gl_MAX_GEOMETRY_ATOMIC_COUNTER_BUFFERS :: GLenum
gl_MAX_GEOMETRY_ATOMIC_COUNTER_BUFFERS = 0x92CF
gl_MAX_FRAGMENT_ATOMIC_COUNTER_BUFFERS :: GLenum
gl_MAX_FRAGMENT_ATOMIC_COUNTER_BUFFERS = 0x92D0
gl_MAX_COMBINED_ATOMIC_COUNTER_BUFFERS :: GLenum
gl_MAX_COMBINED_ATOMIC_COUNTER_BUFFERS = 0x92D1
gl_MAX_VERTEX_ATOMIC_COUNTERS :: GLenum
gl_MAX_VERTEX_ATOMIC_COUNTERS = 0x92D2
gl_MAX_TESS_CONTROL_ATOMIC_COUNTERS :: GLenum
gl_MAX_TESS_CONTROL_ATOMIC_COUNTERS = 0x92D3
gl_MAX_TESS_EVALUATION_ATOMIC_COUNTERS :: GLenum
gl_MAX_TESS_EVALUATION_ATOMIC_COUNTERS = 0x92D4
gl_MAX_GEOMETRY_ATOMIC_COUNTERS :: GLenum
gl_MAX_GEOMETRY_ATOMIC_COUNTERS = 0x92D5
gl_MAX_FRAGMENT_ATOMIC_COUNTERS :: GLenum
gl_MAX_FRAGMENT_ATOMIC_COUNTERS = 0x92D6
gl_MAX_COMBINED_ATOMIC_COUNTERS :: GLenum
gl_MAX_COMBINED_ATOMIC_COUNTERS = 0x92D7
gl_MAX_ATOMIC_COUNTER_BUFFER_SIZE :: GLenum
gl_MAX_ATOMIC_COUNTER_BUFFER_SIZE = 0x92D8
gl_MAX_ATOMIC_COUNTER_BUFFER_BINDINGS :: GLenum
gl_MAX_ATOMIC_COUNTER_BUFFER_BINDINGS = 0x92DC
gl_ACTIVE_ATOMIC_COUNTER_BUFFERS :: GLenum
gl_ACTIVE_ATOMIC_COUNTER_BUFFERS = 0x92D9
gl_UNIFORM_ATOMIC_COUNTER_BUFFER_INDEX :: GLenum
gl_UNIFORM_ATOMIC_COUNTER_BUFFER_INDEX = 0x92DA
gl_UNSIGNED_INT_ATOMIC_COUNTER :: GLenum
gl_UNSIGNED_INT_ATOMIC_COUNTER = 0x92DB
| mfpi/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/ARB/ShaderAtomicCounters.hs | bsd-3-clause | 5,395 | 0 | 13 | 446 | 495 | 310 | 185 | -1 | -1 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
module Node.HCP.Eddy
( rules
, EddyUnwarpedImages (..)
) where
import qualified FSL
import qualified Node.HCP.Normalize as N
import qualified Node.HCP.Preprocessing as Preprocessing
import qualified Node.HCP.Topup as Topup
import Node.HCP.Types (CaseId, PhaseOrientation (..))
import Node.HCP.Util (hcppath)
import Node.Util (showKey)
import Paths (outdir)
import Shake.BuildNode
stage = "3_Eddy"
-- | Build-graph node keyed by the DWI scan indices and case id. Its
-- artefact is the eddy-corrected image written by FSL's @eddy@ (see the
-- 'BuildNode' instance below).
newtype EddyUnwarpedImages = EddyUnwarpedImages ([Int], CaseId)
  deriving (Show,Generic,Typeable,Eq,Hashable,Binary,NFData,Read)
-- | Build rule: declare all inputs as dependencies, then run FSL's @eddy@
-- with paths derived from the dependency nodes.
instance BuildNode EddyUnwarpedImages where
  -- Output NIfTI file under the case's stage directory.
  path n@(EddyUnwarpedImages (indices, caseid)) = hcppath caseid stage n <.> "nii.gz"
  build out@(EddyUnwarpedImages (indices, caseid)) = Just $ do
    -- Register inputs so Shake rebuilds this node when any of them change.
    need $ N.DwiN (N.DwiJoinedAll indices, caseid)
    need (Preprocessing.Index indices caseid)
    need (Preprocessing.AcqParams indices caseid)
    need (Topup.TopupOutput indices caseid)
    need (Topup.Mask indices caseid)
    -- Invoke eddy; the flag values mirror the dependency paths above.
    command_ [] "eddy" ["--imain=" ++ (path $ N.DwiN (N.DwiJoinedAll indices, caseid))
                       ,"--mask=" ++ (path $ Topup.Mask indices caseid)
                       ,"--index=" ++ (path $ Preprocessing.Index indices caseid)
                       ,"--acqp=" ++ (path $ Preprocessing.AcqParams indices caseid)
                       ,"--bvecs=" ++ (FSL.bvec $ N.DwiN (N.DwiJoinedAll indices, caseid))
                       ,"--bvals=" ++ (FSL.bval $ N.DwiN (N.DwiJoinedAll indices, caseid))
                       ,"--fwhm=0"
                       ,"--topup=" ++ (hcppath caseid "2_Topup" (Topup.TopupOutput indices caseid))
                       ,"--flm=quadratic"
                       ,"-v"
                       ,"--out=" ++ path out]
-- | Shake rule wiring 'EddyUnwarpedImages' nodes into the build graph.
rules =
  rule (buildNode :: EddyUnwarpedImages -> Maybe (Action [Double]))
| reckbo/ppl | pipeline-lib/Node/HCP/Eddy.hs | bsd-3-clause | 2,026 | 0 | 17 | 590 | 563 | 310 | 253 | 39 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
module Utils ( showTrace
, listDiff
) where
import BasePrelude
-- | Debugging aid: return the argument unchanged, emitting its 'show'
-- representation via 'trace' as a side effect.
showTrace :: (Show a) => a -> a
showTrace x = trace (show x) x
-- | Symmetric difference of two sorted lists: @listDiff xs ys@ returns
-- (elements only in @xs@, elements only in @ys@), each in ascending order.
--
-- Precondition: both inputs are sorted ascending.
--
-- Fix: the previous accumulator-based version prepended matched-head
-- elements (reversing them) but appended the unmatched tail in order, so
-- e.g. @listDiff [1,2] [5]@ returned @([2,1],[5])@. Direct recursion keeps
-- both result lists sorted.
listDiff :: (Ord a) => [a] -> [a] -> ([a], [a])
listDiff [] ys = ([], ys)
listDiff xs [] = (xs, [])
listDiff x@(xc:xs) y@(yc:ys)
  | xc == yc  = listDiff xs ys
  | xc < yc   = let (dx, dy) = listDiff xs y in (xc : dx, dy)
  | otherwise = let (dx, dy) = listDiff x ys in (dx, yc : dy)
module TotalProject where
import Data
{-
infer :: Name -> Project -> TotalProject'
infer name proj = case proj of
CD c d as ->
TotalProject'
(makeCycle c as)
(makeDur d as)
(makeEndpoint (inferEndpointCD c d))
(makeTotaltime (inferTotaltimeD d))
(addOns' as)
CD' c d' as ->
TotalProject'
(makeCycle c as)
(makeDur d' as)
Maximum
Infinite
(addOns' as)
DE d e as ->
TotalProject'
(makeCycle (inferCycleDE d e) as)
(makeDur d as)
(makeEndpoint e)
(makeTotaltime (inferTotaltimeD d))
(addOns' as)
CE c e as ->
let d = estimateDurCE c e
in TotalProject'
(makeCycle c as)
(makeDur d as)
(makeEndpoint e)
(makeTotaltime (inferTotaltimeD d))
(addOns' as)
ET e t as ->
TotalProject'
(makeCycle (inferCycleET e t) as)
(makeDur (estimateDurET e t) as)
(makeEndpoint e)
(makeTotaltime t)
(addOns' as)
CT c t as ->
TotalProject'
(makeCycle c as)
(makeDur (inferDurCT c t) as)
(makeEndpoint (inferEndpointCT c t))
(makeTotaltime t)
(addOns' as)
CDE c d e as ->
if redundancyCDE c d e
then error "Redundancy."
else TotalProject'
(makeCycle c as)
(makeDur d as)
(makeEndpoint e)
(makeTotaltime (inferTotaltimeD d))
(addOns' as)
CD'E c d' e as ->
let t = inferTotaltimeCE c e
in TotalProject'
(makeCycle c as)
(truncateDurT (makeDur d' as) t)
(makeEndpoint e)
(makeTotaltime t)
(addOns' as)
CDT c d t as ->
if redundancyDT d t
then error "Redundancy."
else TotalProject'
(makeCycle c as)
(makeDur d as)
(makeEndpoint (inferEndpointCT c t))
(makeTotaltime t)
(addOns' as)
CD'T c d' t as ->
TotalProject'
(makeCycle c as)
(truncateDurT (makeDur d' as) t)
(makeEndpoint (inferEndpointCT c t))
(makeTotaltime t)
(addOns' as)
CET c e t as ->
TotalProject'
(makeCycle c as)
(makeDur (inferDurCET c e t) as)
(makeEndpoint e)
(makeTotaltime t)
(addOns' as)
DET d e t as ->
if redundancyDT d t
then error "Redundancy."
else TotalProject'
(makeCycle (inferCycleDE d e) as)
(makeDur d as)
(makeEndpoint e)
(makeTotaltime t)
(addOns' as)
D'ET d' e t as ->
TotalProject'
(makeCycle (inferCycleD'E d' e) as)
(truncateDurT (makeDur d' as) t)
(makeEndpoint e)
(makeTotaltime t)
(addOns' as)
CDET c d e t as ->
case (redundancyDT d t, redundancyCDE c d e) of
(True, True) -> error "Redundancies."
(True, False) -> error "Redundancy: DT"
(False, True) -> error "Redundancy: CDE"
(False, False) -> TotalProject'
(makeCycle c as)
(makeDur d as)
(makeEndpoint e)
(makeTotaltime t)
(addOns' as)
CD'ET c d' e t as ->
TotalProject'
(makeCycle c as)
(truncateDurT (makeDur d' as) t)
(makeEndpoint e)
(makeTotaltime t)
(addOns' as)
---------------------
makeDur :: Duration -> AddOns -> Duration'
makeDur = undefined
makeCycle :: Cycle -> AddOns -> Cycle'
makeCycle = undefined
makeEndpoint :: Endpoint -> Endpoint'
makeEndpoint = Endpoint' . fromEndpoint
makeTotaltime :: Totaltime -> Totaltime'
makeTotaltime = Finite
truncateDurT = undefined
inferCycleCD'E = undefined
redundancyCDE = undefined
redundancyDT = undefined
inferCycleD'E = undefined
inferCycleDE = undefined
inferDurCET = undefined
truncateDE = undefined
maxDuration = undefined
estimateDurCE = undefined
inferCycleET = undefined
estimateDurET = undefined
inferDurCT = undefined
addOns' = undefined
inferEndpointCD = undefined
inferEndpointCT = undefined
inferTotaltimeD = undefined
inferTotaltimeCE = undefined
---------- Helpers
fromEndpoint (Endpoint e) = e
conversion :: TotalProject' -> TotalProject
conversion (TotalProject' c d _e _tt as) = TotalProject c d as
-}
| Luizss/TheSystem | src/TotalProject.hs | bsd-3-clause | 4,061 | 0 | 3 | 1,187 | 8 | 6 | 2 | 2 | 0 |
{-# LANGUAGE FlexibleInstances, TypeFamilies #-}
module QueryArrow.FileSystem.Connection where
import Control.Monad.Free
import Control.Monad.Trans.State
import Control.Monad.Trans.Reader
import QueryArrow.DB.DB
import QueryArrow.DB.GenericDatabase
import QueryArrow.DB.NoConnection
import QueryArrow.FileSystem.Query
import QueryArrow.FileSystem.Commands
-- | Execute a file-system query: interpret the free 'FSProgram' with
-- 'iterM', threading each input row through as state and supplying the
-- connection's host maps via a reader environment.
instance INoConnectionDatabase2 (GenericDatabase FileSystemTrans FileSystemConn) where
    type NoConnectionQueryType (GenericDatabase FileSystemTrans FileSystemConn) = FSProgram ()
    type NoConnectionRowType (GenericDatabase FileSystemTrans FileSystemConn) = MapResultRow
    noConnectionDBStmtExec (GenericDatabase _ (FileSystemConn hostmap hostmap2) _ _) qu rs = do
        row <- rs
        runReaderT (execStateT (iterM interpret qu) row) (hostmap, hostmap2)
| xu-hao/QueryArrow | QueryArrow-db-filesystem/src/QueryArrow/FileSystem/Connection.hs | bsd-3-clause | 822 | 0 | 12 | 96 | 182 | 102 | 80 | 16 | 0 |
module Feature.DeleteSpec where
import Test.Hspec
import Test.Hspec.Wai
import Text.Heredoc
import Network.HTTP.Types
import Network.Wai (Application)
import Protolude hiding (get)
-- | Integration tests for HTTP DELETE handling: status codes,
-- Content-Range headers, and optional response bodies driven by the
-- Prefer header. Cases run against the shared test database fixtures.
spec :: SpecWith Application
spec =
  describe "Deleting" $ do
    context "existing record" $ do
      it "succeeds with 204 and deletion count" $
        request methodDelete "/items?id=eq.1" [] ""
          `shouldRespondWith` ""
          { matchStatus = 204
          , matchHeaders = ["Content-Range" <:> "*/*"]
          }
      it "returns the deleted item and count if requested" $
        request methodDelete "/items?id=eq.2" [("Prefer", "return=representation"), ("Prefer", "count=exact")] ""
          `shouldRespondWith` [str|[{"id":2}]|]
          { matchStatus = 200
          , matchHeaders = ["Content-Range" <:> "*/1"]
          }
      it "returns the deleted item and shapes the response" $
        request methodDelete "/complex_items?id=eq.2&select=id,name" [("Prefer", "return=representation")] ""
          `shouldRespondWith` [str|[{"id":2,"name":"Two"}]|]
          { matchStatus = 200
          , matchHeaders = ["Content-Range" <:> "*/*"]
          }
      it "can rename and cast the selected columns" $
        request methodDelete "/complex_items?id=eq.3&select=ciId:id::text,ciName:name" [("Prefer", "return=representation")] ""
          `shouldRespondWith` [str|[{"ciId":"3","ciName":"Three"}]|]
      it "can embed (parent) entities" $
        request methodDelete "/tasks?id=eq.8&select=id,name,project{id}" [("Prefer", "return=representation")] ""
          `shouldRespondWith` [str|[{"id":8,"name":"Code OSX","project":{"id":4}}]|]
          { matchStatus = 200
          , matchHeaders = ["Content-Range" <:> "*/*"]
          }
      -- Verifies the rows are really gone, not just that the response said so.
      it "actually clears items ouf the db" $ do
        _ <- request methodDelete "/items?id=lt.15" [] ""
        get "/items"
          `shouldRespondWith` [str|[{"id":15}]|]
          { matchStatus = 200
          , matchHeaders = ["Content-Range" <:> "0-0/*"]
          }
    context "known route, no records matched" $
      it "includes [] body if return=rep" $
        request methodDelete "/items?id=eq.101"
                [("Prefer", "return=representation")] ""
          `shouldRespondWith` "[]"
          { matchStatus = 200
          , matchHeaders = ["Content-Range" <:> "*/*"]
          }
    context "totally unknown route" $
      it "fails with 404" $
        request methodDelete "/foozle?id=eq.101" [] "" `shouldRespondWith` 404
| Skyfold/postgrest | test/Feature/DeleteSpec.hs | mit | 2,507 | 0 | 18 | 651 | 510 | 288 | 222 | -1 | -1 |
{-# LANGUAGE TypeApplications #-}
module Tests.ColorSchemes (test) where
import qualified Control.Lens as Lens
import qualified Control.Monad.Trans.FastWriter as Writer
import Data.Aeson.Config (load)
import Data.Data.Lens (template)
import qualified Data.Map as Map
import GUI.Momentu.Draw (Color(..))
import qualified Lamdu.Config.Folder as Folder
import Lamdu.Config.Theme (Theme)
import System.FilePath (takeFileName)
import Test.Lamdu.Prelude
-- | Single test case: discover every theme shipped with the application,
-- resolve each selection to its file path, and run 'verifyTheme' on it.
test :: Test
test =
    Folder.getSelections (Proxy @Theme)
    >>= traverse (Folder.selectionToPath (Proxy @Theme))
    >>= traverse_ verifyTheme
    & testCase "color-scheme"
-- | Load a theme file and check its color discipline: the retro theme must
-- use only its fixed palette; every other theme may use at most three
-- distinct saturation levels (rounded to 0.001).
verifyTheme :: FilePath -> IO ()
verifyTheme filename =
    load filename & Writer.evalWriterT >>= verify
    where
    verify :: Theme -> IO ()
    verify theme
        | "retro.json" == takeFileName filename = traverse_ verifyRetroColor colors
        | Map.size saturations <= 3 = pure ()
        | otherwise =
            assertString
            ("Too many saturation options in theme " ++ filename ++ ":\n" ++
             prettyShow (saturations ^@.. Lens.itraversed))
        where
            -- Group all colors by their (rounded) saturation level.
            saturations =
                colors <&> (\c -> (roundIn 0.001 (colorSat c), [c]))
                & Map.fromListWith (++)
            -- Every Color value reachable anywhere inside the theme record.
            colors = theme ^.. template
    -- Retro palette: channels limited to {0, 0.5, 1}, alpha to a small fixed set.
    verifyRetroColor col@(Color r g b a)
        | all (`elem` [0, 0.5, 1.0]) [r, g, b]
          && elem a [0, 0.05, 0.1, 0.5, 1.0] = pure ()
        | otherwise =
            assertString ("Bad retro color in theme " ++ filename ++ ": " ++ show col)
-- | Saturation component of a color (first component of 'colorSV').
colorSat :: Color -> Double
colorSat color = fst (colorSV color)
-- | HSV-style (saturation, value) of a color, computed from its RGB
-- channels; the alpha channel is ignored.
colorSV :: Color -> (Double, Double)
colorSV (Color r g b _alpha) = (sat, val)
    where
        val = maximum [r, g, b]
        low = minimum [r, g, b]
        sat
            | val == 0 = 0
            | otherwise = (val - low) / val
-- | Round @x@ to the nearest integer multiple of @unit@.
roundIn :: RealFrac a => a -> a -> a
roundIn unit x = fromInteger steps * unit
    where
        steps = round (x / unit) :: Integer
| lamdu/lamdu | test/Tests/ColorSchemes.hs | gpl-3.0 | 2,016 | 0 | 16 | 624 | 657 | 360 | 297 | 47 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE UnicodeSyntax #-}
{-# OPTIONS_GHC -Wall -Wno-unused-imports -Wno-unticked-promoted-constructors -Wno-type-defaults -Wno-missing-signatures #-}
module Main where
import Control.Applicative
import Control.Applicative.Free
import Prelude.Unicode
import Text.Printf (printf)
-- | Functor underlying the free applicative: a node is either a nested
-- applicative layer ('C') or a plain leaf value ('L').
data CF a
  = C (CA a)
  | L a
  deriving (Functor)

-- | Free applicative over 'CF'.
type CA a = Ap CF a
-- | Wrap an applicative computation back into the free structure as a
-- nested layer.
mkC ∷ CA a → CA a
mkC layer = liftAp (C layer)
-- | Lift a plain value into the free structure as a leaf.
mkL ∷ a → CA a
mkL x = liftAp (L x)
-- | Interpret one 'CF' layer in IO: leaves are returned as-is, nested
-- layers are folded recursively with 'runAp'.
interpIO ∷ CF a → IO a
interpIO (L x) = pure x
interpIO (C ap) = runAp interpIO ap -- shed a layer of C
-- | Identity-style interpreter: rebuilds the structure unchanged,
-- re-wrapping each interpreted layer with 'liftAp'.
interpId ∷ CF a → CF a
interpId (L x) = L x
interpId (C ap) = C $ runAp (liftAp ∘ interpId) ap
-- | Interpret into the list applicative: a leaf yields a singleton list;
-- nested layers combine via the list instance's cartesian product.
interpX ∷ CF a → [] a
interpX (L x) = [x]
interpX (C ap) = runAp interpX ap
-- | Entry point: print the opening room description, one line at a time.
main ∷ IO ()
main = mapM_ putStrLn
  [ "You are standing at the end of a road before a small brick building."
  , "Around you is a forest. A small stream flows out of the building and"
  , "down a gully."
  ]
| deepfire/mood | experiments/apex/Main.hs | agpl-3.0 | 1,335 | 4 | 8 | 301 | 331 | 174 | 157 | 40 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module BDCS.Export.Customize(CSOverlay,
Customization(..),
addToOverlay,
filesToObjectsC,
runCustomizations)
where
import qualified Control.Exception.Lifted as CEL
import Control.Monad(foldM)
import Control.Monad.Except(MonadError, throwError)
import Control.Monad.IO.Class(MonadIO)
import Control.Monad.Logger(MonadLogger, MonadLoggerIO, logDebugN)
import Control.Monad.Trans.Control(MonadBaseControl)
import Crypto.Hash(Digest, hash)
import Crypto.Hash.Algorithms(Blake2b_256)
import Data.ByteArray(convert)
import qualified Data.ByteString as BS
import Data.Conduit(Conduit, awaitForever, yield)
import Data.ContentStore(ContentStore)
import qualified Data.Map.Strict as Map
import qualified Data.Text as T
import BDCS.CS(Object(..), fileToObjectC)
import BDCS.DB(Files(..))
import BDCS.Export.FSTree(FSTree, addFileToTree)
-- | In-memory overlay mapping content digests to objects that are not in
-- the on-disk content store; consulted before the store in 'filesToObjectsC'.
type CSOverlay = Map.Map BS.ByteString Object
-- Just one type of customization for now, more to come later
{-# ANN module ("HLint: ignore Use newtype instead of data" :: String) #-}
-- | A customization step: record the given file metadata in the export
-- tree, optionally with new file content.
data Customization = WriteFile Files (Maybe BS.ByteString)
  deriving (Eq, Show)
-- Not everything looks good as an operator, hlint
{-# ANN filesToObjectsC ("HLint: ignore Use section" :: String) #-}
-- | Conduit pairing each 'Files' row with its content object, preferring a
-- hit in the in-memory overlay over a fetch from the content store.
filesToObjectsC :: (MonadError String m, MonadIO m) => CSOverlay -> ContentStore -> Conduit Files m (Files, Object)
filesToObjectsC overlay repo = awaitForever $ \f@Files{..} ->
    case maybe Nothing (flip Map.lookup overlay) filesCs_object of
        -- Not overlaid (or no digest at all): defer to the content store.
        Nothing -> fileToObjectC repo f
        Just obj -> yield (f, obj)
-- | Record a file (and its optional content) in the overlay and the
-- in-memory filesystem tree, returning the updated pair.
addToOverlay :: (MonadError String m, MonadLogger m) => CSOverlay -> FSTree -> Files -> Maybe BS.ByteString -> m (CSOverlay, FSTree)
addToOverlay overlay tree file content = do
    logDebugN $ T.pack "Adding to overlay: " `T.append` filesPath file
    -- If the file has content, create a hash of it and add the content to the overlay.
    -- The digest type doesn't need to match the content store, it just needs to be something we can
    -- use as a hash key.
    let (newFile, newOverlay) = case content of
            Nothing-> (file{filesCs_object = Nothing}, overlay)
            Just c -> let digest = makeDigest c
                      in (file{filesCs_object = Just digest}, Map.insert digest (FileObject c) overlay)
    -- Add the metadata to the FSTree
    newTree <- addFileToTree True tree newFile
    return (newOverlay, newTree)
  where
    -- Blake2b-256 digest of the content, used only as an overlay map key.
    makeDigest :: BS.ByteString -> BS.ByteString
    makeDigest input =
        let digest = hash input :: Digest Blake2b_256
        in convert digest
-- | Fold every customization over the (overlay, tree) state.
-- Any IOException raised while applying a step is converted into the
-- pipeline's error monad via 'throwError'.
runCustomizations :: (MonadBaseControl IO m, MonadError String m, MonadLoggerIO m) =>
                     CSOverlay
                  -> ContentStore
                  -> FSTree
                  -> [Customization]
                  -> m (CSOverlay, FSTree)
runCustomizations overlay _repo tree customizations = do
    logDebugN $ T.pack "Running customizations"
    foldM runCustomization (overlay, tree) customizations `CEL.catch` \e -> throwError $ show (e :: CEL.IOException)
  where
    -- Apply a single step; currently only WriteFile exists.
    runCustomization :: (MonadError String m, MonadLoggerIO m) => (CSOverlay, FSTree) -> Customization -> m (CSOverlay, FSTree)
    runCustomization (o, t) (WriteFile file content) = addToOverlay o t file content
| atodorov/bdcs | src/BDCS/Export/Customize.hs | lgpl-2.1 | 3,498 | 0 | 18 | 856 | 871 | 486 | 385 | -1 | -1 |
module Data.Interned.String
( InternedString(internedStringId)
) where
import Data.Interned.Internal.String
| greydot/intern | Data/Interned/String.hs | bsd-3-clause | 113 | 0 | 5 | 13 | 23 | 16 | 7 | 5 | 0 |
{-
Copyright 2012, 2013, 2014 Colin Woodbury <colingw@gmail.com>
This file is part of Aura.
Aura is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Aura is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Aura. If not, see <http://www.gnu.org/licenses/>.
-}
module Aura.Pkgbuild.Base where
import Control.Monad ((>=>))
import Aura.Bash
import Aura.Core
import Aura.Monad.Aura
import Aura.Pkgbuild.Editing
---
-- | Directory where Aura caches downloaded PKGBUILD files.
pkgbuildCache :: FilePath
pkgbuildCache = "/var/cache/aura/pkgbuilds/"
-- | Cache filename for a package: the package name plus the @.pb@ suffix.
toFilename :: String -> FilePath
toFilename name = name ++ ".pb"
-- | Full path of the cached PKGBUILD for the given package name.
pkgbuildPath :: String -> FilePath
pkgbuildPath = (pkgbuildCache ++) . toFilename
-- | Full version string (@pkgver-pkgrel@) read from a parsed PKGBUILD
-- namespace.
-- NOTE(review): 'head' is partial — this crashes on a PKGBUILD missing
-- pkgver or pkgrel; confirm the parser guarantees both fields exist.
trueVersion :: Namespace -> String
trueVersion ns = pkgver ++ "-" ++ pkgrel
    where pkgver = head $ value ns "pkgver"
          pkgrel = head $ value ns "pkgrel"
-- | Yields the value of the `depends` field.
depends :: Namespace -> [String]
depends ns = value ns "depends"

-- | Yields the value of the `makedepends` field.
makedepends :: Namespace -> [String]
makedepends ns = value ns "makedepends"

-- | Yields the value of the `checkdepends` field.
checkdepends :: Namespace -> [String]
checkdepends ns = value ns "checkdepends"
-- One of my favourite functions in this code base.
-- | Chain every PKGBUILD customization pass (customizepkg, then hotEdit)
-- into a single Kleisli pipeline over the 'Buildable'.
pbCustomization :: Buildable -> Aura Buildable
pbCustomization = foldl (>=>) return [customizepkg,hotEdit]
-- | Package a Buildable, running the customization handler first.
-- Parses the (possibly edited) PKGBUILD to derive the version and the
-- combined depends/makedepends/checkdepends list.
packageBuildable :: Buildable -> Aura Package
packageBuildable b = do
  b' <- pbCustomization b
  -- Parse the customized PKGBUILD into a bash namespace.
  ns <- namespace (baseNameOf b') (pkgbuildOf b')
  return Package
    { pkgNameOf        = baseNameOf b'
    , pkgVersionOf     = trueVersion ns
    , pkgDepsOf        = map parseDep $ concatMap ($ ns)
                         [depends,makedepends,checkdepends]
    , pkgInstallTypeOf = Build b' }
| joehillen/aura | src/Aura/Pkgbuild/Base.hs | gpl-3.0 | 2,137 | 0 | 12 | 427 | 371 | 204 | 167 | 34 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="zh-CN">
<title>Browser View | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>搜索</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | 0xkasun/security-tools | src/org/zaproxy/zap/extension/browserView/resources/help_zh_CN/helpset_zh_CN.hs | apache-2.0 | 974 | 82 | 65 | 160 | 413 | 209 | 204 | -1 | -1 |
-- %************************************************************************
-- %* *
-- The known-key names for Template Haskell
-- %* *
-- %************************************************************************
module THNames where
import PrelNames( mk_known_key_name )
import Module( Module, mkModuleNameFS, mkModule, thUnitId )
import Name( Name )
import OccName( tcName, clsName, dataName, varName )
import RdrName( RdrName, nameRdrName )
import Unique
import FastString
-- To add a name, do three things
--
-- 1) Allocate a key
-- 2) Make a "Name"
-- 3) Add the name to templateHaskellNames
-- | Every known-key 'Name' that a Template Haskell quotation bracket may
-- implicitly mention.  A name missing from this list will not be found
-- when brackets are desugared, so additions below must also appear here.
templateHaskellNames :: [Name]
-- The names that are implicitly mentioned by ``bracket''
-- Should stay in sync with the import list of DsMeta
templateHaskellNames = [
    returnQName, bindQName, sequenceQName, newNameName, liftName,
    mkNameName, mkNameG_vName, mkNameG_dName, mkNameG_tcName, mkNameLName,
    mkNameSName,
    liftStringName,
    unTypeName,
    unTypeQName,
    unsafeTExpCoerceName,
    -- Lit
    charLName, stringLName, integerLName, intPrimLName, wordPrimLName,
    floatPrimLName, doublePrimLName, rationalLName, stringPrimLName,
    charPrimLName,
    -- Pat
    litPName, varPName, tupPName, unboxedTupPName,
    conPName, tildePName, bangPName, infixPName,
    asPName, wildPName, recPName, listPName, sigPName, viewPName,
    -- FieldPat
    fieldPatName,
    -- Match
    matchName,
    -- Clause
    clauseName,
    -- Exp
    varEName, conEName, litEName, appEName, infixEName,
    infixAppName, sectionLName, sectionRName, lamEName, lamCaseEName,
    tupEName, unboxedTupEName,
    condEName, multiIfEName, letEName, caseEName, doEName, compEName,
    fromEName, fromThenEName, fromToEName, fromThenToEName,
    listEName, sigEName, recConEName, recUpdEName, staticEName, unboundVarEName,
    -- FieldExp
    fieldExpName,
    -- Body
    guardedBName, normalBName,
    -- Guard
    normalGEName, patGEName,
    -- Stmt
    bindSName, letSName, noBindSName, parSName,
    -- Dec
    funDName, valDName, dataDName, newtypeDName, tySynDName,
    classDName, instanceWithOverlapDName,
    standaloneDerivDName, sigDName, forImpDName,
    pragInlDName, pragSpecDName, pragSpecInlDName, pragSpecInstDName,
    pragRuleDName, pragAnnDName, defaultSigDName,
    dataFamilyDName, openTypeFamilyDName, closedTypeFamilyDName,
    dataInstDName, newtypeInstDName, tySynInstDName,
    infixLDName, infixRDName, infixNDName,
    roleAnnotDName, patSynDName, patSynSigDName,
    -- Cxt
    cxtName,
    -- SourceUnpackedness
    noSourceUnpackednessName, sourceNoUnpackName, sourceUnpackName,
    -- SourceStrictness
    noSourceStrictnessName, sourceLazyName, sourceStrictName,
    -- Con
    normalCName, recCName, infixCName, forallCName, gadtCName, recGadtCName,
    -- Bang
    bangName,
    -- BangType
    bangTypeName,
    -- VarBangType
    varBangTypeName,
    -- PatSynDir (for pattern synonyms)
    unidirPatSynName, implBidirPatSynName, explBidirPatSynName,
    -- PatSynArgs (for pattern synonyms)
    prefixPatSynName, infixPatSynName, recordPatSynName,
    -- Type
    forallTName, varTName, conTName, appTName, equalityTName,
    tupleTName, unboxedTupleTName, arrowTName, listTName, sigTName, litTName,
    promotedTName, promotedTupleTName, promotedNilTName, promotedConsTName,
    wildCardTName,
    -- TyLit
    numTyLitName, strTyLitName,
    -- TyVarBndr
    plainTVName, kindedTVName,
    -- Role
    nominalRName, representationalRName, phantomRName, inferRName,
    -- Kind
    varKName, conKName, tupleKName, arrowKName, listKName, appKName,
    starKName, constraintKName,
    -- FamilyResultSig
    noSigName, kindSigName, tyVarSigName,
    -- InjectivityAnn
    injectivityAnnName,
    -- Callconv
    cCallName, stdCallName, cApiCallName, primCallName, javaScriptCallName,
    -- Safety
    unsafeName,
    safeName,
    interruptibleName,
    -- Inline
    noInlineDataConName, inlineDataConName, inlinableDataConName,
    -- RuleMatch
    conLikeDataConName, funLikeDataConName,
    -- Phases
    allPhasesDataConName, fromPhaseDataConName, beforePhaseDataConName,
    -- Overlap
    overlappableDataConName, overlappingDataConName, overlapsDataConName,
    incoherentDataConName,
    -- TExp
    tExpDataConName,
    -- RuleBndr
    ruleVarName, typedRuleVarName,
    -- FunDep
    funDepName,
    -- FamFlavour
    typeFamName, dataFamName,
    -- TySynEqn
    tySynEqnName,
    -- AnnTarget
    valueAnnotationName, typeAnnotationName, moduleAnnotationName,
    -- The type classes
    liftClassName,
    -- And the tycons
    qTyConName, nameTyConName, patTyConName, fieldPatTyConName, matchQTyConName,
    clauseQTyConName, expQTyConName, fieldExpTyConName, predTyConName,
    stmtQTyConName, decQTyConName, conQTyConName, bangTypeQTyConName,
    varBangTypeQTyConName, typeQTyConName, expTyConName, decTyConName,
    typeTyConName, tyVarBndrTyConName, matchTyConName, clauseTyConName,
    patQTyConName, fieldPatQTyConName, fieldExpQTyConName, funDepTyConName,
    predQTyConName, decsQTyConName, ruleBndrQTyConName, tySynEqnQTyConName,
    roleTyConName, tExpTyConName, injAnnTyConName, kindTyConName,
    overlapTyConName,
    -- Quasiquoting
    quoteDecName, quoteTypeName, quoteExpName, quotePatName]
-- | The three template-haskell modules in which the known-key names
-- below live: Syntax, Lib and Quote.
thSyn, thLib, qqLib :: Module
thSyn = mkTHModule (fsLit "Language.Haskell.TH.Syntax")
thLib = mkTHModule (fsLit "Language.Haskell.TH.Lib")
qqLib = mkTHModule (fsLit "Language.Haskell.TH.Quote")
-- | Build a 'Module' in the template-haskell package from its dotted
-- module name.
mkTHModule :: FastString -> Module
mkTHModule = mkModule thUnitId . mkModuleNameFS
-- | Smart constructors for known-key names, one per (namespace, module)
-- pair: *Fun = term variables, *Tc = type constructors, thCls = classes,
-- thCon = data constructors; the lib/th/qq prefix selects the module.
libFun, libTc, thFun, thTc, thCls, thCon, qqFun :: FastString -> Unique -> Name
libFun = mk_known_key_name OccName.varName thLib
libTc = mk_known_key_name OccName.tcName thLib
thFun = mk_known_key_name OccName.varName thSyn
thTc = mk_known_key_name OccName.tcName thSyn
thCls = mk_known_key_name OccName.clsName thSyn
thCon = mk_known_key_name OccName.dataName thSyn
qqFun = mk_known_key_name OccName.varName qqLib
-------------------- TH.Syntax -----------------------
-- Names that live in Language.Haskell.TH.Syntax: the Lift class, the
-- abstract-syntax type constructors, and the monadic/name-building
-- primitives that desugared brackets call directly.
liftClassName :: Name
liftClassName = thCls (fsLit "Lift") liftClassKey
qTyConName, nameTyConName, fieldExpTyConName, patTyConName,
    fieldPatTyConName, expTyConName, decTyConName, typeTyConName,
    tyVarBndrTyConName, matchTyConName, clauseTyConName, funDepTyConName,
    predTyConName, tExpTyConName, injAnnTyConName, kindTyConName,
    overlapTyConName :: Name
qTyConName = thTc (fsLit "Q") qTyConKey
nameTyConName = thTc (fsLit "Name") nameTyConKey
fieldExpTyConName = thTc (fsLit "FieldExp") fieldExpTyConKey
patTyConName = thTc (fsLit "Pat") patTyConKey
fieldPatTyConName = thTc (fsLit "FieldPat") fieldPatTyConKey
expTyConName = thTc (fsLit "Exp") expTyConKey
decTyConName = thTc (fsLit "Dec") decTyConKey
typeTyConName = thTc (fsLit "Type") typeTyConKey
tyVarBndrTyConName= thTc (fsLit "TyVarBndr") tyVarBndrTyConKey
matchTyConName = thTc (fsLit "Match") matchTyConKey
clauseTyConName = thTc (fsLit "Clause") clauseTyConKey
funDepTyConName = thTc (fsLit "FunDep") funDepTyConKey
predTyConName = thTc (fsLit "Pred") predTyConKey
tExpTyConName = thTc (fsLit "TExp") tExpTyConKey
injAnnTyConName = thTc (fsLit "InjectivityAnn") injAnnTyConKey
kindTyConName = thTc (fsLit "Kind") kindTyConKey
overlapTyConName = thTc (fsLit "Overlap") overlapTyConKey
returnQName, bindQName, sequenceQName, newNameName, liftName,
    mkNameName, mkNameG_vName, mkNameG_dName, mkNameG_tcName,
    mkNameLName, mkNameSName, liftStringName, unTypeName, unTypeQName,
    unsafeTExpCoerceName :: Name
returnQName = thFun (fsLit "returnQ") returnQIdKey
bindQName = thFun (fsLit "bindQ") bindQIdKey
sequenceQName = thFun (fsLit "sequenceQ") sequenceQIdKey
newNameName = thFun (fsLit "newName") newNameIdKey
liftName = thFun (fsLit "lift") liftIdKey
liftStringName = thFun (fsLit "liftString") liftStringIdKey
mkNameName = thFun (fsLit "mkName") mkNameIdKey
mkNameG_vName = thFun (fsLit "mkNameG_v") mkNameG_vIdKey
mkNameG_dName = thFun (fsLit "mkNameG_d") mkNameG_dIdKey
mkNameG_tcName = thFun (fsLit "mkNameG_tc") mkNameG_tcIdKey
mkNameLName = thFun (fsLit "mkNameL") mkNameLIdKey
mkNameSName = thFun (fsLit "mkNameS") mkNameSIdKey
unTypeName = thFun (fsLit "unType") unTypeIdKey
unTypeQName = thFun (fsLit "unTypeQ") unTypeQIdKey
unsafeTExpCoerceName = thFun (fsLit "unsafeTExpCoerce") unsafeTExpCoerceIdKey
-------------------- TH.Lib -----------------------
-- Names in Language.Haskell.TH.Lib: the smart constructors that
-- desugared brackets use to rebuild TH syntax.  One group per TH
-- syntax type, mirroring its data constructors.
-- data Lit = ...
charLName, stringLName, integerLName, intPrimLName, wordPrimLName,
floatPrimLName, doublePrimLName, rationalLName, stringPrimLName,
charPrimLName :: Name
charLName = libFun (fsLit "charL") charLIdKey
stringLName = libFun (fsLit "stringL") stringLIdKey
integerLName = libFun (fsLit "integerL") integerLIdKey
intPrimLName = libFun (fsLit "intPrimL") intPrimLIdKey
wordPrimLName = libFun (fsLit "wordPrimL") wordPrimLIdKey
floatPrimLName = libFun (fsLit "floatPrimL") floatPrimLIdKey
doublePrimLName = libFun (fsLit "doublePrimL") doublePrimLIdKey
rationalLName = libFun (fsLit "rationalL") rationalLIdKey
stringPrimLName = libFun (fsLit "stringPrimL") stringPrimLIdKey
charPrimLName = libFun (fsLit "charPrimL") charPrimLIdKey
-- data Pat = ...
-- Smart-constructor names for pattern syntax.
litPName, varPName, tupPName, unboxedTupPName, conPName, infixPName, tildePName, bangPName,
asPName, wildPName, recPName, listPName, sigPName, viewPName :: Name
litPName = libFun (fsLit "litP") litPIdKey
varPName = libFun (fsLit "varP") varPIdKey
tupPName = libFun (fsLit "tupP") tupPIdKey
unboxedTupPName = libFun (fsLit "unboxedTupP") unboxedTupPIdKey
conPName = libFun (fsLit "conP") conPIdKey
infixPName = libFun (fsLit "infixP") infixPIdKey
tildePName = libFun (fsLit "tildeP") tildePIdKey
bangPName = libFun (fsLit "bangP") bangPIdKey
asPName = libFun (fsLit "asP") asPIdKey
wildPName = libFun (fsLit "wildP") wildPIdKey
recPName = libFun (fsLit "recP") recPIdKey
listPName = libFun (fsLit "listP") listPIdKey
sigPName = libFun (fsLit "sigP") sigPIdKey
viewPName = libFun (fsLit "viewP") viewPIdKey
-- type FieldPat = ...
fieldPatName :: Name
fieldPatName = libFun (fsLit "fieldPat") fieldPatIdKey
-- data Match = ...
matchName :: Name
matchName = libFun (fsLit "match") matchIdKey
-- data Clause = ...
clauseName :: Name
clauseName = libFun (fsLit "clause") clauseIdKey
-- data Exp = ...
-- Smart-constructor names for expression syntax.  Note: staticEName and
-- unboundVarEName are declared in this signature group but defined
-- further below, after the ArithSeq sub-group.
varEName, conEName, litEName, appEName, infixEName, infixAppName,
sectionLName, sectionRName, lamEName, lamCaseEName, tupEName,
unboxedTupEName, condEName, multiIfEName, letEName, caseEName,
doEName, compEName, staticEName, unboundVarEName :: Name
varEName = libFun (fsLit "varE") varEIdKey
conEName = libFun (fsLit "conE") conEIdKey
litEName = libFun (fsLit "litE") litEIdKey
appEName = libFun (fsLit "appE") appEIdKey
infixEName = libFun (fsLit "infixE") infixEIdKey
infixAppName = libFun (fsLit "infixApp") infixAppIdKey
sectionLName = libFun (fsLit "sectionL") sectionLIdKey
sectionRName = libFun (fsLit "sectionR") sectionRIdKey
lamEName = libFun (fsLit "lamE") lamEIdKey
lamCaseEName = libFun (fsLit "lamCaseE") lamCaseEIdKey
tupEName = libFun (fsLit "tupE") tupEIdKey
unboxedTupEName = libFun (fsLit "unboxedTupE") unboxedTupEIdKey
condEName = libFun (fsLit "condE") condEIdKey
multiIfEName = libFun (fsLit "multiIfE") multiIfEIdKey
letEName = libFun (fsLit "letE") letEIdKey
caseEName = libFun (fsLit "caseE") caseEIdKey
doEName = libFun (fsLit "doE") doEIdKey
compEName = libFun (fsLit "compE") compEIdKey
-- ArithSeq skips a level
fromEName, fromThenEName, fromToEName, fromThenToEName :: Name
fromEName = libFun (fsLit "fromE") fromEIdKey
fromThenEName = libFun (fsLit "fromThenE") fromThenEIdKey
fromToEName = libFun (fsLit "fromToE") fromToEIdKey
fromThenToEName = libFun (fsLit "fromThenToE") fromThenToEIdKey
-- end ArithSeq
listEName, sigEName, recConEName, recUpdEName :: Name
listEName = libFun (fsLit "listE") listEIdKey
sigEName = libFun (fsLit "sigE") sigEIdKey
recConEName = libFun (fsLit "recConE") recConEIdKey
recUpdEName = libFun (fsLit "recUpdE") recUpdEIdKey
staticEName = libFun (fsLit "staticE") staticEIdKey
unboundVarEName = libFun (fsLit "unboundVarE") unboundVarEIdKey
-- type FieldExp = ...
fieldExpName :: Name
fieldExpName = libFun (fsLit "fieldExp") fieldExpIdKey
-- data Body = ...
guardedBName, normalBName :: Name
guardedBName = libFun (fsLit "guardedB") guardedBIdKey
normalBName = libFun (fsLit "normalB") normalBIdKey
-- data Guard = ...
normalGEName, patGEName :: Name
normalGEName = libFun (fsLit "normalGE") normalGEIdKey
patGEName = libFun (fsLit "patGE") patGEIdKey
-- data Stmt = ...
bindSName, letSName, noBindSName, parSName :: Name
bindSName = libFun (fsLit "bindS") bindSIdKey
letSName = libFun (fsLit "letS") letSIdKey
noBindSName = libFun (fsLit "noBindS") noBindSIdKey
parSName = libFun (fsLit "parS") parSIdKey
-- data Dec = ...
-- Smart-constructor names for declaration syntax.
funDName, valDName, dataDName, newtypeDName, tySynDName, classDName,
instanceWithOverlapDName, sigDName, forImpDName, pragInlDName,
pragSpecDName,
pragSpecInlDName, pragSpecInstDName, pragRuleDName, pragAnnDName,
standaloneDerivDName, defaultSigDName, dataInstDName, newtypeInstDName,
tySynInstDName, dataFamilyDName, openTypeFamilyDName, closedTypeFamilyDName,
infixLDName, infixRDName, infixNDName, roleAnnotDName, patSynDName,
patSynSigDName :: Name
funDName = libFun (fsLit "funD") funDIdKey
valDName = libFun (fsLit "valD") valDIdKey
dataDName = libFun (fsLit "dataD") dataDIdKey
newtypeDName = libFun (fsLit "newtypeD") newtypeDIdKey
tySynDName = libFun (fsLit "tySynD") tySynDIdKey
classDName = libFun (fsLit "classD") classDIdKey
instanceWithOverlapDName
    = libFun (fsLit "instanceWithOverlapD") instanceWithOverlapDIdKey
standaloneDerivDName = libFun (fsLit "standaloneDerivD") standaloneDerivDIdKey
sigDName = libFun (fsLit "sigD") sigDIdKey
defaultSigDName = libFun (fsLit "defaultSigD") defaultSigDIdKey
forImpDName = libFun (fsLit "forImpD") forImpDIdKey
pragInlDName = libFun (fsLit "pragInlD") pragInlDIdKey
pragSpecDName = libFun (fsLit "pragSpecD") pragSpecDIdKey
pragSpecInlDName = libFun (fsLit "pragSpecInlD") pragSpecInlDIdKey
pragSpecInstDName = libFun (fsLit "pragSpecInstD") pragSpecInstDIdKey
pragRuleDName = libFun (fsLit "pragRuleD") pragRuleDIdKey
pragAnnDName = libFun (fsLit "pragAnnD") pragAnnDIdKey
dataInstDName = libFun (fsLit "dataInstD") dataInstDIdKey
newtypeInstDName = libFun (fsLit "newtypeInstD") newtypeInstDIdKey
tySynInstDName = libFun (fsLit "tySynInstD") tySynInstDIdKey
openTypeFamilyDName = libFun (fsLit "openTypeFamilyD") openTypeFamilyDIdKey
closedTypeFamilyDName= libFun (fsLit "closedTypeFamilyD") closedTypeFamilyDIdKey
dataFamilyDName = libFun (fsLit "dataFamilyD") dataFamilyDIdKey
infixLDName = libFun (fsLit "infixLD") infixLDIdKey
infixRDName = libFun (fsLit "infixRD") infixRDIdKey
infixNDName = libFun (fsLit "infixND") infixNDIdKey
roleAnnotDName = libFun (fsLit "roleAnnotD") roleAnnotDIdKey
patSynDName = libFun (fsLit "patSynD") patSynDIdKey
patSynSigDName = libFun (fsLit "patSynSigD") patSynSigDIdKey
-- type Ctxt = ...
cxtName :: Name
cxtName = libFun (fsLit "cxt") cxtIdKey
-- data SourceUnpackedness = ...
-- Smart-constructor names for strictness/unpack annotations,
-- constructors, Bang/BangType helpers and pattern-synonym syntax.
noSourceUnpackednessName, sourceNoUnpackName, sourceUnpackName :: Name
noSourceUnpackednessName = libFun (fsLit "noSourceUnpackedness") noSourceUnpackednessKey
sourceNoUnpackName = libFun (fsLit "sourceNoUnpack") sourceNoUnpackKey
sourceUnpackName = libFun (fsLit "sourceUnpack") sourceUnpackKey
-- data SourceStrictness = ...
noSourceStrictnessName, sourceLazyName, sourceStrictName :: Name
noSourceStrictnessName = libFun (fsLit "noSourceStrictness") noSourceStrictnessKey
sourceLazyName = libFun (fsLit "sourceLazy") sourceLazyKey
sourceStrictName = libFun (fsLit "sourceStrict") sourceStrictKey
-- data Con = ...
normalCName, recCName, infixCName, forallCName, gadtCName, recGadtCName :: Name
normalCName = libFun (fsLit "normalC" ) normalCIdKey
recCName = libFun (fsLit "recC" ) recCIdKey
infixCName = libFun (fsLit "infixC" ) infixCIdKey
forallCName = libFun (fsLit "forallC" ) forallCIdKey
gadtCName = libFun (fsLit "gadtC" ) gadtCIdKey
recGadtCName = libFun (fsLit "recGadtC") recGadtCIdKey
-- data Bang = ...
bangName :: Name
bangName = libFun (fsLit "bang") bangIdKey
-- type BangType = ...
bangTypeName :: Name
bangTypeName = libFun (fsLit "bangType") bangTKey
-- type VarBangType = ...
varBangTypeName :: Name
varBangTypeName = libFun (fsLit "varBangType") varBangTKey
-- data PatSynDir = ...
unidirPatSynName, implBidirPatSynName, explBidirPatSynName :: Name
unidirPatSynName = libFun (fsLit "unidir") unidirPatSynIdKey
implBidirPatSynName = libFun (fsLit "implBidir") implBidirPatSynIdKey
explBidirPatSynName = libFun (fsLit "explBidir") explBidirPatSynIdKey
-- data PatSynArgs = ...
prefixPatSynName, infixPatSynName, recordPatSynName :: Name
prefixPatSynName = libFun (fsLit "prefixPatSyn") prefixPatSynIdKey
infixPatSynName = libFun (fsLit "infixPatSyn") infixPatSynIdKey
recordPatSynName = libFun (fsLit "recordPatSyn") recordPatSynIdKey
-- data Type = ...
-- Smart-constructor names for type, kind and related syntax.
forallTName, varTName, conTName, tupleTName, unboxedTupleTName, arrowTName,
listTName, appTName, sigTName, equalityTName, litTName,
promotedTName, promotedTupleTName,
promotedNilTName, promotedConsTName,
wildCardTName :: Name
forallTName = libFun (fsLit "forallT") forallTIdKey
varTName = libFun (fsLit "varT") varTIdKey
conTName = libFun (fsLit "conT") conTIdKey
tupleTName = libFun (fsLit "tupleT") tupleTIdKey
unboxedTupleTName = libFun (fsLit "unboxedTupleT") unboxedTupleTIdKey
arrowTName = libFun (fsLit "arrowT") arrowTIdKey
listTName = libFun (fsLit "listT") listTIdKey
appTName = libFun (fsLit "appT") appTIdKey
sigTName = libFun (fsLit "sigT") sigTIdKey
equalityTName = libFun (fsLit "equalityT") equalityTIdKey
litTName = libFun (fsLit "litT") litTIdKey
promotedTName = libFun (fsLit "promotedT") promotedTIdKey
promotedTupleTName = libFun (fsLit "promotedTupleT") promotedTupleTIdKey
promotedNilTName = libFun (fsLit "promotedNilT") promotedNilTIdKey
promotedConsTName = libFun (fsLit "promotedConsT") promotedConsTIdKey
wildCardTName = libFun (fsLit "wildCardT") wildCardTIdKey
-- data TyLit = ...
numTyLitName, strTyLitName :: Name
numTyLitName = libFun (fsLit "numTyLit") numTyLitIdKey
strTyLitName = libFun (fsLit "strTyLit") strTyLitIdKey
-- data TyVarBndr = ...
plainTVName, kindedTVName :: Name
plainTVName = libFun (fsLit "plainTV") plainTVIdKey
kindedTVName = libFun (fsLit "kindedTV") kindedTVIdKey
-- data Role = ...
nominalRName, representationalRName, phantomRName, inferRName :: Name
nominalRName = libFun (fsLit "nominalR") nominalRIdKey
representationalRName = libFun (fsLit "representationalR") representationalRIdKey
phantomRName = libFun (fsLit "phantomR") phantomRIdKey
inferRName = libFun (fsLit "inferR") inferRIdKey
-- data Kind = ...
varKName, conKName, tupleKName, arrowKName, listKName, appKName,
starKName, constraintKName :: Name
varKName = libFun (fsLit "varK") varKIdKey
conKName = libFun (fsLit "conK") conKIdKey
tupleKName = libFun (fsLit "tupleK") tupleKIdKey
arrowKName = libFun (fsLit "arrowK") arrowKIdKey
listKName = libFun (fsLit "listK") listKIdKey
appKName = libFun (fsLit "appK") appKIdKey
starKName = libFun (fsLit "starK") starKIdKey
constraintKName = libFun (fsLit "constraintK") constraintKIdKey
-- data FamilyResultSig = ...
noSigName, kindSigName, tyVarSigName :: Name
noSigName = libFun (fsLit "noSig") noSigIdKey
kindSigName = libFun (fsLit "kindSig") kindSigIdKey
tyVarSigName = libFun (fsLit "tyVarSig") tyVarSigIdKey
-- data InjectivityAnn = ...
injectivityAnnName :: Name
injectivityAnnName = libFun (fsLit "injectivityAnn") injectivityAnnIdKey
-- data Callconv = ...
-- Names for FFI calling conventions, safety levels, and the remaining
-- small syntax categories (TExp, RuleBndr, FunDep, FamFlavour,
-- TySynEqn, AnnTarget).
cCallName, stdCallName, cApiCallName, primCallName, javaScriptCallName :: Name
cCallName = libFun (fsLit "cCall") cCallIdKey
stdCallName = libFun (fsLit "stdCall") stdCallIdKey
cApiCallName = libFun (fsLit "cApi") cApiCallIdKey
primCallName = libFun (fsLit "prim") primCallIdKey
javaScriptCallName = libFun (fsLit "javaScript") javaScriptCallIdKey
-- data Safety = ...
unsafeName, safeName, interruptibleName :: Name
unsafeName = libFun (fsLit "unsafe") unsafeIdKey
safeName = libFun (fsLit "safe") safeIdKey
interruptibleName = libFun (fsLit "interruptible") interruptibleIdKey
-- newtype TExp a = ...
tExpDataConName :: Name
tExpDataConName = thCon (fsLit "TExp") tExpDataConKey
-- data RuleBndr = ...
ruleVarName, typedRuleVarName :: Name
ruleVarName = libFun (fsLit ("ruleVar")) ruleVarIdKey
typedRuleVarName = libFun (fsLit ("typedRuleVar")) typedRuleVarIdKey
-- data FunDep = ...
funDepName :: Name
funDepName = libFun (fsLit "funDep") funDepIdKey
-- data FamFlavour = ...
typeFamName, dataFamName :: Name
typeFamName = libFun (fsLit "typeFam") typeFamIdKey
dataFamName = libFun (fsLit "dataFam") dataFamIdKey
-- data TySynEqn = ...
tySynEqnName :: Name
tySynEqnName = libFun (fsLit "tySynEqn") tySynEqnIdKey
-- data AnnTarget = ...
valueAnnotationName, typeAnnotationName, moduleAnnotationName :: Name
valueAnnotationName = libFun (fsLit "valueAnnotation") valueAnnotationIdKey
typeAnnotationName = libFun (fsLit "typeAnnotation") typeAnnotationIdKey
moduleAnnotationName = libFun (fsLit "moduleAnnotation") moduleAnnotationIdKey
-- Type-constructor names for the Q-wrapped synonyms exported by
-- Language.Haskell.TH.Lib (MatchQ = Q Match, etc.), plus Role.
matchQTyConName, clauseQTyConName, expQTyConName, stmtQTyConName,
decQTyConName, conQTyConName, bangTypeQTyConName,
varBangTypeQTyConName, typeQTyConName, fieldExpQTyConName,
patQTyConName, fieldPatQTyConName, predQTyConName, decsQTyConName,
ruleBndrQTyConName, tySynEqnQTyConName, roleTyConName :: Name
matchQTyConName = libTc (fsLit "MatchQ") matchQTyConKey
clauseQTyConName = libTc (fsLit "ClauseQ") clauseQTyConKey
expQTyConName = libTc (fsLit "ExpQ") expQTyConKey
stmtQTyConName = libTc (fsLit "StmtQ") stmtQTyConKey
decQTyConName = libTc (fsLit "DecQ") decQTyConKey
decsQTyConName = libTc (fsLit "DecsQ") decsQTyConKey -- Q [Dec]
conQTyConName = libTc (fsLit "ConQ") conQTyConKey
bangTypeQTyConName = libTc (fsLit "BangTypeQ") bangTypeQTyConKey
varBangTypeQTyConName = libTc (fsLit "VarBangTypeQ") varBangTypeQTyConKey
typeQTyConName = libTc (fsLit "TypeQ") typeQTyConKey
fieldExpQTyConName = libTc (fsLit "FieldExpQ") fieldExpQTyConKey
patQTyConName = libTc (fsLit "PatQ") patQTyConKey
fieldPatQTyConName = libTc (fsLit "FieldPatQ") fieldPatQTyConKey
predQTyConName = libTc (fsLit "PredQ") predQTyConKey
ruleBndrQTyConName = libTc (fsLit "RuleBndrQ") ruleBndrQTyConKey
tySynEqnQTyConName = libTc (fsLit "TySynEqnQ") tySynEqnQTyConKey
roleTyConName = libTc (fsLit "Role") roleTyConKey
-- quasiquoting
-- The QuasiQuoter field accessors from Language.Haskell.TH.Quote.
quoteExpName, quotePatName, quoteDecName, quoteTypeName :: Name
quoteExpName = qqFun (fsLit "quoteExp") quoteExpKey
quotePatName = qqFun (fsLit "quotePat") quotePatKey
quoteDecName = qqFun (fsLit "quoteDec") quoteDecKey
quoteTypeName = qqFun (fsLit "quoteType") quoteTypeKey
-- data Inline = ...
-- Data-constructor names (note: thCon, not libFun) for the pragma- and
-- overlap-related enumerations in Language.Haskell.TH.Syntax.
noInlineDataConName, inlineDataConName, inlinableDataConName :: Name
noInlineDataConName = thCon (fsLit "NoInline") noInlineDataConKey
inlineDataConName = thCon (fsLit "Inline") inlineDataConKey
inlinableDataConName = thCon (fsLit "Inlinable") inlinableDataConKey
-- data RuleMatch = ...
conLikeDataConName, funLikeDataConName :: Name
conLikeDataConName = thCon (fsLit "ConLike") conLikeDataConKey
funLikeDataConName = thCon (fsLit "FunLike") funLikeDataConKey
-- data Phases = ...
allPhasesDataConName, fromPhaseDataConName, beforePhaseDataConName :: Name
allPhasesDataConName = thCon (fsLit "AllPhases") allPhasesDataConKey
fromPhaseDataConName = thCon (fsLit "FromPhase") fromPhaseDataConKey
beforePhaseDataConName = thCon (fsLit "BeforePhase") beforePhaseDataConKey
-- data Overlap = ...
overlappableDataConName,
overlappingDataConName,
overlapsDataConName,
incoherentDataConName :: Name
overlappableDataConName = thCon (fsLit "Overlappable") overlappableDataConKey
overlappingDataConName = thCon (fsLit "Overlapping") overlappingDataConKey
overlapsDataConName = thCon (fsLit "Overlaps") overlapsDataConKey
incoherentDataConName = thCon (fsLit "Incoherent") incoherentDataConKey
{- *********************************************************************
*                                                                      *
                Class keys
*                                                                      *
********************************************************************* -}
-- ClassUniques available: 200-299
-- Check in PrelNames if you want to change this
-- | Unique for the 'Lift' class; must not collide with any class
-- unique allocated in PrelNames.
liftClassKey :: Unique
liftClassKey = mkPreludeClassUnique 200
{- *********************************************************************
*                                                                      *
                TyCon keys
*                                                                      *
********************************************************************* -}
-- TyConUniques available: 200-299
-- Check in PrelNames if you want to change this
-- Each key below pairs one-for-one with a *TyConName above; keys must
-- stay unique within the 200-299 tycon range shared with PrelNames.
expTyConKey, matchTyConKey, clauseTyConKey, qTyConKey, expQTyConKey,
decQTyConKey, patTyConKey, matchQTyConKey, clauseQTyConKey,
stmtQTyConKey, conQTyConKey, typeQTyConKey, typeTyConKey, tyVarBndrTyConKey,
decTyConKey, bangTypeQTyConKey, varBangTypeQTyConKey,
fieldExpTyConKey, fieldPatTyConKey, nameTyConKey, patQTyConKey,
fieldPatQTyConKey, fieldExpQTyConKey, funDepTyConKey, predTyConKey,
predQTyConKey, decsQTyConKey, ruleBndrQTyConKey, tySynEqnQTyConKey,
roleTyConKey, tExpTyConKey, injAnnTyConKey, kindTyConKey,
overlapTyConKey :: Unique
expTyConKey = mkPreludeTyConUnique 200
matchTyConKey = mkPreludeTyConUnique 201
clauseTyConKey = mkPreludeTyConUnique 202
qTyConKey = mkPreludeTyConUnique 203
expQTyConKey = mkPreludeTyConUnique 204
decQTyConKey = mkPreludeTyConUnique 205
patTyConKey = mkPreludeTyConUnique 206
matchQTyConKey = mkPreludeTyConUnique 207
clauseQTyConKey = mkPreludeTyConUnique 208
stmtQTyConKey = mkPreludeTyConUnique 209
conQTyConKey = mkPreludeTyConUnique 210
typeQTyConKey = mkPreludeTyConUnique 211
typeTyConKey = mkPreludeTyConUnique 212
decTyConKey = mkPreludeTyConUnique 213
bangTypeQTyConKey = mkPreludeTyConUnique 214
varBangTypeQTyConKey = mkPreludeTyConUnique 215
fieldExpTyConKey = mkPreludeTyConUnique 216
fieldPatTyConKey = mkPreludeTyConUnique 217
nameTyConKey = mkPreludeTyConUnique 218
patQTyConKey = mkPreludeTyConUnique 219
fieldPatQTyConKey = mkPreludeTyConUnique 220
fieldExpQTyConKey = mkPreludeTyConUnique 221
funDepTyConKey = mkPreludeTyConUnique 222
predTyConKey = mkPreludeTyConUnique 223
predQTyConKey = mkPreludeTyConUnique 224
tyVarBndrTyConKey = mkPreludeTyConUnique 225
decsQTyConKey = mkPreludeTyConUnique 226
ruleBndrQTyConKey = mkPreludeTyConUnique 227
tySynEqnQTyConKey = mkPreludeTyConUnique 228
roleTyConKey = mkPreludeTyConUnique 229
tExpTyConKey = mkPreludeTyConUnique 230
injAnnTyConKey = mkPreludeTyConUnique 231
kindTyConKey = mkPreludeTyConUnique 232
overlapTyConKey = mkPreludeTyConUnique 233
{- *********************************************************************
*                                                                      *
                DataCon keys
*                                                                      *
********************************************************************* -}
-- DataConUniques available: 100-150
-- If you want to change this, make sure you check in PrelNames
-- Keys for the data-constructor names above (Inline, RuleMatch,
-- Phases, TExp, Overlap); one key per *DataConName.
-- data Inline = ...
noInlineDataConKey, inlineDataConKey, inlinableDataConKey :: Unique
noInlineDataConKey = mkPreludeDataConUnique 100
inlineDataConKey = mkPreludeDataConUnique 101
inlinableDataConKey = mkPreludeDataConUnique 102
-- data RuleMatch = ...
conLikeDataConKey, funLikeDataConKey :: Unique
conLikeDataConKey = mkPreludeDataConUnique 103
funLikeDataConKey = mkPreludeDataConUnique 104
-- data Phases = ...
allPhasesDataConKey, fromPhaseDataConKey, beforePhaseDataConKey :: Unique
allPhasesDataConKey = mkPreludeDataConUnique 105
fromPhaseDataConKey = mkPreludeDataConUnique 106
beforePhaseDataConKey = mkPreludeDataConUnique 107
-- newtype TExp a = ...
tExpDataConKey :: Unique
tExpDataConKey = mkPreludeDataConUnique 108
-- data Overlap = ..
overlappableDataConKey,
overlappingDataConKey,
overlapsDataConKey,
incoherentDataConKey :: Unique
overlappableDataConKey = mkPreludeDataConUnique 109
overlappingDataConKey = mkPreludeDataConUnique 110
overlapsDataConKey = mkPreludeDataConUnique 111
incoherentDataConKey = mkPreludeDataConUnique 112
{- *********************************************************************
* *
Id keys
* *
********************************************************************* -}
-- IdUniques available: 200-499
-- If you want to change this, make sure you check in PrelNames
returnQIdKey, bindQIdKey, sequenceQIdKey, liftIdKey, newNameIdKey,
mkNameIdKey, mkNameG_vIdKey, mkNameG_dIdKey, mkNameG_tcIdKey,
mkNameLIdKey, mkNameSIdKey, unTypeIdKey, unTypeQIdKey,
unsafeTExpCoerceIdKey :: Unique
returnQIdKey = mkPreludeMiscIdUnique 200
bindQIdKey = mkPreludeMiscIdUnique 201
sequenceQIdKey = mkPreludeMiscIdUnique 202
liftIdKey = mkPreludeMiscIdUnique 203
newNameIdKey = mkPreludeMiscIdUnique 204
mkNameIdKey = mkPreludeMiscIdUnique 205
mkNameG_vIdKey = mkPreludeMiscIdUnique 206
mkNameG_dIdKey = mkPreludeMiscIdUnique 207
mkNameG_tcIdKey = mkPreludeMiscIdUnique 208
mkNameLIdKey = mkPreludeMiscIdUnique 209
mkNameSIdKey = mkPreludeMiscIdUnique 210
unTypeIdKey = mkPreludeMiscIdUnique 211
unTypeQIdKey = mkPreludeMiscIdUnique 212
unsafeTExpCoerceIdKey = mkPreludeMiscIdUnique 213
-- data Lit = ...
charLIdKey, stringLIdKey, integerLIdKey, intPrimLIdKey, wordPrimLIdKey,
floatPrimLIdKey, doublePrimLIdKey, rationalLIdKey, stringPrimLIdKey,
charPrimLIdKey:: Unique
charLIdKey = mkPreludeMiscIdUnique 220
stringLIdKey = mkPreludeMiscIdUnique 221
integerLIdKey = mkPreludeMiscIdUnique 222
intPrimLIdKey = mkPreludeMiscIdUnique 223
wordPrimLIdKey = mkPreludeMiscIdUnique 224
floatPrimLIdKey = mkPreludeMiscIdUnique 225
doublePrimLIdKey = mkPreludeMiscIdUnique 226
rationalLIdKey = mkPreludeMiscIdUnique 227
stringPrimLIdKey = mkPreludeMiscIdUnique 228
charPrimLIdKey = mkPreludeMiscIdUnique 229
liftStringIdKey :: Unique
liftStringIdKey = mkPreludeMiscIdUnique 230
-- data Pat = ...
litPIdKey, varPIdKey, tupPIdKey, unboxedTupPIdKey, conPIdKey, infixPIdKey,
tildePIdKey, bangPIdKey, asPIdKey, wildPIdKey, recPIdKey, listPIdKey,
sigPIdKey, viewPIdKey :: Unique
litPIdKey = mkPreludeMiscIdUnique 240
varPIdKey = mkPreludeMiscIdUnique 241
tupPIdKey = mkPreludeMiscIdUnique 242
unboxedTupPIdKey = mkPreludeMiscIdUnique 243
conPIdKey = mkPreludeMiscIdUnique 244
infixPIdKey = mkPreludeMiscIdUnique 245
tildePIdKey = mkPreludeMiscIdUnique 246
bangPIdKey = mkPreludeMiscIdUnique 247
asPIdKey = mkPreludeMiscIdUnique 248
wildPIdKey = mkPreludeMiscIdUnique 249
recPIdKey = mkPreludeMiscIdUnique 250
listPIdKey = mkPreludeMiscIdUnique 251
sigPIdKey = mkPreludeMiscIdUnique 252
viewPIdKey = mkPreludeMiscIdUnique 253
-- type FieldPat = ...
fieldPatIdKey :: Unique
fieldPatIdKey = mkPreludeMiscIdUnique 260
-- data Match = ...
matchIdKey :: Unique
matchIdKey = mkPreludeMiscIdUnique 261
-- data Clause = ...
clauseIdKey :: Unique
clauseIdKey = mkPreludeMiscIdUnique 262
-- data Exp = ...
varEIdKey, conEIdKey, litEIdKey, appEIdKey, infixEIdKey, infixAppIdKey,
sectionLIdKey, sectionRIdKey, lamEIdKey, lamCaseEIdKey, tupEIdKey,
unboxedTupEIdKey, condEIdKey, multiIfEIdKey,
letEIdKey, caseEIdKey, doEIdKey, compEIdKey,
fromEIdKey, fromThenEIdKey, fromToEIdKey, fromThenToEIdKey,
listEIdKey, sigEIdKey, recConEIdKey, recUpdEIdKey, staticEIdKey,
unboundVarEIdKey :: Unique
varEIdKey = mkPreludeMiscIdUnique 270
conEIdKey = mkPreludeMiscIdUnique 271
litEIdKey = mkPreludeMiscIdUnique 272
appEIdKey = mkPreludeMiscIdUnique 273
infixEIdKey = mkPreludeMiscIdUnique 274
infixAppIdKey = mkPreludeMiscIdUnique 275
sectionLIdKey = mkPreludeMiscIdUnique 276
sectionRIdKey = mkPreludeMiscIdUnique 277
lamEIdKey = mkPreludeMiscIdUnique 278
lamCaseEIdKey = mkPreludeMiscIdUnique 279
tupEIdKey = mkPreludeMiscIdUnique 280
unboxedTupEIdKey = mkPreludeMiscIdUnique 281
condEIdKey = mkPreludeMiscIdUnique 282
multiIfEIdKey = mkPreludeMiscIdUnique 283
letEIdKey = mkPreludeMiscIdUnique 284
caseEIdKey = mkPreludeMiscIdUnique 285
doEIdKey = mkPreludeMiscIdUnique 286
compEIdKey = mkPreludeMiscIdUnique 287
fromEIdKey = mkPreludeMiscIdUnique 288
fromThenEIdKey = mkPreludeMiscIdUnique 289
fromToEIdKey = mkPreludeMiscIdUnique 290
fromThenToEIdKey = mkPreludeMiscIdUnique 291
listEIdKey = mkPreludeMiscIdUnique 292
sigEIdKey = mkPreludeMiscIdUnique 293
recConEIdKey = mkPreludeMiscIdUnique 294
recUpdEIdKey = mkPreludeMiscIdUnique 295
staticEIdKey = mkPreludeMiscIdUnique 296
unboundVarEIdKey = mkPreludeMiscIdUnique 297
-- type FieldExp = ...
fieldExpIdKey :: Unique
fieldExpIdKey = mkPreludeMiscIdUnique 305
-- data Body = ...
guardedBIdKey, normalBIdKey :: Unique
guardedBIdKey = mkPreludeMiscIdUnique 306
normalBIdKey = mkPreludeMiscIdUnique 307
-- data Guard = ...
normalGEIdKey, patGEIdKey :: Unique
normalGEIdKey = mkPreludeMiscIdUnique 308
patGEIdKey = mkPreludeMiscIdUnique 309
-- data Stmt = ...
bindSIdKey, letSIdKey, noBindSIdKey, parSIdKey :: Unique
bindSIdKey = mkPreludeMiscIdUnique 310
letSIdKey = mkPreludeMiscIdUnique 311
noBindSIdKey = mkPreludeMiscIdUnique 312
parSIdKey = mkPreludeMiscIdUnique 313
-- data Dec = ...
-- NOTE(review): a few equations below (closedTypeFamilyDIdKey,
-- standaloneDerivDIdKey, defaultSigDIdKey, ...) appear out of numeric
-- order relative to the signature list; that is harmless — only
-- distinctness of the numbers matters.
funDIdKey, valDIdKey, dataDIdKey, newtypeDIdKey, tySynDIdKey, classDIdKey,
    instanceWithOverlapDIdKey, instanceDIdKey, sigDIdKey, forImpDIdKey,
    pragInlDIdKey, pragSpecDIdKey, pragSpecInlDIdKey, pragSpecInstDIdKey,
    pragRuleDIdKey, pragAnnDIdKey, defaultSigDIdKey, dataFamilyDIdKey,
    openTypeFamilyDIdKey, closedTypeFamilyDIdKey, dataInstDIdKey,
    newtypeInstDIdKey, tySynInstDIdKey, standaloneDerivDIdKey, infixLDIdKey,
    infixRDIdKey, infixNDIdKey, roleAnnotDIdKey, patSynDIdKey,
    patSynSigDIdKey :: Unique
funDIdKey                    = mkPreludeMiscIdUnique 320
valDIdKey                    = mkPreludeMiscIdUnique 321
dataDIdKey                   = mkPreludeMiscIdUnique 322
newtypeDIdKey                = mkPreludeMiscIdUnique 323
tySynDIdKey                  = mkPreludeMiscIdUnique 324
classDIdKey                  = mkPreludeMiscIdUnique 325
instanceWithOverlapDIdKey    = mkPreludeMiscIdUnique 326
instanceDIdKey               = mkPreludeMiscIdUnique 327
sigDIdKey                    = mkPreludeMiscIdUnique 328
forImpDIdKey                 = mkPreludeMiscIdUnique 329
pragInlDIdKey                = mkPreludeMiscIdUnique 330
pragSpecDIdKey               = mkPreludeMiscIdUnique 331
pragSpecInlDIdKey            = mkPreludeMiscIdUnique 332
pragSpecInstDIdKey           = mkPreludeMiscIdUnique 333
pragRuleDIdKey               = mkPreludeMiscIdUnique 334
pragAnnDIdKey                = mkPreludeMiscIdUnique 335
dataFamilyDIdKey             = mkPreludeMiscIdUnique 336
openTypeFamilyDIdKey         = mkPreludeMiscIdUnique 337
dataInstDIdKey               = mkPreludeMiscIdUnique 338
newtypeInstDIdKey            = mkPreludeMiscIdUnique 339
tySynInstDIdKey              = mkPreludeMiscIdUnique 340
closedTypeFamilyDIdKey       = mkPreludeMiscIdUnique 341
infixLDIdKey                 = mkPreludeMiscIdUnique 342
infixRDIdKey                 = mkPreludeMiscIdUnique 343
infixNDIdKey                 = mkPreludeMiscIdUnique 344
roleAnnotDIdKey              = mkPreludeMiscIdUnique 345
standaloneDerivDIdKey        = mkPreludeMiscIdUnique 346
defaultSigDIdKey             = mkPreludeMiscIdUnique 347
patSynDIdKey                 = mkPreludeMiscIdUnique 348
patSynSigDIdKey              = mkPreludeMiscIdUnique 349
-- type Cxt = ...
-- Unique keys 350-371: contexts, strictness/unpackedness annotations,
-- data constructors, bang types, and pattern-synonym shapes.
cxtIdKey :: Unique
cxtIdKey = mkPreludeMiscIdUnique 350

-- data SourceUnpackedness = ...
noSourceUnpackednessKey, sourceNoUnpackKey, sourceUnpackKey :: Unique
noSourceUnpackednessKey = mkPreludeMiscIdUnique 351
sourceNoUnpackKey       = mkPreludeMiscIdUnique 352
sourceUnpackKey         = mkPreludeMiscIdUnique 353

-- data SourceStrictness = ...
noSourceStrictnessKey, sourceLazyKey, sourceStrictKey :: Unique
noSourceStrictnessKey = mkPreludeMiscIdUnique 354
sourceLazyKey         = mkPreludeMiscIdUnique 355
sourceStrictKey       = mkPreludeMiscIdUnique 356

-- data Con = ...
normalCIdKey, recCIdKey, infixCIdKey, forallCIdKey, gadtCIdKey,
    recGadtCIdKey :: Unique
normalCIdKey  = mkPreludeMiscIdUnique 357
recCIdKey     = mkPreludeMiscIdUnique 358
infixCIdKey   = mkPreludeMiscIdUnique 359
forallCIdKey  = mkPreludeMiscIdUnique 360
gadtCIdKey    = mkPreludeMiscIdUnique 361
recGadtCIdKey = mkPreludeMiscIdUnique 362

-- data Bang = ...
bangIdKey :: Unique
bangIdKey = mkPreludeMiscIdUnique 363

-- type BangType = ...
bangTKey :: Unique
bangTKey = mkPreludeMiscIdUnique 364

-- type VarBangType = ...
varBangTKey :: Unique
varBangTKey = mkPreludeMiscIdUnique 365

-- data PatSynDir = ...
unidirPatSynIdKey, implBidirPatSynIdKey, explBidirPatSynIdKey :: Unique
unidirPatSynIdKey    = mkPreludeMiscIdUnique 366
implBidirPatSynIdKey = mkPreludeMiscIdUnique 367
explBidirPatSynIdKey = mkPreludeMiscIdUnique 368

-- data PatSynArgs = ...
prefixPatSynIdKey, infixPatSynIdKey, recordPatSynIdKey :: Unique
prefixPatSynIdKey = mkPreludeMiscIdUnique 369
infixPatSynIdKey  = mkPreludeMiscIdUnique 370
recordPatSynIdKey = mkPreludeMiscIdUnique 371
-- data Type = ...
-- Unique keys 380-415: the Type, TyLit, TyVarBndr, Role and Kind syntax.
forallTIdKey, varTIdKey, conTIdKey, tupleTIdKey, unboxedTupleTIdKey, arrowTIdKey,
    listTIdKey, appTIdKey, sigTIdKey, equalityTIdKey, litTIdKey,
    promotedTIdKey, promotedTupleTIdKey,
    promotedNilTIdKey, promotedConsTIdKey,
    wildCardTIdKey :: Unique
forallTIdKey        = mkPreludeMiscIdUnique 380
varTIdKey           = mkPreludeMiscIdUnique 381
conTIdKey           = mkPreludeMiscIdUnique 382
tupleTIdKey         = mkPreludeMiscIdUnique 383
unboxedTupleTIdKey  = mkPreludeMiscIdUnique 384
arrowTIdKey         = mkPreludeMiscIdUnique 385
listTIdKey          = mkPreludeMiscIdUnique 386
appTIdKey           = mkPreludeMiscIdUnique 387
sigTIdKey           = mkPreludeMiscIdUnique 388
equalityTIdKey      = mkPreludeMiscIdUnique 389
litTIdKey           = mkPreludeMiscIdUnique 390
promotedTIdKey      = mkPreludeMiscIdUnique 391
promotedTupleTIdKey = mkPreludeMiscIdUnique 392
promotedNilTIdKey   = mkPreludeMiscIdUnique 393
promotedConsTIdKey  = mkPreludeMiscIdUnique 394
wildCardTIdKey      = mkPreludeMiscIdUnique 395

-- data TyLit = ...
numTyLitIdKey, strTyLitIdKey :: Unique
numTyLitIdKey = mkPreludeMiscIdUnique 400
strTyLitIdKey = mkPreludeMiscIdUnique 401

-- data TyVarBndr = ...
plainTVIdKey, kindedTVIdKey :: Unique
plainTVIdKey  = mkPreludeMiscIdUnique 402
kindedTVIdKey = mkPreludeMiscIdUnique 403

-- data Role = ...
nominalRIdKey, representationalRIdKey, phantomRIdKey, inferRIdKey :: Unique
nominalRIdKey          = mkPreludeMiscIdUnique 404
representationalRIdKey = mkPreludeMiscIdUnique 405
phantomRIdKey          = mkPreludeMiscIdUnique 406
inferRIdKey            = mkPreludeMiscIdUnique 407

-- data Kind = ...
varKIdKey, conKIdKey, tupleKIdKey, arrowKIdKey, listKIdKey, appKIdKey,
    starKIdKey, constraintKIdKey :: Unique
varKIdKey        = mkPreludeMiscIdUnique 408
conKIdKey        = mkPreludeMiscIdUnique 409
tupleKIdKey      = mkPreludeMiscIdUnique 410
arrowKIdKey      = mkPreludeMiscIdUnique 411
listKIdKey       = mkPreludeMiscIdUnique 412
appKIdKey        = mkPreludeMiscIdUnique 413
starKIdKey       = mkPreludeMiscIdUnique 414
constraintKIdKey = mkPreludeMiscIdUnique 415
-- data FamilyResultSig = ...
-- Unique keys 416-492: family result signatures, injectivity annotations,
-- foreign calling conventions, safety, fundeps, family flavours,
-- type-synonym equations, quasiquoting entry points, rule binders and
-- annotation targets.
noSigIdKey, kindSigIdKey, tyVarSigIdKey :: Unique
noSigIdKey    = mkPreludeMiscIdUnique 416
kindSigIdKey  = mkPreludeMiscIdUnique 417
tyVarSigIdKey = mkPreludeMiscIdUnique 418

-- data InjectivityAnn = ...
injectivityAnnIdKey :: Unique
injectivityAnnIdKey = mkPreludeMiscIdUnique 419

-- data Callconv = ...
cCallIdKey, stdCallIdKey, cApiCallIdKey, primCallIdKey,
    javaScriptCallIdKey :: Unique
cCallIdKey          = mkPreludeMiscIdUnique 420
stdCallIdKey        = mkPreludeMiscIdUnique 421
cApiCallIdKey       = mkPreludeMiscIdUnique 422
primCallIdKey       = mkPreludeMiscIdUnique 423
javaScriptCallIdKey = mkPreludeMiscIdUnique 424

-- data Safety = ...
unsafeIdKey, safeIdKey, interruptibleIdKey :: Unique
unsafeIdKey        = mkPreludeMiscIdUnique 430
safeIdKey          = mkPreludeMiscIdUnique 431
interruptibleIdKey = mkPreludeMiscIdUnique 432

-- data FunDep = ...
funDepIdKey :: Unique
funDepIdKey = mkPreludeMiscIdUnique 440

-- data FamFlavour = ...
typeFamIdKey, dataFamIdKey :: Unique
typeFamIdKey = mkPreludeMiscIdUnique 450
dataFamIdKey = mkPreludeMiscIdUnique 451

-- data TySynEqn = ...
tySynEqnIdKey :: Unique
tySynEqnIdKey = mkPreludeMiscIdUnique 460

-- quasiquoting
quoteExpKey, quotePatKey, quoteDecKey, quoteTypeKey :: Unique
quoteExpKey  = mkPreludeMiscIdUnique 470
quotePatKey  = mkPreludeMiscIdUnique 471
quoteDecKey  = mkPreludeMiscIdUnique 472
quoteTypeKey = mkPreludeMiscIdUnique 473

-- data RuleBndr = ...
ruleVarIdKey, typedRuleVarIdKey :: Unique
ruleVarIdKey      = mkPreludeMiscIdUnique 480
typedRuleVarIdKey = mkPreludeMiscIdUnique 481

-- data AnnTarget = ...
valueAnnotationIdKey, typeAnnotationIdKey, moduleAnnotationIdKey :: Unique
valueAnnotationIdKey  = mkPreludeMiscIdUnique 490
typeAnnotationIdKey   = mkPreludeMiscIdUnique 491
moduleAnnotationIdKey = mkPreludeMiscIdUnique 492
{-
************************************************************************
* *
RdrNames
* *
************************************************************************
-}
-- 'RdrName' versions of selected Template Haskell 'Name's, built once
-- here via 'nameRdrName' so clients can refer to them directly.
lift_RDR, mkNameG_dRDR, mkNameG_vRDR :: RdrName
lift_RDR     = nameRdrName liftName
mkNameG_dRDR = nameRdrName mkNameG_dName
mkNameG_vRDR = nameRdrName mkNameG_vName

-- data Exp = ...
conE_RDR, litE_RDR, appE_RDR, infixApp_RDR :: RdrName
conE_RDR     = nameRdrName conEName
litE_RDR     = nameRdrName litEName
appE_RDR     = nameRdrName appEName
infixApp_RDR = nameRdrName infixAppName

-- data Lit = ...
stringL_RDR, intPrimL_RDR, wordPrimL_RDR, floatPrimL_RDR,
    doublePrimL_RDR, stringPrimL_RDR, charPrimL_RDR :: RdrName
stringL_RDR     = nameRdrName stringLName
intPrimL_RDR    = nameRdrName intPrimLName
wordPrimL_RDR   = nameRdrName wordPrimLName
floatPrimL_RDR  = nameRdrName floatPrimLName
doublePrimL_RDR = nameRdrName doublePrimLName
stringPrimL_RDR = nameRdrName stringPrimLName
charPrimL_RDR   = nameRdrName charPrimLName
| vikraman/ghc | compiler/prelude/THNames.hs | bsd-3-clause | 45,377 | 0 | 8 | 9,400 | 8,146 | 4,728 | 3,418 | 755 | 1 |
-- (c) The University of Glasgow 2002-2006
{-# LANGUAGE CPP, RankNTypes #-}
module IfaceEnv (
newGlobalBinder, newInteractiveBinder,
externaliseName,
lookupIfaceTop,
lookupOrig, lookupOrigNameCache, extendNameCache,
newIfaceName, newIfaceNames,
extendIfaceIdEnv, extendIfaceTyVarEnv,
tcIfaceLclId, tcIfaceTyVar, lookupIfaceVar,
lookupIfaceTyVar, extendIfaceEnvs,
setNameModule,
ifaceExportNames,
-- Name-cache stuff
allocateGlobalBinder, updNameCache,
mkNameCacheUpdater, NameCacheUpdater(..),
) where
#include "HsVersions.h"
import GhcPrelude
import TcRnMonad
import HscTypes
import Type
import Var
import Name
import Avail
import Module
import FastString
import FastStringEnv
import IfaceType
import NameCache
import UniqSupply
import SrcLoc
import Outputable
import Data.List ( partition )
{-
*********************************************************
* *
Allocating new Names in the Name Cache
* *
*********************************************************
See Also: Note [The Name Cache] in NameCache
-}
newGlobalBinder :: Module -> OccName -> SrcSpan -> TcRnIf a b Name
-- Used for source code and interface files, to make the
-- Name for a thing, given its Module and OccName
-- See Note [The Name Cache]
--
-- The cache may already have a binding for this thing,
-- because we may have seen an occurrence before, but now is the
-- moment when we know its Module and SrcLoc in their full glory
newGlobalBinder mod occ loc
  = do { mod `seq` occ `seq` return ()  -- See notes with lookupOrig
       ; name <- updNameCache $ \name_cache ->
                 allocateGlobalBinder name_cache mod occ loc
       ; traceIf (text "newGlobalBinder" <+>
                  (vcat [ ppr mod <+> ppr occ <+> ppr loc, ppr name]))
       ; return name }

newInteractiveBinder :: HscEnv -> OccName -> SrcSpan -> IO Name
-- Works in the IO monad, and gets the Module
-- from the interactive context (so the binder lives in the
-- interactive module rather than a compiled one)
newInteractiveBinder hsc_env occ loc
  = do { let mod = icInteractiveModule (hsc_IC hsc_env)
       ; updNameCacheIO hsc_env $ \name_cache ->
           allocateGlobalBinder name_cache mod occ loc }
allocateGlobalBinder
  :: NameCache
  -> Module -> OccName -> SrcSpan
  -> (NameCache, Name)
-- See Note [The Name Cache]
allocateGlobalBinder name_supply mod occ loc
  = case lookupOrigNameCache (nsNames name_supply) mod occ of
      -- A hit in the cache!  We are at the binding site of the name.
      -- This is the moment when we know the SrcLoc
      -- of the Name, so we set this field in the Name we return.
      --
      -- Then (bogus) multiple bindings of the same Name
      -- get different SrcLocs and can be reported as such.
      --
      -- Possible other reason: it might be in the cache because we
      -- encountered an occurrence before the binding site for an
      -- implicitly-imported Name.  Perhaps the current SrcLoc is
      -- better... but not really: it'll still just say 'imported'
      --
      -- IMPORTANT: Don't mess with wired-in names.
      --            Their wired-in-ness is in their NameSort
      --            and their Module is correct.
      Just name | isWiredInName name
                -> (name_supply, name)
                | otherwise
                -> (new_name_supply, name')
                where
                  uniq            = nameUnique name
                  name'           = mkExternalName uniq mod occ loc
                                    -- name' is like name, but with the right SrcSpan
                  new_cache       = extendNameCache (nsNames name_supply) mod occ name'
                  new_name_supply = name_supply {nsNames = new_cache}

      -- Miss in the cache!
      -- Build a completely new Name, and put it in the cache
      _ -> (new_name_supply, name)
           where
             (uniq, us')     = takeUniqFromSupply (nsUniqs name_supply)
             name            = mkExternalName uniq mod occ loc
             new_cache       = extendNameCache (nsNames name_supply) mod occ name
             new_name_supply = name_supply {nsUniqs = us', nsNames = new_cache}
-- | Interface exports are already 'AvailInfo's, so this is the identity
-- in the monad.
ifaceExportNames :: [IfaceExport] -> TcRnIf gbl lcl [AvailInfo]
ifaceExportNames = return
-- | A function that atomically updates the name cache given a modifier
-- function.  The second result of the modifier function will be the result
-- of the IO action.
newtype NameCacheUpdater
  = NCU { updateNameCache :: forall c. (NameCache -> (NameCache, c)) -> IO c }

-- | Package the top-level name-cache updater into an 'NCU' that can be
-- passed to code living outside 'TcRnIf'.
mkNameCacheUpdater :: TcRnIf a b NameCacheUpdater
mkNameCacheUpdater = do { hsc_env <- getTopEnv
                        ; return (NCU (updNameCacheIO hsc_env)) }

-- | Atomically update the name cache from within 'TcRnIf'.
updNameCache :: (NameCache -> (NameCache, c)) -> TcRnIf a b c
updNameCache upd_fn = do { hsc_env <- getTopEnv
                         ; liftIO $ updNameCacheIO hsc_env upd_fn }
{-
************************************************************************
* *
Name cache access
* *
************************************************************************
-}
-- | Look up the 'Name' for a given 'Module' and 'OccName'.
-- Consider alternatively using 'lookupIfaceTop' if you're in the 'IfL' monad
-- and 'Module' is simply that of the 'ModIface' you are typechecking.
lookupOrig :: Module -> OccName -> TcRnIf a b Name
lookupOrig mod occ
  = do { -- First ensure that mod and occ are evaluated
         -- If not, chaos can ensue:
         --   we read the name-cache
         --   then pull on mod (say)
         --   which does some stuff that modifies the name cache
         -- This did happen, with tycon_mod in TcIface.tcIfaceAlt (DataAlt..)
         mod `seq` occ `seq` return ()
       ; traceIf (text "lookup_orig" <+> ppr mod <+> ppr occ)

       ; updNameCache $ \name_cache ->
         case lookupOrigNameCache (nsNames name_cache) mod occ of {
           Just name -> (name_cache, name);
           Nothing   ->
             -- Cache miss: coin a fresh external Name with no useful
             -- SrcSpan yet (the binding site fills it in later; see
             -- 'newGlobalBinder'), and remember it.
             case takeUniqFromSupply (nsUniqs name_cache) of {
               (uniq, us) ->
                 let
                   name      = mkExternalName uniq mod occ noSrcSpan
                   new_cache = extendNameCache (nsNames name_cache) mod occ name
                 in (name_cache{ nsUniqs = us, nsNames = new_cache }, name)
             }}}

externaliseName :: Module -> Name -> TcRnIf m n Name
-- Take an Internal Name and make it an External one,
-- with the same unique
externaliseName mod name
  = do { let occ  = nameOccName name
             loc  = nameSrcSpan name
             uniq = nameUnique name
       ; occ `seq` return ()  -- c.f. seq in newGlobalBinder
       ; updNameCache $ \ns ->
           let name' = mkExternalName uniq mod occ loc
               ns'   = ns { nsNames = extendNameCache (nsNames ns) mod occ name' }
           in (ns', name') }
-- | Set the 'Module' of a 'Name'.
setNameModule :: Maybe Module -> Name -> TcRnIf m n Name
setNameModule mb_mod n =
  case mb_mod of
    Nothing -> return n                     -- no module: leave the Name alone
    Just m  -> newGlobalBinder m (nameOccName n) (nameSrcSpan n)
{-
************************************************************************
* *
Type variables and local Ids
* *
************************************************************************
-}
-- | Look up a locally-bound Id by its occurrence string; fails if the
-- binder is not in scope in the current interface-local environment.
tcIfaceLclId :: FastString -> IfL Id
tcIfaceLclId occ
  = do { lcl <- getLclEnv
       ; case (lookupFsEnv (if_id_env lcl) occ) of
           Just ty_var -> return ty_var
           Nothing     -> failIfM (text "Iface id out of scope: " <+> ppr occ)
       }

-- | Bring a batch of Ids into scope (keyed by their OccName strings)
-- for the given sub-computation.
extendIfaceIdEnv :: [Id] -> IfL a -> IfL a
extendIfaceIdEnv ids thing_inside
  = do { env <- getLclEnv
       ; let { id_env' = extendFsEnvList (if_id_env env) pairs
             ; pairs   = [(occNameFS (getOccName id), id) | id <- ids] }
       ; setLclEnv (env { if_id_env = id_env' }) thing_inside }

-- | As 'tcIfaceLclId', but for the type-variable environment.
tcIfaceTyVar :: FastString -> IfL TyVar
tcIfaceTyVar occ
  = do { lcl <- getLclEnv
       ; case (lookupFsEnv (if_tv_env lcl) occ) of
           Just ty_var -> return ty_var
           Nothing     -> failIfM (text "Iface type variable out of scope: " <+> ppr occ)
       }

-- | Non-failing lookup of a type variable.
lookupIfaceTyVar :: IfaceTvBndr -> IfL (Maybe TyVar)
lookupIfaceTyVar (occ, _)
  = do { lcl <- getLclEnv
       ; return (lookupFsEnv (if_tv_env lcl) occ) }

-- | Non-failing lookup of either kind of binder, dispatching on whether
-- it is an Id binder or a type-variable binder.
lookupIfaceVar :: IfaceBndr -> IfL (Maybe TyCoVar)
lookupIfaceVar (IfaceIdBndr (occ, _))
  = do { lcl <- getLclEnv
       ; return (lookupFsEnv (if_id_env lcl) occ) }
lookupIfaceVar (IfaceTvBndr (occ, _))
  = do { lcl <- getLclEnv
       ; return (lookupFsEnv (if_tv_env lcl) occ) }

-- | Bring a batch of type variables into scope for the sub-computation.
extendIfaceTyVarEnv :: [TyVar] -> IfL a -> IfL a
extendIfaceTyVarEnv tyvars thing_inside
  = do { env <- getLclEnv
       ; let { tv_env' = extendFsEnvList (if_tv_env env) pairs
             ; pairs   = [(occNameFS (getOccName tv), tv) | tv <- tyvars] }
       ; setLclEnv (env { if_tv_env = tv_env' }) thing_inside }

-- | Split a mixed list of type/coercion variables and extend both
-- environments accordingly (tyvars into the tv env, covars into the id env).
extendIfaceEnvs :: [TyCoVar] -> IfL a -> IfL a
extendIfaceEnvs tcvs thing_inside
  = extendIfaceTyVarEnv tvs $
    extendIfaceIdEnv    cvs $
    thing_inside
  where
    (tvs, cvs) = partition isTyVar tcvs
{-
************************************************************************
* *
Getting from RdrNames to Names
* *
************************************************************************
-}
-- | Look up a top-level name from the current Iface module
lookupIfaceTop :: OccName -> IfL Name
lookupIfaceTop occ
  = do { env <- getLclEnv; lookupOrig (if_mod env) occ }

-- | Coin a fresh internal Name (no module, no SrcSpan) for the OccName.
newIfaceName :: OccName -> IfL Name
newIfaceName occ
  = do { uniq <- newUnique
       ; return $! mkInternalName uniq occ noSrcSpan }

-- | Batch version of 'newIfaceName', drawing from a fresh unique supply.
newIfaceNames :: [OccName] -> IfL [Name]
newIfaceNames occs
  = do { uniqs <- newUniqueSupply
       ; return [ mkInternalName uniq occ noSrcSpan
                | (occ,uniq) <- occs `zip` uniqsFromSupply uniqs ] }
| ezyang/ghc | compiler/iface/IfaceEnv.hs | bsd-3-clause | 10,522 | 0 | 21 | 3,334 | 2,063 | 1,109 | 954 | 154 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{- This example demonstrates an ability to stream in constant space content from a remote resource into an S3 object accessible publicly -}
import qualified Aws
import Aws.Aws (Configuration (..))
import qualified Aws.S3 as S3
import Control.Applicative ((<$>))
import Data.Conduit (unwrapResumable)
import qualified Data.Text as T
import Network.HTTP.Conduit (http, parseUrl, responseBody,
withManager)
import System.Environment (getArgs)
main :: IO ()
main = do
  -- Credentials come from AWS_ACCESS_KEY_ID / AWS_ACCESS_KEY_SECRET.
  maybeCreds <- Aws.loadCredentialsFromEnv
  case maybeCreds of
    Nothing -> do
      putStrLn "Please set the environment variables AWS_ACCESS_KEY_ID and AWS_ACCESS_KEY_SECRET"
    Just creds -> do
      args <- getArgs
      cfg <- Aws.dbgConfiguration
      let s3cfg = Aws.defServiceConfig :: S3.S3Configuration Aws.NormalQuery
      case args of
        [sourceUrl,destBucket,destObj] -> do
          request <- parseUrl sourceUrl
          withManager $ \mgr -> do
            -- Stream the HTTP response body straight into the multipart
            -- upload (constant space), in 10 MiB parts, publicly readable.
            resumableSource <- responseBody <$> http request mgr
            (source, _) <- unwrapResumable resumableSource
            let initiator b o = (S3.postInitiateMultipartUpload b o){S3.imuAcl = Just S3.AclPublicRead}
            S3.multipartUploadWithInitiator cfg{credentials = creds} s3cfg initiator mgr (T.pack destBucket) (T.pack destObj) source (10*1024*1024)
        _ -> do
          putStrLn "Usage: MultipartTransfer sourceUrl destinationBucket destinationObjectname"
| romanb/aws | Examples/MultipartTransfer.hs | bsd-3-clause | 1,604 | 0 | 27 | 441 | 362 | 190 | 172 | 30 | 3 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset descriptor (Indonesian locale): declares the map file
     and the TOC / Index / Search / Favorites views for this add-on's help. -->
<helpset version="2.0" xml:lang="id-ID">
  <title>Network Add-on</title>
  <maps>
    <homeID>addon.network</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Contents</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Index</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Search</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
      JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favorites</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset> | kingthorin/zap-extensions | addOns/network/src/main/javahelp/help_id_ID/helpset_id_ID.hs | apache-2.0 | 969 | 77 | 67 | 156 | 413 | 209 | 204 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset descriptor (Hungarian locale): declares the map file
     and the TOC / Index / Search / Favorites views for this extension's help. -->
<helpset version="2.0" xml:lang="hu-HU">
  <title>Core Language Files | ZAP Extension</title>
  <maps>
    <homeID>top</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Contents</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Index</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Search</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
      JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favorites</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset> | kingthorin/zap-extensions | addOns/coreLang/src/main/javahelp/org/zaproxy/zap/extension/coreLang/resources/help_hu_HU/helpset_hu_HU.hs | apache-2.0 | 980 | 78 | 66 | 160 | 415 | 210 | 205 | -1 | -1 |
module LetIn1 where

-- | A simple binary tree with values at the leaves.
data Tree a = Leaf a | Branch (Tree a) (Tree a)

-- | Collect the leaves of a tree, left to right.
-- NOTE(review): the first two 'Branch' equations are subsumed by the final
-- catch-all 'Branch' equation; the file looks like refactoring-tool output
-- exercising pattern introduction, so the redundant clauses are deliberately
-- left in place.
fringe_global x = let
                    fringe :: Tree a -> [a]
                    fringe (Leaf x) = [x]
                    fringe (Branch left@(Leaf b_1) right)
                      = (fringe left) ++ (fringe right)
                    fringe (Branch left@(Branch b_1 b_2) right)
                      = (fringe left) ++ (fringe right)
                    fringe (Branch left right)
                      = (fringe left) ++ (fringe right)
in fringe x | kmate/HaRe | old/testing/subIntroPattern/LetIn1_TokOut.hs | bsd-3-clause | 570 | 0 | 14 | 277 | 198 | 102 | 96 | 12 | 4 |
{-# OPTIONS #-}
-----------------------------------------------------------------------------
{- |
Module : Numeric.GSL.Integration
Copyright : (c) Alberto Ruiz 2006
License : GPL-style
Maintainer : Alberto Ruiz (aruiz at um dot es)
Stability : provisional
Portability : uses ffi
Numerical integration routines.
<http://www.gnu.org/software/gsl/manual/html_node/Numerical-Integration.html#Numerical-Integration>
-}
-----------------------------------------------------------------------------
module Numeric.GSL.Integration (
integrateQNG,
integrateQAGS,
integrateQAGI,
integrateQAGIU,
integrateQAGIL
) where
import Foreign.C.Types
import Foreign.Marshal.Alloc(malloc, free)
import Foreign.Ptr(Ptr, FunPtr, freeHaskellFunPtr)
import Foreign.Storable(peek)
import Data.Packed.Internal(check,(//))
import System.IO.Unsafe(unsafePerformIO)
{- | conversion of Haskell functions into function pointers that can be used in the C side
-}
foreign import ccall safe "wrapper" mkfun:: (Double -> Ptr() -> Double) -> IO( FunPtr (Double -> Ptr() -> Double))
--------------------------------------------------------------------
-- | Shared driver for every quadrature wrapper in this module.
--
-- All five GSL bindings follow exactly the same protocol: allocate two
-- output cells (value and error estimate), wrap the Haskell integrand as a
-- C function pointer, run the C routine (checking its status code via
-- 'check'), read the outputs back, and release every resource.  Factoring
-- the protocol out removes five near-identical copies of that code.
runQuadrature :: String               -- ^ label used by 'check' when the C call fails
              -> (Double -> Double)   -- ^ Haskell integrand
              -> (FunPtr (Double -> Ptr () -> Double)
                    -> Ptr Double -> Ptr Double -> IO CInt)
                 -- ^ C routine, already applied to its bounds/precision,
                 --   still expecting the integrand and the two output cells
              -> (Double, Double)     -- ^ (integral value, estimated error)
runQuadrature msg f cfun = unsafePerformIO $ do
    r  <- malloc
    e  <- malloc
    fp <- mkfun (\x _ -> f x)
    cfun fp r e // check msg
    vr <- peek r
    ve <- peek e
    free r
    free e
    freeHaskellFunPtr fp
    return (vr, ve)

{- | Numerical integration using /gsl_integration_qags/ (adaptive integration with singularities). For example:

@\> let quad = integrateQAGS 1E-9 1000
\> let f a x = x**(-0.5) * log (a*x)
\> quad (f 1) 0 1
(-3.999999999999974,4.871658632055187e-13)@
-}
integrateQAGS :: Double               -- ^ precision (e.g. 1E-9)
                 -> Int               -- ^ size of auxiliary workspace (e.g. 1000)
                 -> (Double -> Double) -- ^ function to be integrated on the interval (a,b)
                 -> Double            -- ^ a
                 -> Double            -- ^ b
                 -> (Double, Double)  -- ^ result of the integration and error
integrateQAGS prec n f a b = runQuadrature "integrate_qags" f $ \fp r e ->
    c_integrate_qags fp a b prec (fromIntegral n) r e

foreign import ccall safe "gsl-aux.h integrate_qags"
    c_integrate_qags :: FunPtr (Double-> Ptr() -> Double) -> Double -> Double -> Double -> CInt
                        -> Ptr Double -> Ptr Double -> IO CInt

-----------------------------------------------------------------
{- | Numerical integration using /gsl_integration_qng/ (useful for fast integration of smooth functions). For example:

@\> let quad = integrateQNG 1E-6
\> quad (\\x -> 4\/(1+x*x)) 0 1
(3.141592653589793,3.487868498008632e-14)@
-}
integrateQNG :: Double               -- ^ precision (e.g. 1E-9)
                -> (Double -> Double) -- ^ function to be integrated on the interval (a,b)
                -> Double            -- ^ a
                -> Double            -- ^ b
                -> (Double, Double)  -- ^ result of the integration and error
integrateQNG prec f a b = runQuadrature "integrate_qng" f $ \fp r e ->
    c_integrate_qng fp a b prec r e

foreign import ccall safe "gsl-aux.h integrate_qng"
    c_integrate_qng :: FunPtr (Double-> Ptr() -> Double) -> Double -> Double -> Double
                       -> Ptr Double -> Ptr Double -> IO CInt

--------------------------------------------------------------------
{- | Numerical integration using /gsl_integration_qagi/ (integration over the infinite integral -Inf..Inf using QAGS).
For example:

@\> let quad = integrateQAGI 1E-9 1000
\> let f a x = exp(-a * x^2)
\> quad (f 0.5)
(2.5066282746310002,6.229215880648858e-11)@
-}
integrateQAGI :: Double               -- ^ precision (e.g. 1E-9)
                 -> Int               -- ^ size of auxiliary workspace (e.g. 1000)
                 -> (Double -> Double) -- ^ function to be integrated on the interval (-Inf,Inf)
                 -> (Double, Double)  -- ^ result of the integration and error
integrateQAGI prec n f = runQuadrature "integrate_qagi" f $ \fp r e ->
    c_integrate_qagi fp prec (fromIntegral n) r e

foreign import ccall safe "gsl-aux.h integrate_qagi"
    c_integrate_qagi :: FunPtr (Double-> Ptr() -> Double) -> Double -> CInt
                        -> Ptr Double -> Ptr Double -> IO CInt

--------------------------------------------------------------------
{- | Numerical integration using /gsl_integration_qagiu/ (integration over the semi-infinite integral a..Inf).
For example:

@\> let quad = integrateQAGIU 1E-9 1000
\> let f a x = exp(-a * x^2)
\> quad (f 0.5) 0
(1.2533141373155001,3.114607940324429e-11)@
-}
integrateQAGIU :: Double               -- ^ precision (e.g. 1E-9)
                  -> Int               -- ^ size of auxiliary workspace (e.g. 1000)
                  -> (Double -> Double) -- ^ function to be integrated on the interval (a,Inf)
                  -> Double            -- ^ a
                  -> (Double, Double)  -- ^ result of the integration and error
integrateQAGIU prec n f a = runQuadrature "integrate_qagiu" f $ \fp r e ->
    c_integrate_qagiu fp a prec (fromIntegral n) r e

foreign import ccall safe "gsl-aux.h integrate_qagiu"
    c_integrate_qagiu :: FunPtr (Double-> Ptr() -> Double) -> Double -> Double -> CInt
                         -> Ptr Double -> Ptr Double -> IO CInt

--------------------------------------------------------------------
{- | Numerical integration using /gsl_integration_qagil/ (integration over the semi-infinite integral -Inf..b).
For example:

@\> let quad = integrateQAGIL 1E-9 1000
\> let f a x = exp(-a * x^2)
\> quad (f 0.5) 0
(1.2533141373155001,3.114607940324429e-11)@
-}
integrateQAGIL :: Double               -- ^ precision (e.g. 1E-9)
                  -> Int               -- ^ size of auxiliary workspace (e.g. 1000)
                  -> (Double -> Double) -- ^ function to be integrated on the interval (-Inf,b)
                  -> Double            -- ^ b
                  -> (Double, Double)  -- ^ result of the integration and error
integrateQAGIL prec n f b = runQuadrature "integrate_qagil" f $ \fp r e ->
    c_integrate_qagil fp b prec (fromIntegral n) r e

foreign import ccall safe "gsl-aux.h integrate_qagil"
    c_integrate_qagil :: FunPtr (Double-> Ptr() -> Double) -> Double -> Double -> CInt
                         -> Ptr Double -> Ptr Double -> IO CInt
| mightymoose/liquidhaskell | benchmarks/hmatrix-0.15.0.1/lib/Numeric/GSL/Integration.hs | bsd-3-clause | 7,068 | 0 | 14 | 1,890 | 1,405 | 700 | 705 | -1 | -1 |
-- | Our extended FCode monad.
-- We add a mapping from names to CmmExpr, to support local variable names in
-- the concrete C-- code. The unique supply of the underlying FCode monad
-- is used to grab a new unique for each local variable.
-- In C--, a local variable can be declared anywhere within a proc,
-- and it scopes from the beginning of the proc to the end. Hence, we have
-- to collect declarations as we parse the proc, and feed the environment
-- back in circularly (to avoid a two-pass algorithm).
module StgCmmExtCode (
CmmParse, unEC,
Named(..), Env,
loopDecls,
getEnv,
withName,
getName,
newLocal,
newLabel,
newBlockId,
newFunctionName,
newImport,
lookupLabel,
lookupName,
code,
emit, emitLabel, emitAssign, emitStore,
getCode, getCodeR, getCodeScoped,
emitOutOfLine,
withUpdFrameOff, getUpdFrameOff
)
where
import qualified StgCmmMonad as F
import StgCmmMonad (FCode, newUnique)
import Cmm
import CLabel
import MkGraph
-- import BasicTypes
import BlockId
import DynFlags
import FastString
import Module
import UniqFM
import Unique
import Control.Monad (liftM, ap)
-- | The environment contains variable definitions or blockids.
data Named
  = VarN CmmExpr    -- ^ Holds CmmLit(CmmLabel ..) which gives the label type,
                    --   eg, RtsLabel, ForeignLabel, CmmLabel etc.
  | FunN   UnitId   -- ^ A function name from this package
  | LabelN BlockId  -- ^ A blockid of some code or data.

-- | An environment of named things.
type Env = UniqFM Named

-- | Local declarations that are in scope during code generation.
type Decls = [(FastString,Named)]

-- | Does a computation in the FCode monad, with a current environment
-- and a list of local declarations.  Returns the resulting list of
-- declarations.  The 'String' is the current context name, used in
-- error messages (see 'withName' / 'getName').
newtype CmmParse a
  = EC { unEC :: String -> Env -> Decls -> FCode (Decls, a) }

-- | A 'CmmParse' computation run only for its effect on the declarations.
type ExtCode = CmmParse ()
-- | Inject a pure value: the declaration list passes through untouched.
returnExtFC :: a -> CmmParse a
returnExtFC result = EC $ \_ctxt _env decls -> return (decls, result)

-- | Sequence two parses, threading the context, environment and the
-- (possibly extended) declaration list through both.
thenExtFC :: CmmParse a -> (a -> CmmParse b) -> CmmParse b
thenExtFC (EC first) cont = EC $ \ctxt env decls -> do
    (decls', x) <- first ctxt env decls
    unEC (cont x) ctxt env decls'
-- Standard Functor/Applicative/Monad boilerplate, defined in terms of the
-- hand-written return/bind above.
instance Functor CmmParse where
      fmap = liftM

instance Applicative CmmParse where
      pure = returnExtFC
      (<*>) = ap

instance Monad CmmParse where
  (>>=) = thenExtFC

instance HasDynFlags CmmParse where
    getDynFlags = EC (\_ _ d -> do dflags <- getDynFlags
                                   return (d, dflags))
-- | Takes the variable decarations and imports from the monad
-- and makes an environment, which is looped back into the computation.
-- In this way, we can have embedded declarations that scope over the whole
-- procedure, and imports that scope over the entire module.
-- Discards the local declaration contained within decl'
--
loopDecls :: CmmParse a -> CmmParse a
loopDecls (EC fcode) =
      EC $ \c e globalDecls -> do
        -- Knot-tying: the declarations produced by this very computation
        -- are (lazily, via fixC and the lazy pattern) fed back in as part
        -- of its own environment, giving whole-proc scoping in one pass.
        (_, a) <- F.fixC $ \ ~(decls, _) ->
          fcode c (addListToUFM e decls) globalDecls
        -- The local declarations are deliberately discarded here; only the
        -- enclosing (global) declarations propagate outwards.
        return (globalDecls, a)
-- | Get the current environment from the monad.
getEnv :: CmmParse Env
getEnv = EC $ \_ e s -> return (s, e)

-- | Get the current context name from the monad
getName :: CmmParse String
getName = EC $ \c _ s -> return (s, c)

-- | Set context name for a sub-parse
withName :: String -> CmmParse a -> CmmParse a
withName c' (EC fcode) = EC $ \_ e s -> fcode c' e s

-- | Cons a binding onto the declaration list carried by the monad.
-- (It only becomes visible in the environment once 'loopDecls' feeds the
-- collected declarations back in.)
addDecl :: FastString -> Named -> ExtCode
addDecl name named = EC $ \_ _ s -> return ((name, named) : s, ())

-- | Add a new variable to the list of local declarations.
-- The CmmExpr says where the value is stored.
addVarDecl :: FastString -> CmmExpr -> ExtCode
addVarDecl var expr = addDecl var (VarN expr)

-- | Add a new label to the list of local declarations.
addLabel :: FastString -> BlockId -> ExtCode
addLabel name block_id = addDecl name (LabelN block_id)
-- | Create a fresh local variable of a given type.
newLocal
        :: CmmType              -- ^ data type
        -> FastString           -- ^ name of variable
        -> CmmParse LocalReg    -- ^ register holding the value
newLocal ty name = do
   u <- code newUnique
   let reg = LocalReg u ty
   -- Record the variable so later references in the parsed C-- resolve
   -- to this register.
   addVarDecl name (CmmReg (CmmLocal reg))
   return reg

-- | Allocate a fresh label and bind it in the local environment.
newLabel :: FastString -> CmmParse BlockId
newLabel name = do
   u <- code newUnique
   -- Build the BlockId once and use it for both the binding and the
   -- result (the original recomputed mkBlockId u).
   let bid = mkBlockId u
   addLabel name bid
   return bid
-- | Allocate an anonymous fresh BlockId (no environment binding).
newBlockId :: CmmParse BlockId
newBlockId = code F.newLabelC

-- | Add a local function to the environment.
newFunctionName
        :: FastString   -- ^ name of the function
        -> UnitId       -- ^ package of the current module
        -> ExtCode
newFunctionName name pkg = addDecl name (FunN pkg)

-- | Add an imported foreign label to the list of local declarations.
-- If this is done at the start of the module the declaration will scope
-- over the whole module.
newImport
        :: (FastString, CLabel)
        -> CmmParse ()
newImport (name, cmmLabel)
   = addVarDecl name (CmmLit (CmmLabel cmmLabel))
-- | Lookup the BlockId bound to the label with this name.
-- If one hasn't been bound yet, create a fresh one based on the
-- Unique of the name.
lookupLabel :: FastString -> CmmParse BlockId
lookupLabel name = do
  env <- getEnv
  return $
    case lookupUFM env name of
      Just (LabelN l) -> l
      _other          -> mkBlockId (newTagUnique (getUnique name) 'L')

-- | Lookup the location of a named variable.
-- Unknown names are treated as if they had been 'import'ed from the runtime system.
-- This saves us a lot of bother in the RTS sources, at the expense of
-- deferring some errors to link time.
lookupName :: FastString -> CmmParse CmmExpr
lookupName name = do
  env <- getEnv
  return $
    case lookupUFM env name of
      Just (VarN e)   -> e
      Just (FunN pkg) -> CmmLit (CmmLabel (mkCmmCodeLabel pkg name))
      _other          -> CmmLit (CmmLabel (mkCmmCodeLabel rtsUnitId name))
-- | Lift an FCode computation into the CmmParse monad
code :: FCode a -> CmmParse a
code fc = EC $ \_ _ s -> do
              r <- fc
              return (s, r)

emit :: CmmAGraph -> CmmParse ()
emit = code . F.emit

emitLabel :: BlockId -> CmmParse ()
emitLabel = code . F.emitLabel

emitAssign :: CmmReg -> CmmExpr -> CmmParse ()
emitAssign l r = code (F.emitAssign l r)

emitStore :: CmmExpr -> CmmExpr -> CmmParse ()
emitStore l r = code (F.emitStore l r)

-- | Run a sub-parse and hand back the graph it emitted rather than
-- emitting it (threading the declaration list through).
getCode :: CmmParse a -> CmmParse CmmAGraph
getCode (EC ec) = EC $ \c e s -> do
  ((s',_), gr) <- F.getCodeR (ec c e s)
  return (s', gr)

-- | As 'getCode', but also return the sub-parse's result.
getCodeR :: CmmParse a -> CmmParse (a, CmmAGraph)
getCodeR (EC ec) = EC $ \c e s -> do
  ((s', r), gr) <- F.getCodeR (ec c e s)
  return (s', (r,gr))

getCodeScoped :: CmmParse a -> CmmParse (a, CmmAGraphScoped)
getCodeScoped (EC ec) = EC $ \c e s -> do
  ((s', r), gr) <- F.getCodeScoped (ec c e s)
  return (s', (r,gr))

emitOutOfLine :: BlockId -> CmmAGraphScoped -> CmmParse ()
emitOutOfLine l g = code (F.emitOutOfLine l g)

withUpdFrameOff :: UpdFrameOffset -> CmmParse () -> CmmParse ()
withUpdFrameOff size inner
  = EC $ \c e s -> F.withUpdFrameOff size $ (unEC inner) c e s

getUpdFrameOff :: CmmParse UpdFrameOffset
getUpdFrameOff = code $ F.getUpdFrameOff
| oldmanmike/ghc | compiler/codeGen/StgCmmExtCode.hs | bsd-3-clause | 7,447 | 0 | 15 | 1,941 | 1,899 | 1,012 | 887 | 144 | 3 |
{-# language PolyKinds #-}
{-# language FlexibleContexts #-}
{-# language ConstraintKinds #-}
{-# language FlexibleInstances #-}
{-# language FunctionalDependencies #-}
import GHC.Exts (Constraint)
-- Regression test for GHC #11516: a poly-kinded class used as a
-- superclass constraint together with a functional dependency.
class Ríki (p :: i -> i -> *)

class (Ríki p) => Varpi p q f | f -> p q

-- The context mentions @Varpi () ()@ at a different kind than the
-- instance head; the instance body is intentionally empty.
instance Varpi () () f => Varpi (->) (->) (Either f) where
| ezyang/ghc | testsuite/tests/polykinds/T11516.hs | bsd-3-clause | 332 | 0 | 9 | 59 | 107 | 59 | 48 | -1 | -1 |
module T12746 where
import T12746A
-- | 'True' exactly when the argument matches the 'Foo' pattern
-- (imported from T12746A).
foo Foo = True
foo _   = False
| ezyang/ghc | testsuite/tests/patsyn/should_compile/T12746.hs | bsd-3-clause | 94 | 0 | 7 | 35 | 31 | 17 | 14 | 5 | 2 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, ForeignFunctionInterface #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.ConsoleHandler
-- Copyright : (c) The University of Glasgow
-- License : see libraries/base/LICENSE
--
-- Maintainer : cvs-ghc@haskell.org
-- Stability : internal
-- Portability : non-portable (GHC extensions)
--
-- NB. the contents of this module are only available on Windows.
--
-- Installing Win32 console handlers.
--
-----------------------------------------------------------------------------
module GHC.ConsoleHandler
#if !defined(mingw32_HOST_OS) && !defined(__HADDOCK__)
where
#else /* whole file */
( Handler(..)
, installHandler
, ConsoleEvent(..)
, flushConsole
) where
{-
#include "rts/Signals.h"
Note: this #include is inside a Haskell comment
but it brings into scope some #defines
that are used by CPP below (eg STG_SIG_DFL).
Having it in a comment means that there's no
danger that C-like crap will be misunderstood
by GHC
-}
import Foreign
import Foreign.C
import GHC.IO.FD
import GHC.IO.Exception
import GHC.IO.Handle.Types
import GHC.IO.Handle.Internals
import GHC.Conc
import Control.Concurrent.MVar
import Data.Typeable
-- | What to do on receipt of a console event.
data Handler
  = Default                        -- ^ restore the default (OS) behaviour
  | Ignore                         -- ^ drop the event
  | Catch (ConsoleEvent -> IO ())  -- ^ run the handler in a new thread

-- | Allows Windows console events to be caught and handled. To
-- handle a console event, call 'installHandler' passing the
-- appropriate 'Handler' value. When the event is received, if the
-- 'Handler' value is @Catch f@, then a new thread will be spawned by
-- the system to execute @f e@, where @e@ is the 'ConsoleEvent' that
-- was received.
--
-- Note that console events can only be received by an application
-- running in a Windows console. Certain environments that look like consoles
-- do not support console events, these include:
--
--  * Cygwin shells with @CYGWIN=tty@ set (if you don't set @CYGWIN=tty@,
--    then a Cygwin shell behaves like a Windows console).
--  * Cygwin xterm and rxvt windows
--  * MSYS rxvt windows
--
-- In order for your application to receive console events, avoid running
-- it in one of these environments.
--
-- Returns the previously-installed handler.  The @STG_SIG_*@ names below
-- are CPP macros brought in via rts/Signals.h (see the comment near the
-- imports).
installHandler :: Handler -> IO Handler
installHandler handler
  | threaded =
    -- Threaded RTS: the RTS dispatches to the handler stored in
    -- 'win32ConsoleHandler', so swap it atomically under the MVar.
    modifyMVar win32ConsoleHandler $ \old_h -> do
      (new_h, rc) <-
        case handler of
          Default -> do
            r <- rts_installHandler STG_SIG_DFL nullPtr
            return (no_handler, r)
          Ignore -> do
            r <- rts_installHandler STG_SIG_IGN nullPtr
            return (no_handler, r)
          Catch h -> do
            r <- rts_installHandler STG_SIG_HAN nullPtr
            return (h, r)
      prev_handler <-
        case rc of
          STG_SIG_DFL -> return Default
          STG_SIG_IGN -> return Ignore
          STG_SIG_HAN -> return (Catch old_h)
          _ -> error "installHandler: Bad threaded rc value"
      return (new_h, prev_handler)

  | otherwise =
    -- Non-threaded RTS: the handler is passed to the RTS as a StablePtr.
    alloca $ \p_sp -> do
      rc <-
        case handler of
          Default -> rts_installHandler STG_SIG_DFL p_sp
          Ignore  -> rts_installHandler STG_SIG_IGN p_sp
          Catch h -> do
            v <- newStablePtr (toHandler h)
            poke p_sp v
            rts_installHandler STG_SIG_HAN p_sp
      case rc of
        STG_SIG_DFL -> return Default
        STG_SIG_IGN -> return Ignore
        STG_SIG_HAN -> do
          osptr <- peek p_sp
          oldh  <- deRefStablePtr osptr
          -- stable pointer is no longer in use, free it.
          freeStablePtr osptr
          return (Catch (\ ev -> oldh (fromConsoleEvent ev)))
        _ -> error "installHandler: Bad non-threaded rc value"
 where
  -- Translate a 'ConsoleEvent' back to the Win32 event code.
  fromConsoleEvent ev =
    case ev of
      ControlC -> 0 {- CTRL_C_EVENT -}
      Break    -> 1 {- CTRL_BREAK_EVENT -}
      Close    -> 2 {- CTRL_CLOSE_EVENT -}
      Logoff   -> 5 {- CTRL_LOGOFF_EVENT -}
      Shutdown -> 6 {- CTRL_SHUTDOWN_EVENT -}

  -- Wrap a user handler so it receives a decoded event and signals the
  -- RTS when done.
  toHandler hdlr ev = do
    case toWin32ConsoleEvent ev of
      -- see rts/win32/ConsoleHandler.c for comments as to why
      -- rts_ConsoleHandlerDone is called here.
      Just x  -> hdlr x >> rts_ConsoleHandlerDone ev
      Nothing -> return () -- silently ignore..

  -- Placeholder stored when no Haskell handler is installed; forcing it
  -- is a bug.
  no_handler = error "win32ConsoleHandler"
foreign import ccall "rtsSupportsBoundThreads" threaded :: Bool
foreign import ccall unsafe "RtsExternal.h rts_InstallConsoleEvent"
rts_installHandler :: CInt -> Ptr (StablePtr (CInt -> IO ())) -> IO CInt
foreign import ccall unsafe "RtsExternal.h rts_ConsoleHandlerDone"
rts_ConsoleHandlerDone :: CInt -> IO ()
-- | Discard pending input on the console attached to the given 'Handle'.
-- Throws 'IllegalOperation' if the handle is not backed by a file
-- descriptor.
flushConsole :: Handle -> IO ()
flushConsole h =
  wantReadableHandle_ "flushConsole" h $ \ Handle__{haDevice=dev} ->
    case cast dev of
      Nothing -> ioException $
        IOError (Just h) IllegalOperation "flushConsole"
          "handle is not a file descriptor" Nothing Nothing
      Just fd -> do
        -- Retry on EINTR; any other error becomes an IOError.
        throwErrnoIfMinus1Retry_ "flushConsole" $
          flush_console_fd (fdFD fd)
foreign import ccall unsafe "consUtils.h flush_input_console__"
flush_console_fd :: CInt -> IO CInt
#endif /* mingw32_HOST_OS */
| beni55/haste-compiler | libraries/ghc-7.8/base/GHC/ConsoleHandler.hs | bsd-3-clause | 5,192 | 0 | 3 | 1,302 | 26 | 24 | 2 | 3 | 0 |
{-# LANGUAGE FlexibleContexts #-}
module Futhark.Analysis.Usage
( usageInStm
, usageInExp
, usageInLambda
, UsageInOp(..)
)
where
import Data.Monoid
import qualified Data.Set as S
import Futhark.Representation.AST
import Futhark.Representation.AST.Attributes.Aliases
import qualified Futhark.Analysis.UsageTable as UT
-- | The usage table for a single statement: usages from the pattern,
-- the lore annotation, and the bound expression (including its free
-- variables).
usageInStm :: (Attributes lore, Aliased lore, UsageInOp (Op lore)) =>
              Stm lore -> UT.UsageTable
usageInStm (Let pat lore e) =
  mconcat [usageInPat,
           usageInExpLore,
           usageInExp e,
           UT.usages (freeInExp e)]
  where usageInPat =
          -- Names used by the pattern, minus the names the pattern
          -- itself binds, plus consumption from in-place binds.
          UT.usages (mconcat (map freeIn $ patternElements pat)
                     `S.difference`
                     S.fromList (patternNames pat))
          <> mconcat (map consumptionInPatElem $ patternElements pat)
        usageInExpLore =
          UT.usages $ freeIn lore
        -- An in-place bind consumes its source array.
        consumptionInPatElem (PatElem _ (BindInPlace src _) _) =
          UT.consumedUsage src
        consumptionInPatElem _ =
          mempty

-- | Consumption implied by an expression: aliases of 'Consume'
-- arguments of applications, aliases of unique merge parameters of
-- loops, and whatever the op itself reports.
usageInExp :: (Aliased lore, UsageInOp (Op lore)) => Exp lore -> UT.UsageTable
usageInExp (Apply _ args _ _) =
  mconcat [ mconcat $ map UT.consumedUsage $
            S.toList $ subExpAliases arg
          | (arg, d) <- args, d == Consume ]
usageInExp (DoLoop _ merge _ _) =
  mconcat [ mconcat $ map UT.consumedUsage $
            S.toList $ subExpAliases se
          | (v, se) <- merge, unique $ paramDeclType v ]
usageInExp (Op op) =
  mconcat $ usageInOp op : map UT.consumedUsage (S.toList $ consumedInOp op)
usageInExp _ = UT.empty

-- | Operations that can report their own usage information.
class UsageInOp op where
  usageInOp :: op -> UT.UsageTable

instance UsageInOp () where
  usageInOp () = mempty

-- | Usage of the given arrays when passed to a lambda: an array is
-- marked consumed iff the lambda parameter it is zipped with is
-- consumed in the lambda body.
usageInLambda :: Aliased lore =>
                 Lambda lore -> [VName] -> UT.UsageTable
usageInLambda lam arrs =
  mconcat $
  map (UT.consumedUsage . snd) $
  filter ((`S.member` consumed_in_body) . fst) $
  zip (map paramName arr_params) arrs
  -- NOTE(review): 'arr_params' drops the first @length arrs@ lambda
  -- parameters and zips the remainder with @arrs@ -- this assumes a
  -- particular parameter layout; confirm against callers.
  where arr_params = snd $ splitAt n $ lambdaParams lam
        consumed_in_body = consumedInBody $ lambdaBody lam
        n = length arrs
| ihc/futhark | src/Futhark/Analysis/Usage.hs | isc | 2,107 | 0 | 14 | 573 | 679 | 352 | 327 | 55 | 2 |
module CommandLine ( parseCommandLine
, interpretGlobalOptions
, defaultGlobalOptions
) where
import ListExtras
import ProgramInfo
import System.Console.GetOpt
-- | Command-line flags recognised by the program.
data Flag = Verbose
          | Filename String
          | Output String
          | Help
          | Version
          | Display
          deriving Show

-- | Options that apply to the program as a whole.
data GlobalOptions = GlobalOptions { verbose :: Bool
                                   , file :: Maybe String
                                   , output :: Maybe String
                                   } deriving Show

-- | All global options start out unset.
defaultGlobalOptions :: GlobalOptions
defaultGlobalOptions = GlobalOptions { verbose = False
                                     , file = Nothing
                                     , output = Nothing
                                     }

-- | GetOpt descriptors, one per recognised flag.
options :: [OptDescr Flag]
options =
  [ Option "v" ["verbose"] (NoArg Verbose) "Verbose output"
  , Option "f" ["file"] (ReqArg Filename "FILE") "Input file"
  , Option "o" ["output"] (ReqArg Output "FILE") "Output file"
  , Option "V" ["version"] (NoArg Version) "Show version info"
  , Option "h" ["help"] (NoArg Help) "Show help"
  , Option "d" ["display"] (NoArg Display) "Display all expenses"
  ]

-- | Parse the raw argument list.  Returns the parsed flags, or a
-- human-readable error report (no arguments, unrecognised options, or
-- GetOpt errors).
parseCommandLine :: [String] -> Either String [Flag]
parseCommandLine args = case getOpt Permute options args of
  ([], [], []) -> Left $ displayErrors ["No arguments given\n"]
  (o, [], []) -> Right o
  (_, ns, []) -> Left $ displayErrors $ map (\o -> "Invalid option `" ++ o ++ "'\n") ns
  (_, _, es) -> Left $ displayErrors es
  where displayErrors :: [String] -> String
        displayErrors strs = unlines [ "Errors:"
                                     , concatMap (" " ++) strs
                                     , usageInfo usageHeader options
                                     ]
-- | Fold one flag into the global options.  The returned Bool is True
-- when the flag was consumed as a global option, False when it must be
-- handled elsewhere.
interpretGlobalOption :: GlobalOptions -> Flag -> (GlobalOptions, Bool)
interpretGlobalOption opts Verbose       = (opts { verbose = True }, True)
interpretGlobalOption opts (Filename fn) = (opts { file = Just fn }, True)
interpretGlobalOption opts (Output fn)   = (opts { output = Just fn }, True)
interpretGlobalOption opts _             = (opts, False)
-- | Interpret all global flags, returning the updated options together
-- with the flags that were not global options.
interpretGlobalOptions :: (GlobalOptions, [Flag]) -> (GlobalOptions, [Flag])
interpretGlobalOptions (gos, fs) = foldFilter interpretGlobalOption gos fs []
| fredmorcos/attic | projects/pet/archive/pet_haskell_20131209/CommandLine.hs | isc | 2,371 | 0 | 13 | 875 | 645 | 360 | 285 | 47 | 4 |
module Days.Day6
( day6
) where
import Days.Prelude
-- Both parts are unimplemented stubs: they ignore the parsed input and
-- produce the empty answer.
part1 = const ""

part2 = const ""

-- | Day-6 puzzle record: a parser that accepts anything, plus the two
-- (stub) solvers.
day6 =
  Day
  { _parser = parser
  , _dayPart1 = part1
  , _dayPart2 = part2
  }
  where
    parser :: Parser ()
    parser = pure ()
| unknownloner/aoc2016 | src/Days/Day6.hs | mit | 228 | 0 | 8 | 68 | 79 | 45 | 34 | 12 | 1 |
{-# LANGUAGE DeriveLift#-}
module System.Console.Docopt.Types
where
import Data.Char (isUpper)
import Data.List (nub)
import Data.Map (Map)
import qualified Data.Map as M
import Language.Haskell.TH.Syntax (Lift)
-- * Usage expression Types

-- | The name of a command, argument, or option.
type Name = String

-- | A usage-pattern tree over leaf values of type @a@.
data Pattern a = Sequence [Pattern a]   -- ^ all, in order
               | OneOf [Pattern a]      -- ^ exactly one alternative
               | Unordered [Pattern a]  -- ^ all, in any order
               | Optional (Pattern a)   -- ^ zero or one occurrence
               | Repeated (Pattern a)   -- ^ repeated occurrences
               | Atom a                 -- ^ a leaf
               deriving (Show, Eq, Lift)
-- | All leaf values reachable in a pattern.  Duplicate alternatives in
-- 'OneOf' and 'Unordered' are collapsed before descending.
atoms :: Eq a => Pattern a -> [a]
atoms pat =
  case pat of
    Sequence ps  -> concatMap atoms ps
    OneOf ps     -> concatMap atoms (nub ps)
    Unordered ps -> concatMap atoms (nub ps)
    Optional p   -> atoms p
    Repeated p   -> atoms p
    Atom a       -> [a]
-- | A named leaf node of the usage pattern tree
data Option = LongOption Name   -- ^ e.g. @--verbose@
            | ShortOption Char  -- ^ e.g. @-v@
            | Command Name      -- ^ a literal command word
            | Argument Name     -- ^ a positional argument
            | AnyOption         -- ^ the @[options]@ shortcut
            deriving (Show, Eq, Ord, Lift)

-- | A usage pattern whose leaves are options.
type OptPattern = Pattern Option
-- | Render an option the way a user would write it on the command line.
humanize :: Option -> String
humanize (Command name)    = name
humanize (Argument name)
  | all isUpper name       = name
  | otherwise              = "<" ++ name ++ ">"
humanize (LongOption name) = "--" ++ name
humanize (ShortOption c)   = '-' : [c]
humanize AnyOption         = "[options]"
-- | Used when parsing through the available option descriptions.
-- Holds a list of synonymous options, Maybe a default value (if specified),
-- an expectsVal :: Bool that indicates whether this option is a flag (--flag)
-- or an option that needs an argument (--opt=arg), and isRepeated :: Bool
-- that indicates whether this option is always single or needs to be accumulated
data OptionInfo = OptionInfo
  { synonyms :: [Option]
  , defaultVal :: Maybe String
  , expectsVal :: Bool
  , isRepeated :: Bool
  } deriving (Show, Eq, Lift)

-- | Build an 'OptionInfo' for a synonym list with every other field at
-- its default (no default value, flag-style, not repeated).
fromSynList :: [Option] -> OptionInfo
fromSynList opts = OptionInfo { synonyms = opts
                              , defaultVal = Nothing
                              , expectsVal = False
                              , isRepeated = False }
-- | Maps each available option to a OptionInfo entry
-- (each synonymous option gets its own separate entry, for easy lookup)
type OptInfoMap = Map Option OptionInfo

-- | Contains all the relevant information parsed out of a usage string.
-- Used to build the actual command-line arg parser.
type OptFormat = (OptPattern, OptInfoMap)

-- | State threaded through the argv parser.
data OptParserState = OptParserState
  { optInfoMap :: OptInfoMap     -- ^ option descriptions in scope
  , parsedArgs :: Arguments      -- ^ what has been parsed so far
  , inShortOptStack :: Bool      -- ^ currently inside e.g. @-abc@
  , inTopLevelSequence :: Bool   -- ^ not yet inside a nested pattern
  } deriving (Show)

-- | Initial parser state for the given option descriptions.
fromOptInfoMap :: OptInfoMap -> OptParserState
fromOptInfoMap m = OptParserState { optInfoMap = m
                                  , parsedArgs = M.empty
                                  , inShortOptStack = False
                                  , inTopLevelSequence = True }

-- | The value(s) associated with a parsed option.
data ArgValue = MultiValue [String]  -- ^ repeated option with arguments
              | Value String         -- ^ a single argument
              | NoValue              -- ^ argument expected, none given
              | Counted Int          -- ^ occurrence count of a repeated flag
              | Present              -- ^ flag was given
              | NotPresent           -- ^ flag was absent
              deriving (Show, Eq, Ord)

-- | Maps each Option to all of the valued parsed from the command line
-- (in order of last to first, if multiple values encountered)
type Arguments = Map Option ArgValue

-- | An abstract data type which represents Docopt usage patterns.
data Docopt = Docopt { optFormat :: OptFormat
                       -- | Retrieve the original usage string.
                     , usage :: String
                     }
| docopt/docopt.hs | System/Console/Docopt/Types.hs | mit | 3,776 | 0 | 10 | 1,340 | 736 | 420 | 316 | 72 | 6 |
module ActorPath where
import Address
-- | A path identifying an actor within an actor system.
--
-- NOTE(review): 'parent' is total and there is no root constructor, so
-- every chain of 'parent's is infinite (or cyclic).  See the warnings on
-- 'Show' and 'splitActorPath' below.
data ActorPath = ActorPath
  { address :: Address
  , parent :: ActorPath
  , name :: String
  , (/) :: String -> ActorPath  -- ^ child-path constructor (shadows division)
  }

-- TODO
-- WARNING(review): with no base case in the data type, this recurses
-- through 'parent' without terminating unless the chain is tied off.
instance Show ActorPath where
  show a = show (parent a) ++ "/" ++ name a

-- Structural equality; also recurses through the parent chain.
instance Eq ActorPath where
  p1 == p2 =
    address p1 == address p2 && parent p1 == parent p2 && name p1 == name p2

-- TODO: instance Ord where

-- | Split a path into its name components, outermost first.
--
-- WARNING(review): 'go' has no terminating case, so this never returns;
-- 'reverse' then forces the whole infinite list.  A root marker in
-- 'ActorPath' is needed to make this total.
splitActorPath :: ActorPath -> [String]
splitActorPath = reverse . go
  where go path = name path : go (parent path)
| crdueck/actors | ActorPath.hs | mit | 545 | 2 | 11 | 149 | 183 | 96 | 87 | 15 | 1 |
module Api.Users where
-- Prelude.
import ClassyPrelude hiding (hash)
import Control.Lens
import Data.Time (NominalDiffTime, addUTCTime)
import Database.Persist (Entity(Entity))
-- Servant imports.
import Servant
import Servant.Auth.Server hiding (makeJWT)
-- Local imports.
import Foundation
import Logging
import Model
import Query.User
import Types.BCrypt
import Types.Token
import Types.User hiding (userBio, userImage)
--------------------------------------------------------------------------------
-- | Servant type-level representation of the "users" route fragment.
type UsersApi auths = (Auth auths Token :> ProtectedApi) :<|> UnprotectedApi

-- | Handler function for the "users" route fragment.
usersHandler :: ServerT (UsersApi auths) App
usersHandler = protected :<|> unprotected

--------------------------------------------------------------------------------
-- | Type-level representation of the endpoints protected by 'Auth'.
type ProtectedApi = "users" :>
  "register"
    :> ReqBody '[JSON] UserRegister
    :> Post '[JSON] UserResponse

-- | Check authentication status and dispatch the request to the appropriate
-- endpoint handler.  Anything but a successful authentication is a 401.
protected :: AuthResult Token -> ServerT ProtectedApi App
protected (Authenticated t) = register t
protected _ = throwAll err401

-- | Registration endpoint handler: hash the password, insert the user,
-- log the event, and answer with a fresh token response.
register :: Token -> UserRegister -> App UserResponse
register _ userReg = do
  hashedPw <- hashPassword $ fromUPlainText $ userReg ^. password
  dbUser <- runDB $ insertUser (userReg ^. name) (userReg ^. email) hashedPw
  let logAction = addNamespace "register"
                $ logInfoM [logt|"#{dbUser} was registered."|]
  mkUserResponse userReg hashedPw dbUser logAction
--------------------------------------------------------------------------------
-- | Type-level representation of the endpoints not protected by 'Auth'.
type UnprotectedApi = "users" :>
  "login"
    :> ReqBody '[JSON] UserLogin
    :> Post '[JSON] UserResponse

-- | Dispatch the request to the appropriate endpoint handler.
unprotected :: ServerT UnprotectedApi App
unprotected = login

-- | Login endpoint handler: look the user up by email (404 when absent),
-- verify the password, log the event, and answer with a fresh token
-- response.
login :: UserLogin -> App UserResponse
login userLogin = do
  -- Get the user and password associated with this email, if they exist.
  maybeUserPass <- runDB $ getUserByEmail (userLogin ^. email)
  (dbUser, dbPass) <- case maybeUserPass of
    Nothing -> throwM err404
    Just ( (Entity _ dbUser)
         , (Entity _ dbPass)) -> pure (dbUser, dbPass)
  let logAction = addNamespace "login"
                $ logInfoM [logt|"#{dbUser} logged in."|]
  mkUserResponse userLogin (passwordHash dbPass) dbUser logAction
--------------------------------------------------------------------------------
-- | Return a token for a given user if the login password is valid when
-- compared to the hash in the database; throw 401 if the user's password
-- is invalid
mkToken :: Text -> BCrypt -> User -> App Token
mkToken pass hashed dbUser = do
  -- Validate the stored hash against the plaintext password
  isValid <- validatePassword pass hashed
  -- If the password isn't valid, throw a 401
  -- TODO - maybe validatePassword should return an Either so that when
  -- validation fails internally, we can throw a 500.
  if isValid then pure () else throwM err401
  pure $ Token (userUuid dbUser)

-- | Return a textual view of a JWT from a token, valid for a given duration
-- of seconds
mkJWT :: Token -> NominalDiffTime -> App JWTText
mkJWT token duration = do
  -- Try to make a JWT with the settings from the Reader environment.
  settings <- view jwtSettings
  expires <- liftIO $ Just . (addUTCTime duration) <$> getCurrentTime
  tryJWT <- liftIO $ makeJWT token settings expires
  case tryJWT of
    -- If JWT generation failed, log the error and throw a 500
    Left e -> addNamespace "jwt_generation" $ do
      logErrorM [logt|JWT generation failed with the error #{e}|]
      throwM err500
    Right lazyJWT -> pure . JWTText . decodeUtf8 . toStrict $ lazyJWT

-- | Generate a 'UserResponse' with an expiring token (defined in 'Config'),
-- logging to 'Katip' with the given @logAction@ function.
mkUserResponse
  :: HasPassword r UPlainText
  => r -> BCrypt -> User -> App () -> App UserResponse
mkUserResponse user hashedPw dbUser logAction = do
  timeout <- view jwtTimeout
  tok <- mkToken (fromUPlainText $ user ^. password) hashedPw dbUser
  jwt <- mkJWT tok timeout
  logAction
  pure $ UserResponse
    (userEmail dbUser) jwt (userName dbUser) (userBio dbUser) (userImage dbUser)
| jkachmar/servant-persistent-realworld | src/Api/Users.hs | mit | 4,676 | 0 | 14 | 965 | 939 | 488 | 451 | -1 | -1 |
module WUnderground
( module WUnderground.Client
, module WUnderground.Types
) where
-------------------------------------------------------------------------------
import WUnderground.Client
import WUnderground.Types
-------------------------------------------------------------------------------
| Soostone/wunderground | src/WUnderground.hs | mit | 312 | 0 | 5 | 30 | 30 | 20 | 10 | 5 | 0 |
module Main where
import System.Environment
import System.Random
import System.Console.ANSI
import System.Timeout
import System.IO
import Control.Monad
import Control.DeepSeq
import Control.Applicative
import Text.Read (readMaybe)
-- | A single cell of the world: alive or dead.
data Cell = Alive | Dead
  deriving Eq

-- | A world: width, height, and the cells in row-major order.
data World = World Int Int [Cell]

-- Each cell renders as a single character.
instance Show Cell where
  show Alive = "O"
  show Dead = " "
-- | Entry point: given width and height as arguments, initializes a
-- random World and starts the main loop.
--
-- Fix: the original used 'read', which crashed with an unhelpful error
-- on non-numeric arguments; 'readMaybe' now falls through to the usage
-- message instead.
main :: IO ()
main = do
  args <- getArgs
  case mapM readMaybe args :: Maybe [Int] of
    Just [w, h] -> do
      hSetBuffering stdin NoBuffering
      hideCursor
      cls
      loop $ initWorld w h
    _ -> putStrLn "Usage: life width height"
-- | Displays the World, evolves it and checks for input.
-- Keys: @q@ quits, @r@ re-seeds the world; anything else (or a 50 ms
-- timeout) advances one generation.
loop :: IO World -> IO ()
loop world = do
  w @ (World sw sh cs) <- world
  setSGR [SetColor Foreground Vivid Cyan]
  printWorld w
  setSGR []
  showInfo cs
  -- Wait up to 50 ms for a key press before advancing.
  input <- timeout 50000 getChar
  case input of
    Just i | i == 'q' -> setSGR [] >> showCursor
           | i == 'r' -> initWorld sw sh >>= step
           | otherwise -> step w
    Nothing -> step w
  where step w = cls >> loop (return $ evolve w)
-- | Clears the screen and resets cursor position.
cls :: IO ()
cls = clearScreen >> setCursorPosition 0 0

-- | Given width and height, returns a random World.
initWorld :: Int -> Int -> IO World
initWorld w h = World w h <$> replicateM (w * h) initCell

-- | Returns a random Cell (alive or dead with equal probability).
initCell :: IO Cell
initCell = do
  r <- randomRIO (0, 1) :: IO Int
  return $ if r == 0 then Alive else Dead
-- | One generation step: re-evaluate every cell against the current
-- world, row by row.
evolve :: World -> World
evolve world@(World width height _) =
  World width height
    [ evolveCell col row world
    | row <- [0 .. height - 1]
    , col <- [0 .. width - 1]
    ]
-- | The next state of the cell at (x, y): alive with exactly three
-- living neighbours, or with two if already alive; dead otherwise.
evolveCell :: Int -> Int -> World -> Cell
evolveCell x y world
  | neighbours == 3                      = Alive
  | neighbours == 2 && isAlive x y world = Alive
  | otherwise                            = Dead
  where neighbours = countNeighbours x y world
-- | The number of living cells among the eight neighbours of (x, y).
countNeighbours :: Int -> Int -> World -> Int
countNeighbours x y world =
  length [ () | (i, j) <- around, isAlive i j world ]
  where around = [ (i, j)
                 | i <- [x - 1 .. x + 1]
                 , j <- [y - 1 .. y + 1]
                 , (i, j) /= (x, y)
                 ]
-- | Whether the cell at (x, y) is alive.
isAlive :: Int -> Int -> World -> Bool
isAlive x y world = getCell x y world == Alive
-- | The cell at (x, y); coordinates outside the world count as dead.
getCell :: Int -> Int -> World -> Cell
getCell x y (World w h cells)
  | outside   = Dead
  | otherwise = cells !! (y * w + x)
  where outside = x < 0 || y < 0 || x >= w || y >= h
-- | Display the given World.  The rendered string is forced with '$!!'
-- so the whole frame is computed before printing begins.
printWorld :: World -> IO ()
printWorld w = putStrLn $!! showWorld w
-- | Render a World as text: one line of cell characters per row.
showWorld :: World -> String
showWorld (World w _ allCells) = go allCells
  where
    go [] = ""
    go cells =
      let (row, rest) = splitAt w cells
      in concatMap show row ++ "\n" ++ go rest
-- | Print the key help line together with the current live-cell count.
showInfo :: [Cell] -> IO ()
showInfo cells =
  putStrLn ("(q - quit) (r - reset) live cells: " ++ show living)
  where living = length (filter (== Alive) cells)
| GeorgiKhomeriki/MonadOfLife | life.hs | mit | 3,318 | 3 | 15 | 893 | 1,172 | 593 | 579 | 77 | 2 |
{-# LANGUAGE PatternGuards #-}
-- TODO: this should actually be reusable by being a package.
-- And the parser should be an actual parser (ReadP? Whatever).
--
-- Could be combined with Toxaris/filter-agda-dependency-graph, since it needs
-- the output of this tool.
import qualified Data.List as List
import Control.Applicative
import System.Environment
import System.IO
import System.Exit
import System.FilePath
import System.FilePath.Find
--------------------------------------------------------------------------------
-- Configuration parameters
--------------------------------------------------------------------------------
-- Header-comment marker that identifies extractable module headers.
marker = "INCREMENTAL λ-CALCULUS"
projectName = "Incremental λ-calculus"
binaryNameSuffix = "Ilc"
binaryName = "GenerateEverything" ++ binaryNameSuffix
headerFile = "EverythingHeader.agda.inc"
outputFile = "Everything.agda"

-- This could be "src", as it was in the Agda standard library. But that change
-- might cause conflicts with other Agda work.
srcDir = "."

--------------------------------------------------------------------------------
-- Logic to choose files to list - project-dependent
--------------------------------------------------------------------------------

-- Should we descend into this dir?
descendIntoDir = fileName /=? "bugs"

-- Do we want to exclude this source file
-- (README and anything under a .stack-work build directory)
wantedSourceFile =
  fileName /=? "README.agda" &&?
  liftOp ((not .) . flip List.isInfixOf) filePath (".stack-work" ++ [pathSeparator])

--------------------------------------------------------------------------------
-- Logic to choose files to list - should be project-independent
--------------------------------------------------------------------------------

-- A source file is any wanted Agda file other than the generated output.
isSource =
  fileName /=? outputFile &&?
  (extension ==? ".agda" ||? extension ==? ".lagda") &&?
  wantedSourceFile

-- All source files found under 'srcDir'.
sources = find descendIntoDir isSource srcDir
----------------------------------------
-- Reusable implementation
----------------------------------------
-- | Entry point: refuse any command-line arguments, then regenerate
-- 'outputFile' from the header file plus one import line per library
-- module.
main = do
  args <- getArgs
  case args of
    [] -> return ()
    _  -> hPutStr stderr usage >> exitFailure
  header  <- readFileUTF8 headerFile
  modules <- filter isLibraryModule . List.sort <$> sources
  headers <- mapM extractHeader modules
  writeFileUTF8 outputFile $
    header ++ format (zip modules headers)
-- | Usage info.  Printed to stderr when the program is invoked with any
-- arguments.
usage :: String
usage = unlines
  [ binaryName ++ ": A utility program for Agda libraries (specialized for "
      ++ projectName ++ ")."
  , ""
  , "Usage: " ++ binaryName
  , ""
  , "This program should be run in the base directory of a clean checkout of"
  , "the library."
  , ""
  , "The program generates documentation for the library by extracting"
  , "headers from library modules. The output is written to " ++ outputFile
  , "with the file " ++ headerFile ++ " inserted verbatim at the beginning."
  ]
-- | Returns 'True' for all Agda files except for core modules.
isLibraryModule :: FilePath -> Bool
isLibraryModule path = isAgdaFile && notCore
  where
    isAgdaFile = takeExtension path `elem` [".agda", ".lagda"]
    notCore    = dropExtension (takeFileName path) /= "Core"
-- | Drop elements satisfying the predicate from both ends of a list.
--
-- Rewritten from a double-reverse formulation (which also bound two
-- unused names) to the standard dropWhile/dropWhileEnd idiom; the
-- behaviour is unchanged.
trim :: (a -> Bool) -> [a] -> [a]
trim toTrim = List.dropWhileEnd toTrim . dropWhile toTrim
-- | Reads a module and extracts the header.
--
-- A header is a comment block of the shape
--
-- > --------------------------------------------------
-- > -- INCREMENTAL λ-CALCULUS
-- > --
-- > -- <description lines>
-- > --------------------------------------------------
--
-- and the description lines (with the surrounding delimiter lines
-- trimmed off) are returned.  Modules without such a header yield @[]@.
extractHeader :: FilePath -> IO [String]
extractHeader mod = fmap (extract . lines) $ readFileUTF8 mod
  where
    -- A delimiter is a non-empty line consisting solely of dashes.
    delimiter line = length line /= 0 && all (== '-') line
    extract (d1 : expectedMarker : "--" : ss)
      | delimiter d1
      , expectedMarker == "-- " ++ marker
      , (info, rest) <- span ("--" `List.isPrefixOf`) ss
      , let d2 = last info
      , delimiter d2
      = trim delimiter info
    extract _ = []
-- | Formats the extracted module information.
format :: [(FilePath, [String])]
          -- ^ Pairs of module names and headers. All lines in the
          -- headers are already prefixed with \"-- \".
       -> String
-- Idiom fix: @unlines . concat . map@ collapsed to @concatMap@.
format = unlines . concatMap fmt
  where
    -- One paragraph per module: blank line, header comment, import line.
    fmt (mod, header) = "" : header ++ ["import " ++ fileToMod mod]
-- | Translates a file name to the corresponding module name. It is
-- assumed that the file name corresponds to an Agda module under
-- 'srcDir'.
fileToMod :: FilePath -> String
fileToMod = map slashToDot . dropExtension . makeRelative srcDir
  where
    -- Path separators become module-name dots.
    slashToDot c | isPathSeparator c = '.'
                 | otherwise = c
-- | A variant of 'readFile' which uses the 'utf8' encoding.
--
-- NOTE(review): like 'readFile' this relies on lazy I/O
-- ('hGetContents'); the handle stays open until the contents are fully
-- consumed.
readFileUTF8 :: FilePath -> IO String
readFileUTF8 f = do
  h <- openFile f ReadMode
  hSetEncoding h utf8
  hGetContents h

-- | A variant of 'writeFile' which uses the 'utf8' encoding.
writeFileUTF8 :: FilePath -> String -> IO ()
writeFileUTF8 f s = withFile f WriteMode $ \h -> do
  hSetEncoding h utf8
  hPutStr h s
| inc-lc/ilc-agda | GenerateEverythingIlc.hs | mit | 4,747 | 0 | 13 | 905 | 912 | 483 | 429 | 83 | 2 |
{- |
Module: Capnp.Pointer
Description: Support for parsing/serializing capnproto pointers
This module provides support for parsing and serializing capnproto pointers.
This is a low-level module; most users will not need to call it directly.
-}
module Capnp.Pointer
( Ptr(..)
, ElementSize(..)
, EltSpec(..)
, parsePtr
, parsePtr'
, serializePtr
, serializePtr'
, parseEltSpec
, serializeEltSpec
)
where
import Data.Bits
import Data.Int
import Data.Word
import Capnp.Bits
-- | A 'Ptr' represents the information in a capnproto pointer.
data Ptr
    = StructPtr !Int32 !Word16 !Word16
    -- ^ @'StructPtr' off dataSz ptrSz@ is a pointer to a struct
    -- at offset @off@ in words from the end of the pointer, with
    -- a data section of size @dataSz@ words, and a pointer section
    -- of size @ptrSz@ words.
    --
    -- Note that the value @'StructPtr' 0 0 0@ is illegal, since
    -- its encoding is reserved for the "null" pointer.
    | ListPtr !Int32 !EltSpec
    -- ^ @'ListPtr' off eltSpec@ is a pointer to a list starting at
    -- offset @off@ in words from the end of the pointer. @eltSpec@
    -- encodes the C and D fields in the encoding spec; see 'EltSpec'
    -- for details
    | FarPtr !Bool !Word32 !Word32
    -- ^ @'FarPtr' twoWords off segment@ is a far pointer, whose landing
    -- pad is:
    --
    -- * two words iff @twoWords@,
    -- * @off@ words from the start of the target segment, and
    -- * in segment id @segment@.
    | CapPtr !Word32
    -- ^ @'CapPtr' id@ is a pointer to the capability with the id @id@.
    deriving (Show, Eq)

-- | The element size field in a list pointer.  Constructor order
-- matches the on-the-wire encoding (used via the derived 'Enum').
data ElementSize
    = Sz0
    | Sz1
    | Sz8
    | Sz16
    | Sz32
    | Sz64
    | SzPtr
    deriving (Show, Eq, Enum)

-- | A combination of the C and D fields in a list pointer, i.e. the element
-- size, and either the number of elements in the list, or the total number
-- of /words/ in the list (if size is composite).
data EltSpec
    = EltNormal !ElementSize !Word32
    -- ^ @'EltNormal' size len@ is a normal (non-composite) element type
    -- (C /= 7). @size@ is the size of the elements, and @len@ is the
    -- number of elements in the list.
    | EltComposite !Int32
    -- ^ @EltComposite len@ is a composite element (C == 7). @len@ is the
    -- length of the list in words.
    deriving (Show, Eq)
-- | @'parsePtr' word@ parses @word@ as a capnproto pointer.  The
-- all-zero ("null") word is parsed as 'Nothing'.
parsePtr :: Word64 -> Maybe Ptr
parsePtr word
    | word == 0 = Nothing
    | otherwise = Just (parsePtr' word)
-- | @'parsePtr'' word@ parses @word@ as a capnproto pointer. It ignores
-- nulls, returning them the same as @(StructPtr 0 0 0)@.
parsePtr' :: Word64 -> Ptr
parsePtr' word =
    -- The low two bits are the pointer-kind tag.
    case bitRange word 0 2 :: Word64 of
        0 -> StructPtr
            (i30 (lo word))
            (bitRange word 32 48)
            (bitRange word 48 64)
        1 -> ListPtr
            (i30 (lo word))
            (parseEltSpec word)
        2 -> FarPtr
            (toEnum (bitRange word 2 3))
            (bitRange word 3 32)
            (bitRange word 32 64)
        3 -> CapPtr (bitRange word 32 64)
        -- bitRange word 0 2 can only be 0..3.
        _ -> error "unreachable"

-- | @'serializePtr' ptr@ serializes the pointer as a 'Word64', translating
-- 'Nothing' to a null pointer.
--
-- This also changes the offset of zero-sized struct pointers to -1, to avoid
-- them being interpreted as null.
serializePtr :: Maybe Ptr -> Word64
serializePtr Nothing = 0
serializePtr (Just p@(StructPtr (-1) 0 0)) =
    serializePtr' p
serializePtr (Just (StructPtr _ 0 0)) =
    -- We need to handle this specially, for two reasons.
    --
    -- First, if the offset is zero, the the normal encoding would be interpreted
    -- as null. We can get around this by changing the offset to -1, which will
    -- point immediately before the pointer, which is always a valid position --
    -- and since the size is zero, we can stick it at any valid position.
    --
    -- Second, the canonicalization algorithm requires that *all* zero size structs
    -- are encoded this way, and doing this for all offsets, rather than only zero
    -- offsets, avoids needing extra logic elsewhere.
    serializePtr' (StructPtr (-1) 0 0)
serializePtr (Just p) =
    serializePtr' p
-- | @'serializePtr'' ptr@ serializes the pointer as a Word64.
--
-- Unlike 'serializePtr', this results in a null pointer on the input
-- @(StructPtr 0 0 0)@, rather than adjusting the offset.
serializePtr' :: Ptr -> Word64
serializePtr' (StructPtr off dataSz ptrSz) =
    -- 0 .|.
    fromLo (fromI30 off) .|.
    (fromIntegral dataSz `shiftL` 32) .|.
    (fromIntegral ptrSz `shiftL` 48)
serializePtr' (ListPtr off eltSpec) = -- eltSz numElts) =
    1 .|.
    fromLo (fromI30 off) .|.
    serializeEltSpec eltSpec
serializePtr' (FarPtr twoWords off segId) =
    2 .|.
    (fromIntegral (fromEnum twoWords) `shiftL` 2) .|.
    (fromIntegral off `shiftL` 3) .|.
    (fromIntegral segId `shiftL` 32)
serializePtr' (CapPtr index) =
    3 .|.
    -- (fromIntegral 0 `shiftL` 2) .|.
    (fromIntegral index `shiftL` 32)

-- | @'parseEltSpec' word@ reads the 'EltSpec' from @word@, which must be the
-- encoding of a list pointer (this is not verified).
parseEltSpec :: Word64 -> EltSpec
parseEltSpec word = case bitRange word 32 35 of
    -- C == 7 marks a composite element; D is then a word count.
    7  -> EltComposite (i29 (hi word))
    sz -> EltNormal (toEnum sz) (bitRange word 35 64)

-- | @'serializeEltSpec' eltSpec@ serializes @eltSpec@ as a 'Word64'. all bits
-- which are not determined by the 'EltSpec' are zero.
serializeEltSpec :: EltSpec -> Word64
serializeEltSpec (EltNormal sz len) =
    (fromIntegral (fromEnum sz) `shiftL` 32) .|.
    (fromIntegral len `shiftL` 35)
serializeEltSpec (EltComposite words) =
    (7 `shiftL` 32) .|.
    fromHi (fromI29 words)
| zenhack/haskell-capnp | lib/Capnp/Pointer.hs | mit | 5,860 | 0 | 12 | 1,469 | 960 | 527 | 433 | 112 | 5 |
{-# LANGUAGE MultiParamTypeClasses #-}
module Y2017.M10.D25.Solution where
{--
Today, we're going to divide the topic you chose into subtopics, and then
optionally, view those subtopics as a graph, or retrieve articles in the
subtopic by index (which also works in the graph-structure, too, but in this
case the graph is specialized to a map.
Okay, yesterday, we computed the subtopics by counting instances, but today,
we want the more general grouping of subtopics by object.
--}
import Control.Arrow ((&&&))
import Control.Monad
import Data.List (isInfixOf)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import Data.Set (Set)
-- below imports available via 1HaskellADay git repository.
-- import Control.Logic.Frege ((-|))
import Data.Hierarchy
import Data.Relation
import Graph.Query (cyphIt, graphEndpoint, Endpoint)
import Y2017.M10.D04.Solution (ArticleSummary (ArtSum), Topic)
import Y2017.M10.D05.Solution (visualize)
import Y2017.M10.D20.Solution -- for article groupings and graphing
import Y2017.M10.D23.Solution -- Article
import Y2017.M10.D24.Solution -- to get the articles we're studying from file
hiding (hasSubcategory, categorize, seed, categorizor)
-- So, we want to subcategorize articles using the categorizer, but grab
-- the whole article, not just a count
-- First lets map the articles for indexing by their, well, index:
-- | Articles keyed by their index, so each subcategory bin is a map.
type IxArts = Map Integer ArticleSummary
-- | Index articles by 'artIdx', summarizing each one on the way in.
indexArticles :: [Article] -> IxArts
indexArticles = Map.fromList . map (artIdx &&& summarize)
-- you fetch the articles from file using articlesFromFile
-- but that also means we need to translate Article values to ArticleSummary
-- | Reduce a full 'Article' to the summary triple kept in the index:
-- its index, title, and publication date.
summarize :: Article -> ArticleSummary
summarize article =
   ArtSum (artIdx article) (title article) (published article)
-- now that we have our articles nicely indexed, let's (sub)categorize them
-- | Succeed (with an insertion function for the article) iff the article's
-- full text mentions @storm@; 'mzero' otherwise.
hasSubcategory :: MonadPlus m => String -> Article -> m (IxArts -> IxArts)
hasSubcategory storm art =
   guard (isInfixOf storm (fullText art)) >> inserter art
-- | An index-map endomorphism that files the summarized article under its index.
inserter :: MonadPlus m => Article -> m (IxArts -> IxArts)
inserter art = pure (Map.insert (artIdx art) (summarize art))
-- adds this article to, e.g.: "Maria" subcategory if it mentions, e.g.: "Maria"
-- which means we need a starter function:
-- | Four empty bins, matching the order used in 'categorizor':
-- Maria, Harvey, Irma, and a catch-all "none" bin.
seed :: Subcategory IxArts
seed = replicate 4 Map.empty
-- now we can categorize hurricane articles by each storm
-- or, more generally, categorize articles into subcategories
-- | File each article into the per-storm bins it mentions; an article that
-- mentions no storm goes into the fourth ("none") bin.  An article may land
-- in several bins at once.
categorizor :: [Article] -> Subcategory IxArts
categorizor = foldr (\art ->
   let maria = hasSubcategory "Maria" art
       harvey = hasSubcategory "Harvey" art
       irma = hasSubcategory "Irma" art
       none = case msum [maria, harvey, irma] of
             -- case msum is saying: "Are all of these mzero?"
             -- i.e. the article matched no storm, so it goes in the
             -- catch-all bin; otherwise the catch-all gets nothing.
             Nothing -> inserter art
             Just _ -> Nothing
   -- Apply each bin's insertion (when it fired) to that bin's map;
   -- 'fromMaybe a' leaves a bin untouched when its match failed.
   in zipWith (\m a -> fromMaybe a (m <*> pure a))
              [maria, harvey, irma, none]) seed
-- puts each article into its respective subcategory bin
-- n.b. an article can be in multiple bins.
-- With the data sets you derived from the NYT archive, take the category
-- you chose and break it into subcategories that make sense.
-- Now you've got groups of related content you can browse.
{-- BONUS -----------------------------------------------------------------
Create a data visualization from your results, for example as a circle chart
or as a graph
E.g. use Y2017.M10.D05.Exercise.visualize or Y2017.M10.D20.uploadArticles2Graph
--}
-- Okay. Here we go.
-- | Node shapes for the article graph: a single archive root, topics under
-- it, subtopics (with a child count) under those, and article leaves.
data ArticleNode = Root | Subject Topic | SubTopic Topic Int | News ArticleSummary
   deriving (Eq, Show)
-- Render each node as the label/property fragment used in the Cypher upload.
instance Node ArticleNode where
  asNode Root = "ARCHIVE { name: 'NYT' }"
  asNode (Subject top) = "TOPIC { name: '" ++ top ++ "' }"
  asNode (SubTopic top n) =
    "SUBTOPIC { name: '" ++ top ++ "', children: " ++ show n ++ " }"
  asNode (News (ArtSum a b c)) =
    -- 'clean' presumably escapes quotes in the title before splicing it
    -- into the Cypher text -- confirm against its definition upstream.
    "ARTICLE { id: " ++ show a ++ ", title: '" ++ clean b
    ++ "', published: '" ++ show c ++ "' }"
-- | Edge labels for the three parent/child levels of the graph.
data ArticleRel = TOPIC | SUBTOPIC | ARTICLE deriving (Eq, Show)
instance Edge ArticleRel where asEdge = show
-- NOTE(review): intentionally partial -- only the three parent/child
-- pairings that the hierarchy produces are covered; any other pairing is a
-- pattern-match failure at runtime.
instance Relatable ArticleNode ArticleNode ArticleRel where
  relate Root a@(Subject _) = Rel Root TOPIC a
  relate a@(Subject _) b@(SubTopic _ _) = Rel a SUBTOPIC b
  relate a@(SubTopic _ _) b@(News _) = Rel a ARTICLE b
-- So, now, we convert our map of topics to subtopics (and their articles)
-- to a hierarchy and graph.
-- | Convert the topic -> subtopic -> articles map into a rooted hierarchy.
arts2hier :: Map Topic (Map Topic [ArticleSummary]) -> Hierarchy ArticleNode
arts2hier = Hier Root . Kids . map mkTopicHier . Map.toList
-- | One topic subtree: the topic node over its subtopic subtrees.
mkTopicHier :: (Topic, Map Topic [ArticleSummary]) -> Hierarchy ArticleNode
mkTopicHier (top, arts) =
   Hier (Subject top) (Kids (map mkSubtopics (Map.toList arts)))
-- | One subtopic subtree: records the article count, with each article as
-- a leaf of size 1.
mkSubtopics :: (Topic, [ArticleSummary]) -> Hierarchy ArticleNode
mkSubtopics (top, arts) =
   Hier (SubTopic top (length arts)) (Kids (map (flip Hier (Size 1) . News) arts))
-- | Build the hierarchy, flatten it to relations, and upload to the graph
-- endpoint; returns the endpoint's response body.
graphCategories :: Endpoint -> Map Topic (Map Topic [ArticleSummary]) -> IO String
graphCategories url = cyphIt url . hier2rel . arts2hier
-- which means we have to load in the topics then map topics to subtopics and
-- subtopics to articles
{--
>>> hurr <- articlesFromFile "Y2017/M10/D24/hurricanes.json.gz"
>>> title $ head hurr
"In Sweltering South, Climate Change Is Now a Workplace Hazard"
>>> flood <- articlesFromFile "Y2017/M10/D24/floods.json.gz"
>>> rain <- articlesFromFile "Y2017/M10/D24/rains.json.gz"
>>> rainCat = categorizor rain
>>> floodCat = categorizor flood
>>> hurrCat = categorizor hurr
>>> subCat2Map = Map.fromList . zip ["Maria", "Harvey", "Irma", "None"] . map Map.elems
>>> megaMap = Map.fromList (zip ["Hurricanes", "Floods", "Rain"] (map subCat2Map [hurrCat, floodCat, rainCat]))
>>> url <- graphEndpoint
>>> graphCategories url megaMap
... \"errors\": []}
And we have our graph. YES!
Let's formalize the above (for hurricanes, anyway):
--}
-- | Label the four positional bins (same order as 'seed'/'categorizor')
-- and drop the index keys, keeping only the article summaries.
subCat2Map :: Subcategory IxArts -> Map Topic [ArticleSummary]
subCat2Map = Map.fromList . zip (words "Maria Harvey Irma None") . map Map.elems
-- | End-to-end run: load the three article archives, bin each by storm,
-- label the bins, and upload the resulting topic/subtopic/article graph.
hurricaneAnalysis :: IO String
hurricaneAnalysis = do
   hurr <- articlesFromFile "Y2017/M10/D24/hurricanes.json.gz"
   flood <- articlesFromFile "Y2017/M10/D24/floods.json.gz"
   rain <- articlesFromFile "Y2017/M10/D24/rains.json.gz"
   let rainCat = categorizor rain
       floodCat = categorizor flood
       hurrCat = categorizor hurr
       megaMap = Map.fromList (zip (words "Hurricanes Floods Rain")
                                   (map subCat2Map [hurrCat, floodCat, rainCat]))
   url <- graphEndpoint
   graphCategories url megaMap
{--
>>> hurricaneAnalysis
...,{\"columns\":[],\"data\":[]}],\"errors\":[]}\n"
... in the neo4j browser:
match (t:TOPIC { name: 'Hurricanes' })-[]->(s:SUBTOPIC {name: 'Harvey'})-[]->(a:ARTICLE)
return a.id, a.published, a.title ORDER BY a.published DESC
a.id a.published a.title
7729 2017-09-23 When Disaster Hits and Landlines Fail, Social Media Is a Lifeline
7570 2017-09-22 How to Help Puerto Rico and Other Islands After Hurricane Maria
7454 2017-09-21 How to Avoid Buying a Car Flooded by Hurricanes
7024 2017-09-19 The 2017 Hurricane Season Really Is More Intense Than Normal
7048 2017-09-19 Harvey and Irma Wiped Out Our Kitchens. Still, We Cook.
6873 2017-09-18 How the Internet Kept Humming During 2 Hurricanes
6875 2017-09-18 The Real Unknown of Climate Change: Our Behavior
...
GRAPH, HO!
Here's how to read an article from the graph:
>>> hurmap = Map.fromList (map (artIdx &&& fullText) hurr)
>>> hurmap Map.! 3225
and there's article 3225
--}
| geophf/1HaskellADay | exercises/HAD/Y2017/M10/D25/Solution.hs | mit | 7,634 | 0 | 16 | 1,415 | 1,330 | 713 | 617 | 80 | 2 |
module MPCH.MPD where
import MPCH.Config (Config, host, password, port)
import qualified Network.MPD as MPD
-- | Run an MPD action against the host/port named in the configuration,
-- sending the configured password (if any) before the action itself.
mpd :: Config -> MPD.MPD a -> IO (MPD.Response a)
mpd config action =
    MPD.withMPD_ (host config) (port config) (sendPassword >> action)
  where
    -- No configured password means there is nothing to send.
    sendPassword = maybe (return ()) MPD.password (password config)
| mineo/mpch | MPCH/MPD.hs | mit | 358 | 0 | 10 | 101 | 136 | 72 | 64 | 9 | 1 |
module Euler.E47 where
import Data.List (nub, isPrefixOf)
import Euler.Lib (primeFactors)
-- | Project Euler 47: the first of @n@ consecutive integers that each have
-- exactly @n@ distinct prime factors (0 if the search fails).
euler47 :: Int -> Int
euler47 n = findSeq n $ validNums n
-- | Find the first element that begins a run of @n@ consecutive integers
-- sitting as a prefix of the (ascending) list; return 0 when no run exists.
findSeq :: Int -> [Int] -> Int
findSeq n = go
  where
    go [] = 0
    go whole@(y:rest)
      | take n whole == [y .. y + n - 1] = y
      | otherwise                        = go rest
-- | Infinite ascending list of positive integers having exactly @n@
-- distinct prime factors.
validNums :: Int -> [Int]
validNums n = [ x | x <- [1 ..], hasNFactors n x ]
-- | True iff @x@ has exactly @n@ distinct prime factors.
hasNFactors :: Int -> Int -> Bool
hasNFactors n x = length (nub (primeFactors x)) == n
-- | Solve Project Euler 47 for runs of four consecutive integers.
main :: IO ()
main = print $ euler47 4
| D4r1/project-euler | Euler/E47.hs | mit | 531 | 2 | 9 | 136 | 253 | 134 | 119 | 17 | 1 |
module PreludeSpec where
import Test.Hspec
import Test.QuickCheck
-- | Check that a composed pipeline and its hand-expanded equivalent agree
-- on every 'Int' input.
spec :: Spec
spec =
  describe "composition" $ do
    let viaCompose = (+ 10) . (* 2)
        byHand n = n * 2 + 10
    it "should always return the same" $
      property $ \n -> viaCompose n == byHand (n :: Int)
| zsedem/haskell-playground | human-prelude/test/PreludeSpec.hs | mit | 294 | 1 | 13 | 90 | 106 | 57 | 49 | 10 | 1 |
{-# LANGUAGE
RecordWildCards
#-}
{-|
Module : Test.Problem.Instances.CNF.Builder.Internal
Description : The tests for the Internal CNFBuilder module
Copyright : (c) Andrew Burnett 2014-2015
Maintainer : andyburnett88@gmail.com
Stability : experimental
Portability : Unknown
Exports the tests for the Internal CNFBuilder module
-}
module Test.Problem.Instances.CNF.Builder.Internal (
tests , -- TestTree
genCNFBuilderEmptyClause, -- Int -> Gen CNFBuilder
genCNFBuilderLitInClause, -- Int -> Gen CNFBuilder
genCNFBuilderFinalise -- Int -> Gen CNFBuilder
) where
import qualified Data.Vector as V
import HSat.Problem.Instances.CNF.Builder.Internal
import HSat.Problem.Instances.Common
import Test.Problem.Instances.Common.Clause (genClause)
import Test.Problem.Instances.Common.Clauses (genClauses)
import Test.Problem.Instances.Common.Literal (genLiteral)
import TestUtils
import TestUtils.Validate
-- | Name of this test group (the module under test).
name :: String
name = "Internal"
-- | All tests for the internal CNFBuilder module, grouped by the predicate
-- they exercise.
tests :: TestTree
tests =
  testGroup name [
    cnfBuilderTest1,
    cnfBuilderError1,
    testGroup "canAddLiteral" [
      canAddLiteralTest1,
      canAddLiteralTest2
      ],
    testGroup "canFinalise" [
      canFinaliseTest1,
      canFinaliseTest2
      ],
    testGroup "canFinishClause" [
      canFinishClauseTest1,
      canFinishClauseTest2
      ]
    ]
-- | Every arbitrarily generated 'CNFBuilder' satisfies its 'validate'
-- invariants.
cnfBuilderTest1 :: TestTree
cnfBuilderTest1 =
  testProperty "validate arbitrary CNFBuilder" $ property testCNFBuilder
  where
    testCNFBuilder :: CNFBuilder -> Bool
    testCNFBuilder = validate
-- | Every arbitrarily generated 'CNFBuilderError' satisfies its 'validate'
-- invariants.
cnfBuilderError1 :: TestTree
cnfBuilderError1 =
  testProperty "validate arbitrary CNFBuilderError" $ property testCNFBuilderError
  where
    testCNFBuilderError :: CNFBuilderError -> Bool
    testCNFBuilderError = validate
-- Each property below pairs a generator of builders in a known state with
-- the predicate under test: builders mid-construction should accept
-- literals and clause-finishes but not finalisation; builders on their
-- final clause should finalise and nothing else.
canAddLiteralTest1 :: TestTree
canAddLiteralTest1 =
  testProperty ("canAddLiteral " `equiv` " True on valid CNFBuilder") $
    forAll
    (oneof [sized genCNFBuilderEmptyClause,
            sized genCNFBuilderLitInClause
           ]
    )
    canAddLiteral
canAddLiteralTest2 :: TestTree
canAddLiteralTest2 =
  testProperty ("canAddLiteral " `equiv`" False on invalid CNFBuilder") $
    forAll
    (sized genCNFBuilderFinalise)
    (not . canAddLiteral)
canFinaliseTest1 :: TestTree
canFinaliseTest1 =
  testProperty ("canFinalise " `equiv` " True on CNFBuilder on final clause") $
    forAll
    (sized genCNFBuilderFinalise)
    canFinalise
canFinaliseTest2 :: TestTree
canFinaliseTest2 =
  testProperty ("canFinalise " `equiv` " False on non-final clause builder") $
    forAll
    (oneof [sized genCNFBuilderEmptyClause,
            sized genCNFBuilderLitInClause])
    (not . canFinalise)
canFinishClauseTest1 :: TestTree
canFinishClauseTest1 =
  testProperty ("canFinishClause " `equiv` " True on valid CNFBuilder") $
    forAll
    (oneof [
        sized genCNFBuilderEmptyClause ,
        sized genCNFBuilderLitInClause
        ])
    canFinishClause
canFinishClauseTest2 :: TestTree
canFinishClauseTest2 =
  testProperty ("canFinishClause " `equiv` " False on invalid CNFBuilder") $
    forAll
    (sized genCNFBuilderFinalise)
    (not . canFinishClause)
-- A builder is valid when its clause count never exceeds the expected
-- count, the cached count matches a recount ('sizeFunc'), every variable
-- (finished or in-progress) is within the declared maximum, and the
-- underlying Clauses/Clause values validate themselves.
instance Validate CNFBuilder where
  validate CNFBuilder{..} =
    let computedSize = sizeFunc getCurrClauses getCurrClause
    in (getExptdClNumb >= getCurrClNumb) &&
       V.all testVarInRange (getVectClause getCurrClauses) &&
       (computedSize == getCurrClNumb) &&
       testVarInRange getCurrClause &&
       validate getCurrClauses &&
       validate getCurrClause
    where
      testVarInRange :: Clause -> Bool
      testVarInRange cl = V.all (varInRange getExptdMaxVar) .
                          V.map getVariable $ getVectLiteral cl
-- An error value is valid only when it describes a situation that is
-- actually erroneous (counts genuinely differ, the variable really is out
-- of range or zero, the initialisation arguments really are out of bounds).
instance Validate CNFBuilderError where
  validate (IncorrectClauseNumber gotten expected) =
    expected /= gotten
  validate (VarOutsideRange gotten expected) =
    (toInteger expected < gotten) ||
    (gotten == 0)
  validate (Initialisation variables clauses) =
    (variables < 0) || (clauses < 0) ||
    (variables > maxWord) || (clauses > maxWord)
    where
      maxWord = toInteger (maxBound :: Word)
-- | Generate a builder in any of the three states the tests exercise.
genCNFBuilder :: Int -> Gen CNFBuilder
genCNFBuilder size =
  oneof $ map (\f -> f size) [
    genCNFBuilderFinalise,
    genCNFBuilderEmptyClause,
    genCNFBuilderLitInClause
    ]
instance Arbitrary CNFBuilder where
  arbitrary = sized genCNFBuilder
  -- Shrink the clause store; the cached clause count is recomputed with
  -- 'sizeFunc' so the shrunken builder stays internally consistent.
  shrink CNFBuilder{..} =
    let mkBuilder (vect, clause) =
          let size = sizeFunc vect clause
          in CNFBuilder getExptdMaxVar getExptdClNumb size vect clause
    in map mkBuilder $ shrink (getCurrClauses,getCurrClause)
-- | Number of clauses a builder holds: the finished clauses plus one for
-- the clause under construction, unless that clause is still empty.
sizeFunc :: Clauses -> Clause -> Word
sizeFunc finished current
  | clauseIsEmpty current = getSizeClauses finished
  | otherwise             = getSizeClauses finished + 1
instance Arbitrary CNFBuilderError where
  arbitrary = genCNFBuilderError
  -- Shrunk candidates are filtered through 'validate' so shrinking never
  -- produces an error value that no longer describes an error.
  shrink (IncorrectClauseNumber
          gotten
          expected) =
    filter validate . map (uncurry IncorrectClauseNumber) $
    shrink (gotten,expected)
  shrink (VarOutsideRange
          gotten
          expected) =
    filter validate . map (uncurry VarOutsideRange) $
    shrink (gotten,expected)
  shrink (Initialisation vars clauses) =
    filter validate . map (uncurry Initialisation) $
    shrink (vars,clauses)
-- | Generate a builder whose expected clause count has been reached, so the
-- only legal next step is finalisation.
genCNFBuilderFinalise :: Int -> Gen CNFBuilder
genCNFBuilderFinalise size = do
  maxVar' <- toEnum <$> choose (1,size)
  clauses <- genClauses maxVar' size
  --Either return what we set as the maximum, or find the true maximum
  maxVar <- oneof [
    return maxVar',
    return $ findMaxVar clauses
    ]
  let sizeClauses = getSizeClauses clauses
      builder =
        CNFBuilder maxVar sizeClauses sizeClauses clauses emptyClause
  return builder
{-
This function generates a triple which consists of:
A randomly generated Clauses type
A random Word that is strictly above the size of Clauses
A random Word that is strictly above the maximum Variable in Clauses
-}
genBuilderHelper :: Int -> Gen (Clauses,Word,Word)
genBuilderHelper size = do
  maxVar' <- toEnum . (1+) <$> choose (0,size)
  clauses <- genClauses maxVar' size
  let baseVal = 1 + getSizeClauses clauses
  targetSize <- (baseVal +) . toEnum <$> choose (0,size)
  maxVar <- ((+) maxVar' . toEnum ) <$> choose (1,size)
  return (clauses,targetSize,maxVar)
{-
Generate a CNFBuilder with an empty Clauses. There should be at least a single
Clause left to add
-}
-- | Generate a builder whose current clause is empty and whose target
-- clause count has not yet been reached, so more clauses may be added.
genCNFBuilderEmptyClause :: Int -> Gen CNFBuilder
genCNFBuilderEmptyClause size = do
  (clauses,targetSize,maxVar) <- genBuilderHelper size
  let builder =
        CNFBuilder maxVar targetSize
        (getSizeClauses clauses) clauses emptyClause
  return builder
{-
Generate a CNFBuilder. We should be on a clause, where we can still add
literals
-}
-- | Generate a builder part-way through a clause: the current clause holds
-- at least one literal, and both counts are bumped to account for it.
genCNFBuilderLitInClause :: Int -> Gen CNFBuilder
genCNFBuilderLitInClause size = do
  (clauses,targetSize,maxVar) <- genBuilderHelper size
  literal <- genLiteral maxVar
  clause <- flip clauseAddLiteral literal <$>
            genClause maxVar size
  let clauseSize = getSizeClauses clauses
      builder = CNFBuilder
                maxVar (targetSize+1) (clauseSize+1) clauses clause
  return builder
-- | Generate an error value that genuinely describes an error: either a
-- clause count that differs from the expectation (gotten >= 1 guarantees
-- the sum differs), or a variable strictly above the declared maximum.
genCNFBuilderError :: Gen CNFBuilderError
genCNFBuilderError =
  oneof [
    do
      expected <- choose (0,maxBound)
      gotten <- choose (1,maxBound)
      let val = IncorrectClauseNumber (expected+gotten) expected
      return val
      ,
    do
      expected <- choose (0,maxBound - 1)
      gotten <- choose (expected + 1, maxBound)
      let val = VarOutsideRange (toInteger gotten) expected
      return val
    ]
| aburnett88/HSat | tests-src/Test/Problem/Instances/CNF/Builder/Internal.hs | mit | 7,860 | 0 | 15 | 1,920 | 1,753 | 912 | 841 | 183 | 2 |
{-# LANGUAGE ExistentialQuantification, FlexibleInstances, Rank2Types #-}
------------------------------------------------------------------------------
module Snap.Snaplet.Rest.Resource.Media
(
-- * Type
Media (..)
, newMedia
, newResponseMedia
, newRequestMedia
, newIntermediateMedia
-- * Setters
, MediaSetter
, fromResource
, toResource
, toDiff
, toEither
, fromResourceList
, toResourceList
-- * Common instances
, json
, jsonFromInstances
, xml
, xhtml
, html
, form
, multipart
) where
------------------------------------------------------------------------------
import qualified Blaze.ByteString.Builder as BB
import qualified Data.ByteString.Lazy as LBS
import qualified Data.ByteString.UTF8 as BS
import qualified Text.XmlHtml as Xml
------------------------------------------------------------------------------
import Control.Lens
import Control.Monad
import Data.Aeson hiding (json)
import Data.ByteString (ByteString)
import Network.HTTP.Media (MediaType)
import Snap.Core
import Text.XmlHtml (Document)
------------------------------------------------------------------------------
-- | A grouping of mediatypes and their associated renderers and parsers. You
-- can use the standard instances defined below, or define your own.
-- | A grouping of mediatypes and their associated renderers and parsers.
-- Conversions go through an intermediate type @int@ (e.g. a JSON 'Value');
-- the last two fields pair the supported mediatypes with the raw
-- serializer/deserializer for that intermediate.
data Media res m diff int = Media
    { _fromResource     :: Maybe (res -> m int)
    , _toResource       :: Maybe (int -> m (Maybe res))
    , _toDiff           :: Maybe (int -> m (Maybe diff))
    , _fromResourceList :: Maybe ([res] -> m int)
    , _toResourceList   :: Maybe (int -> m (Maybe [res]))
    , responseMedia     :: Maybe ([MediaType], int -> m ByteString)
    , requestMedia      :: Maybe ([MediaType], ByteString -> m (Maybe int))
    }
------------------------------------------------------------------------------
-- | Convenience class that allows 'serialize' and 'parse' to be implemented
-- with a default for some types.
-- | Convenience class giving default serialize/parse functions for
-- intermediate types that are (nearly) raw bytes already.
class Intermediate int where
    defaultFrom :: MonadSnap m => int -> m ByteString
    defaultTo   :: MonadSnap m => ByteString -> m (Maybe int)
-- Bytes pass through untouched.
instance Intermediate ByteString where
    defaultFrom = return
    defaultTo   = return . Just
-- Strings are encoded/decoded as UTF-8.
instance Intermediate String where
    defaultFrom = return . BS.fromString
    defaultTo   = return . Just . BS.toString
------------------------------------------------------------------------------
-- | Construct a new media grouping with the given response and request
-- mediatypes.
-- | Construct a new media grouping with the given response and request
-- mediatypes, using the 'Intermediate' defaults for (de)serialization.
newMedia
    :: (Intermediate int, MonadSnap m) => [MediaType] -> [MediaType]
    -> Media res m diff int
newMedia = newIntermediateMedia defaultFrom defaultTo
------------------------------------------------------------------------------
-- | Construct a new media grouping with response mediatypes only.
-- | Construct a new media grouping with response mediatypes only
-- (an empty mediatype list yields no response support at all).
newResponseMedia
    :: (int -> m ByteString) -> [MediaType] -> Media res m diff int
newResponseMedia a b =
    Media Nothing Nothing Nothing Nothing Nothing (notEmpty b a) Nothing
------------------------------------------------------------------------------
-- | Construct a new media grouping with request mediatypes only
-- (an empty mediatype list yields no request support at all).
newRequestMedia
    :: (ByteString -> m (Maybe int)) -> [MediaType]
    -> Media res m diff int
newRequestMedia a b =
    Media Nothing Nothing Nothing Nothing Nothing Nothing (notEmpty b a)
------------------------------------------------------------------------------
-- | Construct a new media grouping with an intermediate type between the
-- resource and the rendered form.
-- | Construct a new media grouping with an intermediate type between the
-- resource and the rendered form; @a@/@b@ serialize and parse that
-- intermediate, @x@/@y@ are the response and request mediatypes.
newIntermediateMedia
    :: (int -> m ByteString) -> (ByteString -> m (Maybe int))
    -> [MediaType] -> [MediaType] -> Media res m diff int
newIntermediateMedia a b x y = Media
    Nothing Nothing Nothing Nothing Nothing (notEmpty x a) (notEmpty y b)
------------------------------------------------------------------------------
-- | Pair a mediatype list with its handler, or 'Nothing' when the list is
-- empty (no mediatypes means the direction is unsupported).
notEmpty :: [a] -> f -> Maybe ([a], f)
notEmpty types handler
    | null types = Nothing
    | otherwise  = Just (types, handler)
------------------------------------------------------------------------------
-- | A 'Setter' for defining properties of a media grouping.
-- | A 'Setter' for defining properties of a media grouping; the functor
-- @f@ is the wrapper (usually 'Maybe') the raw field is stored under.
type MediaSetter res m diff int f a = Setter
    (Media res m diff int) (Media res m diff int) (f a) a
------------------------------------------------------------------------------
-- | Set the resource renderer.
fromResource :: MediaSetter res m diff int Maybe (res -> m int)
fromResource f m = f (_fromResource m) <&> \g -> m { _fromResource = Just g }
------------------------------------------------------------------------------
-- | Set the resource parser.
-- | Set the resource parser.
toResource :: MediaSetter res m diff int Maybe (int -> m (Maybe res))
toResource f m = f (_toResource m) <&> \g -> m { _toResource = Just g }
------------------------------------------------------------------------------
-- | Set the diff parser.
toDiff :: MediaSetter res m diff int Maybe (int -> m (Maybe diff))
toDiff f m = f (_toDiff m) <&> \g -> m { _toDiff = Just g }
------------------------------------------------------------------------------
-- | Set the resource and diff parser at the same time (only possible when
-- the diff type equals the resource type).
toEither :: MediaSetter res m res int Both (int -> m (Maybe res))
toEither f m = f (_toResource m, _toDiff m) <&> \g -> m
    { _toResource = Just g
    , _toDiff = Just g
    }
-- Both current values of the paired parsers, fed to the setter's function.
type Both a = (Maybe a, Maybe a)
------------------------------------------------------------------------------
-- | Set the resource list renderer.
-- | Set the resource list renderer.
fromResourceList :: MediaSetter res m diff int Maybe ([res] -> m int)
fromResourceList f m =
    f (_fromResourceList m) <&> \g -> m { _fromResourceList = Just g }
------------------------------------------------------------------------------
-- | Set the resource list parser.
toResourceList :: MediaSetter res m diff int Maybe (int -> m (Maybe [res]))
toResourceList f m =
    f (_toResourceList m) <&> \g -> m { _toResourceList = Just g }
------------------------------------------------------------------------------
-- | Outputs JSON in UTF-8 and parses JSON agnostic of character set.
-- | Outputs JSON in UTF-8 and parses JSON agnostic of character set; the
-- intermediate type is Aeson's 'Value'.
json :: Monad m => Media res m diff Value
json = newIntermediateMedia
    (return . LBS.toStrict . encode) (return . decodeStrict)
    ["application/json; charset=utf-8"] ["application/json"]
------------------------------------------------------------------------------
-- | Outputs JSON in UTF-8 and parses JSON agnostic of character set. Uses
-- the type class instances to automatically set the media methods.
-- | Like 'json', but all conversion fields are filled in automatically
-- from the resource's 'ToJSON'/'FromJSON' instances.
jsonFromInstances
    :: (Monad m, ToJSON res, FromJSON res, FromJSON diff)
    => Media res m diff Value
jsonFromInstances = Media
    (Just (return . toJSON))
    (Just (return . resultToMaybe . fromJSON))
    (Just (return . resultToMaybe . fromJSON))
    (Just (return . toJSON))
    (Just (return . resultToMaybe . fromJSON))
    (Just (["application/json; charset=utf-8"],
        return . LBS.toStrict . encode))
    (Just (["application/json"], return . decode . LBS.fromStrict))
------------------------------------------------------------------------------
-- | Collapse an Aeson parse 'Result' into a 'Maybe', discarding the
-- error message.
resultToMaybe :: Result a -> Maybe a
resultToMaybe res = case res of
    Success val -> Just val
    Error _     -> Nothing
------------------------------------------------------------------------------
-- | Outputs XML in UTF-8 and parses XML agnostic of character set.
-- | Outputs XML in UTF-8 and parses XML agnostic of character set; the
-- intermediate type is an xmlhtml 'Document'.
xml :: Monad m => Media res m diff Document
xml = newIntermediateMedia
    (return . BB.toByteString . Xml.render)
    (return . either (const Nothing) Just . Xml.parseXML "")
    ["application/xml; charset=utf-8"] ["application/xml"]
------------------------------------------------------------------------------
-- | Supports both XHTML and HTML in UTF-8 as the output format only.
-- Recommended over 'html' if the output will be valid XHTML.
-- | Supports both XHTML and HTML in UTF-8 as the output format only.
-- Recommended over 'html' if the output will be valid XHTML.
xhtml :: MonadSnap m => Media res m diff ByteString
xhtml = newMedia
    ["application/xhtml+xml; charset=utf-8", "text/html; charset=utf-8"] []
------------------------------------------------------------------------------
-- | Supports HTML in UTF-8 as the output format only. Use 'xhtml' if the
-- output is guaranteed to be well formed.
html :: MonadSnap m => Media res m diff ByteString
html = newMedia ["text/html; charset=utf-8"] []
------------------------------------------------------------------------------
-- | Supports URL-encoded web forms as the input format only.
-- | Supports URL-encoded web forms as the input format only; the request
-- body is ignored in favour of Snap's already-parsed 'Params'.
form :: MonadSnap m => Media res m diff Params
form = newRequestMedia (const $ fmap Just getParams)
    ["application/x-www-form-urlencoded"]
------------------------------------------------------------------------------
-- | Supports multipart web forms as the input format only.
multipart :: MonadSnap m => Media res m diff ByteString
multipart = newMedia [] ["multipart/form-data"]
| zmthy/snaplet-rest | src/Snap/Snaplet/Rest/Resource/Media.hs | mit | 8,726 | 0 | 15 | 1,502 | 1,935 | 1,055 | 880 | 123 | 1 |
{-# LANGUAGE OverloadedStrings, FlexibleInstances, TypeFamilies #-}
{-# LANGUAGE DataKinds, TypeOperators, GADTs #-}
module Haste.JQuery where
import Data.Char
import Data.List (intercalate)
import GHC.TypeLits

import Haste hiding (AttrName)
import Haste.Foreign
-- | Opaque wrapper around the JS-side jQuery object.
newtype JQuery = JQuery JSAny
fromJQuery :: JQuery -> JSAny
fromJQuery (JQuery q) = q
-- Readability aliases for the stringly-typed jQuery API surface.
type Selector = String
type ClassName = String
type AttrName = String
type PropName = String
type HTML = String
type Plain = String
-- Internal
-- chain :: JSString -> x1 -> .. -> xn -> JSAny -> IO a
-- chain method = function(xs,q){ return q.method(xs); }
-- | Build the JavaScript source for a wrapper that calls @q.method(...)@
-- with @n@ positional arguments, e.g. for @n = 2@ and @"css"@:
-- @"(function(x1,x2,q){ return q.css(x1,x2); })"@.
--
-- Rewritten with 'intercalate': the original built a trailing-comma string
-- and chopped it with the partial function 'init'.
chainFFI :: Int -> String -> String
chainFFI 0 method = "(function(q){ return q." ++ method ++ "(); })"
chainFFI n method =
  "(function(" ++ args ++ ",q){ return q." ++ method ++ "(" ++ args ++ "); })"
  where
    -- "x1,x2,...,xn" -- the formal parameters forwarded to the method.
    args = intercalate "," [ 'x' : show i | i <- [1 .. n] ]
-- | Overload 'chain' on arity: each instance wires 'chainFFI' to an FFI
-- call taking the corresponding number of marshalled arguments before the
-- jQuery object itself.
class ChainMethod r where
  chain :: String -> r
instance FromAny a => ChainMethod (JSAny -> IO a) where
  chain = ffi . toJSString . chainFFI 0
instance (FromAny a, ToAny x1) => ChainMethod (x1 -> JSAny -> IO a) where
  chain = ffi . toJSString . chainFFI 1
instance (FromAny a, ToAny x1, ToAny x2) => ChainMethod (x1 -> x2 -> JSAny -> IO a) where
  chain = ffi . toJSString . chainFFI 2
instance (FromAny a, ToAny x1, ToAny x2, ToAny x3) => ChainMethod (x1 -> x2 -> x3 -> JSAny -> IO a) where
  chain = ffi . toJSString . chainFFI 3
instance (FromAny a, ToAny x1, ToAny x2, ToAny x3, ToAny x4) => ChainMethod (x1 -> x2 -> x3 -> x4 -> JSAny -> IO a) where
  chain = ffi . toJSString . chainFFI 4
-- Core
-- | Select elements: the Haskell face of @$(selector)@.
jQuery :: Selector -> IO JQuery
jQuery s = JQuery <$> f s where
  f :: Selector -> IO JSAny
  f = ffi "(function(sel){ return $(sel); })"
-- | Short alias for 'jQuery'.
j :: Selector -> IO JQuery
j = jQuery
-- Attributes: thin wrappers over the jQuery attribute/property methods of
-- the same name; getters read via 'chain', setters return the (chainable)
-- jQuery object.
addClass :: ClassName -> JQuery -> IO JQuery
addClass cls (JQuery q) = JQuery <$> chain "addClass" cls q
getAttr :: AttrName -> JQuery -> IO String
getAttr atn (JQuery q) = chain "attr" atn q
setAttr :: AttrName -> String -> JQuery -> IO JQuery
setAttr atn v (JQuery q) = JQuery <$> chain "attr" atn v q
hasClass :: ClassName -> JQuery -> IO Bool
hasClass cls (JQuery q) = chain "hasClass" cls q
-- The explicit () argument selects the one-extra-argument 'chain'
-- instance; it marshals to an argument jQuery ignores for the getter call.
getHtml :: JQuery -> IO String
getHtml (JQuery q) = chain "html" () q
setHtml :: String -> JQuery -> IO JQuery
setHtml html (JQuery q) = JQuery <$> chain "html" html q
getProp :: PropName -> JQuery -> IO String
getProp prop (JQuery q) = chain "prop" prop q
setProp :: PropName -> String -> JQuery -> IO JQuery
setProp prop v (JQuery q) = JQuery <$> chain "prop" prop v q
removeAttr :: AttrName -> JQuery -> IO JQuery
removeAttr atn (JQuery q) = JQuery <$> chain "removeAttr" atn q
removeClass :: ClassName -> JQuery -> IO JQuery
removeClass cls (JQuery q) = JQuery <$> chain "removeClass" cls q
removeProp :: PropName -> JQuery -> IO JQuery
removeProp prop (JQuery q) = JQuery <$> chain "removeProp" prop q
toggleClass :: ClassName -> JQuery -> IO JQuery
toggleClass cls (JQuery q) = JQuery <$> chain "toggleClass" cls q
getValue :: JQuery -> IO String
getValue (JQuery q) = chain "val" () q
setValue :: String -> JQuery -> IO JQuery
setValue val (JQuery q) = JQuery <$> chain "val" val q
-- Traversing: wrappers over jQuery's traversal methods.  The *At variants
-- pass a selector to the same underlying method; s-/j-prefixed names avoid
-- clashes with Prelude ('sfilter', 'jfirst', ...).
add :: Selector -> JQuery -> IO JQuery
add sel (JQuery q) = JQuery <$> chain "add" sel q
addAt :: Selector -> JQuery -> JQuery -> IO JQuery
addAt sel (JQuery qat) (JQuery q) = JQuery <$> chain "add" sel qat q
addBack :: Selector -> JQuery -> IO JQuery
addBack sel (JQuery q) = JQuery <$> chain "addBack" sel q
children :: JQuery -> IO JQuery
children (JQuery q) = JQuery <$> chain "children" q
childrenAt :: Selector -> JQuery -> IO JQuery
childrenAt sel (JQuery q) = JQuery <$> chain "children" sel q
closest :: Selector -> JQuery -> IO JQuery
closest sel (JQuery q) = JQuery <$> chain "closest" sel q
contents :: JQuery -> IO JQuery
contents (JQuery q) = JQuery <$> chain "contents" q
-- The callback is marshalled through JSAny and re-wrapped, so the Haskell
-- function sees a JQuery value per element.
each :: (JQuery -> IO JQuery) -> JQuery -> IO JQuery
each f (JQuery q) = JQuery <$> chain "each" (\any -> fromJQuery <$> f (JQuery any)) q
end :: JQuery -> IO JQuery
end (JQuery q) = JQuery <$> chain "end" q
eq :: Int -> JQuery -> IO JQuery
eq i (JQuery q) = JQuery <$> chain "eq" i q
jmap :: ((Int, Elem) -> IO JQuery) -> JQuery -> IO JQuery
jmap f (JQuery q) = JQuery <$> chain "map" (\x -> fromJQuery <$> f x) q
sfilter :: Selector -> JQuery -> IO JQuery
sfilter sel (JQuery q) = JQuery <$> chain "filter" sel q
sfind :: Selector -> JQuery -> IO JQuery
sfind sel (JQuery q) = JQuery <$> chain "find" sel q
jfirst :: JQuery -> IO JQuery
jfirst (JQuery q) = JQuery <$> chain "first" q
jlast :: JQuery -> IO JQuery
jlast (JQuery q) = JQuery <$> chain "last" q
next :: JQuery -> IO JQuery
next (JQuery q) = JQuery <$> chain "next" q
nextAt :: Selector -> JQuery -> IO JQuery
nextAt sel (JQuery q) = JQuery <$> chain "next" sel q
nextAll :: JQuery -> IO JQuery
nextAll (JQuery q) = JQuery <$> chain "nextAll" q
nextAllAt :: Selector -> JQuery -> IO JQuery
nextAllAt sel (JQuery q) = JQuery <$> chain "nextAll" sel q
nextUntil :: JQuery -> IO JQuery
nextUntil (JQuery q) = JQuery <$> chain "nextUntil" q
nextUntilAt :: Selector -> JQuery -> IO JQuery
nextUntilAt sel (JQuery q) = JQuery <$> chain "nextUntil" sel q
-- Traversal wrappers, continued (prev*, filtering, parents, siblings,
-- slicing).  Same chain-per-method pattern as above.
prev :: JQuery -> IO JQuery
prev (JQuery q) = JQuery <$> chain "prev" q
prevAt :: Selector -> JQuery -> IO JQuery
prevAt sel (JQuery q) = JQuery <$> chain "prev" sel q
prevAll :: JQuery -> IO JQuery
prevAll (JQuery q) = JQuery <$> chain "prevAll" q
prevAllAt :: Selector -> JQuery -> IO JQuery
prevAllAt sel (JQuery q) = JQuery <$> chain "prevAll" sel q
prevUntil :: JQuery -> IO JQuery
prevUntil (JQuery q) = JQuery <$> chain "prevUntil" q
prevUntilAt :: Selector -> JQuery -> IO JQuery
prevUntilAt sel (JQuery q) = JQuery <$> chain "prevUntil" sel q
has :: Selector -> JQuery -> IO JQuery
has sel (JQuery q) = JQuery <$> chain "has" sel q
-- NOTE(review): jQuery's .is() returns a boolean, but this wrapper
-- re-wraps the result as JQuery -- confirm the intended return type.
is :: Selector -> JQuery -> IO JQuery
is sel (JQuery q) = JQuery <$> chain "is" sel q
snot :: Selector -> JQuery -> IO JQuery
snot sel (JQuery q) = JQuery <$> chain "not" sel q
offsetParent :: JQuery -> IO JQuery
offsetParent (JQuery q) = JQuery <$> chain "offsetParent" q
parent :: JQuery -> IO JQuery
parent (JQuery q) = JQuery <$> chain "parent" q
parentAt :: Selector -> JQuery -> IO JQuery
parentAt sel (JQuery q) = JQuery <$> chain "parent" sel q
parentsUntil :: JQuery -> IO JQuery
parentsUntil (JQuery q) = JQuery <$> chain "parentsUntil" q
parentsUntilAt :: Selector -> JQuery -> IO JQuery
parentsUntilAt sel (JQuery q) = JQuery <$> chain "parentsUntil" sel q
siblings :: JQuery -> IO JQuery
siblings (JQuery q) = JQuery <$> chain "siblings" q
siblingsAt :: Selector -> JQuery -> IO JQuery
siblingsAt sel (JQuery q) = JQuery <$> chain "siblings" sel q
sliceFrom :: Int -> JQuery -> IO JQuery
sliceFrom s (JQuery q) = JQuery <$> chain "slice" s q
slice :: Int -> Int -> JQuery -> IO JQuery
slice s t (JQuery q) = JQuery <$> chain "slice" (s,t) q
-- Manipulation
-- | Insert content before each matched element (jQuery @.before()@).
-- Fixed: the wrapper previously called @.after()@, duplicating 'after'.
before :: HTML -> JQuery -> IO JQuery
before html (JQuery q) = JQuery <$> chain "before" html q
-- | Insert content after each matched element (jQuery @.after()@).
after :: HTML -> JQuery -> IO JQuery
after html (JQuery q) = JQuery <$> chain "after" html q
insertAfter :: Selector -> JQuery -> IO JQuery
insertAfter sel (JQuery q) = JQuery <$> chain "insertAfter" sel q
insertBefore :: Selector -> JQuery -> IO JQuery
insertBefore sel (JQuery q) = JQuery <$> chain "insertBefore" sel q
-- | Append content inside each matched element (jQuery @.append()@).
-- Fixed: the wrapper previously called @.after()@, which inserts siblings
-- rather than children.
append :: HTML -> JQuery -> IO JQuery
append html (JQuery q) = JQuery <$> chain "append" html q
-- | Append the matched elements to the target selector (jQuery
-- @.appendTo()@).  Fixed: the wrapper previously called @.after()@.
appendTo :: Selector -> JQuery -> IO JQuery
appendTo sel (JQuery q) = JQuery <$> chain "appendTo" sel q
clone :: JQuery -> IO JQuery
clone (JQuery q) = JQuery <$> chain "clone" q
-- NOTE(review): jQuery's .css(prop) getter returns the property's string
-- value, yet this wrapper re-wraps the result as JQuery -- confirm the
-- intended return type.
getCss :: PropName -> JQuery -> IO JQuery
getCss prop (JQuery q) = JQuery <$> chain "css" prop q
setCss :: PropName -> String -> JQuery -> IO JQuery
setCss prop v (JQuery q) = JQuery <$> chain "css" prop v q
-- | Detach matched elements, keeping their data and events (jQuery
-- @.detach()@).  Fixed: the wrapper previously called @.css()@.
detach :: Selector -> JQuery -> IO JQuery
detach sel (JQuery q) = JQuery <$> chain "detach" sel q
-- Geometry and content wrappers: paired get/set functions over the same
-- jQuery method (height, innerHeight, innerWidth, offset, outer sizes).
empty :: JQuery -> IO JQuery
empty (JQuery q) = JQuery <$> chain "empty" q
getHeight :: JQuery -> IO Int
getHeight (JQuery q) = chain "height" q
setHeight :: Int -> JQuery -> IO JQuery
setHeight h (JQuery q) = JQuery <$> chain "height" h q
getInnerHeight :: JQuery -> IO Int
getInnerHeight (JQuery q) = chain "innerHeight" q
setInnerHeight :: Int -> JQuery -> IO JQuery
setInnerHeight h (JQuery q) = JQuery <$> chain "innerHeight" h q
getInnerWidth :: JQuery -> IO Int
getInnerWidth (JQuery q) = chain "innerWidth" q
setInnerWidth :: Int -> JQuery -> IO JQuery
setInnerWidth h (JQuery q) = JQuery <$> chain "innerWidth" h q
-- Global jQuery.cssNumber table, wrapped for uniformity.
cssNumber :: IO JQuery
cssNumber = JQuery <$> (ffi "(function(){ return jQuery.cssNumber; })" $ ())
getOffset :: JQuery -> IO JQuery
getOffset (JQuery q) = JQuery <$> chain "offset" q
setOffset :: Plain -> JQuery -> IO JQuery
setOffset p (JQuery q) = JQuery <$> chain "offset" p q
getOuterHeight :: JQuery -> IO Int
getOuterHeight (JQuery q) = chain "outerHeight" q
setOuterHeight :: Int -> JQuery -> IO JQuery
setOuterHeight h (JQuery q) = JQuery <$> chain "outerHeight" h q
getOuterWidth :: JQuery -> IO Int
getOuterWidth (JQuery q) = chain "outerWidth" q
setOuterWidth :: Int -> JQuery -> IO JQuery
setOuterWidth h (JQuery q) = JQuery <$> chain "outerWidth" h q
position :: JQuery -> IO JSAny
position (JQuery q) = chain "position" q
-- DOM insertion/removal, scrolling, text/width accessors and wrapping --
-- all direct chain-per-method wrappers.
prepend :: HTML -> JQuery -> IO JQuery
prepend html (JQuery q) = JQuery <$> chain "prepend" html q
prependTo :: Selector -> JQuery -> IO JQuery
prependTo sel (JQuery q) = JQuery <$> chain "prependTo" sel q
remove :: JQuery -> IO JQuery
remove (JQuery q) = JQuery <$> chain "remove" q
removeAt :: Selector -> JQuery -> IO JQuery
removeAt sel (JQuery q) = JQuery <$> chain "remove" sel q
replaceAll :: Selector -> JQuery -> IO JQuery
replaceAll sel (JQuery q) = JQuery <$> chain "replaceAll" sel q
replaceWith :: HTML -> JQuery -> IO JQuery
replaceWith html (JQuery q) = JQuery <$> chain "replaceWith" html q
getScrollLeft :: JQuery -> IO Int
getScrollLeft (JQuery q) = chain "scrollLeft" q
setScrollLeft :: Int -> JQuery -> IO JQuery
setScrollLeft v (JQuery q) = JQuery <$> chain "scrollLeft" v q
getScrollTop :: JQuery -> IO Int
getScrollTop (JQuery q) = chain "scrollTop" q
setScrollTop :: Int -> JQuery -> IO JQuery
setScrollTop v (JQuery q) = JQuery <$> chain "scrollTop" v q
getText :: JQuery -> IO String
getText (JQuery q) = chain "text" q
setText :: String -> JQuery -> IO JQuery
setText v (JQuery q) = JQuery <$> chain "text" v q
getWidth :: JQuery -> IO Int
getWidth (JQuery q) = chain "width" q
setWidth :: Int -> JQuery -> IO JQuery
setWidth v (JQuery q) = JQuery <$> chain "width" v q
unwrap :: JQuery -> IO JQuery
unwrap (JQuery q) = JQuery <$> chain "unwrap" q
wrap :: Selector -> JQuery -> IO JQuery
wrap sel (JQuery q) = JQuery <$> chain "wrap" sel q
wrapAll :: Selector -> JQuery -> IO JQuery
wrapAll sel (JQuery q) = JQuery <$> chain "wrapAll" sel q
wrapInner :: HTML -> JQuery -> IO JQuery
wrapInner html (JQuery q) = JQuery <$> chain "wrapInner" html q
-- CSS
-- | The global jQuery.cssHooks table, wrapped for uniformity.
cssHooks :: IO JQuery
cssHooks = JQuery <$> (ffi "(function(){ return jQuery.cssHooks; })" $ ())
-- Event
-- Effects
-- | Easing curve names; constructor names mirror the JS-side easing names
-- ('showEasing' lowercases the first letter for the wire format).
data Easing = Linear | Swing | Jswing |
              EaseInQuad | EaseOutQuad | EaseInOutQuad |
              EaseInCubic | EaseOutCubic | EaseInOutCubic |
              EaseInQuart | EaseOutQuart | EaseInOutQuart |
              EaseInQuint | EaseOutQuint | EaseInOutQuint |
              EaseInSine | EaseOutSine | EaseInOutSine |
              EaseInExpo | EaseOutExpo | EaseInOutExpo |
              EaseInCirc | EaseOutCirc | EaseInOutCirc |
              EaseInElastic | EaseOutElastic | EaseInOutElastic |
              EaseInBack | EaseOutBack | EaseInOutBack |
              EaseInBounce | EaseOutBounce | EaseInOutBounce
              deriving (Eq, Show)
-- | Render an 'Easing' as the camelCase name the JS side expects, e.g.
-- @EaseInQuad@ becomes @"easeInQuad"@.  Uses a total 'case' instead of the
-- original partial head/tail lambda; 'show' on a nullary constructor never
-- yields an empty string, but totality costs nothing.
showEasing :: Easing -> String
showEasing e = case show e of
   (c:cs) -> toLower c : cs
   []     -> []
-- Easings marshal to JS as their lowercase-initial string names.
instance ToAny Easing where
  toAny = toAny . showEasing
  listToAny = listToAny . fmap showEasing
-- | Options for jQuery's @.animate()@ options object; only the plain-data
-- fields are supported here (callback fields are not yet marshalled).
data AnimateOption = AnimateOption {
  duration :: Int,
  easing :: Easing,
  bqueue :: Bool,          -- rendered as the "queue" key in 'fromAnimateOption'
  specialEasing :: Plain
  -- how to convert a normal function to JSString ?
  -- complete, step, progress
  -- start, done, fail, always
  }
-- | jQuery-like defaults: 400ms, swing easing, queued, no special easing.
defAnimateOption :: AnimateOption
defAnimateOption = AnimateOption 400 Swing True ""
-- ?
-- animation *With functions work right ?
-- | Marshal the options record to the JS object @.animate()@ expects;
-- note 'bqueue' becomes the "queue" key.
fromAnimateOption :: AnimateOption -> JSAny
fromAnimateOption anim =
  toObject [("duration", toAny $ duration anim),
            ("easing", toAny $ easing anim),
            ("queue", toAny $ bqueue anim),
            ("specialEasing", toAny $ specialEasing anim)]
-- | @.animate(properties)@ with defaults.
animate :: Plain -> JQuery -> IO JQuery
animate p (JQuery q) = JQuery <$> chain "animate" p q
-- | @.animate(properties, duration, easing, complete)@.
animateOption :: Plain -> Int -> Easing -> IO () -> JQuery -> IO JQuery
animateOption p d e func (JQuery q) = JQuery <$> chain "animate" p d (showEasing e) func q
-- | @.animate(properties, options)@ using a full 'AnimateOption' record.
animateWith :: Plain -> AnimateOption -> JQuery -> IO JQuery
animateWith p opt (JQuery q) = JQuery <$> chain "animate" p (fromAnimateOption opt) q
-- | Compute the type of an effect wrapper from its extra argument types,
-- e.g. @FuncType '[Int, IO ()] = Int -> IO () -> JQuery -> IO JQuery@.
type family FuncType (as :: [*]) where
  FuncType '[] = JQuery -> IO JQuery
  FuncType (a ': as) = a -> FuncType as

-- | Proxy carrying the jQuery method name at the type level.
data Name (s :: Symbol) = Name

-- | Class of jQuery effect methods invocable bare ('optionSimple'), with
-- extra positional arguments ('option'), or with an options record
-- ('optionWith').  The type-level symbol @s@ is the jQuery method name.
class AnimateOptionFunc (s :: Symbol) where
  type Arg s :: [*]
  -- | Invoke the method with the extra arguments given by @'Arg' s@.
  option :: (KnownSymbol s) => Name s -> FuncType (Arg s)
  -- | Invoke the method with no arguments.
  optionSimple :: (KnownSymbol s) => Name s -> JQuery -> IO JQuery
  optionSimple n (JQuery q) = JQuery <$> chain (symbolVal n) q
  -- | Invoke the method with an options object.
  optionWith :: (KnownSymbol s) => Name s -> AnimateOption -> JQuery -> IO JQuery
  -- BUG FIX: the 'AnimateOption' argument was previously ignored (the body
  -- was identical to 'optionSimple'), so every @*With@ wrapper silently
  -- dropped its options.  Marshal and pass them through.
  optionWith n anim (JQuery q) =
    JQuery <$> chain (symbolVal n) (fromAnimateOption anim) q
-- | jQuery @fadeIn@: bare, with duration and completion callback, or with
-- an 'AnimateOption' record.
instance AnimateOptionFunc "fadeIn" where
  type Arg "fadeIn" = [Int, IO ()]
  option n dur onDone (JQuery q) =
    fmap JQuery (chain (symbolVal n) dur onDone q)

fadeIn :: JQuery -> IO JQuery
fadeIn = optionSimple (Name :: Name "fadeIn")

fadeInOption :: Int -> IO () -> JQuery -> IO JQuery
fadeInOption = option (Name :: Name "fadeIn")

fadeInWith :: AnimateOption -> JQuery -> IO JQuery
fadeInWith = optionWith (Name :: Name "fadeIn")
-- | jQuery @fadeOut@ effect family.
instance AnimateOptionFunc "fadeOut" where
  type Arg "fadeOut" = [Int, IO ()]
  option n x1 x2 (JQuery q) = JQuery <$> chain (symbolVal n) x1 x2 q
fadeOut :: JQuery -> IO JQuery
fadeOut = optionSimple (Name :: Name "fadeOut")
fadeOutOption :: Int -> IO () -> JQuery -> IO JQuery
fadeOutOption = option (Name :: Name "fadeOut")
fadeOutWith :: AnimateOption -> JQuery -> IO JQuery
fadeOutWith = optionWith (Name :: Name "fadeOut")
-- | jQuery @fadeTo@: duration and target opacity.
-- NOTE(review): the opacity is typed 'Int' although jQuery expects a value
-- in 0..1, so only 0 and 1 are expressible — confirm whether this should
-- be a floating-point type.
fadeTo :: Int -> Int -> JQuery -> IO JQuery
fadeTo d o (JQuery q) = JQuery <$> chain "fadeTo" d o q
-- | jQuery @fadeToggle@: duration, easing and completion callback.
instance AnimateOptionFunc "fadeToggle" where
  type Arg "fadeToggle" = [Int, Easing, IO ()]
  option n d e func (JQuery q) = JQuery <$> chain (symbolVal n) d (showEasing e) func q
fadeToggle :: JQuery -> IO JQuery
fadeToggle = optionSimple (Name :: Name "fadeToggle")
fadeToggleOption :: Int -> Easing -> IO () -> JQuery -> IO JQuery
fadeToggleOption = option (Name :: Name "fadeToggle")
fadeToggleWith :: AnimateOption -> JQuery -> IO JQuery
fadeToggleWith = optionWith (Name :: Name "fadeToggle")
-- | jQuery @slideDown@ effect family.
instance AnimateOptionFunc "slideDown" where
  type Arg "slideDown" = [Int, IO ()]
  option n d func (JQuery q) = JQuery <$> chain (symbolVal n) d func q
slideDown :: JQuery -> IO JQuery
slideDown = optionSimple (Name :: Name "slideDown")
slideDownOption :: Int -> IO () -> JQuery -> IO JQuery
slideDownOption = option (Name :: Name "slideDown")
slideDownWith :: AnimateOption -> JQuery -> IO JQuery
slideDownWith = optionWith (Name :: Name "slideDown")
-- | jQuery @slideToggle@ effect family.
instance AnimateOptionFunc "slideToggle" where
  type Arg "slideToggle" = [Int, IO ()]
  option n d func (JQuery q) = JQuery <$> chain (symbolVal n) d func q
slideToggle :: JQuery -> IO JQuery
slideToggle = optionSimple (Name :: Name "slideToggle")
slideToggleOption :: Int -> IO () -> JQuery -> IO JQuery
slideToggleOption = option (Name :: Name "slideToggle")
slideToggleWith :: AnimateOption -> JQuery -> IO JQuery
slideToggleWith = optionWith (Name :: Name "slideToggle")
-- | jQuery @slideUp@ effect family.
instance AnimateOptionFunc "slideUp" where
  type Arg "slideUp" = [Int, IO ()]
  option n d func (JQuery q) = JQuery <$> chain (symbolVal n) d func q
slideUp :: JQuery -> IO JQuery
slideUp = optionSimple (Name :: Name "slideUp")
slideUpOption :: Int -> IO () -> JQuery -> IO JQuery
slideUpOption = option (Name :: Name "slideUp")
slideUpWith :: AnimateOption -> JQuery -> IO JQuery
slideUpWith = optionWith (Name :: Name "slideUp")
-- | jQuery @show@.  The wrappers are named with a @j@ prefix to avoid
-- clashing with Prelude 'show'/'hide'-style names, but the method invoked
-- on the jQuery object must be the real jQuery name.
--
-- BUG FIX: these wrappers previously used the type-level symbol "jshow" /
-- "jhide" both as the Haskell-facing name and (via 'symbolVal') as the
-- jQuery method name, so they invoked the nonexistent @.jshow()@ /
-- @.jhide()@ methods at runtime.  The instances are now keyed on the real
-- method names "show" / "hide"; the exported wrapper signatures are
-- unchanged.
instance AnimateOptionFunc "show" where
  type Arg "show" = [Int, IO ()]
  option n d func (JQuery q) = JQuery <$> chain (symbolVal n) d func q

jshow :: JQuery -> IO JQuery
jshow = optionSimple (Name :: Name "show")

jshowOption :: Int -> IO () -> JQuery -> IO JQuery
jshowOption = option (Name :: Name "show")

jshowWith :: AnimateOption -> JQuery -> IO JQuery
jshowWith = optionWith (Name :: Name "show")

-- | jQuery @hide@ (see 'jshow' for the naming convention).
instance AnimateOptionFunc "hide" where
  type Arg "hide" = [Int, IO ()]
  option n d func (JQuery q) = JQuery <$> chain (symbolVal n) d func q

jhide :: JQuery -> IO JQuery
jhide = optionSimple (Name :: Name "hide")

jhideOption :: Int -> IO () -> JQuery -> IO JQuery
jhideOption = option (Name :: Name "hide")

jhideWith :: AnimateOption -> JQuery -> IO JQuery
jhideWith = optionWith (Name :: Name "hide")
-- | jQuery @toggle@: show or hide the matched elements.
instance AnimateOptionFunc "toggle" where
  type Arg "toggle" = [Int, IO ()]
  option n d func (JQuery q) = JQuery <$> chain (symbolVal n) d func q
toggle :: JQuery -> IO JQuery
toggle = optionSimple (Name :: Name "toggle")
toggleOption :: Int -> IO () -> JQuery -> IO JQuery
toggleOption = option (Name :: Name "toggle")
toggleWith :: AnimateOption -> JQuery -> IO JQuery
toggleWith = optionWith (Name :: Name "toggle")
-- | Remove all items from the fx queue that have not yet run.
clearQueue :: JQuery -> IO JQuery
clearQueue (JQuery q) = fmap JQuery (chain "clearQueue" q)

-- | Delay subsequent queued effects by the given milliseconds.
delay :: Int -> JQuery -> IO JQuery
delay ms (JQuery q) = fmap JQuery (chain "delay" ms q)

-- | jQuery @queue@ with no arguments.
-- NOTE(review): @.queue()@ returns the queue array, not a jQuery
-- collection; wrapping the result in 'JQuery' may be misleading.
queue :: JQuery -> IO JQuery
queue (JQuery q) = fmap JQuery (chain "queue" q)

-- | Execute the next function on the fx queue.
dequeue :: JQuery -> IO JQuery
dequeue (JQuery q) = fmap JQuery (chain "dequeue" q)

-- | Stop running animations and jump each to its end state.
finish :: JQuery -> IO JQuery
finish (JQuery q) = fmap JQuery (chain "finish" q)

-- | Read the global animation tick interval, @jQuery.fx.interval@.
fxInterval :: IO Int
fxInterval = ffi "(function(){ return jQuery.fx.interval; })" ()

-- | Read the global animation kill switch, @jQuery.fx.off@.
fxOff :: IO Bool
fxOff = ffi "(function(){ return jQuery.fx.off; })" ()

-- | Stop the currently running animation.
stop :: JQuery -> IO JQuery
stop (JQuery q) = fmap JQuery (chain "stop" q)
-- Ajax
-- Global ajax event handlers.  NOTE(review): the callbacks take no
-- arguments, so the jQuery event/XHR parameters are dropped.
ajaxComplete :: IO () -> JQuery -> IO JQuery
ajaxComplete func (JQuery q) = JQuery <$> chain "ajaxComplete" func q
ajaxError :: IO () -> JQuery -> IO JQuery
ajaxError func (JQuery q) = JQuery <$> chain "ajaxError" func q
ajaxSend :: IO () -> JQuery -> IO JQuery
ajaxSend func (JQuery q) = JQuery <$> chain "ajaxSend" func q
ajaxStart :: IO () -> JQuery -> IO JQuery
ajaxStart func (JQuery q) = JQuery <$> chain "ajaxStart" func q
ajaxStop :: IO () -> JQuery -> IO JQuery
ajaxStop func (JQuery q) = JQuery <$> chain "ajaxStop" func q
-- | Register a global handler for successful ajax requests
-- (jQuery @.ajaxSuccess()@).
--
-- BUG FIX: the jQuery method name was misspelled @"ajaxSucess"@, so the
-- call invoked a nonexistent method at runtime.  The Haskell name keeps
-- the historical spelling for backward compatibility; prefer the
-- correctly spelled alias 'ajaxSuccess'.
ajaxSucess :: IO () -> JQuery -> IO JQuery
ajaxSucess func (JQuery q) = JQuery <$> chain "ajaxSuccess" func q

-- | Correctly spelled alias for 'ajaxSucess'.
ajaxSuccess :: IO () -> JQuery -> IO JQuery
ajaxSuccess = ajaxSucess
-- | Serialize an object/array into a URL query string (@jQuery.param@).
param :: Plain -> IO String
param p = ffi "(function(p){ return jQuery.param(p); })" $ p
-- | Encode a form's elements as a query string (@.serialize()@).
serialize :: JQuery -> IO String
serialize (JQuery q) = chain "serialize" q
-- | jQuery @.serializeArray()@.
-- NOTE(review): jQuery returns an array of objects here, yet the result is
-- typed 'String' — confirm how 'chain' marshals this.
serializeArray :: JQuery -> IO String
serializeArray (JQuery q) = chain "serializeArray" q
-- ajax option ?
-- type jqXHR
-- | Perform an ajax request with a settings object (@jQuery.ajax@).
ajax :: URL -> Plain -> IO JSAny
ajax = ffi "(function(url,p){ return jQuery.ajax(url,p); })"
-- jquery.ajaxPrefilter
-- jquery.ajaxTransport
-- | Set default values for future ajax requests (@jQuery.ajaxSetup@).
ajaxSetup :: Plain -> IO JSAny
ajaxSetup = ffi "(function(p){ return jQuery.ajaxSetup(p); })"
-- Shorthand ajax helpers; each returns the raw jqXHR as 'JSAny'.
ajaxGet :: URL -> IO JSAny
ajaxGet = ffi "(function(url){ return jQuery.get(url); })"
ajaxGetJSON :: URL -> IO JSAny
ajaxGetJSON = ffi "(function(url){ return jQuery.getJSON(url); })"
ajaxGetScript :: URL -> IO JSAny
ajaxGetScript = ffi "(function(url){ return jQuery.getScript(url); })"
ajaxPost :: URL -> IO JSAny
ajaxPost = ffi "(function(url){ return jQuery.post(url); })"
-- | Load HTML from the URL into the matched elements (@.load()@).
load :: URL -> JQuery -> IO JQuery
load url (JQuery q) = JQuery <$> chain "load" url q
-- Utilities
-- | True if the second element is a descendant of the first.
contains :: Elem -> Elem -> IO Bool
contains = ffi "(function(e1,e2){ return jQuery.contains(e1,e2); })"
-- | Store a key/value on an element via @jQuery.data@; returns the value.
jdata :: Elem -> String -> JSAny -> IO JSAny
jdata = ffi "(function(e,k,v){ return jQuery.data(e,k,v); })"
-- | @jQuery.extend@ / @jQuery.fn.extend@ with a single argument.
extend :: JSAny -> IO JSAny
extend = ffi "(function(t){ return jQuery.extend(t); })"
fnextend :: JSAny -> IO JSAny
fnextend = ffi "(function(t){ return jQuery.fn.extend(t); })"
-- globalEval, grep
-- inArray, isArray
-- Type predicates, delegating to the corresponding jQuery.is* helpers.
isEmptyObject :: JSAny -> IO Bool
isEmptyObject = ffi "(function(t){ return jQuery.isEmptyObject(t); })"
isFunction :: Plain -> IO Bool
isFunction = ffi "(function(t){ return jQuery.isFunction(t); })"
isNumeric :: Plain -> IO Bool
isNumeric = ffi "(function(t){ return jQuery.isNumeric(t); })"
isPlainObject :: Plain -> IO Bool
isPlainObject = ffi "(function(t){ return jQuery.isPlainObject(t); })"
isWindow :: Plain -> IO Bool
isWindow = ffi "(function(t){ return jQuery.isWindow(t); })"
isXMLDoc :: Elem -> IO Bool
isXMLDoc = ffi "(function(t){ return jQuery.isXMLDoc(t); })"
-- makeArray, merge
-- | @jQuery.noop@ — does nothing.
noop :: IO ()
noop = ffi "(function(){ return jQuery.noop(); })" $ ()
-- | Current time in milliseconds (@jQuery.now@).
now :: IO Int
now = ffi "(function(){ return jQuery.now(); })" $ ()
-- parseHTML, parseJSON, parseXML, proxy
-- | Remove all data stored on an element (@jQuery.removeData@).
removeData :: Elem -> IO JQuery
removeData e = JQuery <$> (ffi "(function(e){ return jQuery.removeData(e); })" e)
-- | Trim leading/trailing whitespace (@jQuery.trim@).
trim :: String -> IO String
trim = ffi "(function(e){ return jQuery.trim(e); })"
-- | Internal JS type name of a value (@jQuery.type@).
jtype :: JSAny -> IO String
jtype = ffi "(function(a){ return jQuery.type(a); })"
-- unique
| myuon/haste-jQuery | Haste/JQuery.hs | mit | 21,524 | 0 | 12 | 4,226 | 8,101 | 4,071 | 4,030 | 441 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Elm.Encoder
( toElmEncoderRef
, toElmEncoderRefWith
, toElmEncoderSource
, toElmEncoderSourceWith
) where
import Control.Monad.Reader
import Data.Text
import Elm.Common
import Elm.Type
import Formatting
-- | Things that render to the full body of an Elm JSON encoder expression.
class HasEncoder a where
  render :: a -> Reader Options Text

-- | Things that render to a /reference/ to an encoder, e.g. @encodeFoo@
-- or @Json.Encode.int@, rather than its definition.
class HasEncoderRef a where
  renderRef :: a -> Reader Options Text
-- | Render a complete Elm encoder definition for a datatype:
-- a type signature line followed by a one-argument function body.
instance HasEncoder ElmDatatype where
    render d@(ElmDatatype name constructor) = do
        fnName <- renderRef d
        sformat
            (stext % " : " % stext % " -> Json.Encode.Value" % cr % stext % " x =" % stext)
            fnName
            name
            fnName <$>
            render constructor
    render (ElmPrimitive primitive) = renderRef primitive

-- | An encoder for @Foo@ is referenced as @encodeFoo@; primitives
-- reference the corresponding @Json.Encode@ function.
instance HasEncoderRef ElmDatatype where
    renderRef (ElmDatatype name _) =
        pure $ sformat ("encode" % stext) name
    renderRef (ElmPrimitive primitive) =
        renderRef primitive

-- | Records encode to a @Json.Encode.object@ literal of their fields.
-- NOTE(review): only 'RecordConstructor' is handled; other constructor
-- shapes fall through to a pattern-match failure — confirm this is the
-- intended (unsupported) surface.
instance HasEncoder ElmConstructor where
    render (RecordConstructor _ value) =
        sformat (cr % " Json.Encode.object" % cr % " [ " % stext % cr % " ]") <$> render value

-- | Render one @( "jsonKey", encoder x.field )@ pair, or a reference, or
-- a comma-joined sequence of pairs for 'Values'.
instance HasEncoder ElmValue where
    render (ElmField name value) = do
        fieldModifier <- asks fieldLabelModifier
        valueBody <- render value
        pure $
            sformat
                ("( \"" % stext % "\", " % stext % " x." % stext % " )")
                (fieldModifier name)
                valueBody
                name
    render (ElmPrimitiveRef primitive) = renderRef primitive
    render (ElmRef name) = pure $ sformat ("encode" % stext) name
    render (Values x y) = sformat (stext % cr % " , " % stext) <$> render x <*> render y

-- | Map each primitive to a @Json.Encode@ function (or a composed
-- expression for containers).
-- NOTE(review): @Json.Encode.char@ does not exist in Elm core's
-- Json.Encode — confirm the target Elm environment provides it.
instance HasEncoderRef ElmPrimitive where
    renderRef EDate = pure "(Json.Encode.string << toISOString)"
    renderRef EUnit = pure "Json.Encode.null"
    renderRef EInt = pure "Json.Encode.int"
    renderRef EChar = pure "Json.Encode.char"
    renderRef EBool = pure "Json.Encode.bool"
    renderRef EFloat = pure "Json.Encode.float"
    renderRef EString = pure "Json.Encode.string"
    -- A list of chars is an Elm String, encoded directly.
    renderRef (EList (ElmPrimitive EChar)) = pure "Json.Encode.string"
    renderRef (EList datatype) = sformat ("(Json.Encode.list << List.map " % stext % ")") <$> renderRef datatype
    renderRef (EMaybe datatype) =
        sformat ("(Maybe.withDefault Json.Encode.null << Maybe.map " % stext % ")") <$>
        renderRef datatype
    renderRef (ETuple2 x y) =
        sformat ("(tuple2 " % stext % " " % stext % ")") <$> renderRef x <*>
        renderRef y
    renderRef (EDict k datatype) =
        sformat ("(dict " % stext % " " % stext % ")") <$> renderRef k <*> renderRef datatype
-- | Render a reference to the encoder for @a@, using the given 'Options'.
toElmEncoderRefWith :: ElmType a => Options -> a -> Text
toElmEncoderRefWith options = flip runReader options . renderRef . toElmType

-- | 'toElmEncoderRefWith' specialised to 'defaultOptions'.
toElmEncoderRef :: ElmType a => a -> Text
toElmEncoderRef = toElmEncoderRefWith defaultOptions

-- | Render the full encoder definition for @a@, using the given 'Options'.
toElmEncoderSourceWith :: ElmType a => Options -> a -> Text
toElmEncoderSourceWith options = flip runReader options . render . toElmType

-- | 'toElmEncoderSourceWith' specialised to 'defaultOptions'.
toElmEncoderSource :: ElmType a => a -> Text
toElmEncoderSource = toElmEncoderSourceWith defaultOptions
| InfernalKnight/elm-export | src/Elm/Encoder.hs | epl-1.0 | 3,222 | 0 | 17 | 840 | 925 | 453 | 472 | 72 | 1 |
{- |
Description : Hets shell (command interpreter)
Copyright : (c) Otto-von-Guericke University of Magdeburg
License : GPLv2 or higher, see LICENSE.txt
Hets shell (command interpreter, call it with hets -I) that makes Hets scriptable
-}
module CMDL where
| spechub/Hets | CMDL.hs | gpl-2.0 | 268 | 0 | 2 | 51 | 5 | 4 | 1 | 1 | 0 |
{-# LANGUAGE ScopedTypeVariables, LambdaCase #-}
module Salsa20 where {
import SeqIterate;
import Util;
import Data.Typeable(Typeable,cast);
import Data.Bits(rotate,xor,Bits);
import Data.List hiding (length, replicate,(!!));
-- import Control.Exception(assert);
import qualified UserError as User;
import Data.Word;
import Prelude hiding(length, replicate,(!!));
-- import Debug.Trace;
-- | One Salsa20 quarter-round step: XOR @c@ with @(b + a)@ rotated left
-- by the given amount.
round_func :: (Bits a, Num a) => Rotation -> a -> a -> a -> a;
round_func (Rotation k) a b c = c `xor` rotate (b + a) k;

-- | How many earlier elements a column step depends on (beyond the
-- current position itself).
newtype Arity = Arity Integer deriving (Show);

unArity :: Arity -> Integer;
unArity (Arity n) = n;
{-
a b c 7 c:=1
b 1 d 9 d:=2
1 2 e 13 e:=3
2 3 f 18 f:=4
3 4 g ?
-}
-- arity=2 for salsa20, the number of elements above the current position it depends on.
-- it always depends on the current position, so actual arity of r is arity+1;
-- | Run one column step: each output element is computed by @f@ from its
-- rotation amount, the element @arity@ positions ahead in the cyclic
-- input, and a window of @arity@ previously computed outputs.  The
-- definition ties the knot on @answer@, so evaluation order matters:
-- the first @arity@ window entries come from the (cycled) input, the
-- rest from earlier outputs.  Do not restructure casually.
do_column :: forall a b . Arity -> (b -> [a] -> a) -> [b] -> [a] -> [a];
do_column (Arity arity) f shifts l0 = let
{ l :: [a]
; l = cycle l0
; answer :: [a]
-- cycle shifts is for generalization of salsa20 on bigger matrices, yet still keeping the 4 rotation amounts. Just repeat the rotations as necessary: far from clear this is a safe thing to do.
; answer = zipWith f (cycle shifts) $ transpose $ (genericDrop arity l:) $ genericTake arity $ tails $ genericTake arity l ++ answer;
} in answer;

-- | Salsa20 depends on the two elements above the current position.
salsa20_arity :: Arity;
salsa20_arity = Arity 2;

-- | One Salsa20 quarter-round over a 4-element column, using the standard
-- rotation amounts 7, 9, 13, 18; the result is rotated back so elements
-- line up with their input positions.
quarter_round :: (Num a, Typeable a, Bits a) => [a] -> [a];
quarter_round input = list_rotate (negate $ unArity salsa20_arity) $ take_same_length input $ do_column salsa20_arity r_as_list (map Rotation [7,9,13,18]) input;
-- | Rotate successive rows by -1, 0, 1, 2, ... positions.
shift_columns :: [[a]] -> [[a]];
shift_columns = zipWith list_rotate [negate 1 ..];

-- | Inverse of 'shift_columns': rotate successive rows by 1, 0, -1, ...
unshift_columns :: [[a]] -> [[a]];
unshift_columns = zipWith list_rotate [1, 0 ..];
-- | 'round_func' adapted to the list interface 'do_column' expects: the
-- window is @[c, a, b]@.  Partial: any other window length is a bug.
r_as_list :: forall a . (Typeable a, Bits a, Num a) => Rotation -> [a] -> a;
-- this is how to trace a polymorphic function
r_as_list k (l@[c,a,b]) = no_trace (case cast l of {
Nothing -> "not word";
Just (ww::[W]) -> "(" ++ show k ++ ",[" ++ (unwords $ map whex ww) ++ "])"}) $ round_func k a b c;
r_as_list _ _ = error "wrong arity";
-- map whex $ fourfunc 2 r_as_list (map Rotation [7,9,13,18]) [0x18171615,0x61707865::W,0x100f0e0d,0x7]
-- ["d3c83331","71572c6d","f3e4deb6","4dfdec95"]
-- agrees with salsafamily paper
-- | Drop-in replacement for Debug.Trace.trace that discards the message.
no_trace :: String -> a -> a;
no_trace = flip const;
-- | Truncate the second list to the length of the first.  It is an error
-- for the second list to be shorter than the first.
take_same_length :: [a] -> [b] -> [b];
take_same_length template values = case (template, values) of {
  ([], _) -> [];
  (_ : t, v : vs) -> v : take_same_length t vs;
  (_, []) -> error "take_same_length: second list too short"};
-- | The example key 1..32 from the salsafamily paper, packed into words.
example_key :: [W];
example_key = map code4bytes $ mat4 $ enumFromTo 1 32;

-- | Example initial state: diagonal constants interleaved with key halves
-- and a nonce/counter block, as in the salsafamily paper.
start_string :: [W];
start_string = let {
d :: Integer -> [W];
d i = [genericIndex salsa20_diagonal i]
} in d 0 ++ take 4 example_key ++ d 1 ++ [0x01040103,0x06020905,7,0] ++ d 2 ++ drop 4 example_key ++ d 3;
-- we transpose first because we prefer to work with rows rather than columns
one_round :: (Typeable a, Num a, Bits a) => [[a]] -> [[a]];
one_round = unshift_columns . map quarter_round . shift_columns . transpose;

-- | Number of rounds to iterate in 'core'.
newtype Rounds = Rounds Integer deriving (Show);

-- | Iterate 'one_round' and pick out the state after @n@ rounds.
core :: (Typeable a, Num a, Bits a, NFData a) => Rounds -> [[a]] -> [[a]];
core (Rounds n) = ((flip genericIndex) n) . seqIterate one_round;

-- | Print the hex matrix after running @num_rounds@ rounds on a state.
salsa20_test :: Rounds -> [W] -> IO();
salsa20_test num_rounds s = mapM_ putStrLn $ map (unwords . map whex) $ core num_rounds $ mat4 s;
{-
ckkk
kcnn
nnck
kkkc
-}
-- | Assemble the HSalsa20 input state from a 32-byte key and 16-byte nonce.
hsalsa_setup :: [Word8] -> [Word8] -> [W];
hsalsa_setup key nonce = key_iv_setup (u8_to_32_little key) $ u8_to_32_little nonce;

-- | Extract the HSalsa20 subkey: the diagonal words (0,5,10,15) followed
-- by words 6..9.
hsalsa_subkey :: [[W]] -> [W];
hsalsa_subkey x = map (genericIndex $ concat x) [0::Integer,5,10,15,6,7,8,9];

-- | HSalsa20: derive a subkey from key and 16-byte nonce (20 rounds).
hsalsa :: [Word8] -> [Word8] -> [W];
hsalsa key = hsalsa_subkey . core (Rounds 20) . mat4 . hsalsa_setup key;

-- | Lay out diagonal constants, 256-bit key halves and a 128-bit IV into
-- the 16-word Salsa20 input state, asserting all the length invariants.
key_iv_setup :: [W] -> [W] -> [W];
key_iv_setup key iv =
let {
(left :: [W], right :: [W]) = splitAt 4 $ key;
d :: Integer -> [W];
d i = [genericIndex salsa20_diagonal i];}
in User.assert "key_iv_setup key length" ((256::Integer) == 32* genericLength key)
$ User.assert "key_iv_setup right" ((4::Integer) == genericLength right)
$ User.assert "key_iv_setup left" ((4::Integer) == genericLength left)
$ User.assert "key_iv_setup iv" ((128::Integer) == 32* genericLength iv)
$ d 0
++ left
++ d 1
++ iv
++ d 2
++ right
++ d 3;

-- | The full Salsa20 core: run the rounds, then add the input state.
with_add :: (Typeable a, Num a, Bits a, NFData a) => Rounds -> [a] -> [a];
with_add rounds = and_add $ concat . core rounds . mat4;

-- | Build the input state for a given 64-bit block counter, stored
-- little-endian (low word first) after the IV.
encode_counter :: [W] -> [W] -> Integer -> [W];
encode_counter key iv counter = User.assert "encode_counter >=0" (counter>=0)
$ User.assert "encode_counter <64" (counter < 2^(64::Integer))
$ key_iv_setup key $ iv ++ let { (q,r) = divMod counter $ 2^(32::Integer) } in map fromIntegral [r,q];

-- | Infinite keystream of Salsa20 blocks for counters 0, 1, 2, ...
salsa20w :: Rounds -> [W] -> [W] -> [[W]];
salsa20w rounds key iv = map (with_add rounds . encode_counter key iv) [0..];

-- | XSalsa20 keystream: HSalsa20 on the first 16 IV bytes derives a
-- subkey, the remaining 8 bytes become the Salsa20 IV.
xsalsa_w :: Rounds -> [Word8] -> [Word8] -> [[W]];
xsalsa_w rounds key iv = let
{ (iv1,iv2) = splitAt 16 iv; }
in salsa20w rounds (hsalsa key iv1) $ u8_to_32_little iv2;

-- | XSalsa20 (20 rounds) keystream as bytes, given key and 24-byte IV.
xsalsa :: [Word8] -> [Word8] -> [Word8];
xsalsa key iv = concatMap block_bytes $ xsalsa_w (Rounds 20) key iv;
}
| kenta2/yescrypt | Salsa20.hs | gpl-3.0 | 5,153 | 0 | 21 | 907 | 2,099 | 1,160 | 939 | 92 | 2 |
import NormalEverything
import ConcreteNormal.PreRenderable
import Blender
import Triangulation
import SimplicialPartialQuotient
import DisjointUnion
-- One-tetrahedron triangulation with face ABC glued to DAB.
tr = mkTriangulation 1 [ (0 ./ tABC, 0 ./ oDAB) ]
spqwc = oneTetWithDefaultCoords tr show
-- Standard coordinates of the four normal triangle types of tet 0.
ntA' = standardCoordinates (0 ./ ntA)
ntB' = standardCoordinates (0 ./ ntB)
ntC' = standardCoordinates (0 ./ ntC)
ntD' = standardCoordinates (0 ./ ntD)
-- NOTE(review): unused below, and the name says q_ab while the
-- coordinate is Q_ad — confirm which quad type was intended.
q_ab' = standardCoordinates (0 ./ Q_ad)
-- Doubled sum of the four triangle types: the surface to render.
ns = 2 *^ sumV [ ntA', ntB', ntC', ntD'
]
-- Render the tetrahedron and the normal surface together in Blender.
main = testBlender
(defaultScene
(fromSpqwc spqwc
`disjointUnion`
fromIntegerNormalSurface spqwc ns))
| DanielSchuessler/hstri | test_concreteNormal.hs | gpl-3.0 | 686 | 0 | 10 | 190 | 188 | 103 | 85 | 19 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Analytics.Management.ClientId.HashClientId
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Hashes the given Client ID.
--
-- /See:/ <https://developers.google.com/analytics/ Google Analytics API Reference> for @analytics.management.clientId.hashClientId@.
module Network.Google.Resource.Analytics.Management.ClientId.HashClientId
(
-- * REST Resource
ManagementClientIdHashClientIdResource
-- * Creating a Request
, managementClientIdHashClientId
, ManagementClientIdHashClientId
-- * Request Lenses
, mcihciPayload
) where
import Network.Google.Analytics.Types
import Network.Google.Prelude
-- | A resource alias for @analytics.management.clientId.hashClientId@ method which the
-- 'ManagementClientIdHashClientId' request conforms to.
-- | A resource alias for @analytics.management.clientId.hashClientId@ method which the
-- 'ManagementClientIdHashClientId' request conforms to.
-- The request is a POST of a JSON 'HashClientIdRequest' body to
-- @analytics/v3/management/clientId:hashClientId@.
type ManagementClientIdHashClientIdResource =
     "analytics" :>
       "v3" :>
         "management" :>
           "clientId:hashClientId" :>
             QueryParam "alt" AltJSON :>
               ReqBody '[JSON] HashClientIdRequest :>
                 Post '[JSON] HashClientIdResponse

-- | Hashes the given Client ID.
--
-- /See:/ 'managementClientIdHashClientId' smart constructor.
newtype ManagementClientIdHashClientId =
  ManagementClientIdHashClientId'
    { _mcihciPayload :: HashClientIdRequest
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Create a 'ManagementClientIdHashClientId' from its only field, the
-- request payload.  Use the 'mcihciPayload' lens to modify it afterwards.
managementClientIdHashClientId
    :: HashClientIdRequest -- ^ 'mcihciPayload'
    -> ManagementClientIdHashClientId
managementClientIdHashClientId = ManagementClientIdHashClientId'
-- | Multipart request metadata.
mcihciPayload :: Lens' ManagementClientIdHashClientId HashClientIdRequest
mcihciPayload
  = lens _mcihciPayload
      (\ s a -> s{_mcihciPayload = a})

-- | Issue the request against the Analytics service; requires either the
-- analytics.edit or analytics.readonly OAuth scope.
instance GoogleRequest ManagementClientIdHashClientId
         where
        type Rs ManagementClientIdHashClientId =
             HashClientIdResponse
        type Scopes ManagementClientIdHashClientId =
             '["https://www.googleapis.com/auth/analytics.edit",
               "https://www.googleapis.com/auth/analytics.readonly"]
        requestClient ManagementClientIdHashClientId'{..}
          = go (Just AltJSON) _mcihciPayload analyticsService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy ManagementClientIdHashClientIdResource)
                      mempty
| brendanhay/gogol | gogol-analytics/gen/Network/Google/Resource/Analytics/Management/ClientId/HashClientId.hs | mpl-2.0 | 3,312 | 0 | 13 | 689 | 312 | 192 | 120 | 54 | 1 |
module Main where
import Text.JSON (fromFile)
import System.Environment (getArgs)
import System.IO (hPutStr, stderr)
-- | Entry point: parse the JSON file named by the first command-line
-- argument, or print a usage message to stderr when none is given.
-- Extra arguments are ignored, as before.
main :: IO ()
main = do
  args <- getArgs
  case args of
    -- Replaces the previous null/head pair with a total pattern match.
    []         -> hPutStr stderr "json: expecting path to JSON file\n"
    (path : _) -> parse path
-- | Parse the file at @path@ and print either the parse error or the
-- resulting JSON value to stdout.
parse :: String -> IO ()
parse path = fromFile path >>= either print print
| mharrys/json | cli/Main.hs | lgpl-2.1 | 425 | 0 | 10 | 113 | 150 | 75 | 75 | 16 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module AtomTable
( AtomTable
, empty
, fromList
, add
, merge
, listNames
, lookupByName
, lookupByCode
) where
import qualified Data.ByteString as B
import Data.Int
import Data.List as List
import ETerm (AtomNo (..))
-- | A table mapping atom names to numeric codes.  The 'Int32' is the next
-- code to assign; the association list holds (name, code) pairs, most
-- recently added first.
data AtomTable = AT !Int32 [(B.ByteString, AtomNo)] deriving Show

-- | The table containing no atoms.
empty :: AtomTable
empty = AT 0 []

-- | Build a table from names, numbering them in order; duplicates keep
-- their first code.
fromList :: [B.ByteString] -> AtomTable
fromList = List.foldl' add empty
-- | Insert a name, assigning it the next free code.  A name that is
-- already present leaves the table unchanged.
add :: AtomTable -> B.ByteString -> AtomTable
add at@(AT next assoc) name =
  case lookupByNameM at name of
    Just _  -> at
    Nothing -> AT (next + 1) ((name, AtomNo next) : assoc)

-- | Fold every name of the second table into the first, preserving the
-- first table's existing codes.
merge :: AtomTable -> AtomTable -> AtomTable
merge base extra = List.foldl' add base (listNames extra)
-- | All names currently in the table (most recently added first).
listNames :: AtomTable -> [B.ByteString]
listNames (AT _ assoc) = fmap fst assoc

-- | Look up a name's code, if present.
lookupByNameM :: AtomTable -> B.ByteString -> Maybe AtomNo
lookupByNameM (AT _ assoc) name = List.lookup name assoc

-- | Look up a name's code; raises an 'error' for unknown names.
lookupByName :: AtomTable -> B.ByteString -> AtomNo
lookupByName at name =
  maybe (error $ "AT.lookupByName: atom not found: " ++ show name)
        id
        (lookupByNameM at name)
-- | Reverse lookup: the name carrying a given code, if any.
lookupByCodeM :: AtomTable -> AtomNo -> Maybe B.ByteString
lookupByCodeM (AT _ assoc) code =
  fmap fst (List.find (\(_, c) -> c == code) assoc)

-- | Reverse lookup; raises an 'error' for unknown codes.
lookupByCode :: AtomTable -> AtomNo -> B.ByteString
lookupByCode at no =
  maybe (error $ "AT.lookupByCode: atom not found: " ++ show no)
        id
        (lookupByCodeM at no)
| kolmodin/herl | AtomTable.hs | apache-2.0 | 1,526 | 0 | 12 | 342 | 565 | 299 | 266 | 44 | 2 |
--
-- Minio Haskell SDK, (C) 2017 Minio, Inc.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
--
module Network.Minio.PutObject
(
putObjectInternal
, ObjectData(..)
, selectPartSizes
, copyObjectInternal
, selectCopyRanges
, minPartSize
) where
import qualified Data.Conduit as C
import qualified Data.Conduit.Binary as CB
import qualified Data.Conduit.Combinators as CC
import qualified Data.Conduit.List as CL
import qualified Data.List as List
import Lib.Prelude
import Network.Minio.Data
import Network.Minio.Errors
import Network.Minio.S3API
import Network.Minio.Utils
-- | max obj size is 5TiB
maxObjectSize :: Int64
maxObjectSize = 5 * 1024 * 1024 * oneMiB

-- | minimum size of parts used in multipart operations.
minPartSize :: Int64
minPartSize = 64 * oneMiB

-- | One mebibyte, in bytes.
oneMiB :: Int64
oneMiB = 1024 * 1024

-- | Upper bound on the number of parts in a multipart upload.
maxMultipartParts :: Int64
maxMultipartParts = 10000
-- | A data-type to represent the source data for an object. A
-- file-path or a producer-conduit may be provided.
--
-- For files, a size may be provided - this is useful in cases when
-- the file size cannot be automatically determined or if only some
-- prefix of the file is desired.
--
-- For streams also, a size may be provided. This is useful to limit
-- the input - if it is not provided, upload will continue until the
-- stream ends or the object reaches `maxObjectsize` size.
data ObjectData m =
    ODFile FilePath (Maybe Int64) -- ^ Takes filepath and optional size.
  | ODStream (C.Producer m ByteString) (Maybe Int64) -- ^ Pass size in bytes as maybe if known.
-- | Put an object from 'ObjectData'. This high-level API handles
-- objects of all sizes, and even if the object size is unknown.
--
-- Strategy: streams always take the sequential multipart path.  For
-- files, a single PUT is used up to 'minPartSize'; larger seekable
-- files are uploaded with parallel multipart, non-seekable ones
-- sequentially.
putObjectInternal :: Bucket -> Object -> ObjectData Minio -> Minio ETag
putObjectInternal b o (ODStream src sizeMay) = sequentialMultipartUpload b o sizeMay src
putObjectInternal b o (ODFile fp sizeMay) = do
  -- Probe the file for seekability and size; any failure downgrades to
  -- (non-seekable, unknown size).
  hResE <- withNewHandle fp $ \h ->
    liftM2 (,) (isHandleSeekable h) (getFileSize h)
  (isSeekable, handleSizeMay) <- either (const $ return (False, Nothing)) return
                                 hResE

  -- prefer given size to queried size.
  let finalSizeMay = listToMaybe $ catMaybes [sizeMay, handleSizeMay]

  case finalSizeMay of
    -- unable to get size, so assume non-seekable file and max-object size
    Nothing -> sequentialMultipartUpload b o (Just maxObjectSize) $
               CB.sourceFile fp

    -- got file size, so check for single/multipart upload
    Just size ->
      -- CONSISTENCY FIX: the single-PUT threshold previously hard-coded
      -- @64 * oneMiB@, duplicating the 'minPartSize' constant it is meant
      -- to track (same value, so behavior is unchanged).
      if | size <= minPartSize -> either throwM return =<<
             withNewHandle fp (\h -> putObjectSingle b o [] h 0 size)
         | size > maxObjectSize -> throwM $ MErrVPutSizeExceeded size
         | isSeekable -> parallelMultipartUpload b o fp size
         | otherwise -> sequentialMultipartUpload b o (Just size) $
                        CB.sourceFile fp
-- | Select part sizes - the logic is that the minimum part-size will
-- be 64MiB.  Returns (partNumber, offset, size) triples covering
-- exactly @size@ bytes; the last part absorbs the remainder.  The part
-- size grows above 'minPartSize' only when needed to stay within
-- 'maxMultipartParts'.
selectPartSizes :: Int64 -> [(PartNumber, Int64, Int64)]
selectPartSizes size = uncurry (List.zip3 [1..]) $
                       List.unzip $ loop 0 size
  where
    ceil :: Double -> Int64
    ceil = ceiling
    partSize = max minPartSize (ceil $ fromIntegral size /
                                fromIntegral maxMultipartParts)
    m = fromIntegral partSize
    -- loop emits (offset, length) pairs; the guard order makes the final
    -- short part inclusive of the remainder.
    loop st sz
      | st > sz = []
      | st + m >= sz = [(st, sz - st)]
      | otherwise = (st, m) : loop (st + m) sz

-- | Multipart upload of a seekable file, reading parts concurrently via
-- per-part handles.
parallelMultipartUpload :: Bucket -> Object -> FilePath -> Int64
                        -> Minio ETag
parallelMultipartUpload b o filePath size = do
  -- get a new upload id.
  uploadId <- newMultipartUpload b o []

  let partSizeInfo = selectPartSizes size

  -- perform upload with 10 threads
  uploadedPartsE <- limitedMapConcurrently 10
                    (uploadPart uploadId) partSizeInfo

  -- if there were any errors, rethrow exception.
  mapM_ throwM $ lefts uploadedPartsE

  -- if we get here, all parts were successfully uploaded.
  completeMultipartUpload b o uploadId $ rights uploadedPartsE
  where
    uploadPart uploadId (partNum, offset, sz) =
      withNewHandle filePath $ \h -> do
        let payload = PayloadH h offset sz
        putObjectPart b o uploadId partNum [] payload

-- | Upload multipart object from conduit source sequentially.  When the
-- size is unknown, part sizes are planned for 'maxObjectSize' and upload
-- simply stops when the stream ends.
sequentialMultipartUpload :: Bucket -> Object -> Maybe Int64
                          -> C.Producer Minio ByteString -> Minio ETag
sequentialMultipartUpload b o sizeMay src = do
  -- get a new upload id.
  uploadId <- newMultipartUpload b o []

  -- upload parts in loop
  let partSizes = selectPartSizes $ maybe maxObjectSize identity sizeMay
      (pnums, _, sizes) = List.unzip3 partSizes
  uploadedParts <- src
                   C..| chunkBSConduit sizes
                   C..| CL.map PayloadBS
                   C..| uploadPart' uploadId pnums
                   C.$$ CC.sinkList

  -- complete multipart upload
  completeMultipartUpload b o uploadId uploadedParts
  where
    -- Consume chunked payloads, pairing each with its part number and
    -- yielding the part-info results downstream.
    uploadPart' _ [] = return ()
    uploadPart' uid (pn:pns) = do
      payloadMay <- C.await
      case payloadMay of
        Nothing -> return ()
        Just payload -> do pinfo <- lift $ putObjectPart b o uid pn [] payload
                           C.yield pinfo
                           uploadPart' uid pns

-- | Copy an object using single or multipart copy strategy.
copyObjectInternal :: Bucket -> Object -> CopyPartSource
                   -> Minio ETag
copyObjectInternal b' o cps = do
  -- validate and extract the src bucket and object
  (srcBucket, srcObject) <- maybe
    (throwM $ MErrVInvalidSrcObjSpec $ cpSource cps)
    return $ cpsToObject cps

  -- get source object size with a head request
  (ObjectInfo _ _ _ srcSize) <- headObject srcBucket srcObject

  -- check that byte offsets are valid if specified in cps
  when (isJust (cpSourceRange cps) &&
        or [fst range < 0, snd range < fst range,
            snd range >= fromIntegral srcSize]) $
    throwM $ MErrVInvalidSrcObjByteRange range

  -- 1. If sz > 64MiB (minPartSize) use multipart copy, OR
  -- 2. If startOffset /= 0 use multipart copy
  let destSize = (\(a, b) -> b - a + 1 ) $
                 maybe (0, srcSize - 1) identity $ cpSourceRange cps
      startOffset = maybe 0 fst $ cpSourceRange cps
      endOffset = maybe (srcSize - 1) snd $ cpSourceRange cps

  if destSize > minPartSize || (endOffset - startOffset + 1 /= srcSize)
    then multiPartCopyObject b' o cps srcSize
    else fst <$> copyObjectSingle b' o cps{cpSourceRange = Nothing} []
  where
    -- Only consulted when a range is present; (0, 0) is a harmless filler.
    range = maybe (0, 0) identity $ cpSourceRange cps
-- | Given the input byte range of the source object, compute the
-- splits for a multipart copy object procedure. Minimum part size
-- used is minPartSize.
selectCopyRanges :: (Int64, Int64) -> [(PartNumber, (Int64, Int64))]
selectCopyRanges (st, end) =
  List.zipWith3 mkRange pns startOffsets partSizes
  where
    -- Shift each planned (offset, size) back into absolute coordinates,
    -- producing an inclusive byte range.
    mkRange pn off sz = (pn, (st + off, st + off + sz - 1))
    (pns, startOffsets, partSizes) =
      List.unzip3 $ selectPartSizes (end - st + 1)
-- | Perform a multipart copy object action. Since we cannot verify
-- existing parts based on the source object, there is no resuming
-- copy action support.  Parts are copied with up to 10 concurrent
-- server-side copy requests.
multiPartCopyObject :: Bucket -> Object -> CopyPartSource -> Int64
                    -> Minio ETag
multiPartCopyObject b o cps srcSize = do
  uid <- newMultipartUpload b o []

  -- Default to the whole object when no source range was given.
  let byteRange = maybe (0, fromIntegral $ srcSize - 1) identity $
                  cpSourceRange cps
      partRanges = selectCopyRanges byteRange
      partSources = map (\(x, y) -> (x, cps {cpSourceRange = Just y}))
                    partRanges

  copiedParts <- limitedMapConcurrently 10
                 (\(pn, cps') -> do
                     (etag, _) <- copyObjectPart b o cps' uid pn []
                     return (pn, etag)
                 )
                 partSources

  completeMultipartUpload b o uid copiedParts
| donatello/minio-hs | src/Network/Minio/PutObject.hs | apache-2.0 | 8,449 | 0 | 17 | 2,163 | 1,961 | 1,023 | 938 | -1 | -1 |
-----------------------------------------------------------------------------
-- Copyright 2012 Microsoft Corporation.
--
-- This is free software; you can redistribute it and/or modify it under the
-- terms of the Apache License, Version 2.0. A copy of the License can be
-- found in the file "license.txt" at the root of this distribution.
-----------------------------------------------------------------------------
{- Re-analyze a recursive binding group just after type checking.
Due to overloading, functions that seem mutually recursive may not
actually be mutually recursive after the types are resolved.
-}
-----------------------------------------------------------------------------
module Core.BindingGroups( regroup ) where
import qualified Data.Set as S
import qualified Data.Map as M
import Lib.Scc( scc ) -- determine strongly connected components
import Common.Failure(failure)
import Common.Name
import Core.Core
-- | For each definition name, the set of (definition) names it refers to.
type Deps = M.Map TName TNames
-- | A set of free term variables.
type FreeVar = TNames
-- | Re-analyze a recursive binding group and return a list of new
-- definition groups. After type checking it is possible that definitions
-- that seemed recursive are actually not mutually recursive.
regroup :: [Def] -> [DefGroup]
-- Singleton group: recursive iff the definition mentions itself, either
-- by its exact typed name or (for unqualified names) by plain name.
regroup [def]
  = let fvdef = fv def in
    if (S.member (TName (defName def) (defType def)) fvdef
        || defName def `elem` [name | name <- map getName (S.toList fvdef), not (isQualified name)] -- this is for recursive definitions where the type may still differ since we cannot improve too early for mutual recursive definitions (or we unify too eagerly, see "while")
       )
     then [DefRec [def]]
     else -- trace (" not recursive? " ++ show (defName def, defType def, map showTName (S.toList (fv def)))) $
          [DefNonRec def]
-- General case: build the dependency graph restricted to this group,
-- compute strongly connected components, and emit one group per SCC.
regroup defs
  = let defNames = map (\def -> TName (defName def) (defType def)) defs
        defMap = M.fromList (zip defNames defs)
        -- dependencies of each definition, restricted to names in this group
        deps = M.fromList (zip defNames (map (S.intersection (S.fromList defNames) . fv) defs))
        -- determine strongly connected components
        defOrder = scc [(name,S.toList fvs) | (name,fvs) <- M.toList deps]
        -- create a definition group: a singleton SCC is recursive only
        -- if its sole member depends on itself
        makeGroup names = case names of
                            [name] -> if S.member name (find name deps)
                                       then DefRec [find name defMap]
                                       else DefNonRec (find name defMap)
                            _ -> DefRec [find name defMap | name <- names]
    in -- trace (" new order: " ++ show (map (map showTName) defOrder)) $
       map makeGroup defOrder
-- | Look up a name in a map, failing loudly when it is absent.
find :: TName -> M.Map TName a -> a
find name nameMap =
    M.findWithDefault
      (failure ("Core.BindingGroups.find: could not find: " ++ show name))
      name nameMap
-- |
-- Module : $Header$
-- Copyright : (c) 2013-2014 Galois, Inc.
-- License : BSD3
-- Maintainer : cryptol@galois.com
-- Stability : provisional
-- Portability : portable
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE CPP #-}
module Cryptol.Symbolic.BitVector where
import Data.Bits
import Control.Monad (replicateM)
import System.Random
import Data.SBV.Bridge.Yices
import Data.SBV.Internals
import Data.SBV.BitVectors.Data
import Cryptol.Utils.Panic
-- BitVector type --------------------------------------------------------------
-- | A fixed-width bitvector constant: a signedness context, a bit
-- width, and the (non-negative) raw value.
data BitVector = BV { signedcxt :: Bool, width :: !Int, val :: !Integer }
  deriving (Eq, Ord, Show)
-- ^ Invariant: BV w x requires that 0 <= w and 0 <= x < 2^w.
-- | A mask with the low @w@ bits set, i.e. @2^w - 1@.
bitMask :: Int -> Integer
bitMask n = (1 `shiftL` n) - 1
-- | Smart constructor for bitvectors (unsigned context).
bv :: Int -> Integer -> BitVector
bv w x = sbv False w x

-- | Smart constructor with an explicit signedness flag; the value is
-- truncated to the given width.
sbv :: Bool -> Int -> Integer -> BitVector
sbv sgn w x = BV sgn w (x .&. bitMask w)
-- | Shift a value into the unsigned range by adding 2^w.
-- NOTE(review): the addition is unconditional, unlike 'signed' which
-- checks the sign bit — presumably callers only pass values in signed
-- normal form; verify at call sites.
unsigned :: Int -> Integer -> Integer
unsigned w x = x + (1 `shiftL` w)
-- | Reinterpret a w-bit value as signed: subtract 2^w when the sign
-- bit (bit w-1) is set, otherwise return the value unchanged.
signed :: Int -> Integer -> Integer
signed w x =
    if w > 0 && testBit x (w - 1)
       then x - bit w
       else x
-- | Return the common width of two bitvectors, panicking when the two
-- widths disagree.
same :: Int -> Int -> Int
same m n
  | m /= n    = panic "Cryptol.Symbolic.BitVector.same"
                  [ "BitVector size mismatch: " ++ show (m, n) ]
  | otherwise = m
-- | Casts between the signed and unsigned interpretation of a word of
-- the same width.  Concrete constants are converted directly; symbolic
-- values are rebuilt as an Extract over the full width, carrying the
-- flipped signedness in the result kind.
instance SignCast SWord SWord where
  -- Concrete case: reinterpret the stored integer as signed.
  signCast (SBV (KBounded _ w) (Left (cwVal -> (CWInteger x)))) =
    SBV k (Left (CW k (CWInteger (signed w x)))) where
      k = KBounded True w
  -- Symbolic case: full-width Extract with a signed result kind.
  signCast x@(SBV (KBounded _ w) _) = SBV k (Right (cache y)) where
    k = KBounded True w
    y st = do xsw <- sbvToSW st x
              newExpr st k (SBVApp (Extract (intSizeOf x-1) 0) [xsw])
  signCast _ = panic "Cryptol.Symbolic.BitVector"
                 [ "signCast called on non-bitvector value" ]
  -- Concrete case: reinterpret the stored integer as unsigned.
  unsignCast (SBV (KBounded _ w) (Left (cwVal -> (CWInteger x)))) =
    SBV k (Left (CW k (CWInteger (unsigned w x)))) where
      k = KBounded False w
  -- Symbolic case: full-width Extract with an unsigned result kind.
  unsignCast x@(SBV (KBounded _ w) _) = SBV k (Right (cache y)) where
    k = KBounded False w
    y st = do xsw <- sbvToSW st x
              newExpr st k (SBVApp (Extract (intSizeOf x-1) 0) [xsw])
  unsignCast _ = panic "Cryptol.Symbolic.BitVector"
                   [ "unsignCast called on non-bitvector value" ]
-- | Arithmetic on bitvectors: operands must have equal widths (checked
-- by 'same'), and results are truncated to that width by 'sbv'.
instance Num BitVector where
  -- There is no width information in a bare literal, so this cannot work.
  fromInteger n = panic "Cryptol.Symbolic.BitVector"
                    [ "fromInteger " ++ show n ++ " :: BitVector" ]
  BV s m x + BV _ n y = sbv s (same m n) (x + y)
  BV s m x - BV _ n y = sbv s (same m n) (x - y)
  BV s m x * BV _ n y = sbv s (same m n) (x * y)
  negate (BV s m x) = sbv s m (- x)
  abs = id
  signum (BV s m _) = sbv s m 1
-- | Bitwise operations; binary operators again require equal widths.
instance Bits BitVector where
  BV s m x .&. BV _ n y = BV s (same m n) (x .&. y)
  BV s m x .|. BV _ n y = BV s (same m n) (x .|. y)
  BV s m x `xor` BV _ n y = BV s (same m n) (x `xor` y)
  -- Complement within the width: xor with the all-ones mask.
  complement (BV s m x) = BV s m (x `xor` bitMask m)
  shift (BV s m x) i = sbv s m (shift x i)
  -- Rotation by i (mod m): combine the two shifted halves.
  rotate (BV s m x) i = sbv s m (shift x j .|. shift x (j - m))
    where j = i `mod` m
  -- Like 'fromInteger': no width available for a bare bit.
  bit _i = panic "Cryptol.Symbolic.BitVector"
             [ "bit: can't determine width" ]
  setBit (BV s m x) i = BV s m (setBit x i)
  clearBit (BV s m x) i = BV s m (clearBit x i)
  complementBit (BV s m x) i = BV s m (complementBit x i)
  testBit (BV _ _ x) i = testBit x i
  bitSize (BV _ m _) = m
#if __GLASGOW_HASKELL__ >= 708
  bitSizeMaybe (BV _ m _) = Just m
#endif
  isSigned (BV s _ _) = s
  popCount (BV _ _ x) = popCount x
--------------------------------------------------------------------------------
-- SBV class instances

-- | A symbolic word over our 'BitVector' type.
type SWord = SBV BitVector
instance HasKind BitVector where
  kindOf (BV s w _) = KBounded s w
instance SymWord BitVector where
  literal (BV s w x) = SBV k (Left (mkConstCW k x))
    where k = KBounded s w
  fromCW c@(CW (KBounded s w) _) = BV s w (fromCW c)
  fromCW c = panic "Cryptol.Symbolic.BitVector"
               [ "fromCW: Unsupported non-integral value: " ++ show c ]
  -- Variables are created via 'mkSymBitVector' instead, which knows the width.
  mkSymWord _ _ = panic "Cryptol.Symbolic.BitVector"
                    [ "mkSymWord unimplemented for type BitVector" ]
instance SIntegral BitVector where
-- | Build a word from its bits, least-significant first.
instance FromBits (SBV BitVector) where
  fromBitsLE bs = go (literal (bv (length bs) 0)) 0 bs
    where go !acc _ [] = acc
          go !acc !i (x:xs) = go (ite x (setBit acc i) acc) (i+1) xs
-- | Division on concrete bitvectors; results use an unsigned context.
instance SDivisible BitVector where
  sQuotRem (BV _ m x) (BV _ n y) = (BV False w q, BV False w r)
    where (q, r) = quotRem x y
          w = same m n
  sDivMod (BV _ m x) (BV _ n y) = (BV False w q, BV False w r)
    where (q, r) = divMod x y
          w = same m n
-- | Symbolic division, delegated to SBV's generic lifting.
instance SDivisible (SBV BitVector) where
  sQuotRem = liftQRem
  sDivMod = liftDMod
-- | Extract bits i..j (i >= j, both inclusive) of a symbolic word,
-- producing a word of width i-j+1.  An empty range (i < j) yields the
-- constant 0 of that (non-positive) width; concrete words are sliced
-- directly, symbolic words via an Extract expression.
extract :: Int -> Int -> SWord -> SWord
extract i j x@(SBV (KBounded s _) _) =
  case x of
    _ | i < j -> SBV k (Left (CW k (CWInteger 0)))
    SBV _ (Left cw) ->
      case cw of
        -- Concrete: shift the low bit into place; normCW truncates to k.
        CW _ (CWInteger v) -> SBV k (Left (normCW (CW k (CWInteger (v `shiftR` j)))))
        _ -> panic "Cryptol.Symbolic.BitVector.extract" [ "non-integer concrete word" ]
    -- Symbolic: emit an Extract node.
    _ -> SBV k (Right (cache y))
      where y st = do sw <- sbvToSW st x
                      newExpr st k (SBVApp (Extract i j) [sw])
  where
    k = KBounded s (i - j + 1)
extract _ _ _ = panic "Cryptol.Symbolic.BitVector.extract" [ "non-bitvector value" ]
-- | Concatenate two symbolic words, with @x@ providing the high bits
-- and @y@ the low bits; the result is always given an unsigned kind.
cat :: SWord -> SWord -> SWord
-- Zero-width operands are identity elements.
cat x y | bitSize x == 0 = y
        | bitSize y == 0 = x
-- Both concrete: shift x past y's width and or the values together.
cat x@(SBV _ (Left a)) y@(SBV _ (Left b)) =
  case (a, b) of
    (CW _ (CWInteger m), CW _ (CWInteger n)) ->
      SBV k (Left (CW k (CWInteger ((m `shiftL` (bitSize y) .|. n)))))
    _ -> panic "Cryptol.Symbolic.BitVector.cat" [ "non-integer concrete word" ]
  where k = KBounded False (bitSize x + bitSize y)
-- At least one symbolic operand: emit a Join expression.
cat x y = SBV k (Right (cache z))
  where k = KBounded False (bitSize x + bitSize y)
        z st = do xsw <- sbvToSW st x
                  ysw <- sbvToSW st y
                  newExpr st k (SBVApp Join [xsw, ysw])
-- | Generate a random concrete bitvector literal of width w by drawing
-- w independent random bits.
randomSBVBitVector :: Int -> IO (SBV BitVector)
randomSBVBitVector w = do
  bs <- replicateM w randomIO
  -- Assemble the Bools into an Integer: bit i is set iff draw i was True.
  let x = sum [ bit i | (i, b) <- zip [0..] bs, b ]
  return (literal (bv w x))
-- | Allocate a fresh symbolic bitvector of the given width, with an
-- optional quantifier and an optional name.
mkSymBitVector :: Maybe Quantifier -> Maybe String -> Int -> Symbolic (SBV BitVector)
mkSymBitVector q mnm n =
  mkSymSBVWithRandom (randomSBVBitVector n) q (KBounded False n) mnm

-- | Universally quantified, named bitvector variable.
forallBV :: String -> Int -> Symbolic (SBV BitVector)
forallBV nm = mkSymBitVector (Just ALL) (Just nm)

-- | Universally quantified, anonymous bitvector variable.
forallBV_ :: Int -> Symbolic (SBV BitVector)
forallBV_ = mkSymBitVector (Just ALL) Nothing

-- | Existentially quantified, named bitvector variable.
existsBV :: String -> Int -> Symbolic (SBV BitVector)
existsBV nm = mkSymBitVector (Just EX) (Just nm)

-- | Existentially quantified, anonymous bitvector variable.
existsBV_ :: Int -> Symbolic (SBV BitVector)
existsBV_ = mkSymBitVector (Just EX) Nothing
| TomMD/cryptol | src/Cryptol/Symbolic/BitVector.hs | bsd-3-clause | 7,007 | 0 | 21 | 1,993 | 3,043 | 1,519 | 1,524 | 150 | 4 |
{-# OPTIONS_GHC -funbox-strict-fields #-}
-- |
-- Module : Control.Concurrent.NamedLock
-- Copyright : (c) Thomas Schilling 2009
-- License : BSD-style
--
-- Maintainer : nominolo@googlemail.com
-- Stability : experimental
-- Portability : portable
--
-- This module implements \"named locks\".
--
-- A named lock is like a normal lock (@MVar ()@) but is created
-- on demand. This is useful when you have a potentially infinite
-- number of resources that should not be used concurrently.
--
-- For example, in a web-server you might create a new lock for each
-- database query so that the same query is only run once.
--
-- Named locks are allocated in a 'LockPool'. Names are arbitrary,
-- well-behaved instances of the 'Ord' class.
--
module Control.Concurrent.NamedLock
( -- * Creating Lock Pools
newLockPool, LockPool,
-- * Working with Named Locks
grabNamedLock, releaseNamedLock, withNamedLock )
where
import Control.Concurrent
import qualified Data.Map as M
import Control.Exception ( block, unblock, onException )
-- | A pool of named locks: an 'MVar'-protected map from lock names to
-- per-name lock state.
newtype LockPool name = LockPool (MVar (M.Map name NLItem))
-- | Per-name lock state: the number of threads interested in the lock
-- (a reference count) and the 'MVar' that represents the lock itself.
data NLItem = NLItem {-# UNPACK #-} !Int
                     {-# UNPACK #-} !(MVar ())
-- | Create a new, empty, lock pool.
newLockPool :: IO (LockPool name)
newLockPool = fmap LockPool (newMVar M.empty)
-- | Grab the lock with given name.  Blocks until the lock becomes
-- available.
--
-- Protocol: a name's lock is held exactly while its inner 'MVar' is
-- empty; a fresh entry is therefore created with 'newEmptyMVar'.
-- NOTE(review): 'block'/'unblock' are deprecated in newer 'base'
-- (superseded by 'mask'); kept here to match the rest of the module.
grabNamedLock :: Ord name => LockPool name -> name -> IO ()
grabNamedLock (LockPool mvar) name = block $ do
  mp <- takeMVar mvar
  case M.lookup name mp of
    Nothing -> do
      -- No one currently holds the lock named 'name', so we create it.
      name_mvar <- newEmptyMVar
      let mp' = M.insert name (NLItem 1 name_mvar) mp
      putMVar mvar mp'
    Just (NLItem ctr name_mvar) -> do
      -- Someone is currently holding the lock.
      --
      -- 1. Increase the reference counter.
      let mp' = M.insert name (NLItem (ctr + 1) name_mvar) mp
      -- Integer overflow is possible in principle, but that would
      -- imply to have (maxBound :: Int) threads contending for
      -- the same lock, which seems very unlikely.
      -- 2. Release the outer lock.
      putMVar mvar mp'
      -- 3. Finally, wait for the lock to become available.
      takeMVar name_mvar
-- | Release the lock with the given name.
--
-- The released lock must have previously been grabbed via
-- 'grabNamedLock'.  Calling this for a name with no pool entry is a
-- programming error and raises via 'error'.
releaseNamedLock :: Ord name => LockPool name -> name -> IO ()
releaseNamedLock (LockPool mvar) name = block $ do
  mp <- takeMVar mvar
  case M.lookup name mp of
    Nothing -> do
      -- Restore the pool state before reporting the misuse.
      putMVar mvar mp
      error $ "releaseNamedLock: cannot release non-existent lock."
    Just (NLItem ctr name_mvar) -> do
      -- We must not delete the lock before every thread that was
      -- trying to get it has released it. We use a reference counter
      -- to keep track of the number of threads that try to grab the
      -- lock.
      let mp'
            | ctr > 1 = M.insert name (NLItem (ctr - 1) name_mvar) mp
            | otherwise = M.delete name mp
      putMVar mvar mp'
      -- Release the lock. This will never block, since no two
      -- threads can write to the lock without having a reader
      -- waiting.
      putMVar name_mvar ()
-- | Hold the lock with the given name while running the action.
--
-- The lock is grabbed before the action starts and released when the
-- action finishes — whether it returns normally or throws an exception
-- (in which case the exception is propagated).  Returns the result of
-- the action.
withNamedLock :: Ord name => LockPool name -> name -> IO a -> IO a
withNamedLock pool name action = block $ do
  grabNamedLock pool name
  -- Run the action with async exceptions unblocked; if it throws, make
  -- sure the lock is released before the exception propagates.
  r <- unblock action `onException` releaseNamedLock pool name
  -- BUG FIX: the original only released the lock on the exception path,
  -- so a successful action left the lock held forever and every later
  -- 'grabNamedLock' on the same name deadlocked.
  releaseNamedLock pool name
  return r
{-
-- Use this for testing.
main = do
lpool <- newLockPool
sequence_ (replicate 20 (forkIO (worker lpool =<< myThreadId)))
worker lpool =<< myThreadId
where
lock_names = ["a", "b", "c", "d", "e"]
num_names = length lock_names
worker lpool tid = do
n <- (lock_names !!) `fmap` randomRIO (0, num_names - 1)
putStrLn $ show tid ++ ": grabbing " ++ show n
grabNamedLock lpool n
--threadDelay 1000000
putStrLn $ show tid ++ ": releasing " ++ show n
releaseNamedLock lpool n
worker lpool tid
-}
| nominolo/named-lock | src/Control/Concurrent/NamedLock.hs | bsd-3-clause | 4,252 | 0 | 21 | 1,069 | 627 | 330 | 297 | 42 | 2 |
{-# LANGUAGE RecordWildCards #-}
module BV.Util(mod2,
zero,
mkConst,
tConst,
termExt,
varToHaskell,
termToHaskell,
atomToHaskell,
constSlice,
constMul,
constInvert,
constNeg,
constConcat,
mkCAtomConj,
mkCAtom,
ctermPlus,
ctermMinus,
ctermMul,
ctermUMinus,
ctermSlice,
ctermSubst,
catomSolve) where
import Data.Bits
import Data.List
import Data.Maybe
import Data.Tuple.HT
import Math.NumberTheory.Moduli
import Util hiding (trace)
import BV.Types
import Debug.Trace
-- | @mod2 x w@ reduces @x@ modulo @2^w@ (keeps the low @w@ bits, with
-- a non-negative result).
mod2 :: Integer -> Int -> Integer
mod2 x w = x `mod` bit w
-- | The all-zero constant of the given width.
zero :: Int -> Const
zero = Const 0

-- | Build a constant, truncating the value to the given width.
mkConst :: Integer -> Int -> Const
mkConst i w = Const (mod2 i w) w

-- | Extract bits [l..h] of an integer as a constant of width h-l+1.
constSlice :: Integer -> (Int, Int) -> Const
constSlice c (l, h) =
    Const (sum [ bit (i - l) | i <- [l .. h], testBit c i ]) (h - l + 1)
-- | Multiply a constant by an integer coefficient, truncated to width w.
constMul :: Integer -> Const -> Int -> Const
constMul k cn w = mkConst (k * cVal cn) w

-- | Multiplicative inverse modulo 2^w, if it exists.
constInvert :: Integer -> Int -> Maybe Integer
constInvert i w = invertMod i (bit w)

-- | Bitwise complement of a constant, within its width.
constNeg :: Const -> Const
constNeg (Const c w) = mkConst (mod2 (complement c) w) w

-- | Concatenate two constants, the first occupying the low bits.
constConcat :: Const -> Const -> Const
constConcat lo hi =
    mkConst (cVal lo + shiftL (cVal hi) (width lo)) (width lo + width hi)
-- | A constant term of the given value and width.
tConst :: Integer -> Int -> Term
tConst i w = TConst (mkConst i w)

-- | Zero-extend a term to width w (no-op if it is already wide enough).
termExt :: Term -> Int -> Term
termExt t w =
    if width t >= w
       then t
       else TConcat [t, tConst 0 (w - width t)]
-- | Haskell-source rendering of a relation operator.
relToHaskell :: Rel -> String
relToHaskell r = case r of
                   Eq  -> ".=="
                   Neq -> "./="
                   Lt  -> ".<"
                   Lte -> ".<="

-- | Haskell-source rendering of a variable.
varToHaskell :: Var -> String
varToHaskell v = "Var \"" ++ vName v ++ "\" " ++ show (vWidth v)
-- | Render a 'Term' as Haskell source text (suitable for pasting back
-- into code that uses this module's constructors/combinators).
termToHaskell :: Term -> String
termToHaskell (TConst c) = "tConst " ++ (show $ cVal c) ++ " " ++ (show $ cWidth c)
termToHaskell (TVar v) = "TVar (" ++ varToHaskell v ++ ")"
termToHaskell (TSlice t (l,h)) = "(" ++ termToHaskell t ++ ") .: (" ++ show l ++ "," ++ show h ++ ")"
termToHaskell (TConcat ts) = "TConcat [" ++ (intercalate ", " $ map termToHaskell ts) ++ "]"
termToHaskell (TNeg t) = "TNeg (" ++ termToHaskell t ++ ")"
termToHaskell (TPlus ts) = "TPlus [" ++ (intercalate ", " $ map termToHaskell ts) ++ "]"
-- NOTE(review): no space between the multiplier and '(' below; the
-- output is still valid Haskell application syntax, but verify the
-- asymmetry with the other cases is intentional.
termToHaskell (TMul c t w) = "TMul " ++ show c ++ "(" ++ termToHaskell t ++ ") " ++ show w
-- | Render an 'Atom' as Haskell source text using the '.==' style
-- operators produced by 'relToHaskell'.
atomToHaskell :: Atom -> String
atomToHaskell (Atom r t1 t2) = "(" ++ termToHaskell t1 ++ ") " ++ relToHaskell r ++ " (" ++ termToHaskell t2 ++ ")"
-- assumes that terms have been gathered already
-- | Put the variable terms of a CTerm into canonical (variable) order.
ctermOrder :: CTerm -> CTerm
ctermOrder (CTerm vs c) = CTerm (sortOn snd vs) c
-- | Merge terms over the same variable: coefficients are summed modulo
-- 2^w, and terms whose combined coefficient is 0 are dropped.
ctermGather :: CTerm -> CTerm
ctermGather (CTerm ts c) = CTerm ts' c
    where w = width c
          ts' = filter ((/= 0) . fst)
                $ map (\ts0@((_,v):_) -> ((sum $ map fst ts0) `mod2` w, v))
                $ sortAndGroup snd ts
-- | Sum a list of CTerms at width w, normalising (gather + order) the
-- result.
ctermPlus :: [CTerm] -> Int -> CTerm
ctermPlus ts w = ctermOrder (ctermGather (ctermPlus' ts w))

-- | Raw (un-normalised) sum: concatenates the variable lists and adds
-- the constants modulo 2^w.  All inputs must be at least w bits wide.
ctermPlus' :: [CTerm] -> Int -> CTerm
ctermPlus' ts w
  | any (\t -> width t < w) ts = error "BV.ctermPlus': cannot expand term width"
  | otherwise = CTerm (concatMap ctVars ts)
                      (mkConst (sum (map (cVal . ctConst) ts)) w)
-- | Subtraction, performed at the wider of the two operand widths.
ctermMinus :: CTerm -> CTerm -> CTerm
ctermMinus a b = ctermPlus [a, ctermUMinus b] (max (width a) (width b))

-- | Multiply a CTerm by an integer constant at width w, normalising.
ctermMul :: CTerm -> Integer -> Int -> CTerm
ctermMul t k w = ctermOrder (ctermGather (ctermMul' t k w))

-- | Raw multiplication by a constant: scales every coefficient and the
-- constant part modulo 2^w.
ctermMul' :: CTerm -> Integer -> Int -> CTerm
ctermMul' (CTerm vs cn) k w =
    CTerm [ (mod2 (i * k) w, v) | (i, v) <- vs ] (mkConst (cVal cn * k) w)
-- | Substitute the term @ct@ for the variable @v@ inside @ct'@: each
-- occurrence of @v@ with coefficient i is replaced by @ct@ scaled by i,
-- and all pieces are re-summed at the width of @ct'@.  It is an error
-- for @ct@ to be narrower than @ct'@ (we cannot widen terms here).
ctermSubst :: SVar -> CTerm -> CTerm -> CTerm
ctermSubst v ct ct'@(CTerm vs c) =
    ctermPlus
    ((map (\(i,v') -> if' (v'/=v)
                          (CTerm [(i,v')] (zero w)) $
                          if' (width ct < width ct')
                              (error $ "BV.ctermSubst " ++ show v ++ " " ++ show ct ++ " " ++ show ct' ++ ": cannot expand term width")
                              (ctermMul ct i w)) vs)
     ++ [CTerm [] c]) w
    where w = width ct'
-- | Arithmetic negation: multiply by -1 at the term's own width.
ctermUMinus :: CTerm -> CTerm
ctermUMinus ct = ctermMul ct (negate 1) (width ct)
-- | Take bits [l..h] of a CTerm.  The slice is only expressible as a
-- CTerm again when at most one summand (a variable term or the
-- constant) has bits below l, and such a variable term has coefficient
-- 1 — otherwise low-order carries would leak into the slice.  Returns
-- Nothing when the slice cannot be taken.
ctermSlice :: CTerm -> (Int,Int) -> Maybe CTerm
ctermSlice ct@(CTerm ts c) (l,h)
    | sliceable
    = Just $ CTerm (filter (\(_,(_,(ll,hh))) -> ll <= hh) (map sliceTerm ts))
                   (constSlice (cVal c) (l,h))
    | otherwise
    -- BUG FIX: this branch used to be @trace (error $ msg) Nothing@:
    -- printing the trace message forced the 'error', so the function
    -- crashed instead of returning Nothing as callers (e.g.
    -- 'catomSolve') expect.  Emit a plain trace message instead.
    = trace ("ctermSlice: cannot handle slice [" ++ show l ++ ":" ++ show h ++ "] of term " ++ show ct) Nothing
    where
    w = h - l + 1
    -- At most one summand has bits below l, and any variable term that
    -- does must have multiplier 1.
    sliceable = all (\(i,_) -> i `mod2` l == 0 || i == 1) ts
                && length (filter (\i -> i `mod2` l /= 0) (cVal c : map fst ts)) <= 1
    -- Slice one (coefficient, variable-slice) pair, shifting the
    -- coefficient down when it has no low bits, and shifting the
    -- variable's slice bounds otherwise.
    sliceTerm (i,(v,(l',_h)))
        | i `mod2` l == 0 = (i `shiftR` l, (v,(l', min _h (l'+h))))
        | otherwise       = (i `mod2` w, (v,(l'+l, min _h (l'+h))))
-- | Build an atom from two equal-width CTerms, simplifying where
-- possible: a 'Left' result means the atom is statically true/false.
catom :: Rel -> CTerm -> CTerm -> Either Bool CAtom
-- Both sides constant: evaluate the relation directly.
catom rel (CTerm [] c1) (CTerm [] c2) = Left $
    case rel of
         Eq -> cVal c1 == cVal c2
         Neq -> cVal c1 /= cVal c2
         Lt -> cVal c1 < cVal c2
         Lte -> cVal c1 <= cVal c2
-- Syntactically identical sides: the relation is reflexivity-determined.
catom rel ct1 ct2 | ct1 == ct2 = Left $
    case rel of
         Eq -> True
         Neq -> False
         Lt -> False
         Lte -> True
-- Inequalities whose sides share the same variable part: rewrite in
-- terms of the common variable sum (vterm) against a constant, using
-- the special values 0, 1 and -1 (all-ones) of the width.
catom rel ct1 ct2 | elem rel [Lt, Lte] && ctVars ct1 == ctVars ct2 =
    let cn1@(Const c1 _) = ctConst ct1
        cn2@(Const c2 _) = ctConst ct2
        w = width ct1
        vterm = CTerm (ctVars ct1) $ zero w
    in if' ((c1 == 0) && (cn2 == mkConst (-1) w)) (mkCAtom Eq vterm (CTerm [] $ zero w)) $
       if' ((c1 == 0) && (c2 == 1)) (mkCAtom Neq vterm (CTerm [] $ mkConst (-1) w)) $
       if' (c1 == 0) (mkCAtom Lt vterm (CTerm [] $ mkConst (-c2) w)) $
       if' ((c2 == 0) && (cn1 == mkConst (-1) w)) (mkCAtom Neq vterm (CTerm [] $ zero w)) $
       if' ((c2 == 0) && (c1 == 1)) (mkCAtom Eq vterm (CTerm [] $ mkConst (-1) w)) $
       if' (c2 == 0) (mkCAtom Lte (CTerm [] $ mkConst (-c1) w) vterm)
           (Right $ CAtom rel ct1 ct2)
-- Boundary constants: nothing is < 0; < 1 means == 0; the all-ones
-- value is the maximum, so it is never < anything and always <= .
catom Lt ct1 ct2 | (null $ ctVars ct2) && (cVal (ctConst ct2) == 0) = Left False
catom Lt ct1 ct2 | (null $ ctVars ct2) && (cVal (ctConst ct2) == 1) = mkCAtom Eq ct1 (CTerm [] $ zero $ width ct1)
catom Lt ct1 ct2 | (null $ ctVars ct1) && ((ctConst ct1) == mkConst (-1) (width ct1)) = Left False
catom Lte ct1 ct2 | (null $ ctVars ct1) && ((ctConst ct1) == mkConst (-1) (width ct1)) = mkCAtom Eq ct1 ct2
catom Lte ct1 ct2 | (null $ ctVars ct1) && (cVal (ctConst ct1) == 0) = Left True
catom Lte ct1 ct2 | (null $ ctVars ct2) && (cVal (ctConst ct2) == 0) = mkCAtom Eq ct1 ct2
catom Lte ct1 ct2 | (null $ ctVars ct2) && ((ctConst ct2) == mkConst (-1) (width ct2)) = Left True
-- No simplification applies.
catom rel ct1 ct2 = Right $ CAtom rel ct1 ct2
-- | Build a conjunction of atoms.  Returns Nothing when any conjunct
-- is statically false; statically-true conjuncts are dropped and
-- duplicates removed.
mkCAtomConj :: [(Rel, CTerm, CTerm)] -> Maybe [CAtom]
mkCAtomConj ins | any (== Left False) as = Nothing
                | otherwise = Just $ nub $ map fromRight as
    where as = filter (/= (Left True)) $ map (uncurry3 mkCAtom) ins
-- Move the first variable (in var ordering) to the left and
-- try to solve the equation wrt this var.
mkCAtom :: Rel -> CTerm -> CTerm -> Either Bool CAtom
mkCAtom rel ct1 ct2 | width ct1 /= width ct2 = error "BV.mkCAtom: cannot make an atom out of unequal-width terms"
                    | elem rel [Eq, Neq] =
                      -- For (in)equalities, normalise to (ct1 - ct2) R 0
                      -- and isolate the first variable on the left.
                      if null ctVars
                         then catom rel ct (CTerm [] $ zero $ width ct)
                         else Right $ catomInSolvedForm rel (head ctVars) (ctermUMinus $ CTerm (tail ctVars) ctConst)
                    | otherwise = catom rel ct1 ct2
    where ct@CTerm{..} = ctermPlus [ct1, ctermUMinus ct2] $ width ct1
-- | Given a (coefficient, variable) pair and the rest of the equation,
-- try to scale both sides by the coefficient's inverse mod 2^w so the
-- variable appears with coefficient 1; otherwise keep the raw form.
catomInSolvedForm :: Rel -> (Integer, SVar) -> CTerm -> CAtom
catomInSolvedForm rel (i, v) ct = maybe (CAtom rel (CTerm [(i,v)] (zero $ width ct)) ct)
                                        (\inv -> CAtom rel (CTerm [(1,v)] (zero $ width ct)) (ctermMul ct inv w))
                                        (constInvert i w)
    where w = width ct
-- Solve atom wrt given variable. If successful, returns the solution,
-- and additional atoms that are implied by the input atom, but not
-- by the solution.
-- (see Section 3.2 of "A decision procedure for bit-vector arithmetic")
--
-- Only Eq atoms that actually mention the variable are solvable.  The
-- variable's coefficient is decomposed as 2^pow2 * oddi; when pow2 is
-- 0 we can invert directly, otherwise the low pow2 bits of the
-- right-hand side must be 0 (the extra atom) and the remaining bits
-- are solved at the reduced width w' = w - pow2.
catomSolve :: SVar -> CAtom -> Maybe (Either Bool (CTerm, [CAtom]))
catomSolve v (CAtom rel ct1 ct2) | rel /= Eq = Nothing
                                 | null lhs = Nothing
                                 | pow2 == 0 = Just $ Right (ctermMul ctrhs inv w, [])
                                 | cas == Nothing = Just $ Left False
                                 | otherwise = fmap (\ct' -> Right (ctermMul ct' inv w', fromJust cas))
                                               $ ctermSlice ctrhs (pow2,w-1)
    where CTerm{..} = ctermPlus [ct1, ctermUMinus ct2] $ width ct1
          -- lhs: the (single) occurrence of v; rhs: everything else.
          (lhs, rhs) = partition ((== v) . snd) ctVars
          ctrhs = ctermUMinus $ CTerm rhs ctConst
          [(i,_)] = lhs
          w = width ct1
          (pow2, oddi) = oddevenDecomp i
          w' = w - pow2
          inv = fromJust $ constInvert oddi w'
          -- Side condition: the low pow2 bits of the RHS must be zero.
          cas = case mkCAtom Eq (CTerm [] $ zero w') (fromJust $ ctermSlice ctrhs (0, pow2-1)) of
                     Left True -> Just []
                     Left False -> Nothing
                     Right ca -> Just [ca]
-- decompose i into a product of a power of 2 and an odd number
oddevenDecomp :: Integer -> (Int, Integer)
oddevenDecomp i = go 0 i
  where
    go p n
      | odd n     = (p, n)
      | otherwise = go (p + 1) (n `div` 2)
| termite2/bv | BV/Util.hs | bsd-3-clause | 10,737 | 0 | 21 | 3,771 | 4,317 | 2,216 | 2,101 | 185 | 7 |
{-# LANGUAGE DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Config
-- Copyright : (c) David Himmelstrup 2005
-- License : BSD-like
--
-- Maintainer : lemmih@gmail.com
-- Stability : provisional
-- Portability : portable
--
-- Utilities for handling saved state such as known packages, known servers and
-- downloaded packages.
-----------------------------------------------------------------------------
module Distribution.Client.Config (
SavedConfig(..),
loadConfig,
getConfigFilePath,
showConfig,
showConfigWithComments,
parseConfig,
defaultCabalDir,
defaultConfigFile,
defaultCacheDir,
defaultCompiler,
defaultLogsDir,
defaultUserInstall,
baseSavedConfig,
commentSavedConfig,
initialSavedConfig,
configFieldDescriptions,
haddockFlagsFields,
installDirsFields,
withProgramsFields,
withProgramOptionsFields,
userConfigDiff,
userConfigUpdate,
createDefaultConfigFile,
remoteRepoFields
) where
import Distribution.Client.Types
( RemoteRepo(..), Username(..), Password(..), emptyRemoteRepo
, AllowOlder(..), AllowNewer(..), RelaxDeps(..)
)
import Distribution.Client.BuildReports.Types
( ReportLevel(..) )
import Distribution.Client.Setup
( GlobalFlags(..), globalCommand, defaultGlobalFlags
, ConfigExFlags(..), configureExOptions, defaultConfigExFlags
, InstallFlags(..), installOptions, defaultInstallFlags
, UploadFlags(..), uploadCommand
, ReportFlags(..), reportCommand
, showRepo, parseRepo, readRepo )
import Distribution.Utils.NubList
( NubList, fromNubList, toNubList, overNubList )
import Distribution.Simple.Compiler
( DebugInfoLevel(..), OptimisationLevel(..) )
import Distribution.Simple.Setup
( ConfigFlags(..), configureOptions, defaultConfigFlags
, HaddockFlags(..), haddockOptions, defaultHaddockFlags
, installDirsOptions, optionDistPref
, programDbPaths', programDbOptions
, Flag(..), toFlag, flagToMaybe, fromFlagOrDefault )
import Distribution.Simple.InstallDirs
( InstallDirs(..), defaultInstallDirs
, PathTemplate, toPathTemplate )
import Distribution.ParseUtils
( FieldDescr(..), liftField
, ParseResult(..), PError(..), PWarning(..)
, locatedErrorMsg, showPWarning
, readFields, warning, lineNo
, simpleField, listField, spaceListField
, parseFilePathQ, parseOptCommaList, parseTokenQ )
import Distribution.Client.ParseUtils
( parseFields, ppFields, ppSection )
import Distribution.Client.HttpUtils
( isOldHackageURI )
import qualified Distribution.ParseUtils as ParseUtils
( Field(..) )
import qualified Distribution.Text as Text
( Text(..), display )
import Distribution.Simple.Command
( CommandUI(commandOptions), commandDefaultFlags, ShowOrParseArgs(..)
, viewAsFieldDescr )
import Distribution.Simple.Program
( defaultProgramDb )
import Distribution.Simple.Utils
( die', notice, warn, lowercase, cabalVersion )
import Distribution.Compiler
( CompilerFlavor(..), defaultCompilerFlavor )
import Distribution.Verbosity
( Verbosity, normal )
import Distribution.Solver.Types.ConstraintSource
import Data.List
( partition, find, foldl', nubBy )
import Data.Maybe
( fromMaybe )
import Control.Monad
( when, unless, foldM, liftM )
import qualified Distribution.Compat.ReadP as Parse
( (<++), option )
import Distribution.Compat.Semigroup
import qualified Text.PrettyPrint as Disp
( render, text, empty )
import Text.PrettyPrint
( ($+$) )
import Text.PrettyPrint.HughesPJ
( text, Doc )
import System.Directory
( createDirectoryIfMissing, getAppUserDataDirectory, renameFile )
import Network.URI
( URI(..), URIAuth(..), parseURI )
import System.FilePath
( (<.>), (</>), takeDirectory )
import System.IO.Error
( isDoesNotExistError )
import Distribution.Compat.Environment
( getEnvironment )
import Distribution.Compat.Exception
( catchIO )
import qualified Paths_cabal_install
( version )
import Data.Version
( showVersion )
import Data.Char
( isSpace )
import qualified Data.Map as M
import Data.Function
( on )
import GHC.Generics ( Generic )
--
-- * Configuration saved in the config file
--
-- | Everything the user can customise in the saved config file,
-- grouped by the flag record each section feeds into.
data SavedConfig = SavedConfig {
    savedGlobalFlags :: GlobalFlags,
    savedInstallFlags :: InstallFlags,
    savedConfigureFlags :: ConfigFlags,
    savedConfigureExFlags :: ConfigExFlags,
    savedUserInstallDirs :: InstallDirs (Flag PathTemplate),
    savedGlobalInstallDirs :: InstallDirs (Flag PathTemplate),
    savedUploadFlags :: UploadFlags,
    savedReportFlags :: ReportFlags,
    savedHaddockFlags :: HaddockFlags
  } deriving Generic
-- | 'mempty' is the generically-derived empty value; 'mappend'
-- delegates to the 'Semigroup' instance.
instance Monoid SavedConfig where
  mempty = gmempty
  mappend = (<>)
-- | Merge two configs field by field.  For 'Flag'-valued fields the usual
-- 'mappend' applies (the second config, @b@, wins when both are set).  For
-- list-valued fields we deliberately do /not/ concatenate: see the long
-- comment in the @where@ clause below.
instance Semigroup SavedConfig where
  a <> b = SavedConfig {
    savedGlobalFlags       = combinedSavedGlobalFlags,
    savedInstallFlags      = combinedSavedInstallFlags,
    savedConfigureFlags    = combinedSavedConfigureFlags,
    savedConfigureExFlags  = combinedSavedConfigureExFlags,
    savedUserInstallDirs   = combinedSavedUserInstallDirs,
    savedGlobalInstallDirs = combinedSavedGlobalInstallDirs,
    savedUploadFlags       = combinedSavedUploadFlags,
    savedReportFlags       = combinedSavedReportFlags,
    savedHaddockFlags      = combinedSavedHaddockFlags
  }
    where
      -- This is ugly, but necessary. If we're mappending two config files, we
      -- want the values of the *non-empty* list fields from the second one to
      -- *override* the corresponding values from the first one. Default
      -- behaviour (concatenation) is confusing and makes some use cases (see
      -- #1884) impossible.
      --
      -- However, we also want to allow specifying multiple values for a list
      -- field in a *single* config file. For example, we want the following to
      -- continue to work:
      --
      -- remote-repo: hackage.haskell.org:http://hackage.haskell.org/
      -- remote-repo: private-collection:http://hackage.local/
      --
      -- So we can't just wrap the list fields inside Flags; we have to do some
      -- special-casing just for SavedConfig.

      -- NB: the signature prevents us from using 'combine' on lists.
      combine' :: (SavedConfig -> flags) -> (flags -> Flag a) -> Flag a
      combine' field subfield =
        (subfield . field $ a) `mappend` (subfield . field $ b)

      -- Monoidal merge for fields that are safe to 'mappend' directly.
      combineMonoid :: Monoid mon => (SavedConfig -> flags) -> (flags -> mon)
                    -> mon
      combineMonoid field subfield =
        (subfield . field $ a) `mappend` (subfield . field $ b)

      -- b's list wins unless it is empty, in which case a's is kept.
      lastNonEmpty' :: (SavedConfig -> flags) -> (flags -> [a]) -> [a]
      lastNonEmpty' field subfield =
        let a' = subfield . field $ a
            b' = subfield . field $ b
        in case b' of [] -> a'
                      _  -> b'

      -- Same override-unless-empty rule, for 'NubList' fields.
      lastNonEmptyNL' :: (SavedConfig -> flags) -> (flags -> NubList a)
                      -> NubList a
      lastNonEmptyNL' field subfield =
        let a' = subfield . field $ a
            b' = subfield . field $ b
        in case fromNubList b' of [] -> a'
                                  _  -> b'

      combinedSavedGlobalFlags = GlobalFlags {
        globalVersion           = combine globalVersion,
        globalNumericVersion    = combine globalNumericVersion,
        globalConfigFile        = combine globalConfigFile,
        globalSandboxConfigFile = combine globalSandboxConfigFile,
        globalConstraintsFile   = combine globalConstraintsFile,
        globalRemoteRepos       = lastNonEmptyNL globalRemoteRepos,
        globalCacheDir          = combine globalCacheDir,
        globalLocalRepos        = lastNonEmptyNL globalLocalRepos,
        globalLogsDir           = combine globalLogsDir,
        globalWorldFile         = combine globalWorldFile,
        globalRequireSandbox    = combine globalRequireSandbox,
        globalIgnoreSandbox     = combine globalIgnoreSandbox,
        globalIgnoreExpiry      = combine globalIgnoreExpiry,
        globalHttpTransport     = combine globalHttpTransport,
        globalNix               = combine globalNix
        }
        where
          combine        = combine'        savedGlobalFlags
          lastNonEmptyNL = lastNonEmptyNL' savedGlobalFlags

      combinedSavedInstallFlags = InstallFlags {
        installDocumentation         = combine installDocumentation,
        installHaddockIndex          = combine installHaddockIndex,
        installDryRun                = combine installDryRun,
        installMaxBackjumps          = combine installMaxBackjumps,
        installReorderGoals          = combine installReorderGoals,
        installCountConflicts        = combine installCountConflicts,
        installIndependentGoals      = combine installIndependentGoals,
        installShadowPkgs            = combine installShadowPkgs,
        installStrongFlags           = combine installStrongFlags,
        installAllowBootLibInstalls  = combine installAllowBootLibInstalls,
        installReinstall             = combine installReinstall,
        installAvoidReinstalls       = combine installAvoidReinstalls,
        installOverrideReinstall     = combine installOverrideReinstall,
        installUpgradeDeps           = combine installUpgradeDeps,
        installOnly                  = combine installOnly,
        installOnlyDeps              = combine installOnlyDeps,
        installIndexState            = combine installIndexState,
        installRootCmd               = combine installRootCmd,
        installSummaryFile           = lastNonEmptyNL installSummaryFile,
        installLogFile               = combine installLogFile,
        installBuildReports          = combine installBuildReports,
        installReportPlanningFailure = combine installReportPlanningFailure,
        installSymlinkBinDir         = combine installSymlinkBinDir,
        installPerComponent          = combine installPerComponent,
        installOneShot               = combine installOneShot,
        installNumJobs               = combine installNumJobs,
        installKeepGoing             = combine installKeepGoing,
        installRunTests              = combine installRunTests,
        installOfflineMode           = combine installOfflineMode,
        installProjectFileName       = combine installProjectFileName
        }
        where
          combine        = combine'        savedInstallFlags
          lastNonEmptyNL = lastNonEmptyNL' savedInstallFlags

      combinedSavedConfigureFlags = ConfigFlags {
        configArgs                = lastNonEmpty configArgs,
        -- 'configPrograms_' cannot be merged meaningfully; take b's wholesale.
        configPrograms_           = configPrograms_ . savedConfigureFlags $ b,
        -- TODO: NubListify
        configProgramPaths        = lastNonEmpty configProgramPaths,
        -- TODO: NubListify
        configProgramArgs         = lastNonEmpty configProgramArgs,
        configProgramPathExtra    = lastNonEmptyNL configProgramPathExtra,
        configInstantiateWith     = lastNonEmpty configInstantiateWith,
        configHcFlavor            = combine configHcFlavor,
        configHcPath              = combine configHcPath,
        configHcPkg               = combine configHcPkg,
        configVanillaLib          = combine configVanillaLib,
        configProfLib             = combine configProfLib,
        configProf                = combine configProf,
        configSharedLib           = combine configSharedLib,
        configStaticLib           = combine configStaticLib,
        configDynExe              = combine configDynExe,
        configProfExe             = combine configProfExe,
        configProfDetail          = combine configProfDetail,
        configProfLibDetail       = combine configProfLibDetail,
        -- TODO: NubListify
        configConfigureArgs       = lastNonEmpty configConfigureArgs,
        configOptimization        = combine configOptimization,
        configDebugInfo           = combine configDebugInfo,
        configProgPrefix          = combine configProgPrefix,
        configProgSuffix          = combine configProgSuffix,
        -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'.
        configInstallDirs         =
          (configInstallDirs . savedConfigureFlags $ a)
          `mappend` (configInstallDirs . savedConfigureFlags $ b),
        configScratchDir          = combine configScratchDir,
        -- TODO: NubListify
        configExtraLibDirs        = lastNonEmpty configExtraLibDirs,
        -- TODO: NubListify
        configExtraFrameworkDirs  = lastNonEmpty configExtraFrameworkDirs,
        -- TODO: NubListify
        configExtraIncludeDirs    = lastNonEmpty configExtraIncludeDirs,
        configDeterministic       = combine configDeterministic,
        configIPID                = combine configIPID,
        configCID                 = combine configCID,
        configDistPref            = combine configDistPref,
        configCabalFilePath       = combine configCabalFilePath,
        configVerbosity           = combine configVerbosity,
        configUserInstall         = combine configUserInstall,
        -- TODO: NubListify
        configPackageDBs          = lastNonEmpty configPackageDBs,
        configGHCiLib             = combine configGHCiLib,
        configSplitObjs           = combine configSplitObjs,
        configStripExes           = combine configStripExes,
        configStripLibs           = combine configStripLibs,
        -- TODO: NubListify
        configConstraints         = lastNonEmpty configConstraints,
        -- TODO: NubListify
        configDependencies        = lastNonEmpty configDependencies,
        -- TODO: NubListify
        configConfigurationsFlags = lastNonEmpty configConfigurationsFlags,
        configTests               = combine configTests,
        configBenchmarks          = combine configBenchmarks,
        configCoverage            = combine configCoverage,
        configLibCoverage         = combine configLibCoverage,
        configExactConfiguration  = combine configExactConfiguration,
        configFlagError           = combine configFlagError,
        configRelocatable         = combine configRelocatable,
        configUseResponseFiles    = combine configUseResponseFiles
        }
        where
          combine        = combine'        savedConfigureFlags
          lastNonEmpty   = lastNonEmpty'   savedConfigureFlags
          lastNonEmptyNL = lastNonEmptyNL' savedConfigureFlags

      combinedSavedConfigureExFlags = ConfigExFlags {
        configCabalVersion  = combine configCabalVersion,
        -- TODO: NubListify
        configExConstraints = lastNonEmpty configExConstraints,
        -- TODO: NubListify
        configPreferences   = lastNonEmpty configPreferences,
        configSolver        = combine configSolver,
        configAllowNewer    = combineMonoid savedConfigureExFlags configAllowNewer,
        configAllowOlder    = combineMonoid savedConfigureExFlags configAllowOlder
        }
        where
          combine      = combine' savedConfigureExFlags
          lastNonEmpty = lastNonEmpty' savedConfigureExFlags

      -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'.
      combinedSavedUserInstallDirs = savedUserInstallDirs a
                                     `mappend` savedUserInstallDirs b

      -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'.
      combinedSavedGlobalInstallDirs = savedGlobalInstallDirs a
                                       `mappend` savedGlobalInstallDirs b

      combinedSavedUploadFlags = UploadFlags {
        uploadCandidate   = combine uploadCandidate,
        uploadDoc         = combine uploadDoc,
        uploadUsername    = combine uploadUsername,
        uploadPassword    = combine uploadPassword,
        uploadPasswordCmd = combine uploadPasswordCmd,
        uploadVerbosity   = combine uploadVerbosity
        }
        where
          combine = combine' savedUploadFlags

      combinedSavedReportFlags = ReportFlags {
        reportUsername  = combine reportUsername,
        reportPassword  = combine reportPassword,
        reportVerbosity = combine reportVerbosity
        }
        where
          combine = combine' savedReportFlags

      combinedSavedHaddockFlags = HaddockFlags {
        -- TODO: NubListify
        haddockProgramPaths  = lastNonEmpty haddockProgramPaths,
        -- TODO: NubListify
        haddockProgramArgs   = lastNonEmpty haddockProgramArgs,
        haddockHoogle        = combine haddockHoogle,
        haddockHtml          = combine haddockHtml,
        haddockHtmlLocation  = combine haddockHtmlLocation,
        haddockForHackage    = combine haddockForHackage,
        haddockExecutables   = combine haddockExecutables,
        haddockTestSuites    = combine haddockTestSuites,
        haddockBenchmarks    = combine haddockBenchmarks,
        haddockForeignLibs   = combine haddockForeignLibs,
        haddockInternal      = combine haddockInternal,
        haddockCss           = combine haddockCss,
        haddockHscolour      = combine haddockHscolour,
        haddockHscolourCss   = combine haddockHscolourCss,
        haddockContents      = combine haddockContents,
        haddockDistPref      = combine haddockDistPref,
        haddockKeepTempFiles = combine haddockKeepTempFiles,
        haddockVerbosity     = combine haddockVerbosity
        }
        where
          combine      = combine'      savedHaddockFlags
          lastNonEmpty = lastNonEmpty' savedHaddockFlags
--
-- * Default config
--
-- | These are the absolute basic defaults. The fields that must be
-- initialised. When we load the config from the file we layer the loaded
-- values over these ones, so any missing fields in the file take their values
-- from here.
--
-- | The absolute-basic defaults that every effective configuration is
-- layered on top of: compiler flavour, per-user install, normal verbosity,
-- and the user prefix/logs/world-file paths under the cabal directory.
baseSavedConfig :: IO SavedConfig
baseSavedConfig = do
  userPrefix <- defaultCabalDir
  logsDir    <- defaultLogsDir
  worldFile  <- defaultWorldFile
  return mempty {
    savedConfigureFlags  = mempty {
      configHcFlavor     = toFlag defaultCompiler,
      configUserInstall  = toFlag defaultUserInstall,
      configVerbosity    = toFlag normal
    },
    savedUserInstallDirs = mempty {
      prefix             = toFlag (toPathTemplate userPrefix)
    },
    savedGlobalFlags     = mempty {
      globalLogsDir      = toFlag logsDir,
      globalWorldFile    = toFlag worldFile
    }
  }
-- | This is the initial configuration that we write out to to the config file
-- if the file does not exist (or the config we use if the file cannot be read
-- for some other reason). When the config gets loaded it gets layered on top
-- of 'baseSavedConfig' so we do not need to include it into the initial
-- values we save into the config file.
--
-- | The configuration written to a freshly-created config file: cache dir,
-- the default remote repo, world file, extra program path, build-log
-- location and anonymous build reports.  Loaded configs are layered on top
-- of 'baseSavedConfig', so anything covered there is omitted here.
initialSavedConfig :: IO SavedConfig
initialSavedConfig = do
  cacheDir  <- defaultCacheDir
  logsDir   <- defaultLogsDir
  worldFile <- defaultWorldFile
  extraPath <- defaultExtraPath
  return mempty {
    savedGlobalFlags     = mempty {
      globalCacheDir     = toFlag cacheDir,
      globalRemoteRepos  = toNubList [defaultRemoteRepo],
      globalWorldFile    = toFlag worldFile
    },
    savedConfigureFlags  = mempty {
      configProgramPathExtra = toNubList extraPath
    },
    savedInstallFlags    = mempty {
      installSummaryFile = toNubList [toPathTemplate (logsDir </> "build.log")],
      installBuildReports= toFlag AnonymousReports,
      installNumJobs     = toFlag Nothing
    }
  }
--TODO: misleading, there's no way to override this default
-- either make it possible or rename to simply getCabalDir.
-- | The per-user cabal directory (e.g. @~/.cabal@ on unix-like systems),
-- obtained via 'getAppUserDataDirectory'.
defaultCabalDir :: IO FilePath
defaultCabalDir = getAppUserDataDirectory appName
  where
    appName = "cabal"
-- | Default location of the cabal config file: @\<cabal dir\>\/config@.
defaultConfigFile :: IO FilePath
defaultConfigFile = (</> "config") <$> defaultCabalDir
-- | Default location of the package cache: @\<cabal dir\>\/packages@.
defaultCacheDir :: IO FilePath
defaultCacheDir = (</> "packages") <$> defaultCabalDir
-- | Default location of build logs: @\<cabal dir\>\/logs@.
defaultLogsDir :: IO FilePath
defaultLogsDir = (</> "logs") <$> defaultCabalDir
-- | Default position of the world file
-- | Default position of the world file: @\<cabal dir\>\/world@.
defaultWorldFile :: IO FilePath
defaultWorldFile = (</> "world") <$> defaultCabalDir
-- | Extra directories to add to the program search path; just the cabal
-- @bin@ directory.
defaultExtraPath :: IO [FilePath]
defaultExtraPath = fmap (\cabalDir -> [cabalDir </> "bin"]) defaultCabalDir
-- | The compiler to assume when none is configured: the platform's default
-- flavour if Cabal knows one, otherwise GHC.
defaultCompiler :: CompilerFlavor
defaultCompiler =
  case defaultCompilerFlavor of
    Just flavor -> flavor
    Nothing     -> GHC
-- | Whether installs default to per-user (as opposed to global).
defaultUserInstall :: Bool
defaultUserInstall = True
-- We do per-user installs by default on all platforms. We used to default to
-- global installs on Windows but that no longer works on Windows Vista or 7.
-- | The default remote repository: central Hackage, with no security
-- settings filled in yet (see 'addInfoForKnownRepos' for that).
defaultRemoteRepo :: RemoteRepo
defaultRemoteRepo = RemoteRepo name uri Nothing [] 0 False
  where
    name = "hackage.haskell.org"
    uri  = URI "http:" (Just (URIAuth "" name "")) "/" "" ""
    -- Note that lots of old ~/.cabal/config files will have the old url
    -- http://hackage.haskell.org/packages/archive
    -- but new config files can use the new url (without the /packages/archive)
    -- and avoid having to do a http redirect
-- For the default repo we know extra information, fill this in.
--
-- We need this because the 'defaultRemoteRepo' above is only used for the
-- first time when a config file is made. So for users with older config files
-- we might have only have older info. This lets us fill that in even for old
-- config files.
--
-- | If the repo is the known hackage.haskell.org one, normalise its URI,
-- mark it as https-capable, and enable hackage-security with the shipped
-- root keys (unless the user explicitly set @secure: False@ or supplied
-- their own keys/threshold).  Any other repo is returned unchanged.
addInfoForKnownRepos :: RemoteRepo -> RemoteRepo
addInfoForKnownRepos repo
  | remoteRepoName repo == remoteRepoName defaultRemoteRepo
  = useSecure . tryHttps . fixOldURI $ repo
  where
    -- Rewrite the legacy /packages/archive style URI to the current one.
    fixOldURI r
      | isOldHackageURI (remoteRepoURI r)
                  = r { remoteRepoURI = remoteRepoURI defaultRemoteRepo }
      | otherwise = r

    tryHttps r = r { remoteRepoShouldTryHttps = True }

    -- Only fills in security info when the user left keys and threshold
    -- at their empty defaults and did not opt out.
    useSecure r@RemoteRepo{
                  remoteRepoSecure       = secure,
                  remoteRepoRootKeys     = [],
                  remoteRepoKeyThreshold = 0
                } | secure /= Just False
          = r {
              -- Use hackage-security by default unless you opt-out with
              -- secure: False
              remoteRepoSecure       = Just True,
              remoteRepoRootKeys     = defaultHackageRemoteRepoKeys,
              remoteRepoKeyThreshold = defaultHackageRemoteRepoKeyThreshold
            }
    useSecure r = r
addInfoForKnownRepos other = other
-- | The current hackage.haskell.org repo root keys that we ship with cabal.
---
-- This lets us bootstrap trust in this repo without user intervention.
-- These keys need to be periodically updated when new root keys are added.
-- See the root key procedures for details.
--
-- | The hackage.haskell.org root key ids shipped with cabal, used to
-- bootstrap trust without user intervention.  Must be updated whenever the
-- Hackage root keys rotate.
defaultHackageRemoteRepoKeys :: [String]
defaultHackageRemoteRepoKeys =
  [ "fe331502606802feac15e514d9b9ea83fee8b6ffef71335479a2e68d84adc6b0",
    "1ea9ba32c526d1cc91ab5e5bd364ec5e9e8cb67179a471872f6e26f0ae773d42",
    "2c6c3627bd6c982990239487f1abd02e08a02e6cf16edb105a8012d444d870c3",
    "0a5c7ea47cd1b15f01f5f51a33adda7e655bc0f0b0615baa8e271f4c3351e21d",
    "51f0161b906011b52c6613376b1ae937670da69322113a246a09f807c62f6921"
  ]
-- | The required threshold of root key signatures for hackage.haskell.org
--
-- | The required number of root-key signatures for hackage.haskell.org.
defaultHackageRemoteRepoKeyThreshold :: Int
defaultHackageRemoteRepoKeyThreshold = 3
--
-- * Config file reading
--
-- | Loads the main configuration, and applies additional defaults to give the
-- effective configuration. To loads just what is actually in the config file,
-- use 'loadRawConfig'.
--
-- | Load the main configuration and apply the additional defaults to give
-- the effective configuration.  To load just what is actually in the config
-- file, use 'loadRawConfig'.
loadConfig :: Verbosity -> Flag FilePath -> IO SavedConfig
loadConfig verbosity configFileFlag =
  loadRawConfig verbosity configFileFlag >>= extendToEffectiveConfig
-- | Layer the given config over 'baseSavedConfig' and fill in extra details
-- for known repositories (see 'addInfoForKnownRepos').
extendToEffectiveConfig :: SavedConfig -> IO SavedConfig
extendToEffectiveConfig config = do
  base <- baseSavedConfig
  let effective0   = base `mappend` config
      globalFlags0 = savedGlobalFlags effective0
      effective    = effective0 {
        savedGlobalFlags = globalFlags0 {
          globalRemoteRepos =
            overNubList (map addInfoForKnownRepos)
                        (globalRemoteRepos globalFlags0)
        }
      }
  return effective
-- | Like 'loadConfig' but does not apply any additional defaults, it just
-- loads what is actually in the config file. This is thus suitable for
-- comparing or editing a config file, but not suitable for using as the
-- effective configuration.
--
-- | Like 'loadConfig' but without applying additional defaults: reads
-- exactly what is in the config file.  If the file is missing it is created
-- with the default contents; parse warnings are reported, and parse errors
-- are fatal.
loadRawConfig :: Verbosity -> Flag FilePath -> IO SavedConfig
loadRawConfig verbosity configFileFlag = do
  (source, configFile) <- getConfigFilePathAndSource configFileFlag
  minp <- readConfigFile mempty configFile
  case minp of
    Nothing -> do
      notice verbosity $ "Config file path source is " ++ sourceMsg source ++ "."
      notice verbosity $ "Config file " ++ configFile ++ " not found."
      createDefaultConfigFile verbosity configFile
    Just (ParseOk ws conf) -> do
      unless (null ws) $ warn verbosity $
        unlines (map (showPWarning configFile) ws)
      return conf
    Just (ParseFailed err) -> do
      let (line, msg) = locatedErrorMsg err
      die' verbosity $
        "Error parsing config file " ++ configFile
        ++ maybe "" (\n -> ':' : show n) line ++ ":\n" ++ msg
  where
    sourceMsg CommandlineOption   = "commandline option"
    sourceMsg EnvironmentVariable = "env var CABAL_CONFIG"
    sourceMsg Default             = "default config file"
-- | Where the config file path came from: an explicit command-line option,
-- the @CABAL_CONFIG@ environment variable, or the built-in default location.
data ConfigFileSource = CommandlineOption
                      | EnvironmentVariable
                      | Default
-- | Returns the config file path, without checking that the file exists.
-- The order of precedence is: input flag, CABAL_CONFIG, default location.
-- | Returns the config file path, without checking that the file exists.
-- The order of precedence is: input flag, CABAL_CONFIG, default location.
getConfigFilePath :: Flag FilePath -> IO FilePath
getConfigFilePath flagPath = snd <$> getConfigFilePathAndSource flagPath
-- | Like 'getConfigFilePath' but also reports which source supplied the
-- path.  Candidates are tried in precedence order; the last one always
-- yields 'Just', so the 'error' case is unreachable in practice.
getConfigFilePathAndSource :: Flag FilePath -> IO (ConfigFileSource, FilePath)
getConfigFilePathAndSource configFileFlag =
    getSource sources
  where
    sources =
      [ (CommandlineOption,   return . flagToMaybe $ configFileFlag)
      , (EnvironmentVariable, lookup "CABAL_CONFIG" `liftM` getEnvironment)
      , (Default,             Just `liftM` defaultConfigFile) ]

    getSource [] = error "no config file path candidate found."
    getSource ((source,action): xs) =
      action >>= maybe (getSource xs) (return . (,) source)
-- | Read and parse a config file, layering its contents over @initial@.
-- Returns 'Nothing' (rather than failing) when the file does not exist;
-- any other I/O error is re-thrown.
readConfigFile :: SavedConfig -> FilePath -> IO (Maybe (ParseResult SavedConfig))
readConfigFile initial file = handleNotExists $
  fmap (Just . parseConfig (ConstraintSourceMainConfig file) initial)
       (readFile file)
  where
    handleNotExists action = catchIO action $ \ioe ->
      if isDoesNotExistError ioe
        then return Nothing
        else ioError ioe
-- | Write a fresh config file at @filePath@ containing the initial
-- configuration (with the commented defaults) and return that configuration.
createDefaultConfigFile :: Verbosity -> FilePath -> IO SavedConfig
createDefaultConfigFile verbosity filePath = do
  commentConf <- commentSavedConfig
  initialConf <- initialSavedConfig
  notice verbosity $ "Writing default configuration to " ++ filePath
  writeConfigFile filePath commentConf initialConf
  return initialConf
-- | Render @vals@ (annotated with the default values in @comments@) to
-- @file@, preceded by an explanatory header.  Writes to a @.tmp@ file first
-- and renames, so a crash mid-write cannot leave a truncated config.
writeConfigFile :: FilePath -> SavedConfig -> SavedConfig -> IO ()
writeConfigFile file comments vals = do
  let tmpFile = file <.> "tmp"
  createDirectoryIfMissing True (takeDirectory file)
  writeFile tmpFile $ explanation ++ showConfigWithComments comments vals ++ "\n"
  renameFile tmpFile file
  where
    explanation = unlines
      ["-- This is the configuration file for the 'cabal' command line tool."
      ,"--"
      ,"-- The available configuration options are listed below."
      ,"-- Some of them have default values listed."
      ,"--"
      ,"-- Lines (like this one) beginning with '--' are comments."
      ,"-- Be careful with spaces and indentation because they are"
      ,"-- used to indicate layout for nested sections."
      ,"--"
      ,"-- This config file was generated using the following versions"
      ,"-- of Cabal and cabal-install:"
      ,"-- Cabal library version: " ++ Text.display cabalVersion
      ,"-- cabal-install version: " ++ showVersion Paths_cabal_install.version
      ,"",""
      ]
-- | These are the default values that get used in Cabal if a no value is
-- given. We use these here to include in comments when we write out the
-- initial config file so that the user can see what default value they are
-- overriding.
--
-- | The default values Cabal uses when a field is not given.  These are
-- included as comments in a freshly-written config file so users can see
-- what they would be overriding.
commentSavedConfig :: IO SavedConfig
commentSavedConfig = do
  userInstallDirs   <- defaultInstallDirs defaultCompiler True True
  globalInstallDirs <- defaultInstallDirs defaultCompiler False True
  let conf0 = mempty {
        savedGlobalFlags       = defaultGlobalFlags {
            globalRemoteRepos = toNubList [defaultRemoteRepo]
            },
        savedInstallFlags      = defaultInstallFlags,
        savedConfigureExFlags  = defaultConfigExFlags {
            configAllowNewer  = Just (AllowNewer RelaxDepsNone),
            configAllowOlder  = Just (AllowOlder RelaxDepsNone)
            },
        savedConfigureFlags    = (defaultConfigFlags defaultProgramDb) {
            configUserInstall = toFlag defaultUserInstall
            },
        savedUserInstallDirs   = fmap toFlag userInstallDirs,
        savedGlobalInstallDirs = fmap toFlag globalInstallDirs,
        savedUploadFlags       = commandDefaultFlags uploadCommand,
        savedReportFlags       = commandDefaultFlags reportCommand,
        savedHaddockFlags      = defaultHaddockFlags
        }
  conf1 <- extendToEffectiveConfig conf0
  let globalFlagsConf1 = savedGlobalFlags conf1
      conf2 = conf1 {
        savedGlobalFlags = globalFlagsConf1 {
            globalRemoteRepos = overNubList (map removeRootKeys)
                                (globalRemoteRepos globalFlagsConf1)
            }
        }
  return conf2
  where
    -- Most people don't want to see default root keys, so don't print them.
    removeRootKeys :: RemoteRepo -> RemoteRepo
    removeRootKeys r = r { remoteRepoRootKeys = [] }
-- | All config file fields.
--
-- | All config file fields, assembled from the command-line option
-- descriptions of each command, with a handful of hand-written parsers for
-- fields that 'viewAsFieldDescr' cannot handle properly.
configFieldDescriptions :: ConstraintSource -> [FieldDescr SavedConfig]
configFieldDescriptions src =

     toSavedConfig liftGlobalFlag
       (commandOptions (globalCommand []) ParseArgs)
       ["version", "numeric-version", "config-file", "sandbox-config-file"] []

  ++ toSavedConfig liftConfigFlag
       (configureOptions ParseArgs)
       (["builddir", "constraint", "dependency", "ipid"]
        ++ map fieldName installDirsFields)

       -- This is only here because viewAsFieldDescr gives us a parser
       -- that only recognises 'ghc' etc, the case-sensitive flag names, not
       -- what the normal case-insensitive parser gives us.
       [simpleField "compiler"
          (fromFlagOrDefault Disp.empty . fmap Text.disp) (optional Text.parse)
          configHcFlavor (\v flags -> flags { configHcFlavor = v })

       -- TODO: The following is a temporary fix. The "optimization"
       -- and "debug-info" fields are OptArg, and viewAsFieldDescr
       -- fails on that. Instead of a hand-written hackaged parser
       -- and printer, we should handle this case properly in the
       -- library.
       ,liftField configOptimization (\v flags ->
          flags { configOptimization = v }) $
        let name = "optimization" in
        FieldDescr name
          (\f -> case f of
                   Flag NoOptimisation      -> Disp.text "False"
                   Flag NormalOptimisation  -> Disp.text "True"
                   Flag MaximumOptimisation -> Disp.text "2"
                   _                        -> Disp.empty)
          (\line str _ -> case () of
             _ | str == "False"  -> ParseOk [] (Flag NoOptimisation)
               | str == "True"   -> ParseOk [] (Flag NormalOptimisation)
               | str == "0"      -> ParseOk [] (Flag NoOptimisation)
               | str == "1"      -> ParseOk [] (Flag NormalOptimisation)
               | str == "2"      -> ParseOk [] (Flag MaximumOptimisation)
               | lstr == "false" -> ParseOk [caseWarning] (Flag NoOptimisation)
               | lstr == "true"  -> ParseOk [caseWarning] (Flag NormalOptimisation)
               | otherwise       -> ParseFailed (NoParse name line)
               where
                 lstr = lowercase str
                 caseWarning = PWarning $
                   "The '" ++ name
                   ++ "' field is case sensitive, use 'True' or 'False'.")

       ,liftField configDebugInfo (\v flags -> flags { configDebugInfo = v }) $
        let name = "debug-info" in
        FieldDescr name
          (\f -> case f of
                   Flag NoDebugInfo      -> Disp.text "False"
                   Flag MinimalDebugInfo -> Disp.text "1"
                   Flag NormalDebugInfo  -> Disp.text "True"
                   Flag MaximalDebugInfo -> Disp.text "3"
                   _                     -> Disp.empty)
          (\line str _ -> case () of
             _ | str == "False"  -> ParseOk [] (Flag NoDebugInfo)
               | str == "True"   -> ParseOk [] (Flag NormalDebugInfo)
               | str == "0"      -> ParseOk [] (Flag NoDebugInfo)
               | str == "1"      -> ParseOk [] (Flag MinimalDebugInfo)
               | str == "2"      -> ParseOk [] (Flag NormalDebugInfo)
               | str == "3"      -> ParseOk [] (Flag MaximalDebugInfo)
               | lstr == "false" -> ParseOk [caseWarning] (Flag NoDebugInfo)
               | lstr == "true"  -> ParseOk [caseWarning] (Flag NormalDebugInfo)
               | otherwise       -> ParseFailed (NoParse name line)
               where
                 lstr = lowercase str
                 caseWarning = PWarning $
                   "The '" ++ name
                   ++ "' field is case sensitive, use 'True' or 'False'.")
       ]

  ++ toSavedConfig liftConfigExFlag
       (configureExOptions ParseArgs src)
       []
       -- "allow-older"/"allow-newer" accept either a boolean or a list of
       -- package names; try the boolean parse first, fall back to the list.
       [let pkgs = (Just . AllowOlder . RelaxDepsSome) `fmap` parseOptCommaList Text.parse
            parseAllowOlder = ((Just . AllowOlder . toRelaxDeps) `fmap` Text.parse) Parse.<++ pkgs in
        simpleField "allow-older"
          (showRelaxDeps . fmap unAllowOlder) parseAllowOlder
          configAllowOlder (\v flags -> flags { configAllowOlder = v })
       ,let pkgs = (Just . AllowNewer . RelaxDepsSome) `fmap` parseOptCommaList Text.parse
            parseAllowNewer = ((Just . AllowNewer . toRelaxDeps) `fmap` Text.parse) Parse.<++ pkgs in
        simpleField "allow-newer"
          (showRelaxDeps . fmap unAllowNewer) parseAllowNewer
          configAllowNewer (\v flags -> flags { configAllowNewer = v })
       ]

  ++ toSavedConfig liftInstallFlag
       (installOptions ParseArgs)
       ["dry-run", "only", "only-dependencies", "dependencies-only"] []

  ++ toSavedConfig liftUploadFlag
       (commandOptions uploadCommand ParseArgs)
       ["verbose", "check", "documentation", "publish"] []

  ++ toSavedConfig liftReportFlag
       (commandOptions reportCommand ParseArgs)
       ["verbose", "username", "password"] []
       --FIXME: this is a hack, hiding the user name and password.
       -- But otherwise it masks the upload ones. Either need to
       -- share the options or make then distinct. In any case
       -- they should probably be per-server.

  ++ [ viewAsFieldDescr
       $ optionDistPref
         (configDistPref . savedConfigureFlags)
         (\distPref config ->
            config
            { savedConfigureFlags = (savedConfigureFlags config) {
                 configDistPref = distPref }
            , savedHaddockFlags = (savedHaddockFlags config) {
                 haddockDistPref = distPref }
            }
         )
         ParseArgs
     ]

  where
    -- Lift a set of command options into SavedConfig fields, excluding
    -- some by name and substituting hand-written replacements.
    toSavedConfig lift options exclusions replacements =
      [ lift (fromMaybe field replacement)
      | opt <- options
      , let field       = viewAsFieldDescr opt
            name        = fieldName field
            replacement = find ((== name) . fieldName) replacements
      , name `notElem` exclusions ]

    optional = Parse.option mempty . fmap toFlag

    showRelaxDeps Nothing                   = mempty
    showRelaxDeps (Just RelaxDepsNone)      = Disp.text "False"
    showRelaxDeps (Just _)                  = Disp.text "True"

    toRelaxDeps True  = RelaxDepsAll
    toRelaxDeps False = RelaxDepsNone
-- TODO: next step, make the deprecated fields elicit a warning.
--
-- | Old field spellings kept for backwards compatibility with existing
-- config files ("repos", "cachedir", "hackage-*", and the prefixed
-- install-dirs fields).
deprecatedFieldDescriptions :: [FieldDescr SavedConfig]
deprecatedFieldDescriptions =
  [ liftGlobalFlag $
    listField "repos"
      (Disp.text . showRepo) parseRepo
      (fromNubList . globalRemoteRepos)
      (\rs cfg -> cfg { globalRemoteRepos = toNubList rs })
  , liftGlobalFlag $
    simpleField "cachedir"
      (Disp.text . fromFlagOrDefault "") (optional parseFilePathQ)
      globalCacheDir (\d cfg -> cfg { globalCacheDir = d })
  , liftUploadFlag $
    simpleField "hackage-username"
      (Disp.text . fromFlagOrDefault "" . fmap unUsername)
      (optional (fmap Username parseTokenQ))
      uploadUsername (\d cfg -> cfg { uploadUsername = d })
  , liftUploadFlag $
    simpleField "hackage-password"
      (Disp.text . fromFlagOrDefault "" . fmap unPassword)
      (optional (fmap Password parseTokenQ))
      uploadPassword (\d cfg -> cfg { uploadPassword = d })
  , liftUploadFlag $
    spaceListField "hackage-password-command"
      Disp.text parseTokenQ
      (fromFlagOrDefault [] . uploadPasswordCmd)
      (\d cfg -> cfg { uploadPasswordCmd = Flag d })
  ]
  ++ map (modifyFieldName ("user-"++)   . liftUserInstallDirs)   installDirsFields
  ++ map (modifyFieldName ("global-"++) . liftGlobalInstallDirs) installDirsFields
  where
    optional = Parse.option mempty . fmap toFlag
    modifyFieldName :: (String -> String) -> FieldDescr a -> FieldDescr a
    modifyFieldName f d = d { fieldName = f (fieldName d) }
-- | Lift a user install-dirs field into a 'SavedConfig' field.
liftUserInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate))
                    -> FieldDescr SavedConfig
liftUserInstallDirs =
  liftField savedUserInstallDirs
            (\dirs cfg -> cfg { savedUserInstallDirs = dirs })
-- | Lift a global install-dirs field into a 'SavedConfig' field.
liftGlobalInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate))
                      -> FieldDescr SavedConfig
liftGlobalInstallDirs =
  liftField savedGlobalInstallDirs
            (\dirs cfg -> cfg { savedGlobalInstallDirs = dirs })
-- | Lift a 'GlobalFlags' field into a 'SavedConfig' field.
liftGlobalFlag :: FieldDescr GlobalFlags -> FieldDescr SavedConfig
liftGlobalFlag =
  liftField savedGlobalFlags (\fs cfg -> cfg { savedGlobalFlags = fs })
-- | Lift a 'ConfigFlags' field into a 'SavedConfig' field.
liftConfigFlag :: FieldDescr ConfigFlags -> FieldDescr SavedConfig
liftConfigFlag =
  liftField savedConfigureFlags (\fs cfg -> cfg { savedConfigureFlags = fs })
-- | Lift a 'ConfigExFlags' field into a 'SavedConfig' field.
liftConfigExFlag :: FieldDescr ConfigExFlags -> FieldDescr SavedConfig
liftConfigExFlag =
  liftField savedConfigureExFlags (\fs cfg -> cfg { savedConfigureExFlags = fs })
-- | Lift an 'InstallFlags' field into a 'SavedConfig' field.
liftInstallFlag :: FieldDescr InstallFlags -> FieldDescr SavedConfig
liftInstallFlag =
  liftField savedInstallFlags (\fs cfg -> cfg { savedInstallFlags = fs })
-- | Lift an 'UploadFlags' field into a 'SavedConfig' field.
liftUploadFlag :: FieldDescr UploadFlags -> FieldDescr SavedConfig
liftUploadFlag =
  liftField savedUploadFlags (\fs cfg -> cfg { savedUploadFlags = fs })
-- | Lift a 'ReportFlags' field into a 'SavedConfig' field.
liftReportFlag :: FieldDescr ReportFlags -> FieldDescr SavedConfig
liftReportFlag =
  liftField savedReportFlags (\fs cfg -> cfg { savedReportFlags = fs })
-- | Parse the textual contents of a config file over @initial@.  Plain
-- fields are handled by the field descriptions; known sections (repository,
-- haddock, install-dirs, program-locations, program-default-options) are
-- folded in separately, and unknown stanzas only produce a warning.
parseConfig :: ConstraintSource
            -> SavedConfig
            -> String
            -> ParseResult SavedConfig
parseConfig src initial = \str -> do
  fields <- readFields str
  let (knownSections, others) = partition isKnownSection fields
  config <- parse others
  let user0   = savedUserInstallDirs config
      global0 = savedGlobalInstallDirs config
  (remoteRepoSections0, haddockFlags, user, global, paths, args) <-
    foldM parseSections
          ([], savedHaddockFlags config, user0, global0, [], [])
          knownSections

  -- Later sections for the same repo name win; 'reverse' keeps file order.
  let remoteRepoSections =
          reverse
        . nubBy ((==) `on` remoteRepoName)
        $ remoteRepoSections0

  return config {
    savedGlobalFlags = (savedGlobalFlags config) {
      globalRemoteRepos = toNubList remoteRepoSections
    },
    savedConfigureFlags = (savedConfigureFlags config) {
      configProgramPaths = paths,
      configProgramArgs  = args
    },
    savedHaddockFlags      = haddockFlags,
    savedUserInstallDirs   = user,
    savedGlobalInstallDirs = global
  }

  where
    isKnownSection (ParseUtils.Section _ "repository" _ _)              = True
    isKnownSection (ParseUtils.F _ "remote-repo" _)                     = True
    isKnownSection (ParseUtils.Section _ "haddock" _ _)                 = True
    isKnownSection (ParseUtils.Section _ "install-dirs" _ _)            = True
    isKnownSection (ParseUtils.Section _ "program-locations" _ _)       = True
    isKnownSection (ParseUtils.Section _ "program-default-options" _ _) = True
    isKnownSection _                                                    = False

    parse = parseFields (configFieldDescriptions src
                         ++ deprecatedFieldDescriptions) initial

    parseSections (rs, h, u, g, p, a)
                  (ParseUtils.Section _ "repository" name fs) = do
      r' <- parseFields remoteRepoFields (emptyRemoteRepo name) fs
      -- Sanity-check the security settings; these are warnings, not errors.
      when (remoteRepoKeyThreshold r' > length (remoteRepoRootKeys r')) $
        warning $ "'key-threshold' for repository " ++ show (remoteRepoName r')
                  ++ " higher than number of keys"
      when (not (null (remoteRepoRootKeys r'))
            && remoteRepoSecure r' /= Just True) $
        warning $ "'root-keys' for repository " ++ show (remoteRepoName r')
                  ++ " non-empty, but 'secure' not set to True."
      return (r':rs, h, u, g, p, a)

    parseSections (rs, h, u, g, p, a)
                  (ParseUtils.F lno "remote-repo" raw) = do
      let mr' = readRepo raw
      r' <- maybe (ParseFailed $ NoParse "remote-repo" lno) return mr'
      return (r':rs, h, u, g, p, a)

    parseSections accum@(rs, h, u, g, p, a)
                  (ParseUtils.Section _ "haddock" name fs)
      | name == "" = do h' <- parseFields haddockFlagsFields h fs
                        return (rs, h', u, g, p, a)
      | otherwise  = do
          warning "The 'haddock' section should be unnamed"
          return accum

    parseSections accum@(rs, h, u, g, p, a)
                  (ParseUtils.Section _ "install-dirs" name fs)
      | name' == "user"   = do u' <- parseFields installDirsFields u fs
                               return (rs, h, u', g, p, a)
      | name' == "global" = do g' <- parseFields installDirsFields g fs
                               return (rs, h, u, g', p, a)
      | otherwise         = do
          warning "The 'install-paths' section should be for 'user' or 'global'"
          return accum
      where name' = lowercase name

    parseSections accum@(rs, h, u, g, p, a)
                  (ParseUtils.Section _ "program-locations" name fs)
      | name == "" = do p' <- parseFields withProgramsFields p fs
                        return (rs, h, u, g, p', a)
      | otherwise  = do
          warning "The 'program-locations' section should be unnamed"
          return accum

    parseSections accum@(rs, h, u, g, p, a)
                  (ParseUtils.Section _ "program-default-options" name fs)
      | name == "" = do a' <- parseFields withProgramOptionsFields a fs
                        return (rs, h, u, g, p, a')
      | otherwise  = do
          warning "The 'program-default-options' section should be unnamed"
          return accum

    parseSections accum f = do
      warning $ "Unrecognized stanza on line " ++ show (lineNo f)
      return accum
-- | Render a config with no default-value comments.
showConfig :: SavedConfig -> String
showConfig vals = showConfigWithComments mempty vals
-- | Render @vals@ as config-file text, using @comment@ to supply the
-- default values shown as comments.  Repositories are rendered as sections
-- first, followed by the plain fields and the named sub-sections.
showConfigWithComments :: SavedConfig -> SavedConfig -> String
showConfigWithComments comment vals = Disp.render $
      case fmap (uncurry ppRemoteRepoSection)
           (zip (getRemoteRepos comment) (getRemoteRepos vals)) of
        []     -> Disp.text ""
        (x:xs) -> foldl' (\ r r' -> r $+$ Disp.text "" $+$ r') x xs
  $+$ Disp.text ""
  $+$ ppFields (skipSomeFields (configFieldDescriptions ConstraintSourceUnknown))
               mcomment vals
  $+$ Disp.text ""
  $+$ ppSection "haddock" "" haddockFlagsFields
                (fmap savedHaddockFlags mcomment) (savedHaddockFlags vals)
  $+$ Disp.text ""
  $+$ installDirsSection "user"   savedUserInstallDirs
  $+$ Disp.text ""
  $+$ installDirsSection "global" savedGlobalInstallDirs
  $+$ Disp.text ""
  $+$ configFlagsSection "program-locations" withProgramsFields
                         configProgramPaths
  $+$ Disp.text ""
  $+$ configFlagsSection "program-default-options" withProgramOptionsFields
                         configProgramArgs
  where
    getRemoteRepos = fromNubList . globalRemoteRepos . savedGlobalFlags
    mcomment       = Just comment
    installDirsSection name field =
      ppSection "install-dirs" name installDirsFields
                (fmap field mcomment) (field vals)
    configFlagsSection name fields field =
      ppSection name "" fields
                (fmap (field . savedConfigureFlags) mcomment)
                ((field . savedConfigureFlags) vals)

    -- skip fields based on field name. currently only skips "remote-repo",
    -- because that is rendered as a section. (see 'ppRemoteRepoSection'.)
    skipSomeFields = filter ((/= "remote-repo") . fieldName)
-- | Fields for the 'install-dirs' sections.
-- | Fields for the 'install-dirs' sections.
installDirsFields :: [FieldDescr (InstallDirs (Flag PathTemplate))]
installDirsFields = [ viewAsFieldDescr opt | opt <- installDirsOptions ]
-- | Render one @repository@ section, with @def@ supplying the commented
-- default values for fields not set in @vals@.
ppRemoteRepoSection :: RemoteRepo -> RemoteRepo -> Doc
ppRemoteRepoSection def vals = ppSection "repository" (remoteRepoName vals)
    remoteRepoFields (Just def) vals
-- | Fields of a @repository@ section: url, secure, root-keys, key-threshold.
remoteRepoFields :: [FieldDescr RemoteRepo]
remoteRepoFields =
  [ simpleField "url"
      (text . show) (parseTokenQ >>= parseURI')
      remoteRepoURI (\x repo -> repo { remoteRepoURI = x })
  , simpleField "secure"
      showSecure (Just `fmap` Text.parse)
      remoteRepoSecure (\x repo -> repo { remoteRepoSecure = x })
  , listField "root-keys"
      text parseTokenQ
      remoteRepoRootKeys (\x repo -> repo { remoteRepoRootKeys = x })
  , simpleField "key-threshold"
      showThreshold Text.parse
      remoteRepoKeyThreshold (\x repo -> repo { remoteRepoKeyThreshold = x })
  ]
  where
    parseURI' uriString =
      case parseURI uriString of
        Nothing  -> fail $ "remote-repo: no parse on " ++ show uriString
        Just uri -> return uri

    showSecure Nothing      = mempty      -- default 'secure' setting
    showSecure (Just True)  = text "True" -- user explicitly enabled it
    showSecure (Just False) = text "False" -- user explicitly disabled it

    -- If the key-threshold is set to 0, we omit it as this is the default
    -- and it looks odd to have a value for key-threshold but not for 'secure'
    -- (note that an empty list of keys is already omitted by default, since
    -- that is what we do for all list fields)
    showThreshold 0 = mempty
    showThreshold t = text (show t)
-- | Fields for the 'haddock' section.
-- | Fields for the 'haddock' section: every haddock command option except
-- those that make no sense in a config file.
haddockFlagsFields :: [FieldDescr HaddockFlags]
haddockFlagsFields =
    filter ((`notElem` exclusions) . fieldName) allFields
  where
    allFields  = map viewAsFieldDescr (haddockOptions ParseArgs)
    exclusions = ["verbose", "builddir", "for-hackage"]
-- | Fields for the 'program-locations' section.
-- | Fields for the 'program-locations' section (e.g. @ghc-location@).
withProgramsFields :: [FieldDescr [(String, FilePath)]]
withProgramsFields =
  [ viewAsFieldDescr opt
  | opt <- programDbPaths' (++ "-location") defaultProgramDb ParseArgs id (++)
  ]
-- | Fields for the 'program-default-options' section.
-- | Fields for the 'program-default-options' section (e.g. @ghc-options@).
withProgramOptionsFields :: [FieldDescr [(String, [String])]]
withProgramOptionsFields =
  [ viewAsFieldDescr opt
  | opt <- programDbOptions defaultProgramDb ParseArgs id (++)
  ]
-- | Get the differences (as a pseudo code diff) between the user's
-- '~/.cabal/config' and the one that cabal would generate if it didn't exist.
-- | Get the differences (as a pseudo code diff) between the user's
-- '~/.cabal/config' and the one that cabal would generate if it didn't
-- exist.  Both configs are rendered to key/value lines; keys present in
-- both with differing values show as a @-@/@+@ pair.
userConfigDiff :: GlobalFlags -> IO [String]
userConfigDiff globalFlags = do
  userConfig <- loadRawConfig normal (globalConfigFile globalFlags)
  testConfig <- initialSavedConfig
  return $ reverse . foldl' createDiff [] . M.toList
         $ M.unionWith combine
             (M.fromList . map justFst $ filterShow testConfig)
             (M.fromList . map justSnd $ filterShow userConfig)
  where
    justFst (a, b) = (a, (Just b, Nothing))
    justSnd (a, b) = (a, (Nothing, Just b))

    -- 'unionWith' only calls this when a key is in both maps, so exactly
    -- one side of each pair is populated; anything else is a logic error.
    combine (Nothing, Just b) (Just a, Nothing) = (Just a, Just b)
    combine (Just a, Nothing) (Nothing, Just b) = (Just a, Just b)
    combine x y = error $ "Can't happen : userConfigDiff "
                  ++ show x ++ " " ++ show y

    createDiff :: [String] -> (String, (Maybe String, Maybe String)) -> [String]
    createDiff acc (key, (Just a, Just b))
      | a == b    = acc
      | otherwise = ("+ " ++ key ++ ": " ++ b)
                    : ("- " ++ key ++ ": " ++ a) : acc
    createDiff acc (key, (Nothing, Just b)) = ("+ " ++ key ++ ": " ++ b) : acc
    createDiff acc (key, (Just a, Nothing)) = ("- " ++ key ++ ": " ++ a) : acc
    createDiff acc (_, (Nothing, Nothing)) = acc

    -- Render a config to (key, value) pairs, dropping comments and lines
    -- without a colon.
    filterShow :: SavedConfig -> [(String, String)]
    filterShow cfg = map keyValueSplit
        . filter (\s -> not (null s) && ':' `elem` s)
        . map nonComment
        . lines
        $ showConfig cfg

    -- Truncates a line at the first "--" occurrence.
    nonComment [] = []
    nonComment ('-':'-':_) = []
    nonComment (x:xs) = x : nonComment xs

    topAndTail = reverse . dropWhile isSpace . reverse . dropWhile isSpace

    keyValueSplit s =
      let (left, right) = break (== ':') s
      in (topAndTail left, topAndTail (drop 1 right))
-- | Update the user's ~/.cabal/config' keeping the user's customizations.
userConfigUpdate :: Verbosity -> GlobalFlags -> IO ()
userConfigUpdate verbosity globalFlags = do
  userConfig <- loadRawConfig normal (globalConfigFile globalFlags)
  newConfig <- initialSavedConfig
  commentConf <- commentSavedConfig
  cabalFile <- getConfigFilePath $ globalConfigFile globalFlags
  -- Keep the previous config around so the user can recover it.
  let backup = cabalFile ++ ".backup"
  notice verbosity $ "Renaming " ++ cabalFile ++ " to " ++ backup ++ "."
  renameFile cabalFile backup
  notice verbosity $ "Writing merged config to " ++ cabalFile ++ "."
  -- NOTE(review): the merge relies on SavedConfig's 'mappend' letting the
  -- user's settings win over the fresh defaults — confirm in its Monoid
  -- instance if touching this.
  writeConfigFile cabalFile commentConf (newConfig `mappend` userConfig)
| themoritz/cabal | cabal-install/Distribution/Client/Config.hs | bsd-3-clause | 50,869 | 0 | 27 | 13,894 | 10,371 | 5,587 | 4,784 | 889 | 13 |
{-# LANGUAGE OverloadedStrings #-}
module Test.Suggest where
import Test.Common
import Test.Import
-- | Exercises the Elasticsearch phrase suggester: a misspelled phrase in
-- the request should come back with a corrected suggestion, independent
-- of the (deliberately empty) query results.
spec :: Spec
spec =
  describe "Suggest" $
    it "returns a search suggestion using the phrase suggester" $ withTestEnv $ do
      _ <- insertData
      -- Match no documents: only the suggester payload matters here.
      let query = QueryMatchNoneQuery
          phraseSuggester = mkPhraseSuggester (FieldName "message")
          namedSuggester = Suggest "Use haskel" "suggest_name" (SuggestTypePhraseSuggester phraseSuggester)
          search' = mkSearch (Just query) Nothing
          search = search' { suggestBody = Just namedSuggester }
          expectedText = Just "use haskell"
      resp <- searchByIndex testIndex search
      parsed <- parseEsResponse resp :: BH IO (Either EsError (SearchResult Tweet))
      case parsed of
        Left e -> liftIO $ expectationFailure ("Expected an search suggestion but got " <> show e)
        -- Take the first option of the first named suggester response.
        Right sr -> liftIO $ (suggestOptionsText . head . suggestResponseOptions . head . nsrResponses <$> suggest sr) `shouldBe` expectedText
| bitemyapp/bloodhound | tests/Test/Suggest.hs | bsd-3-clause | 1,016 | 0 | 18 | 232 | 255 | 127 | 128 | 20 | 2 |
{-# LANGUAGE FlexibleInstances,
UndecidableInstances, CPP, ViewPatterns,
NondecreasingIndentation #-}
#if __GLASGOW_HASKELL__ < 709
{-# LANGUAGE OverlappingInstances #-}
{-# OPTIONS_GHC -fno-warn-unrecognised-pragmas #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Language.Glambda.Repl
-- Copyright : (C) 2015 Richard Eisenberg
-- License : BSD-style (see LICENSE)
-- Maintainer : Richard Eisenberg (rae@cs.brynmawr.edu)
-- Stability : experimental
--
-- Implements a REPL for glambda.
--
----------------------------------------------------------------------------
module Language.Glambda.Repl ( main ) where
import Prelude hiding ( lex )
import Language.Glambda.Check
import Language.Glambda.Eval
import Language.Glambda.Lex
import Language.Glambda.Parse
import Language.Glambda.Unchecked
import Language.Glambda.Util
import Language.Glambda.Statement
import Language.Glambda.Globals
import Language.Glambda.Monad
import Language.Glambda.Exp
import Language.Glambda.Type
import Text.PrettyPrint.ANSI.Leijen as Pretty hiding ( (<$>) )
import System.Console.Haskeline
import System.Directory
import Control.Monad
import Control.Monad.Reader
import Control.Monad.State
import Data.Char
import Data.List as List
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative
#endif
-- | Entry point: the glamorous Glambda interpreter.  Prints the banner
-- and then enters the REPL loop inside a haskeline session.
main :: IO ()
main = runInputT defaultSettings (runGlam (helloWorld >> loop))
-- | The REPL driver: prompt, dispatch (EOF quits, ":cmd" runs a command,
-- anything else is a statement), then repeat.
loop :: Glam ()
loop = do
  m_line <- prompt "λ> "
  case stripWhitespace <$> m_line of
    Nothing          -> quit
    Just (':' : cmd) -> runCommand cmd
    Just str         -> runStmts str
  loop

-- | Prints welcome message
helloWorld :: Glam ()
helloWorld = do
  printLine lambda
  printLine $ text "Welcome to the Glamorous Glambda interpreter, version" <+>
              text version <> char '.'

-- | The welcome message
-- (ASCII-art banner; spacing inside the strings is intentional, though it
-- may have been collapsed by whitespace-mangling in this copy.)
lambda :: Doc
lambda
  = vcat $ List.map text
    [ " \\\\\\\\\\\\ "
    , " \\\\\\\\\\\\ "
    , " /-\\ \\\\\\\\\\\\ "
    , " | | \\\\\\\\\\\\ "
    , " \\-/| \\\\\\\\\\\\ "
    , " | //\\\\\\\\\\\\ "
    , " \\-/ ////\\\\\\\\\\\\ "
    , " //////\\\\\\\\\\\\ "
    , " ////// \\\\\\\\\\\\ "
    , " ////// \\\\\\\\\\\\ "
    ]

-- | The current version of glambda
version :: String
version = "1.0"
-------------------------------------------
-- running statements

-- | Lex, parse and execute a line of statements, reporting any errors.
runStmts :: String -> Glam ()
runStmts str = reportErrors $ do
  toks <- lexG str
  stmts <- parseStmtsG toks
  doStmts stmts

-- | Run a sequence of statements, returning the new global variables
doStmts :: [Statement] -> GlamE Globals
doStmts = foldr doStmt ask

-- | Run a 'Statement' and then run another action with the global
-- variables built in the 'Statement'
doStmt :: Statement -> GlamE a -> GlamE a
doStmt (BareExp uexp) thing_inside = check uexp $ \sty exp -> do
  -- A bare expression: evaluate it and show value plus type.
  printLine $ printValWithType (eval exp) sty
  thing_inside
doStmt (NewGlobal g uexp) thing_inside = check uexp $ \sty exp -> do
  -- A binding: print it, then extend the global environment for the rest.
  printLine $ text g <+> char '=' <+> printWithType exp sty
  local (extend g sty exp) thing_inside

-------------------------------------------
-- commands

-- | Interpret a command (missing the initial ':').
runCommand :: String -> Glam ()
runCommand = dispatchCommand cmdTable

type CommandTable = [(String, String -> Glam ())]

-- | Look up a command by (possibly abbreviated) prefix; complain when the
-- prefix is unknown or ambiguous.
dispatchCommand :: CommandTable -> String -> Glam ()
dispatchCommand table line
  = case List.filter ((cmd `List.isPrefixOf`) . fst) table of
      []            -> printLine $ text "Unknown command:" <+> squotes (text cmd)
      [(_, action)] -> action arg
      many          -> do printLine $ text "Ambiguous command:" <+> squotes (text cmd)
                          printLine $ text "Possibilities:" $$
                                      indent 2 (vcat $ List.map (text . fst) many)
  where (cmd, arg) = List.break isSpace line

-- | All REPL commands, keyed by their full names (prefixes also work).
cmdTable :: CommandTable
cmdTable = [ ("quit",    quitCmd)
           , ("d-lex",   lexCmd)
           , ("d-parse", parseCmd)
           , ("load",    loadCmd)
           , ("eval",    evalCmd)
           , ("step",    stepCmd)
           , ("type",    typeCmd)
           , ("all",     allCmd) ]
-- | Exit the REPL; the command argument is ignored.
quitCmd :: String -> Glam ()
quitCmd _ = quit

-- | Results that can be reported back to the user after running a
-- 'GlamE' action.  'report' also returns the globals to carry forward.
class Reportable a where
  report :: a -> Glam Globals

instance Reportable Doc where
  report x = printLine x >> get
instance Reportable () where
  report _ = get
instance Reportable Globals where
  report = return
-- Fallback: anything pretty-printable gets printed; globals unchanged.
instance {-# OVERLAPPABLE #-} Pretty a => Reportable a where
  report other = printLine (pretty other) >> get
-- | Run a 'GlamE' action, printing any error it produces, reporting a
-- successful result, and storing the (possibly updated) globals.
reportErrors :: Reportable a => GlamE a -> Glam ()
reportErrors action =
  runGlamE action >>= either onError report >>= put
  where
    -- On failure, show the error and keep the old globals.
    onError err = printLine err >> get
-- | Lex and then parse a string into an unchecked expression.
parseLex :: String -> GlamE UExp
parseLex str = lexG str >>= parseExpG

-- | Render an expression next to its type, separated by a colon.
printWithType :: (Pretty exp, Pretty ty) => exp -> ty -> Doc
printWithType e t = hsep [pretty e, colon, pretty t]

-- | Render a value next to its (singleton) type, separated by a colon.
printValWithType :: Val ty -> STy ty -> Doc
printValWithType v sty = hsep [prettyVal v sty, colon, pretty sty]
lexCmd, parseCmd, evalCmd, stepCmd, typeCmd, allCmd, loadCmd
  :: String -> Glam ()

-- Show the raw token stream for an expression.
lexCmd expr = reportErrors $ lexG expr
-- Show the parsed (unchecked) AST.
parseCmd = reportErrors . parseLex

-- Type-check and fully evaluate (big step), printing value and type.
evalCmd expr = reportErrors $ do
  uexp <- parseLex expr
  check uexp $ \sty exp ->
    return $ printValWithType (eval exp) sty

-- Small-step the expression to a value, printing every intermediate step.
stepCmd expr = reportErrors $ do
  uexp <- parseLex expr
  check uexp $ \sty exp -> do
    printLine $ printWithType exp sty
    -- 'step' yields Left for one more reduction, Right once at a value.
    let loop e = case step e of
          Left e' -> do
            printLine $ text "-->" <+> printWithType e' sty
            loop e'
          Right v -> return v
    v <- loop exp
    return $ printValWithType v sty

-- Print just the type of an expression.
typeCmd expr = reportErrors $ do
  uexp <- parseLex expr
  check uexp $ \sty exp -> return (printWithType exp sty)

-- Run both the small-step and big-step evaluators on the expression.
allCmd expr = do
  printLine (text "Small step:")
  _ <- stepCmd expr
  printLine Pretty.empty

  printLine (text "Big step:")
  evalCmd expr

-- Load a file of statements and run them.
-- (Uses NondecreasingIndentation for the then/else-do layout below.)
loadCmd (stripWhitespace -> file) = do
  file_exists <- liftIO $ doesFileExist file
  if not file_exists then file_not_found else do
  contents <- liftIO $ readFile file
  runStmts contents
  where
    file_not_found = do
      printLine (text "File not found:" <+> squotes (text file))
      cwd <- liftIO getCurrentDirectory
      printLine (parens (text "Current directory:" <+> text cwd))
| goldfirere/glambda | src/Language/Glambda/Repl.hs | bsd-3-clause | 6,627 | 0 | 22 | 1,692 | 1,751 | 897 | 854 | 159 | 3 |
{-# LANGUAGE DataKinds, DeriveGeneric, TypeApplications #-}
module Test24 where
-- Test case from #24, comments preserved
import Optics.Core
import Data.Generics.Product.Fields
import Data.Generics.Product.Positions
import GHC.Generics
-- Two nested records used to exercise generic field/position optics.
data Foo a b = Foo { x1 :: a, x2 :: b } deriving (Generic, Show)
data Bar a b = Bar { x3 :: Foo a b, x4 :: Int } deriving (Generic, Show)

tup :: ((Int, Char), Int)
tup = ((1, 'a'), 2)

tup2, tup3, tup4 :: ((Char, Char), Int)
tup2 = tup & _1 % _1 %~ toEnum -- Works.
tup3 = tup & x %~ toEnum -- Works also with type annotation.
  where x :: Lens ((Int, Char), Int) ((Char, Char), Int) Int Char
        x = _1 % _1
-- Works.
tup4 = tup & position @1 % position @1 %~ toEnum

foo :: Foo Int Char
foo = Foo 1 'a'

foo2, foo3 :: Foo Char Char
foo2 = foo & field @"x1" %~ toEnum -- Works when there's just one 'field'.
foo3 = foo & position @1 %~ toEnum -- Works when there's just one 'position'.

bar :: Bar Int Char
bar = Bar (Foo 1 'a') 2

bar2, bar3, bar4 :: Bar Char Char
-- Doesn't work, error at first 'field' (Couldn't match type ‘Int’ with ‘Char’ arising from a use of ‘field’).
bar2 = bar & field @"x3" % field @"x1" %~ toEnum
-- Type annotation doesn't help.
bar3 = bar & l %~ toEnum
  where l :: Lens (Bar Int Char) (Bar Char Char) Int Char
        l = field @"x3" % field @"x1"
-- Doesn't work, error at first 'position' (Couldn't match type ‘Int’ with ‘Char’ arising from a use of ‘position’).
bar4 = bar & position @1 % position @1 %~ toEnum

-- Works if we stick to simple Lens' (modify to the same type).
bar5 :: Bar Int Char
bar5 = bar & field @"x3" % field @"x1" %~ (+1)

main :: IO ()
main = print bar5
| kcsongor/generic-lens | generic-optics/test/Test24.hs | bsd-3-clause | 1,681 | 0 | 9 | 355 | 560 | 315 | 245 | 33 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Mantle.Interface.TH where
import Control.Monad
import Control.Lens
import Language.Haskell.TH
import Language.Haskell.TH.Lens
import Language.Haskell.TH.Build hiding (name)
import Mantle.Interface
-- | Left-fold type application: @appsT f [a, b]@ builds @f a b@.
appsT :: TypeQ -> [TypeQ] -> TypeQ
appsT = foldl appT

-- Convert a type-variable binder back into a (possibly kinded) type.
unTV (PlainTV t) = return $ VarT t
unTV (KindedTV t k) = return $ SigT (VarT t) k

-- | Iso between a 'Name' and its base string (module prefix dropped).
baseL :: SimpleIso Name String
baseL = iso nameBase mkName

-- | Apply a type constructor to all of its bound type variables.
polyType :: Name -> [TyVarBndr] -> TypeQ
polyType n tvs = appsT (conT n) $ map unTV tvs

-- | Derive an 'Interface' instance for a single-constructor data type:
-- the 'Ifc' data instance plus 'newIfcCircuit' and 'expose' definitions.
-- The pattern match on @[con]@ fails for multi-constructor types.
makeInterface :: Name -> Q [Dec]
makeInterface n = do
  TyConI (DataD context ifcName tyvars [con] _) <- reify n
  ifcT <- polyType ifcName tyvars
  inst <- instanceD' context (appT' ''Interface ifcT) [
    dataIfcDef n ifcT con,
    newIfcDef n con,
    exposeDef n con]
  return [inst]

-- | Build the @data instance Ifc d T@ declaration, wrapping every field
-- type in @Ifc d@.  Record field names drop their first character
-- (presumably a reserved prefix in the source type — TODO confirm).
dataIfcDef :: Name -> Type -> Con -> DecQ
dataIfcDef ifcName ifcT con = do
  d <- VarT `fmap` newName "d"
  let ifcWrap = AppT $ AppT (ConT ''Ifc) d
  ifcC <- case con of
    (NormalC _ sts) ->
      let wrap (s,t) = (s,ifcWrap t) in
      normalC' ifcName $ map wrap sts
    (RecC _ vsts) ->
      let wrap (fn,s,t) = (fieldName fn,s,ifcWrap t) in
      recC' ifcName $ map wrap vsts
  dataInstD' () ''Ifc [d,ifcT] [ifcC] ()
  where
    fieldName = mkName . tail . nameBase

-- Number of fields carried by a constructor.
consCount :: Con -> Int
consCount (NormalC n sts) = length sts
consCount (RecC n vsts) = length vsts
consCount (InfixC x n y) = 2

-- | Generate @i@ fresh names, all based on "x".
newNames :: Int -> Q [Name]
newNames i = replicateM i $ newName "x"

-- | Define 'newIfcCircuit': bind one 'newIfc' per field, then rebuild
-- the constructor from the bound names.
newIfcDef :: Name -> Con -> DecQ
newIfcDef n c = do
  vars <- newNames $ consCount c
  news <- zipWithM bindS' vars (repeat 'newIfc)
  ret <- noBindS' $ appE' 'return $ appsE' (n : vars)
  body <- doE' $ news ++ [ret]
  funD' 'newIfcCircuit $ clause' () body ()

-- | Define 'expose': destructure the constructor and apply 'expose' to
-- every field.
exposeDef :: Name -> Con -> DecQ
exposeDef n c = do
  vars <- newNames $ consCount c
  body <- appsE' $ (conE' n) : (map (appE' 'expose) vars)
  funD' 'expose $ clause' (conP' n vars) body ()
| aninhumer/mantle | src/Mantle/Interface/TH.hs | bsd-3-clause | 2,030 | 0 | 16 | 513 | 884 | 442 | 442 | 56 | 2 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.ARB.BufferStorage
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.ARB.BufferStorage (
-- * Extension Support
glGetARBBufferStorage,
gl_ARB_buffer_storage,
-- * Enums
pattern GL_BUFFER_IMMUTABLE_STORAGE,
pattern GL_BUFFER_STORAGE_FLAGS,
pattern GL_CLIENT_MAPPED_BUFFER_BARRIER_BIT,
pattern GL_CLIENT_STORAGE_BIT,
pattern GL_DYNAMIC_STORAGE_BIT,
pattern GL_MAP_COHERENT_BIT,
pattern GL_MAP_PERSISTENT_BIT,
pattern GL_MAP_READ_BIT,
pattern GL_MAP_WRITE_BIT,
-- * Functions
glBufferStorage
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
| haskell-opengl/OpenGLRaw | src/Graphics/GL/ARB/BufferStorage.hs | bsd-3-clause | 991 | 0 | 5 | 133 | 97 | 67 | 30 | 17 | 0 |
module Vulkan.Utils.ShaderQQ.Backend.Glslang
( GlslangError
, GlslangWarning
, processGlslangMessages
) where
import qualified Data.ByteString.Lazy.Char8 as BSL
import Data.List.Extra
import System.FilePath
type GlslangError = String
type GlslangWarning = String

-- | Split glslang output into (warnings, errors).  Only lines prefixed
-- with @"WARNING: "@ or @"ERROR: "@ are kept; in each, the severity tag
-- is dropped and the leading path is shortened to its file name.
processGlslangMessages :: BSL.ByteString -> ([GlslangWarning], [GlslangError])
processGlslangMessages =
  foldr grep ([], []) . filter (not . null) . lines . BSL.unpack
  where
    grep line (ws, es) | "WARNING: " `isPrefixOf` line = (cut line : ws, es)
                       | "ERROR: " `isPrefixOf` line = (ws, cut line : es)
                       | otherwise = (ws, es)
    -- Drop everything up to and including the first space (the severity
    -- tag), then split at the first ':' so the path part can be replaced
    -- by its file name.
    cut line = takeFileName path <> msg
      where (path, msg) = break (== ':') . drop 1 $ dropWhile (/= ' ') line
| expipiplus1/vulkan | utils/src/Vulkan/Utils/ShaderQQ/Backend/Glslang.hs | bsd-3-clause | 800 | 0 | 12 | 213 | 262 | 149 | 113 | 17 | 1 |
module Sprite.GL where
import Graphics.UI.Gtk.OpenGL
import Graphics.UI.Gtk
import Graphics.Rendering.OpenGL
import Control.Concurrent.STM
import Control.Monad.Trans
-- | Initialise the Gtk+ OpenGL extension and set the global GL state
-- (no depth test, black clear colour, alpha blending, smoothing).
bootGL :: IO ()
bootGL = do
  -- Initialise the Gtk+ OpenGL extension
  -- (including reading various command line parameters)
  initGL
  depthFunc $= Nothing -- specifies comparison function for DepthBuffer
  clearColor $= Color4 0 0 0 1
  blend $= Enabled
  blendFunc $= (SrcAlpha, OneMinusSrcAlpha)
  lineSmooth $= Enabled
  pointSmooth $= Enabled
  polygonSmooth $= Enabled
  shadeModel $= Smooth
-- | Build a double-buffered GL drawing area.  Its current size is
-- mirrored into @size@ on every configure event, and @draw@ renders it
-- on every expose; an idle handler keeps requeueing redraws.
mkCanva :: TVar (Int,Int) -> (GLWindow -> IO a) -> IO GLDrawingArea
mkCanva size draw = do
  glconfig <- glConfigNew [GLModeRGBA,
                           GLModeDepth,
                           GLModeDouble]
  canvas <- glDrawingAreaNew glconfig
  -- widgetSetSizeRequest canvas dx dy

  -- Initialise some GL setting just before the canvas first gets shown
  -- (We can't initialise these things earlier since the GL resources that
  -- we are using wouldn't heve been setup yet)
  onRealize canvas $ withGLDrawingArea canvas $ \_ -> do
    -- viewport $= (Position 0 0, Size (fromIntegral dx) (fromIntegral dy))
    clearColor $= (Color4 1.0 1.0 1.0 0.0)
    matrixMode $= Projection
    loadIdentity
    ortho 0.0 1.0 0.0 1.0 (-1.0) 1.0
    depthFunc $= Just Less
    drawBuffer $= BackBuffers

  -- On resize: record the new size and rebuild the projection so the
  -- drawing coordinate system stays 0..1 in both axes.
  on canvas configureEvent $ do
    (w,h) <- eventSize
    liftIO $ do
      atomically $ writeTVar size (w,h)
      viewport $= (Position 0 0, Size (fromIntegral w) (fromIntegral h))
      matrixMode $= Projection
      loadIdentity
      ortho 0.0 1 0.0 1.0 (-1.0) 1.0
      -- the following line is not in the original example, but it's good style...
      matrixMode $= Modelview 0
    return True

  -- Set the repaint handler
  onExpose canvas $ \_ -> do
    withGLDrawingArea canvas $ \glwindow -> do
      clear [DepthBuffer, ColorBuffer]
      draw glwindow
      glDrawableSwapBuffers glwindow
    return True

  set canvas [widgetCanFocus := True]
  -- Continuously request redraws while the main loop is otherwise idle.
  idleAdd (do
    widgetQueueDraw canvas
    return True)
    priorityLow
  return canvas
-- | Brighten a colour: each of r, g, b is mapped to (p + n) / (n + 1).
-- Note: only four arguments are bound here; the fifth 'GLfloat' (alpha)
-- is supplied via partial application of 'Color4' and is NOT adjusted.
kcolor :: GLfloat -> GLfloat -> GLfloat -> GLfloat -> GLfloat -> Color4 GLfloat
kcolor p r g b = Color4 (pn r) (pn g) (pn b) where
  pn n = (p + n) / (n+1)

-- Set the current GL colour using 'kcolor' (alpha passed through raw).
mcolor p r g b = color . kcolor p r g b
| paolino/sprites | Sprite/GL.hs | bsd-3-clause | 2,284 | 17 | 17 | 524 | 668 | 328 | 340 | 56 | 1 |
module Main(
main,
emptyRepl
) where
import SLISP.Core
import SLISP.Data
import SLISP.Repl
import SLISP.Util.System(time)
import System.Environment(getArgs)
import System.Exit(exitFailure)
import qualified Control.Exception as E(catch, SomeException)
-- | Dispatch on the command line: no args is a no-op, -h prints usage,
-- -r starts the REPL preloading files, -i starts a bare REPL, -b times
-- a batch run, and anything else runs the named files.
main :: IO ()
main = do
  getArgs >>= \args -> case args of
    []          -> return ()
    "-h":_      -> putStrLn "slisp [-rhib] <files>"
    "-r":files  -> repl True files
    "-i":_      -> repl False []
    "-b":files  -> time (runFiles files)
    files       -> runFiles files

-- | A REPL with no files preloaded.
emptyRepl :: IO ()
emptyRepl = repl True []

-- | Run files, routing any exception through 'handler'.
runFiles :: [String] -> IO ()
runFiles files = E.catch (justRun files) handler

-- | Load the files into an empty table and print the final value.
justRun :: [String] -> IO ()
justRun files = loadLibs files (emptyTable, Fixnum 1) >>= putStrLn . show . snd
handler :: E.SomeException -> IO ()
handler e = (putStrLn $ "Exception: " ++ show e) >> exitFailure | ameingast/slisp | src/Main.hs | bsd-3-clause | 859 | 0 | 14 | 172 | 344 | 180 | 164 | 27 | 6 |
{-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
module Mud.UndeploySpec where
import System.Exit
import System.Posix.Process
import Mud.Config (Config(..), defaultConfig)
import Mud.History
import Mud.Options
import Mud.Undeploy
import SpecHelpers
-- | Tests for 'undeployCommand'.  External effects are mocked by plain
-- functions: 'runProcess' mocks are deliberately PARTIAL — any call with
-- unexpected arguments fails the test with a pattern-match error.
spec :: Spec
spec = do
  -- Mock config parsing: maps a project name to its parsed config list.
  let parseConfigFiles = \case
        "simple" -> [defaultConfig "/etc/mud/simple"]
        "complex" -> [ (defaultConfig "/etc/mud/complex")
                           { cfgBasePath = "/one" }
                     , (defaultConfig "/etc/mud/complex")
                           { cfgBasePath = "/two" }
                     ]
        "rootwheel" -> [ (defaultConfig "/etc/mud/rootwheel")
                             { cfgUser = Just "root"
                             , cfgGroup = Just "wheel"
                             , cfgBasePath = "/root"
                             }
                       ]
        "overwrite" -> [ (defaultConfig "/etc/mud/overwrite")
                             { cfgVars = [("one", "1"), ("two", "2")] }
                       ]
        name -> error $ "no mock found for parseConfigFiles " ++ show name

  describe "undeployCommand" $ do
    it "runs the script specified in the config" $ do
      let runProcess Nothing Nothing "/etc/mud/simple.undeploy"
                     ["simple", "", "/tmp"] [] = Exited ExitSuccess
      runFakeMud mempty parseConfigFiles runProcess
                 (undeployCommand "simple" "" [])
        `shouldBe` Right ()

    it "runs the script with the user and group specified in the config" $ do
      let runProcess (Just "root") (Just "wheel") "/etc/mud/rootwheel.undeploy"
                     ["rootwheel", "", "/root"] [] = Exited ExitSuccess
      runFakeMud mempty parseConfigFiles runProcess
                 (undeployCommand "rootwheel" "" [])
        `shouldBe` Right ()

    it "runs all the scripts if several configs are available" $ do
      -- Two equations: one per configured base path.
      let runProcess Nothing Nothing "/etc/mud/complex.undeploy"
                     ["complex", "", "/one"] [] = Exited ExitSuccess
          runProcess Nothing Nothing "/etc/mud/complex.undeploy"
                     ["complex", "", "/two"] [] = Exited ExitSuccess
      runFakeMud mempty parseConfigFiles runProcess
                 (undeployCommand "complex" "" []) `shouldBe` Right ()

    it "overwrites the variables if available" $ do
      -- Expect history vars merged with the command-line overrides.
      let runProcess Nothing Nothing "/etc/mud/overwrite.undeploy"
                     ["overwrite", "some-version", "/tmp"]
                     [("two", "2"), ("three", "3"), ("one", "42")] =
            Exited ExitSuccess
          runProcess mUser mGroup path args vars =
            error $ show (mUser, mGroup, path, args, vars)
          entry = HistDeploy "overwrite" someTime True "some-version"
                             [("one", "0"), ("three", "3")]
          histories = [("/tmp", defaultHistory { histEntries = [entry] })]
          action = runFakeMudHist mempty parseConfigFiles runProcess histories
                                  (undeployCommand "overwrite" "some-version"
                                                   [("one", "42")])
      fmap fst action `shouldBe` Right ()

    it "overwrites the user, group and base path if given in the options" $ do
      let runProcess (Just "www") (Just "daemon") "/etc/mud/rootwheel.undeploy"
                     ["rootwheel", "", "/var/www"] [] = Exited ExitSuccess
          options = mempty
            { optUser = Just "www"
            , optGroup = Just "daemon"
            , optBasePath = Just "/var/www"
            }
      runFakeMud options parseConfigFiles runProcess
                 (undeployCommand "rootwheel" "" [])
        `shouldBe` Right ()

  describe "on history" $ do
    let runProcess _ _ _ _ _ = Exited ExitSuccess
        entry1a = HistDeploy "complex" someTime True "version1" [("a","b")]
        entry1b = HistDeploy "complex" someTime True "version1" [("c","d")]
        entry2 = HistUndeploy "complex" someTime True "version1"
        histories = [ ("/one", defaultHistory { histEntries = [entry1a] })
                    , ("/two", defaultHistory { histEntries = [entry1b] })
                    ]

    it "adds a new entry to each history file" $ do
      let histories' =
            [ ("/one", defaultHistory { histEntries = [entry1a, entry2] })
            , ("/two", defaultHistory { histEntries = [entry1b, entry2] }) ]
      runFakeMudHist mempty parseConfigFiles runProcess histories
                     (undeployCommand "complex" "version1" [])
        `shouldBe` Right ((), histories')

    it "adds a new entry to the base path given in the options if any" $ do
      let histories' =
            [ ("/one", defaultHistory { histEntries = [entry1a, entry2] })
            , ("/two", defaultHistory { histEntries = [entry1b] }) ]
      runFakeMudHist (mempty { optBasePath = Just "/one" })
                     parseConfigFiles runProcess histories
                     (undeployCommand "complex" "version1" [])
        `shouldBe` Right ((), histories')
| thoferon/mud | tests/Mud/UndeploySpec.hs | bsd-3-clause | 5,078 | 0 | 23 | 1,729 | 1,228 | 660 | 568 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
-- | Model for videos.
module HL.Model.Videos
(getHomeVideos)
where
import Data.Text (Text)
-- | Get videos for the home page.
getHomeVideos :: Monad m => m [(Text, Text, Text)]
getHomeVideos =
  return vids

-- | For now we manually encode them until I have time to think of a
-- better way. This interface will be seamlessly replaceable with a
-- config file or so (as far as the controller and view are
-- concerned).
--
-- Each tuple is (title, video URL, thumbnail URL).
vids :: [(Text,Text, Text)]
vids =
  [("Escape from the ivory tower: The Haskell journey, by Simon Peyton-Jones", "https://www.youtube.com/watch?v=re96UgMk6GQ", "https://i1.ytimg.com/vi/re96UgMk6GQ/mqdefault.jpg")
  ,("Haskell taketh away: limiting side effects for parallel programming, by Ryan Newton", "https://www.youtube.com/watch?v=lC5UWG5N8oY", "https://i1.ytimg.com/vi/lC5UWG5N8oY/mqdefault.jpg")
  ,("Production Haskell, by Reid Draper", "https://www.youtube.com/watch?v=AZQLkkDXy68", "https://i1.ytimg.com/vi/AZQLkkDXy68/mqdefault.jpg")
  ,("Haskell Amuse-Bouche, by Mark Lentczner", "https://www.youtube.com/watch?v=b9FagOVqxmI", "https://i1.ytimg.com/vi/b9FagOVqxmI/mqdefault.jpg")
  ,("Haskell is Not For Production and Other Tales, by Katie Miller", "https://www.youtube.com/watch?v=mlTO510zO78", "https://i1.ytimg.com/vi/mlTO510zO78/mqdefault.jpg")
  ,("Your First Web Application with Spock, by Oskar Wickström", "https://www.youtube.com/watch?v=Orm-jIIgVD0", "https://i1.ytimg.com/vi/Orm-jIIgVD0/mqdefault.jpg")
  ]
| haskell-infra/hl | src/HL/Model/Videos.hs | bsd-3-clause | 1,498 | 0 | 8 | 176 | 158 | 102 | 56 | 15 | 1 |
{-# LANGUAGE DataKinds,
FlexibleContexts,
TypeOperators #-}
module Main where
import Language.Hakaru.Syntax.Prelude
import Language.Hakaru.CodeGen.Wrapper
import Language.Hakaru.CodeGen.CodeGenMonad
import Language.Hakaru.CodeGen.AST
import Language.Hakaru.CodeGen.Pretty
import Language.Hakaru.Syntax.AST
import Language.Hakaru.Syntax.ABT
import Language.Hakaru.Evaluation.ConstantPropagation
import Language.Hakaru.Syntax.AST.Transforms (expandTransformations, optimizations)
import Language.Hakaru.Syntax.TypeOf (typeOf)
import Language.Hakaru.Syntax.TypeCheck
import Language.Hakaru.Types.DataKind
import Language.Hakaru.Summary
import Text.PrettyPrint (render)
import Data.Text hiding (foldr)
import qualified Data.Text.IO as IO
-- | Compile each bucket example to a C file in the current directory.
main :: IO ()
main = do
  compileHakaru bucketAdd "bucketAdd.c"
  compileHakaru bucketNoOp "bucketNoOp.c"
  compileHakaru bucketFanout "bucketFanout.c"
  compileHakaru bucketFanout2 "bucketFanout2.c"
  compileHakaru bucketFanout3 "bucketFanout3.c"
  compileHakaru bucketFanout4 "bucketFanout4.c"
  compileHakaru bucketSplit "bucketSplit.c"
  compileHakaru bucketIndex "bucketIndex.c"
  compileHakaru bucketIndex2 "bucketIndex2.c"
-- Hakaru terms exercising the 'bucket' reducer combinators
-- (r_add, r_nop, r_fanout, r_split, r_index) in various shapes.

-- Count from 0 to x by adding 1 per step.
bucketAdd :: TrivialABT Term '[] ('HNat ':-> 'HNat)
bucketAdd = triv $
  lam $ \x -> bucket (nat_ 0) x (r_add (const (nat_ 1)))

-- A reducer that does nothing.
bucketNoOp :: TrivialABT Term '[] ('HNat ':-> HUnit)
bucketNoOp = triv $
  lam $ \x -> bucket (nat_ 0) x r_nop

-- Two accumulators run side by side.
bucketFanout :: TrivialABT Term '[] ('HNat ':-> (HPair 'HNat 'HNat))
bucketFanout = triv $
  lam $ \x -> bucket (nat_ 0) x
                (r_fanout (r_add (const (nat_ 1)))
                          (r_add (const (nat_ 2))))

-- Nested fanout on the right.
bucketFanout2 :: TrivialABT Term '[] ('HNat ':-> (HPair 'HNat (HPair 'HNat 'HNat)))
bucketFanout2 = triv $ lam $ \x -> bucket (nat_ 0) x
  (r_fanout (r_add (const (nat_ 1)))
            (r_fanout (r_add (const (nat_ 2)))
                      (r_add (const (nat_ 3)))))

-- Fanout with a no-op branch.
bucketFanout3 :: TrivialABT Term '[] ('HNat ':-> (HPair 'HNat HUnit))
bucketFanout3 = triv $ lam $ \x -> bucket (nat_ 0) x
  (r_fanout (r_add (const (nat_ 1)))
            r_nop)

-- Nested fanout on the left.
bucketFanout4 :: TrivialABT Term '[] ('HNat ':-> (HPair (HPair 'HNat 'HNat) 'HNat))
bucketFanout4 = triv $ lam $ \x -> bucket (nat_ 0) x
  (r_fanout (r_fanout (r_add (const (nat_ 2)))
                      (r_add (const (nat_ 3))))
            (r_add (const (nat_ 1))))

-- Route every element (predicate is constantly true) to the first branch.
bucketSplit :: TrivialABT Term '[] ('HNat ':-> (HPair 'HNat 'HNat))
bucketSplit = triv $ lam $ \x -> bucket (nat_ 0) x
  (r_split (const true)
           (r_add (const (nat_ 1)))
           (r_add (const (nat_ 2))))

-- Accumulate into a fixed slot of a 10-element array.
bucketIndex :: TrivialABT Term '[] ('HNat ':-> ('HArray 'HNat))
bucketIndex = triv $ lam $ \x -> bucket (nat_ 0) x
  (r_index (const (nat_ 10))
           (const (nat_ 5))
           (r_add (const (nat_ 42))))

-- Array of paired accumulators.
bucketIndex2 :: TrivialABT Term '[] ('HNat ':-> ('HArray (HPair 'HNat 'HNat)))
bucketIndex2 = triv $ lam $ \x -> bucket (nat_ 0) x
  (r_index (const (nat_ 10))
           (const (nat_ 5))
           (r_fanout (r_add (const (nat_ 1)))
                     (r_add (const (nat_ 2)))))
-- | Run the standard ABT passes over a term, generate C with shared
-- memory enabled, and write the pretty-printed result to @outFile@.
compileHakaru
    :: TrivialABT Term '[] a
    -> FilePath
    -> IO ()
compileHakaru abt outFile = do
  let ast' = TypedAST (typeOf abt) $ foldr id abt abtPasses
      codeGen = wrapProgram ast' Nothing (PrintConfig True True)
      codeGenConfig = emptyCG { sharedMem = True }
      cast = CAST $ runCodeGenWith codeGen codeGenConfig
      output = pack . render . pretty $ cast
  IO.writeFile outFile output
  -- Passes are applied right-to-left by the foldr above.
  where abtPasses = [ expandTransformations
                    , constantPropagation
                    , optimizations
                    ]
| zachsully/hkc-test | tests/haskell/bucketTests.hs | bsd-3-clause | 3,620 | 0 | 17 | 796 | 1,426 | 742 | 684 | 86 | 1 |
module Util (binToInt, binToDouble) where
import Genes
-- | Interpret a chromosome as a big-endian binary integer
-- (most significant gene first).
binToInt::Chromosome->Int
binToInt chromosome = binToInt' 0 base chromosome
  where base = 2^(length chromosome-1)

-- Accumulate digits with a halving place value; the final catch-all
-- clause terminates on the empty list.
binToInt'::Int->Int->Chromosome->Int
binToInt' acc 0 [x] = acc+x
binToInt' acc base (x:xs) = binToInt' (acc+x*base) newBase xs
  where newBase = div base 2
binToInt' acc _ _ = acc
-- | Interpret a chromosome as a fixed-point binary number with @n@
-- digits before the (implicit) binary point.
binToDouble::Double->Chromosome->Double
binToDouble n chromosome = binToDouble' 0 base chromosome
  where base = 2**(n-1) -- | n = 1 means 1 digit before "."; means (2^0)*digit
binToDouble'::Double->Double->Chromosome->Double
binToDouble' acc _ [] = acc
binToDouble' acc base (x:xs) = binToDouble' newAcc newBase xs
where
newAcc = acc + (fromIntegral x)*base
newBase = base / 2.0 | Teaspot-Studio/bmstu-binary-genetics | src/Util.hs | mit | 760 | 0 | 10 | 137 | 293 | 154 | 139 | 18 | 1 |
module NestedSampling.Model where
-- Imports
import Control.Monad.Primitive
import System.Random.MWC
-- A type for representing models.
-- The type parameter defines the parameter space.
data Model a = Model
  {
    -- | Draw a point in parameter space from the prior.
    modelFromPrior :: !(Gen RealWorld -> IO a),
    -- | Propose a perturbed point; the 'Double' is presumably a
    -- log acceptance correction — TODO confirm against callers.
    modelPerturb :: !(a -> Gen RealWorld -> IO (Double, a)),
    -- | Log-likelihood at a point.
    modelLogLikelihood :: !(a -> Double)
  }
| jtobin/NestedSampling.hs | lib/NestedSampling/Model.hs | mit | 442 | 0 | 14 | 144 | 95 | 55 | 40 | 13 | 0 |
-- Copyright (c) Microsoft. All rights reserved.
-- Licensed under the MIT license. See LICENSE file in the project root for full license information.
{-# LANGUAGE QuasiQuotes, OverloadedStrings, RecordWildCards #-}
module Language.Bond.Codegen.Cpp.Types_cpp (types_cpp) where
import Data.Monoid
import Prelude
import Data.Text.Lazy (Text)
import Text.Shakespeare.Text
import Language.Bond.Syntax.Types
import Language.Bond.Codegen.TypeMapping
import Language.Bond.Codegen.Util
import qualified Language.Bond.Codegen.Cpp.Util as CPP
-- | Codegen template for generating /base_name/_types.cpp containing
-- definitions of helper functions and schema metadata static variables.
types_cpp :: MappingContext -> String -> [Import] -> [Declaration] -> (String, Text)
types_cpp cpp file _imports declarations = ("_types.cpp", [lt|
#include "#{file}_reflection.h"
#include <bond/core/exception.h>
#{unorderedMapInclude}
#{CPP.openNamespace cpp}
#{doubleLineSepEnd 1 statics declarations}
#{CPP.closeNamespace cpp}
|])
  where
    -- Only pull in <unordered_map> when at least one enum needs the
    -- name/value conversion maps below.
    unorderedMapInclude = if not (any CPP.isEnumDeclaration declarations) then mempty else [lt|#include <unordered_map>
|]

    -- definitions of Schema statics for non-generic structs
    statics s@Struct {..} =
        if null declParams then CPP.schemaMetadata cpp s else mempty

    -- global variables for enum name/value conversions
    --
    -- ToString is intentionally not implemented in terms of FromEnum, as
    -- ToString returns a reference to the name stored in the map. FromEnum
    -- copies this name into the output paramater.
    statics Enum {..} = [lt|
namespace _bond_enumerators
{
namespace #{declName}
{
    namespace
    {
        struct _hash_#{declName}
        {
            std::size_t operator()(enum #{declName} value) const
            {
                return static_cast<std::size_t>(value);
            }
        };
    }
    const std::string& ToString(enum #{declName} value)
    {
        const auto& map = GetValueToNameMap<std::unordered_map<enum #{declName}, std::string, _hash_#{declName}> >(value);
        auto it = map.find(value);
        if (map.end() == it)
            ::bond::InvalidEnumValueException(value, "#{declName}");
        return it->second;
    }
    void FromString(const std::string& name, enum #{declName}& value)
    {
        if (!ToEnum(value, name))
            ::bond::InvalidEnumValueException(name.c_str(), "#{declName}");
    }
    bool ToEnum(enum #{declName}& value, const std::string& name)
    {
        const auto& map = GetNameToValueMap<std::unordered_map<std::string, enum #{declName}> >(value);
        auto it = map.find(name);
        if (map.end() == it)
            return false;
        value = it->second;
        return true;
    }
    bool FromEnum(std::string& name, enum #{declName} value)
    {
        const auto& map = GetValueToNameMap<std::unordered_map<enum #{declName}, std::string, _hash_#{declName}> >(value);
        auto it = map.find(value);
        if (map.end() == it)
            return false;
        name = it->second;
        return true;
    }
} // namespace #{declName}
} // namespace _bond_enumerators|]

    -- All other declarations contribute no definitions to _types.cpp.
    statics _ = mempty
| chwarr/bond | compiler/src/Language/Bond/Codegen/Cpp/Types_cpp.hs | mit | 3,368 | 0 | 11 | 891 | 232 | 147 | 85 | -1 | -1 |
{-# OPTIONS -fno-cse #-}
{-# LANGUAGE NamedFieldPuns #-}
-- -fno-cse is needed for GLOBAL_VAR's to behave properly
-----------------------------------------------------------------------------
--
-- GHC Driver
--
-- (c) The University of Glasgow 2005
--
-----------------------------------------------------------------------------
module DriverPipeline (
-- Run a series of compilation steps in a pipeline, for a
-- collection of source files.
oneShot, compileFile,
-- Interfaces for the batch-mode driver
linkBinary,
-- Interfaces for the compilation manager (interpreted/batch-mode)
preprocess,
compileOne, compileOne',
link,
-- Exports for hooks to override runPhase and link
PhasePlus(..), CompPipeline(..), PipeEnv(..), PipeState(..),
phaseOutputFilename, getPipeState, getPipeEnv,
hscPostBackendPhase, getLocation, setModLocation, setDynFlags,
runPhase, exeFileName,
mkExtraObjToLinkIntoBinary, mkNoteObjsToLinkIntoBinary,
maybeCreateManifest, runPhase_MoveBinary,
linkingNeeded, checkLinkInfo
) where
#include "HsVersions.h"
import PipelineMonad
import Packages
import HeaderInfo
import DriverPhases
import SysTools
import HscMain
import Finder
import HscTypes hiding ( Hsc )
import Outputable
import Module
import UniqFM ( eltsUFM )
import ErrUtils
import DynFlags
import Config
import Panic
import Util
import StringBuffer ( hGetStringBuffer )
import BasicTypes ( SuccessFlag(..) )
import Maybes ( expectJust )
import ParserCoreUtils ( getCoreModuleName )
import SrcLoc
import FastString
import LlvmCodeGen ( llvmFixupAsm )
import MonadUtils
import Platform
import TcRnTypes
import Hooks
import Exception
import Data.IORef ( readIORef )
import System.Directory
import System.FilePath
import System.IO
import Control.Monad
import Data.List ( isSuffixOf )
import Data.Maybe
import System.Environment
import Data.Char
-- ---------------------------------------------------------------------------
-- Pre-process
-- | Just preprocess a file, put the result in a temp. file (used by the
-- compilation manager during the summary phase).
--
-- We return the augmented DynFlags, because they contain the result
-- of slurping in the OPTIONS pragmas
-- | Run only the preprocessing phases (unlit/cpp/pp) on one file, leaving
-- the result in a temporary file.  Returns the 'DynFlags' augmented with
-- any OPTIONS pragmas slurped from the source, plus the preprocessed path.
preprocess :: HscEnv
           -> (FilePath, Maybe Phase) -- ^ filename and starting phase
           -> IO (DynFlags, FilePath)
preprocess hsc_env (filename, mb_phase) =
  -- The caller either gives an explicit start phase (-x) or the file must
  -- be recognisably Haskell source, otherwise we have nothing to preprocess.
  ASSERT2(isJust mb_phase || isHaskellSrcFilename filename, text filename)
  runPipeline anyHsc hsc_env (filename, fmap RealPhase mb_phase)
        Nothing Temporary Nothing{-no ModLocation-} Nothing{-no stub-}
-- ---------------------------------------------------------------------------
-- | Compile
--
-- Compile a single module, under the control of the compilation manager.
--
-- This is the interface between the compilation manager and the
-- compiler proper (hsc), where we deal with tedious details like
-- reading the OPTIONS pragma from the source file, converting the
-- C or assembly that GHC produces into an object file, and compiling
-- FFI stub files.
--
-- NB. No old interface can also mean that the source has changed.
-- | Compile a single module under the compilation manager, with the default
-- progress messager and no pre-typechecked result.  Thin wrapper over
-- 'compileOne''.
compileOne :: HscEnv
           -> ModSummary      -- ^ summary for module being compiled
           -> Int             -- ^ module N ...
           -> Int             -- ^ ... of M
           -> Maybe ModIface  -- ^ old interface, if we have one
           -> Maybe Linkable  -- ^ old linkable, if we have one
           -> SourceModified
           -> IO HomeModInfo  -- ^ the complete HomeModInfo, if successful
compileOne = compileOne' Nothing (Just batchMsg)
-- | Worker for 'compileOne'.  Runs the frontend (unless a typechecked
-- result is supplied), then dispatches on the backend ('hscTarget') to
-- produce an interface, optional object code / bytecode, and a 'Linkable'.
compileOne' :: Maybe TcGblEnv      -- ^ pre-typechecked result, if available
            -> Maybe Messager      -- ^ progress messager, if any
            -> HscEnv
            -> ModSummary      -- ^ summary for module being compiled
            -> Int             -- ^ module N ...
            -> Int             -- ^ ... of M
            -> Maybe ModIface  -- ^ old interface, if we have one
            -> Maybe Linkable  -- ^ old linkable, if we have one
            -> SourceModified
            -> IO HomeModInfo  -- ^ the complete HomeModInfo, if successful
compileOne' m_tc_result mHscMessage
            hsc_env0 summary mod_index nmods mb_old_iface maybe_old_linkable
            source_modified0
 = do
   let dflags0     = ms_hspp_opts summary
       this_mod    = ms_mod summary
       src_flavour = ms_hsc_src summary
       location    = ms_location summary
       input_fn    = expectJust "compile:hs" (ml_hs_file location)
       input_fnpp  = ms_hspp_file summary
       mod_graph   = hsc_mod_graph hsc_env0
       -- TH/QQ anywhere in the module graph forces the dynamic linker to
       -- be usable, hence the -dynamic-too workaround below.
       needsTH     = any (xopt Opt_TemplateHaskell . ms_hspp_opts) mod_graph
       needsQQ     = any (xopt Opt_QuasiQuotes . ms_hspp_opts) mod_graph
       needsLinker = needsTH || needsQQ
       isDynWay    = any (== WayDyn) (ways dflags0)
       isProfWay   = any (== WayProf) (ways dflags0)
   -- #8180 - when using TemplateHaskell, switch on -dynamic-too so
   -- the linker can correctly load the object files.
   let dflags1 = if needsLinker && dynamicGhc && not isDynWay && not isProfWay
                  then gopt_set dflags0 Opt_BuildDynamicToo
                  else dflags0

   debugTraceMsg dflags1 2 (text "compile: input file" <+> text input_fnpp)

   let basename = dropExtension input_fn

   -- We add the directory in which the .hs files resides) to the import
   -- path.  This is needed when we try to compile the .hc file later, if it
   -- imports a _stub.h file that we created here.
   let current_dir = takeDirectory basename
       old_paths   = includePaths dflags1
       dflags      = dflags1 { includePaths = current_dir : old_paths }
       hsc_env     = hsc_env0 {hsc_dflags = dflags}

   -- Figure out what lang we're generating
   let hsc_lang = hscTarget dflags
   -- ... and what the next phase should be
   let next_phase = hscPostBackendPhase dflags src_flavour hsc_lang
   -- ... and what file to generate the output into
   output_fn <- getOutputFilename next_phase
                        Temporary basename dflags next_phase (Just location)

   let extCore_filename = basename ++ ".hcr"

   -- -fforce-recomp should also work with --make
   let force_recomp = gopt Opt_ForceRecomp dflags
       source_modified
         -- No old linkable also counts as "modified": we must (re)compile.
         | force_recomp || isNothing maybe_old_linkable = SourceModified
         | otherwise = source_modified0
       object_filename = ml_obj_file location

   let always_do_basic_recompilation_check = case hsc_lang of
                                             HscInterpreted -> True
                                             _ -> False

   e <- genericHscCompileGetFrontendResult
            always_do_basic_recompilation_check
            m_tc_result mHscMessage
            hsc_env summary source_modified mb_old_iface (mod_index, nmods)

   case e of
       -- Frontend decided nothing changed: reuse old interface and linkable.
       Left iface ->
           do details <- genModDetails hsc_env iface
              MASSERT(isJust maybe_old_linkable)
              return (HomeModInfo{ hm_details = details,
                                   hm_iface = iface,
                                   hm_linkable = maybe_old_linkable })

       Right (tc_result, mb_old_hash) ->
           -- run the compiler
           case hsc_lang of
               HscInterpreted ->
                   case ms_hsc_src summary of
                   HsBootFile ->
                       -- hs-boot: interface only, no code to interpret.
                       do (iface, _changed, details) <- hscSimpleIface hsc_env tc_result mb_old_hash
                          return (HomeModInfo{ hm_details = details,
                                               hm_iface = iface,
                                               hm_linkable = maybe_old_linkable })
                   _ -> do guts0 <- hscDesugar hsc_env summary tc_result
                           guts <- hscSimplify hsc_env guts0
                           (iface, _changed, details, cgguts) <- hscNormalIface hsc_env extCore_filename guts mb_old_hash
                           (hasStub, comp_bc, modBreaks) <- hscInteractive hsc_env cgguts summary

                           -- Foreign-export stubs still need real object code.
                           stub_o <- case hasStub of
                                     Nothing -> return []
                                     Just stub_c -> do
                                         stub_o <- compileStub hsc_env stub_c
                                         return [DotO stub_o]

                           let hs_unlinked = [BCOs comp_bc modBreaks]
                               unlinked_time = ms_hs_date summary
                             -- Why do we use the timestamp of the source file here,
                             -- rather than the current time?  This works better in
                             -- the case where the local clock is out of sync
                             -- with the filesystem's clock.  It's just as accurate:
                             -- if the source is modified, then the linkable will
                             -- be out of date.
                           let linkable = LM unlinked_time this_mod
                                          (hs_unlinked ++ stub_o)

                           return (HomeModInfo{ hm_details = details,
                                                hm_iface = iface,
                                                hm_linkable = Just linkable })
               HscNothing ->
                   do (iface, _changed, details) <- hscSimpleIface hsc_env tc_result mb_old_hash
                      let linkable = if isHsBoot src_flavour
                                     then maybe_old_linkable
                                     else Just (LM (ms_hs_date summary) this_mod [])
                      return (HomeModInfo{ hm_details = details,
                                           hm_iface = iface,
                                           hm_linkable = linkable })

               _ ->
                   case ms_hsc_src summary of
                   HsBootFile ->
                       do (iface, changed, details) <- hscSimpleIface hsc_env tc_result mb_old_hash
                          hscWriteIface dflags iface changed summary
                          -- Dummy .o-boot stamp for the benefit of 'make'.
                          touchObjectFile dflags object_filename
                          return (HomeModInfo{ hm_details = details,
                                               hm_iface = iface,
                                               hm_linkable = maybe_old_linkable })

                   _ -> do guts0 <- hscDesugar hsc_env summary tc_result
                           guts <- hscSimplify hsc_env guts0
                           (iface, changed, details, cgguts) <- hscNormalIface hsc_env extCore_filename guts mb_old_hash
                           hscWriteIface dflags iface changed summary

                           -- We're in --make mode: finish the compilation pipeline.
                           let mod_name = ms_mod_name summary
                           _ <- runPipeline StopLn hsc_env
                                             (output_fn,
                                              Just (HscOut src_flavour mod_name (HscRecomp cgguts summary)))
                                             (Just basename)
                                             Persistent
                                             (Just location)
                                             Nothing
                                 -- The object filename comes from the ModLocation
                           o_time <- getModificationUTCTime object_filename
                           let linkable = LM o_time this_mod [DotO object_filename]

                           return (HomeModInfo{ hm_details = details,
                                                hm_iface = iface,
                                                hm_linkable = Just linkable })
-----------------------------------------------------------------------------
-- stub .h and .c files (for foreign export support)
-- The _stub.c file is derived from the haskell source file, possibly taking
-- into account the -stubdir option.
--
-- The object file created by compiling the _stub.c file is put into a
-- temporary file, which will be later combined with the main .o file
-- (see the MergeStubs phase).
-- | Compile a foreign-export @_stub.c@ file down to a temporary object
-- file, returning the path of the resulting @.o@.
compileStub :: HscEnv -> FilePath -> IO FilePath
compileStub hsc_env stub_c =
    -- Run the full pipeline to StopLn on the C file; we only care about
    -- the output path, not the final DynFlags.
    fmap snd $ runPipeline StopLn hsc_env (stub_c, Nothing) Nothing
                           Temporary Nothing {- no ModLocation -} Nothing
-- ---------------------------------------------------------------------------
-- Link
-- | Link the home-package modules into the final artefact, dispatching on
-- the link mode.  Overridable via 'linkHook'.
link :: GhcLink                 -- interactive or batch
     -> DynFlags                -- dynamic flags
     -> Bool                    -- attempt linking in batch mode?
     -> HomePackageTable        -- what to link
     -> IO SuccessFlag

-- For the moment, in the batch linker, we don't bother to tell doLink
-- which packages to link -- it just tries all that are available.
-- batch_attempt_linking should only be *looked at* in batch mode.  It
-- should only be True if the upsweep was successful and someone
-- exports main, i.e., we have good reason to believe that linking
-- will succeed.

link ghcLink dflags
  = lookupHook linkHook l dflags ghcLink dflags
  where
    -- Default implementation, used when no hook overrides linking.
    l LinkInMemory _ _ _
      = if cGhcWithInterpreter == "YES"
        then -- Not Linking...(demand linker will do the job)
             return Succeeded
        else panicBadLink LinkInMemory
    l NoLink _ _ _
      = return Succeeded
    l LinkBinary dflags batch_attempt_linking hpt
      = link' dflags batch_attempt_linking hpt
    l LinkStaticLib dflags batch_attempt_linking hpt
      = link' dflags batch_attempt_linking hpt
    l LinkDynLib dflags batch_attempt_linking hpt
      = link' dflags batch_attempt_linking hpt
-- | Abort with a panic for a link mode this GHC build cannot handle.
panicBadLink :: GhcLink -> a
panicBadLink mode = panic msg
  where
    msg = "link: GHC not built to link this way: " ++ show mode
-- | Batch-mode linking proper: gather linkables and package deps from the
-- home package table, skip the link if everything is up to date, otherwise
-- invoke the appropriate linker for the requested 'GhcLink' mode.
-- NOTE: uses the dangling @else do@ layout trick to avoid deep nesting.
link' :: DynFlags                -- dynamic flags
      -> Bool                    -- attempt linking in batch mode?
      -> HomePackageTable        -- what to link
      -> IO SuccessFlag

link' dflags batch_attempt_linking hpt
   | batch_attempt_linking
   = do
        let
            staticLink = case ghcLink dflags of
                          LinkStaticLib -> True
                          _ -> platformBinariesAreStaticLibs (targetPlatform dflags)

            home_mod_infos = eltsUFM hpt

            -- the packages we depend on
            pkg_deps  = concatMap (map fst . dep_pkgs . mi_deps . hm_iface) home_mod_infos

            -- the linkables to link
            linkables = map (expectJust "link".hm_linkable) home_mod_infos

        debugTraceMsg dflags 3 (text "link: linkables are ..." $$ vcat (map ppr linkables))

        -- check for the -no-link flag
        if isNoLink (ghcLink dflags)
          then do debugTraceMsg dflags 3 (text "link(batch): linking omitted (-c flag given).")
                  return Succeeded
          else do

        let getOfiles (LM _ _ us) = map nameOfObject (filter isObject us)
            obj_files = concatMap getOfiles linkables

            exe_file = exeFileName staticLink dflags

        linking_needed <- linkingNeeded dflags staticLink linkables pkg_deps

        if not (gopt Opt_ForceRecomp dflags) && not linking_needed
           then do debugTraceMsg dflags 2 (text exe_file <+> ptext (sLit "is up to date, linking not required."))
                   return Succeeded
           else do

        compilationProgressMsg dflags ("Linking " ++ exe_file ++ " ...")

        -- Don't showPass in Batch mode; doLink will do that for us.
        let link = case ghcLink dflags of
                LinkBinary    -> linkBinary
                LinkStaticLib -> linkStaticLibCheck
                LinkDynLib    -> linkDynLibCheck
                other         -> panicBadLink other
        link dflags obj_files pkg_deps

        debugTraceMsg dflags 3 (text "link: done")

        -- linkBinary only returns if it succeeds
        return Succeeded

   | otherwise
   = do debugTraceMsg dflags 3 (text "link(batch): upsweep (partially) failed OR" $$
                                text "   Main.main not exported; not linking.")
        return Succeeded
-- | Decide whether relinking is necessary by comparing the executable's
-- modification time against all object files, extra ld inputs, and the
-- Haskell package libraries we depend on.  Any missing file, or any input
-- newer than the executable, means we must relink.
linkingNeeded :: DynFlags -> Bool -> [Linkable] -> [PackageId] -> IO Bool
linkingNeeded dflags staticLink linkables pkg_deps = do
        -- if the modification time on the executable is later than the
        -- modification times on all of the objects and libraries, then omit
        -- linking (unless the -fforce-recomp flag was given).
  let exe_file = exeFileName staticLink dflags
  e_exe_time <- tryIO $ getModificationUTCTime exe_file
  case e_exe_time of
    Left _  -> return True          -- no executable yet: must link
    Right t -> do
        -- first check object files and extra_ld_inputs
        let extra_ld_inputs = [ f | FileOption _ f <- ldInputs dflags ]
        e_extra_times <- mapM (tryIO . getModificationUTCTime) extra_ld_inputs
        let (errs,extra_times) = splitEithers e_extra_times
        let obj_times = map linkableTime linkables ++ extra_times
        if not (null errs) || any (t <) obj_times
            then return True
            else do

        -- next, check libraries. XXX this only checks Haskell libraries,
        -- not extra_libraries or -l things from the command line.
        let pkg_map = pkgIdMap (pkgState dflags)
            pkg_hslibs = [ (libraryDirs c, lib)
                         | Just c <- map (lookupPackage pkg_map) pkg_deps,
                           lib <- packageHsLibs dflags c ]

        pkg_libfiles <- mapM (uncurry (findHSLib dflags)) pkg_hslibs
        if any isNothing pkg_libfiles then return True else do

        e_lib_times <- mapM (tryIO . getModificationUTCTime)
                          (catMaybes pkg_libfiles)
        let (lib_errs,lib_times) = splitEithers e_lib_times
        if not (null lib_errs) || any (t <) lib_times
           then return True
           else checkLinkInfo dflags pkg_deps exe_file
-- | Compare the link options recorded in the existing binary's ELF section
-- against the current ones.  Returns 'False' if they match (so linking can
-- be skipped, because the previous binary was linked with "the same
-- options"), 'True' if they differ or cannot be compared.
checkLinkInfo :: DynFlags -> [PackageId] -> FilePath -> IO Bool
checkLinkInfo dflags pkg_deps exe_file
 | not (platformSupportsSavingLinkOpts (platformOS (targetPlatform dflags)))
 -- ToDo: Windows and OS X do not use the ELF binary format, so
 -- readelf does not work there.  We need to find another way to do
 -- this.
 = return False -- conservatively we should return True, but not
                -- linking in this case was the behaviour for a long
                -- time so we leave it as-is.
 | otherwise
 = do
   link_info <- getLinkInfo dflags pkg_deps
   debugTraceMsg dflags 3 $ text ("Link info: " ++ link_info)
   m_exe_link_info <- readElfSection dflags ghcLinkInfoSectionName exe_file
   debugTraceMsg dflags 3 $ text ("Exe link info: " ++ show m_exe_link_info)
   return (Just link_info /= m_exe_link_info)
-- | Can we stash the link options in an ELF section of the binary on this
-- OS?  Requires an ELF target; Solaris 2 is excluded (see #5382).
platformSupportsSavingLinkOpts :: OS -> Bool
platformSupportsSavingLinkOpts os =
    os /= OSSolaris2 && osElfTarget os
-- | Name of the ELF section in which GHC records the link options used to
-- produce a binary (read back by 'checkLinkInfo').
ghcLinkInfoSectionName :: String
ghcLinkInfoSectionName = ".debug-ghc-link-info"
   -- if we use the ".debug" prefix, then strip will strip it by default
-- | Search the given directories for a Haskell package library file
-- (static @libX.a@ or the platform's shared-object name, depending on
-- -static), returning the first hit if any.
findHSLib :: DynFlags -> [String] -> String -> IO (Maybe FilePath)
findHSLib dflags dirs lib = do
    let libFileName
          | gopt Opt_Static dflags = "lib" ++ lib <.> "a"
          | otherwise              = mkSOName (targetPlatform dflags) lib
        candidates = [ dir </> libFileName | dir <- dirs ]
    hits <- filterM doesFileExist candidates
    return (listToMaybe hits)
-- -----------------------------------------------------------------------------
-- Compile files in one-shot mode.
-- | One-shot mode (plain @ghc Foo.hs@): compile each source file up to the
-- stop phase, then hand the resulting outputs to the linker step.
oneShot :: HscEnv -> Phase -> [(String, Maybe Phase)] -> IO ()
oneShot hsc_env stop_phase srcs =
    mapM (compileFile hsc_env stop_phase) srcs
        >>= doLink (hsc_dflags hsc_env) stop_phase
-- | Compile a single file in one-shot mode, returning the path of the
-- pipeline's output.  Decides where the output goes based on -fno-code,
-- whether we are linking, and any explicit -o flag.
compileFile :: HscEnv -> Phase -> (FilePath, Maybe Phase) -> IO FilePath
compileFile hsc_env stop_phase (src, mb_phase) = do
   exists <- doesFileExist src
   when (not exists) $
        throwGhcExceptionIO (CmdLineError ("does not exist: " ++ src))

   let
        dflags    = hsc_dflags hsc_env
        split     = gopt Opt_SplitObjs dflags
        mb_o_file = outputFile dflags
        ghc_link  = ghcLink dflags      -- Set by -c or -no-link

        -- When linking, the -o argument refers to the linker's output.
        -- otherwise, we use it as the name for the pipeline's output.
        output
         -- If we are doing -fno-code, then act as if the output is
         -- 'Temporary'. This stops GHC trying to copy files to their
         -- final location.
         | HscNothing <- hscTarget dflags = Temporary
         | StopLn <- stop_phase, not (isNoLink ghc_link) = Persistent
                -- -o foo applies to linker
         | isJust mb_o_file = SpecificFile
                -- -o foo applies to the file we are compiling now
         | otherwise = Persistent

        -- With -split-objs, the assembly phase becomes SplitAs.
        stop_phase' = case stop_phase of
                        As _ | split -> SplitAs
                        _            -> stop_phase

   ( _, out_file) <- runPipeline stop_phase' hsc_env
                            (src, fmap RealPhase mb_phase) Nothing output
                            Nothing{-no ModLocation-} Nothing
   return out_file
-- | Final link step for one-shot mode: a no-op unless we actually reached
-- the link phase, otherwise dispatch on the requested link mode.
doLink :: DynFlags -> Phase -> [FilePath] -> IO ()
doLink dflags stop_phase o_files =
    -- Only link if we went all the way to StopLn.
    when (isStopLn stop_phase) $
        case ghcLink dflags of
            NoLink        -> return ()
            LinkBinary    -> linkBinary dflags o_files []
            LinkStaticLib -> linkStaticLibCheck dflags o_files []
            LinkDynLib    -> linkDynLibCheck dflags o_files []
            other         -> panicBadLink other
-- ---------------------------------------------------------------------------
-- | Run a compilation pipeline, consisting of multiple phases.
--
-- This is the interface to the compilation pipeline, which runs
-- a series of compilation steps on a single source file, specifying
-- at which stage to stop.
--
-- The DynFlags can be modified by phases in the pipeline (eg. by
-- OPTIONS_GHC pragmas), and the changes affect later phases in the
-- pipeline.
-- | Run the compilation pipeline on one input file, from the phase implied
-- by the file's suffix (or the given start phase) to the stop phase.
-- May run the whole pipeline a second time for the dynamic way when
-- -dynamic-too could not take the fast path.
runPipeline
  :: Phase                      -- ^ When to stop
  -> HscEnv                     -- ^ Compilation environment
  -> (FilePath,Maybe PhasePlus) -- ^ Input filename (and maybe -x suffix)
  -> Maybe FilePath             -- ^ original basename (if different from ^^^)
  -> PipelineOutput             -- ^ Output filename
  -> Maybe ModLocation          -- ^ A ModLocation, if this is a Haskell module
  -> Maybe FilePath             -- ^ stub object, if we have one
  -> IO (DynFlags, FilePath)    -- ^ (final flags, output filename)
runPipeline stop_phase hsc_env0 (input_fn, mb_phase)
            mb_basename output maybe_loc maybe_stub_o
    = do let
             dflags0 = hsc_dflags hsc_env0

             -- Decide where dump files should go based on the pipeline output
             dflags = dflags0 { dumpPrefix = Just (basename ++ ".") }
             hsc_env = hsc_env0 {hsc_dflags = dflags}

             (input_basename, suffix) = splitExtension input_fn
             suffix' = drop 1 suffix -- strip off the .
             basename | Just b <- mb_basename = b
                      | otherwise             = input_basename

             -- If we were given a -x flag, then use that phase to start from
             start_phase = fromMaybe (RealPhase (startPhase suffix')) mb_phase

             -- Haskellish inputs may need a second (dynamic-way) run below.
             isHaskell (RealPhase (Unlit _)) = True
             isHaskell (RealPhase (Cpp   _)) = True
             isHaskell (RealPhase (HsPp  _)) = True
             isHaskell (RealPhase (Hsc   _)) = True
             isHaskell (HscOut {})           = True
             isHaskell _                     = False

             isHaskellishFile = isHaskell start_phase

             env = PipeEnv{ pe_isHaskellishFile = isHaskellishFile,
                            stop_phase,
                            src_filename = input_fn,
                            src_basename = basename,
                            src_suffix = suffix',
                            output_spec = output }

         -- We want to catch cases of "you can't get there from here" before
         -- we start the pipeline, because otherwise it will just run off the
         -- end.
         --
         -- There is a partial ordering on phases, where A < B iff A occurs
         -- before B in a normal compilation pipeline.
         let happensBefore' = happensBefore dflags
         case start_phase of
             RealPhase start_phase' ->
                 when (not (start_phase' `happensBefore'` stop_phase)) $
                       throwGhcExceptionIO (UsageError
                                   ("cannot compile this file to desired target: "
                                      ++ input_fn))
             HscOut {} -> return ()

         debugTraceMsg dflags 4 (text "Running the pipeline")
         r <- runPipeline' start_phase hsc_env env input_fn
                           maybe_loc maybe_stub_o

         -- If we are compiling a Haskell module, and doing
         -- -dynamic-too, but couldn't do the -dynamic-too fast
         -- path, then rerun the pipeline for the dyn way
         let dflags = extractDynFlags hsc_env
         -- NB: Currently disabled on Windows (ref #7134, #8228, and #5987)
         when (not $ platformOS (targetPlatform dflags) == OSMinGW32) $ do
             when isHaskellishFile $ whenCannotGenerateDynamicToo dflags $ do
                   debugTraceMsg dflags 4
                       (text "Running the pipeline again for -dynamic-too")
                   let dflags' = dynamicTooMkDynamicDynFlags dflags
                   hsc_env' <- newHscEnv dflags'
                   _ <- runPipeline' start_phase hsc_env' env input_fn
                                     maybe_loc maybe_stub_o
                   return ()
         return r
-- | Kick off the pipeline monad with an initial 'PipeState' and loop over
-- phases until the stop phase is reached.
runPipeline'
  :: PhasePlus                  -- ^ When to start
  -> HscEnv                     -- ^ Compilation environment
  -> PipeEnv
  -> FilePath                   -- ^ Input filename
  -> Maybe ModLocation          -- ^ A ModLocation, if this is a Haskell module
  -> Maybe FilePath             -- ^ stub object, if we have one
  -> IO (DynFlags, FilePath)    -- ^ (final flags, output filename)
runPipeline' start_phase hsc_env env input_fn
             maybe_loc maybe_stub_o =
    evalP (pipeLoop start_phase input_fn) env initial_state
  where
    -- Seed the pipeline monad with the environment, location and stub info.
    initial_state = PipeState { hsc_env      = hsc_env
                              , maybe_loc    = maybe_loc
                              , maybe_stub_o = maybe_stub_o
                              }
-- ---------------------------------------------------------------------------
-- outer pipeline loop
-- | pipeLoop runs phases until we reach the stop phase
-- | pipeLoop runs phases until we reach the stop phase, threading each
-- phase's output file into the next.  On 'HscOut' it may rerun itself for
-- the dynamic way when -dynamic-too is in effect.
pipeLoop :: PhasePlus -> FilePath -> CompPipeline (DynFlags, FilePath)
pipeLoop phase input_fn = do
  env <- getPipeEnv
  dflags <- getDynFlags
  let happensBefore' = happensBefore dflags
      stopPhase = stop_phase env
  case phase of
   RealPhase realPhase | realPhase `eqPhase` stopPhase            -- All done
     -> -- Sometimes, a compilation phase doesn't actually generate any output
        -- (eg. the CPP phase when -fcpp is not turned on).  If we end on this
        -- stage, but we wanted to keep the output, then we have to explicitly
        -- copy the file, remembering to prepend a {-# LINE #-} pragma so that
        -- further compilation stages can tell what the original filename was.
        case output_spec env of
        Temporary ->
            return (dflags, input_fn)
        output ->
            do pst <- getPipeState
               final_fn <- liftIO $ getOutputFilename
                                        stopPhase output (src_basename env)
                                        dflags stopPhase (maybe_loc pst)
               when (final_fn /= input_fn) $ do
                  let msg = ("Copying `" ++ input_fn ++"' to `" ++ final_fn ++ "'")
                      line_prag = Just ("{-# LINE 1 \"" ++ src_filename env ++ "\" #-}\n")
                  liftIO $ copyWithHeader dflags msg line_prag input_fn final_fn
               return (dflags, final_fn)

     | not (realPhase `happensBefore'` stopPhase)
        -- Something has gone wrong.  We'll try to cover all the cases when
        -- this could happen, so if we reach here it is a panic.
        -- eg. it might happen if the -C flag is used on a source file that
        -- has {-# OPTIONS -fasm #-}.
     -> panic ("pipeLoop: at phase " ++ show realPhase ++
           " but I wanted to stop at phase " ++ show stopPhase)

   _
     -> do liftIO $ debugTraceMsg dflags 4
                                  (ptext (sLit "Running phase") <+> ppr phase)
           (next_phase, output_fn) <- runHookedPhase phase input_fn dflags
           r <- pipeLoop next_phase output_fn
           case phase of
               HscOut {} ->
                   -- Rerun this phase for the dynamic way if needed.
                   whenGeneratingDynamicToo dflags $ do
                       setDynFlags $ dynamicTooMkDynamicDynFlags dflags
                       -- TODO shouldn't ignore result:
                       _ <- pipeLoop phase input_fn
                       return ()
               _ ->
                   return ()
           return r
-- | Run one pipeline phase, giving the 'runPhaseHook' (if installed) a
-- chance to replace the default 'runPhase' implementation.
runHookedPhase :: PhasePlus -> FilePath -> DynFlags
               -> CompPipeline (PhasePlus, FilePath)
runHookedPhase pp input dflags = phase_impl pp input dflags
  where
    -- the hooked (or default) phase runner
    phase_impl = lookupHook runPhaseHook runPhase dflags
-- -----------------------------------------------------------------------------
-- In each phase, we need to know into what filename to generate the
-- output. All the logic about which filenames we generate output
-- into is embodied in the following function.
-- | Compute the output filename for the current phase's result, given the
-- phase that will consume it next.  Pulls everything it needs out of the
-- pipeline environment and state.
phaseOutputFilename :: Phase{-next phase-} -> CompPipeline FilePath
phaseOutputFilename next_phase = do
    env <- getPipeEnv
    st  <- getPipeState
    let dflags = hsc_dflags (hsc_env st)
    liftIO $ getOutputFilename (stop_phase env) (output_spec env)
                               (src_basename env) dflags next_phase
                               (maybe_loc st)
-- | Pure-ish core of output-filename selection: final outputs honour
-- 'Persistent' \/ 'SpecificFile', intermediate outputs are kept only for
-- the -keep-* flags, and everything else goes to a fresh temp file.
getOutputFilename
  :: Phase -> PipelineOutput -> String
  -> DynFlags -> Phase{-next phase-} -> Maybe ModLocation -> IO FilePath
getOutputFilename stop_phase output basename dflags next_phase maybe_location
 | is_last_phase, Persistent   <- output = persistent_fn
 | is_last_phase, SpecificFile <- output = case outputFile dflags of
                                           Just f -> return f
                                           Nothing ->
                                               panic "SpecificFile: No filename"
 | keep_this_output                      = persistent_fn
 | otherwise                             = newTempName dflags suffix
    where
          hcsuf      = hcSuf dflags
          odir       = objectDir dflags
          osuf       = objectSuf dflags
          keep_hc    = gopt Opt_KeepHcFiles dflags
          keep_s     = gopt Opt_KeepSFiles dflags
          keep_bc    = gopt Opt_KeepLlvmFiles dflags

          -- Suffix of the file the *next* phase consumes; MergeStub and
          -- StopLn both take object files.
          myPhaseInputExt HCc       = hcsuf
          myPhaseInputExt MergeStub = osuf
          myPhaseInputExt StopLn    = osuf
          myPhaseInputExt other     = phaseInputExt other

          is_last_phase = next_phase `eqPhase` stop_phase

          -- sometimes, we keep output from intermediate stages
          keep_this_output =
               case next_phase of
                       As _    | keep_s  -> True
                       LlvmOpt | keep_bc -> True
                       HCc     | keep_hc -> True
                       _other            -> False

          suffix = myPhaseInputExt next_phase

          -- persistent object files get put in odir
          persistent_fn
             | StopLn <- next_phase = return odir_persistent
             | otherwise            = return persistent

          persistent = basename <.> suffix

          odir_persistent
             | Just loc <- maybe_location = ml_obj_file loc
             | Just d <- odir = d </> persistent
             | otherwise      = persistent
-- -----------------------------------------------------------------------------
-- | Each phase in the pipeline returns the next phase to execute, and the
-- name of the file in which the output was placed.
--
-- We must do things dynamically this way, because we often don't know
-- what the rest of the phases will be until part-way through the
-- compilation: for example, an {-# OPTIONS -fasm #-} at the beginning
-- of a source file can change the latter stages of the pipeline from
-- taking the LLVM route to using the native code generator.
--
runPhase :: PhasePlus   -- ^ Run this phase
         -> FilePath    -- ^ name of the input file
         -> DynFlags    -- ^ for convenience, we pass the current dflags in
         -> CompPipeline (PhasePlus,   -- next phase to run
                          FilePath)    -- output filename

        -- Invariant: the output filename always contains the output
        -- Interesting case: Hsc when there is no recompilation to do
        --                   Then the output filename is still a .o file

-------------------------------------------------------------------------------
-- Unlit phase

runPhase (RealPhase (Unlit sf)) input_fn dflags
  = do
       output_fn <- phaseOutputFilename (Cpp sf)

       let flags = [ -- The -h option passes the file name for unlit to
                     -- put in a #line directive
                     SysTools.Option     "-h"
                   , SysTools.Option $ escape $ normalise input_fn
                   , SysTools.FileOption "" input_fn
                   , SysTools.FileOption "" output_fn
                   ]

       liftIO $ SysTools.runUnlit dflags flags

       return (RealPhase (Cpp sf), output_fn)
  where
       -- escape the characters \, ", and ', but don't try to escape
       -- Unicode or anything else (so we don't use Util.charToC
       -- here).  If we get this wrong, then in
       -- Coverage.addTicksToBinds where we check that the filename in
       -- a SrcLoc is the same as the source filename, the two will
       -- look bogusly different. See test:
       -- libraries/hpc/tests/function/subdir/tough2.lhs
       escape ('\\':cs) = '\\':'\\': escape cs
       escape ('\"':cs) = '\\':'\"': escape cs
       escape ('\'':cs) = '\\':'\'': escape cs
       escape (c:cs)    = c : escape cs
       escape []        = []
-------------------------------------------------------------------------------
-- Cpp phase : (a) gets OPTIONS out of file
--             (b) runs cpp if necessary

runPhase (RealPhase (Cpp sf)) input_fn dflags0
  = do
       src_opts <- liftIO $ getOptionsFromFile dflags0 input_fn
       (dflags1, unhandled_flags, warns)
           <- liftIO $ parseDynamicFilePragma dflags0 src_opts
       setDynFlags dflags1
       liftIO $ checkProcessArgsResult dflags1 unhandled_flags

       if not (xopt Opt_Cpp dflags1) then do
           -- we have to be careful to emit warnings only once.
           unless (gopt Opt_Pp dflags1) $
               liftIO $ handleFlagWarnings dflags1 warns

           -- no need to preprocess CPP, just pass input file along
           -- to the next phase of the pipeline.
           return (RealPhase (HsPp sf), input_fn)
        else do
            output_fn <- phaseOutputFilename (HsPp sf)
            liftIO $ doCpp dflags1 True{-raw-}
                           input_fn output_fn
            -- re-read the pragmas now that we've preprocessed the file
            -- See #2464,#3457
            src_opts <- liftIO $ getOptionsFromFile dflags0 output_fn
            (dflags2, unhandled_flags, warns)
                <- liftIO $ parseDynamicFilePragma dflags0 src_opts
            liftIO $ checkProcessArgsResult dflags2 unhandled_flags
            unless (gopt Opt_Pp dflags2) $
                liftIO $ handleFlagWarnings dflags2 warns
            -- the HsPp pass below will emit warnings

            setDynFlags dflags2

            return (RealPhase (HsPp sf), output_fn)
-------------------------------------------------------------------------------
-- HsPp phase: runs the user's -F preprocessor (if -F was given), then
-- re-reads any pragmas the preprocessor may have produced.

runPhase (RealPhase (HsPp sf)) input_fn dflags
  = do
       if not (gopt Opt_Pp dflags) then
           -- no need to preprocess, just pass input file along
           -- to the next phase of the pipeline.
          return (RealPhase (Hsc sf), input_fn)
        else do
            PipeEnv{src_basename, src_suffix} <- getPipeEnv
            let orig_fn = src_basename <.> src_suffix
            output_fn <- phaseOutputFilename (Hsc sf)
            liftIO $ SysTools.runPp dflags
                           ( [ SysTools.Option     orig_fn
                             , SysTools.Option     input_fn
                             , SysTools.FileOption "" output_fn
                             ]
                           )

            -- re-read pragmas now that we've parsed the file (see #3674)
            src_opts <- liftIO $ getOptionsFromFile dflags output_fn
            (dflags1, unhandled_flags, warns)
                <- liftIO $ parseDynamicFilePragma dflags src_opts
            setDynFlags dflags1
            liftIO $ checkProcessArgsResult dflags1 unhandled_flags
            liftIO $ handleFlagWarnings dflags1 warns

            return (RealPhase (Hsc sf), output_fn)
-----------------------------------------------------------------------------
-- Hsc phase

-- Compilation of a single module, in "legacy" mode (_not_ under
-- the direction of the compilation manager).
runPhase (RealPhase (Hsc src_flavour)) input_fn dflags0
 = do   -- normal Hsc mode, not mkdependHS

        PipeEnv{ stop_phase=stop,
                 src_basename=basename,
                 src_suffix=suff } <- getPipeEnv

  -- we add the current directory (i.e. the directory in which
  -- the .hs files resides) to the include path, since this is
  -- what gcc does, and it's probably what you want.
        let current_dir = takeDirectory basename
            paths = includePaths dflags0
            dflags = dflags0 { includePaths = current_dir : paths }

        setDynFlags dflags

  -- gather the imports and module name
        (hspp_buf,mod_name,imps,src_imps) <- liftIO $
            case src_flavour of
                ExtCoreFile -> do  -- no explicit imports in ExtCore input.
                    m <- getCoreModuleName input_fn
                    return (Nothing, mkModuleName m, [], [])

                _           -> do
                    buf <- hGetStringBuffer input_fn
                    (src_imps,imps,L _ mod_name) <- getImports dflags buf input_fn (basename <.> suff)
                    return (Just buf, mod_name, imps, src_imps)

  -- Take -o into account if present
  -- Very like -ohi, but we must *only* do this if we aren't linking
  -- (If we're linking then the -o applies to the linked thing, not to
  -- the object file for one module.)
  -- Note the nasty duplication with the same computation in compileFile above
        location <- getLocation src_flavour mod_name

        let o_file = ml_obj_file location -- The real object file

  -- Figure out if the source has changed, for recompilation avoidance.
  --
  -- Setting source_unchanged to True means that M.o seems
  -- to be up to date wrt M.hs; so no need to recompile unless imports have
  -- changed (which the compiler itself figures out).
  -- Setting source_unchanged to False tells the compiler that M.o is out of
  -- date wrt M.hs (or M.o doesn't exist) so we must recompile regardless.
        src_timestamp <- liftIO $ getModificationUTCTime (basename <.> suff)

        source_unchanged <- liftIO $
          if not (isStopLn stop)
                -- SourceModified unconditionally if
                --      (a) recompilation checker is off, or
                --      (b) we aren't going all the way to .o file (e.g. ghc -S)
             then return SourceModified
                -- Otherwise look at file modification dates
             else do o_file_exists <- doesFileExist o_file
                     if not o_file_exists
                        then return SourceModified       -- Need to recompile
                        else do t2 <- getModificationUTCTime o_file
                                if t2 > src_timestamp
                                  then return SourceUnmodified
                                  else return SourceModified

        let extCore_filename = basename ++ ".hcr"

        PipeState{hsc_env=hsc_env'} <- getPipeState

  -- Tell the finder cache about this module
        mod <- liftIO $ addHomeModuleToFinder hsc_env' mod_name location

  -- Make the ModSummary to hand to hscMain
        let
            mod_summary = ModSummary {  ms_mod       = mod,
                                        ms_hsc_src   = src_flavour,
                                        ms_hspp_file = input_fn,
                                        ms_hspp_opts = dflags,
                                        ms_hspp_buf  = hspp_buf,
                                        ms_location  = location,
                                        ms_hs_date   = src_timestamp,
                                        ms_obj_date  = Nothing,
                                        ms_textual_imps = imps,
                                        ms_srcimps      = src_imps }

  -- run the compiler!
        result <- liftIO $ hscCompileOneShot hsc_env' extCore_filename
                               mod_summary source_unchanged

        -- The HscOut phase carries the result; there is no real input file.
        return (HscOut src_flavour mod_name result,
                panic "HscOut doesn't have an input filename")
-- HscOut: turn the frontend's 'HscStatus' into object/assembly output,
-- compiling any foreign-export stub along the way.
runPhase (HscOut src_flavour mod_name result) _ dflags = do
        location <- getLocation src_flavour mod_name
        setModLocation location

        let o_file = ml_obj_file location -- The real object file
            hsc_lang = hscTarget dflags
            next_phase = hscPostBackendPhase dflags src_flavour hsc_lang

        case result of
            HscNotGeneratingCode ->
                return (RealPhase next_phase,
                        panic "No output filename from Hsc when no-code")
            HscUpToDate ->
                do liftIO $ touchObjectFile dflags o_file
                   -- The .o file must have a later modification date
                   -- than the source file (else we wouldn't get Nothing)
                   -- but we touch it anyway, to keep 'make' happy (we think).
                   return (RealPhase StopLn, o_file)
            HscUpdateBoot ->
                do -- In the case of hs-boot files, generate a dummy .o-boot
                   -- stamp file for the benefit of Make
                   liftIO $ touchObjectFile dflags o_file
                   return (RealPhase next_phase, o_file)
            HscRecomp cgguts mod_summary
              -> do output_fn <- phaseOutputFilename next_phase

                    PipeState{hsc_env=hsc_env'} <- getPipeState

                    (outputFilename, mStub) <- liftIO $ hscGenHardCode hsc_env' cgguts mod_summary output_fn
                    -- Compile the stub C file (if any) and record its object
                    -- for the MergeStub phase later.
                    case mStub of
                        Nothing -> return ()
                        Just stub_c ->
                            do stub_o <- liftIO $ compileStub hsc_env' stub_c
                               setStubO stub_o

                    return (RealPhase next_phase, outputFilename)
-----------------------------------------------------------------------------
-- Cmm phase

-- CmmCpp: run cpp over a .cmmcpp file before parsing it as Cmm.
runPhase (RealPhase CmmCpp) input_fn dflags
  = do
       output_fn <- phaseOutputFilename Cmm
       liftIO $ doCpp dflags False{-not raw-}
                      input_fn output_fn
       return (RealPhase Cmm, output_fn)

-- Cmm: compile a Cmm file with the configured backend, then continue with
-- whatever phase that backend's output requires (as for Haskell source).
runPhase (RealPhase Cmm) input_fn dflags
  = do
        let hsc_lang = hscTarget dflags

        let next_phase = hscPostBackendPhase dflags HsSrcFile hsc_lang

        output_fn <- phaseOutputFilename next_phase

        PipeState{hsc_env} <- getPipeState

        liftIO $ hscCompileCmmFile hsc_env input_fn output_fn

        return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- Cc phase
-- we don't support preprocessing .c files (with -E) now. Doing so introduces
-- way too many hacks, and I can't say I've ever used it anyway.
-- Compile a C-family source file (.c, .cpp, .m, .mm, or a GHC-generated
-- .hc file) down to assembly with the system C compiler; the As phase
-- then assembles the result.
runPhase (RealPhase cc_phase) input_fn dflags
   | any (cc_phase `eqPhase`) [Cc, Ccpp, HCc, Cobjc, Cobjcpp]
   = do
        let platform = targetPlatform dflags
            hcc = cc_phase `eqPhase` HCc
        let cmdline_include_paths = includePaths dflags
        -- HC files have the dependent packages stamped into them
        pkgs <- if hcc then liftIO $ getHCFilePackages input_fn else return []
        -- add package include paths even if we're just compiling .c
        -- files; this is the Value Add(TM) that using ghc instead of
        -- gcc gives you :)
        pkg_include_dirs <- liftIO $ getPackageIncludePath dflags pkgs
        let include_paths = foldr (\ x xs -> ("-I" ++ x) : xs) []
                              (cmdline_include_paths ++ pkg_include_dirs)
        let gcc_extra_viac_flags = extraGccViaCFlags dflags
        let pic_c_flags = picCCOpts dflags
        let verbFlags = getVerbFlags dflags
        -- cc-options are not passed when compiling .hc files.  Our
        -- hc code doesn't #include any header files anyway, so these
        -- options aren't necessary.
        pkg_extra_cc_opts <- liftIO $
          if cc_phase `eqPhase` HCc
             then return []
             else getPackageExtraCcOpts dflags pkgs
        framework_paths <-
            if platformUsesFrameworks platform
            then do pkgFrameworkPaths <- liftIO $ getPackageFrameworkPath dflags pkgs
                    let cmdlineFrameworkPaths = frameworkPaths dflags
                    return $ map ("-F"++)
                                 (cmdlineFrameworkPaths ++ pkgFrameworkPaths)
            else return []
        let split_objs = gopt Opt_SplitObjs dflags
            split_opt | hcc && split_objs = [ "-DUSE_SPLIT_MARKERS" ]
                      | otherwise         = [ ]
        let cc_opt | optLevel dflags >= 2 = [ "-O2" ]
                   | optLevel dflags >= 1 = [ "-O" ]
                   | otherwise            = []
        -- Decide next phase
        let next_phase = As False
        output_fn <- phaseOutputFilename next_phase
        let
          more_hcc_opts =
                -- on x86 the floating point regs have greater precision
                -- than a double, which leads to unpredictable results.
                -- By default, we turn this off with -ffloat-store unless
                -- the user specified -fexcess-precision.
                (if platformArch platform == ArchX86 &&
                    not (gopt Opt_ExcessPrecision dflags)
                        then [ "-ffloat-store" ]
                        else []) ++
                -- gcc's -fstrict-aliasing allows two accesses to memory
                -- to be considered non-aliasing if they have different types.
                -- This interacts badly with the C code we generate, which is
                -- very weakly typed, being derived from C--.
                ["-fno-strict-aliasing"]
        let gcc_lang_opt | cc_phase `eqPhase` Ccpp    = "c++"
                         | cc_phase `eqPhase` Cobjc   = "objective-c"
                         | cc_phase `eqPhase` Cobjcpp = "objective-c++"
                         | otherwise                  = "c"
        liftIO $ SysTools.runCc dflags (
                -- force the C compiler to interpret this file as C when
                -- compiling .hc files, by adding the -x c option.
                -- Also useful for plain .c files, just in case GHC saw a
                -- -x c option.
                        [ SysTools.Option "-x", SysTools.Option gcc_lang_opt
                        , SysTools.FileOption "" input_fn
                        , SysTools.Option "-o"
                        , SysTools.FileOption "" output_fn
                        ]
                       ++ map SysTools.Option (
                          pic_c_flags
                -- Stub files generated for foreign exports references the runIO_closure
                -- and runNonIO_closure symbols, which are defined in the base package.
                -- These symbols are imported into the stub.c file via RtsAPI.h, and the
                -- way we do the import depends on whether we're currently compiling
                -- the base package or not.
                       ++ (if platformOS platform == OSMinGW32 &&
                              thisPackage dflags == basePackageId
                                then [ "-DCOMPILING_BASE_PACKAGE" ]
                                else [])
                -- We only support SparcV9 and better because V8 lacks an atomic CAS
                -- instruction. Note that the user can still override this
                -- (e.g., -mcpu=ultrasparc) as GCC picks the "best" -mcpu flag
                -- regardless of the ordering.
                --
                -- This is a temporary hack. See #2872, commit
                -- 5bd3072ac30216a505151601884ac88bf404c9f2
                       ++ (if platformArch platform == ArchSPARC
                           then ["-mcpu=v9"]
                           else [])
                       -- GCC 4.6+ doesn't like -Wimplicit when compiling C++.
                       ++ (if (cc_phase /= Ccpp && cc_phase /= Cobjcpp)
                             then ["-Wimplicit"]
                             else [])
                       ++ (if hcc
                             then gcc_extra_viac_flags ++ more_hcc_opts
                             else [])
                       ++ verbFlags
                       -- -S: stop after producing assembly (next phase is As)
                       ++ [ "-S" ]
                       ++ cc_opt
                       ++ [ "-D__GLASGOW_HASKELL__="++cProjectVersionInt ]
                       ++ framework_paths
                       ++ split_opt
                       ++ include_paths
                       ++ pkg_extra_cc_opts
                       ))
        return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- Splitting phase
-- Split one big assembly file into many small ones (one per code chunk)
-- so the linker can drop unused chunks later (-split-objs).  The split
-- tool writes the number of pieces produced into n_files_fn.
runPhase (RealPhase Splitter) input_fn dflags
  = do  -- tmp_pfx is the prefix used for the split .s files
        split_s_prefix <- liftIO $ SysTools.newTempName dflags "split"
        let n_files_fn = split_s_prefix
        liftIO $ SysTools.runSplit dflags
                          [ SysTools.FileOption "" input_fn
                          , SysTools.FileOption "" split_s_prefix
                          , SysTools.FileOption "" n_files_fn
                          ]
        -- Save the number of split files for future references
        s <- liftIO $ readFile n_files_fn
        let n_files = read s :: Int
            dflags' = dflags { splitInfo = Just (split_s_prefix, n_files) }
        -- record the split info in the DynFlags so SplitAs can find the pieces
        setDynFlags dflags'
        -- Remember to delete all these files
        liftIO $ addFilesToClean dflags'
                                 [ split_s_prefix ++ "__" ++ show n ++ ".s"
                                 | n <- [1..n_files]]
        return (RealPhase SplitAs,
                "**splitter**") -- we don't use the filename in SplitAs
-----------------------------------------------------------------------------
-- As, SpitAs phase : Assembler
-- This is for calling the assembler on a regular assembly file (not split).
-- Assemble a single (non-split) assembly file into an object file.
-- with_cpp says whether the input still contains cpp directives
-- (clang/gas "-x assembler-with-cpp" vs plain "-x assembler").
runPhase (RealPhase (As with_cpp)) input_fn dflags
  = do
        -- LLVM from version 3.0 onwards doesn't support the OS X system
        -- assembler, so we use clang as the assembler instead. (#5636)
        let whichAsProg | hscTarget dflags == HscLlvm &&
                          platformOS (targetPlatform dflags) == OSDarwin
                        = do
                            -- be careful what options we call clang with
                            -- see #5903 and #7617 for bugs caused by this.
                            llvmVer <- liftIO $ figureLlvmVersion dflags
                            return $ case llvmVer of
                                Just n | n >= 30 -> SysTools.runClang
                                _                -> SysTools.runAs
                        | otherwise = return SysTools.runAs
        as_prog <- whichAsProg
        let cmdline_include_paths = includePaths dflags
        -- decide whether a MergeStub phase must follow (foreign-export stubs)
        next_phase <- maybeMergeStub
        output_fn <- phaseOutputFilename next_phase
        -- we create directories for the object file, because it
        -- might be a hierarchical module.
        liftIO $ createDirectoryIfMissing True (takeDirectory output_fn)
        ccInfo <- liftIO $ getCompilerInfo dflags
        let runAssembler inputFilename outputFilename
                = liftIO $ as_prog dflags
                       ([ SysTools.Option ("-I" ++ p) | p <- cmdline_include_paths ]
                -- We only support SparcV9 and better because V8 lacks an atomic CAS
                -- instruction so we have to make sure that the assembler accepts the
                -- instruction set. Note that the user can still override this
                -- (e.g., -mcpu=ultrasparc). GCC picks the "best" -mcpu flag
                -- regardless of the ordering.
                --
                -- This is a temporary hack.
                       ++ (if platformArch (targetPlatform dflags) == ArchSPARC
                           then [SysTools.Option "-mcpu=v9"]
                           else [])
                       -- clang warns about unused -I flags; silence that.
                       ++ (if any (ccInfo ==) [Clang, AppleClang, AppleClang51]
                            then [SysTools.Option "-Qunused-arguments"]
                            else [])
                       ++ [ SysTools.Option "-x"
                          , if with_cpp
                              then SysTools.Option "assembler-with-cpp"
                              else SysTools.Option "assembler"
                          , SysTools.Option "-c"
                          , SysTools.FileOption "" inputFilename
                          , SysTools.Option "-o"
                          , SysTools.FileOption "" outputFilename
                          ])
        liftIO $ debugTraceMsg dflags 4 (text "Running the assembler")
        runAssembler input_fn output_fn
        return (RealPhase next_phase, output_fn)
-- This is for calling the assembler on a split assembly file (so a collection
-- of assembly files)
-- Assemble each of the split .s files produced by the Splitter phase,
-- then join the resulting objects (and any foreign-export stub object)
-- into the module's single .o file.
runPhase (RealPhase SplitAs) _input_fn dflags
  = do
        -- we'll handle the stub_o file in this phase, so don't MergeStub,
        -- just jump straight to StopLn afterwards.
        let next_phase = StopLn
        output_fn <- phaseOutputFilename next_phase
        let base_o = dropExtension output_fn
            osuf = objectSuf dflags
            split_odir  = base_o ++ "_" ++ osuf ++ "_split"
        -- this also creates the hierarchy
        liftIO $ createDirectoryIfMissing True split_odir
        -- remove M_split/ *.o, because we're going to archive M_split/ *.o
        -- later and we don't want to pick up any old objects.
        fs <- liftIO $ getDirectoryContents split_odir
        liftIO $ mapM_ removeFile $
                map (split_odir </>) $ filter (osuf `isSuffixOf`) fs
        -- the Splitter phase recorded the prefix and piece count here
        let (split_s_prefix, n) = case splitInfo dflags of
                                  Nothing -> panic "No split info"
                                  Just x -> x
        let split_s   n = split_s_prefix ++ "__" ++ show n <.> "s"
            split_obj :: Int -> FilePath
            split_obj n = split_odir </>
                          takeFileName base_o ++ "__" ++ show n <.> osuf
        let assemble_file n
              = SysTools.runAs dflags (
                -- We only support SparcV9 and better because V8 lacks an atomic CAS
                -- instruction so we have to make sure that the assembler accepts the
                -- instruction set. Note that the user can still override this
                -- (e.g., -mcpu=ultrasparc). GCC picks the "best" -mcpu flag
                -- regardless of the ordering.
                --
                -- This is a temporary hack.
                          (if platformArch (targetPlatform dflags) == ArchSPARC
                           then [SysTools.Option "-mcpu=v9"]
                           else []) ++
                          [ SysTools.Option "-c"
                          , SysTools.Option "-o"
                          , SysTools.FileOption "" (split_obj n)
                          , SysTools.FileOption "" (split_s n)
                          ])
        liftIO $ mapM_ assemble_file [1..n]
        -- Note [pipeline-split-init]
        -- If we have a stub file, it may contain constructor
        -- functions for initialisation of this module.  We can't
        -- simply leave the stub as a separate object file, because it
        -- will never be linked in: nothing refers to it.  We need to
        -- ensure that if we ever refer to the data in this module
        -- that needs initialisation, then we also pull in the
        -- initialisation routine.
        --
        -- To that end, we make a DANGEROUS ASSUMPTION here: the data
        -- that needs to be initialised is all in the FIRST split
        -- object.  See Note [codegen-split-init].
        PipeState{maybe_stub_o} <- getPipeState
        case maybe_stub_o of
            Nothing     -> return ()
            Just stub_o -> liftIO $ do
                     tmp_split_1 <- newTempName dflags osuf
                     let split_1 = split_obj 1
                     copyFile split_1 tmp_split_1
                     removeFile split_1
                     -- fold the stub into the first split object
                     joinObjectFiles dflags [tmp_split_1, stub_o] split_1
        -- join them into a single .o file
        liftIO $ joinObjectFiles dflags (map split_obj [1..n]) output_fn
        return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- LlvmOpt phase
-- Run LLVM's "opt" optimiser over the bitcode produced by the LLVM
-- backend, at an optimisation level derived from -O (clamped to 0..2).
runPhase (RealPhase LlvmOpt) input_fn dflags
  = do
    ver <- liftIO $ readIORef (llvmVersion dflags)
    let opt_lvl  = max 0 (min 2 $ optLevel dflags)
        -- don't specify anything if user has specified commands. We do this
        -- for opt but not llc since opt is very specifically for optimisation
        -- passes only, so if the user is passing us extra options we assume
        -- they know what they are doing and don't get in the way.
        optFlag  = if null (getOpts dflags opt_lo)
                       then map SysTools.Option $ words (llvmOpts ver !! opt_lvl)
                       else []
        tbaa | ver < 29                 = "" -- no tbaa in 2.8 and earlier
             | gopt Opt_LlvmTBAA dflags = "--enable-tbaa=true"
             | otherwise                = "--enable-tbaa=false"
    output_fn <- phaseOutputFilename LlvmLlc
    liftIO $ SysTools.runLlvmOpt dflags
               ([ SysTools.FileOption "" input_fn,
                    SysTools.Option "-o",
                    SysTools.FileOption "" output_fn]
                ++ optFlag
                ++ [SysTools.Option tbaa])
    return (RealPhase LlvmLlc, output_fn)
  where
        -- we always (unless -optlo specified) run Opt since we rely on it to
        -- fix up some pretty big deficiencies in the code we generate
        llvmOpts ver = [ "-mem2reg -globalopt"
                       , if ver >= 34 then "-O1 -globalopt" else "-O1"
                       -- LLVM 3.4 -O1 doesn't eliminate aliases reliably (bug #8855)
                       , "-O2"
                       ]
-----------------------------------------------------------------------------
-- LlvmLlc phase
-- Compile optimised LLVM bitcode to native assembly with "llc",
-- selecting the relocation model and target attributes (VFP/SSE/AVX,
-- float ABI, stack alignment) from the DynFlags and target platform.
runPhase (RealPhase LlvmLlc) input_fn dflags
  = do
    ver <- liftIO $ readIORef (llvmVersion dflags)
    let opt_lvl = max 0 (min 2 $ optLevel dflags)
        -- iOS requires external references to be loaded indirectly from the
        -- DATA segment or dyld traps at runtime writing into TEXT: see #7722
        rmodel | platformOS (targetPlatform dflags) == OSiOS = "dynamic-no-pic"
               | gopt Opt_PIC dflags                         = "pic"
               | not (gopt Opt_Static dflags)                = "dynamic-no-pic"
               | otherwise                                   = "static"
        tbaa | ver < 29                 = "" -- no tbaa in 2.8 and earlier
             | gopt Opt_LlvmTBAA dflags = "--enable-tbaa=true"
             | otherwise                = "--enable-tbaa=false"
    -- hidden debugging flag '-dno-llvm-mangler' to skip mangling
    let next_phase = case gopt Opt_NoLlvmMangler dflags of
                         False                            -> LlvmMangle
                         True | gopt Opt_SplitObjs dflags -> Splitter
                         True                             -> As False
    output_fn <- phaseOutputFilename next_phase
    liftIO $ SysTools.runLlvmLlc dflags
                ([ SysTools.Option (llvmOpts !! opt_lvl),
                    SysTools.Option $ "-relocation-model=" ++ rmodel,
                    SysTools.FileOption "" input_fn,
                    SysTools.Option "-o", SysTools.FileOption "" output_fn]
                ++ [SysTools.Option tbaa]
                ++ map SysTools.Option fpOpts
                ++ map SysTools.Option abiOpts
                ++ map SysTools.Option sseOpts
                ++ map SysTools.Option avxOpts
                ++ map SysTools.Option avx512Opts
                ++ map SysTools.Option stackAlignOpts)
    return (RealPhase next_phase, output_fn)
  where
        -- Bug in LLVM at O3 on OSX.
        llvmOpts = if platformOS (targetPlatform dflags) == OSDarwin
                   then ["-O1", "-O2", "-O2"]
                   else ["-O1", "-O2", "-O3"]
        -- On ARMv7 using LLVM, LLVM fails to allocate floating point registers
        -- while compiling GHC source code. It's probably due to fact that it
        -- does not enable VFP by default. Let's do this manually here
        fpOpts = case platformArch (targetPlatform dflags) of
                   ArchARM ARMv7 ext _ -> if (elem VFPv3 ext)
                                          then ["-mattr=+v7,+vfp3"]
                                          else if (elem VFPv3D16 ext)
                                               then ["-mattr=+v7,+vfp3,+d16"]
                                               else []
                   ArchARM ARMv6 ext _ -> if (elem VFPv2 ext)
                                          then ["-mattr=+v6,+vfp2"]
                                          else ["-mattr=+v6"]
                   _                   -> []
        -- On Ubuntu/Debian with ARM hard float ABI, LLVM's llc still
        -- compiles into soft-float ABI. We need to explicitly set abi
        -- to hard
        abiOpts = case platformArch (targetPlatform dflags) of
                    ArchARM _ _ HARD -> ["-float-abi=hard"]
                    ArchARM _ _ _    -> []
                    _                -> []
        sseOpts | isSse4_2Enabled dflags = ["-mattr=+sse42"]
                | isSse2Enabled dflags   = ["-mattr=+sse2"]
                | isSseEnabled dflags    = ["-mattr=+sse"]
                | otherwise              = []
        avxOpts | isAvx512fEnabled dflags = ["-mattr=+avx512f"]
                | isAvx2Enabled dflags    = ["-mattr=+avx2"]
                | isAvxEnabled dflags     = ["-mattr=+avx"]
                | otherwise               = []
        avx512Opts =
          [ "-mattr=+avx512cd" | isAvx512cdEnabled dflags ] ++
          [ "-mattr=+avx512er" | isAvx512erEnabled dflags ] ++
          [ "-mattr=+avx512pf" | isAvx512pfEnabled dflags ]
        stackAlignOpts =
            case platformArch (targetPlatform dflags) of
              ArchX86_64 | isAvxEnabled dflags -> ["-stack-alignment=32"]
              _                                -> []
-----------------------------------------------------------------------------
-- LlvmMangle phase
-- Post-process ("mangle") the assembly emitted by llc, then hand it to
-- the splitter (when -split-objs is on) or straight to the assembler.
runPhase (RealPhase LlvmMangle) input_fn dflags
  = do let next_phase | gopt Opt_SplitObjs dflags = Splitter
                      | otherwise                 = As False
       mangled_fn <- phaseOutputFilename next_phase
       liftIO $ llvmFixupAsm dflags input_fn mangled_fn
       return (RealPhase next_phase, mangled_fn)
-----------------------------------------------------------------------------
-- merge in stub objects
-- Merge the foreign-export stub object (compiled earlier in the
-- pipeline) into this module's object file, then stop.
runPhase (RealPhase MergeStub) input_fn dflags
 = do
     PipeState{maybe_stub_o} <- getPipeState
     output_fn <- phaseOutputFilename StopLn
     liftIO $ createDirectoryIfMissing True (takeDirectory output_fn)
     case maybe_stub_o of
       Just stub_o -> do
           liftIO $ joinObjectFiles dflags [input_fn, stub_o] output_fn
           return (RealPhase StopLn, output_fn)
       -- maybeMergeStub only schedules this phase when a stub object
       -- exists, so reaching here without one is a driver bug.
       Nothing ->
           panic "runPhase(MergeStub): no stub"
-- warning suppression
-- Catch-all: any phase not handled by an equation above indicates a
-- bug in the driver's phase scheduling, so fail loudly.
runPhase (RealPhase other) _input_fn _dflags =
   panic ("runPhase: don't know how to run phase " ++ show other)
-- | Decide the phase that follows assembly: if a foreign-export stub
-- object was compiled for this module we must run MergeStub to fold it
-- into the module's object file, otherwise the pipeline can stop.
maybeMergeStub :: CompPipeline Phase
maybeMergeStub = do
    st <- getPipeState
    return $ case maybe_stub_o st of
               Just _  -> MergeStub
               Nothing -> StopLn
-- | Compute the 'ModLocation' (source / .hi / .o file paths) for the
-- module being compiled, refining a default location step by step with
-- the boot suffix, -ohi and -o flags.
getLocation :: HscSource -> ModuleName -> CompPipeline ModLocation
getLocation src_flavour mod_name = do
    dflags <- getDynFlags
    PipeEnv{ src_basename=basename,
             src_suffix=suff } <- getPipeEnv
    -- Build a ModLocation to pass to hscMain.
    -- The source filename is rather irrelevant by now, but it's used
    -- by hscMain for messages.  hscMain also needs
    -- the .hi and .o filenames, and this is as good a way
    -- as any to generate them, and better than most. (e.g. takes
    -- into account the -osuf flags)
    location1 <- liftIO $ mkHomeModLocation2 dflags mod_name basename suff
    -- Boot-ify it if necessary
    let location2 | isHsBoot src_flavour = addBootSuffixLocn location1
                  | otherwise            = location1
    -- Take -ohi into account if present
    -- This can't be done in mkHomeModuleLocation because
    -- it only applies to the module being compiled
    let ohi = outputHi dflags
        location3 | Just fn <- ohi = location2{ ml_hi_file = fn }
                  | otherwise      = location2
    -- Take -o into account if present
    -- Very like -ohi, but we must *only* do this if we aren't linking
    -- (If we're linking then the -o applies to the linked thing, not to
    -- the object file for one module.)
    -- Note the nasty duplication with the same computation in compileFile above
    let expl_o_file = outputFile dflags
        location4 | Just ofile <- expl_o_file
                  , isNoLink (ghcLink dflags)
                  = location3 { ml_obj_file = ofile }
                  | otherwise = location3
    return location4
-----------------------------------------------------------------------------
-- MoveBinary sort-of-phase
-- After having produced a binary, move it somewhere else and generate a
-- wrapper script calling the binary. Currently, we need this only in
-- a parallel way (i.e. in GUM), because PVM expects the binary in a
-- central directory.
-- This is called from linkBinary below, after linking. I haven't made it
-- a separate phase to minimise interfering with other modules, and
-- we don't need the generality of a phase (MoveBinary is always
-- done after linking and makes only sense in a parallel setup) -- HWL
-- Move the linked binary into the PVM directory tree and replace it
-- with a generated Perl wrapper script, when compiling for the parallel
-- (WayPar/GUM) way.  A no-op (returning True) otherwise.
-- NOTE(review): relies on PVM_ROOT/PVM_ARCH being set in the
-- environment; getEnv will throw if they are missing — presumably
-- guaranteed by a PVM installation.
runPhase_MoveBinary :: DynFlags -> FilePath -> IO Bool
runPhase_MoveBinary dflags input_fn
    | WayPar `elem` ways dflags && not (gopt Opt_Static dflags) =
        panic ("Don't know how to combine PVM wrapper and dynamic wrapper")
    | WayPar `elem` ways dflags = do
        let sysMan = pgm_sysman dflags
        pvm_root <- getEnv "PVM_ROOT"
        pvm_arch <- getEnv "PVM_ARCH"
        let
           pvm_executable_base = "=" ++ input_fn
           pvm_executable = pvm_root ++ "/bin/" ++ pvm_arch ++ "/" ++ pvm_executable_base
        -- nuke old binary; maybe use configur'ed names for cp and rm?
        _ <- tryIO (removeFile pvm_executable)
        -- move the newly created binary into PVM land
        copy dflags "copying PVM executable" input_fn pvm_executable
        -- generate a wrapper script for running a parallel prg under PVM
        writeFile input_fn (mk_pvm_wrapper_script pvm_executable pvm_executable_base sysMan)
        return True
    | otherwise = return True
-- | Write the given source text to a fresh temporary file with suffix
-- @extn@ and run the C compiler over it, returning the path of the
-- resulting object file.  The RTS package's include directories are put
-- on the include path so the source may #include RTS headers.
mkExtraObj :: DynFlags -> Suffix -> String -> IO FilePath
mkExtraObj dflags extn contents = do
    src_file <- newTempName dflags extn
    obj_file <- newTempName dflags "o"
    writeFile src_file contents
    let rts_pkg      = getPackageDetails (pkgState dflags) rtsPackageId
        rts_includes = map (FileOption "-I") (includeDirs rts_pkg)
    SysTools.runCc dflags $
        [ Option "-c"
        , FileOption "" src_file
        , Option "-o"
        , FileOption "" obj_file
        ] ++ rts_includes
    return obj_file
-- When linking a binary, we need to create a C main() function that
-- starts everything off. This used to be compiled statically as part
-- of the RTS, but that made it hard to change the -rtsopts setting,
-- so now we generate and compile a main() stub as part of every
-- binary and pass the -rtsopts setting directly to the RTS (#5373)
--
-- | Generate and compile the C main() stub that boots the RTS and calls
-- the Haskell Main.main closure, baking the -rtsopts/-with-rtsopts
-- settings into the RtsConfig (#5373).  With -no-hs-main an empty
-- object is produced and a warning is emitted if RTS-option flags were
-- also given, since they would have no effect.
mkExtraObjToLinkIntoBinary :: DynFlags -> IO FilePath
mkExtraObjToLinkIntoBinary dflags = do
   when (gopt Opt_NoHsMain dflags && haveRtsOptsFlags dflags) $ do
      log_action dflags dflags SevInfo noSrcSpan defaultUserStyle
          (text "Warning: -rtsopts and -with-rtsopts have no effect with -no-hs-main." $$
           text "    Call hs_init_ghc() from your main() function to set these options.")
   mkExtraObj dflags "c" (showSDoc dflags main)
  where
    -- The C source for the stub, as an SDoc.
    main
      | gopt Opt_NoHsMain dflags = empty
      | otherwise = vcat [
             ptext (sLit "#include \"Rts.h\""),
             ptext (sLit "extern StgClosure ZCMain_main_closure;"),
             ptext (sLit "int main(int argc, char *argv[])"),
             char '{',
             ptext (sLit " RtsConfig __conf = defaultRtsConfig;"),
             ptext (sLit " __conf.rts_opts_enabled = ")
                 <> text (show (rtsOptsEnabled dflags)) <> semi,
             case rtsOpts dflags of
                Nothing   -> empty
                Just opts -> ptext (sLit "    __conf.rts_opts= ") <>
                               text (show opts) <> semi,
             ptext (sLit " __conf.rts_hs_main = rtsTrue;"),
             ptext (sLit " return hs_main(argc, argv, &ZCMain_main_closure,__conf);"),
             char '}',
             char '\n' -- final newline, to keep gcc happy
           ]
-- Write out the link info section into a new assembly file. Previously
-- this was included as inline assembly in the main.c file but this
-- is pretty fragile. gas gets upset trying to calculate relative offsets
-- that span the .note section (notably .text) when debug info is present
-- | Emit the "link info" string (see 'getLinkInfo') as a C-escaped
-- .ascii directive in its own note section, assembled into a small
-- object file that gets linked into the binary.  Returns [] on
-- platforms that cannot save link options in the executable.
mkNoteObjsToLinkIntoBinary :: DynFlags -> [PackageId] -> IO [FilePath]
mkNoteObjsToLinkIntoBinary dflags dep_packages = do
   link_info <- getLinkInfo dflags dep_packages
   if (platformSupportsSavingLinkOpts (platformOS (targetPlatform dflags)))
     then fmap (:[]) $ mkExtraObj dflags "s" (showSDoc dflags (link_opts link_info))
     else return []
  where
    -- The assembly source placing the info string in a note section.
    link_opts info = hcat [
          text "\t.section ", text ghcLinkInfoSectionName,
          text ",\"\",",
          text elfSectionNote,
          text "\n",
          text "\t.ascii \"", info', text "\"\n",
          -- ALL generated assembly must have this section to disable
          -- executable stacks.  See also
          -- compiler/nativeGen/AsmCodeGen.lhs for another instance
          -- where we need to do this.
          (if platformHasGnuNonexecStack (targetPlatform dflags)
             then text ".section .note.GNU-stack,\"\",@progbits\n"
             else empty)
          ]
      where
        info' = text $ escape info
        -- C-escape every character so it survives inside .ascii "..."
        escape :: String -> String
        escape = concatMap (charToC.fromIntegral.ord)
        -- ARM's assembler spells the section-type character differently
        elfSectionNote :: String
        elfSectionNote = case platformArch (targetPlatform dflags) of
                           ArchARM _ _ _ -> "%note"
                           _             -> "@note"
-- The "link info" is a string representing the parameters of the
-- link. We save this information in the binary, and the next time we
-- link, if nothing else has changed, we use the link info stored in
-- the existing binary to decide whether to re-link or not.
-- | Compute the "link info" string: a 'show'n tuple of everything that
-- influences a link.  It is saved in the produced binary so that the
-- next link can be skipped when none of these inputs have changed.
getLinkInfo :: DynFlags -> [PackageId] -> IO String
getLinkInfo dflags dep_packages = do
   package_link_opts <- getPackageLinkOpts dflags dep_packages
   pkg_frameworks <-
       if platformUsesFrameworks (targetPlatform dflags)
           then getPackageFrameworks dflags dep_packages
           else return []
   -- Every component of this tuple takes part in the up-to-date check.
   return $ show ( package_link_opts
                 , pkg_frameworks
                 , rtsOpts dflags
                 , rtsOptsEnabled dflags
                 , gopt Opt_NoHsMain dflags
                 , map showOpt (ldInputs dflags)
                 , getOpts dflags opt_l
                 )
-- generates a Perl skript starting a parallel prg under PVM
-- | Generate the Perl wrapper script that replaces the binary when
-- compiling the parallel (PVM/GUM) way: it parses +RTS flags for the
-- processor count, then launches the real (hidden) binary via SysMan.
-- Pure string generation only; see 'runPhase_MoveBinary' for its use.
mk_pvm_wrapper_script :: String -> String -> String -> String
mk_pvm_wrapper_script pvm_executable pvm_executable_base sysMan = unlines $
 [
  "eval 'exec perl -S $0 ${1+\"$@\"}'",
  "  if $running_under_some_shell;",
  "# =!=!=!=!=!=!=!=!=!=!=!",
  "# This script is automatically generated: DO NOT EDIT!!!",
  "# Generated by Glasgow Haskell Compiler",
  "# ngoqvam choHbogh vaj' vIHoHnISbej !!!!",
  "#",
  "$pvm_executable      = '" ++ pvm_executable ++ "';",
  "$pvm_executable_base = '" ++ pvm_executable_base ++ "';",
  "$SysMan = '" ++ sysMan ++ "';",
  "",
{- ToDo: add the magical shortcuts again iff we actually use them -- HWL
  "# first, some magical shortcuts to run "commands" on the binary",
  "# (which is hidden)",
  "if ($#ARGV == 1 && $ARGV[0] eq '+RTS' && $ARGV[1] =~ /^--((size|file|strip|rm|nm).*)/ ) {",
  "    local($cmd) = $1;",
  "    system("$cmd $pvm_executable");",
  "    exit(0); # all done",
  "}", -}
  "",
  "# Now, run the real binary; process the args first",
  "$ENV{'PE'} = $pvm_executable_base;", --  ++ pvm_executable_base,
  "$debug = '';",
  "$nprocessors = 0; # the default: as many PEs as machines in PVM config",
  "@nonPVM_args = ();",
  "$in_RTS_args = 0;",
  "",
  "args: while ($a = shift(@ARGV)) {",
  "    if ( $a eq '+RTS' ) {",
  "        $in_RTS_args = 1;",
  "    } elsif ( $a eq '-RTS' ) {",
  "        $in_RTS_args = 0;",
  "    }",
  "    if ( $a eq '-d' && $in_RTS_args ) {",
  "        $debug = '-';",
  "    } elsif ( $a =~ /^-qN(\\d+)/ && $in_RTS_args ) {",
  "        $nprocessors = $1;",
  "    } elsif ( $a =~ /^-qp(\\d+)/ && $in_RTS_args ) {",
  "        $nprocessors = $1;",
  "    } else {",
  "        push(@nonPVM_args, $a);",
  "    }",
  "}",
  "",
  "local($return_val) = 0;",
  "# Start the parallel execution by calling SysMan",
  "system(\"$SysMan $debug $pvm_executable $nprocessors @nonPVM_args\");",
  "$return_val = $?;",
  "# ToDo: fix race condition moving files and flushing them!!",
  "system(\"cp $ENV{'HOME'}/$pvm_executable_base.???.gr .\") if -f \"$ENV{'HOME'}/$pvm_executable_base.002.gr\";",
  "exit($return_val);"
 ]
-----------------------------------------------------------------------------
-- Look for the /* GHC_PACKAGES ... */ comment at the top of a .hc file
-- | Read the package list stamped into the first line of a GHC-generated
-- .hc file, in the form "/* GHC_PACKAGES pkg1 pkg2 ... */".  Returns []
-- if the first line does not match.
-- NOTE(review): hGetLine throws on an empty file; presumably .hc files
-- produced by GHC always have at least one line — confirm before
-- feeding arbitrary files here.
getHCFilePackages :: FilePath -> IO [PackageId]
getHCFilePackages filename =
  Exception.bracket (openFile filename ReadMode) hClose $ \h -> do
    l <- hGetLine h
    case l of
      '/':'*':' ':'G':'H':'C':'_':'P':'A':'C':'K':'A':'G':'E':'S':rest ->
          return (map stringToPackageId (words rest))
      _other ->
          return []
-----------------------------------------------------------------------------
-- Static linking, of .o files
-- The list of packages passed to link is the list of packages on
-- which this program depends, as discovered by the compilation
-- manager. It is combined with the list of packages that the user
-- specifies on the command line with -package flags.
--
-- In one-shot linking mode, we can't discover the package
-- dependencies (because we haven't actually done any compilation or
-- read any interface files), so the user must explicitly specify all
-- the packages.
-- | Link object files and packages into an executable: simply
-- 'linkBinary'' in its normal (non-static-library) mode.
linkBinary :: DynFlags -> [FilePath] -> [PackageId] -> IO ()
linkBinary dflags o_files dep_packages =
    linkBinary' False dflags o_files dep_packages
linkBinary' :: Bool -> DynFlags -> [FilePath] -> [PackageId] -> IO ()
linkBinary' staticLink dflags o_files dep_packages = do
let platform = targetPlatform dflags
mySettings = settings dflags
verbFlags = getVerbFlags dflags
output_fn = exeFileName staticLink dflags
-- get the full list of packages to link with, by combining the
-- explicit packages with the auto packages and all of their
-- dependencies, and eliminating duplicates.
full_output_fn <- if isAbsolute output_fn
then return output_fn
else do d <- getCurrentDirectory
return $ normalise (d </> output_fn)
pkg_lib_paths <- getPackageLibraryPath dflags dep_packages
let pkg_lib_path_opts = concatMap get_pkg_lib_path_opts pkg_lib_paths
get_pkg_lib_path_opts l
| osElfTarget (platformOS platform) &&
dynLibLoader dflags == SystemDependent &&
not (gopt Opt_Static dflags)
= let libpath = if gopt Opt_RelativeDynlibPaths dflags
then "$ORIGIN" </>
(l `makeRelativeTo` full_output_fn)
else l
rpath = if gopt Opt_RPath dflags
then ["-Wl,-rpath", "-Wl," ++ libpath]
else []
-- Solaris 11's linker does not support -rpath-link option. It silently
-- ignores it and then complains about next option which is -l<some
-- dir> as being a directory and not expected object file, E.g
-- ld: elf error: file
-- /tmp/ghc-src/libraries/base/dist-install/build:
-- elf_begin: I/O error: region read: Is a directory
rpathlink = if (platformOS platform) == OSSolaris2
then []
else ["-Wl,-rpath-link", "-Wl," ++ l]
in ["-L" ++ l] ++ rpathlink ++ rpath
| osMachOTarget (platformOS platform) &&
dynLibLoader dflags == SystemDependent &&
not (gopt Opt_Static dflags) &&
gopt Opt_RPath dflags
= let libpath = if gopt Opt_RelativeDynlibPaths dflags
then "@loader_path" </>
(l `makeRelativeTo` full_output_fn)
else l
in ["-L" ++ l] ++ ["-Wl,-rpath", "-Wl," ++ libpath]
| otherwise = ["-L" ++ l]
let lib_paths = libraryPaths dflags
let lib_path_opts = map ("-L"++) lib_paths
extraLinkObj <- mkExtraObjToLinkIntoBinary dflags
noteLinkObjs <- mkNoteObjsToLinkIntoBinary dflags dep_packages
pkg_link_opts <- do
(package_hs_libs, extra_libs, other_flags) <- getPackageLinkOpts dflags dep_packages
return $ if staticLink
then package_hs_libs -- If building an executable really means making a static
-- library (e.g. iOS), then we only keep the -l options for
-- HS packages, because libtool doesn't accept other options.
-- In the case of iOS these need to be added by hand to the
-- final link in Xcode.
else other_flags ++ package_hs_libs ++ extra_libs -- -Wl,-u,<sym> contained in other_flags
-- needs to be put before -l<package>,
-- otherwise Solaris linker fails linking
-- a binary with unresolved symbols in RTS
-- which are defined in base package
-- the reason for this is a note in ld(1) about
-- '-u' option: "The placement of this option
-- on the command line is significant.
-- This option must be placed before the library
-- that defines the symbol."
pkg_framework_path_opts <-
if platformUsesFrameworks platform
then do pkg_framework_paths <- getPackageFrameworkPath dflags dep_packages
return $ map ("-F" ++) pkg_framework_paths
else return []
framework_path_opts <-
if platformUsesFrameworks platform
then do let framework_paths = frameworkPaths dflags
return $ map ("-F" ++) framework_paths
else return []
pkg_framework_opts <-
if platformUsesFrameworks platform
then do pkg_frameworks <- getPackageFrameworks dflags dep_packages
return $ concat [ ["-framework", fw] | fw <- pkg_frameworks ]
else return []
framework_opts <-
if platformUsesFrameworks platform
then do let frameworks = cmdlineFrameworks dflags
-- reverse because they're added in reverse order from
-- the cmd line:
return $ concat [ ["-framework", fw]
| fw <- reverse frameworks ]
else return []
-- probably _stub.o files
let extra_ld_inputs = ldInputs dflags
-- Here are some libs that need to be linked at the *end* of
-- the command line, because they contain symbols that are referred to
-- by the RTS. We can't therefore use the ordinary way opts for these.
let
debug_opts | WayDebug `elem` ways dflags = [
#if defined(HAVE_LIBBFD)
"-lbfd", "-liberty"
#endif
]
| otherwise = []
let thread_opts
| WayThreaded `elem` ways dflags =
let os = platformOS (targetPlatform dflags)
in if os == OSOsf3 then ["-lpthread", "-lexc"]
else if os `elem` [OSMinGW32, OSFreeBSD, OSOpenBSD,
OSNetBSD, OSHaiku, OSQNXNTO, OSiOS]
then []
else ["-lpthread"]
| otherwise = []
rc_objs <- maybeCreateManifest dflags output_fn
let link = if staticLink
then SysTools.runLibtool
else SysTools.runLink
link dflags (
map SysTools.Option verbFlags
++ [ SysTools.Option "-o"
, SysTools.FileOption "" output_fn
]
++ map SysTools.Option (
[]
-- Permit the linker to auto link _symbol to _imp_symbol.
-- This lets us link against DLLs without needing an "import library".
++ (if platformOS platform == OSMinGW32
then ["-Wl,--enable-auto-import"]
else [])
-- '-no_compact_unwind'
-- C++/Objective-C exceptions cannot use optimised
-- stack unwinding code. The optimised form is the
-- default in Xcode 4 on at least x86_64, and
-- without this flag we're also seeing warnings
-- like
-- ld: warning: could not create compact unwind for .LFB3: non-standard register 5 being saved in prolog
-- on x86.
++ (if sLdSupportsCompactUnwind mySettings &&
not staticLink &&
(platformOS platform == OSDarwin || platformOS platform == OSiOS) &&
case platformArch platform of
ArchX86 -> True
ArchX86_64 -> True
ArchARM {} -> True
_ -> False
then ["-Wl,-no_compact_unwind"]
else [])
-- '-no_pie'
-- iOS uses 'dynamic-no-pic', so we must pass this to ld to suppress a warning; see #7722
++ (if platformOS platform == OSiOS &&
not staticLink
then ["-Wl,-no_pie"]
else [])
-- '-Wl,-read_only_relocs,suppress'
-- ld gives loads of warnings like:
-- ld: warning: text reloc in _base_GHCziArr_unsafeArray_info to _base_GHCziArr_unsafeArray_closure
-- when linking any program. We're not sure
-- whether this is something we ought to fix, but
-- for now this flags silences them.
++ (if platformOS platform == OSDarwin &&
platformArch platform == ArchX86 &&
not staticLink
then ["-Wl,-read_only_relocs,suppress"]
else [])
++ o_files
++ lib_path_opts)
++ extra_ld_inputs
++ map SysTools.Option (
rc_objs
++ framework_path_opts
++ framework_opts
++ pkg_lib_path_opts
++ extraLinkObj:noteLinkObjs
++ pkg_link_opts
++ pkg_framework_path_opts
++ pkg_framework_opts
++ debug_opts
++ thread_opts
))
-- parallel only: move binary to another dir -- HWL
success <- runPhase_MoveBinary dflags output_fn
unless success $
throwGhcExceptionIO (InstallationError ("cannot move binary"))
-- | Compute the output file name for the link product: an executable, or a
-- static archive when the first argument is True. Honours an explicit -o
-- name, appending the platform's conventional extension only when the user
-- supplied none; otherwise falls back to the platform default name.
exeFileName :: Bool -> DynFlags -> FilePath
exeFileName staticLink dflags =
    case outputFile dflags of
      Just s
        | isWindows  -> s `withDefaultExt` "exe"
        | staticLink -> s `withDefaultExt` "a"
        | otherwise  -> s
      Nothing
        | isWindows  -> "main.exe"
        | staticLink -> "liba.a"
        | otherwise  -> "a.out"
  where
    isWindows = platformOS (targetPlatform dflags) == OSMinGW32
    -- Append the extension only when the given name has none already.
    withDefaultExt s ext
      | null (takeExtension s) = s <.> ext
      | otherwise              = s
-- | On Windows (when -fgen-manifest is on), write a side-by-side
-- @<exe>.manifest@ file requesting the \"asInvoker\" execution level, and
-- (when -fembed-manifest is also on) compile it into a COFF object via
-- windres so it can be linked straight into the binary. Returns the extra
-- object files to pass to the linker; on every other platform returns [].
maybeCreateManifest
   :: DynFlags
   -> FilePath                          -- filename of executable
   -> IO [FilePath]                     -- extra objects to embed, maybe
maybeCreateManifest dflags exe_filename
 | platformOS (targetPlatform dflags) == OSMinGW32 &&
   gopt Opt_GenManifest dflags
    = do let manifest_filename = exe_filename <.> "manifest"
         -- Fixed manifest template; only the assembly name varies.
         writeFile manifest_filename $
             "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n"++
             "  <assembly xmlns=\"urn:schemas-microsoft-com:asm.v1\" manifestVersion=\"1.0\">\n"++
             "  <assemblyIdentity version=\"1.0.0.0\"\n"++
             "     processorArchitecture=\"X86\"\n"++
             "     name=\"" ++ dropExtension exe_filename ++ "\"\n"++
             "     type=\"win32\"/>\n\n"++
             "  <trustInfo xmlns=\"urn:schemas-microsoft-com:asm.v3\">\n"++
             "    <security>\n"++
             "      <requestedPrivileges>\n"++
             "        <requestedExecutionLevel level=\"asInvoker\" uiAccess=\"false\"/>\n"++
             "        </requestedPrivileges>\n"++
             "       </security>\n"++
             "  </trustInfo>\n"++
             "</assembly>\n"
         -- Windows will find the manifest file if it is named
         -- foo.exe.manifest. However, for extra robustness, and so that
         -- we can move the binary around, we can embed the manifest in
         -- the binary itself using windres:
         if not (gopt Opt_EmbedManifest dflags) then return [] else do
         rc_filename <- newTempName dflags "rc"
         rc_obj_filename <- newTempName dflags (objectSuf dflags)
         writeFile rc_filename $
             "1 24 MOVEABLE PURE " ++ show manifest_filename ++ "\n"
               -- magic numbers :-)
               -- show is a bit hackish above, but we need to escape the
               -- backslashes in the path.
         runWindres dflags $ map SysTools.Option $
               ["--input="++rc_filename,
                "--output="++rc_obj_filename,
                "--output-format=coff"]
               -- no FileOptions here: windres doesn't like seeing
               -- backslashes, apparently
         removeFile manifest_filename
         return [rc_obj_filename]
 | otherwise = return []
-- | Link a shared library, first warning if the user passed RTS option
-- flags: those flags are baked into executables, not shared objects, so
-- they are silently ineffective with -shared.
linkDynLibCheck :: DynFlags -> [String] -> [PackageId] -> IO ()
linkDynLibCheck dflags o_files dep_packages
 = do
    when (haveRtsOptsFlags dflags) $ do
      log_action dflags dflags SevInfo noSrcSpan defaultUserStyle
          (text "Warning: -rtsopts and -with-rtsopts have no effect with -shared." $$
           text "    Call hs_init_ghc() from your main() function to set these options.")
    linkDynLib dflags o_files dep_packages
-- | Build a static archive (via @linkBinary' True@), guarded by a platform
-- check: the libtool-based archive path is only supported on Darwin/iOS.
linkStaticLibCheck :: DynFlags -> [String] -> [PackageId] -> IO ()
linkStaticLibCheck dflags o_files dep_packages
 = do
    when (platformOS (targetPlatform dflags) `notElem` [OSiOS, OSDarwin]) $
      throwGhcExceptionIO (ProgramError "Static archive creation only supported on Darwin/OS X/iOS")
    linkBinary' True dflags o_files dep_packages
-- -----------------------------------------------------------------------------
-- Running CPP
-- | Run the C preprocessor over @input_fn@, writing to @output_fn@.
-- When @raw@ is True the standalone cpp is invoked; otherwise the C
-- compiler is used with -E. Supplies -I paths from the command line and
-- installed packages, plus -D defines describing build/host platform,
-- SSE/AVX capability, and the active backend.
doCpp :: DynFlags -> Bool -> FilePath -> FilePath -> IO ()
doCpp dflags raw input_fn output_fn = do
    let hscpp_opts = picPOpts dflags
    let cmdline_include_paths = includePaths dflags
    pkg_include_dirs <- getPackageIncludePath dflags []
    -- Interleave "-I" before each include directory.
    let include_paths = foldr (\ x xs -> "-I" : x : xs) []
                          (cmdline_include_paths ++ pkg_include_dirs)
    let verbFlags = getVerbFlags dflags
    let cpp_prog args | raw = SysTools.runCpp dflags args
                      | otherwise = SysTools.runCc dflags (SysTools.Option "-E" : args)
    let target_defs =
          [ "-D" ++ HOST_OS ++ "_BUILD_OS=1",
            "-D" ++ HOST_ARCH ++ "_BUILD_ARCH=1",
            "-D" ++ TARGET_OS ++ "_HOST_OS=1",
            "-D" ++ TARGET_ARCH ++ "_HOST_ARCH=1" ]
        -- remember, in code we *compile*, the HOST is the same our TARGET,
        -- and BUILD is the same as our HOST.
    let sse_defs =
          [ "-D__SSE__=1" | isSseEnabled dflags ] ++
          [ "-D__SSE2__=1" | isSse2Enabled dflags ] ++
          [ "-D__SSE4_2__=1" | isSse4_2Enabled dflags ]
    let avx_defs =
          [ "-D__AVX__=1" | isAvxEnabled dflags ] ++
          [ "-D__AVX2__=1" | isAvx2Enabled dflags ] ++
          [ "-D__AVX512CD__=1" | isAvx512cdEnabled dflags ] ++
          [ "-D__AVX512ER__=1" | isAvx512erEnabled dflags ] ++
          [ "-D__AVX512F__=1" | isAvx512fEnabled dflags ] ++
          [ "-D__AVX512PF__=1" | isAvx512pfEnabled dflags ]
    backend_defs <- getBackendDefs dflags
    cpp_prog       (   map SysTools.Option verbFlags
                    ++ map SysTools.Option include_paths
                    ++ map SysTools.Option hsSourceCppOpts
                    ++ map SysTools.Option target_defs
                    ++ map SysTools.Option backend_defs
                    ++ map SysTools.Option hscpp_opts
                    ++ map SysTools.Option sse_defs
                    ++ map SysTools.Option avx_defs
        -- Set the language mode to assembler-with-cpp when preprocessing. This
        -- alleviates some of the C99 macro rules relating to whitespace and the hash
        -- operator, which we tend to abuse. Clang in particular is not very happy
        -- about this.
                    ++ [ SysTools.Option     "-x"
                       , SysTools.Option     "assembler-with-cpp"
                       , SysTools.Option     input_fn
        -- We hackily use Option instead of FileOption here, so that the file
        -- name is not back-slashed on Windows.  cpp is capable of
        -- dealing with / in filenames, so it works fine.  Furthermore
        -- if we put in backslashes, cpp outputs #line directives
        -- with *double* backslashes.   And that in turn means that
        -- our error messages get double backslashes in them.
        -- In due course we should arrange that the lexer deals
        -- with these \\ escapes properly.
                       , SysTools.Option     "-o"
                       , SysTools.FileOption "" output_fn
                       ])
-- | CPP defines describing the active code generator.  When compiling via
-- LLVM, expose the detected LLVM version as __GLASGOW_HASKELL_LLVM__;
-- otherwise no extra defines are needed.
getBackendDefs :: DynFlags -> IO [String]
getBackendDefs dflags
  | hscTarget dflags == HscLlvm = do
      mbVer <- figureLlvmVersion dflags
      return (maybe [] versionDef mbVer)
  | otherwise = return []
  where
    versionDef v = ["-D__GLASGOW_HASKELL_LLVM__=" ++ show v]
hsSourceCppOpts :: [String]
-- Default CPP defines in Haskell source
-- (exposes the compiler version, e.g. __GLASGOW_HASKELL__=708, so user
-- code can conditionally compile against compiler releases).
hsSourceCppOpts =
        [ "-D__GLASGOW_HASKELL__="++cProjectVersionInt ]
-- ---------------------------------------------------------------------------
-- join object files into a single relocatable object file, using ld -r
-- | Join several object files into one relocatable object (ld -r).
-- Chooses how to pass the input file list based on linker capabilities:
-- a linker script for GNU ld, -filelist where supported, or plain
-- arguments otherwise.
joinObjectFiles :: DynFlags -> [FilePath] -> FilePath -> IO ()
joinObjectFiles dflags o_files output_fn = do
  let mySettings = settings dflags
      ldIsGnuLd = sLdIsGnuLd mySettings
      osInfo = platformOS (targetPlatform dflags)
      -- Invoke the linker in relocatable mode with the given input args.
      -- Some flags are compiler/OS/arch-specific, see the notes inline.
      ld_r args cc = SysTools.runLink dflags ([
                       SysTools.Option "-nostdlib",
                       SysTools.Option "-Wl,-r"
                     ]
                     ++ (if any (cc ==) [Clang, AppleClang, AppleClang51]
                          then []
                          else [SysTools.Option "-nodefaultlibs"])
                     ++ (if osInfo == OSFreeBSD
                          then [SysTools.Option "-L/usr/lib"]
                          else [])
                        -- gcc on sparc sets -Wl,--relax implicitly, but
                        -- -r and --relax are incompatible for ld, so
                        -- disable --relax explicitly.
                     ++ (if platformArch (targetPlatform dflags) == ArchSPARC
                         && ldIsGnuLd
                          then [SysTools.Option "-Wl,-no-relax"]
                          else [])
                     ++ map SysTools.Option ld_build_id
                     ++ [ SysTools.Option "-o",
                          SysTools.FileOption "" output_fn ]
                     ++ args)
      -- suppress the generation of the .note.gnu.build-id section,
      -- which we don't need and sometimes causes ld to emit a
      -- warning:
      ld_build_id | sLdSupportsBuildId mySettings = ["-Wl,--build-id=none"]
                  | otherwise                     = []
  ccInfo <- getCompilerInfo dflags
  if ldIsGnuLd
     then do
          -- GNU ld: pass inputs via an INPUT(...) linker script, avoiding
          -- command-line length limits.
          script <- newTempName dflags "ldscript"
          writeFile script $ "INPUT(" ++ unwords o_files ++ ")"
          ld_r [SysTools.FileOption "" script] ccInfo
     else if sLdSupportsFilelist mySettings
     then do
          -- e.g. Darwin ld: one object path per line in a -filelist file.
          filelist <- newTempName dflags "filelist"
          writeFile filelist $ unlines o_files
          ld_r [SysTools.Option "-Wl,-filelist",
                SysTools.FileOption "-Wl," filelist] ccInfo
     else do
          -- Fallback: objects directly on the command line.
          ld_r (map (SysTools.FileOption "") o_files) ccInfo
-- -----------------------------------------------------------------------------
-- Misc.
-- | What phase to run after one of the backend code generators has run
hscPostBackendPhase :: DynFlags -> HscSource -> HscTarget -> Phase
-- hs-boot files produce no code, so the pipeline stops immediately.
hscPostBackendPhase _ HsBootFile _    =  StopLn
hscPostBackendPhase dflags _ hsc_lang =
  case hsc_lang of
        HscC -> HCc                -- generated C still needs compiling
        HscAsm | gopt Opt_SplitObjs dflags -> Splitter
               | otherwise -> As False
        HscLlvm        -> LlvmOpt  -- run the LLVM optimiser next
        HscNothing     -> StopLn
        HscInterpreted -> StopLn
-- | Update the timestamp of an object file (creating its parent
-- directories if needed), so the recompilation checker sees it as fresh.
touchObjectFile :: DynFlags -> FilePath -> IO ()
touchObjectFile dflags path = do
  createDirectoryIfMissing True $ takeDirectory path
  SysTools.touch dflags "Touching object file" path
-- | True when the user supplied RTS-option-related flags: either an
-- explicit -with-rtsopts value, or any -rtsopts setting beyond the
-- default safe-only mode.
haveRtsOptsFlags :: DynFlags -> Bool
haveRtsOptsFlags dflags = explicitOpts || enabledBeyondSafe
  where
    explicitOpts = isJust (rtsOpts dflags)
    enabledBeyondSafe =
        case rtsOptsEnabled dflags of
            RtsOptsSafeOnly -> False
            _               -> True
| jwiegley/ghc-release | compiler/main/DriverPipeline.hs | gpl-3.0 | 97,270 | 0 | 31 | 34,012 | 15,987 | 8,071 | 7,916 | -1 | -1 |
{-# LANGUAGE OverloadedStrings, DeriveDataTypeable #-}
module HollaBack.Types (Payload(..),
EmailAddress,
ParseError(..)) where
import qualified Control.Exception as E
import Control.Applicative ((<$>),
(<*>))
import Data.Aeson (decode',
encode,
FromJSON(..),
ToJSON(..),
Value(..),
object,
(.=),
(.:))
import Data.Aeson.Types (typeMismatch)
import Data.Maybe (fromMaybe)
import Data.Text (Text)
import Data.Typeable (Typeable)
import Data.ByteString.Lazy (toChunks,
fromChunks)
import qualified Data.ByteString as BS
import Database.Redis.ByteStringClass
-- | A deferred email reminder: the original message's sender, recipient,
-- subject and body, plus the delay (in seconds) after which it should be
-- sent back.
data Payload = Payload { from          :: EmailAddress,
                         to            :: EmailAddress,
                         subject       :: Text,
                         body          :: Text,
                         offsetSeconds :: Int } deriving (Show, Eq)
-- | Plain-text email address; no structural validation is performed here.
type EmailAddress = Text
-- | Redis (de)serialisation of a 'Payload' via its JSON encoding.
-- NOTE(review): 'fromBS' raises 'ParseError' with 'E.throw' from pure
-- code when decoding fails; callers only see the exception when the
-- result is forced.
instance BS Payload where
  toBS = deLazy . encode
    where deLazy = BS.concat . toChunks
  fromBS bs = fromMaybe (E.throw $ ParseError "Failed to parse") parsed
    where parsed = decode' $ reLazy bs
          -- Wrap the strict ByteString as a single-chunk lazy one.
          reLazy = fromChunks . (:[])
-- | JSON encoding; field names use snake_case ("offset_seconds") and must
-- stay in sync with the 'FromJSON' instance below.
instance ToJSON Payload where
  toJSON pl = object ["from" .= from pl,
                      "body" .= body pl,
                      "to" .= to pl,
                      "subject" .= subject pl,
                      "offset_seconds" .= offsetSeconds pl ]
-- | JSON decoding; all five keys are required, and non-object values are
-- rejected with a type mismatch.
instance FromJSON Payload where
  parseJSON (Object v) = Payload <$> v .: "from"
                                 <*> v .: "to"
                                 <*> v .: "subject"
                                 <*> v .: "body"
                                 <*> v .: "offset_seconds"
  parseJSON v          = typeMismatch "Payload" v
-- | Thrown (via 'E.throw' in 'fromBS') when a stored payload cannot be
-- decoded from JSON; carries a human-readable message.
data ParseError = ParseError String deriving (Show, Eq, Typeable)
instance E.Exception ParseError
| bitemyapp/HollaBack | HollaBack/Types.hs | bsd-2-clause | 2,083 | 0 | 15 | 880 | 484 | 280 | 204 | 50 | 0 |
module Graphs.SearchCycles (searchCycles, cleanCycles) where
{--
This module implements a cycle searcher. It works as a reference counting garbage collector
--}
import Utils.Utils hiding (get)
import Control.Monad.State
import Data.Map hiding (null, filter, map)
import qualified Data.Map as M
import Data.Set (Set)
import qualified Data.Set as S
import Prelude hiding (lookup)
import Graphs.PathFinder
-- Constructs all possible cycles
-- | Enumerate cycles in a dependency graph. Repeatedly picks the maximal
-- key, collects the non-empty cycles passing through it, then recurses on
-- the graph with that node removed so each cycle is reported once.
cleanCycles :: (Ord n, Eq n) => Map n (Set n) -> [[n]]
cleanCycles dict
  | M.null dict = []
  | otherwise   = nonEmptyCycles ++ cleanCycles rest
  where
    (start, _)     = findMax dict
    nonEmptyCycles = filter (not . null) (cleanCyclesFrom dict start)
    rest           = delete start dict
-- | All cycles through node @n@: for each direct successor, search paths
-- back to @n@ (via 'searchPaths' from Graphs.PathFinder) and prefix @n@.
-- A direct self-loop is reported as [n,n].
-- NOTE(review): result shape depends on the fixity of '|>' from
-- Utils.Utils relative to ':' -- confirm before refactoring.
cleanCyclesFrom :: (Ord n, Eq n) => Map n (Set n) -> n -> [[n]]
cleanCyclesFrom dict n
 = let starts = S.toList $ findWithDefault S.empty n dict
       selfCycles = if n `elem` starts then [n,n] else [] in
       selfCycles : (starts >>= flip (searchPaths dict) n ) |> (n:)
{-
Calculates the cycles in a given import graph, by deleting all leafs.
Args:
- Map of {n --> depends on these}
- Returns a map of resting dependencies
-}
-- | Strip every node that is not part of (or depending on) a cycle,
-- leaving only the cyclic core of the dependency map.
searchCycles :: (Ord n, Eq n) => Map n (Set n) -> Map n (Set n)
searchCycles = execState removeRecursively
-- | Reference-count style pruning: repeatedly drop leaf nodes until the
-- map stabilizes, then do the same on the inverted graph so nodes that
-- merely depend on a cycle are removed as well.
removeRecursively :: (Ord n, Eq n) => State (Map n (Set n)) ()
removeRecursively = do stabilize_ checkAllNodes
                       modify invertDict -- by inverting the graph and checking again, the nodes depending on a cycle get removed too
                       stabilize_ checkAllNodes
                       modify invertDict
-- | Run 'checkNode' once for every key currently in the state map.
checkAllNodes :: (Ord n) => State (Map n (Set n)) ()
checkAllNodes = get >>= mapM_ checkNode . keys
-- Checks dependencies of a node. If no dependencies remain, the node is removed.
-- Otherwise the node's dependency set is narrowed to those dependencies
-- that themselves still have dependencies (i.e. are not leaves).
checkNode :: (Ord n) => n -> State (Map n (Set n)) ()
checkNode n = do directDeps <- gets (findWithDefault S.empty n) |> S.toList
                 depCount <- mapM numberOfDeps directDeps
                 -- Keep only dependencies whose own dep-count is positive.
                 let resting = map snd $ filter ((> 0) . fst ) $ zip depCount directDeps
                 modify $ if null resting then delete n
                                          else insert n $ S.fromList resting
-- | Number of remaining dependencies recorded for a node; 0 when the node
-- is no longer present in the state map.
numberOfDeps :: (Ord n) => n -> State (Map n (Set n)) Int
numberOfDeps node = gets (maybe 0 S.size . lookup node)
-- | Run the given state action repeatedly until it stops changing the
-- state; returns the results of every run (at least one).
stabilize :: (Eq s) => State s a -> State s [a]
stabilize act = go
  where
    go = do before <- get
            r <- act
            after <- get
            if before == after
                then return [r]
                else fmap (r :) go
-- | Like 'stabilize', but discards the action's results.
stabilize_ :: (Eq s) => State s a -> State s ()
stabilize_ act = do
    before <- get
    _ <- act
    after <- get
    when (before /= after) $ stabilize_ act
| pietervdvn/ALGT | src/Graphs/SearchCycles.hs | bsd-3-clause | 2,569 | 106 | 13 | 568 | 964 | 516 | 448 | 52 | 2 |
-- auto update all packages maintained by arch-haskell that were
-- built with cabal2arch older as asserted by oldCabal2Arch.
--
-- generate the pkgbuild, upload it.
--
import Distribution.ArchLinux.AUR
import Distribution.ArchLinux.PkgBuild
import Distribution.ArchLinux.Report
import Data.Map as M hiding (update)
import System.FilePath
import System.Cmd
import Distribution.Text
import Control.Monad
import Text.Printf
me = "arch-haskell"
-- Fetch every AUR package maintained by 'me'; for each one whose PKGBUILD
-- was produced by an outdated cabal2arch, regenerate and upload it via
-- 'update'.
main = do
    packages <- maintainer me
    idx <- loadPackageIndex
    print (length packages)
    -- for all my packages
    forM_ packages $ \p -> do
        -- grab the parsed pkgbuild.
        (_,k) <- package (packageName p) -- need a short circuit
        case k of
             Left err -> print err
             Right pkg ->
                if oldCabal2Arch pkg
                   then do
                       -- convert arch name to hackage name
                       -- (the last URL component is the Hackage package name)
                       let name = takeFileName (packageURL p)
                       printf "Updating %s -> %s\n" (packageName p) name
                       update idx name
                   else return () -- putStrLn $ "OK: " ++ (packageName p)
-- Look up the current Hackage version of package 'p' in the index 'v',
-- build its .cabal URL, and hand it to the external update-packages.sh
-- script (which must be on PATH).
-- NOTE(review): the URL is spliced into a shell command unescaped; safe
-- only because 'p' comes from the package index keys.
update v p = do
    case M.lookup p v of -- arch haskell name, not cabal name
         Nothing -> do
            putStrLn $ "No package found for " ++ show p
         Just version -> do
            let url = "http://hackage.haskell.org/packages/archive"
                        </> p </> display version </> p <.> "cabal"
            system $ "update-packages.sh " ++ url
            return ()
| remyoudompheng/archhaskell-web | scripts/needs-updates.hs | bsd-3-clause | 1,578 | 2 | 22 | 543 | 337 | 166 | 171 | 34 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Stackage.Test
( runTestSuites
) where
import qualified Control.Concurrent as C
import Control.Exception (Exception, SomeException, handle, throwIO, IOException, try)
import Control.Monad (replicateM, unless, when, forM_)
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Control.Monad.Trans.Writer as W
import Distribution.Package (Dependency (Dependency))
import Data.Version (parseVersion, Version (Version))
import Data.Typeable (Typeable)
import Stackage.Types
import Stackage.Util
import System.Directory (copyFile, createDirectory,
createDirectoryIfMissing, doesFileExist, findExecutable,
getDirectoryContents, removeFile,
renameDirectory, canonicalizePath)
import System.Exit (ExitCode (ExitSuccess))
import System.FilePath ((<.>), (</>), takeDirectory)
import System.IO (IOMode (WriteMode, AppendMode),
withBinaryFile)
import System.Process (readProcess, runProcess, waitForProcess, createProcess, proc, cwd)
import Text.ParserCombinators.ReadP (readP_to_S)
import Data.IORef (IORef, readIORef, atomicModifyIORef, newIORef)
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Lazy.Char8 as L8
import qualified Distribution.PackageDescription as PD
import Distribution.PackageDescription.Parse (ParseResult (ParseOk),
parsePackageDescription)
-- | Run the test suites of every non-skipped package in the build plan,
-- in parallel, after computing each package's in-plan dependencies so
-- workers only start a package once its dependencies have finished.
-- Aborts with an error if any package failed (logs remain in "runtests").
runTestSuites :: BuildSettings -> BuildPlan -> IO ()
runTestSuites settings' bp = do
    settings <- fixBuildSettings settings'
    let selected' = Map.filterWithKey notSkipped $ bpPackages bp
    let testdir = "runtests"
        docdir = "haddock"
    -- Start from clean scratch directories.
    rm_r testdir
    rm_r docdir
    createDirectory testdir
    createDirectory docdir
    putStrLn "Determining package dependencies"
    selected <- mapM (addDependencies settings (Map.keysSet selected') testdir)
              $ Map.toList selected'
    putStrLn "Running test suites"
    copyBuiltInHaddocks docdir
    cabalVersion <- getCabalVersion
    haddockFilesRef <- newIORef []
    -- Fold package results with (&&): allPass is True only if every
    -- package either passed or was an expected failure.
    allPass <- parFoldM
        (testWorkerThreads settings)
        (runTestSuite cabalVersion settings testdir docdir bp haddockFilesRef)
        (&&)
        True
        selected
    unless allPass $ error $ "There were failures, please see the logs in " ++ testdir
  where
    notSkipped p _ = p `Set.notMember` bpSkippedTests bp
-- | Pair a selected package with the set of in-plan packages its library
-- depends on (used for parallel scheduling). Dependency extraction errors
-- are logged and treated as "no dependencies".
addDependencies :: BuildSettings
                -> Set PackageName -- ^ all packages to be installed
                -> FilePath -- ^ testdir
                -> (PackageName, SelectedPackageInfo)
                -> IO (PackageName, Set PackageName, SelectedPackageInfo)
addDependencies settings allPackages testdir (packageName, spi) = do
    package' <- replaceTarball (tarballDir settings) package
    deps <- handle (\e -> print (e :: IOException) >> return Set.empty)
          $ getDeps allPackages testdir packageName package package'
    return (packageName, deps, spi)
  where
    package = packageVersionString (packageName, spiVersion spi)
-- | Unpack a package into @testdir@ with "cabal unpack", parse its .cabal
-- file, and return its library dependencies intersected with the set of
-- packages in the plan. Unparseable .cabal files yield the empty set.
getDeps :: Set PackageName -- ^ all packages to be installed
        -> FilePath -> PackageName -> String -> String -> IO (Set PackageName)
getDeps allPackages testdir (PackageName name) nameVer loc = do
    (Nothing, Nothing, Nothing, ph) <- createProcess
        (proc "cabal" ["unpack", loc, "--verbose=0"]) { cwd = Just testdir }
    ec <- waitForProcess ph
    unless (ec == ExitSuccess) $ error $ "Unable to unpack: " ++ loc
    lbs <- L.readFile $ testdir </> nameVer </> name <.> "cabal"
    case parsePackageDescription $ L8.unpack lbs of
        ParseOk _ gpd -> return $ Set.intersection allPackages $ allLibraryDeps gpd
        _ -> return Set.empty
-- | All package names mentioned as dependencies anywhere in the library's
-- conditional tree (both branches of every conditional are included).
-- Packages without a library section yield the empty set.
allLibraryDeps :: PD.GenericPackageDescription -> Set PackageName
allLibraryDeps =
    maybe Set.empty (W.execWriter . goTree) . PD.condLibrary
  where
    -- Walk the CondTree, emitting each Dependency's package name.
    goTree tree = do
        mapM_ goDep $ PD.condTreeConstraints tree
        forM_ (PD.condTreeComponents tree) $ \(_, y, z) -> do
            goTree y
            maybe (return ()) goTree z
    goDep (Dependency pn _) = W.tell $ Set.singleton pn
-- | Query "cabal --numeric-version" and parse out the major.minor pair,
-- ignoring any trailing newline characters. Errors out on unparseable
-- output.
getCabalVersion :: IO CabalVersion
getCabalVersion = do
    output <- readProcess "cabal" ["--numeric-version"] ""
    -- Keep only complete parses (empty remaining input).
    case filter (null . snd) $ readP_to_S parseVersion $ filter notCRLF output of
        (Version (x:y:_) _, _):_ -> return $ CabalVersion x y
        _ -> error $ "Invalid cabal version: " ++ show output
  where
    notCRLF '\n' = False
    notCRLF '\r' = False
    notCRLF _ = True
-- | Dependency-aware parallel fold: run @f@ over the workloads on the
-- given number of worker threads, starting each workload only after all
-- of its dependencies (second tuple component) have completed; fold the
-- results with @g@. The first worker exception kills all threads and is
-- rethrown; errors out if scheduling deadlocks leave workloads unrun.
parFoldM :: Int -- ^ number of threads
         -> ((PackageName, payload) -> IO c)
         -> (a -> c -> a)
         -> a
         -> [(PackageName, Set PackageName, payload)]
         -> IO a
parFoldM threadCount0 f g a0 bs0 = do
    ma <- C.newMVar a0
    mbs <- C.newMVar bs0
    signal <- C.newEmptyMVar
    completed <- newIORef Set.empty
    tids <- replicateM threadCount0 $ C.forkIO $ worker completed ma mbs signal
    wait threadCount0 signal tids
    -- Anything still queued means its dependencies never completed.
    unrun <- C.takeMVar mbs
    when (not $ null unrun) $
        error $ "The following tests were not run: " ++ unwords
            [x | (PackageName x, _, _) <- unrun]
    C.takeMVar ma
  where
    -- Each worker loops taking ready workloads; on completion (or
    -- exception) it reports via the shared signal MVar.
    worker completedRef ma mbs signal =
        handle
            (C.putMVar signal . Just)
            (loop >> C.putMVar signal Nothing)
      where
        loop = do
            mb <- C.modifyMVar mbs $ \bs -> do
                completed <- readIORef completedRef
                return $ case findReady completed bs of
                    -- There's a workload ready with no deps
                    Just (b, bs') -> (bs', Just b)
                    -- No workload with no deps
                    Nothing -> (bs, Nothing)
            case mb of
                Nothing -> return ()
                Just (name, _, payload) -> do
                    c <- f (name, payload)
                    C.modifyMVar_ ma $ \a -> return $! g a c
                    atomicModifyIORef completedRef $ \s -> (Set.insert name s, ())
                    loop
    -- Wait for every worker to signal; any Just e aborts everything.
    wait threadCount signal tids
        | threadCount == 0 = return ()
        | otherwise = do
            me <- C.takeMVar signal
            case me of
                Nothing -> wait (threadCount - 1) signal tids
                Just e -> do
                    mapM_ C.killThread tids
                    throwIO (e :: SomeException)
-- | Find the first workload whose dependencies have all been completed,
-- returning it together with the remaining workloads (order preserved).
-- Returns Nothing when no workload is ready.
findReady :: Ord key
          => Set key -- ^ workloads already complete
          -> [(key, Set key, value)]
          -> Maybe ((key, Set key, value), [(key, Set key, value)])
findReady completed ws =
    case break ready ws of
        (_, [])             -> Nothing
        (before, x : after) -> Just (x, before ++ after)
  where
    ready (_, deps, _) = Set.null (deps `Set.difference` completed)
-- | Thrown by the per-package 'run' helper when a cabal invocation exits
-- non-zero; caught in 'runTestSuite' to mark the package as failed.
data TestException = TestException
    deriving (Show, Typeable)
instance Exception TestException
-- | Major and minor components of the cabal-install version; Ord lets us
-- gate features on a minimum version (see 'runTestSuite').
data CabalVersion = CabalVersion Int Int
    deriving (Eq, Ord, Show)
-- | Configure, (optionally) haddock, build and test one package inside a
-- sandboxed environment. Returns True when the package passed or was an
-- expected failure; otherwise leaves its log file behind and returns
-- False. Successful haddock runs register the package's .haddock file in
-- the shared IORef so later packages can cross-link against it.
runTestSuite :: CabalVersion
             -> BuildSettings
             -> FilePath -- ^ testdir
             -> FilePath -- ^ docdir
             -> BuildPlan
             -> IORef [(String, FilePath)] -- ^ .haddock files
             -> (PackageName, SelectedPackageInfo)
             -> IO Bool
runTestSuite cabalVersion settings testdir docdir
    bp haddockFilesRef (packageName, SelectedPackageInfo {..}) = do
    -- Set up a new environment that includes the sandboxed bin folder in PATH.
    env' <- getModifiedEnv settings
    let menv = Just $ addSandbox env'
        addSandbox = (("HASKELL_PACKAGE_SANDBOX", packageDir settings):)
    -- Run a command in 'wdir', sending stdout+stderr to the log handle;
    -- non-zero exit raises TestException.
    let run cmd args wdir handle' = do
            ph <- runProcess cmd args (Just wdir) menv Nothing (Just handle') (Just handle')
            ec <- waitForProcess ph
            unless (ec == ExitSuccess) $ throwIO TestException
    passed <- handle (\TestException -> return False) $ do
        -- Optionally archive the package's .cabal file.
        case cabalFileDir settings of
            Nothing -> return ()
            Just cfd -> do
                let PackageName name = packageName
                    basename = name ++ ".cabal"
                    src = dir </> basename
                    dst = cfd </> basename
                createDirectoryIfMissing True cfd
                copyFile src dst
        getHandle WriteMode $ run "cabal" (addCabalArgs settings BSTest ["configure", "--enable-tests"]) dir
        -- Try building docs first in case tests have an expected failure.
        when (buildDocs settings) $ do
            hfs <- readIORef haddockFilesRef
            -- Cross-link against the haddocks of already-built packages.
            let hfsOpts = flip map hfs $ \(pkgVer, hf) -> concat
                    [ "--haddock-options=--read-interface="
                    , "../"
                    , pkgVer
                    , "/,"
                    , hf
                    ]
            getHandle AppendMode $ run "cabal"
                ( "haddock"
                : "--hyperlink-source"
                : "--html"
                : "--hoogle"
                : "--html-location=../$pkg-$version/"
                : hfsOpts) dir
            let PackageName packageName' = packageName
            handle (\(_ :: IOException) -> return ()) $ renameDirectory
                (dir </> "dist" </> "doc" </> "html" </> packageName')
                (docdir </> package)
            enewPath <- try $ canonicalizePath $ docdir </> package </> packageName' <.> "haddock"
            case enewPath :: Either IOException FilePath of
                Left _ -> return () -- print e
                Right newPath -> atomicModifyIORef haddockFilesRef $ \hfs'
                    -> ((package, newPath) : hfs', ())
        when spiHasTests $ do
            getHandle AppendMode $ run "cabal" ["build"] dir
            getHandle AppendMode $ run "cabal" (concat
                [ ["test"]
                , if cabalVersion >= CabalVersion 1 20
                    then ["--show-details=streaming"] -- FIXME temporary workaround for https://github.com/haskell/cabal/issues/1810
                    else []
                ]) dir
        return True
    let expectedFailure = packageName `Set.member` bpExpectedFailures bp
    if passed
        then do
            removeFile logfile
            when expectedFailure $ putStrLn $ "  " ++ package ++ " passed, but I didn't think it would."
        else unless expectedFailure $ putStrLn $ concat
            [ "Test suite failed: "
            , package
            , "("
            , unMaintainer spiMaintainer
            , githubMentions spiGithubUser
            , ")"
            ]
    rm_r dir
    -- Expected failures still count as overall success.
    return $! passed || expectedFailure
  where
    logfile = testdir </> package <.> "log"
    dir = testdir </> package
    getHandle mode = withBinaryFile logfile mode
    package = packageVersionString (packageName, spiVersion)
-- | Copy the library documentation shipped with the GHC installation
-- (share/doc/ghc/html/libraries, located relative to the ghc executable
-- found on PATH) into the given documentation directory, so package
-- haddocks can link against the built-in libraries.
--
-- Fails with a descriptive error when ghc is not on PATH (the previous
-- implementation died with an opaque pattern-match failure).
copyBuiltInHaddocks :: FilePath -> IO ()
copyBuiltInHaddocks docdir = do
    mghc <- findExecutable "ghc"
    ghc <- maybe (error "copyBuiltInHaddocks: could not find ghc on PATH") return mghc
    copyTree (takeDirectory ghc </> "../share/doc/ghc/html/libraries") docdir
  where
    -- Recursively copy a directory tree. Destination subdirectories are
    -- created fresh, so this assumes they do not already exist.
    copyTree src dest = do
        entries <- fmap (filter (\s -> s /= "." && s /= ".."))
                 $ getDirectoryContents src
        forM_ entries $ \entry -> do
            let src'  = src </> entry
                dest' = dest </> entry
            isFile <- doesFileExist src'
            if isFile
                then copyFile src' dest'
                else do
                    createDirectory dest'
                    copyTree src' dest'
| Tarrasch/stackage | Stackage/Test.hs | mit | 11,872 | 0 | 21 | 3,931 | 3,189 | 1,619 | 1,570 | 250 | 5 |
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS -fno-warn-name-shadowing #-}
-- | Freenect example with GLUT, showing connection boundaries.
-- Video demo here: http://www.youtube.com/watch?v=as2syH8Y8yc
module Main
where
import Control.Concurrent
import Control.Monad
import Data.Bits
import Data.IORef
import Data.Vector.Storable (Vector,(!))
import qualified Data.Vector.Storable as V
import Data.Word
import Foreign.ForeignPtr
import Freenect
import Graphics.Rendering.OpenGL
import Graphics.UI.GLUT hiding (shift)
-- Kinect depth frame dimensions (also used as the GLUT window size).
width, height :: Int
width = 640
height = 480
-- | Share the latest depth frame between the freenect capture thread and
-- the GL rendering loop via an IORef, then run the GL loop on the main
-- thread.
main :: IO ()
main = do
  depthGrid <- newIORef Nothing
  _ <- getDepthThread depthGrid
  glThread depthGrid
-- | Start a bound OS thread that opens the Kinect camera, registers a
-- depth callback writing each frame into the shared IORef (and requesting
-- a redisplay), and then pumps freenect events forever.
getDepthThread :: IORef (Maybe (Vector Word16)) -> IO ThreadId
getDepthThread depthGrid = forkOS $ do
  withContext $ \context -> do
    setLogLevel LogFatal context
    selectSubdevices context devices
    withDevice context index $ \device -> do
      setDepthMode device Medium ElevenBit
      setDepthCallback device $ \payload _timestamp -> do
        writeIORef depthGrid (Just payload)
        postRedisplay Nothing
        return ()
      startDepth device
      forever $ processEvents context
  where devices = [Camera]
        index = 0 :: Integer
-- | Set up the GLUT window with a 2D orthographic projection matching the
-- depth frame dimensions, install the display callback, and enter the
-- (blocking) GLUT main loop.
glThread :: IORef (Maybe (Vector Word16)) -> IO ()
glThread depthGrid = do
  (_progname,_args) <- getArgsAndInitialize
  -- initialDisplayMode $= [DoubleBuffered]
  _window <- createWindow "Kinect"
  windowSize $= Size (fromIntegral width) (fromIntegral height)
  ortho2D 0 (fromIntegral width) 0 (fromIntegral height)
  displayCallback $= display depthGrid
  clearColor $= Color4 0.2 0.2 0.2 0
  mainLoop
n=2^11 - 1000
-- | Render the most recent depth frame, if any: clear the buffer, draw a
-- 1x1 patch per pixel (grey for near-ish values, red-tinted for the very
-- near ones, nothing for far values), then swap buffers.
display :: IORef (Maybe (Vector Word16)) -> IO ()
display gridRef = do
  mgrid <- readIORef gridRef
  case mgrid of
    Nothing -> return ()
    Just grid -> do
      clear [ColorBuffer]
      forM_ [(x, y) | x <- [0 .. width - 1], y <- [0 .. height - 1]] $ \(x, y) -> do
        let rawDisparity = fromIntegral (grid ! (y * width + x))
            d = rawDisparity / n
        -- y is flipped because GL's origin is the lower-left corner.
        when (d < 0.8) $
          if d < 0.5
            then patch (x, height - y) (1, d, d)
            else patch (x, height - y) (d, d, d)
      swapBuffers
-- | RGB colour components passed to 'patch'.
type PatchColor = (GLfloat,GLfloat,GLfloat)
-- | Pixel location in window coordinates.
type Loc = (Int,Int)
-- | Draw a single 1x1 rectangle at the given window location in the given
-- colour.
patch :: Loc -> PatchColor -> IO ()
patch (px, py) (r, g, b) = do
  color (Color3 r g b)
  rect (Vertex2 x0 y0) (Vertex2 (x0 + 1) (y0 + 1))
  where x0 = fromIntegral px :: GLfloat
        y0 = fromIntegral py :: GLfloat
| chrisdone/freenect | examples/src/GlutBoundary.hs | bsd-3-clause | 2,599 | 0 | 24 | 667 | 881 | 455 | 426 | 71 | 4 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TemplateHaskell, DataKinds #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Lib.Region
-- Copyright : (c) Artem Chirkin
-- License : MIT
--
-- Maintainer : chirkin@arch.ethz.ch
--
-- Variation of a quad-tree.
-- Uses BoundedBy (cheap) and Spatial (expensive) type classes to locate objects in logarithmic time.
-- Constructing the whole tree from a list is O(n log n)
-- Does not support adding new objects yet.
--
-----------------------------------------------------------------------------
module Lib.Region
( -- * Data type
Region ()
, rBound
-- ^ Everything in this region is fully contained within this bound
, rCenter
-- ^ Pivoting point to split the quad-tree into four branches
, rBlocks, rLL, rLR, rUL, rUR
-- * Object-location queries
, findClosest, foldNearby
-- * Construction
, fromList, toList
) where
import Data.List (foldl')
import Data.List.NonEmpty (NonEmpty (..))
import Data.Semigroup
import qualified Control.Lens as Lens
import GHC.Exts (IsList(..))
import Numeric.EasyTensor
import Lib.ExtraTypes
-- | Data structure to store blocks.
-- Every child block gets the blocks with centers in corresponding quadrant of parent.
-- Though their bounding boxes may overlap a little bit.
-- Construction guarantees (fromList):
-- 1. Bounds of objects on current level larger than half of region bound
-- 2. Each existing leaf contains at least one object inside.
data Region a
  = Region
  { _rBound :: !(MinMax Vec2f)
    -- ^ Everything in this region is fully contained within this bound
  , _rCenter :: !Vec2f
    -- ^ Pivoting point to split the quad-tree into four branches
  , _rBlocks :: ![a]
    -- ^ All the blocks big enough to not fit within child bounded regions
  , _rLL :: !(Region a)
    -- ^ Lower-left region
  , _rLR :: !(Region a)
    -- ^ Lower-right region
  , _rUL :: !(Region a)
    -- ^ Upper-left region
  , _rUR :: !(Region a)
    -- ^ Upper-right region
  }
  -- | Empty leaf: a quadrant that holds no objects (yet). Keeps its bound
  -- and center so it can be promoted to a 'Region' when objects arrive.
  | Wasteland
  { _rBound :: !(MinMax Vec2f)
    -- ^ Everything in this region is fully contained within this bound
  , _rCenter :: !Vec2f
    -- ^ Pivoting point to split the quad-tree into four branches
  }
  deriving (Eq,Show)
-- | A region's bound is just its stored '_rBound' (both constructors
-- carry it, so this is total).
instance BoundedBy Vec2f (Region a) where
    bounds = _rBound
    {-# INLINE bounds #-}
-- | Map over every stored object; bounds and centers are untouched.
instance Functor Region where
    fmap f (Region bnd ctr blocks ll lr ul ur) =
        Region bnd ctr (map f blocks) (fmap f ll) (fmap f lr) (fmap f ul) (fmap f ur)
    fmap _ (Wasteland bnd ctr) = Wasteland bnd ctr
-- | Fold objects in order: this level's blocks first, then the LL, LR,
-- UL, UR children.
instance Foldable Region where
    foldMap f (Region _ _ blocks ll lr ul ur) =
        mconcat [ foldMap f blocks
                , foldMap f ll
                , foldMap f lr
                , foldMap f ul
                , foldMap f ur
                ]
    foldMap _ (Wasteland _ _) = mempty
-- | Traverse objects in the same order as 'Foldable': this level's blocks
-- first, then LL, LR, UL, UR. The explicit 'sequenceA' mirrors the
-- default that 'traverse' would provide.
instance Traversable Region where
    traverse f Region {..} = Region _rBound _rCenter
                         <$> traverse f _rBlocks
                         <*> traverse f _rLL
                         <*> traverse f _rLR
                         <*> traverse f _rUL
                         <*> traverse f _rUR
    traverse _ Wasteland {..} = pure $ Wasteland _rBound _rCenter
    sequenceA Region {..} = Region _rBound _rCenter
                        <$> sequenceA _rBlocks
                        <*> sequenceA _rLL
                        <*> sequenceA _rLR
                        <*> sequenceA _rUL
                        <*> sequenceA _rUR
    sequenceA Wasteland {..} = pure $ Wasteland _rBound _rCenter
-- | Build the quad-tree from a list in O(n log n): each object descends
-- into the quadrant containing its bounding-box center until its bound is
-- too large (more than half of the region size), at which point it stays
-- at that level's '_rBlocks'. 'toList' loses the spatial structure.
instance BoundedBy Vec2f a => IsList (Region a) where
    type Item (Region a) = a
    fromList xs = foldl' (addObject $ mmDiff scBound) (Wasteland scBound scCenter) xs
      where
        -- Overall bound of all inputs, squared up around its center so
        -- quadrant splits stay uniform.
        scBound' = sconcat . ( mmBound :|) $ map bounds xs
        scCenter = mmAvg scBound'
        scDist = broadcast . unScalar $ normLPInf (mmDiff scBound') / 2
        scBound = MinMax (scCenter - scDist) (scCenter + scDist)
        -- Insert object b into a region whose soft size is 'softSize';
        -- small objects recurse into the quadrant holding their center.
        addObject softSize r@Region{..} b
          | d <- softSize * 0.5
          , bc <- mmAvg $ bounds b
          , mmDiff (bounds b) < d =
            case (_x bc <= _x _rCenter, _y bc <= _y _rCenter) of
              (True, True) -> r
                { _rBound = _rBound <> bounds b
                , _rLL = addObject d _rLL b
                }
              (True, False) -> r
                { _rBound = _rBound <> bounds b
                , _rUL = addObject d _rUL b
                }
              (False, True) -> r
                { _rBound = _rBound <> bounds b
                , _rLR = addObject d _rLR b
                }
              (False, False) -> r
                { _rBound = _rBound <> bounds b
                , _rUR = addObject d _rUR b
                }
          -- Too large for any child: keep it at this level.
          | otherwise = r{ _rBound = _rBound <> bounds b, _rBlocks = b:_rBlocks}
        -- Hitting a Wasteland: materialize it into a Region with four
        -- empty child quadrants, then retry the insertion.
        addObject softSize Wasteland {..} b
          | d <- softSize * 0.5
          , MinMax bmin bmax <- _rBound
          , dx <- vec2 (realToFrac $ _x d) 0
          , dy <- vec2 0 (realToFrac $ _y d) = addObject softSize
            Region
              { _rBound = _rBound
              , _rCenter = _rCenter
              , _rBlocks = []
              , _rLL = Wasteland (MinMax bmin (bmax-d)) (_rCenter - d/2)
              , _rUL = Wasteland (MinMax (bmin+dy) (bmax-dx)) (_rCenter - dx/2 + dy/2)
              , _rLR = Wasteland (MinMax (bmin+dx) (bmax-dy)) (_rCenter + dx/2 - dy/2)
              , _rUR = Wasteland (MinMax (bmin+d) bmax) (_rCenter + d/2)
              }
            b
    toList Region {..} = _rBlocks ++ toList _rLL ++ toList _rLR ++ toList _rUL ++ toList _rUR
    toList Wasteland {} = []
-- | Fold one candidate object into the running minimum (by L2 distance to p).
--   Given an existing best at distance x, the candidate is skipped when a
--   cheap L1-norm bound shows its box centre is provably farther away.
--   The magic constant is sqrt 2 -- presumably relating L1 and L2 norms in
--   2D; TODO confirm the derivation.
closerObject :: Spatial 2 Float a => Vec2f -> Option (ArgMin Scf a) -> a -> Option (ArgMin Scf a)
closerObject p (Option Nothing) b = Option . Just . Min $ Arg (p `distL2To` b) b
closerObject p (Option (Just ax@(Min (Arg x _)))) b =
    if normL1 y - normL1 dy * 0.5 > x * 1.4142135623730951
    then Option (Just ax)
    else Option . Just $ ax <> Min (Arg (p `distL2To` b) b)
  where
    -- offset from p to the centre of b's bounding box
    y = mmAvg (bounds b) - p
    -- extents of b's bounding box
    dy = mmDiff (bounds b) :: Vec2f
-- | Like 'closerObject' but for a whole subtree.  The second clause prunes
--   an entire subtree when its bound is provably farther than the current
--   best (same L1-based bound and sqrt-2 constant as in 'closerObject').
closerObjectInR ::Spatial 2 Float a => Vec2f -> Option (ArgMin Scf a) -> Region a -> Option (ArgMin Scf a)
closerObjectInR _ x Wasteland {} = x
closerObjectInR p (Option (Just ax@(Min (Arg x _)))) Region {..}
  | y <- mmAvg _rBound - p
  , dy <- mmDiff _rBound
  , normL1 y - normL1 dy * 0.5 > x * 1.4142135623730951 = Option (Just ax)
closerObjectInR p ax Region {..} =
    -- Visit the quadrant containing p first, so the best distance shrinks
    -- early and the remaining quadrants are more likely to be pruned.
    case (_x p <= _x _rCenter, _y p <= _y _rCenter) of
      (True , True ) -> ax' +^+ _rLL +^+ _rUL +^+ _rLR +^+ _rUR
      (True , False) -> ax' +^+ _rUL +^+ _rLL +^+ _rUR +^+ _rLR
      (False, True ) -> ax' +^+ _rLR +^+ _rLL +^+ _rUR +^+ _rUL
      (False, False) -> ax' +^+ _rUR +^+ _rUL +^+ _rLR +^+ _rLL
  where
    -- find a closer block in a given region
    infixl 3 +^+
    ob +^+ r' = closerObjectInR p ob r'
    -- closest block on this level, if any
    ax' = foldl' (closerObject p) ax _rBlocks
-- | Find the object nearest to the given point inside the region,
--   together with its distance.  O(log n) complexity.
findClosest :: Spatial 2 Float a => Vec2f -> Region a -> Maybe (Arg Scf a)
findClosest p region =
    getMin <$> getOption (closerObjectInR p (Option Nothing) region)
-- | Fold objects within a given boundary.
--   Should be something like O(k log n) where k is the number of objects in a region.
foldNearby :: (BoundedBy Vec2f a, Monoid m) => (a -> m) -> MinMax Vec2f -> Region a -> m
foldNearby _ _ Wasteland {} = mempty
-- Recurse only when the query box intersects this subtree's bound.
foldNearby f bound@(MinMax bll bur) Region {..}
  | MinMax rll rur <- _rBound
  , bur > rll && bll < rur = foldMap f' _rBlocks +^+ _rLL +^+ _rUL +^+ _rLR +^+ _rUR
  | otherwise = mempty
  where
    -- go recursive into children
    infixl 3 +^+
    m +^+ r' = m `mappend` foldNearby f bound r'
    -- apply f only to blocks whose own box intersects the query box
    f' i | MinMax ill iur <- bounds i
         , bur > ill && bll < iur = f i
         | otherwise = mempty
Lens.makeLenses ''Region
| mb21/qua-kit | services/examples-hs/src/Lib/Region.hs | mit | 8,133 | 0 | 17 | 2,372 | 2,419 | 1,265 | 1,154 | 170 | 4 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module Yi.Config.Default ( defaultConfig, availableFrontends, defaultEmacsConfig
, defaultVimConfig, defaultCuaConfig, toVimStyleConfig
, toEmacsStyleConfig, toCuaStyleConfig) where
import Control.Applicative
import Control.Lens ((.~), (^.), use)
import Control.Monad
import Data.Default
import qualified Data.HashMap.Strict as HM
import qualified Data.Map as M
import Data.Monoid
import Paths_yi
import System.FilePath
import Yi.Buffer
import Yi.Command (cabalBuildE, cabalConfigureE, grepFind,
makeBuild, reloadProjectE, searchSources,
shell)
import Yi.Config
import Yi.Config.Misc
import Yi.Core (errorEditor, quitEditor)
import Yi.Editor
import Yi.Eval (publishedActions)
import Yi.File
import qualified Yi.Interact as I
import Yi.IReader (saveAsNewArticle)
import Yi.Keymap
import qualified Yi.Keymap.Cua as Cua
import qualified Yi.Keymap.Emacs as Emacs
import Yi.Keymap.Keys
import qualified Yi.Keymap.Vim as Vim
import Yi.Layout
import qualified Yi.Mode.Abella as Abella
import qualified Yi.Mode.Haskell as Haskell
import Yi.Mode.IReader (ireadMode, ireaderMode)
import qualified Yi.Mode.JavaScript as JS
import qualified Yi.Mode.Latex as Latex
import Yi.Modes
import qualified Yi.Rope as R
import Yi.Search
import Yi.Style.Library
import qualified Yi.UI.Batch
import Yi.Utils
import Yi.Types ()
#ifdef FRONTEND_VTY
import qualified Graphics.Vty.Config as Vty
import qualified Yi.UI.Vty
#endif
#ifdef FRONTEND_PANGO
import qualified Yi.UI.Pango
#endif
-- | UI frontends compiled into this build, in priority order; the "batch"
--   frontend is always available.  'defaultConfig' picks the first entry.
availableFrontends :: [(String, UIBoot)]
availableFrontends =
#ifdef FRONTEND_VTY
  ("vty", Yi.UI.Vty.start) :
#endif
#ifdef FRONTEND_PANGO
  ("pango", Yi.UI.Pango.start) :
#endif
  [("batch", Yi.UI.Batch.start)]
-- | List of published Actions
-- THIS MUST BE OF THE FORM:
-- ("symbol", box symbol")
-- ... so we can hope getting rid of this someday.
-- Failing to conform to this rule exposes the code to instant deletion.
--
-- TODO: String → Text/YiString
-- | Actions published to the interactive evaluator by default.
--   Each key must literally equal the quoted symbol name (see the note
--   preceding this definition).
defaultPublishedActions :: HM.HashMap String Action
defaultPublishedActions = HM.fromList
    [
      ("atBoundaryB"            , box atBoundaryB)
    , ("cabalBuildE"            , box cabalBuildE)
    , ("cabalConfigureE"        , box cabalConfigureE)
    , ("closeBufferE"           , box closeBufferE)
    , ("deleteB"                , box deleteB)
    , ("deleteBlankLinesB"      , box deleteBlankLinesB)
    , ("getSelectRegionB"       , box getSelectRegionB)
    , ("grepFind"               , box grepFind)
    , ("insertB"                , box insertB)
    , ("iread"                  , box ireadMode)
    , ("ireadSaveAsArticle"     , box saveAsNewArticle)
    , ("leftB"                  , box leftB)
    , ("linePrefixSelectionB"   , box linePrefixSelectionB)
    , ("lineStreamB"            , box lineStreamB)
--  , ("mkRegion"               , box mkRegion) -- can't make 'instance Promptable Region'
    , ("makeBuild"              , box makeBuild)
    , ("moveB"                  , box moveB)
    , ("numberOfB"              , box numberOfB)
    , ("pointB"                 , box pointB)
    , ("regionOfB"              , box regionOfB)
    , ("regionOfPartB"          , box regionOfPartB)
    , ("regionOfPartNonEmptyB"  , box regionOfPartNonEmptyB)
    , ("reloadProjectE"         , box reloadProjectE)
    , ("replaceString"          , box replaceString)
    , ("revertE"                , box revertE)
    , ("shell"                  , box shell)
    , ("searchSources"          , box searchSources)
    , ("setAnyMode"             , box setAnyMode)
    , ("sortLines"              , box sortLines)
    , ("unLineCommentSelectionB", box unLineCommentSelectionB)
    , ("writeB"                 , box writeB)
    , ("ghciGet"                , box Haskell.ghciGet)
    , ("abella"                 , box Abella.abella)
    ]
  where
    -- Wrap any runnable action so it can live in the heterogeneous map.
    box :: (Show x, YiAction a x) => a -> Action
    box = makeAction
-- | The fall-back configuration: first available frontend, a nil keymap
--   (press @q@ to quit, @h@ for help), and a broad set of recognised modes.
defaultConfig :: Config
defaultConfig =
  publishedActions .~ defaultPublishedActions $
  Config { startFrontEnd = case availableFrontends of
             [] -> error "panic: no frontend compiled in! (configure with -fvty or another frontend.)"
             ((_,f):_) -> f
         , configUI = UIConfig
             { configFontSize = Just 10
             , configFontName = Nothing
             , configScrollWheelAmount = 4
             , configScrollStyle = Nothing
             , configCursorStyle = FatWhenFocusedAndInserting
             , configLineWrap = True
             , configLeftSideScrollBar = True
             , configAutoHideScrollBar = False
             , configAutoHideTabBar = True
             , configWindowFill = ' '
             , configTheme = defaultTheme
#ifdef FRONTEND_VTY
             , configVty = def
#endif
             }
         , defaultKm = modelessKeymapSet nilKeymap
         , startActions = []
         , initialActions = []
           -- Mode list is tried in order; fundamentalMode is the catch-all.
         , modeTable = [AnyMode Haskell.cleverMode,
                        AnyMode Haskell.preciseMode,
                        AnyMode Latex.latexMode3,
                        AnyMode Latex.fastMode,
                        AnyMode Abella.abellaModeEmacs,
                        AnyMode cMode,
                        AnyMode objectiveCMode,
                        AnyMode cppMode,
                        AnyMode Haskell.literateMode,
                        AnyMode cabalMode,
                        AnyMode clojureMode,
                        AnyMode gnuMakeMode,
                        AnyMode srmcMode,
                        AnyMode ocamlMode,
                        AnyMode ottMode,
                        AnyMode perlMode,
                        AnyMode (JS.hooks JS.javaScriptMode),
                        AnyMode pythonMode,
                        AnyMode rubyMode,
                        AnyMode javaMode,
                        AnyMode jsonMode,
                        AnyMode ireaderMode,
                        AnyMode svnCommitMode,
                        AnyMode gitCommitMode,
                        AnyMode whitespaceMode,
                        AnyMode fundamentalMode]
         , debugMode = False
         , configKillringAccumulate = False
         , configCheckExternalChangesObsessively = True
         , configRegionStyle = Exclusive
         , configInputPreprocess = I.idAutomaton
         , bufferUpdateHandler = []
         , layoutManagers = [hPairNStack 1, vPairNStack 1, tall, wide]
         , configVars = mempty
         }
-- Stock configurations: 'defaultConfig' specialised to a keymap flavour.
defaultEmacsConfig :: Config
defaultEmacsConfig = toEmacsStyleConfig defaultConfig

defaultVimConfig :: Config
defaultVimConfig = toVimStyleConfig defaultConfig

defaultCuaConfig :: Config
defaultCuaConfig = toCuaStyleConfig defaultConfig
toEmacsStyleConfig, toVimStyleConfig, toCuaStyleConfig :: Config -> Config
-- Emacs flavour: Emacs keymap, centre-snapping scroll, a scratch buffer on
-- startup, Esc;Char joined into Meta-Char, and an accumulating killring.
toEmacsStyleConfig cfg
  = cfg {
      configUI = (configUI cfg)
        { configScrollStyle = Just SnapToCenter
#ifdef FRONTEND_VTY
          -- corey: does this actually matter? escToMeta appears to perform all the
          -- meta joining required. I'm not an emacs user and cannot evaluate feel. For
          -- me these settings join esc;key to meta-key OK. The 100 millisecond lag in
          -- ESC is terrible for me. Maybe that's just how it is under emacs...
        , configVty = def { Vty.vtime = Just 100, Vty.vmin = Just 2 }
#endif
        },
      defaultKm = Emacs.keymap,
      startActions = makeAction openScratchBuffer : startActions cfg,
      configInputPreprocess = escToMeta,
      configKillringAccumulate = True
    }
-- | Input preprocessor: transform the two-event sequence Esc;Char into a
--   single Meta-Char event.  Useful for emacs lovers ;)
escToMeta :: I.P Event Event
escToMeta = mkAutomaton . forever $ passThrough ||> metaize
  where
    -- default behaviour: forward events unchanged
    passThrough = anyEvent >>= I.write
    -- Esc followed by a printable char becomes Meta-char
    metaize = do
      _ <- event (spec KEsc)
      ch <- printableChar
      I.write (Event (KASCII ch) [MMeta])
-- Vim flavour: Vim keymap set, single-line scrolling, inclusive regions,
-- and (for vty) a zero ESC timeout so ESC is recognised immediately.
toVimStyleConfig cfg = cfg
  { defaultKm = Vim.keymapSet
  , configUI = (configUI cfg)
    { configScrollStyle = Just SingleLine
#ifdef FRONTEND_VTY
    , configVty = (configVty (configUI cfg)) { Vty.vtime = Just 0 }
#endif
    }
  , configRegionStyle = Inclusive
  }
toCuaStyleConfig cfg = cfg {defaultKm = Cua.keymap}
-- | Open an emacs-like scratch buffer if no file is open.
openScratchBuffer :: YiM ()
openScratchBuffer = withEditor $ do
    bufs <- use buffersA
    let haveFileBuffer = any isFileOrDir (M.elems bufs)
    unless haveFileBuffer $
      void . newBufferE (MemBuffer "scratch") $ R.unlines
        [ "This buffer is for notes you don't want to save."
        , "If you want to create a file, open that file,"
        , "then enter the text in that file's own buffer."
        , ""
        ]
  where
    -- A buffer counts when it is backed by a file, or is a directory listing.
    isFileOrDir :: FBuffer -> Bool
    isFileOrDir attrs = case attrs ^. identA of
      MemBuffer _  -> attrs ^. directoryContentA
      FileBuffer _ -> True
-- The keymap used when no user configuration exists: only quit and help.
nilKeymap :: Keymap
nilKeymap = choice [
             char 'q' ?>>! quitEditor,
             char 'h' ?>>! configHelp
            ]
            <|| (anyEvent >>! errorEditor "Keymap not defined, 'q' to quit, 'h' for help.")
  where
    -- Show a buffer explaining how to configure Yi.
    configHelp :: YiM ()
    configHelp = do
      dataDir <- io getDataDir
      let x <//> y = R.fromString (x </> y)
          welcomeText = R.unlines
            [ "This instance of Yi is not configured."
            , ""
            , "To get a standard reasonable keymap, you can run yi with"
            , "either --as=cua, --as=vim or --as=emacs."
            , ""
            , "You should however create your own ~/.config/yi/yi.hs file."
            , "As a starting point it's recommended to use one of the configs"
            , "from " <> (dataDir <//> "example-configs/")
            , ""
            ]
      withEditor_ $ newBufferE (MemBuffer "configuration help") welcomeText
| TOSPIO/yi | src/library/Yi/Config/Default.hs | gpl-2.0 | 10,365 | 0 | 16 | 3,657 | 1,938 | 1,136 | 802 | 196 | 2 |
{- $Id: AFRPTestsTask.hs,v 1.2 2003/11/10 21:28:58 antony Exp $
******************************************************************************
* A F R P *
* *
* Module: AFRPTestsTask *
* Purpose: Test cases for tasks (AFRPTask) *
* Authors: Antony Courtney and Henrik Nilsson *
* *
* Copyright (c) Yale University, 2003 *
* *
******************************************************************************
-}
-- Very rudimentary testing of AFRPTask.
module AFRPTestsTask (task_tr, task_trs) where
import FRP.Yampa
import FRP.Yampa.Task
import AFRPTestsCommon
------------------------------------------------------------------------------
-- Test cases for tasks (AFRPTask)
------------------------------------------------------------------------------
-- task_t0: run a task until local time reaches 5.0, then snapshot the time
-- and double it.
task_t0 = testSF1 (runTask (do
                                mkTask (localTime
                                        &&&(localTime >>> arr (>=5.0) >>> edge))
                                x <- snapT
                                return (x * 2.0))
                  )
-- Expected: Left (still running) until 5.0 s, then Right (2 * 20.0).
task_t0r =
    [Left 0.0, Left 0.25, Left 0.5, Left 0.75, Left 1.0,
     Left 1.25, Left 1.5, Left 1.75, Left 2.0, Left 2.25,
     Left 2.5, Left 2.75, Left 3.0, Left 3.25, Left 3.5,
     Left 3.75, Left 4.0, Left 4.25, Left 4.5, Left 4.75,
     Right 40.0, Right 40.0, Right 40.0, Right 40.0, Right 40.0]
-- task_t1: identical to task_t0, but interleaved with no-ops; none of the
-- extra returns/snapshots may consume time, so results must equal task_t0r.
task_t1 = testSF1 (runTask (do
                                mkTask (localTime
                                        &&& (localTime>>>arr (>=5.0) >>> edge))
                                return () -- No time should pass!
                                return () -- No time should pass!
                                snapT -- No time should pass!
                                snapT -- No time should pass!
                                x <- snapT
                                return (x * 2.0))
                  )
-- task_t2: chained sleeps; snapT twice without time passing forces x == y,
-- so the final branch sleeps outputting x*2 = 14.0.
task_t2 = testSF1 (runTask (do
                                sleepT 1.51 42.0
                                x <- snapT
                                y <- snapT
                                sleepT 1.51 x
                                if x == y then
                                    sleepT 1.51 (x * 2)
                                else
                                    sleepT 0.51 (x * 3)
                           )
                  )
task_t2r =
    [Left 42.0, Left 42.0, Left 42.0, Left 42.0, -- 0.0 s
     Left 42.0, Left 42.0, Left 42.0, Left 7.0, -- 1.0 s
     Left 7.0, Left 7.0, Left 7.0, Left 7.0, -- 2.0 s
     Left 7.0, Left 7.0, Left 14.0, Left 14.0, -- 3.0 s
     Left 14.0, Left 14.0, Left 14.0, Left 14.0, -- 4.0 s
     Left 14.0, Right (), Right (), Right (), -- 5.0 s
     Right ()]
-- task_t3: an endless sawtooth aborted by timeOut; forEver never finishes,
-- so the timeout yields Nothing and the task sleeps at -10.0.
task_t3 = testSF1 (runTask (do
                                c <- sawtooth `timeOut` 3.49
                                case c of
                                    Nothing -> sleepT 1.51 (-10.0)
                                    Just x -> sleepT 1.51 x
                            )
                  )
    where
        sawtooth =
            forEver ((mkTask (constant 2.0 >>> integral &&& never))
                     `timeOut` 1.5)
task_t3r :: [Either Double ()]
task_t3r =
    [Left 0.0, Left 0.5, Left 1.0, Left 1.5, -- 0.0 s
     Left 2.0, Left 2.5, Left 0.0, Left 0.5, -- 1.0 s
     Left 1.0, Left 1.5, Left 2.0, Left 2.5, -- 2.0 s
     Left 0.0, Left 0.5, Left (-10.0), Left (-10.0), -- 3.0 s
     Left (-10.0), Left (-10.0), Left (-10.0), Left (-10.0), -- 4.0 s
     Left (-10.0), Right (), Right (), Right (), -- 5.0 s
     Right ()]
-- task_t4: like task_t3, but the sawtooth runs only twice (via for) and then
-- returns -42.0, so the timeOut delivers Just (-42.0).
task_t4 = testSF1 (runTask (do
                                c <- sawtooth `timeOut` 3.49
                                case c of
                                    Nothing -> sleepT 1.51 (-10.0)
                                    Just x -> sleepT 1.51 x
                            )
                  )
    where
        sawtooth = do
            for 1 (+1) (<=2)
                ((mkTask (constant 2.0 >>> integral &&& never))
                 `timeOut` 1.5)
            return (-42.0)
task_t4r :: [Either Double ()]
task_t4r =
    [Left 0.0, Left 0.5, Left 1.0, Left 1.5, -- 0.0 s
     Left 2.0, Left 2.5, Left 0.0, Left 0.5, -- 1.0 s
     Left 1.0, Left 1.5, Left 2.0, Left 2.5, -- 2.0 s
     Left (-42.0), Left (-42.0), Left (-42.0), Left (-42.0), -- 3.0 s
     Left (-42.0), Left (-42.0), Left (-42.0), Right (), -- 4.0 s
     Right (), Right (), Right (), Right (), -- 5.0 s
     Right ()]
-- task_t5: repeatUntil keeps re-running the sawtooth cycle until the
-- snapshot reaches 20.0; a final snapshot must agree with the loop result.
task_t5 = testSF1 (runTask (do
                                x<-(sawtoothCycle>>snapT) `repeatUntil` (>=20.0)
                                y<-snapT
                                return (x == y)
                            )
                  )
    where
        sawtoothCycle = mkTask (constant 2.0 >>> integral &&& after 1.5 ())
task_t5r :: [Either Double Bool]
task_t5r =
    [Left 0.0, Left 0.5, Left 1.0, Left 1.5, -- 0.0 s, 0 - 3
     Left 2.0, Left 2.5, Left 0.0, Left 0.5, -- 1.0 s, 4 - 7
     Left 1.0, Left 1.5, Left 2.0, Left 2.5, -- 2.0 s, 8 - 11
     Left 0.0, Left 0.5, Left 1.0, Left 1.5, -- 3.0 s, 12 - 15
     Left 2.0, Left 2.5, Left 0.0, Left 0.5, -- 4.0 s, 16 - 19,
     Left 1.0, Left 1.5, Left 2.0, Left 2.5, -- 5.0 s, 20 - 23
     Right True]
-- task_t6: the repeating sawtooth is aborted at 3.51 s, so the loop result
-- is Right () (aborted) and the snapshot reads 15.0.
task_t6 = testSF1 $ runTask $
    do
        x <- ((sawtoothCycle >> snapT) `repeatUntil` (>=20.0))
             `abortWhen` (localTime >>> arr (>=3.51) >>> edge)
        y <- snapT
        return (x,y)
    where
        sawtoothCycle = mkTask (constant 2.0 >>> integral &&& after 1.5 ())
task_t6r :: [Either Double (Either Double (), Double)]
task_t6r =
    [Left 0.0, Left 0.5, Left 1.0, Left 1.5, -- 0.0 s, 0 - 3
     Left 2.0, Left 2.5, Left 0.0, Left 0.5, -- 1.0 s, 4 - 7
     Left 1.0, Left 1.5, Left 2.0, Left 2.5, -- 2.0 s, 8 - 11
     Left 0.0, Left 0.5, Left 1.0, Right (Right (),15.0), -- 3.0 s, 12 - 15
     Right (Right (),15.0), Right (Right (),15.0), -- 4.0 s, 16, 17
     Right (Right (),15.0), Right (Right (),15.0), -- 4.5 s, 18, 19
     Right (Right (),15.0), Right (Right (),15.0), -- 5.0 s, 20, 21
     Right (Right (),15.0), Right (Right (),15.0), -- 5.5 s, 22, 23
     Right (Right (),15.0)]
-- task_t7: same as task_t6 but aborting later (5.75 s), just before the
-- loop would terminate on its own.
task_t7 = testSF1 $ runTask $
    do
        x <- ((sawtoothCycle >> snapT) `repeatUntil` (>=20.0))
             `abortWhen` (localTime >>> arr (>=5.75) >>> edge)
        y <- snapT
        return (x,y)
    where
        sawtoothCycle = mkTask (constant 2.0 >>> integral &&& after 1.5 ())
task_t7r :: [Either Double (Either Double (), Double)]
task_t7r =
    [Left 0.0, Left 0.5, Left 1.0, Left 1.5, -- 0.0 s, 0 - 3
     Left 2.0, Left 2.5, Left 0.0, Left 0.5, -- 1.0 s, 4 - 7
     Left 1.0, Left 1.5, Left 2.0, Left 2.5, -- 2.0 s, 8 - 11
     Left 0.0, Left 0.5, Left 1.0, Left 1.5, -- 3.0 s, 12 - 15
     Left 2.0, Left 2.5, Left 0.0, Left 0.5, -- 4.0 s, 16 - 19
     Left 1.0, Left 1.5, Left 2.0, Right (Right (),23.0), -- 5.0 s, 20 - 23
     Right (Right (),23.0)]
-- task_t8: the abort fires infinitesimally after the loop's own termination,
-- so the task result is Left 24.0 instead of an abort.
task_t8 = testSF1 $ runTask $
    do
        x <- ((sawtoothCycle >> snapT) `repeatUntil` (>=20.0))
             `abortWhen` (localTime >>> arr (>=5.76) >>> edge)
        y <- snapT
        return (x,y)
    where
        sawtoothCycle = mkTask (constant 2.0 >>> integral &&& after 1.5 ())
-- Since abortWhen uses lMergeEvent, the terminating event of the task
-- gets priority over the aborting event.
task_t8r :: [Either Double (Either Double (), Double)]
task_t8r =
    [Left 0.0, Left 0.5, Left 1.0, Left 1.5, -- 0.0 s, 0 - 3
     Left 2.0, Left 2.5, Left 0.0, Left 0.5, -- 1.0 s, 4 - 7
     Left 1.0, Left 1.5, Left 2.0, Left 2.5, -- 2.0 s, 8 - 11
     Left 0.0, Left 0.5, Left 1.0, Left 1.5, -- 3.0 s, 12 - 15
     Left 2.0, Left 2.5, Left 0.0, Left 0.5, -- 4.0 s, 16 - 19
     Left 1.0, Left 1.5, Left 2.0, Left 2.5, -- 5.0 s, 20 - 23
     Right (Left 24.0,24.0)]
-- All test outcomes; task_t1 intentionally shares task_t0's expectation.
task_trs =
    [ task_t0 ~= task_t0r,
      task_t1 ~= task_t0r, -- Intentionally! task_t0 = task_t1!
      task_t2 ~= task_t2r,
      task_t3 ~= task_t3r,
      task_t4 ~= task_t4r,
      task_t5 ~= task_t5r,
      task_t6 ~= task_t6r,
      task_t7 ~= task_t7r,
      task_t8 ~= task_t8r
    ]
-- Overall verdict for the task test group.
task_tr = and task_trs
| ony/Yampa-core | tests/AFRPTestsTask.hs | bsd-3-clause | 7,676 | 47 | 18 | 2,590 | 2,700 | 1,426 | 1,274 | 151 | 2 |
module SubFun3 where
--Any unused parameter to a definition can be removed.
--In this example: remove x. The brackets enclosing 'foo' will also be removed.
-- NOTE(review): this module is HaRe refactoring test data; the unused
-- parameter 'x' of 'foo' is intentional -- it is exactly what the
-- "remove unused parameter" refactoring under test is expected to delete.
main :: IO Int
main = do
  let
    foo x = h + t where (h,t) = head $ zip [1..10] [3..10]
  return $ (foo 4) + (foo 5)
| RefactoringTools/HaRe | test/testdata/RmOneParameter/SubFun3.hs | bsd-3-clause | 278 | 0 | 14 | 65 | 91 | 48 | 43 | 6 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE MultiWayIf #-}
module TwinkleBear.Lesson05 (main) where
import Prelude hiding (init)
import Control.Applicative
import Control.Monad
import Data.Monoid
import Foreign.C.Types
import Linear
import Linear.Affine ( Point(P) )
import qualified SDL
import Paths_sdl2 (getDataFileName)
#if !MIN_VERSION_base(4,8,0)
import Data.Foldable
#endif
-- Window size in pixels.
screenWidth, screenHeight :: CInt
screenWidth  = 640
screenHeight = 480
-- Size of a single sprite cell inside the sprite sheet, in pixels.
spriteWidth, spriteHeight :: CInt
spriteWidth  = 100
spriteHeight = 100
-- | Optional source rectangle selecting a sub-area of a texture.
type ClipRect = Maybe (SDL.Rectangle CInt)
-- | Where to draw a texture: centred in the window or at an explicit point.
data RenderPos = Centered | At (Point V2 CInt)
-- | Load a BMP file and upload it as a texture; the intermediate surface
--   is freed once the texture has been created.
loadTexture :: SDL.Renderer -> FilePath -> IO SDL.Texture
loadTexture renderer path = do
  surface <- SDL.loadBMP path
  texture <- SDL.createTextureFromSurface renderer surface
  SDL.freeSurface surface
  pure texture
-- | Draw a texture (optionally clipped) at the requested position,
--   at its natural size.
renderTexture :: SDL.Renderer -> SDL.Texture -> ClipRect -> RenderPos -> IO ()
renderTexture renderer tex clipRect pos = do
  info <- SDL.queryTexture tex
  let w = SDL.textureWidth info
      h = SDL.textureHeight info
      -- destination top-left corner
      topLeft = case pos of
        At p     -> p
        Centered -> P (V2 ((screenWidth - w) `div` 2) ((screenHeight - h) `div` 2))
  SDL.copy renderer tex clipRect (Just (SDL.Rectangle topLeft (V2 w h)))
main :: IO ()
main = do
  SDL.initialize [ SDL.InitVideo ]
  let winConfig = SDL.defaultWindow { SDL.windowInitialSize = V2 screenWidth screenHeight }
  window <- SDL.createWindow "Lesson 5" winConfig
  renderer <- SDL.createRenderer window (-1) SDL.defaultRenderer
  spriteSheet <- getDataFileName "examples/twinklebear/spritesheet.bmp" >>= loadTexture renderer
  -- The 2x2 sprite sheet yields four equally-sized clip rectangles.
  let [spriteOne, spriteTwo, spriteThree, spriteFour] =
        [ SDL.Rectangle (P (V2 (x * spriteWidth) (y * spriteHeight))) (V2 spriteWidth spriteHeight)
        | x <- [0..1], y <- [0..1] ]
  let loop spriteRect = do
        -- Drain the whole event queue for this frame.
        let collectEvents = do
              e <- SDL.pollEvent
              case e of
                Nothing -> return []
                Just e' -> (e' :) <$> collectEvents
        events <- collectEvents
        -- Fold events into two monoids: Any records quit requests
        -- (window close or Q), Last keeps the latest sprite choice (keys 1-4).
        let (Any quit, Last newSpriteRect) =
              foldMap (\case
                SDL.QuitEvent -> (Any True, mempty)
                SDL.KeyboardEvent e ->
                  if | SDL.keyboardEventKeyMotion e == SDL.Pressed ->
                         let scancode = SDL.keysymScancode (SDL.keyboardEventKeysym e)
                         in if | scancode == SDL.Scancode1 -> (Any False, Last (Just spriteOne))
                               | scancode == SDL.Scancode2 -> (Any False, Last (Just spriteTwo))
                               | scancode == SDL.Scancode3 -> (Any False, Last (Just spriteThree))
                               | scancode == SDL.Scancode4 -> (Any False, Last (Just spriteFour))
                               | scancode == SDL.ScancodeQ -> (Any True, mempty)
                               | otherwise -> mempty
                     | otherwise -> mempty
                _ -> mempty) $
              map SDL.eventPayload events
            -- keep the previous sprite when no selection key was pressed
            spriteRect' = newSpriteRect <|> spriteRect
        SDL.clear renderer
        renderTexture renderer spriteSheet spriteRect' Centered
        SDL.present renderer
        unless quit $ loop spriteRect'
  loop $ Just spriteOne
  SDL.destroyRenderer renderer
  SDL.destroyWindow window
  SDL.quit
| dalaing/sdl2 | examples/twinklebear/Lesson05.hs | bsd-3-clause | 3,456 | 0 | 30 | 983 | 1,085 | 544 | 541 | 77 | 10 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.