code
stringlengths 2
1.05M
| repo_name
stringlengths 5
101
| path
stringlengths 4
991
| language
stringclasses 3
values | license
stringclasses 5
values | size
int64 2
1.05M
|
|---|---|---|---|---|---|
-----------------------------------------------------------------------------
-- |
-- Module : Application.HXournal.Type.Event
-- Copyright : (c) 2011, 2012 Ian-Woo Kim
--
-- License : BSD3
-- Maintainer : Ian-Woo Kim <ianwookim@gmail.com>
-- Stability : experimental
-- Portability : GHC
--
-----------------------------------------------------------------------------
module Application.HXournal.Type.Event where
import Application.HXournal.Type.Enum
import Application.HXournal.Device
import Graphics.UI.Gtk
-- | The low-level UI events handled by the application: pen\/pointer
-- input, scrollbar and pane motion, view-mode switches and menu
-- activations.  The 'Int' in the canvas\/pen constructors is a canvas
-- identifier (assumption from usage pattern — confirm against callers).
data MyEvent = Initialized
             | CanvasConfigure Int Double Double
             | UpdateCanvas Int
             | PenDown Int PenButton PointerCoord
             | PenMove Int PointerCoord
             | PenUp Int PointerCoord
             | PenColorChanged PenColor
             | PenWidthChanged Double
             | HScrollBarMoved Int Double
             | VScrollBarMoved Int Double
             | VScrollBarStart Int Double
             | VScrollBarEnd Int Double
             | PaneMoveStart
             | PaneMoveEnd
             | ToViewAppendMode
             | ToSelectMode
             | ToSinglePage
             | ToContSinglePage
             | Menu MenuEvent -- ^ wrapper around all menu-item events
             deriving (Show,Eq,Ord)
-- | One constructor per menu item; these are delivered wrapped in the
-- 'Menu' constructor of 'MyEvent' when the corresponding item is
-- activated.
data MenuEvent = MenuNew
               | MenuAnnotatePDF
               | MenuOpen
               | MenuSave
               | MenuSaveAs
               | MenuRecentDocument
               | MenuPrint
               | MenuExport
               | MenuQuit
               | MenuUndo
               | MenuRedo
               | MenuCut
               | MenuCopy
               | MenuPaste
               | MenuDelete
               -- network clipboard entries are currently disabled:
               -- | MenuNetCopy
               -- | MenuNetPaste
               | MenuFullScreen
               | MenuZoom
               | MenuZoomIn
               | MenuZoomOut
               | MenuNormalSize
               | MenuPageWidth
               | MenuPageHeight
               | MenuSetZoom
               | MenuFirstPage
               | MenuPreviousPage
               | MenuNextPage
               | MenuLastPage
               | MenuShowLayer
               | MenuHideLayer
               | MenuHSplit
               | MenuVSplit
               | MenuDelCanvas
               | MenuNewPageBefore
               | MenuNewPageAfter
               | MenuNewPageAtEnd
               | MenuDeletePage
               | MenuNewLayer
               | MenuNextLayer
               | MenuPrevLayer
               | MenuGotoLayer
               | MenuDeleteLayer
               | MenuPaperSize
               | MenuPaperColor
               | MenuPaperStyle
               | MenuApplyToAllPages
               | MenuLoadBackground
               | MenuBackgroundScreenshot
               | MenuDefaultPaper
               | MenuSetAsDefaultPaper
               | MenuShapeRecognizer
               | MenuRuler
               | MenuSelectRegion
               | MenuSelectRectangle
               | MenuVerticalSpace
               | MenuHandTool
               | MenuPenOptions
               | MenuEraserOptions
               | MenuHighlighterOptions
               | MenuTextFont
               | MenuDefaultPen
               | MenuDefaultEraser
               | MenuDefaultHighlighter
               | MenuDefaultText
               | MenuSetAsDefaultOption
               | MenuRelaunch
               | MenuUseXInput
               | MenuDiscardCoreEvents
               | MenuEraserTip
               | MenuPressureSensitivity
               | MenuPageHighlight
               | MenuMultiplePageView
               | MenuMultiplePages
               | MenuButton2Mapping
               | MenuButton3Mapping
               | MenuAntialiasedBitmaps
               | MenuProgressiveBackgrounds
               | MenuPrintPaperRuling
               | MenuLeftHandedScrollbar
               | MenuShortenMenus
               | MenuAutoSavePreferences
               | MenuSavePreferences
               | MenuAbout
               | MenuDefault -- ^ fallback\/no-op menu event
               deriving (Show, Ord, Eq)
-- | Translate the current value of the view-mode radio action into
-- the corresponding 'MyEvent': value 0 selects the continuous
-- single-page view, any other value the plain single-page view.
viewModeToMyEvent :: RadioAction -> IO MyEvent
viewModeToMyEvent act = fmap interpret (radioActionGetCurrentValue act)
  where interpret 0 = ToContSinglePage
        interpret _ = ToSinglePage
|
wavewave/hxournal
|
lib/Application/HXournal/Type/Event.hs
|
Haskell
|
bsd-2-clause
| 4,348
|
{-| Implementation of cluster-wide logic.
This module holds all pure cluster-logic; I\/O related functionality
goes into the /Main/ module for the individual binaries.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.HTools.Cluster
(
-- * Types
AllocDetails(..)
, AllocSolution(..)
, EvacSolution(..)
, Table(..)
, CStats(..)
, AllocNodes
, AllocResult
, AllocMethod
, AllocSolutionList
-- * Generic functions
, totalResources
, computeAllocationDelta
-- * First phase functions
, computeBadItems
-- * Second phase functions
, printSolutionLine
, formatCmds
, involvedNodes
, getMoves
, splitJobs
-- * Display functions
, printNodes
, printInsts
-- * Balancing functions
, doNextBalance
, tryBalance
, compCV
, compCVNodes
, compDetailedCV
, printStats
, iMoveToJob
-- * IAllocator functions
, genAllocNodes
, tryAlloc
, tryGroupAlloc
, tryMGAlloc
, tryNodeEvac
, tryChangeGroup
, collapseFailures
, allocList
-- * Allocation functions
, iterateAlloc
, tieredAlloc
-- * Node group functions
, instanceGroup
, findSplitInstances
, splitCluster
) where
import Control.Applicative ((<$>), liftA2)
import Control.Arrow ((&&&))
import Control.Monad (unless)
import qualified Data.IntSet as IntSet
import Data.List
import Data.Maybe (fromJust, fromMaybe, isJust, isNothing)
import Data.Ord (comparing)
import Text.Printf (printf)
import Ganeti.BasicTypes
import Ganeti.HTools.AlgorithmParams (AlgorithmOptions(..), defaultOptions)
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Instance as Instance
import qualified Ganeti.HTools.Nic as Nic
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Group as Group
import Ganeti.HTools.Types
import Ganeti.Compat
import qualified Ganeti.OpCodes as OpCodes
import Ganeti.Utils
import Ganeti.Utils.Statistics
import Ganeti.Types (EvacMode(..), mkNonEmpty)
-- * Types
-- | Allocation details for an instance: the required number of
-- nodes, and an optional group (name) to allocate to.
data AllocDetails = AllocDetails Int (Maybe String)
                    deriving (Show)
-- | Allocation\/relocation solution.  Accumulated by 'concatAllocs'
-- and merged across groups by 'sumAllocs'.
data AllocSolution = AllocSolution
  { asFailures :: [FailMode]              -- ^ Failure counts
  , asAllocs   :: Int                     -- ^ Good allocation count
  , asSolution :: Maybe Node.AllocElement -- ^ The actual allocation result
  , asLog      :: [String]                -- ^ Informational messages
  }
-- | Node evacuation/group change iallocator result type. This result
-- type consists of actual opcodes (a restricted subset) that are
-- transmitted back to Ganeti.
data EvacSolution = EvacSolution
  { esMoved   :: [(Idx, Gdx, [Ndx])]  -- ^ Instances moved successfully
  , esFailed  :: [(Idx, String)]      -- ^ Instances which were not
                                      -- relocated
  , esOpCodes :: [[OpCodes.OpCode]]   -- ^ List of jobs
  } deriving (Show)
-- | Allocation results, as used in 'iterateAlloc' and 'tieredAlloc'.
type AllocResult = (FailStats, Node.List, Instance.List,
                    [Instance.Instance], [CStats])

-- | Type alias for easier handling: each instance paired with the
-- solution computed for it.
type AllocSolutionList = [(Instance.Instance, AllocSolution)]

-- | A type denoting the valid allocation mode/pairs.
--
-- For a one-node allocation, this will be a @Left ['Ndx']@, whereas
-- for a two-node allocation, this will be a @Right [('Ndx',
-- ['Ndx'])]@. In the latter case, the list is basically an
-- association list, grouped by primary node and holding the potential
-- secondary nodes in the sub-list.
type AllocNodes = Either [Ndx] [(Ndx, [Ndx])]
-- | The empty solution we start with when computing allocations:
-- no failures, no successful allocations, no result, no log.
emptyAllocSolution :: AllocSolution
emptyAllocSolution = AllocSolution [] 0 Nothing []

-- | The empty evac solution: nothing moved, nothing failed, no jobs.
emptyEvacSolution :: EvacSolution
emptyEvacSolution = EvacSolution [] [] []
-- | The complete state for the balancing solution: node and instance
-- maps, the current cluster score, and the placements applied so far
-- (newest first, as built by 'checkSingleStep').
data Table = Table Node.List Instance.List Score [Placement]
             deriving (Show)
-- | Cluster statistics data type, accumulated node-by-node via
-- 'updateCStats' starting from 'emptyCStats'.
data CStats = CStats
  { csFmem  :: Integer -- ^ Cluster free mem
  , csFdsk  :: Integer -- ^ Cluster free disk
  , csFspn  :: Integer -- ^ Cluster free spindles
  , csAmem  :: Integer -- ^ Cluster allocatable mem
  , csAdsk  :: Integer -- ^ Cluster allocatable disk
  , csAcpu  :: Integer -- ^ Cluster allocatable cpus
  , csMmem  :: Integer -- ^ Max node allocatable mem
  , csMdsk  :: Integer -- ^ Max node allocatable disk
  , csMcpu  :: Integer -- ^ Max node allocatable cpu
  , csImem  :: Integer -- ^ Instance used mem
  , csIdsk  :: Integer -- ^ Instance used disk
  , csIspn  :: Integer -- ^ Instance used spindles
  , csIcpu  :: Integer -- ^ Instance used cpu
  , csTmem  :: Double  -- ^ Cluster total mem
  , csTdsk  :: Double  -- ^ Cluster total disk
  , csTspn  :: Double  -- ^ Cluster total spindles
  , csTcpu  :: Double  -- ^ Cluster total cpus
  , csVcpu  :: Integer -- ^ Cluster total virtual cpus
  , csNcpu  :: Double  -- ^ Equivalent to 'csIcpu' but in terms of
                       -- physical CPUs, i.e. normalised used phys CPUs
  , csXmem  :: Integer -- ^ Unaccounted for mem
  , csNmem  :: Integer -- ^ Node own memory
  , csScore :: Score   -- ^ The cluster score
  , csNinst :: Int     -- ^ The total number of instances
  } deriving (Show)
-- | A simple type for allocation functions.  Implemented by
-- 'iterateAlloc' and 'tieredAlloc'.
type AllocMethod =  Node.List           -- ^ Node list
                 -> Instance.List       -- ^ Instance list
                 -> Maybe Int           -- ^ Optional allocation limit
                 -> Instance.Instance   -- ^ Instance spec for allocation
                 -> AllocNodes          -- ^ Which nodes we should allocate on
                 -> [Instance.Instance] -- ^ Allocated instances
                 -> [CStats]            -- ^ Running cluster stats
                 -> Result AllocResult  -- ^ Allocation result

-- | A simple type for the running solution of evacuations: either an
-- error message, or the updated state plus the score and chosen node.
type EvacInnerState =
  Either String (Node.List, Instance.Instance, Score, Ndx)
-- * Utility functions
-- | Verifies the N+1 status and return the affected nodes.
verifyN1 :: [Node.Node] -> [Node.Node]
verifyN1 nodes = [ n | n <- nodes, Node.failN1 n ]
{-| Computes the pair of bad nodes and instances.

The bad node list is computed via a simple 'verifyN1' check, and the
bad instance list is the list of primary and secondary instances of
those nodes.

-}
computeBadItems :: Node.List -> Instance.List ->
                   ([Node.Node], [Instance.Instance])
computeBadItems nl il =
  let bad_nodes = verifyN1 $ getOnline nl
      -- sort-then-group yields the sorted, de-duplicated index list in
      -- O(n log n), replacing the original quadratic 'sort . nub';
      -- 'head' is safe here because 'group' never produces empty runs
      bad_instances = map (`Container.find` il) .
                      map head . group . sort $
                      concatMap (\ n -> Node.sList n ++ Node.pList n) bad_nodes
  in
    (bad_nodes, bad_instances)
-- | Extracts the node pairs for an instance. This can fail if the
-- instance is single-homed. FIXME: this needs to be improved,
-- together with the general enhancement for handling non-DRBD moves.
instanceNodes :: Node.List -> Instance.Instance ->
                 (Ndx, Ndx, Node.Node, Node.Node)
instanceNodes nl inst =
  (pdx, sdx, Container.find pdx nl, Container.find sdx nl)
  where pdx = Instance.pNode inst
        sdx = Instance.sNode inst
-- | Zero-initializer for the CStats type: all 23 fields (one per
-- record field, in declaration order) start at zero.
emptyCStats :: CStats
emptyCStats = CStats 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
-- | Update stats with data from a new node.
--
-- Sums (or takes the max of, for the @csM*@ fields) the node's
-- resource figures into the running totals.  Note that 'csScore' is
-- not touched here; it is filled in afterwards by 'totalResources'.
updateCStats :: CStats -> Node.Node -> CStats
updateCStats cs node =
  let CStats { csFmem = x_fmem, csFdsk = x_fdsk,
               csAmem = x_amem, csAcpu = x_acpu, csAdsk = x_adsk,
               csMmem = x_mmem, csMdsk = x_mdsk, csMcpu = x_mcpu,
               csImem = x_imem, csIdsk = x_idsk, csIcpu = x_icpu,
               csTmem = x_tmem, csTdsk = x_tdsk, csTcpu = x_tcpu,
               csVcpu = x_vcpu, csNcpu = x_ncpu,
               csXmem = x_xmem, csNmem = x_nmem, csNinst = x_ninst,
               csFspn = x_fspn, csIspn = x_ispn, csTspn = x_tspn
             }
        = cs
      -- free minus reserved memory; clamped below so a negative
      -- value never decreases the allocatable total
      inc_amem = Node.fMem node - Node.rMem node
      inc_amem' = if inc_amem > 0 then inc_amem else 0
      inc_adsk = Node.availDisk node
      inc_imem = truncate (Node.tMem node) - Node.nMem node
                 - Node.xMem node - Node.fMem node
      inc_icpu = Node.uCpu node
      inc_idsk = truncate (Node.tDsk node) - Node.fDsk node
      inc_ispn = Node.tSpindles node - Node.fSpindles node
      inc_vcpu = Node.hiCpu node
      inc_acpu = Node.availCpu node
      -- used CPUs normalised by the policy's vcpu ratio
      inc_ncpu = fromIntegral (Node.uCpu node) /
                 iPolicyVcpuRatio (Node.iPolicy node)
  in cs { csFmem = x_fmem + fromIntegral (Node.fMem node)
        , csFdsk = x_fdsk + fromIntegral (Node.fDsk node)
        , csFspn = x_fspn + fromIntegral (Node.fSpindles node)
        , csAmem = x_amem + fromIntegral inc_amem'
        , csAdsk = x_adsk + fromIntegral inc_adsk
        , csAcpu = x_acpu + fromIntegral inc_acpu
        , csMmem = max x_mmem (fromIntegral inc_amem')
        , csMdsk = max x_mdsk (fromIntegral inc_adsk)
        , csMcpu = max x_mcpu (fromIntegral inc_acpu)
        , csImem = x_imem + fromIntegral inc_imem
        , csIdsk = x_idsk + fromIntegral inc_idsk
        , csIspn = x_ispn + fromIntegral inc_ispn
        , csIcpu = x_icpu + fromIntegral inc_icpu
        , csTmem = x_tmem + Node.tMem node
        , csTdsk = x_tdsk + Node.tDsk node
        , csTspn = x_tspn + fromIntegral (Node.tSpindles node)
        , csTcpu = x_tcpu + Node.tCpu node
        , csVcpu = x_vcpu + fromIntegral inc_vcpu
        , csNcpu = x_ncpu + inc_ncpu
        , csXmem = x_xmem + fromIntegral (Node.xMem node)
        , csNmem = x_nmem + fromIntegral (Node.nMem node)
        , csNinst = x_ninst + length (Node.pList node)
        }
-- | Compute the total free disk and memory in the cluster, by folding
-- 'updateCStats' over all nodes and then filling in the cluster score.
totalResources :: Node.List -> CStats
totalResources nl =
  let accumulated = foldl' updateCStats emptyCStats (Container.elems nl)
  in accumulated { csScore = compCV nl }
-- | Compute the delta between two cluster state.
--
-- This is used when doing allocations, to understand better the
-- available cluster resources. The return value is a triple of the
-- current used values, the delta that was still allocated, and what
-- was left unallocated.
computeAllocationDelta :: CStats -> CStats -> AllocStats
computeAllocationDelta cini cfin =
  let CStats {csImem = i_imem, csIdsk = i_idsk, csIcpu = i_icpu,
              csNcpu = i_ncpu, csIspn = i_ispn } = cini
      CStats {csImem = f_imem, csIdsk = f_idsk, csIcpu = f_icpu,
              csTmem = t_mem, csTdsk = t_dsk, csVcpu = f_vcpu,
              csNcpu = f_ncpu, csTcpu = f_tcpu,
              csIspn = f_ispn, csTspn = t_spn } = cfin
      -- resources in use at the initial state
      rini = AllocInfo { allocInfoVCpus = fromIntegral i_icpu
                       , allocInfoNCpus = i_ncpu
                       , allocInfoMem   = fromIntegral i_imem
                       , allocInfoDisk  = fromIntegral i_idsk
                       , allocInfoSpn   = fromIntegral i_ispn
                       }
      -- resources allocated between the two states (final - initial)
      rfin = AllocInfo { allocInfoVCpus = fromIntegral (f_icpu - i_icpu)
                       , allocInfoNCpus = f_ncpu - i_ncpu
                       , allocInfoMem   = fromIntegral (f_imem - i_imem)
                       , allocInfoDisk  = fromIntegral (f_idsk - i_idsk)
                       , allocInfoSpn   = fromIntegral (f_ispn - i_ispn)
                       }
      -- resources remaining unallocated (totals minus final usage)
      runa = AllocInfo { allocInfoVCpus = fromIntegral (f_vcpu - f_icpu)
                       , allocInfoNCpus = f_tcpu - f_ncpu
                       , allocInfoMem   = truncate t_mem - fromIntegral f_imem
                       , allocInfoDisk  = truncate t_dsk - fromIntegral f_idsk
                       , allocInfoSpn   = truncate t_spn - fromIntegral f_ispn
                       }
  in (rini, rfin, runa)
-- | The names and weights of the individual elements in the CV list, together
-- with their statistical accumulation function and a bit to decide whether it
-- is a statistics for online nodes.
--
-- The order of this list must stay in sync with the metric order
-- produced by 'compDetailedCVNode'.
detailedCVInfoExt :: [((Double, String), ([Double] -> Statistics, Bool))]
detailedCVInfoExt = [ ((1,  "free_mem_cv"), (getStdDevStatistics, True))
                    , ((1,  "free_disk_cv"), (getStdDevStatistics, True))
                    , ((1,  "n1_cnt"), (getSumStatistics, True))
                    , ((1,  "reserved_mem_cv"), (getStdDevStatistics, True))
                    , ((4,  "offline_all_cnt"), (getSumStatistics, False))
                    , ((16, "offline_pri_cnt"), (getSumStatistics, False))
                    , ((1,  "vcpu_ratio_cv"), (getStdDevStatistics, True))
                    , ((1,  "cpu_load_cv"), (getStdDevStatistics, True))
                    , ((1,  "mem_load_cv"), (getStdDevStatistics, True))
                    , ((1,  "disk_load_cv"), (getStdDevStatistics, True))
                    , ((1,  "net_load_cv"), (getStdDevStatistics, True))
                    , ((2,  "pri_tags_score"), (getSumStatistics, True))
                    , ((1,  "spindles_cv"), (getStdDevStatistics, True))
                    ]
-- | The names and weights of the individual elements in the CV list.
detailedCVInfo :: [(Double, String)]
detailedCVInfo = fmap fst detailedCVInfoExt

-- | Holds the weights used by 'compCVNodes' for each metric.
detailedCVWeights :: [Double]
detailedCVWeights = fmap fst detailedCVInfo

-- | The aggregation functions for the weights.
detailedCVAggregation :: [([Double] -> Statistics, Bool)]
detailedCVAggregation = fmap snd detailedCVInfoExt

-- | The bit vector describing which parts of the statistics are
-- for online nodes.
detailedCVOnlineStatus :: [Bool]
detailedCVOnlineStatus = fmap snd detailedCVAggregation
-- | Compute statistical measures of a single node.
--
-- The result order must match 'detailedCVInfoExt' (free mem, free
-- disk, N+1 count, reserved mem, offline-all, offline-pri, vcpu
-- ratio, the four dynamic loads, primary tags and spindles).
compDetailedCVNode :: Node.Node -> [Double]
compDetailedCVNode node =
  let mem = Node.pMem node
      dsk = Node.pDsk node
      -- instances endangered by this node failing N+1
      n1 = fromIntegral
           $ if Node.failN1 node
               then length (Node.sList node) + length (Node.pList node)
               else 0
      res = Node.pRem node
      ipri = fromIntegral . length $ Node.pList node
      isec = fromIntegral . length $ Node.sList node
      ioff = ipri + isec
      cpu = Node.pCpuEff node
      DynUtil c1 m1 d1 nn1 = Node.utilLoad node
      DynUtil c2 m2 d2 nn2 = Node.utilPool node
      -- dynamic load ratios: current utilisation over the pool
      (c_load, m_load, d_load, n_load) = (c1/c2, m1/m2, d1/d2, nn1/nn2)
      pri_tags = fromIntegral $ Node.conflictingPrimaries node
      spindles = Node.instSpindles node / Node.hiSpindles node
  in [ mem, dsk, n1, res, ioff, ipri, cpu
     , c_load, m_load, d_load, n_load
     , pri_tags, spindles
     ]
-- | Compute the statistics of a cluster.
--
-- Each metric is aggregated either over the online or the offline
-- nodes, depending on its flag in 'detailedCVAggregation'.
compClusterStatistics :: [Node.Node] -> [Statistics]
compClusterStatistics all_nodes =
  let (offline, nodes) = partition Node.offline all_nodes
      offline_values = transpose (map compDetailedCVNode offline)
                       ++ repeat []
      -- transpose of an empty list is empty and not k times the empty list, as
      -- would be the transpose of a 0 x k matrix; the appended 'repeat []'
      -- guarantees a value for every metric position
      online_values = transpose $ map compDetailedCVNode nodes
      aggregate (f, True) (onNodes, _) = f onNodes
      aggregate (f, False) (_, offNodes) = f offNodes
  in zipWith aggregate detailedCVAggregation
     $ zip online_values offline_values
-- | Update a cluster statistics by replacing the contribution of one
-- node by that of another.
--
-- Only the statistics whose online\/offline flag matches the old
-- node's status are touched; the rest pass through unchanged.
updateClusterStatistics :: [Statistics]
                        -> (Node.Node, Node.Node) -> [Statistics]
updateClusterStatistics stats (old, new) =
  let update = zip (compDetailedCVNode old) (compDetailedCVNode new)
      online = not $ Node.offline old
      updateStat forOnline stat upd = if forOnline == online
                                        then updateStatistics stat upd
                                        else stat
  in zipWith3 updateStat detailedCVOnlineStatus stats update
-- | Update a cluster statistics twice, applying the first node
-- replacement and then the second on the intermediate result.
updateClusterStatisticsTwice :: [Statistics]
                             -> (Node.Node, Node.Node)
                             -> (Node.Node, Node.Node)
                             -> [Statistics]
updateClusterStatisticsTwice stats firstPair secondPair =
  updateClusterStatistics (updateClusterStatistics stats firstPair) secondPair
-- | Compute cluster statistics.
compDetailedCV :: [Node.Node] -> [Double]
compDetailedCV nodes = map getStatisticValue (compClusterStatistics nodes)

-- | Compute the cluster score from its statistics.
compCVfromStats :: [Statistics] -> Double
compCVfromStats stats =
  sum $ zipWith (*) detailedCVWeights (map getStatisticValue stats)

-- | Compute the /total/ variance.
compCVNodes :: [Node.Node] -> Double
compCVNodes nodes = sum $ zipWith (*) detailedCVWeights (compDetailedCV nodes)

-- | Wrapper over 'compCVNodes' for callers that have a 'Node.List'.
compCV :: Node.List -> Double
compCV nl = compCVNodes (Container.elems nl)

-- | Compute online nodes from a 'Node.List'.
getOnline :: Node.List -> [Node.Node]
getOnline nl = [ n | n <- Container.elems nl, not (Node.offline n) ]
-- * Balancing functions
-- | Compute best table. Note that the ordering of the arguments is
-- important: on a tie the first table wins.
compareTables :: Table -> Table -> Table
compareTables a@(Table _ _ a_cv _) b@(Table _ _ b_cv _)
  | b_cv < a_cv = b
  | otherwise   = a
-- | Applies an instance move to a given node list and instance.
--
-- Each equation handles one 'IMove' variant; on success the result
-- carries the updated node list, the updated instance, and the new
-- primary and secondary node indices, in that order.
applyMoveEx :: Bool -- ^ whether to ignore soft errors
            -> Node.List -> Instance.Instance
            -> IMove -> OpResult (Node.List, Instance.Instance, Ndx, Ndx)
-- Failover (f)
applyMoveEx force nl inst Failover =
  let (old_pdx, old_sdx, old_p, old_s) = instanceNodes nl inst
      int_p = Node.removePri old_p inst
      int_s = Node.removeSec old_s inst
      new_nl = do -- OpResult
        Node.checkMigration old_p old_s
        -- the old secondary becomes the new primary; forcing is
        -- enabled when the old primary is offline
        new_p <- Node.addPriEx (Node.offline old_p || force) int_s inst
        new_s <- Node.addSec int_p inst old_sdx
        let new_inst = Instance.setBoth inst old_sdx old_pdx
        return (Container.addTwo old_pdx new_s old_sdx new_p nl,
                new_inst, old_sdx, old_pdx)
  in new_nl
-- Failover to any (fa)
applyMoveEx force nl inst (FailoverToAny new_pdx) = do
  let (old_pdx, old_sdx, old_pnode, _) = instanceNodes nl inst
      new_pnode = Container.find new_pdx nl
      force_failover = Node.offline old_pnode || force
  Node.checkMigration old_pnode new_pnode
  new_pnode' <- Node.addPriEx force_failover new_pnode inst
  let old_pnode' = Node.removePri old_pnode inst
      inst' = Instance.setPri inst new_pdx
      nl' = Container.addTwo old_pdx old_pnode' new_pdx new_pnode' nl
  return (nl', inst', new_pdx, old_sdx)
-- Replace the primary (f:, r:np, f)
applyMoveEx force nl inst (ReplacePrimary new_pdx) =
  let (old_pdx, old_sdx, old_p, old_s) = instanceNodes nl inst
      tgt_n = Container.find new_pdx nl
      int_p = Node.removePri old_p inst
      int_s = Node.removeSec old_s inst
      force_p = Node.offline old_p || force
      new_nl = do -- OpResult
        -- check that the current secondary can host the instance
        -- during the migration
        Node.checkMigration old_p old_s
        Node.checkMigration old_s tgt_n
        tmp_s <- Node.addPriEx force_p int_s inst
        let tmp_s' = Node.removePri tmp_s inst
        new_p <- Node.addPriEx force_p tgt_n inst
        new_s <- Node.addSecEx force_p tmp_s' inst new_pdx
        let new_inst = Instance.setPri inst new_pdx
        return (Container.add new_pdx new_p $
                Container.addTwo old_pdx int_p old_sdx new_s nl,
                new_inst, new_pdx, old_sdx)
  in new_nl
-- Replace the secondary (r:ns)
applyMoveEx force nl inst (ReplaceSecondary new_sdx) =
  let old_pdx = Instance.pNode inst
      old_sdx = Instance.sNode inst
      old_s = Container.find old_sdx nl
      tgt_n = Container.find new_sdx nl
      int_s = Node.removeSec old_s inst
      force_s = Node.offline old_s || force
      new_inst = Instance.setSec inst new_sdx
      new_nl = Node.addSecEx force_s tgt_n inst old_pdx >>=
               \new_s -> return (Container.addTwo new_sdx
                                 new_s old_sdx int_s nl,
                                 new_inst, old_pdx, new_sdx)
  in new_nl
-- Replace the secondary and failover (r:np, f)
applyMoveEx force nl inst (ReplaceAndFailover new_pdx) =
  let (old_pdx, old_sdx, old_p, old_s) = instanceNodes nl inst
      tgt_n = Container.find new_pdx nl
      int_p = Node.removePri old_p inst
      int_s = Node.removeSec old_s inst
      force_s = Node.offline old_s || force
      new_nl = do -- OpResult
        Node.checkMigration old_p tgt_n
        new_p <- Node.addPri tgt_n inst
        new_s <- Node.addSecEx force_s int_p inst new_pdx
        let new_inst = Instance.setBoth inst new_pdx old_pdx
        return (Container.add new_pdx new_p $
                Container.addTwo old_pdx new_s old_sdx int_s nl,
                new_inst, new_pdx, old_pdx)
  in new_nl
-- Failover and replace the secondary (f, r:ns)
applyMoveEx force nl inst (FailoverAndReplace new_sdx) =
  let (old_pdx, old_sdx, old_p, old_s) = instanceNodes nl inst
      tgt_n = Container.find new_sdx nl
      int_p = Node.removePri old_p inst
      int_s = Node.removeSec old_s inst
      force_p = Node.offline old_p || force
      new_nl = do -- OpResult
        Node.checkMigration old_p old_s
        new_p <- Node.addPriEx force_p int_s inst
        new_s <- Node.addSecEx force_p tgt_n inst old_sdx
        let new_inst = Instance.setBoth inst old_sdx new_sdx
        return (Container.add new_sdx new_s $
                Container.addTwo old_sdx new_p old_pdx int_p nl,
                new_inst, old_sdx, new_sdx)
  in new_nl
-- | Applies an instance move to a given node list and instance,
-- without ignoring soft errors (see 'applyMoveEx').
applyMove :: Node.List -> Instance.Instance
          -> IMove -> OpResult (Node.List, Instance.Instance, Ndx, Ndx)
applyMove nl inst move = applyMoveEx False nl inst move
-- | Tries to allocate an instance on one given node.
--
-- Checks the instance against the node's policy, adds it as primary
-- (with no secondary), and returns the updated node list together
-- with the new cluster score.
allocateOnSingle :: Node.List -> Instance.Instance -> Ndx
                 -> OpResult Node.AllocElement
allocateOnSingle nl inst new_pdx =
  let p = Container.find new_pdx nl
      new_inst = Instance.setBoth inst new_pdx Node.noSecondary
  in do
    Instance.instMatchesPolicy inst (Node.iPolicy p) (Node.exclStorage p)
    new_p <- Node.addPri p inst
    let new_nl = Container.add new_pdx new_p nl
        new_score = compCV new_nl
    return (new_nl, new_inst, [new_p], new_score)
-- | Tries to allocate an instance on a given pair of nodes.
--
-- The score is computed from incrementally-updated statistics
-- ('updateClusterStatisticsTwice') rather than a full 'compCV' pass.
allocateOnPair :: [Statistics]
               -> Node.List -> Instance.Instance -> Ndx -> Ndx
               -> OpResult Node.AllocElement
allocateOnPair stats nl inst new_pdx new_sdx =
  let tgt_p = Container.find new_pdx nl
      tgt_s = Container.find new_sdx nl
  in do
    Instance.instMatchesPolicy inst (Node.iPolicy tgt_p)
      (Node.exclStorage tgt_p)
    new_p <- Node.addPri tgt_p inst
    new_s <- Node.addSec tgt_s inst new_pdx
    let new_inst = Instance.setBoth inst new_pdx new_sdx
        new_nl = Container.addTwo new_pdx new_p new_sdx new_s nl
        new_stats = updateClusterStatisticsTwice stats
                      (tgt_p, new_p) (tgt_s, new_s)
    return (new_nl, new_inst, [new_p, new_s], compCVfromStats new_stats)
-- | Tries to perform an instance move and returns the best table
-- between the original one and the new one.
--
-- A failed move simply keeps the current best table; a successful one
-- is scored with 'compCV' and compared via 'compareTables'.
checkSingleStep :: Bool -- ^ Whether to unconditionally ignore soft errors
                -> Table -- ^ The original table
                -> Instance.Instance -- ^ The instance to move
                -> Table -- ^ The current best table
                -> IMove -- ^ The move to apply
                -> Table -- ^ The final best table
checkSingleStep force ini_tbl target cur_tbl move =
  let Table ini_nl ini_il _ ini_plc = ini_tbl
      tmp_resu = applyMoveEx force ini_nl target move
  in case tmp_resu of
       Bad _ -> cur_tbl
       Ok (upd_nl, new_inst, pri_idx, sec_idx) ->
         let tgt_idx = Instance.idx target
             upd_cvar = compCV upd_nl
             upd_il = Container.add tgt_idx new_inst ini_il
             -- the new placement is prepended, so the placement list
             -- is newest-first
             upd_plc = (tgt_idx, pri_idx, sec_idx, move, upd_cvar):ini_plc
             upd_tbl = Table upd_nl upd_il upd_cvar upd_plc
         in compareTables cur_tbl upd_tbl
-- | Given the status of the current secondary as a valid new node and
-- the current candidate target node, generate the possible moves for
-- a instance.
--
-- Note: the equations are order-sensitive; more specific patterns
-- (e.g. restricted migration with an online primary) must stay before
-- the general ones.
possibleMoves :: MirrorType -- ^ The mirroring type of the instance
              -> Bool       -- ^ Whether the secondary node is a valid new node
              -> Bool       -- ^ Whether we can change the primary node
              -> (Bool, Bool) -- ^ Whether migration is restricted and whether
                              -- the instance primary is offline
              -> Ndx        -- ^ Target node candidate
              -> [IMove]    -- ^ List of valid result moves
-- no mirroring: nothing can be moved
possibleMoves MirrorNone _ _ _ _ = []
possibleMoves MirrorExternal _ False _ _ = []
possibleMoves MirrorExternal _ True _ tdx =
  [ FailoverToAny tdx ]
possibleMoves MirrorInternal _ False _ tdx =
  [ ReplaceSecondary tdx ]
-- restricted migration with an online primary: only the secondary
-- may be replaced
possibleMoves MirrorInternal _ _ (True, False) tdx =
  [ ReplaceSecondary tdx
  ]
possibleMoves MirrorInternal True True (False, _) tdx =
  [ ReplaceSecondary tdx
  , ReplaceAndFailover tdx
  , ReplacePrimary tdx
  , FailoverAndReplace tdx
  ]
possibleMoves MirrorInternal True True (True, True) tdx =
  [ ReplaceSecondary tdx
  , ReplaceAndFailover tdx
  , FailoverAndReplace tdx
  ]
possibleMoves MirrorInternal False True _ tdx =
  [ ReplaceSecondary tdx
  , ReplaceAndFailover tdx
  ]
-- | Compute the best move for a given instance.
--
-- Starting from an optional plain failover, folds 'checkSingleStep'
-- over all moves generated by 'possibleMoves' for every allowed
-- target node, keeping the best-scoring table.
checkInstanceMove :: AlgorithmOptions   -- ^ Algorithmic options for balancing
                  -> [Ndx]              -- ^ Allowed target node indices
                  -> Table              -- ^ Original table
                  -> Instance.Instance  -- ^ Instance to move
                  -> Table              -- ^ Best new table for this instance
checkInstanceMove opts nodes_idx ini_tbl@(Table nl _ _ _) target =
  let force = algIgnoreSoftErrors opts
      disk_moves = algDiskMoves opts
      inst_moves = algInstanceMoves opts
      rest_mig = algRestrictedMigration opts
      opdx = Instance.pNode target
      osdx = Instance.sNode target
      -- the instance's current nodes are never valid targets
      bad_nodes = [opdx, osdx]
      nodes = filter (`notElem` bad_nodes) nodes_idx
      mir_type = Instance.mirrorType target
      use_secondary = elem osdx nodes_idx && inst_moves
      aft_failover = if mir_type == MirrorInternal && use_secondary
                       -- if drbd and allowed to failover
                       then checkSingleStep force ini_tbl target ini_tbl
                            Failover
                       else ini_tbl
      primary_drained = Node.offline
                        . flip Container.find nl
                        $ Instance.pNode target
      all_moves =
        if disk_moves
          then concatMap (possibleMoves mir_type use_secondary inst_moves
                          (rest_mig, primary_drained))
               nodes
          else []
    in
      -- iterate over the possible nodes for this instance
      foldl' (checkSingleStep force ini_tbl target) aft_failover all_moves
-- | Compute the best next move.
--
-- Evaluates every victim instance in parallel and keeps the overall
-- best table; returns the original table unchanged when no candidate
-- added a placement.
checkMove :: AlgorithmOptions       -- ^ Algorithmic options for balancing
          -> [Ndx]                  -- ^ Allowed target node indices
          -> Table                  -- ^ The current solution
          -> [Instance.Instance]    -- ^ List of instances still to move
          -> Table                  -- ^ The new solution
checkMove opts nodes_idx ini_tbl victims =
  let Table _ _ _ ini_plc = ini_tbl
      -- we're using rwhnf from the Control.Parallel.Strategies
      -- package; we don't need to use rnf as that would force too
      -- much evaluation in single-threaded cases, and in
      -- multi-threaded case the weak head normal form is enough to
      -- spark the evaluation
      tables = parMap rwhnf (checkInstanceMove opts nodes_idx ini_tbl)
               victims
      -- iterate over all instances, computing the best move
      best_tbl = foldl' compareTables ini_tbl tables
      Table _ _ _ best_plc = best_tbl
  in if length best_plc == length ini_plc
       then ini_tbl -- no advancement
       else best_tbl
-- | Check if we are allowed to go deeper in the balancing.
--
-- Another round is allowed while the placement count is below the
-- round limit (a negative limit means unlimited) and the score is
-- still above the stop threshold.
doNextBalance :: Table -- ^ The starting table
              -> Int   -- ^ Remaining length
              -> Score -- ^ Score at which to stop
              -> Bool  -- ^ Whether another balancing round may run
doNextBalance (Table _ _ ini_cv ini_plc) max_rounds min_score =
  (max_rounds < 0 || length ini_plc < max_rounds) && ini_cv > min_score
-- | Run a balance move.
--
-- Selects the movable, auto-balanced instances (optionally only
-- those touching offline nodes, in evacuation mode), computes the
-- best move via 'checkMove', and returns the new table only when it
-- improves the score enough (minimum-gain threshold).
tryBalance :: AlgorithmOptions -- ^ Algorithmic options for balancing
           -> Table            -- ^ The starting table
           -> Maybe Table      -- ^ The resulting table and commands
tryBalance opts ini_tbl =
  let evac_mode = algEvacMode opts
      mg_limit = algMinGainLimit opts
      min_gain = algMinGain opts
      Table ini_nl ini_il ini_cv _ = ini_tbl
      all_inst = Container.elems ini_il
      all_nodes = Container.elems ini_nl
      (offline_nodes, online_nodes) = partition Node.offline all_nodes
      -- in evac mode, only instances touching an offline node qualify
      all_inst' = if evac_mode
                    then let bad_nodes = map Node.idx offline_nodes
                         in filter (any (`elem` bad_nodes) .
                                    Instance.allNodes) all_inst
                    else all_inst
      reloc_inst = filter (\i -> Instance.movable i &&
                                 Instance.autoBalance i) all_inst'
      node_idx = map Node.idx online_nodes
      fin_tbl = checkMove opts node_idx ini_tbl reloc_inst
      (Table _ _ fin_cv _) = fin_tbl
  in
    if fin_cv < ini_cv && (ini_cv > mg_limit || ini_cv - fin_cv >= min_gain)
      then Just fin_tbl -- this round made success, return the new table
      else Nothing
-- * Allocation functions
-- | Build failure stats out of a list of failures.
collapseFailures :: [FailMode] -> FailStats
collapseFailures flst =
map (\k -> (k, foldl' (\a e -> if e == k then a + 1 else a) 0 flst))
[minBound..maxBound]
-- | Compares two Maybe AllocElement and chooses the one with the
-- lower score; a present element always beats 'Nothing', and on a
-- tie the first argument wins.
bestAllocElement :: Maybe Node.AllocElement
                 -> Maybe Node.AllocElement
                 -> Maybe Node.AllocElement
bestAllocElement a Nothing = a
bestAllocElement Nothing b = b
bestAllocElement a@(Just (_, _, _, ascore)) b@(Just (_, _, _, bscore))
  | ascore < bscore = a
  | otherwise       = b
-- | Update current Allocation solution and failure stats with new
-- elements.
--
-- A failed attempt only records the failure mode; a successful one
-- bumps the success counter and keeps the better of the old and new
-- solutions.
concatAllocs :: AllocSolution -> OpResult Node.AllocElement -> AllocSolution
concatAllocs as (Bad reason) = as { asFailures = reason : asFailures as }
concatAllocs as (Ok ns) =
  let -- Choose the old or new solution, based on the cluster score
    cntok = asAllocs as
    osols = asSolution as
    nsols = bestAllocElement osols (Just ns)
    nsuc = cntok + 1
    -- Note: we force evaluation of nsols here in order to keep the
    -- memory profile low - we know that we will need nsols for sure
    -- in the next cycle, so we force evaluation of nsols, since the
    -- foldl' in the caller will only evaluate the tuple, but not the
    -- elements of the tuple
  in nsols `seq` nsuc `seq` as { asAllocs = nsuc, asSolution = nsols }
-- | Sums two 'AllocSolution' structures.
--
-- Failure lists and logs are concatenated, success counts added, and
-- the better of the two solutions kept ('bestAllocElement').
sumAllocs :: AllocSolution -> AllocSolution -> AllocSolution
sumAllocs (AllocSolution aFails aAllocs aSols aLog)
          (AllocSolution bFails bAllocs bSols bLog) =
  -- note: we add b first, since usually it will be smaller; when
  -- fold'ing, a will grow and grow whereas b is the per-group
  -- result, hence smaller
  let nFails  = bFails ++ aFails
      nAllocs = aAllocs + bAllocs
      nSols   = bestAllocElement aSols bSols
      nLog    = bLog ++ aLog
  in AllocSolution nFails nAllocs nSols nLog
-- | Given a solution, generates a reasonable description for it.
--
-- With no solution, lists the (non-zero) failure reasons; otherwise
-- reports the score, success\/failure counts and the chosen nodes.
describeSolution :: AllocSolution -> String
describeSolution as =
  let fcnt = asFailures as
      sols = asSolution as
      freasons =
        intercalate ", " . map (\(a, b) -> printf "%s: %d" (show a) b) .
        filter ((> 0) . snd) . collapseFailures $ fcnt
  in case sols of
       Nothing -> "No valid allocation solutions, failure reasons: " ++
                  (if null fcnt then "unknown reasons" else freasons)
       Just (_, _, nodes, cv) ->
         printf ("score: %.8f, successes %d, failures %d (%s)" ++
                 " for node(s) %s") cv (asAllocs as) (length fcnt) freasons
               (intercalate "/" . map Node.name $ nodes)
-- | Prepends a human-readable description of a solution to its own log.
annotateSolution :: AllocSolution -> AllocSolution
annotateSolution sol = sol { asLog = describeSolution sol : asLog sol }
-- | Reverses an evacuation solution.
--
-- Rationale: results are always prepended while the solution is being
-- built, so for proper jobset execution order every list must be
-- reversed back at the end.
reverseEvacSolution :: EvacSolution -> EvacSolution
reverseEvacSolution (EvacSolution failed moved opcodes) =
  EvacSolution (reverse failed) (reverse moved) (reverse opcodes)
-- | Generate the valid node allocation singles or pairs for a new instance.
--
-- For a single-node request the result is the list of candidate
-- nodes; for a two-node request it is the list of (primary,
-- secondaries) pairs, restricted to same-group, distinct nodes.
genAllocNodes :: Group.List        -- ^ Group list
              -> Node.List         -- ^ The node map
              -> Int               -- ^ The number of nodes required
              -> Bool              -- ^ Whether to drop or not
                                   -- unallocable nodes
              -> Result AllocNodes -- ^ The (monadic) result
genAllocNodes gl nl count drop_unalloc =
  case count of
    1 -> Ok . Left $ map Node.idx candidates
    2 -> Ok . Right $ filter (not . null . snd) pairs
    _ -> Bad "Unsupported number of nodes, only one or two supported"
  where -- online nodes, optionally restricted to allocable groups
        candidates
          | drop_unalloc = filter (Group.isAllocable .
                                   flip Container.find gl . Node.group)
                           (getOnline nl)
          | otherwise    = getOnline nl
        -- all same-group (primary, secondary-candidates) pairs
        pairs = [ (Node.idx p,
                   [ Node.idx s | s <- candidates
                                , Node.idx p /= Node.idx s
                                , Node.group p == Node.group s ])
                | p <- candidates ]
-- | Try to allocate an instance on the cluster.
--
-- The targets are either single nodes (non-mirrored templates) or
-- (primary, secondary) pairs (e.g. DRBD); each candidate placement is
-- scored and only the best-scoring one is kept in the returned
-- 'AllocSolution'.
tryAlloc :: (Monad m) =>
            Node.List -- ^ The node list
         -> Instance.List -- ^ The instance list
         -> Instance.Instance -- ^ The instance to allocate
         -> AllocNodes -- ^ The allocation targets
         -> m AllocSolution -- ^ Possible solution list
tryAlloc _ _ _ (Right []) = fail "Not enough online nodes"
tryAlloc nl _ inst (Right ok_pairs) =
  -- pair-based allocation: evaluate each primary's candidate
  -- secondaries in parallel (rwhnf sparks), then merge the
  -- per-primary partial solutions with 'sumAllocs'
  let cstat = compClusterStatistics $ Container.elems nl
      psols = parMap rwhnf (\(p, ss) ->
                              foldl' (\cstate ->
                                        concatAllocs cstate .
                                        allocateOnPair cstat nl inst p)
                              emptyAllocSolution ss) ok_pairs
      sols = foldl' sumAllocs emptyAllocSolution psols
  in return $ annotateSolution sols
tryAlloc _ _ _ (Left []) = fail "No online nodes"
tryAlloc nl _ inst (Left all_nodes) =
  -- single-node allocation: plain sequential fold over the candidates
  let sols = foldl' (\cstate ->
                       concatAllocs cstate . allocateOnSingle nl inst
                    ) emptyAllocSolution all_nodes
  in return $ annotateSolution sols
-- | Given a group/result, describe it as a nice (list of) messages.
solutionDescription :: (Group.Group, Result AllocSolution)
                    -> [String]
solutionDescription (grp, result) =
  let gname = Group.name grp
      pol = allocPolicyToRaw (Group.allocPolicy grp)
  in case result of
       Ok sol -> map (printf "Group %s (%s): %s" gname pol) (asLog sol)
       Bad msg -> [printf "Group %s: error %s" gname msg]
-- | From a list of possibly bad and possibly empty solutions, filter
-- only the groups with a valid result. Note that the result will be
-- reversed compared to the original list.
filterMGResults :: [(Group.Group, Result AllocSolution)]
                -> [(Group.Group, AllocSolution)]
filterMGResults = foldl' pick []
  where -- keep only successful results with an actual placement,
        -- coming from groups where allocation is allowed
        pick acc (grp, res) =
          case res of
            Ok sol | Group.isAllocable grp
                   , isJust (asSolution sol) -> (grp, sol):acc
            _ -> acc
-- | Sort multigroup results based on policy and score.
--
-- The 'fromJust' is safe when the input comes from
-- 'filterMGResults', which only keeps solutions with a placement.
sortMGResults :: [(Group.Group, AllocSolution)]
              -> [(Group.Group, AllocSolution)]
sortMGResults = sortBy (comparing solKey)
  where solKey (grp, sol) =
          let (_, _, _, score) = fromJust (asSolution sol)
          in (Group.allocPolicy grp, score)
-- | Determines if a group is connected to all the networks required
-- by the instance's NICs (NICs without a network always pass).
hasRequiredNetworks :: Group.Group -> Instance.Instance -> Bool
hasRequiredNetworks ng inst = all connected (Instance.nics inst)
  where connected = maybe True (`elem` Group.networks ng) . Nic.network
-- | Removes node groups which can't accommodate the instance, and
-- returns a message for each rejected group.
filterValidGroups :: [(Group.Group, (Node.List, Instance.List))]
                  -> Instance.Instance
                  -> ([(Group.Group, (Node.List, Instance.List))], [String])
filterValidGroups ngs inst = foldr check ([], []) ngs
  where check ng (ok, msgs)
          | hasRequiredNetworks (fst ng) inst = (ng:ok, msgs)
          | otherwise =
              (ok, ("group " ++ Group.name (fst ng) ++
                    " is not connected to a network required by instance " ++
                    Instance.name inst):msgs)
-- | Finds an allocation solution for an instance on a group
findAllocation :: Group.List -- ^ The group list
               -> Node.List -- ^ The node list
               -> Instance.List -- ^ The instance list
               -> Gdx -- ^ The group to allocate to
               -> Instance.Instance -- ^ The instance to allocate
               -> Int -- ^ Required number of nodes
               -> Result (AllocSolution, [String])
findAllocation mggl mgnl mgil gdx inst cnt = do
  -- restrict the node and instance views to the target group only
  let belongsTo nl' nidx = nidx `elem` map Node.idx (Container.elems nl')
      nl = Container.filter ((== gdx) . Node.group) mgnl
      il = Container.filter (belongsTo nl . Instance.pNode) mgil
      group' = Container.find gdx mggl
  -- refuse groups lacking a network the instance requires
  unless (hasRequiredNetworks group' inst) . failError
         $ "The group " ++ Group.name group' ++ " is not connected to\
           \ a network required by instance " ++ Instance.name inst
  -- generate candidate nodes within the group and run the allocation
  solution <- genAllocNodes mggl nl cnt False >>= tryAlloc nl il inst
  return (solution, solutionDescription (group', return solution))
-- | Finds the best group for an instance on a multi-group cluster.
--
-- Only solutions in @preferred@ and @last_resort@ groups will be
-- accepted as valid, and additionally if the allowed groups parameter
-- is not null then allocation will only be run for those group
-- indices.
findBestAllocGroup :: Group.List -- ^ The group list
                   -> Node.List -- ^ The node list
                   -> Instance.List -- ^ The instance list
                   -> Maybe [Gdx] -- ^ The allowed groups
                   -> Instance.Instance -- ^ The instance to allocate
                   -> Int -- ^ Required number of nodes
                   -> Result (Group.Group, AllocSolution, [String])
findBestAllocGroup mggl mgnl mgil allowed_gdxs inst cnt =
  let -- pair each group with its per-group node/instance sub-cluster
      groups_by_idx = splitCluster mgnl mgil
      groups = map (\(gid, d) -> (Container.find gid mggl, d)) groups_by_idx
      -- restrict to the allowed groups, if any were given
      groups' = maybe groups
                (\gs -> filter ((`elem` gs) . Group.idx . fst) groups)
                allowed_gdxs
      -- drop groups lacking networks required by the instance
      (groups'', filter_group_msgs) = filterValidGroups groups' inst
      sols = map (\(gr, (nl, il)) ->
                   (gr, genAllocNodes mggl nl cnt False >>=
                        tryAlloc nl il inst))
             groups''::[(Group.Group, Result AllocSolution)]
      all_msgs = filter_group_msgs ++ concatMap solutionDescription sols
      goodSols = filterMGResults sols
      sortedSols = sortMGResults goodSols
  in case sortedSols of
       [] -> Bad $ if null groups'
                     -- note: the space after "was " is required, lest
                     -- the message read "...wasJust [...]"
                     then "no groups for evacuation: allowed groups was " ++
                          show allowed_gdxs ++ ", all groups: " ++
                          show (map fst groups)
                     else intercalate ", " all_msgs
       (final_group, final_sol):_ -> return (final_group, final_sol, all_msgs)
-- | Try to allocate an instance on a multi-group cluster.
tryMGAlloc :: Group.List           -- ^ The group list
           -> Node.List            -- ^ The node list
           -> Instance.List        -- ^ The instance list
           -> Instance.Instance    -- ^ The instance to allocate
           -> Int                  -- ^ Required number of nodes
           -> Result AllocSolution -- ^ Possible solution list
tryMGAlloc mggl mgnl mgil inst cnt = do
  (grp, sol, msgs) <- findBestAllocGroup mggl mgnl mgil Nothing inst cnt
  -- record which group won, on top of the accumulated messages
  let note = "Selected group: " ++ Group.name grp
  return $ sol { asLog = note:msgs }
-- | Try to allocate an instance to a specific, named group.
tryGroupAlloc :: Group.List           -- ^ The group list
              -> Node.List            -- ^ The node list
              -> Instance.List        -- ^ The instance list
              -> String               -- ^ The allocation group (name)
              -> Instance.Instance    -- ^ The instance to allocate
              -> Int                  -- ^ Required number of nodes
              -> Result AllocSolution -- ^ Solution
tryGroupAlloc mggl mgnl mgil gn inst cnt = do
  grp <- Container.findByName mggl gn
  (sol, msgs) <- findAllocation mggl mgnl mgil (Group.idx grp) inst cnt
  return $ sol { asLog = msgs }
-- | Calculate the new instance list after an allocation attempt: on
-- success the allocated instance is added at the next free index.
updateIl :: Instance.List             -- ^ The original instance list
         -> Maybe Node.AllocElement   -- ^ The result of the allocation attempt
         -> Instance.List             -- ^ The updated instance list
updateIl il =
  maybe il (\(_, inst, _, _) -> Container.add (Container.size il) inst il)
-- | Extract the new node list from the allocation solution, keeping
-- the original one when the allocation failed.
extractNl :: Node.List               -- ^ The original node list
          -> Maybe Node.AllocElement -- ^ The result of the allocation attempt
          -> Node.List               -- ^ The new node list
extractNl nl = maybe nl (\(xnl, _, _, _) -> xnl)
-- | Try to allocate a list of instances on a multi-group cluster.
allocList :: Group.List -- ^ The group list
          -> Node.List -- ^ The node list
          -> Instance.List -- ^ The instance list
          -> [(Instance.Instance, AllocDetails)] -- ^ The instance to
                                                 -- allocate
          -> AllocSolutionList -- ^ Possible solution
                               -- list
          -> Result (Node.List, Instance.List,
                     AllocSolutionList) -- ^ The final solution
                                        -- list
allocList _ nl il [] acc = Ok (nl, il, acc)
allocList gl nl il ((inst, AllocDetails cnt mgn):rest) acc = do
  -- with a named target group we allocate directly into it,
  -- otherwise the best group is chosen cluster-wide
  res <- maybe (tryMGAlloc gl nl il inst cnt)
               (\gn -> tryGroupAlloc gl nl il gn inst cnt)
               mgn
  let sol = asSolution res
  allocList gl (extractNl nl sol) (updateIl il sol) rest ((inst, res):acc)
-- | Function which fails if the requested mode is change secondary.
--
-- This is useful since except DRBD, no other disk template can
-- execute change secondary; thus, we can just call this function
-- instead of always checking for secondary mode. After the call to
-- this function, whatever mode we have is just a primary change.
failOnSecondaryChange :: (Monad m) => EvacMode -> DiskTemplate -> m ()
failOnSecondaryChange mode dt =
  case mode of
    ChangeSecondary ->
      fail $ "Instances with disk template '" ++ diskTemplateToRaw dt ++
             "' can't execute change secondary"
    _ -> return ()
-- | Run evacuation for a single instance.
--
-- /Note:/ this function should correctly execute both intra-group
-- evacuations (in all modes) and inter-group evacuations (in the
-- 'ChangeAll' mode). Of course, this requires that the correct list
-- of target nodes is passed.
nodeEvacInstance :: AlgorithmOptions
                 -> Node.List -- ^ The node list (cluster-wide)
                 -> Instance.List -- ^ Instance list (cluster-wide)
                 -> EvacMode -- ^ The evacuation mode
                 -> Instance.Instance -- ^ The instance to be evacuated
                 -> Gdx -- ^ The group we're targetting
                 -> [Ndx] -- ^ The list of available nodes
                          -- for allocation
                 -> Result (Node.List, Instance.List, [OpCodes.OpCode])
-- Diskless instances have no secondary, so only a primary change is
-- possible; 'failOnSecondaryChange' rejects ChangeSecondary up front.
nodeEvacInstance opts nl il mode inst@(Instance.Instance
                                       {Instance.diskTemplate = dt@DTDiskless})
                 gdx avail_nodes =
                 failOnSecondaryChange mode dt >>
                 evacOneNodeOnly opts nl il inst gdx avail_nodes
-- Plain (LVM) and file-based instances are tied to their node's local
-- storage and cannot be relocated at all.
nodeEvacInstance _ _ _ _ (Instance.Instance
                          {Instance.diskTemplate = DTPlain}) _ _ =
                  fail "Instances of type plain cannot be relocated"
nodeEvacInstance _ _ _ _ (Instance.Instance
                          {Instance.diskTemplate = DTFile}) _ _ =
                  fail "Instances of type file cannot be relocated"
-- The following templates (shared file, block, RBD, ext, Gluster)
-- all follow the same single-node relocation path as diskless.
nodeEvacInstance opts nl il mode inst@(Instance.Instance
                                       {Instance.diskTemplate = dt@DTSharedFile})
                 gdx avail_nodes =
                 failOnSecondaryChange mode dt >>
                 evacOneNodeOnly opts nl il inst gdx avail_nodes
nodeEvacInstance opts nl il mode inst@(Instance.Instance
                                       {Instance.diskTemplate = dt@DTBlock})
                 gdx avail_nodes =
                 failOnSecondaryChange mode dt >>
                 evacOneNodeOnly opts nl il inst gdx avail_nodes
nodeEvacInstance opts nl il mode inst@(Instance.Instance
                                       {Instance.diskTemplate = dt@DTRbd})
                 gdx avail_nodes =
                 failOnSecondaryChange mode dt >>
                 evacOneNodeOnly opts nl il inst gdx avail_nodes
nodeEvacInstance opts nl il mode inst@(Instance.Instance
                                       {Instance.diskTemplate = dt@DTExt})
                 gdx avail_nodes =
                 failOnSecondaryChange mode dt >>
                 evacOneNodeOnly opts nl il inst gdx avail_nodes
nodeEvacInstance opts nl il mode inst@(Instance.Instance
                                       {Instance.diskTemplate = dt@DTGluster})
                 gdx avail_nodes =
                 failOnSecondaryChange mode dt >>
                 evacOneNodeOnly opts nl il inst gdx avail_nodes
-- For DRBD, a primary change is a plain failover to the secondary.
nodeEvacInstance _ nl il ChangePrimary
                 inst@(Instance.Instance {Instance.diskTemplate = DTDrbd8})
                 _ _ =
  do
    (nl', inst', _, _) <- opToResult $ applyMove nl inst Failover
    let idx = Instance.idx inst
        il' = Container.add idx inst' il
        ops = iMoveToJob nl' il' idx Failover
    return (nl', il', ops)
-- For DRBD, a secondary change is a single replace-secondary move.
nodeEvacInstance opts nl il ChangeSecondary
                 inst@(Instance.Instance {Instance.diskTemplate = DTDrbd8})
                 gdx avail_nodes =
  evacOneNodeOnly opts nl il inst gdx avail_nodes
-- The algorithm for ChangeAll is as follows:
--
-- * generate all (primary, secondary) node pairs for the target groups
-- * for each pair, execute the needed moves (r:s, f, r:s) and compute
--   the final node list state and group score
-- * select the best choice via a foldl that uses the same Either
--   String solution as the ChangeSecondary mode
nodeEvacInstance opts nl il ChangeAll
                 inst@(Instance.Instance {Instance.diskTemplate = DTDrbd8})
                 gdx avail_nodes =
  do
    let no_nodes = Left "no nodes available"
        node_pairs = [(p,s) | p <- avail_nodes, s <- avail_nodes, p /= s]
    (nl', il', ops, _) <-
        annotateResult "Can't find any good nodes for relocation" .
        eitherToResult $
        foldl'
        (\accu nodes -> case evacDrbdAllInner opts nl il inst gdx nodes of
           Bad msg ->
             case accu of
               Right _ -> accu
               -- we don't need more details (which
               -- nodes, etc.) as we only selected
               -- this group if we can allocate on
               -- it, hence failures will not
               -- propagate out of this fold loop
               Left _ -> Left $ "Allocation failed: " ++ msg
           Ok result@(_, _, _, new_cv) ->
             let new_accu = Right result in
             case accu of
               Left _ -> new_accu
               Right (_, _, _, old_cv) ->
                 -- keep the candidate with the lower (better) score
                 if old_cv < new_cv
                   then accu
                   else new_accu
        ) no_nodes node_pairs
    return (nl', il', ops)
-- | Generic function for changing one node of an instance.
--
-- This is similar to 'nodeEvacInstance' but will be used in a few of
-- its sub-patterns. It folds the inner function 'evacOneNodeInner'
-- over the list of available nodes, which results in the best choice
-- for relocation.
evacOneNodeOnly :: AlgorithmOptions
                -> Node.List -- ^ The node list (cluster-wide)
                -> Instance.List -- ^ Instance list (cluster-wide)
                -> Instance.Instance -- ^ The instance to be evacuated
                -> Gdx -- ^ The group we're targetting
                -> [Ndx] -- ^ The list of available nodes
                         -- for allocation
                -> Result (Node.List, Instance.List, [OpCodes.OpCode])
evacOneNodeOnly opts nl il inst gdx avail_nodes = do
  -- choose the move constructor from the mirroring type: internally
  -- mirrored instances replace their secondary, externally mirrored
  -- ones fail over to an arbitrary node
  op_fn <- case Instance.mirrorType inst of
             MirrorNone -> Bad "Can't relocate/evacuate non-mirrored instances"
             MirrorInternal -> Ok ReplaceSecondary
             MirrorExternal -> Ok FailoverToAny
  -- fold over all candidate nodes, starting from "no solution yet"
  (nl', inst', _, ndx) <- annotateResult "Can't find any good node" .
                          eitherToResult $
                          foldl' (evacOneNodeInner opts nl inst gdx op_fn)
                          (Left "") avail_nodes
  let idx = Instance.idx inst
      il' = Container.add idx inst' il
      ops = iMoveToJob nl' il' idx (op_fn ndx)
  return (nl', il', ops)
-- | Inner fold function for changing one node of an instance.
--
-- Depending on the instance disk template, this will either change
-- the secondary (for DRBD) or the primary node (for shared
-- storage). However, the operation is generic otherwise.
--
-- The running solution is either a @Left String@, which means we
-- don't have yet a working solution, or a @Right (...)@, which
-- represents a valid solution; it holds the modified node list, the
-- modified instance (after evacuation), the score of that solution,
-- and the new secondary node index.
evacOneNodeInner :: AlgorithmOptions
                 -> Node.List -- ^ Cluster node list
                 -> Instance.Instance -- ^ Instance being evacuated
                 -> Gdx -- ^ The group index of the instance
                 -> (Ndx -> IMove) -- ^ Operation constructor
                 -> EvacInnerState -- ^ Current best solution
                 -> Ndx -- ^ Node we're evaluating as target
                 -> EvacInnerState -- ^ New best solution
evacOneNodeInner opts nl inst gdx op_fn accu ndx =
  case applyMoveEx (algIgnoreSoftErrors opts) nl inst (op_fn ndx) of
    -- on failure, append the message to the running Left (failure
    -- history), but keep an existing Right untouched
    Bad fm -> let fail_msg = " Node " ++ Container.nameOf nl ndx ++
                            " failed: " ++ show fm ++ ";"
              in either (Left . (++ fail_msg)) Right accu
    Ok (nl', inst', _, _) ->
      let nodes = Container.elems nl'
          -- The fromJust below is ugly (it can fail nastily), but
          -- at this point we should have any internal mismatches,
          -- and adding a monad here would be quite involved
          grpnodes = fromJust (gdx `lookup` Node.computeGroups nodes)
          -- the score is computed on the target group's nodes only
          new_cv = compCVNodes grpnodes
          new_accu = Right (nl', inst', new_cv, ndx)
      in case accu of
           Left _ -> new_accu
           Right (_, _, old_cv, _) ->
             -- keep the candidate with the lower (better) score
             if old_cv < new_cv
               then accu
               else new_accu
-- | Compute result of changing all nodes of a DRBD instance.
--
-- Given the target primary and secondary node (which might be in a
-- different group or not), this function will 'execute' all the
-- required steps and assuming all operations succceed, will return
-- the modified node and instance lists, the opcodes needed for this
-- and the new group score.
evacDrbdAllInner :: AlgorithmOptions
                 -> Node.List -- ^ Cluster node list
                 -> Instance.List -- ^ Cluster instance list
                 -> Instance.Instance -- ^ The instance to be moved
                 -> Gdx -- ^ The target group index
                        -- (which can differ from the
                        -- current group of the
                        -- instance)
                 -> (Ndx, Ndx) -- ^ Tuple of new
                               -- primary\/secondary nodes
                 -> Result (Node.List, Instance.List, [OpCodes.OpCode], Score)
evacDrbdAllInner opts nl il inst gdx (t_pdx, t_sdx) = do
  let primary = Container.find (Instance.pNode inst) nl
      idx = Instance.idx inst
      apMove = applyMoveEx $ algIgnoreSoftErrors opts
  -- if the primary is offline, then we first failover
  (nl1, inst1, ops1) <-
    if Node.offline primary
      then do
        (nl', inst', _, _) <-
          annotateResult "Failing over to the secondary" .
          opToResult $ apMove nl inst Failover
        return (nl', inst', [Failover])
      else return (nl, inst, [])
  -- the full move sequence: r:t_pdx, f, r:t_sdx
  let (o1, o2, o3) = (ReplaceSecondary t_pdx,
                      Failover,
                      ReplaceSecondary t_sdx)
  -- we now need to execute a replace secondary to the future
  -- primary node
  (nl2, inst2, _, _) <-
    annotateResult "Changing secondary to new primary" .
    opToResult $
    apMove nl1 inst1 o1
  let ops2 = o1:ops1
  -- we now execute another failover, the primary stays fixed now
  (nl3, inst3, _, _) <- annotateResult "Failing over to new primary" .
                        opToResult $ apMove nl2 inst2 o2
  let ops3 = o2:ops2
  -- and finally another replace secondary, to the final secondary
  (nl4, inst4, _, _) <-
    annotateResult "Changing secondary to final secondary" .
    opToResult $
    apMove nl3 inst3 o3
  let ops4 = o3:ops3
      il' = Container.add idx inst4 il
      -- moves were accumulated in reverse, so restore job order here
      ops = concatMap (iMoveToJob nl4 il' idx) $ reverse ops4
  let nodes = Container.elems nl4
      -- The fromJust below is ugly (it can fail nastily), but
      -- at this point we should have any internal mismatches,
      -- and adding a monad here would be quite involved
      grpnodes = fromJust (gdx `lookup` Node.computeGroups nodes)
      -- score the target group only, after all moves are applied
      new_cv = compCVNodes grpnodes
  return (nl4, il', ops, new_cv)
-- | Computes the nodes in a given group which are available for
-- allocation, i.e. the group's nodes minus the excluded set.
availableGroupNodes :: [(Gdx, [Ndx])] -- ^ Group index/node index assoc list
                    -> IntSet.IntSet  -- ^ Nodes that are excluded
                    -> Gdx            -- ^ The group for which we
                                      -- query the nodes
                    -> Result [Ndx]   -- ^ List of available node indices
availableGroupNodes group_nodes excl_ndx gdx =
  case lookup gdx group_nodes of
    Nothing -> Bad $ "Can't find group with index " ++ show gdx
    Just members -> Ok $ filter (not . (`IntSet.member` excl_ndx)) members
-- | Updates the evac solution with the results of an instance
-- evacuation.
updateEvacSolution :: (Node.List, Instance.List, EvacSolution)
                   -> Idx
                   -> Result (Node.List, Instance.List, [OpCodes.OpCode])
                   -> (Node.List, Instance.List, EvacSolution)
updateEvacSolution (nl, il, es) idx (Bad msg) =
  -- failure: the cluster state is unchanged, only record the error
  (nl, il, es { esFailed = (idx, msg):esFailed es})
updateEvacSolution (_, _, es) idx (Ok (nl, il, opcodes)) =
  -- success: adopt the new cluster state and record the move
  let inst = Container.find idx il
      entry = (idx, instancePriGroup nl inst, Instance.allNodes inst)
  in (nl, il, es { esMoved = entry:esMoved es
                 , esOpCodes = opcodes:esOpCodes es })
-- | Node-evacuation IAllocator mode main function.
tryNodeEvac :: AlgorithmOptions
            -> Group.List -- ^ The cluster groups
            -> Node.List -- ^ The node list (cluster-wide, not per group)
            -> Instance.List -- ^ Instance list (cluster-wide)
            -> EvacMode -- ^ The evacuation mode
            -> [Idx] -- ^ List of instance (indices) to be evacuated
            -> Result (Node.List, Instance.List, EvacSolution)
tryNodeEvac opts _ ini_nl ini_il mode idxs =
  -- the excluded set is: nodes being evacuated plus offline nodes;
  -- each instance additionally excludes its own primary below
  let evac_ndx = nodesToEvacuate ini_il mode idxs
      offline = map Node.idx . filter Node.offline $ Container.elems ini_nl
      excl_ndx = foldl' (flip IntSet.insert) evac_ndx offline
      group_ndx = map (\(gdx, (nl, _)) -> (gdx, map Node.idx
                                                (Container.elems nl))) $
                  splitCluster ini_nl ini_il
      -- thread the evolving (node list, instance list, solution)
      -- state through all the instances to evacuate
      (fin_nl, fin_il, esol) =
        foldl' (\state@(nl, il, _) inst ->
                  let gdx = instancePriGroup nl inst
                      pdx = Instance.pNode inst in
                  updateEvacSolution state (Instance.idx inst) $
                  availableGroupNodes group_ndx
                  (IntSet.insert pdx excl_ndx) gdx >>=
                  nodeEvacInstance opts nl il mode inst gdx
               )
        (ini_nl, ini_il, emptyEvacSolution)
        (map (`Container.find` ini_il) idxs)
  in return (fin_nl, fin_il, reverseEvacSolution esol)
-- | Change-group IAllocator mode main function.
--
-- This is very similar to 'tryNodeEvac', the only difference is that
-- we don't choose as target group the current instance group, but
-- instead:
--
-- 1. at the start of the function, we compute which are the target
-- groups; either no groups were passed in, in which case we choose
-- all groups out of which we don't evacuate instance, or there were
-- some groups passed, in which case we use those
--
-- 2. for each instance, we use 'findBestAllocGroup' to choose the
-- best group to hold the instance, and then we do what
-- 'tryNodeEvac' does, except for this group instead of the current
-- instance group.
--
-- Note that the correct behaviour of this function relies on the
-- function 'nodeEvacInstance' to be able to do correctly both
-- intra-group and inter-group moves when passed the 'ChangeAll' mode.
tryChangeGroup :: Group.List -- ^ The cluster groups
               -> Node.List -- ^ The node list (cluster-wide)
               -> Instance.List -- ^ Instance list (cluster-wide)
               -> [Gdx] -- ^ Target groups; if empty, any
                        -- groups not being evacuated
               -> [Idx] -- ^ List of instance (indices) to be evacuated
               -> Result (Node.List, Instance.List, EvacSolution)
tryChangeGroup gl ini_nl ini_il gdxs idxs =
  -- groups we evacuate from can never be targets, hence the \\ below
  let evac_gdxs = nub $ map (instancePriGroup ini_nl .
                             flip Container.find ini_il) idxs
      target_gdxs = (if null gdxs
                       then Container.keys gl
                       else gdxs) \\ evac_gdxs
      offline = map Node.idx . filter Node.offline $ Container.elems ini_nl
      excl_ndx = foldl' (flip IntSet.insert) IntSet.empty offline
      group_ndx = map (\(gdx, (nl, _)) -> (gdx, map Node.idx
                                                (Container.elems nl))) $
                  splitCluster ini_nl ini_il
      (fin_nl, fin_il, esol) =
        foldl' (\state@(nl, il, _) inst ->
                  let solution = do
                        let ncnt = Instance.requiredNodes $
                                   Instance.diskTemplate inst
                        -- pick the best target group first, then
                        -- evacuate into it via ChangeAll
                        (grp, _, _) <- findBestAllocGroup gl nl il
                                       (Just target_gdxs) inst ncnt
                        let gdx = Group.idx grp
                        av_nodes <- availableGroupNodes group_ndx
                                    excl_ndx gdx
                        -- NOTE(review): this passes 'defaultOptions'
                        -- rather than caller-supplied options (cf.
                        -- 'tryNodeEvac' which takes opts) -- confirm
                        -- this is intentional
                        nodeEvacInstance defaultOptions
                                         nl il ChangeAll inst gdx av_nodes
                  in updateEvacSolution state (Instance.idx inst) solution
               )
        (ini_nl, ini_il, emptyEvacSolution)
        (map (`Container.find` ini_il) idxs)
  in return (fin_nl, fin_il, reverseEvacSolution esol)
-- | Standard-sized allocation method.
--
-- This places instances of the same size on the cluster until we're
-- out of space. The result will be a list of identically-sized
-- instances.
iterateAlloc :: AllocMethod
iterateAlloc nl il limit newinst allocnodes ixes cstats =
  -- each new instance is named "new-<depth>", where depth counts how
  -- many have been placed so far
  let depth = length ixes
      newname = printf "new-%d" depth::String
      newidx = Container.size il
      newi2 = Instance.setIdx (Instance.setName newinst newname) newidx
      -- the optional limit counts down by one per placed instance
      newlimit = fmap (flip (-) 1) limit
  in case tryAlloc nl il newi2 allocnodes of
       Bad s -> Bad s
       Ok (AllocSolution { asFailures = errs, asSolution = sols3 }) ->
         -- the result if we stop now: current state + failure stats
         let newsol = Ok (collapseFailures errs, nl, il, ixes, cstats) in
         case sols3 of
           Nothing -> newsol
           Just (xnl, xi, _, _) ->
             if limit == Just 0
               then newsol
               -- recurse with the updated cluster state
               else iterateAlloc xnl (Container.add newidx xi il)
                    newlimit newinst allocnodes (xi:ixes)
                    (totalResources xnl:cstats)
-- | Predicate whether shrinking a single resource can lead to a valid
-- allocation.
--
-- Repeatedly shrinks the instance along the given failure mode; the
-- walk stops once that mode no longer appears among the failures (and
-- no solution was found), and the first shrunk instance that does
-- allocate is returned.
sufficesShrinking :: (Instance.Instance -> AllocSolution) -> Instance.Instance
                  -> FailMode -> Maybe Instance.Instance
sufficesShrinking allocFn inst fm =
  case dropWhile (isNothing . asSolution . fst)
       . takeWhile (liftA2 (||) (elem fm . asFailures . fst)
                    (isJust . asSolution . fst))
       -- pair every shrunk candidate with its allocation result
       . map (allocFn &&& id) $
       iterateOk (`Instance.shrinkByType` fm) inst
  of x:_ -> Just . snd $ x
     _ -> Nothing
-- | Tiered allocation method.
--
-- This places instances on the cluster, and decreases the spec until
-- we can allocate again. The result will be a list of decreasing
-- instance specs.
tieredAlloc :: AllocMethod
tieredAlloc nl il limit newinst allocnodes ixes cstats =
  case iterateAlloc nl il limit newinst allocnodes ixes cstats of
    Bad s -> Bad s
    Ok (errs, nl', il', ixes', cstats') ->
      let newsol = Ok (errs, nl', il', ixes', cstats')
          ixes_cnt = length ixes'
          -- adjust the remaining limit by what was just placed
          (stop, newlimit) = case limit of
                               Nothing -> (False, Nothing)
                               Just n -> (n <= ixes_cnt,
                                          Just (n - ixes_cnt))
          -- failure modes ordered by how often they occurred
          sortedErrs = map fst $ sortBy (comparing snd) errs
          suffShrink = sufficesShrinking (fromMaybe emptyAllocSolution
                                          . flip (tryAlloc nl' il') allocnodes)
                       newinst
          -- candidate shrunk instances that would allocate, trying
          -- the most frequent failure modes first
          bigSteps = filter isJust . map suffShrink . reverse $ sortedErrs
          -- did a recursive attempt actually place more instances?
          progress (Ok (_, _, _, newil', _)) (Ok (_, _, _, newil, _)) =
            length newil' > length newil
          progress _ _ = False
      in if stop then newsol else
           -- first try a single-step shrink on the top failure mode...
           let newsol' = case Instance.shrinkByType newinst . last
                              $ sortedErrs of
                           Bad _ -> newsol
                           Ok newinst' -> tieredAlloc nl' il' newlimit
                                          newinst' allocnodes ixes' cstats'
           -- ...and fall back to the precomputed bigger shrink steps
           in if progress newsol' newsol then newsol' else
                case bigSteps of
                  Just newinst':_ -> tieredAlloc nl' il' newlimit
                                     newinst' allocnodes ixes' cstats'
                  _ -> newsol
-- * Formatting functions
-- | Given the original and final nodes, computes the relocation description.
computeMoves :: Instance.Instance -- ^ The instance to be moved
             -> String -- ^ The instance name
             -> IMove -- ^ The move being performed
             -> String -- ^ New primary
             -> String -- ^ New secondary
             -> (String, [String])
             -- ^ Tuple of moves and commands list; moves is containing
             -- either @/f/@ for failover or @/r:name/@ for replace
             -- secondary, while the command list holds gnt-instance
             -- commands (without that prefix), e.g \"@failover instance1@\"
computeMoves inst iname move npri nsec =
  case move of
    Failover -> ("f", [mig])
    FailoverToAny _ -> (printf "fa:%s" npri, [mig_any])
    FailoverAndReplace _ -> (printf "f r:%s" nsec, [mig, rep nsec])
    ReplaceSecondary _ -> (printf "r:%s" nsec, [rep nsec])
    ReplaceAndFailover _ -> (printf "r:%s f" npri, [rep npri, mig])
    ReplacePrimary _ -> (printf "f r:%s f" npri, [mig, rep npri, mig])
  where -- running instances are migrated, stopped ones failed over
        verb = if Instance.isRunning inst then "migrate" else "failover"
        mig = printf "%s -f %s" verb iname::String
        mig_any = printf "%s -f -n %s %s" verb npri iname::String
        rep n = printf "replace-disks -n %s %s" n iname::String
-- | Converts a placement to string format.
printSolutionLine :: Node.List -- ^ The node list
                  -> Instance.List -- ^ The instance list
                  -> Int -- ^ Maximum node name length
                  -> Int -- ^ Maximum instance name length
                  -> Placement -- ^ The current placement
                  -> Int -- ^ The index of the placement in
                         -- the solution
                  -> (String, [String])
printSolutionLine nl il nmlen imlen plc pos =
  -- pmlen is wide enough for a "primary:secondary" pair
  let pmlen = (2*nmlen + 1)
      (i, p, s, mv, c) = plc
      old_sec = Instance.sNode inst
      inst = Container.find i il
      inam = Instance.alias inst
      -- new and old primary/secondary aliases, for display
      npri = Node.alias $ Container.find p nl
      nsec = Node.alias $ Container.find s nl
      opri = Node.alias $ Container.find (Instance.pNode inst) nl
      osec = Node.alias $ Container.find old_sec nl
      (moves, cmds) = computeMoves inst inam mv npri nsec
      -- FIXME: this should check instead/also the disk template
      ostr = if old_sec == Node.noSecondary
               then printf "%s" opri::String
               else printf "%s:%s" opri osec::String
      nstr = if s == Node.noSecondary
               then printf "%s" npri::String
               else printf "%s:%s" npri nsec::String
  in (printf "  %3d. %-*s %-*s => %-*s %12.8f a=%s"
      pos imlen inam pmlen ostr pmlen nstr c moves,
      cmds)
-- | Return the nodes involved in an instance move: the new placement
-- plus whatever nodes the instance occupied before.
--
-- Note that the output list length can vary, and is not required nor
-- guaranteed to be of any specific length.
involvedNodes :: Instance.List -- ^ Instance list, used for retrieving
                               -- the instance from its index; note
                               -- that this /must/ be the original
                               -- instance list, so that we can
                               -- retrieve the old nodes
              -> Placement -- ^ The placement we're investigating,
                           -- containing the new nodes and
                           -- instance index
              -> [Ndx] -- ^ Resulting list of node indices
involvedNodes il plc =
  let (idx, new_pri, new_sec, _, _) = plc
      inst = Container.find idx il
  in nub . filter (>= 0) $ new_pri : new_sec : Instance.allNodes inst
-- | From two adjacent cluster tables get the list of moves that
-- transitions from one to the other.
getMoves :: (Table, Table) -> [MoveJob]
getMoves (Table _ initial_il _ initial_plc, Table final_nl _ _ final_plc) =
  -- only the placements added after the initial table are moves
  map toMove . reverse . drop (length initial_plc) $ reverse final_plc
  where toMove plc@(idx, p, s, mv, _) =
          let inst = Container.find idx initial_il
              npri = Node.alias $ Container.find p final_nl
              nsec = Node.alias $ Container.find s final_nl
              (_, cmds) = computeMoves inst (Instance.name inst) mv npri nsec
          in (involvedNodes initial_il plc, idx, mv, cmds)
-- | Inner function for splitJobs, that either appends the next job to
-- the current jobset, or starts a new jobset.
--
-- A job joins the current jobset only if it touches no node already
-- used by that jobset (tracked in the second tuple element).
mergeJobs :: ([JobSet], [Ndx]) -> MoveJob -> ([JobSet], [Ndx])
mergeJobs ([], _) job@(touched, _, _, _) = ([[job]], touched)
mergeJobs (jsets@(current:rest), busy) job@(touched, _, _, _)
  | null (touched `intersect` busy) = ((job:current):rest, touched ++ busy)
  | otherwise                       = ([job]:jsets, touched)
-- | Break a list of moves into independent groups. Note that this
-- will reverse the order of jobs.
--
-- Uses the strict 'foldl'' (as the rest of this module does) instead
-- of the lazy 'foldl', avoiding a chain of thunks on long move lists.
splitJobs :: [MoveJob] -> [JobSet]
splitJobs = fst . foldl' mergeJobs ([], [])
-- | Format a single job as shell-script echo/command lines, with a
-- jobset header emitted before the first job of each set.
formatJob :: Int -> Int -> (Int, MoveJob) -> [String]
formatJob jsn jsl (sn, (_, _, _, cmds)) =
  let body = printf "  echo job %d/%d" jsn sn
           : printf "  check"
           : map ("  gnt-instance " ++) cmds
  in if sn == 1
       then "" : printf "echo jobset %d, %d jobs" jsn jsl : body
       else body
-- | Render a list of jobsets as one shell-script-like string,
-- numbering the jobsets and the jobs within each set.
formatCmds :: [JobSet] -> String
formatCmds jsets =
  unlines $
  concatMap (\(jsn, js) -> concatMap (formatJob jsn (length js))
                                     (zip [1..] js))
            (zip [1..] jsets)
-- | Print the node list as a table.
--
-- An empty field list selects the defaults; a leading @\"+\"@ extends
-- the defaults with the remaining fields.
printNodes :: Node.List -> [String] -> String
printNodes nl fs =
  let fields = case fs of
                 [] -> Node.defaultFields
                 "+":rest -> Node.defaultFields ++ rest
                 _ -> fs
      sorted = sortBy (comparing Node.idx) (Container.elems nl)
      (header, isnum) = unzip $ map Node.showHeader fields
  in printTable "" header (map (Node.list fields) sorted) isnum
-- | Print the instance list as a table, one row per instance sorted
-- by index: run flag, name, nodes, auto-balance, resources and the
-- four dynamic utilisation values.
printInsts :: Node.List -> Instance.List -> String
printInsts nl il =
  let sil = sortBy (comparing Instance.idx) (Container.elems il)
      -- build one row of cells for a single instance
      helper inst = [ if Instance.isRunning inst then "R" else " "
                    , Instance.name inst
                    , Container.nameOf nl (Instance.pNode inst)
                    -- secondary column is empty for single-homed instances
                    , let sdx = Instance.sNode inst
                      in if sdx == Node.noSecondary
                           then  ""
                           else Container.nameOf nl sdx
                    , if Instance.autoBalance inst then "Y" else "N"
                    , printf "%3d" $ Instance.vcpus inst
                    , printf "%5d" $ Instance.mem inst
                    , printf "%5d" $ Instance.dsk inst `div` 1024
                    , printf "%5.3f" lC
                    , printf "%5.3f" lM
                    , printf "%5.3f" lD
                    , printf "%5.3f" lN
                    ]
          where DynUtil lC lM lD lN = Instance.util inst
      header = [ "F", "Name", "Pri_node", "Sec_node", "Auto_bal"
               , "vcpu", "mem" , "dsk", "lCpu", "lMem", "lDsk", "lNet" ]
      -- first five columns are text, the rest right-aligned numbers
      isnum = False:False:False:False:False:repeat True
  in printTable "" header (map helper sil) isnum
-- | Shows statistics for a given node list: one row per detailed
-- cluster-variance metric, with its value and weight.
printStats :: String -> Node.List -> String
printStats lp nl =
  printTable lp ["Field", "Value", "Weight"] rows (False:repeat True)
  where dcvs = compDetailedCV $ Container.elems nl
        (weights, names) = unzip detailedCVInfo
        -- pad names/weights so extra metrics still get a label
        padded = zip3 (weights ++ repeat 1) (names ++ repeat "unknown") dcvs
        rows = map (\(w, h, val) ->
                      [ h
                      , printf "%.8f" val
                      , printf "x%.2f" w
                      ]) padded
-- | Convert a placement into a list of OpCodes (basically a job).
iMoveToJob :: Node.List        -- ^ The node list; only used for node
                               -- names, so any version is good
                               -- (before or after the operation)
           -> Instance.List    -- ^ The instance list; also used for
                               -- names only
           -> Idx              -- ^ The index of the instance being
                               -- moved
           -> IMove            -- ^ The actual move to be described
           -> [OpCodes.OpCode] -- ^ The list of opcodes equivalent to
                               -- the given move
iMoveToJob nl il idx move =
  let inst = Container.find idx il
      iname = Instance.name inst
      lookNode n = case mkNonEmpty (Container.nameOf nl n) of
                     -- FIXME: convert htools codebase to non-empty strings
                     Bad msg -> error $ "Empty node name for idx " ++
                                show n ++ ": " ++ msg ++ "??"
                     Ok ne -> Just ne
      -- Migrate/failover opcode template; used as-is for DRBD moves
      -- (no explicit target node), and with a target for shared storage.
      opF = OpCodes.OpInstanceMigrate
              { OpCodes.opInstanceName = iname
              , OpCodes.opInstanceUuid = Nothing
              , OpCodes.opMigrationMode = Nothing -- default
              , OpCodes.opOldLiveMode = Nothing -- default as well
              , OpCodes.opTargetNode = Nothing -- this is drbd
              , OpCodes.opTargetNodeUuid = Nothing
              , OpCodes.opAllowRuntimeChanges = False
              , OpCodes.opIgnoreIpolicy = False
              , OpCodes.opMigrationCleanup = False
              , OpCodes.opIallocator = Nothing
              , OpCodes.opAllowFailover = True
              , OpCodes.opIgnoreHvversions = True
              }
      opFA n = opF { OpCodes.opTargetNode = lookNode n } -- not drbd
      -- Replace-disks opcode with node n as the new secondary.
      opR n = OpCodes.OpInstanceReplaceDisks
                { OpCodes.opInstanceName = iname
                , OpCodes.opInstanceUuid = Nothing
                , OpCodes.opEarlyRelease = False
                , OpCodes.opIgnoreIpolicy = False
                , OpCodes.opReplaceDisksMode = OpCodes.ReplaceNewSecondary
                , OpCodes.opReplaceDisksList = []
                , OpCodes.opRemoteNode = lookNode n
                , OpCodes.opRemoteNodeUuid = Nothing
                , OpCodes.opIallocator = Nothing
                }
  -- Compound moves are expressed as sequences of the two primitives.
  in case move of
       Failover -> [ opF ]
       FailoverToAny np -> [ opFA np ]
       ReplacePrimary np -> [ opF, opR np, opF ]
       ReplaceSecondary ns -> [ opR ns ]
       ReplaceAndFailover np -> [ opR np, opF ]
       FailoverAndReplace ns -> [ opF, opR ns ]
-- * Node group functions

-- | Computes the group of an instance.
--
-- Fails (in the 'Result' monad) when the primary and secondary nodes
-- live in different node groups, since such a placement is invalid.
instanceGroup :: Node.List -> Instance.Instance -> Result Gdx
instanceGroup nl i =
  let sidx = Instance.sNode i
      pnode = Container.find (Instance.pNode i) nl
      -- Instances without a secondary are treated as single-group.
      snode = if sidx == Node.noSecondary
                then pnode
                else Container.find sidx nl
      pgroup = Node.group pnode
      sgroup = Node.group snode
  in if pgroup /= sgroup
       -- Fixed typo in the error message ("accross" -> "across").
       then fail ("Instance placed across two node groups, primary " ++
                  show pgroup ++ ", secondary " ++ show sgroup)
       else return pgroup
-- | Computes the group of an instance per the primary node.
instancePriGroup :: Node.List -> Instance.Instance -> Gdx
instancePriGroup nl i = Node.group (Container.find (Instance.pNode i) nl)
-- | Compute the list of badly allocated instances (split across node
-- groups).
findSplitInstances :: Node.List -> Instance.List -> [Instance.Instance]
findSplitInstances nl il =
  [ inst | inst <- Container.elems il, not (isOk (instanceGroup nl inst)) ]
-- | Splits a cluster into the component node groups.
splitCluster :: Node.List -> Instance.List ->
                [(Gdx, (Node.List, Instance.List))]
splitCluster nl il =
  let ngroups = Node.computeGroups (Container.elems nl)
  in map (\(gdx, nodes) ->
           let nidxs = map Node.idx nodes
               nodes' = zip nidxs nodes
               -- Instances are assigned to a group by primary node only.
               instances = Container.filter ((`elem` nidxs) . Instance.pNode) il
           in (gdx, (Container.fromList nodes', instances))) ngroups
-- | Compute the list of nodes that are to be evacuated, given a list
-- of instances and an evacuation mode.
nodesToEvacuate :: Instance.List -- ^ The cluster-wide instance list
                -> EvacMode      -- ^ The evacuation mode we're using
                -> [Idx]         -- ^ List of instance indices being evacuated
                -> IntSet.IntSet -- ^ Set of node indices
nodesToEvacuate il mode =
  -- noSecondary is a sentinel index, not a real node, so drop it last.
  IntSet.delete Node.noSecondary .
  foldl' (\ns idx ->
            let i = Container.find idx il
                pdx = Instance.pNode i
                sdx = Instance.sNode i
                dt = Instance.diskTemplate i
                -- Only DRBD instances contribute their secondary node.
                withSecondary = case dt of
                                  DTDrbd8 -> IntSet.insert sdx ns
                                  _ -> ns
            in case mode of
                 ChangePrimary -> IntSet.insert pdx ns
                 ChangeSecondary -> withSecondary
                 ChangeAll -> IntSet.insert pdx withSecondary
         ) IntSet.empty
|
ganeti-github-testing/ganeti-test-1
|
src/Ganeti/HTools/Cluster.hs
|
Haskell
|
bsd-2-clause
| 80,090
|
import CircUtils.QacgBool
import Test.QuickCheck
import Text.Printf
-- Run every named QuickCheck action, printing an aligned label first.
main = mapM_ (\(s,a) -> printf "%-25s: " s >> a) tests

-- Sample expressions of increasing nesting over the variables a..g.
exp1 = Xor $ map V ["a","b","c"]
exp2 = Xor [And (map V ["e","f","g"]), V "a"]
exp3 = And [exp1,exp2]
exp4 = Xor [exp3,exp1]
exp5 = And [exp3,exp4]

-- Properties: simplify/flatten must preserve evaluation.  Assignments
-- shorter than 7 values pass vacuously -- presumably so that every
-- variable used by the expressions gets a value; TODO confirm.
prop_simp exp a = if length a < 7 then True else evaluate a (simplify exp) == evaluate a exp
prop_flat exp a = if length a < 7 then True else evaluate a (flatten exp) == evaluate a exp

-- Label/action pairs consumed by 'main'.
tests = [("Simplify/exp1", quickCheck (prop_simp exp1))
        ,("Simplify/exp2", quickCheck (prop_simp exp2))
        ,("Simplify/exp3", quickCheck (prop_simp exp3))
        ,("Simplify/exp4", quickCheck (prop_simp exp4))
        ,("Simplify/exp5", quickCheck (prop_simp exp5))
        ,("flat/exp1", quickCheck (prop_flat exp1))
        ,("flat/exp2", quickCheck (prop_flat exp2))
        ,("flat/exp3", quickCheck (prop_flat exp3))
        ,("flat/exp4", quickCheck (prop_flat exp4))
        ,("flat/exp5", quickCheck (prop_flat exp5))]
|
aparent/qacg
|
src/QACG/CircUtils/QacgBoolTest.hs
|
Haskell
|
bsd-3-clause
| 1,005
|
{-# LANGUAGE TupleSections, TypeFamilies, FlexibleContexts, PackageImports #-}
module TestPusher (XmlPusher(..), Zero(..), One(..), Two(..), testPusher) where
import Control.Monad
import Control.Concurrent
import Data.Maybe
import Data.Pipe
import Data.Pipe.ByteString
import System.IO
import Text.XML.Pipe
import XmlPusher
-- | Drive an 'XmlPusher': everything read from the pusher is rendered
-- to stdout in a background thread, while XML nodes parsed from stdin
-- are written to it.  The @tp@ argument is only used to pin the pusher
-- type via 'asTypeOf'; its value is never inspected.
testPusher :: XmlPusher xp =>
  xp Handle -> NumOfHandle xp Handle -> PusherArg xp -> IO ()
testPusher tp hs as = do
  -- was: generate hs as >>= return . (`asTypeOf` tp)
  -- 'fmap' avoids the redundant bind/return pair.
  xp <- fmap (`asTypeOf` tp) (generate hs as)
  void . forkIO . runPipe_ $ readFrom xp
    =$= convert (xmlString . (: []))
    =$= toHandle stdout
  runPipe_ $ fromHandle stdin
    =$= xmlEvent
    =$= convert fromJust
    =$= xmlNode []
    =$= writeTo xp
|
YoshikuniJujo/forest
|
subprojects/xml-push/TestPusher.hs
|
Haskell
|
bsd-3-clause
| 693
|
{-# LANGUAGE OverloadedStrings #-}
module Main ( main ) where
import Control.Applicative
import Data.Monoid
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Options.Applicative hiding ( (&) )
import System.FilePath
import Text.LaTeX.Base as Tex
import Foreign.Inference.Interface
-- | Command-line options: output root directory plus the library
-- interface files to summarize.
data Opts = Opts { destRoot :: FilePath
                 , interfaceFiles :: [FilePath]
                 }
          deriving (Show)

-- | optparse-applicative parser for 'Opts'.
cmdOpts :: Parser Opts
cmdOpts = Opts
  <$> strOption
      ( long "root"
      <> short 'r'
      <> metavar "DIR"
      <> help "The root directory in which generated tables will be placed")
  <*> arguments str ( metavar "FILE" )
main :: IO ()
main = execParser args >>= realMain
  where
    args = info (helper <*> cmdOpts)
      ( fullDesc
      <> progDesc "Output LaTeX tables for the dissertation"
      <> header "iitables - generate LaTeX tables")

-- | Read every interface file, render the five statistics tables and
-- write them under the destination root.
realMain :: Opts -> IO ()
realMain opts = do
  interfaces <- mapM readLibraryInterface (interfaceFiles opts)
  let mt = renderMemoryStatsTable interfaces
      pt = renderPointerStatsTable interfaces
      ot = renderOutStatsTable interfaces
      at = renderArrayStatsTable interfaces
      nt = renderNullStatsTable interfaces
  -- NOTE(review): assumes the pointers/ and memory/ subdirectories of
  -- the destination root already exist -- confirm or create them here.
  T.writeFile (destRoot opts </> "pointers/overall-table.tex") (Tex.render pt)
  T.writeFile (destRoot opts </> "pointers/null-table.tex") (Tex.render nt)
  T.writeFile (destRoot opts </> "pointers/out-table.tex") (Tex.render ot)
  T.writeFile (destRoot opts </> "pointers/array-table.tex") (Tex.render at)
  T.writeFile (destRoot opts </> "memory/big-table.tex") (Tex.render mt)
-- renderPointerStatsTable :: [LibraryInterface] -> LaTeX
-- renderPointerStatsTable ifaces =
-- mconcat (map pointerSummaryToRow pointerSummaries)
-- <> (raw "\\midrule" %: "")
-- <> totals
-- where
-- pointerSummaries = map toPointerSummary ifaces
-- totals = textbf (texy ("Total" :: Text))
-- & summField psNumFuncs pointerSummaries
-- & summField psOutFuncs pointerSummaries
-- & summField psOutParams pointerSummaries
-- & summField psInOutFuncs pointerSummaries
-- & summField psInOutParams pointerSummaries
-- & summField psArrayFuncs pointerSummaries
-- & summField psArrayParams pointerSummaries
-- & texy (hmean (map psPercentAnnot pointerSummaries))
-- <> lnbk %: ""
-- | Overall pointer table: one row per library (name, #functions,
-- %annotated), then a rule and a totals row.
renderPointerStatsTable :: [LibraryInterface] -> LaTeX
renderPointerStatsTable ifaces =
  mconcat (map pointerSummaryToRow pointerSummaries)
  <> (raw "\\midrule" %: "")
  <> totals
  where
    pointerSummaryToRow :: PointerSummary -> LaTeX
    pointerSummaryToRow ps =
      texy (psLibraryName ps) &
      texy (psNumFuncs ps) &
      texy (psPercentAnnot ps) <>
      lnbk %: psLibraryName ps
    pointerSummaries = map toPointerSummary ifaces
    -- Percentages are combined with a harmonic mean, not summed.
    totals = textbf (texy ("Total" :: Text))
             & summField psNumFuncs pointerSummaries
             & texy (hmean (map psPercentAnnot pointerSummaries))
             <> lnbk %: ""
-- | Out/in-out parameter table: one row per library, then totals.
renderOutStatsTable :: [LibraryInterface] -> LaTeX
renderOutStatsTable ifaces =
  mconcat (map pointerSummaryToRow pointerSummaries)
  <> (raw "\\midrule" %: "")
  <> totals
  where
    pointerSummaryToRow :: PointerSummary -> LaTeX
    pointerSummaryToRow ps =
      texy (psLibraryName ps) &
      texy (psNumFuncs ps) &
      texy (psOutFuncs ps) &
      texy (psOutParams ps) &
      texy (psInOutFuncs ps) &
      texy (psInOutParams ps) <>
      lnbk %: psLibraryName ps
    pointerSummaries = map toPointerSummary ifaces
    totals = textbf (texy ("Total" :: Text))
             & summField psNumFuncs pointerSummaries
             & summField psOutFuncs pointerSummaries
             & summField psOutParams pointerSummaries
             & summField psInOutFuncs pointerSummaries
             & summField psInOutParams pointerSummaries
             <> lnbk %: ""

-- | Array parameter table: one row per library, then totals.
renderArrayStatsTable :: [LibraryInterface] -> LaTeX
renderArrayStatsTable ifaces =
  mconcat (map pointerSummaryToRow pointerSummaries)
  <> (raw "\\midrule" %: "")
  <> totals
  where
    pointerSummaryToRow :: PointerSummary -> LaTeX
    pointerSummaryToRow ps =
      texy (psLibraryName ps) &
      texy (psNumFuncs ps) &
      texy (psArrayFuncs ps) &
      texy (psArrayParams ps) <>
      lnbk %: psLibraryName ps
    pointerSummaries = map toPointerSummary ifaces
    totals = textbf (texy ("Total" :: Text))
             & summField psNumFuncs pointerSummaries
             & summField psArrayFuncs pointerSummaries
             & summField psArrayParams pointerSummaries
             <> lnbk %: ""

-- | Not-null parameter table: one row per library, then totals.
renderNullStatsTable :: [LibraryInterface] -> LaTeX
renderNullStatsTable ifaces =
  mconcat (map pointerSummaryToRow pointerSummaries)
  <> (raw "\\midrule" %: "")
  <> totals
  where
    pointerSummaryToRow :: PointerSummary -> LaTeX
    pointerSummaryToRow ps =
      texy (psLibraryName ps) &
      texy (psNumFuncs ps) &
      texy (psNullFuncs ps) &
      texy (psNullParams ps) <>
      lnbk %: psLibraryName ps
    pointerSummaries = map toPointerSummary ifaces
    totals = textbf (texy ("Total" :: Text))
             & summField psNumFuncs pointerSummaries
             & summField psNullFuncs pointerSummaries
             & summField psNullParams pointerSummaries
             <> lnbk %: ""
-- | Memory-management table (#functions, #allocators, #finalizers):
-- one row per library, then totals.
renderMemoryStatsTable :: [LibraryInterface] -> LaTeX
renderMemoryStatsTable ifaces =
  mconcat (map memorySummaryToRow memorySummaries)
  <> (raw "\\midrule" %: "")
  <> totals
  where
    memorySummaries = map toMemorySummary ifaces
    totals = textbf (texy ("Total" :: Text))
             & summField msNumFuncs memorySummaries
             & summField msNumAllocators memorySummaries
             & summField msNumFinalizers memorySummaries
             <> lnbk %: ""
    memorySummaryToRow :: MemorySummary -> LaTeX
    memorySummaryToRow ms =
      texy (msLibraryName ms)
      & texy (msNumFuncs ms)
      & texy (msNumAllocators ms)
      & texy (msNumFinalizers ms)
      <> lnbk %: ""
-- | Harmonic mean of a list of ints, rounded to the nearest 'Int'.
--
-- Returns 0 for the empty list: the original expression evaluated
-- @0/0@ and rounded NaN, which yields an unspecified garbage value.
-- A list containing 0 also yields 0 (its reciprocal sum is infinite).
hmean :: [Int] -> Int
hmean [] = 0
hmean ns = round $ realN / sum recips
  where
    realN :: Double
    realN = fromIntegral (length ns)
    recips :: [Double]
    recips = map ((1.0/) . fromIntegral) ns
-- | Sum a numeric field over all summaries and render it as LaTeX.
-- (Replaces the hand-rolled @foldr (+) 0 . map f@ with 'sum'.)
summField :: (a -> Int) -> [a] -> LaTeX
summField f = texy . sum . map f
-- | Per-library memory-management statistics.
data MemorySummary =
  MemorySummary { msLibraryName :: Text
                , msNumFuncs :: Int
                , msNumAllocators :: Int
                , msNumFinalizers :: Int
                }
  deriving (Eq, Ord, Show)

-- | Summarize one library interface: count its functions, finalizer
-- parameters, and allocator functions.
toMemorySummary :: LibraryInterface -> MemorySummary
toMemorySummary i =
  MemorySummary { msLibraryName = T.pack $ dropExtensions $ libraryName i
                , msNumFuncs = nFuncs
                , msNumFinalizers = countIf (paramHasAnnot (==PAFinalize)) ps
                , msNumAllocators = countIf (funcIs isAlloc) fs
                }
  where
    nFuncs = length fs
    fs = libraryFunctions i
    -- All parameters of all functions, flattened.
    ps = concatMap foreignFunctionParameters fs
    isAlloc :: FuncAnnotation -> Bool
    isAlloc (FAAllocator _) = True
    isAlloc _ = False
-- | Per-library pointer-annotation statistics.
data PointerSummary =
  PointerSummary { psLibraryName :: Text
                 , psNumFuncs :: Int
                 , psOutFuncs :: Int
                 , psOutParams :: Int
                 , psInOutFuncs :: Int
                 , psInOutParams :: Int
                 , psArrayFuncs :: Int
                 , psArrayParams :: Int
                 , psNullFuncs :: Int
                 , psNullParams :: Int
                 , psPercentAnnot :: Int
                 }
  deriving (Eq, Ord, Show)

-- | Count, per annotation kind, the annotated functions and parameters
-- of one library interface.
toPointerSummary :: LibraryInterface -> PointerSummary
toPointerSummary i =
  PointerSummary { psLibraryName = T.pack $ dropExtensions $ libraryName i
                 , psNumFuncs = nFuncs
                 , psOutFuncs = countIf (funcHasAnnot (==PAOut)) fs
                 , psOutParams = countIf (paramHasAnnot (==PAOut)) ps
                 , psInOutFuncs = countIf (funcHasAnnot (==PAInOut)) fs
                 , psInOutParams = countIf (paramHasAnnot (==PAInOut)) ps
                 , psArrayFuncs = countIf (funcHasAnnot isArray) fs
                 , psArrayParams = countIf (paramHasAnnot isArray) ps
                 , psNullFuncs = countIf (funcHasAnnot (==PANotNull)) fs
                 , psNullParams = countIf (paramHasAnnot (==PANotNull)) ps
                 -- Percent of functions with at least one pointer annotation.
                 , psPercentAnnot = round $ 100.0 * totalAnnotFuncs / fromIntegral nFuncs
                 }
  where
    totalAnnotFuncs :: Double
    totalAnnotFuncs = fromIntegral $ countIf (funcHasAnnot isPointerAnnot) fs
    nFuncs = length fs
    fs = libraryFunctions i
    ps = concatMap foreignFunctionParameters fs
    isPointerAnnot :: ParamAnnotation -> Bool
    isPointerAnnot (PAArray _) = True
    isPointerAnnot PAOut = True
    isPointerAnnot PAInOut = True
    isPointerAnnot PANotNull = True
    isPointerAnnot _ = False
-- | Whether the annotation marks an array parameter.
isArray :: ParamAnnotation -> Bool
isArray ann = case ann of
  PAArray _ -> True
  _         -> False
-- | Count the elements of a list satisfying a predicate.
countIf :: (t -> Bool) -> [t] -> Int
countIf p xs = length [ x | x <- xs, p x ]
-- | Whether any annotation on the parameter satisfies the predicate.
paramHasAnnot :: (ParamAnnotation -> Bool) -> Parameter -> Bool
paramHasAnnot p = any p . parameterAnnotations

-- | Whether any parameter of the function carries a matching annotation.
-- ('or . map f' replaced by the equivalent 'any f'.)
funcHasAnnot :: (ParamAnnotation -> Bool) -> ForeignFunction -> Bool
funcHasAnnot p = any (paramHasAnnot p) . foreignFunctionParameters

-- | Whether any function-level annotation satisfies the predicate.
funcIs :: (FuncAnnotation -> Bool) -> ForeignFunction -> Bool
funcIs p = any p . foreignFunctionAnnotations
|
travitch/iiglue
|
tools/IITableOutput.hs
|
Haskell
|
bsd-3-clause
| 9,446
|
module Day11 where
import Data.List
import Control.Applicative
{- Day 11: Corporate Policy -}
-- | Puzzle input: the current password.
input :: String
input = "cqjxjnds"

-- | Increment one character while threading a carry right-to-left
-- (used with 'foldr' over the whole password in 'incStr').
--
-- NOTE(review): on carry, a character in "iol" is bumped twice
-- ('succ . succ'), e.g. 'i' -> 'k', skipping the valid letter in
-- between -- presumably a shortcut past the forbidden letters; confirm
-- this is intended.
inc :: Char -> (Bool, String) -> (Bool, String)
inc x (carry, xs)
  | not carry = (False, x:xs)
  | x == 'z' = (True, 'a':xs)
  | x `elem` "iol" = (False, (succ . succ $ x) : xs)
  | otherwise = (False, succ x : xs)
-- | Increment a password string, rightmost character first, starting
-- with an initial carry so the last character always advances.
incStr :: String -> String
incStr s = snd (foldr inc (True, []) s)
-- | At least two runs of repeated letters (e.g. "aa" and "bb").
-- A single longer run such as "aaaa" still counts as only one pair.
has2Pairs :: String -> Bool
has2Pairs s = length [ g | g <- group s, length g >= 2 ] >= 2
-- | Contains at least three consecutive increasing letters (abc, ...).
-- Shifting each character back by its position maps any increasing run
-- onto a run of equal values, which 'group' then detects.
hasStraight :: String -> Bool
hasStraight xs = any ((>= 3) . length) (group (zipWith back [0 ..] xs))
  where
    -- Apply 'pred' n times (same as composing pred with itself n times).
    back n c = iterate pred c !! n
-- | A password is valid when it has two pairs and a straight.
isValid :: String -> Bool
isValid s = has2Pairs s && hasStraight s

-- | The first valid password strictly after the given one.
getNextPassword :: String -> String
getNextPassword p = head (filter isValid (drop 1 (iterate incStr p)))
-- | Part One: print the next valid password after 'input'.
day11 :: IO ()
day11 = print $ getNextPassword input

{- Part Two -}

-- | Part Two starts from the answer of Part One.
input2 :: String
input2 = "cqjxxyzz"

day11' :: IO ()
day11' = print $ getNextPassword input2
|
Rydgel/advent-of-code
|
src/Day11.hs
|
Haskell
|
bsd-3-clause
| 1,013
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Sequent.Check
( CheckT
, Check
, evalCheck
, evalCheckT
, liftEnv) where
import Control.Applicative (Alternative)
import Control.Monad (MonadPlus)
import Control.Monad.Trans (MonadTrans, lift)
import Control.Monad.Trans.Maybe (MaybeT, runMaybeT)
import Control.Monad.Writer (MonadWriter, WriterT, listen, pass,
runWriterT, tell)
import Data.Functor.Identity (Identity, runIdentity)
import Sequent.Env (EnvT, evalEnvT)
-- Wrapper around String to add a new line between two non-empty lines
-- when appending them together
newtype Lines = Lines { unLines :: String }
  deriving (Eq)

instance Show Lines where
  show = unLines

-- mempty ("") is a genuine identity because mappend special-cases
-- empty operands instead of inserting a newline.
instance Monoid Lines where
  mempty = Lines ""
  mappend a b
    | b == mempty = a
    | a == mempty = b
    | otherwise = Lines (unLines a ++ "\n" ++ unLines b)
-- Everything that is needed when checking a proof:
-- + MaybeT to be able to terminate an incorrect proof
-- + EnvT to generate fresh variables
-- + WriterT to log the steps of the proof
-- TODO replace MaybeT with some instance of MonadError ?
newtype CheckT m a = CheckT { runCheckT :: MaybeT (WriterT Lines (EnvT m)) a }
  deriving ( Functor
           , Applicative
           , Alternative
           , MonadPlus
           , Monad)

-- | 'CheckT' with no underlying effects.
type Check = CheckT Identity
-- Write a custom instance to be able to use the "tell :: String -> _" interface
-- from the outside, keeping the Lines type hidden and have a custom mappend
instance Monad m => MonadWriter String (CheckT m) where
  tell = CheckT . tell . Lines
  listen = CheckT . fmap (fmap unLines) . listen . runCheckT
  -- pass: rewrap the caller's String -> String function so it operates
  -- on the hidden Lines type.
  pass = CheckT . pass . fmap (fmap (\f -> Lines . f . unLines)) . runCheckT

-- The MonadTrans instance can't be automatically derived because of the StateT
-- in the EnvT. See (https://www.reddit.com/r/haskell/comments/3mrkwe/issue_deriving_monadtrans_for_chained_custom/)
instance MonadTrans CheckT where
  lift = CheckT . lift . lift . lift
-- | Run a 'CheckT' computation: 'Nothing' means the proof failed; the
-- String is the accumulated proof log.
evalCheckT :: (Monad m) => CheckT m a -> m (Maybe a, String)
evalCheckT = fmap (fmap unLines) . evalEnvT . runWriterT . runMaybeT . runCheckT

-- | Pure version of 'evalCheckT'.
evalCheck :: Check a -> (Maybe a, String)
evalCheck = runIdentity . evalCheckT

-- | Lift a fresh-variable environment action into 'CheckT'.
liftEnv :: (Monad m) => EnvT m a -> CheckT m a
liftEnv = CheckT . lift . lift
|
matthieubulte/sequent
|
src/Sequent/Check.hs
|
Haskell
|
bsd-3-clause
| 2,638
|
module Options.TypesSpec (main, spec) where
import Options.Types
import Test.Hspec
import Test.QuickCheck
import Test.QuickCheck.Instances
main :: IO ()
main = hspec spec

spec :: Spec
spec = do
  describe "someFunction" $ do
    it "should work fine" $ do
      property someFunction

-- NOTE(review): placeholder property -- @x === y@ fails whenever
-- QuickCheck generates two different booleans, so this spec cannot
-- pass as written; replace with a real property.
someFunction :: Bool -> Bool -> Property
someFunction x y = x === y
|
athanclark/optionz
|
test/Options/TypesSpec.hs
|
Haskell
|
bsd-3-clause
| 357
|
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE PolymorphicComponents #-}
module NHorn.LaCarte (
Expr(..),
(:+:)(..),
(:<:),
foldExpr,
inj,
prj,
)
where
-- | Fixpoint of a signature functor: the expression tree.
newtype Expr f = In (f (Expr f))

-- | Coproduct of two signatures (data types a la carte style).
data (f2 :+: g) a s f e = Inl (f2 a s f e) | Inr (g a s f e)

instance (Functor (f2 a s f), Functor (g a s f)) => Functor ((f2 :+: g) a s f) where
  fmap h (Inl f) = Inl (fmap h f)
  fmap h (Inr g) = Inr (fmap h g)

-- | Catamorphism (fold) over 'Expr'.
foldExpr :: Functor f => (f a -> a) -> Expr f -> a
foldExpr f (In t) = f (fmap (foldExpr f) t)
--------------------- Quite a bit harder part of code

-- | Path to a signature inside a coproduct tree.
data Pos = Here | Le Pos | Ri Pos
-- | Search result: unique position, absent, or present on both sides.
data Res = Found Pos | NotFound | Ambiguous

-- | Locate signature @e@ inside the (possibly nested) coproduct @p@.
type family Elem (e :: (* -> * -> *) -> * -> * -> * -> *) (p :: (* -> * -> *) -> * -> * -> * -> * ) :: Res where
  Elem e e = Found Here
  Elem e (l :+: r) = Choose (Elem e l ) (Elem e r )
  Elem e p = NotFound

-- | Combine the search results of both branches; two hits are ambiguous.
type family Choose (l :: Res) (r :: Res) :: Res where
  Choose (Found x ) (Found y) = Ambiguous
  Choose Ambiguous y = Ambiguous
  Choose x Ambiguous = Ambiguous
  Choose (Found x) y = Found (Le x )
  Choose x (Found y)= Found (Ri y)
  Choose x y = NotFound
-- | Poly-kinded proxy used to pass the search result at the type level.
data Proxy a = P

-- | Injection/projection between a signature and a coproduct, driven
-- by the position computed by 'Elem'.
class Subsume (res :: Res) f2 g a s f where
  inj' :: Proxy res -> f2 a s f a' -> g a s f a'
  prj' :: Proxy res -> g a s f a' -> Maybe (f2 a s f a')

-- Base case: the signature is the whole coproduct.
instance Subsume (Found Here) f2 f2 a s f where
  inj' _ = id
  prj' _ = Just

-- Recurse into the left branch of the coproduct.
instance Subsume (Found p) f2 l a s f => Subsume (Found (Le p)) f2 (l :+: r ) a s f where
  inj' _ = Inl . inj' (P :: Proxy (Found p))
  prj' _ (Inl x ) = prj' (P :: Proxy (Found p)) x
  prj' _ (Inr _) = Nothing

-- Recurse into the right branch of the coproduct.
instance Subsume (Found p) f2 r a s f => Subsume (Found (Ri p)) f2 (l :+: r ) a s f where
  inj' _ = Inr . inj' (P :: Proxy (Found p))
  prj' _ (Inr x ) = prj' (P :: Proxy (Found p)) x
  prj' _ (Inl _) = Nothing

-- | @f2@ is subsumed by @g@ when it occurs at exactly one position.
type (f2 :<: g) a s f = Subsume (Elem f2 g) f2 g a s f

inj :: forall f2 g a s f e. (f2 :<: g) a s f => f2 a s f e -> g a s f e
inj = inj' (P :: Proxy (Elem f2 g))

prj :: forall f2 g a s f e. (f2 :<: g) a s f => g a s f e -> Maybe (f2 a s f e)
prj = prj' (P :: Proxy (Elem f2 g))
|
esengie/algebraic-checker
|
src/NHorn/LaCarte.hs
|
Haskell
|
bsd-3-clause
| 2,453
|
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
-- | Functions for traversing the comment AST and analyzing the nodes it contains.
--
-- To start traversing the comment AST, use 'Clang.Cursor.getParsedComment' to
-- retrieve the comment from an AST node that may be associated with one (for
-- example, any kind of declaration). You can access child nodes in the AST using
-- 'getChildren'. Most of the important information about comment AST nodes is
-- contained in the fields of the 'ParsedComment' type.
--
-- This module is intended to be imported qualified.
module Clang.Comment
(
-- * Navigating the comment AST
getChildren
-- * Predicates and transformations
, isWhitespace
, hasTrailingNewline
, getTagCommentAsString
, getFullCommentAsHTML
, getFullCommentAsXML
-- * Comment AST nodes
, ParsedComment(..)
, ParamPassDirection(..)
, FFI.ParamPassDirectionKind (..)
, FFI.InlineCommandRenderStyle (..)
) where
import Control.Applicative
import Control.Monad
import Control.Monad.IO.Class
import Data.Maybe
import Clang.Internal.Comment
import qualified Clang.Internal.FFI as FFI
import Clang.Internal.Monad
-- | Returns the children nodes of the given comment in the comment AST.
getChildren :: ClangBase m => ParsedComment s' -> ClangT s m [ParsedComment s]
getChildren pc = do
  let c = getFFIComment pc
  numC <- liftIO $ FFI.comment_getNumChildren c
  -- Children for which 'parseComment' yields Nothing are dropped.
  mayCs <- forM [0..(numC - 1)] $ \i ->
    parseComment =<< liftIO (FFI.comment_getChild mkProxy c i)
  return $ catMaybes mayCs
-- | Returns 'True' if the provided comment is a 'TextComment' which is empty or contains
-- only whitespace, or if it is a 'ParagraphComment' which contains only whitespace
-- 'TextComment' nodes.
isWhitespace :: ClangBase m => ParsedComment s' -> ClangT s m Bool
isWhitespace c = liftIO $ FFI.comment_isWhitespace (getFFIComment c)

-- | Returns 'True' if the provided comment is inline content and has a newline immediately
-- following it in the comment text. Newlines between paragraphs do not count.
hasTrailingNewline :: ClangBase m => ParsedComment s' -> ClangT s m Bool
hasTrailingNewline c = liftIO $ FFI.inlineContentComment_hasTrailingNewline (getFFIComment c)

-- | Returns a string representation of an 'HTMLStartTagComment' or 'HTMLEndTagComment'.
-- For other kinds of comments, returns 'Nothing'.
getTagCommentAsString :: ClangBase m => ParsedComment s' -> ClangT s m (Maybe (FFI.ClangString s))
getTagCommentAsString (HTMLStartTagComment c _ _ _) = Just <$> FFI.hTMLTagComment_getAsString c
getTagCommentAsString (HTMLEndTagComment c _) = Just <$> FFI.hTMLTagComment_getAsString c
-- Any other node kind has no tag representation.
getTagCommentAsString _ = return Nothing
-- | Converts the given 'FullComment' to an HTML fragment.
--
-- Specific details of HTML layout are subject to change. Don't try to parse
-- this HTML back into an AST; use other APIs instead.
--
-- Currently the following CSS classes are used:
--
-- * \"para-brief\" for \'brief\' paragraph and equivalent commands;
--
-- * \"para-returns\" for \'returns\' paragraph and equivalent commands;
--
-- * \"word-returns\" for the \"Returns\" word in a \'returns\' paragraph.
--
-- Function argument documentation is rendered as a \<dl\> list with arguments
-- sorted in function prototype order. The following CSS classes are used:
--
-- * \"param-name-index-NUMBER\" for parameter name (\<dt\>);
--
-- * \"param-descr-index-NUMBER\" for parameter description (\<dd\>);
--
-- * \"param-name-index-invalid\" and \"param-descr-index-invalid\" are used if
-- parameter index is invalid.
--
-- Template parameter documentation is rendered as a \<dl\> list with
-- parameters sorted in template parameter list order. The following CSS classes are used:
--
-- * \"tparam-name-index-NUMBER\" for parameter name (\<dt\>);
--
-- * \"tparam-descr-index-NUMBER\" for parameter description (\<dd\>);
--
-- * \"tparam-name-index-other\" and \"tparam-descr-index-other\" are used for
-- names inside template template parameters;
--
-- * \"tparam-name-index-invalid\" and \"tparam-descr-index-invalid\" are used if
-- parameter position is invalid.
getFullCommentAsHTML :: ClangBase m => ParsedComment s' -> ClangT s m (Maybe (FFI.ClangString s))
getFullCommentAsHTML (FullComment c) = Just <$> FFI.fullComment_getAsHTML c
-- Only 'FullComment' nodes can be rendered; everything else is Nothing.
getFullCommentAsHTML _ = return Nothing

-- | Converts the given 'FullComment' to an XML document.
--
-- A Relax NG schema for the XML is distributed in the \"comment-xml-schema.rng\" file
-- inside the libclang source tree.
getFullCommentAsXML :: ClangBase m => ParsedComment s' -> ClangT s m (Maybe (FFI.ClangString s))
getFullCommentAsXML (FullComment c) = Just <$> FFI.fullComment_getAsXML c
-- Only 'FullComment' nodes can be converted; everything else is Nothing.
getFullCommentAsXML _ = return Nothing
|
ony/LibClang
|
src/Clang/Comment.hs
|
Haskell
|
bsd-3-clause
| 4,775
|
{-# OPTIONS_GHC -cpp #-}
module Code28_Tupling where
-- | Interleave @xs@ into @ys@, alternating between @ys@ and its
-- reverse at each step (see 'mix').
(□) :: [a] -> [a] -> [a]
xs □ ys = mix xs (ys, reverse ys)
-- | Worker for '(□)': the pair carries a list together with its
-- reverse, and the two are swapped on every step so no re-reversal
-- is ever needed.
mix :: [a] -> ([a],[a]) -> [a]
mix xs p = case (xs, p) of
  ([], (ys, _))      -> ys
  (x:rest, (ys, sy)) -> ys ++ x : mix rest (sy, ys)
-- | Fold '(□)' over a list of lists.
boxall :: [[a]] -> [a]
boxall = foldr (□) []

-- | Step function threading a list and its reverse through a fold.
op :: [a] -> ([a], [a]) -> ([a], [a])
op xs (ys,sy) = (xs □ ys, xs ⊠ sy)

-- | Reversed companion of '(□)'; the CPP flag selects the
-- specification form instead of the recursive definition.
(⊠) :: [a] -> [a] -> [a]
#ifdef SPEC_OF_BOXTIMES
xs ⊠ sy = reverse (xs □ (reverse sy))
#else
[] ⊠ sy = sy
(x:xs) ⊠ sy = (xs ⊠ (reverse sy)) ++ [x] ++ sy
#endif

-- Presumably a fused variant of 'op' computing both components in one
-- pass -- confirm against 'op' before relying on it.
op1 :: [a] -> ([a], [a]) -> ([a], [a])
op1 [] (ys,sy) = (ys,sy)
op1 (x:xs) (ys,sy) = (ys ++ [x] ++ zs,sz ++ [x] ++ sy)
  where (zs,sz) = op1 xs (sy,ys)

-- Alternative formulation of 'op' via 'mix' with a parity case split.
op2 :: [a] -> ([a], [a]) -> ([a], [a])
op2 xs (ys,sy) = if even (length xs)
                   then (mix xs (ys,sy), mix (reverse xs) (sy,ys))
                   else (mix xs (ys,sy), mix (reverse xs) (ys,sy))
|
sampou-org/pfad
|
Code/Code28_Tupling.hs
|
Haskell
|
bsd-3-clause
| 1,039
|
{-# LANGUAGE OverloadedStrings #-}
module Module ( Module(..)
, listModules
) where
import Data.Text (Text)
import Xml
-- | Which Magento code pool a module lives in.
data Codepool = Core | Community | Local deriving (Show)

-- | A Magento module declaration.
data Module = Module { moduleCodePool :: Codepool
                     , moduleNameSpace :: String
                     , moduleName :: String
                     , moduleActive :: Bool
                     } deriving (Show)

data Config = Config {
  } deriving (Show)

-- NOTE(review): this definition appears truncated in this chunk --
-- the do-block ends on a bind and never produces the [Module]; the
-- rootPath argument is also unused so far.
listModules :: Text -> IO ([Module])
listModules rootPath = do
  fileMap <- readXmlFileMap
|
dxtr/hagento
|
src/Magento/Module.hs
|
Haskell
|
bsd-3-clause
| 589
|
{-# LANGUAGE TypeOperators #-}
module World (
World(..),FinalColor,Color, Object(..), Shape (..),calcNormal,
Light(..),cmul,colRound,cadd,convertColtoFCol
-- Creation functions
-- Standard Array functions
,vUp
,vDown
,vForward
,vBackward
,vRight
,vLeft
-- Color conviniece functions
,t2c
--
-- | World Construction
,emptyWorld
,addObjectToWorld
,addLightToWorld
,createSphere
,createPlane
,createLight
,TextColor (..)
,createObj
,Entity (..)
,createWorld
,v2Light
,v2Plaine
,v2Sphere
,clamp
) where
import qualified Data.Array.Repa as R
import Vector
import Data.Word
import Control.Monad.State.Lazy
-- | Color type (RGB components as Doubles, 0..255)
type Color = (Double,Double,Double)
-- | Color type that is compatible with the Repa-IO functions
type FinalColor = (Word8,Word8,Word8)
-- | Textual interface for some standard colors
data TextColor = Red | Blue | Green | Black | White
-- | Datatype managing the world: the visible objects plus the lights
data World = World {
  items :: [Object]
  ,lights :: [Light]
  }
-- | A collective type for entities that can be added into the world
data Entity = EntO Object | EntL Light
-- | The world state monad
type WorldWrapper = State World Entity
-- | Datatype displaying visible objects: a shape with its material
-- (color, reflectance, shininess)
data Object = Object {
  shape :: Shape
  ,color :: Color
  ,reflectance :: Double
  ,shininess::Double
  }
  deriving (Show)
-- | Datatype for the shapes of objects that can be displayed
data Shape =
  Sphere {
    spos :: DoubleVector
    ,radius :: Double
    }
  | Plane {
    ppos :: DoubleVector
    ,pnormal :: DoubleVector
    }
  deriving (Show)
-- | A point light: position and color
data Light = Light{
  lpos :: DoubleVector
  ,lcolor:: Color
  }
-- | Function for calculating the normal at a specific point
calcNormal :: Object -> DoubleVector -> DoubleVector
calcNormal o@Object{shape =s@Sphere{spos = pos}} pnt = sphereNormal s pnt
calcNormal o@Object{shape =s@Plane{pnormal = norm}} _ = norm
-- | Helper for the sphere normal: the normalized vector from the
-- sphere's centre to the given point.
-- NOTE(review): partial -- only the Sphere case is handled.
sphereNormal :: Shape -> DoubleVector -> DoubleVector
sphereNormal s@Sphere{spos= pos} pnt = normalize $
  R.computeUnboxedS $ R.zipWith (-) pnt pos
-- | Color management functions
-- | Componentwise color addition
cadd :: Color -> Color -> Color
cadd (ra,ga,ba) (rb,gb,bb) = (ra + rb, ga + gb, ba + bb)
-- | Scale every color component by a factor
cmul :: Color -> Double -> Color
cmul (r,g,b) k = (k * r, k * g, k * b)
-- | Convert one Double color component to a Word8, clamping the value
-- into the representable 0..255 range before rounding.
colRound :: Double -> Word8
colRound d =
  if d >= 255.0
    then 255
    else if d <= 0.0
           then 0
           else round d
-- | Convert a Double color to the Word8 form used for image output
convertColtoFCol :: Color -> FinalColor
convertColtoFCol (red, green, blue) =
  (colRound red, colRound green, colRound blue)
-- | Map the textual color interface to an actual RGB color
t2c :: TextColor -> Color
t2c tc = case tc of
  Red   -> (255.0, 0.0, 0.0)
  Green -> (0.0, 255.0, 0.0)
  Blue  -> (0.0, 0.0, 255.0)
  Black -> (0.0, 0.0, 0.0)
  White -> (255.0, 255.0, 255.0)
--
-- |Constructor Functions
-- Standard Array functions
-- | An Up vector for the simple constructors
vUp :: (Double,Double,Double)
vUp = (0.0,1.0,0.0)
-- | A Down vector for the simple constructors
vDown :: (Double,Double,Double)
vDown = (0.0,-1.0,0.0)
-- | A Forward vector for the simple constructors
vForward :: (Double,Double,Double)
vForward = (1.0,0.0,0.0)
-- | A Backward vector for the simple constructors
vBackward :: (Double,Double,Double)
vBackward = (-1.0,0.0,0.0)
-- | A Right vector for the simple constructors
vRight:: (Double,Double,Double)
vRight = (1.0,0.0,1.0)
-- | A Left vector for the simple constructors
vLeft :: (Double,Double,Double)
vLeft = (0.0,0.0,-1.0)
-- | World Construction
-- | Constructor for a world with no objects and no lights
emptyWorld :: World
emptyWorld = World {
  items = []
  ,lights = []
  }
-- | Function to create a world from a list of entities.
--
-- Now total: the empty list is a no-op (the previous definition had
-- no [] case and crashed with a pattern-match failure).
createWorld::[Entity] -> State World ()
createWorld [] = return ()
createWorld (e:[]) = createObj e
createWorld (e:el) = do
  createObj e
  createWorld el
-- | Function to add an entity (object or light) to the world state monad
createObj :: Entity -> State World ()
createObj (EntL e) = modify (addLightToWorld e)
createObj (EntO e) = modify (addObjectToWorld e)
-- | Function to add an object to an existing world (appended at the end)
addObjectToWorld :: Object -> World -> World
addObjectToWorld o w@World{items= i} = w{items= (i ++ [o]) }
-- | Function to add a light to an existing world (appended at the end)
addLightToWorld :: Light -> World -> World
addLightToWorld l w@World{lights= ls} = w{lights= (ls ++ [l]) }
-- | Constructor to create a sphere using the simpler type of vectors
createSphere :: Double -> (Double, Double , Double) -> Color -> Double -> Double-> Object
createSphere rad (x,y,z) col ref shin = Object{ shape=Sphere{
  spos = R.fromListUnboxed (R.ix1 3) [x,y,z]
  ,radius = rad}
  ,color = col
  ,shininess = shin
  -- Reflectance is clamped into [0,1].
  ,reflectance = (clamp ref 0.0 1.0)
  }
-- | Constructor function to go from Repa array to a Sphere
v2Sphere::DoubleVector ->Color ->Double -> Double -> Double-> Object
v2Sphere pos colorIn rad ref shin = Object{ shape=Sphere{
  spos = pos
  ,radius = rad}
  ,color = colorIn
  , reflectance = (clamp ref 0.0 1.0)
  ,shininess = shin
  }
-- | Constructor function to create a plane using the simpler types of vectors
-- (position and normal), delegating to 'v2Plaine'
createPlane ::(Double, Double , Double) ->(Double, Double , Double)
  -> Color -> Double ->Double -> Object
createPlane (x,y,z) (nx,ny,nz) colIn ref shin =(v2Plaine
  (R.fromListUnboxed (R.ix1 3) [x,y,z]) ( R.fromListUnboxed (R.ix1 3)
  [nx,ny,nz]) colIn ref shin)
-- | Constructor function to go from Repa array to a Plane
v2Plaine::DoubleVector ->DoubleVector ->Color -> Double->Double -> Object
v2Plaine pposIn pnormalIn colorIn refIn shin = Object{ shape=Plane{
  ppos = pposIn
  ,pnormal = pnormalIn}
  ,color = colorIn
  , reflectance = clamp refIn 0.0 1.0
  ,shininess = shin
  }
-- | Constructor function to create a light using the simpler types of vectors
createLight ::(Double, Double , Double) -> Color -> Light
createLight (x,y,z) (col1,col2,col3) = (v2Light
  (R.fromListUnboxed (R.ix1 3)
  [x,y,z]) (R.fromListUnboxed (R.ix1 3)[col1,col2,col3]))
-- | Constructor function to go from Repa arrays to a light; the color
-- array is unpacked back into a tuple via indexing
v2Light::DoubleVector -> DoubleVector -> Light
v2Light pos colorIn = Light{
  lpos = pos
  ,lcolor = (colorIn R.! (R.Z R.:. 0),
             colorIn R.! (R.Z R.:. 1),colorIn R.! (R.Z R.:. 2))
  }
-- | Helper function clamp, ported from GLSL: restrict a value to the
-- closed interval [lo, hi].  (Parameters renamed so the Prelude's
-- min/max are no longer shadowed.)
clamp:: Double -> Double -> Double -> Double
clamp v lo hi
  | v < lo = lo
  | v > hi = hi
  | otherwise = v
|
axhav/AFPLAB3
|
World.hs
|
Haskell
|
bsd-3-clause
| 7,124
|
module Main where
-- | The infinite list [2,3,4,..]: every positive integer incremented by
-- one.  Defined at the top level, so it is a CAF shared by all uses.
incdInts :: [Integer]
incdInts = [n + 1 | n <- [1 ..]]
-- | Force successively deeper elements of the shared 'incdInts' list.
-- Because 'incdInts' is a top-level CAF, each forced prefix stays
-- reachable across the prints, so resident memory grows with the largest
-- index demanded.
-- NOTE(review): this looks like a deliberate demonstration of CAF
-- retention rather than an accidental leak — confirm before "fixing" it.
main :: IO ()
main = do
  print (incdInts !! 1000)
  print (incdInts !! 9001)
  print (incdInts !! 90010)
  print (incdInts !! 9001000)
  print (incdInts !! 9501000)
  print (incdInts !! 9901000)
|
chengzh2008/hpffp
|
src/ch28-BasicLibraries/largeCAF1.hs
|
Haskell
|
bsd-3-clause
| 264
|
----------------------------------------------------------------------------
-- |
-- Module : Source2
-- Copyright : (c) Sergey Vinokurov 2018
-- License : BSD3-style (see LICENSE)
-- Maintainer : serg.foo@gmail.com
----------------------------------------------------------------------------
module Source2 (foo2) where
-- | Double the argument.
foo2 :: Double -> Double
foo2 x = 2 * x
|
sergv/tags-server
|
test-data/0016reexport_of_missing_module/Source2.hs
|
Haskell
|
bsd-3-clause
| 379
|
module Twelve where
import Data.Char
-- | Sum every (possibly negative) integer literal occurring in a raw JSON
-- string.  A digit starts a positive number; a \'-\' starts a negative one
-- (accumulated by subtraction so the finished value comes out negative);
-- everything else is skipped.
sumJSON :: String -> Int
sumJSON [] = 0
sumJSON (x:xs)
  | isDigit x = plus (read [x]) xs
  | x == '-' = minus 0 xs
  | otherwise = sumJSON xs
-- | Continue scanning a positive number whose digits so far total @n@.
plus :: Int -> String -> Int
plus = applyOp (+)
-- | Continue scanning a negative number; each digit is subtracted from @n@.
minus :: Int -> String -> Int
minus = applyOp (-)
-- | Fold further digits of the current number into @n@ with @op@, then add
-- the finished number to the sum of the rest of the input.
--
-- Fixes over the previous version:
--   * the recursive step keeps @op@ instead of always switching to 'minus',
--     so multi-digit positive numbers are no longer corrupted;
--   * @(n * 10) `op` d@ is parenthesised explicitly — a backticked operator
--     defaults to infixl 9, tighter than '*', so the old code silently
--     computed @n * (10 `op` d)@;
--   * a number terminated by end of input is returned instead of dropped.
applyOp :: (Int -> Int -> Int) -> Int -> String -> Int
applyOp _ n [] = n
applyOp op n (x:xs)
  | isDigit x = applyOp op ((n * 10) `op` read [x]) xs
  | otherwise = n + sumJSON xs
-- | Read the puzzle input from @input/12.txt@ and sum every number in it
-- via 'sumJSON'.  Uses lazy 'readFile'; the whole result is forced by the
-- caller when the 'Int' is consumed.
twelve :: IO Int
twelve = do
  json <- readFile "input/12.txt"
  return $ sumJSON json
|
purcell/adventofcodeteam
|
app/Twelve.hs
|
Haskell
|
bsd-3-clause
| 558
|
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
module Types where
import Data.Aeson
import GHC.Generics
-- | One SSH endpoint: a host name and the remote port to connect to.
-- JSON (de)serialisation comes from the Generic-derived aeson instances.
data SSHHost = SSHHost{host :: String, remoteport :: Integer} deriving (Show, Generic, FromJSON, ToJSON)
-- | SSH settings: the login user name plus the list of hosts.
data SSHConfig = SSH{username :: String, hosts :: [SSHHost]} deriving (Show, Generic, FromJSON, ToJSON)
-- | Top-level application configuration; the SSH section is optional.
data Config =
  Config{ ssh :: Maybe SSHConfig } deriving (Show, Generic, FromJSON, ToJSON)
-- | A pair of strings; the first field is the link (see 'getLink').
-- NOTE(review): the meaning of the second field is not evident from this
-- file — confirm against the callers.
data Address = Address String String deriving Show

-- | Extract the link (first field) of an 'Address'.
getLink :: Address -> String
getLink (Address url _) = url
|
pwestling/hmonit
|
src/Types.hs
|
Haskell
|
bsd-3-clause
| 638
|
-- Copyright (c) 2014 Contributors as noted in the AUTHORS file
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
import Arduino.Uno
-- | Arduino sketch: wires pin 13 to a toggling clock stream and streams
-- each formatted timer delta out over the UART.
-- NOTE(review): the semantics of '=:' and '~>' come from the Arduino.Uno
-- DSL and are described here only as far as the names suggest — confirm
-- against the DSL documentation.
main = compileProgram $ do
    digitalOutput pin13 =: clock ~> toggle
    uart =: timerDelta ~> mapSMany formatDelta ~> flattenS
-- | Render one timer delta as three byte chunks: the literal prefix
-- @"delta: "@, the number itself, and a CRLF terminator.
formatDelta :: Expression Word -> [Expression [Byte]]
formatDelta delta = [ formatString "delta: "
                    , formatNumber delta
                    , formatString "\r\n"
                    ]
|
cjdibbs/ardunio
|
examples/UART.hs
|
Haskell
|
bsd-3-clause
| 1,070
|
module Option
( buildOption
) where
import CommandLineOption (CommandLineOption)
import qualified CommandLineOption
import qualified Git
import Types
import Control.Applicative
import Data.Char (toUpper)
import Data.Maybe (catMaybes, fromJust, fromMaybe)
import Data.Time.Calendar (toGregorian)
import Data.Time.Clock (getCurrentTime, utctDay)
-- | Assemble the full 'Option' record from parsed command-line flags plus
-- environment lookups (the current year and git's user.name/user.email).
--
-- Defaulting rules visible below:
--   * moduleName defaults to @modularize packageName@;
--   * directoryName defaults to the package name.
--
-- NOTE(review): 'fromJust' makes this partial — it crashes unless either
-- a repo or a cabal package was supplied on the command line. Presumably
-- the CLI layer guarantees one of them; confirm, or replace with an
-- explicit error message.
buildOption :: CommandLineOption -> IO Option
buildOption copt = do
    let packageName' = CommandLineOption.packageName copt
        moduleName' = fromMaybe (modularize packageName') (CommandLineOption.moduleName copt)
        directoryName' = fromMaybe packageName' (CommandLineOption.directoryName copt)
        source' = fromJust $ (Repo <$> CommandLineOption.repo copt) <|> (CabalPackage <$> CommandLineOption.cabalPackage copt)
        afterCommands' = catMaybes [ CommandLineOption.afterCommand copt ]
        dryRun' = CommandLineOption.dryRun copt
    year' <- getCurrentYear
    author' <- Git.config "user.name"
    email' <- Git.config "user.email"
    return Option { packageName = packageName'
                  , moduleName = moduleName'
                  , directoryName = directoryName'
                  , author = author'
                  , email = email'
                  , year = year'
                  , source = source'
                  , dryRun = dryRun'
                  , afterCommands = afterCommands'
                  }
-- | The current year (from the system clock, UTC day) rendered as a
-- 'String'.
getCurrentYear :: IO String
getCurrentYear = do
  now <- getCurrentTime
  let (year, _month, _day) = toGregorian (utctDay now)
  return (show year)
-- | Capitalize words and connect them with periods
--
-- >>> modularize "package"
-- "Package"
--
-- >>> modularize "package-name"
-- "Package.Name"
--
-- >>> modularize "another-package-name"
-- "Another.Package.Name"
--
modularize :: String -> String
modularize name = case name of
  []       -> []
  (c : cs) -> toUpper c : continue cs
  where
    -- Copy characters until a dash, which becomes a period and restarts
    -- capitalization on the next word.
    continue []         = []
    continue ('-' : ds) = '.' : modularize ds
    continue (d : ds)   = d : continue ds
|
fujimura/chi
|
src/Option.hs
|
Haskell
|
bsd-3-clause
| 2,146
|
{-# LANGUAGE OverloadedStrings #-}
module Test.Helper
(
module Test.Hspec.Monadic,
module Test.Hspec.Expectations
) where
import Test.Hspec.Monadic
import Test.Hspec.HUnit()
import Test.Hspec.Expectations
|
fujimura/persistent-hspec-example
|
Test/Helper.hs
|
Haskell
|
bsd-3-clause
| 214
|
{-# LANGUAGE TypeFamilies, MultiParamTypeClasses, StaticPointers, RankNTypes, GADTs, ConstraintKinds, FlexibleContexts, TypeApplications, ScopedTypeVariables, FlexibleInstances #-}
module QueryArrow.Remote.NoTranslation.Server where
import QueryArrow.DB.DB
import QueryArrow.DB.NoTranslation
import QueryArrow.DB.ResultStream
import Foreign.StablePtr
import Control.Monad.Trans.Resource
import Control.Monad.IO.Class
import QueryArrow.Remote.NoTranslation.Definitions
import QueryArrow.Remote.Definitions
import QueryArrow.Syntax.Type
import Control.Exception.Lifted (catch, SomeException)
-- | Serve 'RemoteCommand's received over the channel, replying with one
-- 'RemoteResultSet' per command, looping until 'Quit' arrives.  Open
-- connections are handed to the client as raw pointers made from
-- 'StablePtr's over a @(db, connection, ReleaseKey)@ triple; the
-- 'ReleaseKey' ties the connection's lifetime to this 'ResourceT' scope.
-- Failures in the transaction/statement commands are caught and returned
-- as 'ErrorResult' values instead of killing the server loop.
runQueryArrowServer :: forall db a. (Channel a, SendType a ~ RemoteResultSet, ReceiveType a ~ RemoteCommand,
          DBFormulaType db ~ FormulaT,
          RowType (StatementType (ConnectionType db)) ~ MapResultRow, IDatabase db) => a -> db -> ResourceT IO ()
runQueryArrowServer chan db = do
  cmd <- liftIO $ receive chan
  case cmd of
    -- Quit terminates the loop without sending a reply.
    Quit -> return ()
    _ -> do
      res <- case cmd of
        GetName -> return (StringResult (getName db))
        GetPreds -> return (PredListResult (getPreds db))
        Supported ret form vars ->
          return (BoolResult (supported db ret form vars))
        -- Open a connection and smuggle it to the client as a raw pointer;
        -- the allocate/ReleaseKey pair guarantees dbClose runs eventually.
        DBOpen -> do
          (conn, key) <- allocate (dbOpen db) dbClose
          liftIO $ ConnectionResult . castStablePtrToPtr <$> newStablePtr (db, conn, key)
        DBClose connP -> do
          let connSP = castPtrToStablePtr connP :: StablePtr (db, ConnectionType db, ReleaseKey)
          (_, _, key) <- liftIO $ deRefStablePtr connSP
          release key
          liftIO $ freeStablePtr connSP
          return UnitResult
        DBBegin connSP -> liftIO $ do
          (_, conn, _) <- deRefStablePtr (castPtrToStablePtr connSP :: StablePtr (db, ConnectionType db, ReleaseKey))
          catch (do
            dbBegin conn
            return UnitResult
            ) (\e -> return (ErrorResult (-1, show (e::SomeException))))
        DBPrepare connSP -> liftIO $
          catch (do
            (_, conn, _) <- deRefStablePtr (castPtrToStablePtr connSP :: StablePtr (db, ConnectionType db, ReleaseKey))
            dbPrepare conn
            return UnitResult
            ) (\e -> return (ErrorResult (-1, show (e::SomeException))))
        DBCommit connSP -> liftIO $
          catch (do
            (_, conn, _) <- deRefStablePtr (castPtrToStablePtr connSP :: StablePtr (db, ConnectionType db, ReleaseKey))
            dbCommit conn
            return UnitResult
            ) (\e -> return (ErrorResult (-1, show (e::SomeException))))
        DBRollback connSP -> liftIO $
          catch (do
            (_, conn, _) <- deRefStablePtr (castPtrToStablePtr connSP :: StablePtr (db, ConnectionType db, ReleaseKey))
            dbRollback conn
            return UnitResult
            ) (\e -> return (ErrorResult (-1, show (e::SomeException))))
        -- Translate, prepare, run and close a statement; all rows are
        -- materialised before being sent back.
        DBStmtExec connSP (NTDBQuery vars2 form vars) rows ->
          catch (do
            (_, conn, _) <- liftIO $ deRefStablePtr (castPtrToStablePtr connSP :: StablePtr (db, ConnectionType db, ReleaseKey))
            qu <- liftIO $ translateQuery db vars2 form vars
            stmt <- liftIO $ prepareQuery conn qu
            rows' <- getAllResultsInStream (dbStmtExec stmt (listResultStream rows))
            liftIO $ dbStmtClose stmt
            return (RowListResult rows')
            ) (\e -> return (ErrorResult (-1, show (e::SomeException))))
        -- Unreachable: Quit was already consumed by the outer case above.
        Quit -> error "error"
      liftIO $ send chan res
      runQueryArrowServer chan db
|
xu-hao/QueryArrow
|
QueryArrow-db-remote/src/QueryArrow/Remote/NoTranslation/Server.hs
|
Haskell
|
bsd-3-clause
| 3,533
|
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
--
-- (c) The University of Glasgow 2006
--
-- The purpose of this module is to transform an HsExpr into a CoreExpr which
-- when evaluated, returns a (Meta.Q Meta.Exp) computation analogous to the
-- input HsExpr. We do this in the DsM monad, which supplies access to
-- CoreExpr's of the "smart constructors" of the Meta.Exp datatype.
--
-- It also defines a bunch of knownKeyNames, in the same way as is done
-- in prelude/PrelNames. It's much more convenient to do it here, because
-- otherwise we have to recompile PrelNames whenever we add a Name, which is
-- a Royal Pain (triggers other recompilation).
-----------------------------------------------------------------------------
module DsMeta( dsBracket,
templateHaskellNames, qTyConName, nameTyConName,
liftName, liftStringName, expQTyConName, patQTyConName,
decQTyConName, decsQTyConName, typeQTyConName,
decTyConName, typeTyConName, mkNameG_dName, mkNameG_vName, mkNameG_tcName,
quoteExpName, quotePatName, quoteDecName, quoteTypeName,
tExpTyConName, tExpDataConName, unTypeName, unTypeQName,
unsafeTExpCoerceName
) where
#include "HsVersions.h"
import {-# SOURCE #-} DsExpr ( dsExpr )
import MatchLit
import DsMonad
import qualified Language.Haskell.TH as TH
import HsSyn
import Class
import PrelNames
-- To avoid clashes with DsMeta.varName we must make a local alias for
-- OccName.varName we do this by removing varName from the import of
-- OccName above, making a qualified instance of OccName and using
-- OccNameAlias.varName where varName ws previously used in this file.
import qualified OccName( isDataOcc, isVarOcc, isTcOcc, varName, tcName, dataName )
import Module
import Id
import Name hiding( isVarOcc, isTcOcc, varName, tcName )
import NameEnv
import TcType
import TyCon
import TysWiredIn
import TysPrim ( liftedTypeKindTyConName, constraintKindTyConName )
import CoreSyn
import MkCore
import CoreUtils
import SrcLoc
import Unique
import BasicTypes
import Outputable
import Bag
import DynFlags
import FastString
import ForeignCall
import Util
import Data.Maybe
import Control.Monad
import Data.List
-----------------------------------------------------------------------------
-- | Desugar a Template Haskell bracket, with its pending type-checked
-- splices bound in the meta environment first.
dsBracket :: HsBracket Name -> [PendingTcSplice] -> DsM CoreExpr
-- Returns a CoreExpr of type TH.ExpQ
-- The quoted thing is parameterised over Name, even though it has
-- been type checked. We don't want all those type decorations!
dsBracket brack splices
  = dsExtendMetaEnv new_bit (do_brack brack)
  where
    new_bit = mkNameEnv [(n, Splice (unLoc e)) | PendSplice n e <- splices]
    -- Dispatch on the bracket flavour.  DecBrL should have been converted
    -- to DecBrG by the renamer, hence the panic.
    do_brack (VarBr _ n) = do { MkC e1 <- lookupOcc n ; return e1 }
    do_brack (ExpBr e) = do { MkC e1 <- repLE e ; return e1 }
    do_brack (PatBr p) = do { MkC p1 <- repTopP p ; return p1 }
    do_brack (TypBr t) = do { MkC t1 <- repLTy t ; return t1 }
    do_brack (DecBrG gp) = do { MkC ds1 <- repTopDs gp ; return ds1 }
    do_brack (DecBrL _) = panic "dsBracket: unexpected DecBrL"
    do_brack (TExpBr e) = do { MkC e1 <- repLE e ; return e1 }
{- -------------- Examples --------------------
[| \x -> x |]
====>
gensym (unpackString "x"#) `bindQ` \ x1::String ->
lam (pvar x1) (var x1)
[| \x -> $(f [| x |]) |]
====>
gensym (unpackString "x"#) `bindQ` \ x1::String ->
lam (pvar x1) (f (var x1))
-}
-------------------------------------------------------
-- Declarations
-------------------------------------------------------
repTopP :: LPat Name -> DsM (Core TH.PatQ)
repTopP pat = do { ss <- mkGenSyms (collectPatBinders pat)
; pat' <- addBinds ss (repLP pat)
; wrapGenSyms ss pat' }
repTopDs :: HsGroup Name -> DsM (Core (TH.Q [TH.Dec]))
repTopDs group@(HsGroup { hs_valds = valds
, hs_splcds = splcds
, hs_tyclds = tyclds
, hs_instds = instds
, hs_derivds = derivds
, hs_fixds = fixds
, hs_defds = defds
, hs_fords = fords
, hs_warnds = warnds
, hs_annds = annds
, hs_ruleds = ruleds
, hs_vects = vects
, hs_docs = docs })
= do { let { tv_bndrs = hsSigTvBinders valds
; bndrs = tv_bndrs ++ hsGroupBinders group } ;
ss <- mkGenSyms bndrs ;
-- Bind all the names mainly to avoid repeated use of explicit strings.
-- Thus we get
-- do { t :: String <- genSym "T" ;
-- return (Data t [] ...more t's... }
-- The other important reason is that the output must mention
-- only "T", not "Foo:T" where Foo is the current module
decls <- addBinds ss (
do { val_ds <- rep_val_binds valds
; _ <- mapM no_splice splcds
; tycl_ds <- mapM repTyClD (tyClGroupConcat tyclds)
; role_ds <- mapM repRoleD (concatMap group_roles tyclds)
; inst_ds <- mapM repInstD instds
; deriv_ds <- mapM repStandaloneDerivD derivds
; fix_ds <- mapM repFixD fixds
; _ <- mapM no_default_decl defds
; for_ds <- mapM repForD fords
; _ <- mapM no_warn warnds
; ann_ds <- mapM repAnnD annds
; rule_ds <- mapM repRuleD ruleds
; _ <- mapM no_vect vects
; _ <- mapM no_doc docs
-- more needed
; return (de_loc $ sort_by_loc $
val_ds ++ catMaybes tycl_ds ++ role_ds ++ fix_ds
++ inst_ds ++ rule_ds ++ for_ds
++ ann_ds ++ deriv_ds) }) ;
decl_ty <- lookupType decQTyConName ;
let { core_list = coreList' decl_ty decls } ;
dec_ty <- lookupType decTyConName ;
q_decs <- repSequenceQ dec_ty core_list ;
wrapGenSyms ss q_decs
}
where
no_splice (L loc _)
= notHandledL loc "Splices within declaration brackets" empty
no_default_decl (L loc decl)
= notHandledL loc "Default declarations" (ppr decl)
no_warn (L loc (Warning thing _))
= notHandledL loc "WARNING and DEPRECATION pragmas" $
text "Pragma for declaration of" <+> ppr thing
no_vect (L loc decl)
= notHandledL loc "Vectorisation pragmas" (ppr decl)
no_doc (L loc _)
= notHandledL loc "Haddock documentation" empty
-- | The type variables bound by explicit (written) foralls in the type
-- signatures of a binding group; only 'HsForAllTy Explicit' signatures
-- contribute, since only those variables scope over the bindings.
hsSigTvBinders :: HsValBinds Name -> [Name]
-- See Note [Scoped type variables in bindings]
hsSigTvBinders binds
  = [hsLTyVarName tv | L _ (TypeSig _ (L _ (HsForAllTy Explicit qtvs _ _))) <- sigs
    , tv <- hsQTvBndrs qtvs]
  where
    sigs = case binds of
             ValBindsIn _ sigs -> sigs
             ValBindsOut _ sigs -> sigs
{- Notes
Note [Scoped type variables in bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f :: forall a. a -> a
f x = x::a
Here the 'forall a' brings 'a' into scope over the binding group.
To achieve this we
a) Gensym a binding for 'a' at the same time as we do one for 'f'
collecting the relevant binders with hsSigTvBinders
b) When processing the 'forall', don't gensym
The relevant places are signposted with references to this Note
Note [Binders and occurrences]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we desugar [d| data T = MkT |]
we want to get
Data "T" [] [Con "MkT" []] []
and *not*
Data "Foo:T" [] [Con "Foo:MkT" []] []
That is, the new data decl should fit into whatever new module it is
asked to fit in. We do *not* clone, though; no need for this:
Data "T79" ....
But if we see this:
data T = MkT
foo = reifyDecl T
then we must desugar to
foo = Data "Foo:T" [] [Con "Foo:MkT" []] []
So in repTopDs we bring the binders into scope with mkGenSyms and addBinds.
And we use lookupOcc, rather than lookupBinder
in repTyClD and repC.
-}
-- represent associated family instances
--
repTyClD :: LTyClDecl Name -> DsM (Maybe (SrcSpan, Core TH.DecQ))
repTyClD (L loc (FamDecl { tcdFam = fam })) = liftM Just $ repFamilyDecl (L loc fam)
repTyClD (L loc (SynDecl { tcdLName = tc, tcdTyVars = tvs, tcdRhs = rhs }))
= do { tc1 <- lookupLOcc tc -- See note [Binders and occurrences]
; dec <- addTyClTyVarBinds tvs $ \bndrs ->
repSynDecl tc1 bndrs rhs
; return (Just (loc, dec)) }
repTyClD (L loc (DataDecl { tcdLName = tc, tcdTyVars = tvs, tcdDataDefn = defn }))
= do { tc1 <- lookupLOcc tc -- See note [Binders and occurrences]
; tc_tvs <- mk_extra_tvs tc tvs defn
; dec <- addTyClTyVarBinds tc_tvs $ \bndrs ->
repDataDefn tc1 bndrs Nothing (hsLTyVarNames tc_tvs) defn
; return (Just (loc, dec)) }
repTyClD (L loc (ClassDecl { tcdCtxt = cxt, tcdLName = cls,
tcdTyVars = tvs, tcdFDs = fds,
tcdSigs = sigs, tcdMeths = meth_binds,
tcdATs = ats, tcdATDefs = [] }))
= do { cls1 <- lookupLOcc cls -- See note [Binders and occurrences]
; dec <- addTyVarBinds tvs $ \bndrs ->
do { cxt1 <- repLContext cxt
; sigs1 <- rep_sigs sigs
; binds1 <- rep_binds meth_binds
; fds1 <- repLFunDeps fds
; ats1 <- repFamilyDecls ats
; decls1 <- coreList decQTyConName (ats1 ++ sigs1 ++ binds1)
; repClass cxt1 cls1 bndrs fds1 decls1
}
; return $ Just (loc, dec)
}
-- Un-handled cases
repTyClD (L loc d) = putSrcSpanDs loc $
do { warnDs (hang ds_msg 4 (ppr d))
; return Nothing }
-------------------------
repRoleD :: LRoleAnnotDecl Name -> DsM (SrcSpan, Core TH.DecQ)
repRoleD (L loc (RoleAnnotDecl tycon roles))
= do { tycon1 <- lookupLOcc tycon
; roles1 <- mapM repRole roles
; roles2 <- coreList roleTyConName roles1
; dec <- repRoleAnnotD tycon1 roles2
; return (loc, dec) }
-------------------------
repDataDefn :: Core TH.Name -> Core [TH.TyVarBndr]
-> Maybe (Core [TH.TypeQ])
-> [Name] -> HsDataDefn Name
-> DsM (Core TH.DecQ)
repDataDefn tc bndrs opt_tys tv_names
(HsDataDefn { dd_ND = new_or_data, dd_ctxt = cxt
, dd_cons = cons, dd_derivs = mb_derivs })
= do { cxt1 <- repLContext cxt
; derivs1 <- repDerivs mb_derivs
; case new_or_data of
NewType -> do { con1 <- repC tv_names (head cons)
; repNewtype cxt1 tc bndrs opt_tys con1 derivs1 }
DataType -> do { cons1 <- repList conQTyConName (repC tv_names) cons
; repData cxt1 tc bndrs opt_tys cons1 derivs1 } }
repSynDecl :: Core TH.Name -> Core [TH.TyVarBndr]
-> LHsType Name
-> DsM (Core TH.DecQ)
repSynDecl tc bndrs ty
= do { ty1 <- repLTy ty
; repTySyn tc bndrs ty1 }
repFamilyDecl :: LFamilyDecl Name -> DsM (SrcSpan, Core TH.DecQ)
repFamilyDecl (L loc (FamilyDecl { fdInfo = info,
fdLName = tc,
fdTyVars = tvs,
fdKindSig = opt_kind }))
= do { tc1 <- lookupLOcc tc -- See note [Binders and occurrences]
; dec <- addTyClTyVarBinds tvs $ \bndrs ->
case (opt_kind, info) of
(Nothing, ClosedTypeFamily eqns) ->
do { eqns1 <- mapM repTyFamEqn eqns
; eqns2 <- coreList tySynEqnQTyConName eqns1
; repClosedFamilyNoKind tc1 bndrs eqns2 }
(Just ki, ClosedTypeFamily eqns) ->
do { eqns1 <- mapM repTyFamEqn eqns
; eqns2 <- coreList tySynEqnQTyConName eqns1
; ki1 <- repLKind ki
; repClosedFamilyKind tc1 bndrs ki1 eqns2 }
(Nothing, _) ->
do { info' <- repFamilyInfo info
; repFamilyNoKind info' tc1 bndrs }
(Just ki, _) ->
do { info' <- repFamilyInfo info
; ki1 <- repLKind ki
; repFamilyKind info' tc1 bndrs ki1 }
; return (loc, dec)
}
repFamilyDecls :: [LFamilyDecl Name] -> DsM [Core TH.DecQ]
repFamilyDecls fds = liftM de_loc (mapM repFamilyDecl fds)
-------------------------
mk_extra_tvs :: Located Name -> LHsTyVarBndrs Name
-> HsDataDefn Name -> DsM (LHsTyVarBndrs Name)
-- If there is a kind signature it must be of form
-- k1 -> .. -> kn -> *
-- Return type variables [tv1:k1, tv2:k2, .., tvn:kn]
mk_extra_tvs tc tvs defn
| HsDataDefn { dd_kindSig = Just hs_kind } <- defn
= do { extra_tvs <- go hs_kind
; return (tvs { hsq_tvs = hsq_tvs tvs ++ extra_tvs }) }
| otherwise
= return tvs
where
go :: LHsKind Name -> DsM [LHsTyVarBndr Name]
go (L loc (HsFunTy kind rest))
= do { uniq <- newUnique
; let { occ = mkTyVarOccFS (fsLit "t")
; nm = mkInternalName uniq occ loc
; hs_tv = L loc (KindedTyVar nm kind) }
; hs_tvs <- go rest
; return (hs_tv : hs_tvs) }
go (L _ (HsTyVar n))
| n == liftedTypeKindTyConName
= return []
go _ = failWithDs (ptext (sLit "Malformed kind signature for") <+> ppr tc)
-------------------------
-- represent fundeps
--
-- | Represent a list of class functional dependencies in TH syntax.
repLFunDeps :: [Located (FunDep Name)] -> DsM (Core [TH.FunDep])
repLFunDeps fds = repList funDepTyConName repLFunDep fds
-- | Represent one fundep (xs -> ys); both sides are looked up as binders.
repLFunDep :: Located (FunDep Name) -> DsM (Core TH.FunDep)
repLFunDep (L _ (xs, ys)) = do xs' <- repList nameTyConName lookupBinder xs
                               ys' <- repList nameTyConName lookupBinder ys
                               repFunDep xs' ys'
-- represent family declaration flavours
--
-- | Map a type/data family flavour onto its TH representation.  Closed
-- type families are handled separately in 'repFamilyDecl', so reaching
-- that case here is a compiler bug.
repFamilyInfo :: FamilyInfo Name -> DsM (Core TH.FamFlavour)
repFamilyInfo OpenTypeFamily = rep2 typeFamName []
repFamilyInfo DataFamily = rep2 dataFamName []
repFamilyInfo ClosedTypeFamily {} = panic "repFamilyInfo"
-- Represent instance declarations
--
repInstD :: LInstDecl Name -> DsM (SrcSpan, Core TH.DecQ)
repInstD (L loc (TyFamInstD { tfid_inst = fi_decl }))
= do { dec <- repTyFamInstD fi_decl
; return (loc, dec) }
repInstD (L loc (DataFamInstD { dfid_inst = fi_decl }))
= do { dec <- repDataFamInstD fi_decl
; return (loc, dec) }
repInstD (L loc (ClsInstD { cid_inst = cls_decl }))
= do { dec <- repClsInstD cls_decl
; return (loc, dec) }
repClsInstD :: ClsInstDecl Name -> DsM (Core TH.DecQ)
repClsInstD (ClsInstDecl { cid_poly_ty = ty, cid_binds = binds
, cid_sigs = prags, cid_tyfam_insts = ats
, cid_datafam_insts = adts })
= addTyVarBinds tvs $ \_ ->
-- We must bring the type variables into scope, so their
-- occurrences don't fail, even though the binders don't
-- appear in the resulting data structure
--
-- But we do NOT bring the binders of 'binds' into scope
-- because they are properly regarded as occurrences
-- For example, the method names should be bound to
-- the selector Ids, not to fresh names (Trac #5410)
--
do { cxt1 <- repContext cxt
; cls_tcon <- repTy (HsTyVar (unLoc cls))
; cls_tys <- repLTys tys
; inst_ty1 <- repTapps cls_tcon cls_tys
; binds1 <- rep_binds binds
; prags1 <- rep_sigs prags
; ats1 <- mapM (repTyFamInstD . unLoc) ats
; adts1 <- mapM (repDataFamInstD . unLoc) adts
; decls <- coreList decQTyConName (ats1 ++ adts1 ++ binds1 ++ prags1)
; repInst cxt1 inst_ty1 decls }
where
Just (tvs, cxt, cls, tys) = splitLHsInstDeclTy_maybe ty
repStandaloneDerivD :: LDerivDecl Name -> DsM (SrcSpan, Core TH.DecQ)
repStandaloneDerivD (L loc (DerivDecl { deriv_type = ty }))
= do { dec <- addTyVarBinds tvs $ \_ ->
do { cxt' <- repContext cxt
; cls_tcon <- repTy (HsTyVar (unLoc cls))
; cls_tys <- repLTys tys
; inst_ty <- repTapps cls_tcon cls_tys
; repDeriv cxt' inst_ty }
; return (loc, dec) }
where
Just (tvs, cxt, cls, tys) = splitLHsInstDeclTy_maybe ty
repTyFamInstD :: TyFamInstDecl Name -> DsM (Core TH.DecQ)
repTyFamInstD decl@(TyFamInstDecl { tfid_eqn = eqn })
= do { let tc_name = tyFamInstDeclLName decl
; tc <- lookupLOcc tc_name -- See note [Binders and occurrences]
; eqn1 <- repTyFamEqn eqn
; repTySynInst tc eqn1 }
repTyFamEqn :: LTyFamInstEqn Name -> DsM (Core TH.TySynEqnQ)
repTyFamEqn (L loc (TyFamEqn { tfe_pats = HsWB { hswb_cts = tys
, hswb_kvs = kv_names
, hswb_tvs = tv_names }
, tfe_rhs = rhs }))
= do { let hs_tvs = HsQTvs { hsq_kvs = kv_names
, hsq_tvs = userHsTyVarBndrs loc tv_names } -- Yuk
; addTyClTyVarBinds hs_tvs $ \ _ ->
do { tys1 <- repLTys tys
; tys2 <- coreList typeQTyConName tys1
; rhs1 <- repLTy rhs
; repTySynEqn tys2 rhs1 } }
repDataFamInstD :: DataFamInstDecl Name -> DsM (Core TH.DecQ)
repDataFamInstD (DataFamInstDecl { dfid_tycon = tc_name
, dfid_pats = HsWB { hswb_cts = tys, hswb_kvs = kv_names, hswb_tvs = tv_names }
, dfid_defn = defn })
= do { tc <- lookupLOcc tc_name -- See note [Binders and occurrences]
; let loc = getLoc tc_name
hs_tvs = HsQTvs { hsq_kvs = kv_names, hsq_tvs = userHsTyVarBndrs loc tv_names } -- Yuk
; addTyClTyVarBinds hs_tvs $ \ bndrs ->
do { tys1 <- repList typeQTyConName repLTy tys
; repDataDefn tc bndrs (Just tys1) tv_names defn } }
repForD :: Located (ForeignDecl Name) -> DsM (SrcSpan, Core TH.DecQ)
repForD (L loc (ForeignImport name typ _ (CImport cc s mch cis)))
= do MkC name' <- lookupLOcc name
MkC typ' <- repLTy typ
MkC cc' <- repCCallConv cc
MkC s' <- repSafety s
cis' <- conv_cimportspec cis
MkC str <- coreStringLit (static ++ chStr ++ cis')
dec <- rep2 forImpDName [cc', s', str, name', typ']
return (loc, dec)
where
conv_cimportspec (CLabel cls) = notHandled "Foreign label" (doubleQuotes (ppr cls))
conv_cimportspec (CFunction DynamicTarget) = return "dynamic"
conv_cimportspec (CFunction (StaticTarget fs _ True)) = return (unpackFS fs)
conv_cimportspec (CFunction (StaticTarget _ _ False)) = panic "conv_cimportspec: values not supported yet"
conv_cimportspec CWrapper = return "wrapper"
static = case cis of
CFunction (StaticTarget _ _ _) -> "static "
_ -> ""
chStr = case mch of
Nothing -> ""
Just (Header h) -> unpackFS h ++ " "
repForD decl = notHandled "Foreign declaration" (ppr decl)
-- | Map a foreign-call calling convention onto the matching TH
-- @Callconv@ smart constructor (exhaustive over the constructors below).
repCCallConv :: CCallConv -> DsM (Core TH.Callconv)
repCCallConv CCallConv = rep2 cCallName []
repCCallConv StdCallConv = rep2 stdCallName []
repCCallConv CApiConv = rep2 cApiCallName []
repCCallConv PrimCallConv = rep2 primCallName []
repCCallConv JavaScriptCallConv = rep2 javaScriptCallName []
-- | Map a foreign-import safety level onto its TH representation.
repSafety :: Safety -> DsM (Core TH.Safety)
repSafety PlayRisky = rep2 unsafeName []
repSafety PlayInterruptible = rep2 interruptibleName []
repSafety PlaySafe = rep2 safeName []
repFixD :: LFixitySig Name -> DsM (SrcSpan, Core TH.DecQ)
repFixD (L loc (FixitySig name (Fixity prec dir)))
= do { MkC name' <- lookupLOcc name
; MkC prec' <- coreIntLit prec
; let rep_fn = case dir of
InfixL -> infixLDName
InfixR -> infixRDName
InfixN -> infixNDName
; dec <- rep2 rep_fn [prec', name']
; return (loc, dec) }
repRuleD :: LRuleDecl Name -> DsM (SrcSpan, Core TH.DecQ)
repRuleD (L loc (HsRule n act bndrs lhs _ rhs _))
= do { let bndr_names = concatMap ruleBndrNames bndrs
; ss <- mkGenSyms bndr_names
; rule1 <- addBinds ss $
do { bndrs' <- repList ruleBndrQTyConName repRuleBndr bndrs
; n' <- coreStringLit $ unpackFS n
; act' <- repPhases act
; lhs' <- repLE lhs
; rhs' <- repLE rhs
; repPragRule n' bndrs' lhs' rhs' act' }
; rule2 <- wrapGenSyms ss rule1
; return (loc, rule2) }
-- | All names bound by one RULE binder: the binder itself plus, for a
-- binder with a pattern signature, its kind and type variables.
ruleBndrNames :: RuleBndr Name -> [Name]
ruleBndrNames (RuleBndr n) = [unLoc n]
ruleBndrNames (RuleBndrSig n (HsWB { hswb_kvs = kvs, hswb_tvs = tvs }))
  = unLoc n : kvs ++ tvs
repRuleBndr :: RuleBndr Name -> DsM (Core TH.RuleBndrQ)
repRuleBndr (RuleBndr n)
= do { MkC n' <- lookupLBinder n
; rep2 ruleVarName [n'] }
repRuleBndr (RuleBndrSig n (HsWB { hswb_cts = ty }))
= do { MkC n' <- lookupLBinder n
; MkC ty' <- repLTy ty
; rep2 typedRuleVarName [n', ty'] }
repAnnD :: LAnnDecl Name -> DsM (SrcSpan, Core TH.DecQ)
repAnnD (L loc (HsAnnotation ann_prov (L _ exp)))
= do { target <- repAnnProv ann_prov
; exp' <- repE exp
; dec <- repPragAnn target exp'
; return (loc, dec) }
repAnnProv :: AnnProvenance Name -> DsM (Core TH.AnnTarget)
repAnnProv (ValueAnnProvenance n)
= do { MkC n' <- globalVar n -- ANNs are allowed only at top-level
; rep2 valueAnnotationName [ n' ] }
repAnnProv (TypeAnnProvenance n)
= do { MkC n' <- globalVar n
; rep2 typeAnnotationName [ n' ] }
repAnnProv ModuleAnnProvenance
= rep2 moduleAnnotationName []
-- | Warning banner used by 'repTyClD' for declaration forms that cannot
-- be represented in Template Haskell syntax.
ds_msg :: SDoc
ds_msg = ptext (sLit "Cannot desugar this Template Haskell declaration:")
-------------------------------------------------------
-- Constructors
-------------------------------------------------------
repC :: [Name] -> LConDecl Name -> DsM (Core TH.ConQ)
repC _ (L _ (ConDecl { con_name = con, con_qvars = con_tvs, con_cxt = L _ []
, con_details = details, con_res = ResTyH98 }))
| null (hsQTvBndrs con_tvs)
= do { con1 <- lookupLOcc con -- See Note [Binders and occurrences]
; repConstr con1 details }
repC tvs (L _ (ConDecl { con_name = con
, con_qvars = con_tvs, con_cxt = L _ ctxt
, con_details = details
, con_res = res_ty }))
= do { (eq_ctxt, con_tv_subst) <- mkGadtCtxt tvs res_ty
; let ex_tvs = HsQTvs { hsq_kvs = filterOut (in_subst con_tv_subst) (hsq_kvs con_tvs)
, hsq_tvs = filterOut (in_subst con_tv_subst . hsLTyVarName) (hsq_tvs con_tvs) }
; binds <- mapM dupBinder con_tv_subst
; dsExtendMetaEnv (mkNameEnv binds) $ -- Binds some of the con_tvs
addTyVarBinds ex_tvs $ \ ex_bndrs -> -- Binds the remaining con_tvs
do { con1 <- lookupLOcc con -- See Note [Binders and occurrences]
; c' <- repConstr con1 details
; ctxt' <- repContext (eq_ctxt ++ ctxt)
; rep2 forallCName [unC ex_bndrs, unC ctxt', unC c'] } }
-- | Is the name already mapped by the (con-tyvar, data-tyvar)
-- substitution built by 'mkGadtCtxt'?
in_subst :: [(Name,Name)] -> Name -> Bool
in_subst subst n = any ((n ==) . fst) subst
mkGadtCtxt :: [Name] -- Tyvars of the data type
-> ResType (LHsType Name)
-> DsM (HsContext Name, [(Name,Name)])
-- Given a data type in GADT syntax, figure out the equality
-- context, so that we can represent it with an explicit
-- equality context, because that is the only way to express
-- the GADT in TH syntax
--
-- Example:
-- data T a b c where { MkT :: forall d e. d -> e -> T d [e] e
-- mkGadtCtxt [a,b,c] [d,e] (T d [e] e)
-- returns
-- (b~[e], c~e), [d->a]
--
-- This function is fiddly, but not really hard
mkGadtCtxt _ ResTyH98
= return ([], [])
mkGadtCtxt data_tvs (ResTyGADT res_ty)
| Just (_, tys) <- hsTyGetAppHead_maybe res_ty
, data_tvs `equalLength` tys
= return (go [] [] (data_tvs `zip` tys))
| otherwise
= failWithDs (ptext (sLit "Malformed constructor result type:") <+> ppr res_ty)
where
go cxt subst [] = (cxt, subst)
go cxt subst ((data_tv, ty) : rest)
| Just con_tv <- is_hs_tyvar ty
, isTyVarName con_tv
, not (in_subst subst con_tv)
= go cxt ((con_tv, data_tv) : subst) rest
| otherwise
= go (eq_pred : cxt) subst rest
where
loc = getLoc ty
eq_pred = L loc (HsEqTy (L loc (HsTyVar data_tv)) ty)
is_hs_tyvar (L _ (HsTyVar n)) = Just n -- Type variables *and* tycons
is_hs_tyvar (L _ (HsParTy ty)) = is_hs_tyvar ty
is_hs_tyvar _ = Nothing
-- | Represent a (possibly banged) constructor field type as a TH strict
-- type.  The (Just True, True) flag pair — presumably an {-# UNPACK #-}
-- annotation plus a bang; confirm against HsUserBang's definition — maps
-- to 'unpackedName', a plain bang to 'isStrictName', and anything else to
-- 'notStrictName'.
repBangTy :: LBangType Name -> DsM (Core (TH.StrictTypeQ))
repBangTy ty= do
  MkC s <- rep2 str []
  MkC t <- repLTy ty'
  rep2 strictTypeName [s, t]
  where
    (str, ty') = case ty of
          L _ (HsBangTy (HsUserBang (Just True) True) ty) -> (unpackedName, ty)
          L _ (HsBangTy (HsUserBang _ True) ty) -> (isStrictName, ty)
          _ -> (notStrictName, ty)
-------------------------------------------------------
-- Deriving clause
-------------------------------------------------------
-- | Represent a deriving clause as a list of class names; 'Nothing'
-- (no deriving clause) becomes the empty list.
repDerivs :: Maybe [LHsType Name] -> DsM (Core [TH.Name])
repDerivs Nothing = coreList nameTyConName []
repDerivs (Just ctxt)
  = repList nameTyConName rep_deriv ctxt
  where
    rep_deriv :: LHsType Name -> DsM (Core TH.Name)
    -- Deriving clauses must have the simple H98 form: a bare class with
    -- no arguments; anything else is reported via notHandled.
    rep_deriv ty
      | Just (cls, []) <- splitHsClassTy_maybe (unLoc ty)
      = lookupOcc cls
      | otherwise
      = notHandled "Non-H98 deriving clause" (ppr ty)
-------------------------------------------------------
-- Signatures in a class decl, or a group of bindings
-------------------------------------------------------
-- | Represent a list of signatures, sorted by source location with the
-- locations then discarded.
rep_sigs :: [LSig Name] -> DsM [Core TH.DecQ]
rep_sigs sigs = do locs_cores <- rep_sigs' sigs
                   return $ de_loc $ sort_by_loc locs_cores
-- | As 'rep_sigs' but keeping each signature's source location.
rep_sigs' :: [LSig Name] -> DsM [(SrcSpan, Core TH.DecQ)]
-- We silently ignore ones we don't recognise
rep_sigs' sigs = do { sigs1 <- mapM rep_sig sigs ;
                      return (concat sigs1) }
rep_sig :: LSig Name -> DsM [(SrcSpan, Core TH.DecQ)]
rep_sig (L loc (TypeSig nms ty)) = mapM (rep_ty_sig sigDName loc ty) nms
rep_sig (L _ (PatSynSig {})) = notHandled "Pattern type signatures" empty
rep_sig (L loc (GenericSig nms ty)) = mapM (rep_ty_sig defaultSigDName loc ty) nms
rep_sig d@(L _ (IdSig {})) = pprPanic "rep_sig IdSig" (ppr d)
rep_sig (L _ (FixSig {})) = return [] -- fixity sigs at top level
rep_sig (L loc (InlineSig nm ispec)) = rep_inline nm ispec loc
rep_sig (L loc (SpecSig nm ty ispec)) = rep_specialise nm ty ispec loc
rep_sig (L loc (SpecInstSig ty)) = rep_specialiseInst ty loc
rep_sig (L _ (MinimalSig {})) = notHandled "MINIMAL pragmas" empty
rep_ty_sig :: Name -> SrcSpan -> LHsType Name -> Located Name
-> DsM (SrcSpan, Core TH.DecQ)
rep_ty_sig mk_sig loc (L _ ty) nm
= do { nm1 <- lookupLOcc nm
; ty1 <- rep_ty ty
; sig <- repProto mk_sig nm1 ty1
; return (loc, sig) }
where
-- We must special-case the top-level explicit for-all of a TypeSig
-- See Note [Scoped type variables in bindings]
rep_ty (HsForAllTy Explicit tvs ctxt ty)
= do { let rep_in_scope_tv tv = do { name <- lookupBinder (hsLTyVarName tv)
; repTyVarBndrWithKind tv name }
; bndrs1 <- repList tyVarBndrTyConName rep_in_scope_tv (hsQTvBndrs tvs)
; ctxt1 <- repLContext ctxt
; ty1 <- repLTy ty
; repTForall bndrs1 ctxt1 ty1 }
rep_ty ty = repTy ty
-- | Represent an INLINE/NOINLINE/INLINABLE pragma as a TH declaration.
rep_inline :: Located Name
           -> InlinePragma      -- Never defaultInlinePragma
           -> SrcSpan
           -> DsM [(SrcSpan, Core TH.DecQ)]
rep_inline nm ispec loc
  = do { nm1    <- lookupLOcc nm
       ; inline <- repInline $ inl_inline ispec
       ; rm     <- repRuleMatch $ inl_rule ispec
       ; phases <- repPhases $ inl_act ispec
       ; pragma <- repPragInl nm1 inline rm phases
       ; return [(loc, pragma)]
       }

-- | Represent a SPECIALISE pragma; an accompanying non-empty inline
-- spec turns it into SPECIALISE INLINE.
rep_specialise :: Located Name -> LHsType Name -> InlinePragma -> SrcSpan
               -> DsM [(SrcSpan, Core TH.DecQ)]
rep_specialise nm ty ispec loc
  = do { nm1 <- lookupLOcc nm
       ; ty1 <- repLTy ty
       ; phases <- repPhases $ inl_act ispec
       ; let inline = inl_inline ispec
       ; pragma <- if isEmptyInlineSpec inline
                   then -- SPECIALISE
                        repPragSpec nm1 ty1 phases
                   else -- SPECIALISE INLINE
                        do { inline1 <- repInline inline
                           ; repPragSpecInl nm1 ty1 inline1 phases }
       ; return [(loc, pragma)]
       }

-- | Represent a SPECIALISE instance pragma.
rep_specialiseInst :: LHsType Name -> SrcSpan -> DsM [(SrcSpan, Core TH.DecQ)]
rep_specialiseInst ty loc
  = do { ty1    <- repLTy ty
       ; pragma <- repPragSpecInst ty1
       ; return [(loc, pragma)] }
-- | Represent an inline specification as a TH 'Inline' value.
repInline :: InlineSpec -> DsM (Core TH.Inline)
repInline NoInline  = dataCon noInlineDataConName
repInline Inline    = dataCon inlineDataConName
repInline Inlinable = dataCon inlinableDataConName
repInline spec      = notHandled "repInline" (ppr spec)

-- | Represent the CONLIKE/FUNLIKE part of an inline pragma.
repRuleMatch :: RuleMatchInfo -> DsM (Core TH.RuleMatch)
repRuleMatch ConLike = dataCon conLikeDataConName
repRuleMatch FunLike = dataCon funLikeDataConName

-- | Represent a pragma's phase-activation control as a TH 'Phases' value.
repPhases :: Activation -> DsM (Core TH.Phases)
repPhases (ActiveBefore i) = do { MkC arg <- coreIntLit i
                                ; dataCon' beforePhaseDataConName [arg] }
repPhases (ActiveAfter i)  = do { MkC arg <- coreIntLit i
                                ; dataCon' fromPhaseDataConName [arg] }
repPhases _                = dataCon allPhasesDataConName
-------------------------------------------------------
--                      Types
-------------------------------------------------------

addTyVarBinds :: LHsTyVarBndrs Name                            -- the binders to be added
              -> (Core [TH.TyVarBndr] -> DsM (Core (TH.Q a)))  -- action in the ext env
              -> DsM (Core (TH.Q a))
-- gensym a list of type variables and enter them into the meta environment;
-- the computations passed as the second argument is executed in that extended
-- meta environment and gets the *new* names on Core-level as an argument
addTyVarBinds (HsQTvs { hsq_kvs = kvs, hsq_tvs = tvs }) m
  = do { fresh_kv_names <- mkGenSyms kvs
       ; fresh_tv_names <- mkGenSyms (map hsLTyVarName tvs)
       ; let fresh_names = fresh_kv_names ++ fresh_tv_names
       ; term <- addBinds fresh_names $
                 do { kbs <- repList tyVarBndrTyConName mk_tv_bndr (tvs `zip` fresh_tv_names)
                    ; m kbs }
       ; wrapGenSyms fresh_names term }
  where
    -- Pair each source binder with its freshly gensym'd Id
    mk_tv_bndr (tv, (_,v)) = repTyVarBndrWithKind tv (coreVar v)

addTyClTyVarBinds :: LHsTyVarBndrs Name
                  -> (Core [TH.TyVarBndr] -> DsM (Core (TH.Q a)))
                  -> DsM (Core (TH.Q a))
-- Used for data/newtype declarations, and family instances,
-- so that the nested type variables work right
--    instance C (T a) where
--      type W (T a) = blah
-- The 'a' in the type instance is the one bound by the instance decl
addTyClTyVarBinds tvs m
  = do { let tv_names = hsLKiTyVarNames tvs
       ; env <- dsGetMetaEnv
       ; freshNames <- mkGenSyms (filterOut (`elemNameEnv` env) tv_names)
            -- Make fresh names for the ones that are not already in scope
            -- This makes things work for family declarations
       ; term <- addBinds freshNames $
                 do { kbs <- repList tyVarBndrTyConName mk_tv_bndr (hsQTvBndrs tvs)
                    ; m kbs }
       ; wrapGenSyms freshNames term }
  where
    -- Here the binder may be either fresh or already in the meta-env,
    -- so we go through 'lookupBinder' rather than using a fresh Id directly
    mk_tv_bndr tv = do { v <- lookupBinder (hsLTyVarName tv)
                       ; repTyVarBndrWithKind tv v }
-- Produce kinded binder constructors from the Haskell tyvar binders
--
repTyVarBndrWithKind :: LHsTyVarBndr Name
                     -> Core TH.Name -> DsM (Core TH.TyVarBndr)
repTyVarBndrWithKind (L _ (UserTyVar _)) nm
  = repPlainTV nm
repTyVarBndrWithKind (L _ (KindedTyVar _ ki)) nm
  = repLKind ki >>= repKindedTV nm

-- represent a type context
--
repLContext :: LHsContext Name -> DsM (Core TH.CxtQ)
repLContext (L _ ctxt) = repContext ctxt

repContext :: HsContext Name -> DsM (Core TH.CxtQ)
repContext ctxt = do preds <- repList typeQTyConName repLTy ctxt
                     repCtxt preds

-- yield the representation of a list of types
--
repLTys :: [LHsType Name] -> DsM [Core TH.TypeQ]
repLTys tys = mapM repLTy tys

-- represent a type
--
repLTy :: LHsType Name -> DsM (Core TH.TypeQ)
repLTy (L _ ty) = repTy ty
-- | Represent a source type as a TH 'TypeQ', dispatching on the
-- 'HsType' constructor.  Unsupported forms fall through to 'notHandled'.
repTy :: HsType Name -> DsM (Core TH.TypeQ)
repTy (HsForAllTy _ tvs ctxt ty)  =
  addTyVarBinds tvs $ \bndrs -> do
    ctxt1 <- repLContext ctxt
    ty1   <- repLTy ty
    repTForall bndrs ctxt1 ty1

repTy (HsTyVar n)
  | isTvOcc occ   = do tv1 <- lookupOcc n
                       repTvar tv1
  | isDataOcc occ = do tc1 <- lookupOcc n   -- promoted data constructor
                       repPromotedTyCon tc1
  | otherwise     = do tc1 <- lookupOcc n
                       repNamedTyCon tc1
  where
    occ = nameOccName n

repTy (HsAppTy f a) = do
    f1 <- repLTy f
    a1 <- repLTy a
    repTapp f1 a1
repTy (HsFunTy f a) = do
    f1   <- repLTy f
    a1   <- repLTy a
    tcon <- repArrowTyCon
    repTapps tcon [f1, a1]
repTy (HsListTy t) = do
    t1   <- repLTy t
    tcon <- repListTyCon
    repTapp tcon t1
repTy (HsPArrTy t) = do
    t1   <- repLTy t
    tcon <- repTy (HsTyVar (tyConName parrTyCon))
    repTapp tcon t1
repTy (HsTupleTy HsUnboxedTuple tys) = do
    tys1 <- repLTys tys
    tcon <- repUnboxedTupleTyCon (length tys)
    repTapps tcon tys1
repTy (HsTupleTy _ tys) = do
    tys1 <- repLTys tys
    tcon <- repTupleTyCon (length tys)
    repTapps tcon tys1
-- Infix type operator: rebuilt as a prefix application and re-represented
repTy (HsOpTy ty1 (_, n) ty2) = repLTy ((nlHsTyVar (unLoc n) `nlHsAppTy` ty1)
                                        `nlHsAppTy` ty2)
repTy (HsParTy t) = repLTy t
repTy (HsEqTy t1 t2) = do
    t1' <- repLTy t1
    t2' <- repLTy t2
    eq  <- repTequality
    repTapps eq [t1', t2']
repTy (HsKindSig t k) = do
    t1 <- repLTy t
    k1 <- repLKind k
    repTSig t1 k1
repTy (HsSpliceTy splice _) = repSplice splice
repTy (HsExplicitListTy _ tys) = do
    tys1 <- repLTys tys
    repTPromotedList tys1
repTy (HsExplicitTupleTy _ tys) = do
    tys1 <- repLTys tys
    tcon <- repPromotedTupleTyCon (length tys)
    repTapps tcon tys1
repTy (HsTyLit lit) = do
    lit' <- repTyLit lit
    repTLit lit'
repTy ty = notHandled "Exotic form of type" (ppr ty)
-- | Represent a type-level literal (numeric or symbol).
repTyLit :: HsTyLit -> DsM (Core TH.TyLitQ)
repTyLit (HsNumTy i) = do iExpr <- mkIntegerExpr i
                          rep2 numTyLitName [iExpr]
repTyLit (HsStrTy s) = do { s' <- mkStringExprFS s
                          ; rep2 strTyLitName [s']
                          }

-- represent a kind
--
-- Splits off the argument kinds of a function kind, represents each
-- piece, then rebuilds the arrows right-associatively.
repLKind :: LHsKind Name -> DsM (Core TH.Kind)
repLKind ki
  = do { let (kis, ki') = splitHsFunType ki
       ; kis_rep <- mapM repLKind kis
       ; ki'_rep <- repNonArrowLKind ki'
       ; kcon    <- repKArrow
       ; let f k1 k2 = repKApp kcon k1 >>= flip repKApp k2
       ; foldrM f ki'_rep kis_rep
       }
repNonArrowLKind :: LHsKind Name -> DsM (Core TH.Kind)
repNonArrowLKind (L _ ki) = repNonArrowKind ki

-- | Represent a kind whose top level is not a function arrow
-- (the arrows are handled by 'repLKind').
repNonArrowKind :: HsKind Name -> DsM (Core TH.Kind)
repNonArrowKind (HsTyVar name)
  | name == liftedTypeKindTyConName = repKStar
  | name == constraintKindTyConName = repKConstraint
  | isTvOcc (nameOccName name)      = lookupOcc name >>= repKVar
  | otherwise                       = lookupOcc name >>= repKCon
repNonArrowKind (HsAppTy f a)    = do { f' <- repLKind f
                                      ; a' <- repLKind a
                                      ; repKApp f' a'
                                      }
repNonArrowKind (HsListTy k)     = do { k'   <- repLKind k
                                      ; kcon <- repKList
                                      ; repKApp kcon k'
                                      }
repNonArrowKind (HsTupleTy _ ks) = do { ks'  <- mapM repLKind ks
                                      ; kcon <- repKTuple (length ks)
                                      ; repKApps kcon ks'
                                      }
repNonArrowKind k                = notHandled "Exotic form of kind" (ppr k)

-- | Represent a role annotation; 'Nothing' means the role is inferred.
repRole :: Located (Maybe Role) -> DsM (Core TH.Role)
repRole (L _ (Just Nominal))          = rep2 nominalRName []
repRole (L _ (Just Representational)) = rep2 representationalRName []
repRole (L _ (Just Phantom))          = rep2 phantomRName []
repRole (L _ Nothing)                 = rep2 inferRName []
-----------------------------------------------------------------------------
--              Splices
-----------------------------------------------------------------------------

repSplice :: HsSplice Name -> DsM (Core a)
-- See Note [How brackets and nested splices are handled] in TcSplice
-- We return a CoreExpr of any old type; the context should know
repSplice (HsSplice n _)
 = do { mb_val <- dsLookupMetaEnv n
      ; case mb_val of
          Just (Splice e) -> do { e' <- dsExpr e
                                ; return (MkC e') }
          _ -> pprPanic "HsSplice" (ppr n) }
                -- Should not happen; statically checked

-----------------------------------------------------------------------------
--              Expressions
-----------------------------------------------------------------------------

-- | Represent a list of expressions.
repLEs :: [LHsExpr Name] -> DsM (Core [TH.ExpQ])
repLEs es = repList expQTyConName repLE es

-- FIXME: some of these panics should be converted into proper error messages
--        unless we can make sure that constructs, which are plainly not
--        supported in TH already lead to error messages at an earlier stage

-- | Represent a located expression, setting the source span so that any
-- error messages point at the right place.
repLE :: LHsExpr Name -> DsM (Core TH.ExpQ)
repLE (L loc e) = putSrcSpanDs loc (repE e)
-- | Represent an expression as a TH 'ExpQ', dispatching on the 'HsExpr'
-- constructor.  Unsupported forms fall through to 'notHandled'.
repE :: HsExpr Name -> DsM (Core TH.ExpQ)
repE (HsVar x) =
  do { mb_val <- dsLookupMetaEnv x
     ; case mb_val of
        Nothing         -> do { str <- globalVar x
                              ; repVarOrCon x str }
        Just (Bound y)  -> repVarOrCon x (coreVar y)
        Just (Splice e) -> do { e' <- dsExpr e
                              ; return (MkC e') } }
repE e@(HsIPVar _) = notHandled "Implicit parameters" (ppr e)

        -- Remember, we're desugaring renamer output here, so
        -- HsOverlit can definitely occur
repE (HsOverLit l) = do { a <- repOverloadedLiteral l; repLit a }
repE (HsLit l)     = do { a <- repLiteral l;           repLit a }
repE (HsLam (MG { mg_alts = [m] })) = repLambda m
repE (HsLamCase _ (MG { mg_alts = ms }))
                   = do { ms' <- mapM repMatchTup ms
                        ; core_ms <- coreList matchQTyConName ms'
                        ; repLamCase core_ms }
repE (HsApp x y)   = do {a <- repLE x; b <- repLE y; repApp a b}

repE (OpApp e1 op _ e2) =
  do { arg1 <- repLE e1;
       arg2 <- repLE e2;
       the_op <- repLE op ;
       repInfixApp arg1 the_op arg2 }
repE (NegApp x _) = do
    a         <- repLE x
    negateVar <- lookupOcc negateName >>= repVar
    negateVar `repApp` a
repE (HsPar x)      = repLE x
repE (SectionL x y) = do { a <- repLE x; b <- repLE y; repSectionL a b }
repE (SectionR x y) = do { a <- repLE x; b <- repLE y; repSectionR a b }
repE (HsCase e (MG { mg_alts = ms }))
                    = do { arg <- repLE e
                         ; ms2 <- mapM repMatchTup ms
                         ; core_ms2 <- coreList matchQTyConName ms2
                         ; repCaseE arg core_ms2 }
repE (HsIf _ x y z) = do
    a <- repLE x
    b <- repLE y
    c <- repLE z
    repCond a b c
repE (HsMultiIf _ alts)
  = do { (binds, alts') <- liftM unzip $ mapM repLGRHS alts
       ; expr' <- repMultiIf (nonEmptyCoreList alts')
       ; wrapGenSyms (concat binds) expr' }
repE (HsLet bs e) = do { (ss,ds) <- repBinds bs
                       ; e2 <- addBinds ss (repLE e)
                       ; z  <- repLetE ds e2
                       ; wrapGenSyms ss z }

-- FIXME: I haven't got the types here right yet
repE e@(HsDo ctxt sts _)
 | case ctxt of { DoExpr -> True; GhciStmtCtxt -> True; _ -> False }
 = do { (ss,zs) <- repLSts sts;
        e' <- repDoE (nonEmptyCoreList zs);
        wrapGenSyms ss e' }

 | ListComp <- ctxt
 = do { (ss,zs) <- repLSts sts;
        e' <- repComp (nonEmptyCoreList zs);
        wrapGenSyms ss e' }

 | otherwise
 = notHandled "mdo, monad comprehension and [: :]" (ppr e)

repE (ExplicitList _ _ es) = do { xs <- repLEs es; repListExp xs }
repE e@(ExplicitPArr _ _)  = notHandled "Parallel arrays" (ppr e)
repE e@(ExplicitTuple es boxed)
  | not (all tupArgPresent es) = notHandled "Tuple sections" (ppr e)
  | isBoxed boxed              = do { xs <- repLEs [e | Present e <- es]; repTup xs }
  | otherwise                  = do { xs <- repLEs [e | Present e <- es]; repUnboxedTup xs }

repE (RecordCon c _ flds)
 = do { x <- lookupLOcc c;
        fs <- repFields flds;
        repRecCon x fs }
repE (RecordUpd e flds _ _ _)
 = do { x <- repLE e;
        fs <- repFields flds;
        repRecUpd x fs }

repE (ExprWithTySig e ty) = do { e1 <- repLE e; t1 <- repLTy ty; repSigExp e1 t1 }
repE (ArithSeq _ _ aseq) =
  case aseq of
    From e              -> do { ds1 <- repLE e; repFrom ds1 }
    FromThen e1 e2      -> do
                             ds1 <- repLE e1
                             ds2 <- repLE e2
                             repFromThen ds1 ds2
    FromTo e1 e2        -> do
                             ds1 <- repLE e1
                             ds2 <- repLE e2
                             repFromTo ds1 ds2
    FromThenTo e1 e2 e3 -> do
                             ds1 <- repLE e1
                             ds2 <- repLE e2
                             ds3 <- repLE e3
                             repFromThenTo ds1 ds2 ds3

repE (HsSpliceE _ splice)  = repSplice splice
repE e@(PArrSeq {})        = notHandled "Parallel arrays" (ppr e)
repE e@(HsCoreAnn {})      = notHandled "Core annotations" (ppr e)
repE e@(HsSCC {})          = notHandled "Cost centres" (ppr e)
repE e@(HsTickPragma {})   = notHandled "Tick Pragma" (ppr e)
repE e@(HsTcBracketOut {}) = notHandled "TH brackets" (ppr e)
repE e                     = notHandled "Expression form" (ppr e)
-----------------------------------------------------------------------------
-- Building representations of auxiliary structures like Match, Clause, Stmt,

-- | Represent a case alternative.  Only alternatives with exactly one
-- pattern are valid here (case scrutinises a single expression).
repMatchTup :: LMatch Name (LHsExpr Name) -> DsM (Core TH.MatchQ)
repMatchTup (L _ (Match [p] _ (GRHSs guards wheres))) =
  do { ss1 <- mkGenSyms (collectPatBinders p)
     ; addBinds ss1 $ do {
     ; p1 <- repLP p
     ; (ss2,ds) <- repBinds wheres
     ; addBinds ss2 $ do {
     ; gs    <- repGuards guards
     ; match <- repMatch p1 gs ds
     ; wrapGenSyms (ss1++ss2) match }}}
repMatchTup _ = panic "repMatchTup: case alt with more than one arg"

-- | Represent one clause of a function definition: gensym the pattern
-- binders, then the where-binders, then build the clause.
repClauseTup :: LMatch Name (LHsExpr Name) -> DsM (Core TH.ClauseQ)
repClauseTup (L _ (Match ps _ (GRHSs guards wheres))) =
  do { ss1 <- mkGenSyms (collectPatsBinders ps)
     ; addBinds ss1 $ do {
       ps1 <- repLPs ps
     ; (ss2,ds) <- repBinds wheres
     ; addBinds ss2 $ do {
       gs <- repGuards guards
     ; clause <- repClause ps1 gs ds
     ; wrapGenSyms (ss1++ss2) clause }}}
-- | Represent a right-hand side: a single unguarded body becomes a
-- normal body, anything else becomes a list of guarded alternatives.
repGuards :: [LGRHS Name (LHsExpr Name)] -> DsM (Core TH.BodyQ)
repGuards [L _ (GRHS [] e)]
  = do {a <- repLE e; repNormal a }
repGuards other
  = do { zs <- mapM repLGRHS other
       ; let (xs, ys) = unzip zs
       ; gd <- repGuarded (nonEmptyCoreList ys)
       ; wrapGenSyms (concat xs) gd }

-- | Represent one guarded alternative, returning the gensym bindings it
-- introduces alongside its (guard, expression) pair.  A single boolean
-- guard stays a normal guard; anything else becomes a pattern guard.
repLGRHS :: LGRHS Name (LHsExpr Name) -> DsM ([GenSymBind], (Core (TH.Q (TH.Guard, TH.Exp))))
repLGRHS (L _ (GRHS [L _ (BodyStmt e1 _ _ _)] e2))
  = do { guarded <- repLNormalGE e1 e2
       ; return ([], guarded) }
repLGRHS (L _ (GRHS ss rhs))
  = do { (gs, ss') <- repLSts ss
       ; rhs' <- addBinds gs $ repLE rhs
       ; guarded <- repPatGE (nonEmptyCoreList ss') rhs'
       ; return (gs, guarded) }

-- | Represent record construction/update fields.
repFields :: HsRecordBinds Name -> DsM (Core [TH.Q TH.FieldExp])
repFields (HsRecFields { rec_flds = flds })
  = repList fieldExpQTyConName rep_fld flds
  where
    rep_fld fld = do { fn <- lookupLOcc (hsRecFieldId fld)
                     ; e  <- repLE (hsRecFieldArg fld)
                     ; repFieldExp fn e }
-----------------------------------------------------------------------------
-- Representing Stmt's is tricky, especially if bound variables
-- shadow each other. Consider:  [| do { x <- f 1; x <- f x; g x } |]
-- First gensym new names for every variable in any of the patterns.
-- both static (x'1 and x'2), and dynamic ((gensym "x") and (gensym "y"))
-- if variables didn't shadow, the static gensym wouldn't be necessary
-- and we could reuse the original names (x and x).
--
-- do { x'1 <- gensym "x"
--    ; x'2 <- gensym "x"
--    ; doE [ BindSt (pvar x'1) [| f 1 |]
--          , BindSt (pvar x'2) [| f x |]
--          , NoBindSt [| g x |]
--          ]
--    }
-- The strategy is to translate a whole list of do-bindings by building a
-- bigger environment, and a bigger set of meta bindings
-- (like:  x'1 <- gensym "x" ) and then combining these with the translations
-- of the expressions within the Do
-----------------------------------------------------------------------------
-- The helper function repSts computes the translation of each sub expression
-- and a bunch of prefix bindings denoting the dynamic renaming.

repLSts :: [LStmt Name (LHsExpr Name)] -> DsM ([GenSymBind], [Core TH.StmtQ])
repLSts stmts = repSts (map unLoc stmts)

-- | Translate a list of statements; returns the gensym bindings for all
-- pattern-bound variables along with the translated statements.
repSts :: [Stmt Name (LHsExpr Name)] -> DsM ([GenSymBind], [Core TH.StmtQ])
repSts (BindStmt p e _ _ : ss) =
   -- NB: the RHS 'e' is translated *before* the pattern binders are
   -- brought into scope, so it cannot see them
   do { e2 <- repLE e
      ; ss1 <- mkGenSyms (collectPatBinders p)
      ; addBinds ss1 $ do {
      ; p1 <- repLP p;
      ; (ss2,zs) <- repSts ss
      ; z <- repBindSt p1 e2
      ; return (ss1++ss2, z : zs) }}
repSts (LetStmt bs : ss) =
   do { (ss1,ds) <- repBinds bs
      ; z <- repLetSt ds
      ; (ss2,zs) <- addBinds ss1 (repSts ss)
      ; return (ss1++ss2, z : zs) }
repSts (BodyStmt e _ _ _ : ss) =
   do { e2 <- repLE e
      ; z <- repNoBindSt e2
      ; (ss2,zs) <- repSts ss
      ; return (ss2, z : zs) }
repSts (ParStmt stmt_blocks _ _ : ss) =
   do { (ss_s, stmt_blocks1) <- mapAndUnzipM rep_stmt_block stmt_blocks
      ; let stmt_blocks2 = nonEmptyCoreList stmt_blocks1
            ss1 = concat ss_s
      ; z <- repParSt stmt_blocks2
      ; (ss2, zs) <- addBinds ss1 (repSts ss)
      ; return (ss1++ss2, z : zs) }
   where
     rep_stmt_block :: ParStmtBlock Name Name -> DsM ([GenSymBind], Core [TH.StmtQ])
     rep_stmt_block (ParStmtBlock stmts _ _) =
       do { (ss1, zs) <- repSts (map unLoc stmts)
          ; zs1 <- coreList stmtQTyConName zs
          ; return (ss1, zs1) }
repSts [LastStmt e _]
  = do { e2 <- repLE e
       ; z <- repNoBindSt e2
       ; return ([], [z]) }
repSts [] = return ([],[])
repSts other = notHandled "Exotic statement" (ppr other)
-----------------------------------------------------------
--                      Bindings
-----------------------------------------------------------

-- | Represent a local binding group, returning gensym bindings for its
-- binders together with the list of TH declarations.
repBinds :: HsLocalBinds Name -> DsM ([GenSymBind], Core [TH.DecQ])
repBinds EmptyLocalBinds
  = do { core_list <- coreList decQTyConName []
       ; return ([], core_list) }

repBinds b@(HsIPBinds _) = notHandled "Implicit parameters" (ppr b)

repBinds (HsValBinds decs)
 = do { let { bndrs = hsSigTvBinders decs ++ collectHsValBinders decs }
                -- No need to worry about detailed scopes within
                -- the binding group, because we are talking Names
                -- here, so we can safely treat it as a mutually
                -- recursive group
                -- For hsSigTvBinders see Note [Scoped type variables in bindings]
      ; ss        <- mkGenSyms bndrs
      ; prs       <- addBinds ss (rep_val_binds decs)
      ; core_list <- coreList decQTyConName
                              (de_loc (sort_by_loc prs))
      ; return (ss, core_list) }

rep_val_binds :: HsValBinds Name -> DsM [(SrcSpan, Core TH.DecQ)]
-- Assumes: all the binders of the binding are already in the meta-env
rep_val_binds (ValBindsOut binds sigs)
 = do { core1 <- rep_binds' (unionManyBags (map snd binds))
      ; core2 <- rep_sigs' sigs
      ; return (core1 ++ core2) }
rep_val_binds (ValBindsIn _ _)
 = panic "rep_val_binds: ValBindsIn"

-- | Represent a bag of bindings, sorted into source order.
rep_binds :: LHsBinds Name -> DsM [Core TH.DecQ]
rep_binds binds = do { binds_w_locs <- rep_binds' binds
                     ; return (de_loc (sort_by_loc binds_w_locs)) }

rep_binds' :: LHsBinds Name -> DsM [(SrcSpan, Core TH.DecQ)]
rep_binds' = mapM rep_bind . bagToList
-- | Represent a single binding as a located TH declaration.
rep_bind :: LHsBind Name -> DsM (SrcSpan, Core TH.DecQ)
-- Assumes: all the binders of the binding are already in the meta-env

-- Note GHC treats declarations of a variable (not a pattern)
-- e.g.  x = g 5 as a Fun MonoBinds. This is indicated by a single match
-- with an empty list of patterns
rep_bind (L loc (FunBind { fun_id = fn,
                           fun_matches = MG { mg_alts = [L _ (Match [] _ (GRHSs guards wheres))] } }))
 = do { (ss,wherecore) <- repBinds wheres
      ; guardcore <- addBinds ss (repGuards guards)
      ; fn'  <- lookupLBinder fn
      ; p    <- repPvar fn'
      ; ans  <- repVal p guardcore wherecore
      ; ans' <- wrapGenSyms ss ans
      ; return (loc, ans') }

-- The general case: a function defined by one or more clauses
rep_bind (L loc (FunBind { fun_id = fn, fun_matches = MG { mg_alts = ms } }))
 = do { ms1 <- mapM repClauseTup ms
      ; fn' <- lookupLBinder fn
      ; ans <- repFun fn' (nonEmptyCoreList ms1)
      ; return (loc, ans) }

rep_bind (L loc (PatBind { pat_lhs = pat, pat_rhs = GRHSs guards wheres }))
 = do { patcore <- repLP pat
      ; (ss,wherecore) <- repBinds wheres
      ; guardcore <- addBinds ss (repGuards guards)
      ; ans  <- repVal patcore guardcore wherecore
      ; ans' <- wrapGenSyms ss ans
      ; return (loc, ans') }

rep_bind (L _ (VarBind { var_id = v, var_rhs = e}))
 = do { v' <- lookupBinder v
      ; e2 <- repLE e
      ; x  <- repNormal e2
      ; patcore <- repPvar v'
      ; empty_decls <- coreList decQTyConName []
      ; ans <- repVal patcore x empty_decls
      ; return (srcLocSpan (getSrcLoc v), ans) }

rep_bind (L _ (AbsBinds {}))       = panic "rep_bind: AbsBinds"
rep_bind (L _ dec@(PatSynBind {})) = notHandled "pattern synonyms" (ppr dec)
-----------------------------------------------------------------------------
-- Since everything in a Bind is mutually recursive we need to rename
-- all the variables simultaneously. For example:
-- [| AndMonoBinds (f x = x + g 2) (g x = f 1 + 2) |] would translate to
-- do { f'1 <- gensym "f"
-- ; g'2 <- gensym "g"
-- ; [ do { x'3 <- gensym "x"; fun f'1 [pvar x'3] [| x + g2 |]},
-- do { x'4 <- gensym "x"; fun g'2 [pvar x'4] [| f 1 + 2 |]}
-- ]}
-- This requires collecting the bindings (f'1 <- gensym "f"), and the
-- environment ( f |-> f'1 ) from each binding, and then unioning them
-- together. As we do this we collect GenSymBinds's which represent the renamed
-- variables bound by the Bindings. In order not to lose track of these
-- representations we build a shadow datatype MB with the same structure as
-- MonoBinds, but which has slots for the representations
-----------------------------------------------------------------------------
-- GHC allows a more general form of lambda abstraction than specified
-- by Haskell 98. In particular it allows guarded lambda's like :
-- (\ x | even x -> 0 | odd x -> 1). At the moment we can't represent this in
-- Template Haskell's Meta.Exp type, so we punt (by raising an error) if it
-- isn't a simple thing like (\ p1 .. pn -> exp).
-- | Represent a lambda abstraction.  Only the simple form
-- (\ p1 .. pn -> exp) is supported; guarded lambdas cannot be expressed
-- in Template Haskell's Exp type, so anything else is rejected.
repLambda :: LMatch Name (LHsExpr Name) -> DsM (Core TH.ExpQ)
repLambda (L _ (Match ps _ (GRHSs [L _ (GRHS [] e)] EmptyLocalBinds)))
 = do { let bndrs = collectPatsBinders ps ;
      ; ss  <- mkGenSyms bndrs
      ; lam <- addBinds ss (
                 do { xs <- repLPs ps; body <- repLE e; repLam xs body })
      ; wrapGenSyms ss lam }

-- Fix: the user-facing message previously misspelled "lambdas" as "labmdas"
repLambda (L _ m) = notHandled "Guarded lambdas" (pprMatch (LambdaExpr :: HsMatchContext Name) m)
-----------------------------------------------------------------------------
--                      Patterns

-- repP deals with patterns.  It assumes that we have already
-- walked over the pattern(s) once to collect the binders, and
-- have extended the environment.  So every pattern-bound
-- variable should already appear in the environment.

-- Process a list of patterns
repLPs :: [LPat Name] -> DsM (Core [TH.PatQ])
repLPs ps = repList patQTyConName repLP ps

repLP :: LPat Name -> DsM (Core TH.PatQ)
repLP (L _ p) = repP p
-- | Represent a pattern as a TH 'PatQ'.  Pattern-bound variables are
-- assumed to be in the meta-environment already (see note above).
repP :: Pat Name -> DsM (Core TH.PatQ)
repP (WildPat _) = repPwild
repP (LitPat l)  = do { l2 <- repLiteral l; repPlit l2 }
repP (VarPat x)  = do { x' <- lookupBinder x; repPvar x' }
repP (LazyPat p) = do { p1 <- repLP p; repPtilde p1 }
repP (BangPat p) = do { p1 <- repLP p; repPbang p1 }
repP (AsPat x p) = do { x' <- lookupLBinder x; p1 <- repLP p; repPaspat x' p1 }
repP (ParPat p)  = repLP p
repP (ListPat ps _ Nothing)        = do { qs <- repLPs ps; repPlist qs }
-- A list pattern with a view function: represent the plain list pattern,
-- then wrap it in a view pattern using the rebuild expression
repP (ListPat ps ty1 (Just (_,e))) = do { p <- repP (ListPat ps ty1 Nothing); e' <- repE e; repPview e' p}
repP (TuplePat ps boxed _)
  | isBoxed boxed = do { qs <- repLPs ps; repPtup qs }
  | otherwise     = do { qs <- repLPs ps; repPunboxedTup qs }
repP (ConPatIn dc details)
 = do { con_str <- lookupLOcc dc
      ; case details of
         PrefixCon ps   -> do { qs <- repLPs ps; repPcon con_str qs }
         RecCon rec     -> do { fps <- repList fieldPatQTyConName rep_fld (rec_flds rec)
                              ; repPrec con_str fps }
         InfixCon p1 p2 -> do { p1' <- repLP p1;
                                p2' <- repLP p2;
                                repPinfix p1' con_str p2' }
   }
 where
   rep_fld fld = do { MkC v <- lookupLOcc (hsRecFieldId fld)
                    ; MkC p <- repLP (hsRecFieldArg fld)
                    ; rep2 fieldPatName [v,p] }

repP (NPat l Nothing _)    = do { a <- repOverloadedLiteral l; repPlit a }
repP (ViewPat e p _)       = do { e' <- repLE e; p' <- repLP p; repPview e' p' }
repP p@(NPat _ (Just _) _) = notHandled "Negative overloaded patterns" (ppr p)
repP p@(SigPatIn {})       = notHandled "Type signatures in patterns" (ppr p)
        -- The problem is to do with scoped type variables.
        -- To implement them, we have to implement the scoping rules
        -- here in DsMeta, and I don't want to do that today!
        --       do { p' <- repLP p; t' <- repLTy t; repPsig p' t' }
        --    repPsig :: Core TH.PatQ -> Core TH.TypeQ -> DsM (Core TH.PatQ)
        --    repPsig (MkC p) (MkC t) = rep2 sigPName [p, t]

repP (SplicePat splice) = repSplice splice

repP other = notHandled "Exotic pattern" (ppr other)
----------------------------------------------------------
--      Declaration ordering helpers

-- | Sort location-tagged declarations into source order.
sort_by_loc :: [(SrcSpan, a)] -> [(SrcSpan, a)]
sort_by_loc = sortBy (\(loc1, _) (loc2, _) -> compare loc1 loc2)

-- | Drop the location tags, keeping only the payloads.
de_loc :: [(a, b)] -> [b]
de_loc tagged = [payload | (_, payload) <- tagged]
----------------------------------------------------------
--      The meta-environment

-- A name/identifier association for fresh names of locally bound entities
type GenSymBind = (Name, Id)    -- Gensym the string and bind it to the Id
                                -- I.e.         (x, x_id) means
                                --      let x_id = gensym "x" in ...

-- Generate a fresh name for a locally bound entity
mkGenSyms :: [Name] -> DsM [GenSymBind]
-- We can use the existing name.  For example:
--      [| \x_77 -> x_77 + x_77 |]
-- desugars to
--      do { x_77 <- genSym "x"; .... }
-- We use the same x_77 in the desugared program, but with the type Bndr
-- instead of Int
--
-- We do make it an Internal name, though (hence localiseName)
--
-- Nevertheless, it's monadic because we have to generate nameTy
mkGenSyms ns = do { var_ty <- lookupType nameTyConName
                  ; return [(nm, mkLocalId (localiseName nm) var_ty) | nm <- ns] }

-- | Run a computation with 'Bound' meta-environment entries for the
-- given gensym'd names.
addBinds :: [GenSymBind] -> DsM a -> DsM a
-- Add a list of fresh names for locally bound entities to the
-- meta environment (which is part of the state carried around
-- by the desugarer monad)
addBinds bs m = dsExtendMetaEnv (mkNameEnv [(n,Bound id) | (n,id) <- bs]) m
-- | Give a fresh name the same meta-environment entry as the name it
-- duplicates; panics if the old name has no entry.
dupBinder :: (Name, Name) -> DsM (Name, DsMetaVal)
dupBinder (new, old)
  = dsLookupMetaEnv old >>= \mb_val ->
    case mb_val of
      Just val -> return (new, val)
      Nothing  -> pprPanic "dupBinder" (ppr old)

-- Look up a locally bound name
--
lookupLBinder :: Located Name -> DsM (Core TH.Name)
lookupLBinder (L _ n) = lookupBinder n

-- | Look up the TH representation of a binder occurrence.
lookupBinder :: Name -> DsM (Core TH.Name)
lookupBinder n = lookupOcc n
-- Binders are brought into scope before the pattern or what-not is
-- desugared.  Moreover, in instance declaration the binder of a method
-- will be the selector Id and hence a global; so we need the
-- globalVar case of lookupOcc

-- Look up a name that is either locally bound or a global name
--
--  * If it is a global name, generate the "original name" representation (ie,
--   the <module>:<name> form) for the associated entity
--
lookupLOcc :: Located Name -> DsM (Core TH.Name)
-- Lookup an occurrence; it can't be a splice.
-- Use the in-scope bindings if they exist
lookupLOcc (L _ n) = lookupOcc n

lookupOcc :: Name -> DsM (Core TH.Name)
lookupOcc n
  = do { mb_val <- dsLookupMetaEnv n ;
         case mb_val of
           Nothing         -> globalVar n
           Just (Bound x)  -> return (coreVar x)
           Just (Splice _) -> pprPanic "repE:lookupOcc" (ppr n)
       }

globalVar :: Name -> DsM (Core TH.Name)
-- Not bound by the meta-env
-- Could be top-level; or could be local
--      f x = $(g [| x |])
-- Here the x will be local
globalVar name
  | isExternalName name
  = do { MkC mod <- coreStringLit name_mod
       ; MkC pkg <- coreStringLit name_pkg
       ; MkC occ <- occNameLit name
       ; rep2 mk_varg [pkg,mod,occ] }
  | otherwise
  -- Local name: identified by occurrence string plus its unique
  = do { MkC occ <- occNameLit name
       ; MkC uni <- coreIntLit (getKey (getUnique name))
       ; rep2 mkNameLName [occ,uni] }
  where
    mod      = ASSERT( isExternalName name) nameModule name
    name_mod = moduleNameString (moduleName mod)
    name_pkg = packageKeyString (modulePackageKey mod)
    name_occ = nameOccName name
    -- Pick the TH name-builder matching the occurrence namespace
    mk_varg | OccName.isDataOcc name_occ = mkNameG_dName
            | OccName.isVarOcc  name_occ = mkNameG_vName
            | OccName.isTcOcc   name_occ = mkNameG_tcName
            | otherwise                  = pprPanic "DsMeta.globalVar" (ppr name)
lookupType :: Name      -- Name of type constructor (e.g. TH.ExpQ)
           -> DsM Type  -- The type
lookupType tc_name = do { tc <- dsLookupTyCon tc_name ;
                          return (mkTyConApp tc []) }

wrapGenSyms :: [GenSymBind]
            -> Core (TH.Q a) -> DsM (Core (TH.Q a))
-- wrapGenSyms [(nm1,id1), (nm2,id2)] y
--      --> bindQ (gensym nm1) (\ id1 ->
--          bindQ (gensym nm2 (\ id2 ->
--          y))
wrapGenSyms binds body@(MkC b)
  = do { var_ty <- lookupType nameTyConName
       ; go var_ty binds }
  where
    [elt_ty] = tcTyConAppArgs (exprType b)
        -- b :: Q a, so we can get the type 'a' by looking at the
        -- argument type. NB: this relies on Q being a data/newtype,
        -- not a type synonym

    go _ [] = return body
    go var_ty ((name,id) : binds)
      = do { MkC body'  <- go var_ty binds
           ; lit_str    <- occNameLit name
           ; gensym_app <- repGensym lit_str
           ; repBindQ var_ty elt_ty
                      gensym_app (MkC (Lam id body')) }

-- | The occurrence name of a 'Name' as a TH string literal.
occNameLit :: Name -> DsM (Core String)
occNameLit n = coreStringLit (occNameString (nameOccName n))
-- %*********************************************************************
-- %*                                                                   *
--              Constructing code
-- %*                                                                   *
-- %*********************************************************************

-----------------------------------------------------------------------------
-- PHANTOM TYPES for consistency. In order to make sure we do this correct
-- we invent a new datatype which uses phantom types.

-- | A Core expression tagged (at the type level only) with the TH type
-- of the value it builds.
newtype Core a = MkC CoreExpr

-- | Unwrap the underlying Core expression.
unC :: Core a -> CoreExpr
unC (MkC x) = x

-- | Apply the global Id named @n@ to the given Core arguments.
rep2 :: Name -> [ CoreExpr ] -> DsM (Core a)
rep2 n xs = do { id <- dsLookupGlobalId n
               ; return (MkC (foldl App (Var id) xs)) }

-- | Saturate the data constructor named @n@ with the given arguments.
dataCon' :: Name -> [CoreExpr] -> DsM (Core a)
dataCon' n args = do { id <- dsLookupDataCon n
                     ; return $ MkC $ mkCoreConApps id args }

-- | A nullary data constructor application.
dataCon :: Name -> DsM (Core a)
dataCon n = dataCon' n []
-- Then we make "repConstructors" which use the phantom types for each of the
-- smart constructors of the Meta.Meta datatypes.

-- %*********************************************************************
-- %*                                                                   *
--              The 'smart constructors'
-- %*                                                                   *
-- %*********************************************************************

--------------- Patterns -----------------

-- | Literal pattern.
repPlit :: Core TH.Lit -> DsM (Core TH.PatQ)
repPlit lit = rep2 litPName [unC lit]

-- | Variable pattern.
repPvar :: Core TH.Name -> DsM (Core TH.PatQ)
repPvar nm = rep2 varPName [unC nm]

-- | Boxed tuple pattern.
repPtup :: Core [TH.PatQ] -> DsM (Core TH.PatQ)
repPtup pats = rep2 tupPName [unC pats]

-- | Unboxed tuple pattern.
repPunboxedTup :: Core [TH.PatQ] -> DsM (Core TH.PatQ)
repPunboxedTup pats = rep2 unboxedTupPName [unC pats]

-- | Prefix data-constructor pattern.
repPcon :: Core TH.Name -> Core [TH.PatQ] -> DsM (Core TH.PatQ)
repPcon con pats = rep2 conPName [unC con, unC pats]

-- | Record-syntax constructor pattern.
repPrec :: Core TH.Name -> Core [(TH.Name,TH.PatQ)] -> DsM (Core TH.PatQ)
repPrec con flds = rep2 recPName [unC con, unC flds]

-- | Infix data-constructor pattern.
repPinfix :: Core TH.PatQ -> Core TH.Name -> Core TH.PatQ -> DsM (Core TH.PatQ)
repPinfix lhs con rhs = rep2 infixPName [unC lhs, unC con, unC rhs]

-- | Lazy (irrefutable) pattern.
repPtilde :: Core TH.PatQ -> DsM (Core TH.PatQ)
repPtilde pat = rep2 tildePName [unC pat]

-- | Bang (strict) pattern.
repPbang :: Core TH.PatQ -> DsM (Core TH.PatQ)
repPbang pat = rep2 bangPName [unC pat]

-- | As-pattern.
repPaspat :: Core TH.Name -> Core TH.PatQ -> DsM (Core TH.PatQ)
repPaspat nm pat = rep2 asPName [unC nm, unC pat]

-- | Wildcard pattern.
repPwild  :: DsM (Core TH.PatQ)
repPwild = rep2 wildPName []

-- | List pattern.
repPlist :: Core [TH.PatQ] -> DsM (Core TH.PatQ)
repPlist pats = rep2 listPName [unC pats]

-- | View pattern.
repPview :: Core TH.ExpQ -> Core TH.PatQ -> DsM (Core TH.PatQ)
repPview expr pat = rep2 viewPName [unC expr, unC pat]
--------------- Expressions -----------------

-- | Build a variable or constructor expression, depending on the
-- namespace of the name's occurrence.
repVarOrCon :: Name -> Core TH.Name -> DsM (Core TH.ExpQ)
repVarOrCon vc str
  = if isDataOcc (nameOccName vc) then repCon str else repVar str

-- | Variable expression.
repVar :: Core TH.Name -> DsM (Core TH.ExpQ)
repVar nm = rep2 varEName [unC nm]

-- | Data-constructor expression.
repCon :: Core TH.Name -> DsM (Core TH.ExpQ)
repCon nm = rep2 conEName [unC nm]

-- | Literal expression.
repLit :: Core TH.Lit -> DsM (Core TH.ExpQ)
repLit lit = rep2 litEName [unC lit]

-- | Function application.
repApp :: Core TH.ExpQ -> Core TH.ExpQ -> DsM (Core TH.ExpQ)
repApp fun arg = rep2 appEName [unC fun, unC arg]

-- | Lambda abstraction.
repLam :: Core [TH.PatQ] -> Core TH.ExpQ -> DsM (Core TH.ExpQ)
repLam pats body = rep2 lamEName [unC pats, unC body]

-- | Lambda-case expression.
repLamCase :: Core [TH.MatchQ] -> DsM (Core TH.ExpQ)
repLamCase ms = rep2 lamCaseEName [unC ms]

-- | Boxed tuple expression.
repTup :: Core [TH.ExpQ] -> DsM (Core TH.ExpQ)
repTup es = rep2 tupEName [unC es]

-- | Unboxed tuple expression.
repUnboxedTup :: Core [TH.ExpQ] -> DsM (Core TH.ExpQ)
repUnboxedTup es = rep2 unboxedTupEName [unC es]

-- | If-then-else expression.
repCond :: Core TH.ExpQ -> Core TH.ExpQ -> Core TH.ExpQ -> DsM (Core TH.ExpQ)
repCond scrut thn els = rep2 condEName [unC scrut, unC thn, unC els]

-- | Multi-way if expression.
repMultiIf :: Core [TH.Q (TH.Guard, TH.Exp)] -> DsM (Core TH.ExpQ)
repMultiIf alts = rep2 multiIfEName [unC alts]

-- | Let expression.
repLetE :: Core [TH.DecQ] -> Core TH.ExpQ -> DsM (Core TH.ExpQ)
repLetE decs body = rep2 letEName [unC decs, unC body]

-- | Case expression.
repCaseE :: Core TH.ExpQ -> Core [TH.MatchQ] -> DsM( Core TH.ExpQ)
repCaseE scrut ms = rep2 caseEName [unC scrut, unC ms]

-- | Do expression.
repDoE :: Core [TH.StmtQ] -> DsM (Core TH.ExpQ)
repDoE stmts = rep2 doEName [unC stmts]

-- | List comprehension.
repComp :: Core [TH.StmtQ] -> DsM (Core TH.ExpQ)
repComp stmts = rep2 compEName [unC stmts]

-- | Explicit list.
repListExp :: Core [TH.ExpQ] -> DsM (Core TH.ExpQ)
repListExp es = rep2 listEName [unC es]

-- | Expression with a type signature.
repSigExp :: Core TH.ExpQ -> Core TH.TypeQ -> DsM (Core TH.ExpQ)
repSigExp expr ty = rep2 sigEName [unC expr, unC ty]

-- | Record construction.
repRecCon :: Core TH.Name -> Core [TH.Q TH.FieldExp]-> DsM (Core TH.ExpQ)
repRecCon con flds = rep2 recConEName [unC con, unC flds]

-- | Record update.
repRecUpd :: Core TH.ExpQ -> Core [TH.Q TH.FieldExp] -> DsM (Core TH.ExpQ)
repRecUpd expr flds = rep2 recUpdEName [unC expr, unC flds]

-- | A single (field name, value) pair.
repFieldExp :: Core TH.Name -> Core TH.ExpQ -> DsM (Core (TH.Q TH.FieldExp))
repFieldExp fld val = rep2 fieldExpName [unC fld, unC val]

-- | Infix application.
repInfixApp :: Core TH.ExpQ -> Core TH.ExpQ -> Core TH.ExpQ -> DsM (Core TH.ExpQ)
repInfixApp lhs op rhs = rep2 infixAppName [unC lhs, unC op, unC rhs]

-- | Left operator section.
repSectionL :: Core TH.ExpQ -> Core TH.ExpQ -> DsM (Core TH.ExpQ)
repSectionL x y = rep2 sectionLName [unC x, unC y]

-- | Right operator section.
repSectionR :: Core TH.ExpQ -> Core TH.ExpQ -> DsM (Core TH.ExpQ)
repSectionR x y = rep2 sectionRName [unC x, unC y]
------------ Right hand sides (guarded expressions) ----
-- | Guarded right-hand side: wraps a list of (guard, expression) pairs
-- with TH's @guardedB@.
repGuarded :: Core [TH.Q (TH.Guard, TH.Exp)] -> DsM (Core TH.BodyQ)
repGuarded (MkC pairs) = rep2 guardedBName [pairs]

-- | Unguarded right-hand side (TH's @normalB@).
repNormal :: Core TH.ExpQ -> DsM (Core TH.BodyQ)
repNormal (MkC e) = rep2 normalBName [e]
------------ Guards ----
-- | Desugar a located guard expression and its right-hand side, then
-- combine them with 'repNormalGE'.
repLNormalGE :: LHsExpr Name -> LHsExpr Name -> DsM (Core (TH.Q (TH.Guard, TH.Exp)))
repLNormalGE guard_e rhs_e
  = do { guard_c <- repLE guard_e
       ; rhs_c   <- repLE rhs_e
       ; repNormalGE guard_c rhs_c }
-- | Pair a boolean guard with its right-hand side expression.
repNormalGE :: Core TH.ExpQ -> Core TH.ExpQ -> DsM (Core (TH.Q (TH.Guard, TH.Exp)))
repNormalGE (MkC g) (MkC e) = rep2 normalGEName [g, e]

-- | Pattern guard (a list of statements) paired with its right-hand side.
repPatGE :: Core [TH.StmtQ] -> Core TH.ExpQ -> DsM (Core (TH.Q (TH.Guard, TH.Exp)))
repPatGE (MkC ss) (MkC e) = rep2 patGEName [ss, e]

------------- Stmts -------------------
repBindSt :: Core TH.PatQ -> Core TH.ExpQ -> DsM (Core TH.StmtQ)
repBindSt (MkC p) (MkC e) = rep2 bindSName [p,e]

repLetSt :: Core [TH.DecQ] -> DsM (Core TH.StmtQ)
repLetSt (MkC ds) = rep2 letSName [ds]

repNoBindSt :: Core TH.ExpQ -> DsM (Core TH.StmtQ)
repNoBindSt (MkC e) = rep2 noBindSName [e]

-- parallel comprehension branches (list of statement lists)
repParSt :: Core [[TH.StmtQ]] -> DsM (Core TH.StmtQ)
repParSt (MkC sss) = rep2 parSName [sss]

-------------- Range (Arithmetic sequences) -----------
repFrom :: Core TH.ExpQ -> DsM (Core TH.ExpQ)
repFrom (MkC x) = rep2 fromEName [x]

repFromThen :: Core TH.ExpQ -> Core TH.ExpQ -> DsM (Core TH.ExpQ)
repFromThen (MkC x) (MkC y) = rep2 fromThenEName [x,y]

repFromTo :: Core TH.ExpQ -> Core TH.ExpQ -> DsM (Core TH.ExpQ)
repFromTo (MkC x) (MkC y) = rep2 fromToEName [x,y]

repFromThenTo :: Core TH.ExpQ -> Core TH.ExpQ -> Core TH.ExpQ -> DsM (Core TH.ExpQ)
repFromThenTo (MkC x) (MkC y) (MkC z) = rep2 fromThenToEName [x,y,z]
------------ Match and Clause Tuples -----------
-- | Case-alternative: pattern, body and its local declarations.
repMatch :: Core TH.PatQ -> Core TH.BodyQ -> Core [TH.DecQ] -> DsM (Core TH.MatchQ)
repMatch (MkC p) (MkC bod) (MkC ds) = rep2 matchName [p, bod, ds]

-- | Function-definition clause: patterns, body and local declarations.
repClause :: Core [TH.PatQ] -> Core TH.BodyQ -> Core [TH.DecQ] -> DsM (Core TH.ClauseQ)
repClause (MkC ps) (MkC bod) (MkC ds) = rep2 clauseName [ps, bod, ds]

-------------- Dec -----------------------------
-- | Pattern binding (valD): pattern, body, where-declarations.
repVal :: Core TH.PatQ -> Core TH.BodyQ -> Core [TH.DecQ] -> DsM (Core TH.DecQ)
repVal (MkC p) (MkC b) (MkC ds) = rep2 valDName [p, b, ds]

-- | Function binding (funD): name and its clauses.
repFun :: Core TH.Name -> Core [TH.ClauseQ] -> DsM (Core TH.DecQ)
repFun (MkC nm) (MkC b) = rep2 funDName [nm, b]
-- | Data declaration.  The 'Maybe' argument selects between an ordinary
-- @data@ declaration ('Nothing': uses the type-variable binders) and a
-- @data instance@ ('Just tys': uses the instance argument types and
-- ignores the binders).
repData :: Core TH.CxtQ -> Core TH.Name -> Core [TH.TyVarBndr]
        -> Maybe (Core [TH.TypeQ])
        -> Core [TH.ConQ] -> Core [TH.Name] -> DsM (Core TH.DecQ)
repData (MkC cxt) (MkC nm) (MkC tvs) Nothing (MkC cons) (MkC derivs)
  = rep2 dataDName [cxt, nm, tvs, cons, derivs]
repData (MkC cxt) (MkC nm) (MkC _) (Just (MkC tys)) (MkC cons) (MkC derivs)
  = rep2 dataInstDName [cxt, nm, tys, cons, derivs]

-- | Newtype declaration; the 'Maybe' argument distinguishes @newtype@
-- from @newtype instance@ exactly as in 'repData'.
repNewtype :: Core TH.CxtQ -> Core TH.Name -> Core [TH.TyVarBndr]
           -> Maybe (Core [TH.TypeQ])
           -> Core TH.ConQ -> Core [TH.Name] -> DsM (Core TH.DecQ)
repNewtype (MkC cxt) (MkC nm) (MkC tvs) Nothing (MkC con) (MkC derivs)
  = rep2 newtypeDName [cxt, nm, tvs, con, derivs]
repNewtype (MkC cxt) (MkC nm) (MkC _) (Just (MkC tys)) (MkC con) (MkC derivs)
  = rep2 newtypeInstDName [cxt, nm, tys, con, derivs]
-- Declaration-level combinators: each wraps the corresponding TH.Lib
-- smart constructor via 'rep2'.
repTySyn :: Core TH.Name -> Core [TH.TyVarBndr]
         -> Core TH.TypeQ -> DsM (Core TH.DecQ)
repTySyn (MkC nm) (MkC tvs) (MkC rhs)
  = rep2 tySynDName [nm, tvs, rhs]

repInst :: Core TH.CxtQ -> Core TH.TypeQ -> Core [TH.DecQ] -> DsM (Core TH.DecQ)
repInst (MkC cxt) (MkC ty) (MkC ds) = rep2 instanceDName [cxt, ty, ds]

repClass :: Core TH.CxtQ -> Core TH.Name -> Core [TH.TyVarBndr]
         -> Core [TH.FunDep] -> Core [TH.DecQ]
         -> DsM (Core TH.DecQ)
repClass (MkC cxt) (MkC cls) (MkC tvs) (MkC fds) (MkC ds)
  = rep2 classDName [cxt, cls, tvs, fds, ds]

-- | Standalone @deriving@ declaration.
repDeriv :: Core TH.CxtQ -> Core TH.TypeQ -> DsM (Core TH.DecQ)
repDeriv (MkC cxt) (MkC ty) = rep2 standaloneDerivDName [cxt, ty]

-- INLINE/NOINLINE pragma
repPragInl :: Core TH.Name -> Core TH.Inline -> Core TH.RuleMatch
           -> Core TH.Phases -> DsM (Core TH.DecQ)
repPragInl (MkC nm) (MkC inline) (MkC rm) (MkC phases)
  = rep2 pragInlDName [nm, inline, rm, phases]

-- SPECIALISE pragma
repPragSpec :: Core TH.Name -> Core TH.TypeQ -> Core TH.Phases
            -> DsM (Core TH.DecQ)
repPragSpec (MkC nm) (MkC ty) (MkC phases)
  = rep2 pragSpecDName [nm, ty, phases]

-- SPECIALISE INLINE pragma
repPragSpecInl :: Core TH.Name -> Core TH.TypeQ -> Core TH.Inline
               -> Core TH.Phases -> DsM (Core TH.DecQ)
repPragSpecInl (MkC nm) (MkC ty) (MkC inline) (MkC phases)
  = rep2 pragSpecInlDName [nm, ty, inline, phases]

-- SPECIALISE instance pragma
repPragSpecInst :: Core TH.TypeQ -> DsM (Core TH.DecQ)
repPragSpecInst (MkC ty) = rep2 pragSpecInstDName [ty]

-- RULES pragma: rule name, binders, lhs, rhs, activation phases
repPragRule :: Core String -> Core [TH.RuleBndrQ] -> Core TH.ExpQ
            -> Core TH.ExpQ -> Core TH.Phases -> DsM (Core TH.DecQ)
repPragRule (MkC nm) (MkC bndrs) (MkC lhs) (MkC rhs) (MkC phases)
  = rep2 pragRuleDName [nm, bndrs, lhs, rhs, phases]

-- ANN pragma
repPragAnn :: Core TH.AnnTarget -> Core TH.ExpQ -> DsM (Core TH.DecQ)
repPragAnn (MkC targ) (MkC e) = rep2 pragAnnDName [targ, e]

-- Open type/data family without a kind signature
repFamilyNoKind :: Core TH.FamFlavour -> Core TH.Name -> Core [TH.TyVarBndr]
                -> DsM (Core TH.DecQ)
repFamilyNoKind (MkC flav) (MkC nm) (MkC tvs)
  = rep2 familyNoKindDName [flav, nm, tvs]

-- Open type/data family with a kind signature
repFamilyKind :: Core TH.FamFlavour -> Core TH.Name -> Core [TH.TyVarBndr]
              -> Core TH.Kind
              -> DsM (Core TH.DecQ)
repFamilyKind (MkC flav) (MkC nm) (MkC tvs) (MkC ki)
  = rep2 familyKindDName [flav, nm, tvs, ki]

repTySynInst :: Core TH.Name -> Core TH.TySynEqnQ -> DsM (Core TH.DecQ)
repTySynInst (MkC nm) (MkC eqn)
  = rep2 tySynInstDName [nm, eqn]

repClosedFamilyNoKind :: Core TH.Name
                      -> Core [TH.TyVarBndr]
                      -> Core [TH.TySynEqnQ]
                      -> DsM (Core TH.DecQ)
repClosedFamilyNoKind (MkC nm) (MkC tvs) (MkC eqns)
  = rep2 closedTypeFamilyNoKindDName [nm, tvs, eqns]

repClosedFamilyKind :: Core TH.Name
                    -> Core [TH.TyVarBndr]
                    -> Core TH.Kind
                    -> Core [TH.TySynEqnQ]
                    -> DsM (Core TH.DecQ)
repClosedFamilyKind (MkC nm) (MkC tvs) (MkC ki) (MkC eqns)
  = rep2 closedTypeFamilyKindDName [nm, tvs, ki, eqns]

repTySynEqn :: Core [TH.TypeQ] -> Core TH.TypeQ -> DsM (Core TH.TySynEqnQ)
repTySynEqn (MkC lhs) (MkC rhs)
  = rep2 tySynEqnName [lhs, rhs]

repRoleAnnotD :: Core TH.Name -> Core [TH.Role] -> DsM (Core TH.DecQ)
repRoleAnnotD (MkC n) (MkC roles) = rep2 roleAnnotDName [n, roles]

repFunDep :: Core [TH.Name] -> Core [TH.Name] -> DsM (Core TH.FunDep)
repFunDep (MkC xs) (MkC ys) = rep2 funDepName [xs, ys]

-- | Type signature declaration.  Unlike the other combinators, the TH
-- builder 'Name' (mk_sig) is supplied by the caller.
repProto :: Name -> Core TH.Name -> Core TH.TypeQ -> DsM (Core TH.DecQ)
repProto mk_sig (MkC s) (MkC ty) = rep2 mk_sig [s, ty]

repCtxt :: Core [TH.PredQ] -> DsM (Core TH.CxtQ)
repCtxt (MkC tys) = rep2 cxtName [tys]
-- | Desugar one data-constructor declaration, dispatching on whether
-- the constructor is prefix, record-style, or infix.
repConstr :: Core TH.Name -> HsConDeclDetails Name
          -> DsM (Core TH.ConQ)
repConstr con (PrefixCon ps)
  = do { arg_tys <- repList strictTypeQTyConName repBangTy ps
       ; rep2 normalCName [unC con, unC arg_tys] }
repConstr con (RecCon ips)
  = do { arg_vtys <- repList varStrictTypeQTyConName rep_field ips
       ; rep2 recCName [unC con, unC arg_vtys] }
  where
    -- One record field: (field name, strictness-annotated type)
    rep_field fld = do { MkC v  <- lookupLOcc (cd_fld_name fld)
                       ; MkC ty <- repBangTy  (cd_fld_type fld)
                       ; rep2 varStrictTypeName [v,ty] }
repConstr con (InfixCon st1 st2)
  = do { arg1 <- repBangTy st1
       ; arg2 <- repBangTy st2
       ; rep2 infixCName [unC arg1, unC con, unC arg2] }
------------ Types -------------------
-- | Universally quantified type: binders, context, body type.
repTForall :: Core [TH.TyVarBndr] -> Core TH.CxtQ -> Core TH.TypeQ
           -> DsM (Core TH.TypeQ)
repTForall (MkC tvars) (MkC ctxt) (MkC ty)
  = rep2 forallTName [tvars, ctxt, ty]

repTvar :: Core TH.Name -> DsM (Core TH.TypeQ)
repTvar (MkC s) = rep2 varTName [s]

repTapp :: Core TH.TypeQ -> Core TH.TypeQ -> DsM (Core TH.TypeQ)
repTapp (MkC t1) (MkC t2) = rep2 appTName [t1, t2]
-- | Left-fold 'repTapp' over the argument types, building the n-ary
-- type application @f t1 ... tn@.
repTapps :: Core TH.TypeQ -> [Core TH.TypeQ] -> DsM (Core TH.TypeQ)
repTapps fun []       = return fun
repTapps fun (ty:tys) = do { fun' <- repTapp fun ty
                           ; repTapps fun' tys }
-- | Kind-annotated type: ty :: ki.
repTSig :: Core TH.TypeQ -> Core TH.Kind -> DsM (Core TH.TypeQ)
repTSig (MkC ty) (MkC ki) = rep2 sigTName [ty, ki]

-- | The type equality constructor (~).
repTequality :: DsM (Core TH.TypeQ)
repTequality = rep2 equalityTName []
-- | Build a promoted (type-level) list from its element types, by
-- right-folding the promoted cons constructor onto the promoted nil.
repTPromotedList :: [Core TH.TypeQ] -> DsM (Core TH.TypeQ)
repTPromotedList []       = repPromotedNilTyCon
repTPromotedList (ty:tys) = do { cons <- repPromotedConsTyCon
                               ; hd   <- repTapp cons ty
                               ; tl   <- repTPromotedList tys
                               ; repTapp hd tl }
-- | Type-level literal (numeric or string).
repTLit :: Core TH.TyLitQ -> DsM (Core TH.TypeQ)
repTLit (MkC lit) = rep2 litTName [lit]

--------- Type constructors --------------
repNamedTyCon :: Core TH.Name -> DsM (Core TH.TypeQ)
repNamedTyCon (MkC s) = rep2 conTName [s]

repTupleTyCon :: Int -> DsM (Core TH.TypeQ)
-- Note: not Core Int; it's easier to be direct here
repTupleTyCon i = do dflags <- getDynFlags
                     rep2 tupleTName [mkIntExprInt dflags i]

repUnboxedTupleTyCon :: Int -> DsM (Core TH.TypeQ)
-- Note: not Core Int; it's easier to be direct here
repUnboxedTupleTyCon i = do dflags <- getDynFlags
                            rep2 unboxedTupleTName [mkIntExprInt dflags i]

repArrowTyCon :: DsM (Core TH.TypeQ)
repArrowTyCon = rep2 arrowTName []

repListTyCon :: DsM (Core TH.TypeQ)
repListTyCon = rep2 listTName []

-- promoted (data-kind) constructors
repPromotedTyCon :: Core TH.Name -> DsM (Core TH.TypeQ)
repPromotedTyCon (MkC s) = rep2 promotedTName [s]

repPromotedTupleTyCon :: Int -> DsM (Core TH.TypeQ)
repPromotedTupleTyCon i = do dflags <- getDynFlags
                             rep2 promotedTupleTName [mkIntExprInt dflags i]

repPromotedNilTyCon :: DsM (Core TH.TypeQ)
repPromotedNilTyCon = rep2 promotedNilTName []

repPromotedConsTyCon :: DsM (Core TH.TypeQ)
repPromotedConsTyCon = rep2 promotedConsTName []
------------ Kinds -------------------
-- Type-variable binders and kind combinators.
repPlainTV :: Core TH.Name -> DsM (Core TH.TyVarBndr)
repPlainTV (MkC nm) = rep2 plainTVName [nm]

repKindedTV :: Core TH.Name -> Core TH.Kind -> DsM (Core TH.TyVarBndr)
repKindedTV (MkC nm) (MkC ki) = rep2 kindedTVName [nm, ki]

repKVar :: Core TH.Name -> DsM (Core TH.Kind)
repKVar (MkC s) = rep2 varKName [s]

repKCon :: Core TH.Name -> DsM (Core TH.Kind)
repKCon (MkC s) = rep2 conKName [s]

repKTuple :: Int -> DsM (Core TH.Kind)
repKTuple i = do dflags <- getDynFlags
                 rep2 tupleKName [mkIntExprInt dflags i]

repKArrow :: DsM (Core TH.Kind)
repKArrow = rep2 arrowKName []

repKList :: DsM (Core TH.Kind)
repKList = rep2 listKName []

repKApp :: Core TH.Kind -> Core TH.Kind -> DsM (Core TH.Kind)
repKApp (MkC k1) (MkC k2) = rep2 appKName [k1, k2]

-- n-ary kind application, folding 'repKApp' left-to-right
repKApps :: Core TH.Kind -> [Core TH.Kind] -> DsM (Core TH.Kind)
repKApps f [] = return f
repKApps f (k:ks) = do { f' <- repKApp f k; repKApps f' ks }

repKStar :: DsM (Core TH.Kind)
repKStar = rep2 starKName []

repKConstraint :: DsM (Core TH.Kind)
repKConstraint = rep2 constraintKName []
----------------------------------------------------------
-- Literals
-- | Desugar a Haskell literal into a Core value of type @TH.Lit@.
-- Primitive int/word and float/double literals are first re-typed to
-- boxed Integer/Rational literals (via 'mk_integer' / 'mk_rational')
-- so they can be desugared by 'dsLit'; literals with no TH counterpart
-- are reported via 'notHandled'.
repLiteral :: HsLit -> DsM (Core TH.Lit)
repLiteral lit
  = do lit' <- case lit of
                 HsIntPrim i    -> mk_integer i
                 HsWordPrim w   -> mk_integer w
                 HsInt i        -> mk_integer i
                 HsFloatPrim r  -> mk_rational r
                 HsDoublePrim r -> mk_rational r
                 _ -> return lit
       lit_expr <- dsLit lit'
       case mb_lit_name of
         Just lit_name -> rep2 lit_name [lit_expr]
         Nothing -> notHandled "Exotic literal" (ppr lit)
  where
    -- the TH smart-constructor name for this flavour of literal,
    -- or Nothing if TH has no representation for it
    mb_lit_name = case lit of
      HsInteger _ _  -> Just integerLName
      HsInt _        -> Just integerLName
      HsIntPrim _    -> Just intPrimLName
      HsWordPrim _   -> Just wordPrimLName
      HsFloatPrim _  -> Just floatPrimLName
      HsDoublePrim _ -> Just doublePrimLName
      HsChar _       -> Just charLName
      HsString _     -> Just stringLName
      HsRat _ _      -> Just rationalLName
      _              -> Nothing

-- | Rebuild an integer literal at the boxed Integer type.
mk_integer :: Integer -> DsM HsLit
mk_integer i = do integer_ty <- lookupType integerTyConName
                  return $ HsInteger i integer_ty

-- | Rebuild a fractional literal at the Rational type.
mk_rational :: FractionalLit -> DsM HsLit
mk_rational r = do rat_ty <- lookupType rationalTyConName
                   return $ HsRat r rat_ty

mk_string :: FastString -> DsM HsLit
mk_string s = return $ HsString s
-- | Desugar an overloaded literal by converting its value to a concrete
-- 'HsLit' and then reusing 'repLiteral'.
repOverloadedLiteral :: HsOverLit Name -> DsM (Core TH.Lit)
repOverloadedLiteral (OverLit { ol_val = val})
  = do { lit <- mk_lit val; repLiteral lit }
        -- The type Rational will be in the environment, because
        -- the smart constructor 'TH.Syntax.rationalL' uses it in its type,
        -- and rationalL is sucked in when any TH stuff is used

-- | Map each overloaded-literal value onto a concrete literal builder.
mk_lit :: OverLitVal -> DsM HsLit
mk_lit (HsIntegral i)   = mk_integer  i
mk_lit (HsFractional f) = mk_rational f
mk_lit (HsIsString s)   = mk_string   s
--------------- Miscellaneous -------------------
-- | Generate a fresh TH name from a string hint (TH's @newName@).
repGensym :: Core String -> DsM (Core (TH.Q TH.Name))
repGensym (MkC lit_str) = rep2 newNameName [lit_str]

-- | Monadic bind in the Q monad; the two 'Type' arguments are the type
-- applications for the result types a and b.
repBindQ :: Type -> Type -- a and b
         -> Core (TH.Q a) -> Core (a -> TH.Q b) -> DsM (Core (TH.Q b))
repBindQ ty_a ty_b (MkC x) (MkC y)
  = rep2 bindQName [Type ty_a, Type ty_b, x, y]

-- | sequence in the Q monad; the 'Type' argument is the element type.
repSequenceQ :: Type -> Core [TH.Q a] -> DsM (Core (TH.Q [a]))
repSequenceQ ty_a (MkC list)
  = rep2 sequenceQName [Type ty_a, list]
------------ Lists and Tuples -------------------
-- turn a list of patterns into a single pattern matching a list
-- | Apply a desugaring function to every element and package the
-- results as a single Core list (element TyCon given by name).
repList :: Name -> (a -> DsM (Core b))
        -> [a] -> DsM (Core [b])
repList tc_name f args = coreList tc_name =<< mapM f args

-- | Build a Core list after looking up the element type by TyCon name.
coreList :: Name -- Of the TyCon of the element type
         -> [Core a] -> DsM (Core [a])
coreList tc_name es = do elt_ty <- lookupType tc_name
                         return (coreList' elt_ty es)

-- | Build a Core list given the element type directly.
coreList' :: Type -- The element type
          -> [Core a] -> Core [a]
coreList' elt_ty = MkC . mkListExpr elt_ty . map unC
nonEmptyCoreList :: [Core a] -> Core [a]
-- The list must be non-empty so we can get the element type
-- (taken from the first element); otherwise use coreList.
-- Fix: the panic message previously blamed "coreList", misdirecting
-- anyone debugging the crash; it now names this function.
nonEmptyCoreList [] = panic "nonEmptyCoreList: empty argument"
nonEmptyCoreList xs@(MkC x:_) = MkC (mkListExpr (exprType x) (map unC xs))
-- | Build a Core expression for a string literal.
coreStringLit :: String -> DsM (Core String)
coreStringLit str = do str_expr <- mkStringExpr str
                       return (MkC str_expr)
------------ Literals & Variables -------------------
-- | Build a Core expression for an Int literal (platform-sized, hence
-- the DynFlags lookup).
coreIntLit :: Int -> DsM (Core Int)
coreIntLit n = do { dflags <- getDynFlags
                  ; return (MkC (mkIntExprInt dflags n)) }

-- | Wrap an Id as a Core variable occurrence.
coreVar :: Id -> Core TH.Name -- The Id has type Name
coreVar v = MkC (Var v)
----------------- Failure -----------------------
-- | Like 'notHandled', but first pins the error to the given source
-- span when that span is usable.
notHandledL :: SrcSpan -> String -> SDoc -> DsM a
notHandledL loc what doc
  | isGoodSrcSpan loc = putSrcSpanDs loc (notHandled what doc)
  | otherwise         = notHandled what doc

-- | Fail desugaring with a "not (yet) handled by Template Haskell"
-- message describing the unsupported construct.
notHandled :: String -> SDoc -> DsM a
notHandled what doc = failWithDs msg
  where
    msg = hang (text what <+> ptext (sLit "not (yet) handled by Template Haskell"))
             2 doc
-- %************************************************************************
-- %* *
-- The known-key names for Template Haskell
-- %* *
-- %************************************************************************
-- To add a name, do three things
--
-- 1) Allocate a key
-- 2) Make a "Name"
-- 3) Add the name to knownKeyNames
templateHaskellNames :: [Name]
-- The names that are implicitly mentioned by ``bracket''
-- Should stay in sync with the import list of DsMeta
-- (grouped by the TH data type each name belongs to)
templateHaskellNames = [
    returnQName, bindQName, sequenceQName, newNameName, liftName,
    mkNameName, mkNameG_vName, mkNameG_dName, mkNameG_tcName, mkNameLName,
    liftStringName,
    unTypeName,
    unTypeQName,
    unsafeTExpCoerceName,
    -- Lit
    charLName, stringLName, integerLName, intPrimLName, wordPrimLName,
    floatPrimLName, doublePrimLName, rationalLName,
    -- Pat
    litPName, varPName, tupPName, unboxedTupPName,
    conPName, tildePName, bangPName, infixPName,
    asPName, wildPName, recPName, listPName, sigPName, viewPName,
    -- FieldPat
    fieldPatName,
    -- Match
    matchName,
    -- Clause
    clauseName,
    -- Exp
    varEName, conEName, litEName, appEName, infixEName,
    infixAppName, sectionLName, sectionRName, lamEName, lamCaseEName,
    tupEName, unboxedTupEName,
    condEName, multiIfEName, letEName, caseEName, doEName, compEName,
    fromEName, fromThenEName, fromToEName, fromThenToEName,
    listEName, sigEName, recConEName, recUpdEName,
    -- FieldExp
    fieldExpName,
    -- Body
    guardedBName, normalBName,
    -- Guard
    normalGEName, patGEName,
    -- Stmt
    bindSName, letSName, noBindSName, parSName,
    -- Dec
    funDName, valDName, dataDName, newtypeDName, tySynDName,
    classDName, instanceDName, standaloneDerivDName, sigDName, forImpDName,
    pragInlDName, pragSpecDName, pragSpecInlDName, pragSpecInstDName,
    pragRuleDName, pragAnnDName, defaultSigDName,
    familyNoKindDName, familyKindDName, dataInstDName, newtypeInstDName,
    tySynInstDName, closedTypeFamilyKindDName, closedTypeFamilyNoKindDName,
    infixLDName, infixRDName, infixNDName,
    roleAnnotDName,
    -- Cxt
    cxtName,
    -- Strict
    isStrictName, notStrictName, unpackedName,
    -- Con
    normalCName, recCName, infixCName, forallCName,
    -- StrictType
    strictTypeName,
    -- VarStrictType
    varStrictTypeName,
    -- Type
    forallTName, varTName, conTName, appTName, equalityTName,
    tupleTName, unboxedTupleTName, arrowTName, listTName, sigTName, litTName,
    promotedTName, promotedTupleTName, promotedNilTName, promotedConsTName,
    -- TyLit
    numTyLitName, strTyLitName,
    -- TyVarBndr
    plainTVName, kindedTVName,
    -- Role
    nominalRName, representationalRName, phantomRName, inferRName,
    -- Kind
    varKName, conKName, tupleKName, arrowKName, listKName, appKName,
    starKName, constraintKName,
    -- Callconv
    cCallName, stdCallName, cApiCallName, primCallName, javaScriptCallName,
    -- Safety
    unsafeName,
    safeName,
    interruptibleName,
    -- Inline
    noInlineDataConName, inlineDataConName, inlinableDataConName,
    -- RuleMatch
    conLikeDataConName, funLikeDataConName,
    -- Phases
    allPhasesDataConName, fromPhaseDataConName, beforePhaseDataConName,
    -- TExp
    tExpDataConName,
    -- RuleBndr
    ruleVarName, typedRuleVarName,
    -- FunDep
    funDepName,
    -- FamFlavour
    typeFamName, dataFamName,
    -- TySynEqn
    tySynEqnName,
    -- AnnTarget
    valueAnnotationName, typeAnnotationName, moduleAnnotationName,
    -- And the tycons
    qTyConName, nameTyConName, patTyConName, fieldPatTyConName, matchQTyConName,
    clauseQTyConName, expQTyConName, fieldExpTyConName, predTyConName,
    stmtQTyConName, decQTyConName, conQTyConName, strictTypeQTyConName,
    varStrictTypeQTyConName, typeQTyConName, expTyConName, decTyConName,
    typeTyConName, tyVarBndrTyConName, matchTyConName, clauseTyConName,
    patQTyConName, fieldPatQTyConName, fieldExpQTyConName, funDepTyConName,
    predQTyConName, decsQTyConName, ruleBndrQTyConName, tySynEqnQTyConName,
    roleTyConName, tExpTyConName,
    -- Quasiquoting
    quoteDecName, quoteTypeName, quoteExpName, quotePatName]
-- The three Template Haskell modules whose exports we refer to by
-- known-key names.
thSyn, thLib, qqLib :: Module
thSyn = mkTHModule (fsLit "Language.Haskell.TH.Syntax")
thLib = mkTHModule (fsLit "Language.Haskell.TH.Lib")
qqLib = mkTHModule (fsLit "Language.Haskell.TH.Quote")

-- | Make a Module in the template-haskell package.
mkTHModule :: FastString -> Module
mkTHModule m = mkModule thPackageKey (mkModuleNameFS m)

-- Name builders, one per (namespace, module) pair: *Fun = term-level
-- variable, *Tc = type constructor, thCon = data constructor.
libFun, libTc, thFun, thTc, thCon, qqFun :: FastString -> Unique -> Name
libFun = mk_known_key_name OccName.varName  thLib
libTc  = mk_known_key_name OccName.tcName   thLib
thFun  = mk_known_key_name OccName.varName  thSyn
thTc   = mk_known_key_name OccName.tcName   thSyn
thCon  = mk_known_key_name OccName.dataName thSyn
qqFun  = mk_known_key_name OccName.varName  qqLib
-------------------- TH.Syntax -----------------------
-- Known-key Names for types and functions exported by
-- Language.Haskell.TH.Syntax.  Each definition pairs the occurrence
-- string with a fixed unique (the *Key/*IdKey value).
qTyConName, nameTyConName, fieldExpTyConName, patTyConName,
    fieldPatTyConName, expTyConName, decTyConName, typeTyConName,
    tyVarBndrTyConName, matchTyConName, clauseTyConName, funDepTyConName,
    predTyConName, tExpTyConName :: Name
qTyConName         = thTc (fsLit "Q")         qTyConKey
nameTyConName      = thTc (fsLit "Name")      nameTyConKey
fieldExpTyConName  = thTc (fsLit "FieldExp")  fieldExpTyConKey
patTyConName       = thTc (fsLit "Pat")       patTyConKey
fieldPatTyConName  = thTc (fsLit "FieldPat")  fieldPatTyConKey
expTyConName       = thTc (fsLit "Exp")       expTyConKey
decTyConName       = thTc (fsLit "Dec")       decTyConKey
typeTyConName      = thTc (fsLit "Type")      typeTyConKey
tyVarBndrTyConName = thTc (fsLit "TyVarBndr") tyVarBndrTyConKey
matchTyConName     = thTc (fsLit "Match")     matchTyConKey
clauseTyConName    = thTc (fsLit "Clause")    clauseTyConKey
funDepTyConName    = thTc (fsLit "FunDep")    funDepTyConKey
predTyConName      = thTc (fsLit "Pred")      predTyConKey
tExpTyConName      = thTc (fsLit "TExp")      tExpTyConKey

returnQName, bindQName, sequenceQName, newNameName, liftName,
    mkNameName, mkNameG_vName, mkNameG_dName, mkNameG_tcName,
    mkNameLName, liftStringName, unTypeName, unTypeQName,
    unsafeTExpCoerceName :: Name
returnQName    = thFun (fsLit "returnQ")    returnQIdKey
bindQName      = thFun (fsLit "bindQ")      bindQIdKey
sequenceQName  = thFun (fsLit "sequenceQ")  sequenceQIdKey
newNameName    = thFun (fsLit "newName")    newNameIdKey
liftName       = thFun (fsLit "lift")       liftIdKey
liftStringName = thFun (fsLit "liftString") liftStringIdKey
mkNameName     = thFun (fsLit "mkName")     mkNameIdKey
mkNameG_vName  = thFun (fsLit "mkNameG_v")  mkNameG_vIdKey
mkNameG_dName  = thFun (fsLit "mkNameG_d")  mkNameG_dIdKey
mkNameG_tcName = thFun (fsLit "mkNameG_tc") mkNameG_tcIdKey
mkNameLName    = thFun (fsLit "mkNameL")    mkNameLIdKey
unTypeName     = thFun (fsLit "unType")     unTypeIdKey
unTypeQName    = thFun (fsLit "unTypeQ")    unTypeQIdKey
unsafeTExpCoerceName = thFun (fsLit "unsafeTExpCoerce") unsafeTExpCoerceIdKey
-------------------- TH.Lib -----------------------
-- data Lit = ...
-- Known-key Names for the smart constructors exported by
-- Language.Haskell.TH.Lib, grouped by the TH data type they build.
-- Each pairs the occurrence string with its fixed unique key.
charLName, stringLName, integerLName, intPrimLName, wordPrimLName,
    floatPrimLName, doublePrimLName, rationalLName :: Name
charLName       = libFun (fsLit "charL")       charLIdKey
stringLName     = libFun (fsLit "stringL")     stringLIdKey
integerLName    = libFun (fsLit "integerL")    integerLIdKey
intPrimLName    = libFun (fsLit "intPrimL")    intPrimLIdKey
wordPrimLName   = libFun (fsLit "wordPrimL")   wordPrimLIdKey
floatPrimLName  = libFun (fsLit "floatPrimL")  floatPrimLIdKey
doublePrimLName = libFun (fsLit "doublePrimL") doublePrimLIdKey
rationalLName   = libFun (fsLit "rationalL")   rationalLIdKey

-- data Pat = ...
litPName, varPName, tupPName, unboxedTupPName, conPName, infixPName, tildePName, bangPName,
    asPName, wildPName, recPName, listPName, sigPName, viewPName :: Name
litPName        = libFun (fsLit "litP")        litPIdKey
varPName        = libFun (fsLit "varP")        varPIdKey
tupPName        = libFun (fsLit "tupP")        tupPIdKey
unboxedTupPName = libFun (fsLit "unboxedTupP") unboxedTupPIdKey
conPName        = libFun (fsLit "conP")        conPIdKey
infixPName      = libFun (fsLit "infixP")      infixPIdKey
tildePName      = libFun (fsLit "tildeP")      tildePIdKey
bangPName       = libFun (fsLit "bangP")       bangPIdKey
asPName         = libFun (fsLit "asP")         asPIdKey
wildPName       = libFun (fsLit "wildP")       wildPIdKey
recPName        = libFun (fsLit "recP")        recPIdKey
listPName       = libFun (fsLit "listP")       listPIdKey
sigPName        = libFun (fsLit "sigP")        sigPIdKey
viewPName       = libFun (fsLit "viewP")       viewPIdKey

-- type FieldPat = ...
fieldPatName :: Name
fieldPatName = libFun (fsLit "fieldPat") fieldPatIdKey

-- data Match = ...
matchName :: Name
matchName = libFun (fsLit "match") matchIdKey

-- data Clause = ...
clauseName :: Name
clauseName = libFun (fsLit "clause") clauseIdKey

-- data Exp = ...
varEName, conEName, litEName, appEName, infixEName, infixAppName,
    sectionLName, sectionRName, lamEName, lamCaseEName, tupEName,
    unboxedTupEName, condEName, multiIfEName, letEName, caseEName,
    doEName, compEName :: Name
varEName        = libFun (fsLit "varE")        varEIdKey
conEName        = libFun (fsLit "conE")        conEIdKey
litEName        = libFun (fsLit "litE")        litEIdKey
appEName        = libFun (fsLit "appE")        appEIdKey
infixEName      = libFun (fsLit "infixE")      infixEIdKey
infixAppName    = libFun (fsLit "infixApp")    infixAppIdKey
sectionLName    = libFun (fsLit "sectionL")    sectionLIdKey
sectionRName    = libFun (fsLit "sectionR")    sectionRIdKey
lamEName        = libFun (fsLit "lamE")        lamEIdKey
lamCaseEName    = libFun (fsLit "lamCaseE")    lamCaseEIdKey
tupEName        = libFun (fsLit "tupE")        tupEIdKey
unboxedTupEName = libFun (fsLit "unboxedTupE") unboxedTupEIdKey
condEName       = libFun (fsLit "condE")       condEIdKey
multiIfEName    = libFun (fsLit "multiIfE")    multiIfEIdKey
letEName        = libFun (fsLit "letE")        letEIdKey
caseEName       = libFun (fsLit "caseE")       caseEIdKey
doEName         = libFun (fsLit "doE")         doEIdKey
compEName       = libFun (fsLit "compE")       compEIdKey

-- ArithSeq skips a level
fromEName, fromThenEName, fromToEName, fromThenToEName :: Name
fromEName       = libFun (fsLit "fromE")       fromEIdKey
fromThenEName   = libFun (fsLit "fromThenE")   fromThenEIdKey
fromToEName     = libFun (fsLit "fromToE")     fromToEIdKey
fromThenToEName = libFun (fsLit "fromThenToE") fromThenToEIdKey

-- end ArithSeq
listEName, sigEName, recConEName, recUpdEName :: Name
listEName   = libFun (fsLit "listE")   listEIdKey
sigEName    = libFun (fsLit "sigE")    sigEIdKey
recConEName = libFun (fsLit "recConE") recConEIdKey
recUpdEName = libFun (fsLit "recUpdE") recUpdEIdKey

-- type FieldExp = ...
fieldExpName :: Name
fieldExpName = libFun (fsLit "fieldExp") fieldExpIdKey

-- data Body = ...
guardedBName, normalBName :: Name
guardedBName = libFun (fsLit "guardedB") guardedBIdKey
normalBName  = libFun (fsLit "normalB")  normalBIdKey

-- data Guard = ...
normalGEName, patGEName :: Name
normalGEName = libFun (fsLit "normalGE") normalGEIdKey
patGEName    = libFun (fsLit "patGE")    patGEIdKey

-- data Stmt = ...
bindSName, letSName, noBindSName, parSName :: Name
bindSName   = libFun (fsLit "bindS")   bindSIdKey
letSName    = libFun (fsLit "letS")    letSIdKey
noBindSName = libFun (fsLit "noBindS") noBindSIdKey
parSName    = libFun (fsLit "parS")    parSIdKey

-- data Dec = ...
funDName, valDName, dataDName, newtypeDName, tySynDName, classDName,
    instanceDName, sigDName, forImpDName, pragInlDName, pragSpecDName,
    pragSpecInlDName, pragSpecInstDName, pragRuleDName, pragAnnDName,
    familyNoKindDName, standaloneDerivDName, defaultSigDName,
    familyKindDName, dataInstDName, newtypeInstDName, tySynInstDName,
    closedTypeFamilyKindDName, closedTypeFamilyNoKindDName,
    infixLDName, infixRDName, infixNDName, roleAnnotDName :: Name
funDName          = libFun (fsLit "funD")          funDIdKey
valDName          = libFun (fsLit "valD")          valDIdKey
dataDName         = libFun (fsLit "dataD")         dataDIdKey
newtypeDName      = libFun (fsLit "newtypeD")      newtypeDIdKey
tySynDName        = libFun (fsLit "tySynD")        tySynDIdKey
classDName        = libFun (fsLit "classD")        classDIdKey
instanceDName     = libFun (fsLit "instanceD")     instanceDIdKey
standaloneDerivDName
                  = libFun (fsLit "standaloneDerivD") standaloneDerivDIdKey
sigDName          = libFun (fsLit "sigD")          sigDIdKey
defaultSigDName   = libFun (fsLit "defaultSigD")   defaultSigDIdKey
forImpDName       = libFun (fsLit "forImpD")       forImpDIdKey
pragInlDName      = libFun (fsLit "pragInlD")      pragInlDIdKey
pragSpecDName     = libFun (fsLit "pragSpecD")     pragSpecDIdKey
pragSpecInlDName  = libFun (fsLit "pragSpecInlD")  pragSpecInlDIdKey
pragSpecInstDName = libFun (fsLit "pragSpecInstD") pragSpecInstDIdKey
pragRuleDName     = libFun (fsLit "pragRuleD")     pragRuleDIdKey
pragAnnDName      = libFun (fsLit "pragAnnD")      pragAnnDIdKey
familyNoKindDName = libFun (fsLit "familyNoKindD") familyNoKindDIdKey
familyKindDName   = libFun (fsLit "familyKindD")   familyKindDIdKey
dataInstDName     = libFun (fsLit "dataInstD")     dataInstDIdKey
newtypeInstDName  = libFun (fsLit "newtypeInstD")  newtypeInstDIdKey
tySynInstDName    = libFun (fsLit "tySynInstD")    tySynInstDIdKey
closedTypeFamilyKindDName
                  = libFun (fsLit "closedTypeFamilyKindD") closedTypeFamilyKindDIdKey
closedTypeFamilyNoKindDName
                  = libFun (fsLit "closedTypeFamilyNoKindD") closedTypeFamilyNoKindDIdKey
infixLDName       = libFun (fsLit "infixLD")       infixLDIdKey
infixRDName       = libFun (fsLit "infixRD")       infixRDIdKey
infixNDName       = libFun (fsLit "infixND")       infixNDIdKey
roleAnnotDName    = libFun (fsLit "roleAnnotD")    roleAnnotDIdKey

-- type Ctxt = ...
cxtName :: Name
cxtName = libFun (fsLit "cxt") cxtIdKey

-- data Strict = ...
isStrictName, notStrictName, unpackedName :: Name
isStrictName  = libFun (fsLit "isStrict")  isStrictKey
notStrictName = libFun (fsLit "notStrict") notStrictKey
unpackedName  = libFun (fsLit "unpacked")  unpackedKey

-- data Con = ...
normalCName, recCName, infixCName, forallCName :: Name
normalCName = libFun (fsLit "normalC") normalCIdKey
recCName    = libFun (fsLit "recC")    recCIdKey
infixCName  = libFun (fsLit "infixC")  infixCIdKey
forallCName = libFun (fsLit "forallC") forallCIdKey

-- type StrictType = ...
strictTypeName :: Name
strictTypeName = libFun (fsLit "strictType") strictTKey

-- type VarStrictType = ...
varStrictTypeName :: Name
varStrictTypeName = libFun (fsLit "varStrictType") varStrictTKey

-- data Type = ...
forallTName, varTName, conTName, tupleTName, unboxedTupleTName, arrowTName,
    listTName, appTName, sigTName, equalityTName, litTName,
    promotedTName, promotedTupleTName,
    promotedNilTName, promotedConsTName :: Name
forallTName        = libFun (fsLit "forallT")        forallTIdKey
varTName           = libFun (fsLit "varT")           varTIdKey
conTName           = libFun (fsLit "conT")           conTIdKey
tupleTName         = libFun (fsLit "tupleT")         tupleTIdKey
unboxedTupleTName  = libFun (fsLit "unboxedTupleT")  unboxedTupleTIdKey
arrowTName         = libFun (fsLit "arrowT")         arrowTIdKey
listTName          = libFun (fsLit "listT")          listTIdKey
appTName           = libFun (fsLit "appT")           appTIdKey
sigTName           = libFun (fsLit "sigT")           sigTIdKey
equalityTName      = libFun (fsLit "equalityT")      equalityTIdKey
litTName           = libFun (fsLit "litT")           litTIdKey
promotedTName      = libFun (fsLit "promotedT")      promotedTIdKey
promotedTupleTName = libFun (fsLit "promotedTupleT") promotedTupleTIdKey
promotedNilTName   = libFun (fsLit "promotedNilT")   promotedNilTIdKey
promotedConsTName  = libFun (fsLit "promotedConsT")  promotedConsTIdKey

-- data TyLit = ...
numTyLitName, strTyLitName :: Name
numTyLitName = libFun (fsLit "numTyLit") numTyLitIdKey
strTyLitName = libFun (fsLit "strTyLit") strTyLitIdKey

-- data TyVarBndr = ...
plainTVName, kindedTVName :: Name
plainTVName  = libFun (fsLit "plainTV")  plainTVIdKey
kindedTVName = libFun (fsLit "kindedTV") kindedTVIdKey

-- data Role = ...
nominalRName, representationalRName, phantomRName, inferRName :: Name
nominalRName          = libFun (fsLit "nominalR")          nominalRIdKey
representationalRName = libFun (fsLit "representationalR") representationalRIdKey
phantomRName          = libFun (fsLit "phantomR")          phantomRIdKey
inferRName            = libFun (fsLit "inferR")            inferRIdKey

-- data Kind = ...
varKName, conKName, tupleKName, arrowKName, listKName, appKName,
    starKName, constraintKName :: Name
varKName        = libFun (fsLit "varK")        varKIdKey
conKName        = libFun (fsLit "conK")        conKIdKey
tupleKName      = libFun (fsLit "tupleK")      tupleKIdKey
arrowKName      = libFun (fsLit "arrowK")      arrowKIdKey
listKName       = libFun (fsLit "listK")       listKIdKey
appKName        = libFun (fsLit "appK")        appKIdKey
starKName       = libFun (fsLit "starK")       starKIdKey
constraintKName = libFun (fsLit "constraintK") constraintKIdKey

-- data Callconv = ...
cCallName, stdCallName, cApiCallName, primCallName, javaScriptCallName :: Name
cCallName          = libFun (fsLit "cCall")      cCallIdKey
stdCallName        = libFun (fsLit "stdCall")    stdCallIdKey
cApiCallName       = libFun (fsLit "cApi")       cApiCallIdKey
primCallName       = libFun (fsLit "prim")       primCallIdKey
javaScriptCallName = libFun (fsLit "javaScript") javaScriptCallIdKey

-- data Safety = ...
unsafeName, safeName, interruptibleName :: Name
unsafeName        = libFun (fsLit "unsafe")        unsafeIdKey
safeName          = libFun (fsLit "safe")          safeIdKey
interruptibleName = libFun (fsLit "interruptible") interruptibleIdKey

-- data Inline = ...  (data constructors, hence thCon)
noInlineDataConName, inlineDataConName, inlinableDataConName :: Name
noInlineDataConName  = thCon (fsLit "NoInline")  noInlineDataConKey
inlineDataConName    = thCon (fsLit "Inline")    inlineDataConKey
inlinableDataConName = thCon (fsLit "Inlinable") inlinableDataConKey

-- data RuleMatch = ...
conLikeDataConName, funLikeDataConName :: Name
conLikeDataConName = thCon (fsLit "ConLike") conLikeDataConKey
funLikeDataConName = thCon (fsLit "FunLike") funLikeDataConKey

-- data Phases = ...
allPhasesDataConName, fromPhaseDataConName, beforePhaseDataConName :: Name
allPhasesDataConName   = thCon (fsLit "AllPhases")   allPhasesDataConKey
fromPhaseDataConName   = thCon (fsLit "FromPhase")   fromPhaseDataConKey
beforePhaseDataConName = thCon (fsLit "BeforePhase") beforePhaseDataConKey

-- newtype TExp a = ...
tExpDataConName :: Name
tExpDataConName = thCon (fsLit "TExp") tExpDataConKey

-- data RuleBndr = ...
ruleVarName, typedRuleVarName :: Name
ruleVarName      = libFun (fsLit ("ruleVar"))      ruleVarIdKey
typedRuleVarName = libFun (fsLit ("typedRuleVar")) typedRuleVarIdKey

-- data FunDep = ...
funDepName :: Name
funDepName = libFun (fsLit "funDep") funDepIdKey

-- data FamFlavour = ...
typeFamName, dataFamName :: Name
typeFamName = libFun (fsLit "typeFam") typeFamIdKey
dataFamName = libFun (fsLit "dataFam") dataFamIdKey

-- data TySynEqn = ...
tySynEqnName :: Name
tySynEqnName = libFun (fsLit "tySynEqn") tySynEqnIdKey

-- data AnnTarget = ...
valueAnnotationName, typeAnnotationName, moduleAnnotationName :: Name
valueAnnotationName  = libFun (fsLit "valueAnnotation")  valueAnnotationIdKey
typeAnnotationName   = libFun (fsLit "typeAnnotation")   typeAnnotationIdKey
moduleAnnotationName = libFun (fsLit "moduleAnnotation") moduleAnnotationIdKey
-- Names of the @Q@-wrapped type synonyms (e.g. @ExpQ = Q Exp@) plus 'Role',
-- resolved with 'libTc' (type constructors rather than functions).
matchQTyConName, clauseQTyConName, expQTyConName, stmtQTyConName,
    decQTyConName, conQTyConName, strictTypeQTyConName,
    varStrictTypeQTyConName, typeQTyConName, fieldExpQTyConName,
    patQTyConName, fieldPatQTyConName, predQTyConName, decsQTyConName,
    ruleBndrQTyConName, tySynEqnQTyConName, roleTyConName :: Name
matchQTyConName = libTc (fsLit "MatchQ") matchQTyConKey
clauseQTyConName = libTc (fsLit "ClauseQ") clauseQTyConKey
expQTyConName = libTc (fsLit "ExpQ") expQTyConKey
stmtQTyConName = libTc (fsLit "StmtQ") stmtQTyConKey
decQTyConName = libTc (fsLit "DecQ") decQTyConKey
decsQTyConName = libTc (fsLit "DecsQ") decsQTyConKey -- Q [Dec]
conQTyConName = libTc (fsLit "ConQ") conQTyConKey
strictTypeQTyConName = libTc (fsLit "StrictTypeQ") strictTypeQTyConKey
varStrictTypeQTyConName = libTc (fsLit "VarStrictTypeQ") varStrictTypeQTyConKey
typeQTyConName = libTc (fsLit "TypeQ") typeQTyConKey
fieldExpQTyConName = libTc (fsLit "FieldExpQ") fieldExpQTyConKey
patQTyConName = libTc (fsLit "PatQ") patQTyConKey
fieldPatQTyConName = libTc (fsLit "FieldPatQ") fieldPatQTyConKey
predQTyConName = libTc (fsLit "PredQ") predQTyConKey
ruleBndrQTyConName = libTc (fsLit "RuleBndrQ") ruleBndrQTyConKey
tySynEqnQTyConName = libTc (fsLit "TySynEqnQ") tySynEqnQTyConKey
roleTyConName = libTc (fsLit "Role") roleTyConKey
-- quasiquoting
-- Names of the four 'QuasiQuoter' record fields, resolved with 'qqFun'.
quoteExpName, quotePatName, quoteDecName, quoteTypeName :: Name
quoteExpName = qqFun (fsLit "quoteExp") quoteExpKey
quotePatName = qqFun (fsLit "quotePat") quotePatKey
quoteDecName = qqFun (fsLit "quoteDec") quoteDecKey
quoteTypeName = qqFun (fsLit "quoteType") quoteTypeKey
-- TyConUniques available: 200-299
-- Check in PrelNames if you want to change this
-- NOTE(review): these numbers must all be mutually distinct within the
-- 200-299 range; 200-230 are currently taken.  When adding a key, append the
-- next free number rather than renumbering.
expTyConKey, matchTyConKey, clauseTyConKey, qTyConKey, expQTyConKey,
    decQTyConKey, patTyConKey, matchQTyConKey, clauseQTyConKey,
    stmtQTyConKey, conQTyConKey, typeQTyConKey, typeTyConKey, tyVarBndrTyConKey,
    decTyConKey, varStrictTypeQTyConKey, strictTypeQTyConKey,
    fieldExpTyConKey, fieldPatTyConKey, nameTyConKey, patQTyConKey,
    fieldPatQTyConKey, fieldExpQTyConKey, funDepTyConKey, predTyConKey,
    predQTyConKey, decsQTyConKey, ruleBndrQTyConKey, tySynEqnQTyConKey,
    roleTyConKey, tExpTyConKey :: Unique
expTyConKey = mkPreludeTyConUnique 200
matchTyConKey = mkPreludeTyConUnique 201
clauseTyConKey = mkPreludeTyConUnique 202
qTyConKey = mkPreludeTyConUnique 203
expQTyConKey = mkPreludeTyConUnique 204
decQTyConKey = mkPreludeTyConUnique 205
patTyConKey = mkPreludeTyConUnique 206
matchQTyConKey = mkPreludeTyConUnique 207
clauseQTyConKey = mkPreludeTyConUnique 208
stmtQTyConKey = mkPreludeTyConUnique 209
conQTyConKey = mkPreludeTyConUnique 210
typeQTyConKey = mkPreludeTyConUnique 211
typeTyConKey = mkPreludeTyConUnique 212
decTyConKey = mkPreludeTyConUnique 213
varStrictTypeQTyConKey = mkPreludeTyConUnique 214
strictTypeQTyConKey = mkPreludeTyConUnique 215
fieldExpTyConKey = mkPreludeTyConUnique 216
fieldPatTyConKey = mkPreludeTyConUnique 217
nameTyConKey = mkPreludeTyConUnique 218
patQTyConKey = mkPreludeTyConUnique 219
fieldPatQTyConKey = mkPreludeTyConUnique 220
fieldExpQTyConKey = mkPreludeTyConUnique 221
funDepTyConKey = mkPreludeTyConUnique 222
predTyConKey = mkPreludeTyConUnique 223
predQTyConKey = mkPreludeTyConUnique 224
tyVarBndrTyConKey = mkPreludeTyConUnique 225
decsQTyConKey = mkPreludeTyConUnique 226
ruleBndrQTyConKey = mkPreludeTyConUnique 227
tySynEqnQTyConKey = mkPreludeTyConUnique 228
roleTyConKey = mkPreludeTyConUnique 229
tExpTyConKey = mkPreludeTyConUnique 230
-- IdUniques available: 200-499
-- If you want to change this, make sure you check in PrelNames
-- NOTE(review): the Id-unique namespace is separate from the TyCon one above,
-- so 200-212 here do not clash with the TyCon keys 200-212.
returnQIdKey, bindQIdKey, sequenceQIdKey, liftIdKey, newNameIdKey,
    mkNameIdKey, mkNameG_vIdKey, mkNameG_dIdKey, mkNameG_tcIdKey,
    mkNameLIdKey, unTypeIdKey, unTypeQIdKey, unsafeTExpCoerceIdKey :: Unique
returnQIdKey = mkPreludeMiscIdUnique 200
bindQIdKey = mkPreludeMiscIdUnique 201
sequenceQIdKey = mkPreludeMiscIdUnique 202
liftIdKey = mkPreludeMiscIdUnique 203
newNameIdKey = mkPreludeMiscIdUnique 204
mkNameIdKey = mkPreludeMiscIdUnique 205
mkNameG_vIdKey = mkPreludeMiscIdUnique 206
mkNameG_dIdKey = mkPreludeMiscIdUnique 207
mkNameG_tcIdKey = mkPreludeMiscIdUnique 208
mkNameLIdKey = mkPreludeMiscIdUnique 209
unTypeIdKey = mkPreludeMiscIdUnique 210
unTypeQIdKey = mkPreludeMiscIdUnique 211
unsafeTExpCoerceIdKey = mkPreludeMiscIdUnique 212
-- data Lit = ...
charLIdKey, stringLIdKey, integerLIdKey, intPrimLIdKey, wordPrimLIdKey,
    floatPrimLIdKey, doublePrimLIdKey, rationalLIdKey :: Unique
charLIdKey = mkPreludeMiscIdUnique 220
stringLIdKey = mkPreludeMiscIdUnique 221
integerLIdKey = mkPreludeMiscIdUnique 222
intPrimLIdKey = mkPreludeMiscIdUnique 223
wordPrimLIdKey = mkPreludeMiscIdUnique 224
floatPrimLIdKey = mkPreludeMiscIdUnique 225
doublePrimLIdKey = mkPreludeMiscIdUnique 226
rationalLIdKey = mkPreludeMiscIdUnique 227
liftStringIdKey :: Unique
liftStringIdKey = mkPreludeMiscIdUnique 228
-- data Pat = ...
-- Keys 240-253 for pattern combinators; 260-262 for FieldPat/Match/Clause.
litPIdKey, varPIdKey, tupPIdKey, unboxedTupPIdKey, conPIdKey, infixPIdKey, tildePIdKey, bangPIdKey,
    asPIdKey, wildPIdKey, recPIdKey, listPIdKey, sigPIdKey, viewPIdKey :: Unique
litPIdKey = mkPreludeMiscIdUnique 240
varPIdKey = mkPreludeMiscIdUnique 241
tupPIdKey = mkPreludeMiscIdUnique 242
unboxedTupPIdKey = mkPreludeMiscIdUnique 243
conPIdKey = mkPreludeMiscIdUnique 244
infixPIdKey = mkPreludeMiscIdUnique 245
tildePIdKey = mkPreludeMiscIdUnique 246
bangPIdKey = mkPreludeMiscIdUnique 247
asPIdKey = mkPreludeMiscIdUnique 248
wildPIdKey = mkPreludeMiscIdUnique 249
recPIdKey = mkPreludeMiscIdUnique 250
listPIdKey = mkPreludeMiscIdUnique 251
sigPIdKey = mkPreludeMiscIdUnique 252
viewPIdKey = mkPreludeMiscIdUnique 253
-- type FieldPat = ...
fieldPatIdKey :: Unique
fieldPatIdKey = mkPreludeMiscIdUnique 260
-- data Match = ...
matchIdKey :: Unique
matchIdKey = mkPreludeMiscIdUnique 261
-- data Clause = ...
clauseIdKey :: Unique
clauseIdKey = mkPreludeMiscIdUnique 262
-- data Exp = ...
-- Keys 270-295 for expression combinators; 310-323 cover FieldExp, Body,
-- Guard and Stmt.
varEIdKey, conEIdKey, litEIdKey, appEIdKey, infixEIdKey, infixAppIdKey,
    sectionLIdKey, sectionRIdKey, lamEIdKey, lamCaseEIdKey, tupEIdKey,
    unboxedTupEIdKey, condEIdKey, multiIfEIdKey,
    letEIdKey, caseEIdKey, doEIdKey, compEIdKey,
    fromEIdKey, fromThenEIdKey, fromToEIdKey, fromThenToEIdKey,
    listEIdKey, sigEIdKey, recConEIdKey, recUpdEIdKey :: Unique
varEIdKey = mkPreludeMiscIdUnique 270
conEIdKey = mkPreludeMiscIdUnique 271
litEIdKey = mkPreludeMiscIdUnique 272
appEIdKey = mkPreludeMiscIdUnique 273
infixEIdKey = mkPreludeMiscIdUnique 274
infixAppIdKey = mkPreludeMiscIdUnique 275
sectionLIdKey = mkPreludeMiscIdUnique 276
sectionRIdKey = mkPreludeMiscIdUnique 277
lamEIdKey = mkPreludeMiscIdUnique 278
lamCaseEIdKey = mkPreludeMiscIdUnique 279
tupEIdKey = mkPreludeMiscIdUnique 280
unboxedTupEIdKey = mkPreludeMiscIdUnique 281
condEIdKey = mkPreludeMiscIdUnique 282
multiIfEIdKey = mkPreludeMiscIdUnique 283
letEIdKey = mkPreludeMiscIdUnique 284
caseEIdKey = mkPreludeMiscIdUnique 285
doEIdKey = mkPreludeMiscIdUnique 286
compEIdKey = mkPreludeMiscIdUnique 287
fromEIdKey = mkPreludeMiscIdUnique 288
fromThenEIdKey = mkPreludeMiscIdUnique 289
fromToEIdKey = mkPreludeMiscIdUnique 290
fromThenToEIdKey = mkPreludeMiscIdUnique 291
listEIdKey = mkPreludeMiscIdUnique 292
sigEIdKey = mkPreludeMiscIdUnique 293
recConEIdKey = mkPreludeMiscIdUnique 294
recUpdEIdKey = mkPreludeMiscIdUnique 295
-- type FieldExp = ...
fieldExpIdKey :: Unique
fieldExpIdKey = mkPreludeMiscIdUnique 310
-- data Body = ...
guardedBIdKey, normalBIdKey :: Unique
guardedBIdKey = mkPreludeMiscIdUnique 311
normalBIdKey = mkPreludeMiscIdUnique 312
-- data Guard = ...
normalGEIdKey, patGEIdKey :: Unique
normalGEIdKey = mkPreludeMiscIdUnique 313
patGEIdKey = mkPreludeMiscIdUnique 314
-- data Stmt = ...
bindSIdKey, letSIdKey, noBindSIdKey, parSIdKey :: Unique
bindSIdKey = mkPreludeMiscIdUnique 320
letSIdKey = mkPreludeMiscIdUnique 321
letSIdKey' = undefined -- NOTE(review): placeholder? no -- (removed, see below)
noBindSIdKey = mkPreludeMiscIdUnique 322
parSIdKey = mkPreludeMiscIdUnique 323
-- data Dec = ...
-- Keys 330-357 for declaration combinators.  Note: the binding order below
-- intentionally does not match the numeric order for the later additions
-- (354-357), since new keys are appended at the next free number.
funDIdKey, valDIdKey, dataDIdKey, newtypeDIdKey, tySynDIdKey,
    classDIdKey, instanceDIdKey, sigDIdKey, forImpDIdKey, pragInlDIdKey,
    pragSpecDIdKey, pragSpecInlDIdKey, pragSpecInstDIdKey, pragRuleDIdKey,
    pragAnnDIdKey, familyNoKindDIdKey, familyKindDIdKey, defaultSigDIdKey,
    dataInstDIdKey, newtypeInstDIdKey, tySynInstDIdKey, standaloneDerivDIdKey,
    closedTypeFamilyKindDIdKey, closedTypeFamilyNoKindDIdKey,
    infixLDIdKey, infixRDIdKey, infixNDIdKey, roleAnnotDIdKey :: Unique
funDIdKey = mkPreludeMiscIdUnique 330
valDIdKey = mkPreludeMiscIdUnique 331
dataDIdKey = mkPreludeMiscIdUnique 332
newtypeDIdKey = mkPreludeMiscIdUnique 333
tySynDIdKey = mkPreludeMiscIdUnique 334
classDIdKey = mkPreludeMiscIdUnique 335
instanceDIdKey = mkPreludeMiscIdUnique 336
sigDIdKey = mkPreludeMiscIdUnique 337
forImpDIdKey = mkPreludeMiscIdUnique 338
pragInlDIdKey = mkPreludeMiscIdUnique 339
pragSpecDIdKey = mkPreludeMiscIdUnique 340
pragSpecInlDIdKey = mkPreludeMiscIdUnique 341
pragSpecInstDIdKey = mkPreludeMiscIdUnique 342
pragRuleDIdKey = mkPreludeMiscIdUnique 343
pragAnnDIdKey = mkPreludeMiscIdUnique 344
familyNoKindDIdKey = mkPreludeMiscIdUnique 345
familyKindDIdKey = mkPreludeMiscIdUnique 346
dataInstDIdKey = mkPreludeMiscIdUnique 347
newtypeInstDIdKey = mkPreludeMiscIdUnique 348
tySynInstDIdKey = mkPreludeMiscIdUnique 349
closedTypeFamilyKindDIdKey = mkPreludeMiscIdUnique 350
closedTypeFamilyNoKindDIdKey = mkPreludeMiscIdUnique 351
infixLDIdKey = mkPreludeMiscIdUnique 352
infixRDIdKey = mkPreludeMiscIdUnique 353
infixNDIdKey = mkPreludeMiscIdUnique 354
roleAnnotDIdKey = mkPreludeMiscIdUnique 355
standaloneDerivDIdKey = mkPreludeMiscIdUnique 356
defaultSigDIdKey = mkPreludeMiscIdUnique 357
-- type Cxt = ...
cxtIdKey :: Unique
cxtIdKey = mkPreludeMiscIdUnique 360
-- data Strict = ...
isStrictKey, notStrictKey, unpackedKey :: Unique
isStrictKey = mkPreludeMiscIdUnique 363
notStrictKey = mkPreludeMiscIdUnique 364
unpackedKey = mkPreludeMiscIdUnique 365
-- data Con = ...
normalCIdKey, recCIdKey, infixCIdKey, forallCIdKey :: Unique
normalCIdKey = mkPreludeMiscIdUnique 370
recCIdKey = mkPreludeMiscIdUnique 371
infixCIdKey = mkPreludeMiscIdUnique 372
forallCIdKey = mkPreludeMiscIdUnique 373
-- type StrictType = ...
strictTKey :: Unique
strictTKey = mkPreludeMiscIdUnique 374
-- type VarStrictType = ...
varStrictTKey :: Unique
varStrictTKey = mkPreludeMiscIdUnique 375
-- data Type = ...
-- Keys 380-398 for type combinators, 400-403 for Role, 404-411 for Kind.
forallTIdKey, varTIdKey, conTIdKey, tupleTIdKey, unboxedTupleTIdKey, arrowTIdKey,
    listTIdKey, appTIdKey, sigTIdKey, equalityTIdKey, litTIdKey,
    promotedTIdKey, promotedTupleTIdKey,
    promotedNilTIdKey, promotedConsTIdKey :: Unique
forallTIdKey = mkPreludeMiscIdUnique 380
varTIdKey = mkPreludeMiscIdUnique 381
conTIdKey = mkPreludeMiscIdUnique 382
tupleTIdKey = mkPreludeMiscIdUnique 383
unboxedTupleTIdKey = mkPreludeMiscIdUnique 384
arrowTIdKey = mkPreludeMiscIdUnique 385
listTIdKey = mkPreludeMiscIdUnique 386
appTIdKey = mkPreludeMiscIdUnique 387
sigTIdKey = mkPreludeMiscIdUnique 388
equalityTIdKey = mkPreludeMiscIdUnique 389
litTIdKey = mkPreludeMiscIdUnique 390
promotedTIdKey = mkPreludeMiscIdUnique 391
promotedTupleTIdKey = mkPreludeMiscIdUnique 392
promotedNilTIdKey = mkPreludeMiscIdUnique 393
promotedConsTIdKey = mkPreludeMiscIdUnique 394
-- data TyLit = ...
numTyLitIdKey, strTyLitIdKey :: Unique
numTyLitIdKey = mkPreludeMiscIdUnique 395
strTyLitIdKey = mkPreludeMiscIdUnique 396
-- data TyVarBndr = ...
plainTVIdKey, kindedTVIdKey :: Unique
plainTVIdKey = mkPreludeMiscIdUnique 397
kindedTVIdKey = mkPreludeMiscIdUnique 398
-- data Role = ...
nominalRIdKey, representationalRIdKey, phantomRIdKey, inferRIdKey :: Unique
nominalRIdKey = mkPreludeMiscIdUnique 400
representationalRIdKey = mkPreludeMiscIdUnique 401
phantomRIdKey = mkPreludeMiscIdUnique 402
inferRIdKey = mkPreludeMiscIdUnique 403
-- data Kind = ...
varKIdKey, conKIdKey, tupleKIdKey, arrowKIdKey, listKIdKey, appKIdKey,
    starKIdKey, constraintKIdKey :: Unique
varKIdKey = mkPreludeMiscIdUnique 404
conKIdKey = mkPreludeMiscIdUnique 405
tupleKIdKey = mkPreludeMiscIdUnique 406
arrowKIdKey = mkPreludeMiscIdUnique 407
listKIdKey = mkPreludeMiscIdUnique 408
appKIdKey = mkPreludeMiscIdUnique 409
starKIdKey = mkPreludeMiscIdUnique 410
constraintKIdKey = mkPreludeMiscIdUnique 411
-- data Callconv = ...
-- Remaining Id-unique keys (420-492) and the DataCon uniques (40-48).
-- NOTE(review): the data-constructor keys use 'mkPreludeDataConUnique',
-- which is a separate numbering namespace from the misc Id uniques.
cCallIdKey, stdCallIdKey, cApiCallIdKey, primCallIdKey,
    javaScriptCallIdKey :: Unique
cCallIdKey = mkPreludeMiscIdUnique 420
stdCallIdKey = mkPreludeMiscIdUnique 421
cApiCallIdKey = mkPreludeMiscIdUnique 422
primCallIdKey = mkPreludeMiscIdUnique 423
javaScriptCallIdKey = mkPreludeMiscIdUnique 424
-- data Safety = ...
unsafeIdKey, safeIdKey, interruptibleIdKey :: Unique
unsafeIdKey = mkPreludeMiscIdUnique 430
safeIdKey = mkPreludeMiscIdUnique 431
interruptibleIdKey = mkPreludeMiscIdUnique 432
-- data Inline = ...
noInlineDataConKey, inlineDataConKey, inlinableDataConKey :: Unique
noInlineDataConKey = mkPreludeDataConUnique 40
inlineDataConKey = mkPreludeDataConUnique 41
inlinableDataConKey = mkPreludeDataConUnique 42
-- data RuleMatch = ...
conLikeDataConKey, funLikeDataConKey :: Unique
conLikeDataConKey = mkPreludeDataConUnique 43
funLikeDataConKey = mkPreludeDataConUnique 44
-- data Phases = ...
allPhasesDataConKey, fromPhaseDataConKey, beforePhaseDataConKey :: Unique
allPhasesDataConKey = mkPreludeDataConUnique 45
fromPhaseDataConKey = mkPreludeDataConUnique 46
beforePhaseDataConKey = mkPreludeDataConUnique 47
-- newtype TExp a = ...
tExpDataConKey :: Unique
tExpDataConKey = mkPreludeDataConUnique 48
-- data FunDep = ...
funDepIdKey :: Unique
funDepIdKey = mkPreludeMiscIdUnique 440
-- data FamFlavour = ...
typeFamIdKey, dataFamIdKey :: Unique
typeFamIdKey = mkPreludeMiscIdUnique 450
dataFamIdKey = mkPreludeMiscIdUnique 451
-- data TySynEqn = ...
tySynEqnIdKey :: Unique
tySynEqnIdKey = mkPreludeMiscIdUnique 460
-- quasiquoting
quoteExpKey, quotePatKey, quoteDecKey, quoteTypeKey :: Unique
quoteExpKey = mkPreludeMiscIdUnique 470
quotePatKey = mkPreludeMiscIdUnique 471
quoteDecKey = mkPreludeMiscIdUnique 472
quoteTypeKey = mkPreludeMiscIdUnique 473
-- data RuleBndr = ...
ruleVarIdKey, typedRuleVarIdKey :: Unique
ruleVarIdKey = mkPreludeMiscIdUnique 480
typedRuleVarIdKey = mkPreludeMiscIdUnique 481
-- data AnnTarget = ...
valueAnnotationIdKey, typeAnnotationIdKey, moduleAnnotationIdKey :: Unique
valueAnnotationIdKey = mkPreludeMiscIdUnique 490
typeAnnotationIdKey = mkPreludeMiscIdUnique 491
moduleAnnotationIdKey = mkPreludeMiscIdUnique 492
|
jstolarek/ghc
|
compiler/deSugar/DsMeta.hs
|
Haskell
|
bsd-3-clause
| 119,075
|
#!/usr/bin/env runhaskell
import Prelude hiding (print)
import System.Directory
import System.FilePath
import Data.List
import Data.Either
import Control.Monad
import System.Environment.UTF8
import System.IO.UTF8
import System.IO (stderr, stdin, stdout)
import Language.Haskell.Exts.Annotated.ExactPrint
import HPath.Path
import HPath.Hierarchy
import qualified HPath.HaskellSrcExts as HaskellSrcExts
import qualified HPath.Cabal as Cabal
-- | Build the @--help@ text; @name@ is the program name as invoked.
usage name =
  unlines $
    ("USAGE: " ++ name ++ " <Haskell identifier>")
      : ""
      : "  Print the source text corresponding to the Haskell identifier, assuming"
      : "  we are in a project directory where this source can be found."
      : ""
      : []
-- | Entry point: parse the command-line identifier into a path, locate the
-- project's source roots via the cabal file, parse candidate modules, and
-- print the exact source text of the matching declarations.
main = do
  args <- getArgs
  usage' <- usage `fmap` getProgName
  case args of
    ["-h"] -> out usage'
    ["-?"] -> out usage'
    ["--help"] -> out usage'
    [arg] -> case parse arg of
      Left e -> do
        err ("Not a path: " ++ arg ++ "\n" ++ show e)
        err usage'
      Right path -> do
        -- Progress/diagnostics go to stderr so stdout stays clean output.
        err (url path)
        dir <- getCurrentDirectory
        (exts, roots) <- Cabal.info dir
        -- Candidate files: every source root crossed with every relative
        -- path the identifier could live in.
        let files = nub [ r </> p | p <- paths path, r <- roots ]
            converted = nub (HaskellSrcExts.extension_conversion exts)
        when ((not . null) converted)
          (err (unlines ("Extensions:" : fmap show converted)))
        (mods, errs) <- HaskellSrcExts.modules files converted
        let parse_errors = fst errs
            io_exceptions = snd errs
        when ((not . null) parse_errors)
          (err "Parse errors:" >> mapM_ (err . show) parse_errors)
        when ((not . null) io_exceptions)
          (err "Varied exceptions:" >> mapM_ (err . show) io_exceptions)
        if null mods
          then err "No files corresponding to this identifier." >> err usage'
          else do
            -- Only the first successfully parsed module is searched.
            let decls = HaskellSrcExts.search path (take 1 mods)
            mapM_ (out . exactPrint') decls
    _ -> err usage'
-- | Diagnostics go to stderr; program output goes to stdout.
err = hPutStrLn stderr
out = hPutStrLn stdout
{-| A thin wrapper over 'exactPrint' that strips the leading newlines it
    emits (it normally positions output on the same line the declaration
    occupied in the input).
-}
exactPrint' decl =
  let printed = exactPrint decl []
  in dropWhile (== '\n') printed
|
solidsnack/hpath
|
Main.hs
|
Haskell
|
bsd-3-clause
| 2,594
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-----------------------------------------------------------------
-- Autogenerated by Thrift
-- --
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-- @generated
-----------------------------------------------------------------
module MyService where
import Prelude ( Bool(..), Enum, Float, IO, Double, String, Maybe(..),
Eq, Show, Ord,
concat, error, fromIntegral, fromEnum, length, map,
maybe, not, null, otherwise, return, show, toEnum,
enumFromTo, Bounded, minBound, maxBound, seq, succ,
pred, enumFrom, enumFromThen, enumFromThenTo,
(.), (&&), (||), (==), (++), ($), (-), (>>=), (>>))
import qualified Control.Applicative as Applicative (ZipList(..))
import Control.Applicative ( (<*>) )
import qualified Control.DeepSeq as DeepSeq
import qualified Control.Exception as Exception
import qualified Control.Monad as Monad ( liftM, ap, when )
import qualified Data.ByteString.Lazy as BS
import Data.Functor ( (<$>) )
import qualified Data.Hashable as Hashable
import qualified Data.Int as Int
import Data.List
import qualified Data.Maybe as Maybe (catMaybes)
import qualified Data.Text.Lazy.Encoding as Encoding ( decodeUtf8, encodeUtf8 )
import qualified Data.Text.Lazy as LT
import qualified Data.Typeable as Typeable ( Typeable )
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Vector as Vector
import qualified Test.QuickCheck.Arbitrary as Arbitrary ( Arbitrary(..) )
import qualified Test.QuickCheck as QuickCheck ( elements )
import qualified Thrift
import qualified Thrift.Types as Types
import qualified Thrift.Serializable as Serializable
import qualified Thrift.Arbitraries as Arbitraries
import qualified Module_Types as Module_Types
import qualified Includes_Types as Includes_Types
import qualified Service_Types
import qualified MyService_Iface as Iface
-- HELPER FUNCTIONS AND STRUCTURES --
-- NOTE(review): Thrift-generated code (see "DO NOT EDIT" header); comments
-- below are review annotations only — regenerate rather than hand-edit.
-- | Argument record for the @query@ RPC: field 1 is @s@, field 2 is @i@.
data Query_args = Query_args
  { query_args_s :: Module_Types.MyStruct
  , query_args_i :: Includes_Types.Included
  } deriving (Show,Eq,Typeable.Typeable)
instance Serializable.ThriftSerializable Query_args where
  encode = encode_Query_args
  decode = decode_Query_args
instance Hashable.Hashable Query_args where
  hashWithSalt salt record = salt `Hashable.hashWithSalt` query_args_s record `Hashable.hashWithSalt` query_args_i record
instance DeepSeq.NFData Query_args where
  rnf _record0 =
    DeepSeq.rnf (query_args_s _record0) `seq`
    DeepSeq.rnf (query_args_i _record0) `seq`
    ()
instance Arbitrary.Arbitrary Query_args where
  arbitrary = Monad.liftM Query_args (Arbitrary.arbitrary)
    `Monad.ap`(Arbitrary.arbitrary)
  shrink obj | obj == default_Query_args = []
             | otherwise = Maybe.catMaybes
    [ if obj == default_Query_args{query_args_s = query_args_s obj} then Nothing else Just $ default_Query_args{query_args_s = query_args_s obj}
    , if obj == default_Query_args{query_args_i = query_args_i obj} then Nothing else Just $ default_Query_args{query_args_i = query_args_i obj}
    ]
-- | Convert to the generic Thrift value representation (field id, name, value).
from_Query_args :: Query_args -> Types.ThriftVal
from_Query_args record = Types.TStruct $ Map.fromList $ Maybe.catMaybes
  [ (\_v3 -> Just (1, ("s",Module_Types.from_MyStruct _v3))) $ query_args_s record
  , (\_v3 -> Just (2, ("i",Includes_Types.from_Included _v3))) $ query_args_i record
  ]
write_Query_args :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Query_args -> IO ()
write_Query_args oprot record = Thrift.writeVal oprot $ from_Query_args record
encode_Query_args :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Query_args -> BS.ByteString
encode_Query_args oprot record = Thrift.serializeVal oprot $ from_Query_args record
-- | Missing fields fall back to 'default_Query_args'; a wrong field type
-- calls 'error' (generated-code convention).
to_Query_args :: Types.ThriftVal -> Query_args
to_Query_args (Types.TStruct fields) = Query_args{
  query_args_s = maybe (query_args_s default_Query_args) (\(_,_val5) -> (case _val5 of {Types.TStruct _val6 -> (Module_Types.to_MyStruct (Types.TStruct _val6)); _ -> error "wrong type"})) (Map.lookup (1) fields),
  query_args_i = maybe (query_args_i default_Query_args) (\(_,_val5) -> (case _val5 of {Types.TStruct _val7 -> (Includes_Types.to_Included (Types.TStruct _val7)); _ -> error "wrong type"})) (Map.lookup (2) fields)
  }
to_Query_args _ = error "not a struct"
read_Query_args :: (Thrift.Transport t, Thrift.Protocol p) => p t -> IO Query_args
read_Query_args iprot = to_Query_args <$> Thrift.readVal iprot (Types.T_STRUCT typemap_Query_args)
decode_Query_args :: (Thrift.Protocol p, Thrift.Transport t) => p t -> BS.ByteString -> Query_args
decode_Query_args iprot bs = to_Query_args $ Thrift.deserializeVal iprot (Types.T_STRUCT typemap_Query_args) bs
typemap_Query_args :: Types.TypeMap
typemap_Query_args = Map.fromList [("s",(1,(Types.T_STRUCT Module_Types.typemap_MyStruct))),("i",(2,(Types.T_STRUCT Includes_Types.typemap_Included)))]
default_Query_args :: Query_args
default_Query_args = Query_args{
  query_args_s = Module_Types.default_MyStruct,
  query_args_i = Includes_Types.default_Included}
-- NOTE(review): Thrift-generated. @query@ is a void RPC, so its result
-- struct has no fields; the machinery below mirrors Query_args trivially.
data Query_result = Query_result
  deriving (Show,Eq,Typeable.Typeable)
instance Serializable.ThriftSerializable Query_result where
  encode = encode_Query_result
  decode = decode_Query_result
instance Hashable.Hashable Query_result where
  hashWithSalt salt record = salt
instance DeepSeq.NFData Query_result where
  rnf _record8 =
    ()
instance Arbitrary.Arbitrary Query_result where
  arbitrary = QuickCheck.elements [Query_result]
from_Query_result :: Query_result -> Types.ThriftVal
from_Query_result record = Types.TStruct $ Map.fromList $ Maybe.catMaybes
  []
write_Query_result :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Query_result -> IO ()
write_Query_result oprot record = Thrift.writeVal oprot $ from_Query_result record
encode_Query_result :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Query_result -> BS.ByteString
encode_Query_result oprot record = Thrift.serializeVal oprot $ from_Query_result record
to_Query_result :: Types.ThriftVal -> Query_result
to_Query_result (Types.TStruct fields) = Query_result{
  }
to_Query_result _ = error "not a struct"
read_Query_result :: (Thrift.Transport t, Thrift.Protocol p) => p t -> IO Query_result
read_Query_result iprot = to_Query_result <$> Thrift.readVal iprot (Types.T_STRUCT typemap_Query_result)
decode_Query_result :: (Thrift.Protocol p, Thrift.Transport t) => p t -> BS.ByteString -> Query_result
decode_Query_result iprot bs = to_Query_result $ Thrift.deserializeVal iprot (Types.T_STRUCT typemap_Query_result) bs
typemap_Query_result :: Types.TypeMap
typemap_Query_result = Map.fromList []
default_Query_result :: Query_result
default_Query_result = Query_result{
  }
-- NOTE(review): Thrift-generated; structurally identical to Query_args but
-- for the @has_arg_docs@ RPC (field 1 @s@, field 2 @i@).
data Has_arg_docs_args = Has_arg_docs_args
  { has_arg_docs_args_s :: Module_Types.MyStruct
  , has_arg_docs_args_i :: Includes_Types.Included
  } deriving (Show,Eq,Typeable.Typeable)
instance Serializable.ThriftSerializable Has_arg_docs_args where
  encode = encode_Has_arg_docs_args
  decode = decode_Has_arg_docs_args
instance Hashable.Hashable Has_arg_docs_args where
  hashWithSalt salt record = salt `Hashable.hashWithSalt` has_arg_docs_args_s record `Hashable.hashWithSalt` has_arg_docs_args_i record
instance DeepSeq.NFData Has_arg_docs_args where
  rnf _record14 =
    DeepSeq.rnf (has_arg_docs_args_s _record14) `seq`
    DeepSeq.rnf (has_arg_docs_args_i _record14) `seq`
    ()
instance Arbitrary.Arbitrary Has_arg_docs_args where
  arbitrary = Monad.liftM Has_arg_docs_args (Arbitrary.arbitrary)
    `Monad.ap`(Arbitrary.arbitrary)
  shrink obj | obj == default_Has_arg_docs_args = []
             | otherwise = Maybe.catMaybes
    [ if obj == default_Has_arg_docs_args{has_arg_docs_args_s = has_arg_docs_args_s obj} then Nothing else Just $ default_Has_arg_docs_args{has_arg_docs_args_s = has_arg_docs_args_s obj}
    , if obj == default_Has_arg_docs_args{has_arg_docs_args_i = has_arg_docs_args_i obj} then Nothing else Just $ default_Has_arg_docs_args{has_arg_docs_args_i = has_arg_docs_args_i obj}
    ]
from_Has_arg_docs_args :: Has_arg_docs_args -> Types.ThriftVal
from_Has_arg_docs_args record = Types.TStruct $ Map.fromList $ Maybe.catMaybes
  [ (\_v17 -> Just (1, ("s",Module_Types.from_MyStruct _v17))) $ has_arg_docs_args_s record
  , (\_v17 -> Just (2, ("i",Includes_Types.from_Included _v17))) $ has_arg_docs_args_i record
  ]
write_Has_arg_docs_args :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Has_arg_docs_args -> IO ()
write_Has_arg_docs_args oprot record = Thrift.writeVal oprot $ from_Has_arg_docs_args record
encode_Has_arg_docs_args :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Has_arg_docs_args -> BS.ByteString
encode_Has_arg_docs_args oprot record = Thrift.serializeVal oprot $ from_Has_arg_docs_args record
to_Has_arg_docs_args :: Types.ThriftVal -> Has_arg_docs_args
to_Has_arg_docs_args (Types.TStruct fields) = Has_arg_docs_args{
  has_arg_docs_args_s = maybe (has_arg_docs_args_s default_Has_arg_docs_args) (\(_,_val19) -> (case _val19 of {Types.TStruct _val20 -> (Module_Types.to_MyStruct (Types.TStruct _val20)); _ -> error "wrong type"})) (Map.lookup (1) fields),
  has_arg_docs_args_i = maybe (has_arg_docs_args_i default_Has_arg_docs_args) (\(_,_val19) -> (case _val19 of {Types.TStruct _val21 -> (Includes_Types.to_Included (Types.TStruct _val21)); _ -> error "wrong type"})) (Map.lookup (2) fields)
  }
to_Has_arg_docs_args _ = error "not a struct"
read_Has_arg_docs_args :: (Thrift.Transport t, Thrift.Protocol p) => p t -> IO Has_arg_docs_args
read_Has_arg_docs_args iprot = to_Has_arg_docs_args <$> Thrift.readVal iprot (Types.T_STRUCT typemap_Has_arg_docs_args)
decode_Has_arg_docs_args :: (Thrift.Protocol p, Thrift.Transport t) => p t -> BS.ByteString -> Has_arg_docs_args
decode_Has_arg_docs_args iprot bs = to_Has_arg_docs_args $ Thrift.deserializeVal iprot (Types.T_STRUCT typemap_Has_arg_docs_args) bs
typemap_Has_arg_docs_args :: Types.TypeMap
typemap_Has_arg_docs_args = Map.fromList [("s",(1,(Types.T_STRUCT Module_Types.typemap_MyStruct))),("i",(2,(Types.T_STRUCT Includes_Types.typemap_Included)))]
default_Has_arg_docs_args :: Has_arg_docs_args
default_Has_arg_docs_args = Has_arg_docs_args{
  has_arg_docs_args_s = Module_Types.default_MyStruct,
  has_arg_docs_args_i = Includes_Types.default_Included}
-- NOTE(review): Thrift-generated empty result struct for the void
-- @has_arg_docs@ RPC; mirrors Query_result.
data Has_arg_docs_result = Has_arg_docs_result
  deriving (Show,Eq,Typeable.Typeable)
instance Serializable.ThriftSerializable Has_arg_docs_result where
  encode = encode_Has_arg_docs_result
  decode = decode_Has_arg_docs_result
instance Hashable.Hashable Has_arg_docs_result where
  hashWithSalt salt record = salt
instance DeepSeq.NFData Has_arg_docs_result where
  rnf _record22 =
    ()
instance Arbitrary.Arbitrary Has_arg_docs_result where
  arbitrary = QuickCheck.elements [Has_arg_docs_result]
from_Has_arg_docs_result :: Has_arg_docs_result -> Types.ThriftVal
from_Has_arg_docs_result record = Types.TStruct $ Map.fromList $ Maybe.catMaybes
  []
write_Has_arg_docs_result :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Has_arg_docs_result -> IO ()
write_Has_arg_docs_result oprot record = Thrift.writeVal oprot $ from_Has_arg_docs_result record
encode_Has_arg_docs_result :: (Thrift.Protocol p, Thrift.Transport t) => p t -> Has_arg_docs_result -> BS.ByteString
encode_Has_arg_docs_result oprot record = Thrift.serializeVal oprot $ from_Has_arg_docs_result record
to_Has_arg_docs_result :: Types.ThriftVal -> Has_arg_docs_result
to_Has_arg_docs_result (Types.TStruct fields) = Has_arg_docs_result{
  }
to_Has_arg_docs_result _ = error "not a struct"
read_Has_arg_docs_result :: (Thrift.Transport t, Thrift.Protocol p) => p t -> IO Has_arg_docs_result
read_Has_arg_docs_result iprot = to_Has_arg_docs_result <$> Thrift.readVal iprot (Types.T_STRUCT typemap_Has_arg_docs_result)
decode_Has_arg_docs_result :: (Thrift.Protocol p, Thrift.Transport t) => p t -> BS.ByteString -> Has_arg_docs_result
decode_Has_arg_docs_result iprot bs = to_Has_arg_docs_result $ Thrift.deserializeVal iprot (Types.T_STRUCT typemap_Has_arg_docs_result) bs
typemap_Has_arg_docs_result :: Types.TypeMap
typemap_Has_arg_docs_result = Map.fromList []
default_Has_arg_docs_result :: Has_arg_docs_result
default_Has_arg_docs_result = Has_arg_docs_result{
  }
-- NOTE(review): Thrift-generated server dispatch.  Both handlers catch
-- 'Exception.SomeException' and report it to the client as AE_UNKNOWN with an
-- empty message — the original exception detail is dropped (generated-code
-- convention; flagged here, not changed).
process_query (seqid, iprot, oprot, handler) = do
  args <- MyService.read_Query_args iprot
  (Exception.catch
    (do
      Iface.query handler (query_args_s args) (query_args_i args)
      let res = default_Query_result
      Thrift.writeMessage oprot ("query", Types.M_REPLY, seqid) $
        write_Query_result oprot res
      Thrift.tFlush (Thrift.getTransport oprot))
    ((\_ -> do
      Thrift.writeMessage oprot ("query", Types.M_EXCEPTION, seqid) $
        Thrift.writeAppExn oprot (Thrift.AppExn Thrift.AE_UNKNOWN "")
      Thrift.tFlush (Thrift.getTransport oprot)) :: Exception.SomeException -> IO ()))
process_has_arg_docs (seqid, iprot, oprot, handler) = do
  args <- MyService.read_Has_arg_docs_args iprot
  (Exception.catch
    (do
      Iface.has_arg_docs handler (has_arg_docs_args_s args) (has_arg_docs_args_i args)
      let res = default_Has_arg_docs_result
      Thrift.writeMessage oprot ("has_arg_docs", Types.M_REPLY, seqid) $
        write_Has_arg_docs_result oprot res
      Thrift.tFlush (Thrift.getTransport oprot))
    ((\_ -> do
      Thrift.writeMessage oprot ("has_arg_docs", Types.M_EXCEPTION, seqid) $
        Thrift.writeAppExn oprot (Thrift.AppExn Thrift.AE_UNKNOWN "")
      Thrift.tFlush (Thrift.getTransport oprot)) :: Exception.SomeException -> IO ()))
-- | Dispatch one incoming call by method name; unknown names drain the
-- request struct and reply with AE_UNKNOWN_METHOD.
proc_ handler (iprot,oprot) (name,typ,seqid) = case name of
  "query" -> process_query (seqid,iprot,oprot,handler)
  "has_arg_docs" -> process_has_arg_docs (seqid,iprot,oprot,handler)
  _ -> do
    _ <- Thrift.readVal iprot (Types.T_STRUCT Map.empty)
    Thrift.writeMessage oprot (name,Types.M_EXCEPTION,seqid) $
      Thrift.writeAppExn oprot (Thrift.AppExn Thrift.AE_UNKNOWN_METHOD ("Unknown function " ++ LT.unpack name))
    Thrift.tFlush (Thrift.getTransport oprot)
-- | Read one message and dispatch it; always reports success to the caller.
process handler (iprot, oprot) =
  Thrift.readMessage iprot (proc_ handler (iprot,oprot)) >> return True
|
getyourguide/fbthrift
|
thrift/compiler/test/fixtures/includes/gen-hs/MyService.hs
|
Haskell
|
apache-2.0
| 14,518
|
{-# LANGUAGE TypeFamilies #-}
-- | Perlin noise implemented in Zeldspar.
-- TODO: PNG/BMP sink, to write pretty pictures to disk.
import qualified Prelude
import Zeldspar
import Zeldspar.Parallel
-- * Image settings; too large imgWidth, imgHeight or imgOctaves may cause you
-- to run out of stack space. Even when using small values, you should use
-- @ulimit@ to set a generous stack limit. Telling GCC to compile your program
-- with a larger stack also helps.

-- Image dimensions in pixels.
imgWidth = 400
imgHeight = 400
-- Number of noise octaves summed per pixel (consumed by 'perlinVec').
imgOctaves = 5
-- Per-octave amplitude falloff passed to 'perlinVec'.
imgPersistence = 0.5
-- | Decently fast hash function: three rounds of xorshift-style bit mixing
-- over a 32-bit value.
xs32 :: Data Int32 -> Data Int32
xs32 seed =
  share (seed `xor` (seed `shiftR` 13)) $ \mixed1 ->
  share (mixed1 `xor` (mixed1 `shiftL` 17)) $ \mixed2 ->
  mixed2 `xor` (mixed2 `shiftR` 5)
-- | Hash over octave and coordinates: hash each component with a distinct
-- prime multiplier, then hash the sum once more.
xs32_2 :: Data Int32 -> Data Int32 -> Data Int32 -> Data Int32
xs32_2 octave x y = xs32 (octaveHash + xHash + yHash)
  where
    octaveHash = xs32 (octave * 887)
    xHash      = xs32 (x * 2251)
    yHash      = xs32 (y * 7919)
-- | Ridiculous, inlined vector gradient function. Supposedly very fast,
-- but quite incomprehensible: hashes the octave hash together with one
-- integral box corner, reduces the hash modulo 8, and picks one of eight
-- signed combinations of the fractional offsets @(xf, yf)@.
-- NOTE(review): @`rem` 8@ keeps the sign of its dividend, so a negative
-- hash yields a negative @val@ that matches none of the 0..6 cases and
-- falls through to @negate yf@ — confirm this bias is intended.
grad :: Data Int32                   -- ^ octave hash (from 'xs32')
     -> Data Int32 -> Data Int32     -- ^ integral corner coordinates
     -> Data Double -> Data Double   -- ^ fractional offsets from the corner
     -> Data Double
grad octhash xi yi xf yf = share (xs32_2 octhash xi yi `rem` 8) $ \val ->
  (val == 0)
  ? (xf + yf)
  $ (val == 1)
  ? (yf - xf)
  $ (val == 2)
  ? (xf - yf)
  $ (val == 3)
  ? (negate xf - yf)
  $ (val == 4)
  ? xf
  $ (val == 5)
  ? negate xf
  $ (val == 6)
  ? yf
  $ negate yf
-- | Linear interpolation between two numbers: @a@ at @t == 0@, @b@ at
-- @t == 1@.
--
-- Bug fix: the shared binding @a'@ was introduced but never used — the body
-- referred to @a@ directly, so @a@ was duplicated in the generated code and
-- the 'share' was dead weight.  The body now uses the shared value.
lerp :: Data Double -> Data Double -> Data Double -> Data Double
lerp a b t = share a $ \a' -> a' + t * (b - a')
-- | Smooth interpolation constant: Perlin's quintic @6t^5 - 15t^4 + 10t^3@,
-- with the input shared so it is evaluated once in the generated code.
fade :: Data Double -> Data Double
fade t = share t $ \u -> (u * u * u) * (u * (u * 6 - 15) + 10)
-- | Basic implementation of Perlin noise for one octave: given a point in 2D
-- space, calculate the hash of each corner of the integral "box" surrounding
-- the point, e.g. if the point is (1.5, 1.5) the box is described by
-- ((1, 1), (2, 2)).
-- Then, interpolate between these hashes depending on the point's location
-- inside the box. In the above example, the point (1.5, 1.5) is in the very
-- middle of the box ((1, 1), (2, 2)). The resulting value is the noise value
-- of that point.
-- Note that with Perlin noise, integral coordinates always have a zero noise
-- value.
perlin :: Data Int32 -> Data Double -> Data Double -> Data Double
perlin octave x y =
  share (xs32 octave) $ \octhash ->
  -- Gradient contribution of each of the four box corners; each is shared so
  -- the generated code computes it only once.
  share (grad octhash x0i y0i (x-x0) (y-y0)) $ \a ->
  share (grad octhash x1i y0i (x-x1) (y-y0)) $ \b ->
  share (grad octhash x0i y1i (x-x0) (y-y1)) $ \c ->
  share (grad octhash x1i y1i (x-x1) (y-y1)) $ \d ->
  -- Smoothed interpolation weights along x (u) and y (v).
  share (fade $ decimalPart x) $ \u ->
  share (fade $ decimalPart y) $ \v ->
  lerp (lerp a b u) (lerp c d u) v
  where
    -- Box corner coordinates, both as integers (for hashing) and as
    -- doubles (for the fractional offsets above).
    x0i = f2n x0
    y0i = f2n y0
    x1i = f2n x1
    y1i = f2n y1
    x0 = floor x
    y0 = floor y
    x1 = x0 + 1
    y1 = y0 + 1
-- | Fractional part of a staged double: @x - floor x@ (uses the local
-- 'floor', so the same positive-only caveat applies).
decimalPart :: Data Double -> Data Double
decimalPart x = x - floor x
-- TODO: only works for positive numbers
-- | Truncate a staged double towards negative infinity and return it as an
-- 'Int32': round to the nearest integer, then subtract one when rounding
-- overshot @x@ (i.e. when @x - i2n xi < 0@).
f2n :: Data Double -> Data Int32
f2n x = share (round x) $ \xi ->
  x - i2n xi >= 0 ? xi $ xi-1
-- TODO: only works for positive numbers
-- | 'floor' analogue for staged doubles, keeping the result as a double:
-- truncate via 'f2n' and convert back with @i2n@.  Shadows 'Prelude.floor'.
floor :: Data Double -> Data Double
floor = i2n . f2n
-- | Integer power for staged doubles: @pow n k@ computes @n@ raised to the
-- @k@-th power.
--
-- Bug fix: the previous definition recursed as @pow (n*n) (k-1)@, which
-- computes @n^(2^(k-1))@ instead of @n^k@ for @k >= 3@, and had no clause
-- for @k == 0@ (the 'Word32' argument then wraps around, causing runaway
-- recursion).  (This helper is not referenced elsewhere in this file.)
pow :: Data Double -> Word32 -> Data Double
pow _ 0 = 1
pow n 1 = n
pow n k = n * pow n (k-1)
-- | Use one Zeldspar pipeline stage for each noise octave.
-- Replace @|>> 5 `ofLength` sz >>|@ with @>>>@ for the sequential version.
--
-- The pipeline is unrolled at Haskell evaluation time: 'go' recurses over
-- the (host-level) octave counter, chaining one 'step' stage per octave and
-- ending with a 'finalize' normalisation stage.  Frequency doubles and
-- amplitude is scaled by @persistence@ at each octave.
perlinVec :: Data Word32
          -> Data Word32
          -> Int32
          -> Data Double
          -> ParZun (Manifest (Data Double)) (Manifest (Data Double)) ()
perlinVec width height octaves persistence = liftP $ do
    go 1 1 1
  where
    -- Maximum possible accumulated amplitude over the remaining octaves;
    -- used by 'finalize' to scale results back into range.
    maxdensity _ 1 _ = 1
    maxdensity p o a = maxdensity p (o-1) (a*p) + a*p
    sz = width*height
    -- Host-level recursion: note 'Prelude.>=' compares plain Int32s, so the
    -- number of stages is fixed at code-generation time.
    go octave freq amp
      | octave Prelude.>= octaves =
          step (Prelude.fromIntegral octave) freq amp
            |>> 5 `ofLength` sz >>| finalize
      | otherwise =
          step (Prelude.fromIntegral octave) freq amp
            |>> 5 `ofLength` sz >>| go (octave+1) (freq*2) (amp*persistence)
    -- Divide all noise values by the maximum possible noise value.
    finalize = loop $ do
      let md = maxdensity persistence octaves 1
      inp <- take
      out <- lift $ manifestFresh $ map (/md) inp
      emit out
    -- Calculate noise value for one octave.
    -- TODO: get rid of the manifest vector of coordinates.
    step octave freq amp = loop $ do
      inp <- take
      -- Pair every pixel with its (x, y) coordinate, row by row.
      let xs = replicate height $ (0 ... width)
          ys = map (replicate width) $ (0 ... height)
          coords = concat height $ zipWith zip xs ys
      coords' <- lift $ manifestFresh coords
      m <- lift $ manifestFresh $ map addOctave (zip coords' inp)
      emit m
      where
        -- Add this octave's contribution at one pixel; 75 is the base
        -- coordinate scale for the noise field.
        addOctave :: ((Data Index, Data Index), Data Double) -> Data Double
        addOctave ((x, y), total) =
          total + amp*perlin octave (i2n x/75*freq) (i2n y/75*freq)
-- | Add a source and a sink to the program, producing ten identical images.
-- The sink prints the noise values at some arbitrarily selected coordinates.
preparePar :: Data Length
           -> Data Length
           -> ParZun (Manifest (Data Double)) (Manifest (Data Double)) ()
           -> Run ()
preparePar w h p = do
    r <- initRef 0
    runParZ p (src r) (5 `ofLength` sz) snk (5 `ofLength` sz)
  where
    sz = w*h
    -- Source: counts invocations in @r@ and feeds an all-zero frame each
    -- time; the Bool is the "keep going" flag, which goes false once the
    -- counter reaches 10.
    src :: Ref (Data Int32) -> Run (Manifest (Data Double), Data Bool)
    src r = do
      count <- getRef r
      setRef r (count+1)
      m <- manifestFresh $ replicate (h*w) (0 :: Data Double)
      pure (m, count < 10)
    -- Sink: print five sample noise values (indices 801..805 of the
    -- flattened image) and always ask for more input.
    snk :: Manifest (Data Double) -> Run (Data Bool)
    snk v = do
      printf "%f %f %f %f %f\n" (v ! 801) (v ! 802) (v ! 803) (v ! 804) (v ! 805)
      pure true
-- | Compile the program to a C file ("noise.c" in the working directory).
-- The file needs to be compiled with
-- @-lm -lpthread -I/path_to_imperative-edsl/include path_to_imperative-edsl/csrc/chan.c@.
compileToC :: IO ()
compileToC =
  Prelude.writeFile "noise.c"
    $ compile
    $ preparePar imgWidth imgHeight
    $ perlinVec imgWidth imgHeight imgOctaves imgPersistence
main = compileToC
|
kmate/zeldspar
|
examples/noise.hs
|
Haskell
|
bsd-3-clause
| 6,270
|
{-# LANGUAGE CPP, ScopedTypeVariables #-}
module Main where
import Control.Applicative ((<$>))
import Control.Exception
import Data.List (isPrefixOf)
import qualified Data.Int as Int
import GHC.Int
import Ros.Internal.Msg.SrvInfo
import Ros.Internal.RosBinary
import Ros.Service (callService)
import Ros.Service.ServiceTypes
import qualified Ros.Test_srvs.AddTwoIntsRequest as Req
import qualified Ros.Test_srvs.AddTwoIntsResponse as Res
import Ros.Test_srvs.EmptyRequest
import Ros.Test_srvs.EmptyResponse
import Test.Tasty
import Test.Tasty.HUnit
-- To run:
-- 1. start ros: run "roscore"
-- 2. in another terminal start the add_two_ints server:
-- python roshask/Tests/ServiceClientTests/add_two_ints_server.py
-- 3. in a new terminal make sure $ROS_MASTER_URI is correct and run
-- cabal test servicetest --show-details=always
type Response a = IO (Either ServiceResponseExcept a)
-- | Run all service-client tests.  Requires the ROS setup described in the
-- comment above (roscore + add_two_ints server) to be running.
main :: IO ()
main = defaultMain $ testGroup "Service Tests" [
  addIntsTest 4 7
  , notOkTest 100 100
  , requestResponseDontMatchTest
  , noProviderTest
  , connectionHeaderBadMD5Test
  , emptyServiceTest]
-- | Happy path: the @add_two_ints@ service must answer with the sum of the
-- two request fields.
addIntsTest :: GHC.Int.Int64 -> GHC.Int.Int64 -> TestTree
addIntsTest x y = testCase ("add_two_ints, add " ++ show x ++ " + " ++ show y) $
  do res <- callService "/add_two_ints" Req.AddTwoIntsRequest{Req.a=x, Req.b=y} :: Response Res.AddTwoIntsResponse
     Right (Res.AddTwoIntsResponse (x + y)) @=? res
-- add_two_ints_server returns None (triggering the NotOkError) if both a and b are 100
-- | A handler returning None must surface as a 'NotOkExcept' on the client.
notOkTest :: GHC.Int.Int64 -> GHC.Int.Int64 -> TestTree
notOkTest x y = testCase ("NotOKError, add_two_ints, add " ++ show x ++ " + " ++ show y) $
  do res <- callService "/add_two_ints" Req.AddTwoIntsRequest{Req.a=x, Req.b=y} :: Response Res.AddTwoIntsResponse
     Left (NotOkExcept "service cannot process request: service handler returned None") @=? res
-- tests that an error is returned if the server is not registered with the master
-- | Calling a service name with no registered provider must yield a
-- 'MasterExcept' from the lookupService master call.
noProviderTest :: TestTree
noProviderTest = testCase ("service not registered error") $
  do res <- callService "/not_add_two_ints" Req.AddTwoIntsRequest{Req.a=x, Req.b=y} :: Response Res.AddTwoIntsResponse
     Left (MasterExcept "lookupService failed, code: -1, statusMessage: no provider") @=? res
  where
    x = 10
    y = 10
-- | Run @action@ and assert that it throws an exception whose 'show'
-- representation begins with that of @selector@; any other outcome fails
-- the test with @msg@ plus a diagnostic.
-- (Originally from the deprecated @testpack@ package by John Goerzen.)
assertRaises :: forall a e. (Show a, Control.Exception.Exception e, Show e, Eq e)
             => String -> e -> IO a -> IO ()
assertRaises msg selector action = do
  outcome <- Control.Exception.try action
  case outcome of
    Left caught -> checkCaught caught
    Right _ -> assertFailure $
                 msg ++ "\nReceived no exception, "
                     ++ "but was expecting exception: " ++ show selector
  where
    -- Prefix match (rather than equality) so selectors with dynamic detail
    -- still match.
    checkCaught :: e -> IO ()
    checkCaught caught
      | show selector `isPrefixOf` show caught = return ()
      | otherwise = assertFailure $
          msg ++ "\nReceived unexpected exception: "
              ++ show caught
              ++ "\ninstead of exception: " ++ show selector
-- | A response type whose MD5 or service name disagrees with the request
-- type must make 'callService' raise an 'ErrorCall' locally.
requestResponseDontMatchTest :: TestTree
requestResponseDontMatchTest =
  testGroup "check request and response" [testMd5, testName]
  where
    -- 'BadMD5' advertises a different MD5 than AddTwoIntsRequest.
    testMd5 = testCase ("check md5") $ do
      assertRaises "Failed to detect mismatch"
        (ErrorCall "Request and response type do not match")
        --(callService "/add_two_ints" (Req.AddTwoIntsRequest 1 1) :: IO (Either ServiceResponseExcept BadMD5))
        (callService "/add_two_ints" (Req.AddTwoIntsRequest 1 1) :: Response BadMD5)
    -- 'BadName' advertises a different service type name.
    testName = testCase ("check name") $ do
      assertRaises "Failed to detect mismatch"
        (ErrorCall "Request and response type do not match")
        (callService "/add_two_ints" (Req.AddTwoIntsRequest 1 1) :: IO (Either ServiceResponseExcept BadName))
-- | Using 'BadMD5' for both request and response gets past the local check,
-- so the *server* rejects the call via the connection header; that must be
-- reported as a 'ConHeadExcept'.
connectionHeaderBadMD5Test :: TestTree
connectionHeaderBadMD5Test = testCase "connection header wrong MD5 error" $
  do res <- callService "/add_two_ints" $ BadMD5 10 :: Response BadMD5
     Left (ConHeadExcept "Connection header from server has error, connection header is: [(\"error\",\"request from [roshask]: md5sums do not match: [6a2e34150c00229791cc89ff309fff22] vs. [6a2e34150c00229791cc89ff309fff21]\")]") @=? res
-- | A service with empty request and response payloads round-trips.
emptyServiceTest :: TestTree
emptyServiceTest =
  testCase "emptyService" $
  do res <- callService "/empty_srv" $ EmptyRequest :: Response EmptyResponse
     Right (EmptyResponse) @=? res
-- | Stand-in for AddTwoInts whose advertised MD5 ends in @..22@ instead of
-- the real @..21@; used to provoke MD5-mismatch errors.
data BadMD5 = BadMD5 {a :: Int.Int64} deriving (Show, Eq)
instance SrvInfo BadMD5 where
  srvMD5 _ = "6a2e34150c00229791cc89ff309fff22"
  srvTypeName _ = "test_srvs/AddTwoInts"
instance RosBinary BadMD5 where
  put obj' = put (a obj')
  get = BadMD5 <$> get
-- | Stand-in for AddTwoInts with the correct MD5 but a misspelled service
-- type name (@AddTwoIntu@); used to provoke name-mismatch errors.
data BadName = BadName Int.Int64 deriving (Show, Eq)
instance SrvInfo BadName where
  srvMD5 _ = "6a2e34150c00229791cc89ff309fff21"
  srvTypeName _ = "test_srvs/AddTwoIntu"
instance RosBinary BadName where
  put (BadName x) = put x
  get = BadName <$> get
-- 'assertRaises' needs @Eq ErrorCall@; newer base provides it, so this
-- orphan instance is only compiled for older base versions.
#if MIN_VERSION_base(4,7,0)
#else
instance Eq ErrorCall where
  x == y = (show x) == (show y)
#endif
|
acowley/roshask
|
Tests/ServiceClientTests/ServiceClientTest.hs
|
Haskell
|
bsd-3-clause
| 5,169
|
{-# LANGUAGE RecordWildCards #-}
import Control.Applicative
import Control.Exception
import Control.Monad
import Control.Monad.Trans.Resource (runResourceT)
import qualified Data.ByteString.Char8 as S8
import qualified Data.ByteString.Lazy.Char8 as L8
import Data.List
import Data.Maybe
import Distribution.PackageDescription.Parse
import Distribution.Text
import Distribution.System
import Distribution.Package
import Distribution.PackageDescription hiding (options)
import Distribution.Verbosity
import System.Console.GetOpt
import System.Environment
import System.Directory
import System.IO.Error
import System.Process
import System.Exit
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Zip as Zip
import qualified Codec.Compression.GZip as GZip
import Data.Aeson
import qualified Data.CaseInsensitive as CI
import Data.Conduit
import qualified Data.Conduit.Combinators as CC
import Data.List.Extra
import qualified Data.Text as T
import Development.Shake
import Development.Shake.FilePath
import Network.HTTP.Conduit
import Network.HTTP.Types
import Network.Mime
import Prelude -- Silence AMP warning
-- | Entrypoint.  Gathers build-time information (package description, git
-- metadata, home directory), folds the command-line option setters into a
-- default 'Global', resolves stack paths, and hands the resulting
-- configuration plus the remaining arguments to 'rules'.
main :: IO ()
main =
  shakeArgsWith
    shakeOptions { shakeFiles = releaseDir
                 , shakeVerbosity = Chatty
                 , shakeChange = ChangeModtimeAndDigestInput }
    options $
    \flags args -> do
      gStackPackageDescription <-
        packageDescription <$> readPackageDescription silent "stack.cabal"
      gGithubAuthToken <- lookupEnv githubAuthTokenEnvVar
      gGitRevCount <- length . lines <$> readProcess "git" ["rev-list", "HEAD"] ""
      gGitSha <- trim <$> readProcess "git" ["rev-parse", "HEAD"] ""
      gHomeDir <- getHomeDirectory
      -- Defaults below are overridable by the option setters in @flags@.
      let gGpgKey = "9BEFB442"
          gAllowDirty = False
          gGithubReleaseTag = Nothing
          Platform arch _ = buildPlatform
          gArch = arch
          gBinarySuffix = ""
          gUploadLabel = Nothing
          gProjectRoot = "" -- Set to real value below.
          global0 = foldl (flip id) Global{..} flags
      -- Need to get paths after options since the '--arch' argument can affect them.
      -- NOTE(review): localInstallRoot' is bound but never used — the stack
      -- invocation still runs; confirm whether it can be removed.
      localInstallRoot' <- getStackPath global0 "local-install-root"
      projectRoot' <- getStackPath global0 "project-root"
      let global = global0
            { gProjectRoot = projectRoot' }
      return $ Just $ rules global args
  where
    -- Ask @stack path --<key>@ and strip the echoed "key:" prefix if present.
    getStackPath global path = do
      out <- readProcess stackProgName (stackArgs global ++ ["path", "--" ++ path]) ""
      return $ trim $ fromMaybe out $ stripPrefix (path ++ ":") out
-- | Additional command-line options accepted on top of Shake's own.
-- Each option yields either an error message or a 'Global' updater; the
-- updaters are folded over the defaults in 'main'.
options :: [OptDescr (Either String (Global -> Global))]
options =
  [ Option "" [gpgKeyOptName]
      (ReqArg (\v -> Right $ \g -> g{gGpgKey = v}) "USER-ID")
      "GPG user ID to sign distribution package with."
  , Option "" [allowDirtyOptName] (NoArg $ Right $ \g -> g{gAllowDirty = True})
      "Allow a dirty working tree for release."
  , Option "" [githubAuthTokenOptName]
      (ReqArg (\v -> Right $ \g -> g{gGithubAuthToken = Just v}) "TOKEN")
      ("Github personal access token (defaults to " ++
       githubAuthTokenEnvVar ++
       " environment variable).")
  , Option "" [githubReleaseTagOptName]
      (ReqArg (\v -> Right $ \g -> g{gGithubReleaseTag = Just v}) "TAG")
      "Github release tag to upload to."
  , Option "" [archOptName]
      (ReqArg
        (\v -> case simpleParse v of
            Nothing -> Left $ "Unknown architecture in --arch option: " ++ v
            Just arch -> Right $ \g -> g{gArch = arch})
        "ARCHITECTURE")
      "Architecture to build (e.g. 'i386' or 'x86_64')."
  , Option "" [binaryVariantOptName]
      (ReqArg (\v -> Right $ \g -> g{gBinarySuffix = v}) "SUFFIX")
      "Extra suffix to add to binary executable archive filename."
  , Option "" [uploadLabelOptName]
      (ReqArg (\v -> Right $ \g -> g{gUploadLabel = Just v}) "LABEL")
      "Label to give the uploaded release asset" ]
-- | Shake rules: phony targets ("release", "check", "upload", "build",
-- plus per-distro variants), file rules for building/signing/staging the
-- stack binary, and packaging rules for deb, rpm, and Arch archives.
-- Naming and path helpers live in the (large) where clause below.
rules :: Global -> [String] -> Rules ()
rules global@Global{..} args = do
    case args of
        [] -> error "No wanted target(s) specified."
        _ -> want args
    -- Aggregate phony targets.
    phony releasePhony $ do
        need [checkPhony]
        need [uploadPhony]
    phony cleanPhony $
        removeFilesAfter releaseDir ["//*"]
    phony checkPhony $
        need [releaseCheckDir </> binaryExeFileName]
    phony uploadPhony $
        mapM_ (\f -> need [releaseDir </> f <.> uploadExt]) binaryPkgFileNames
    phony buildPhony $
        mapM_ (\f -> need [releaseDir </> f]) binaryPkgFileNames
    distroPhonies ubuntuDistro ubuntuVersions debPackageFileName
    distroPhonies debianDistro debianVersions debPackageFileName
    distroPhonies centosDistro centosVersions rpmPackageFileName
    distroPhonies fedoraDistro fedoraVersions rpmPackageFileName
    phony archUploadPhony $ need [archDir </> archPackageFileName <.> uploadExt]
    phony archBuildPhony $ need [archDir </> archPackageFileName]
    -- A "*.upload" file is produced by uploading the corresponding artifact
    -- to the Github release; ".asc" files get a "(GPG signature)" label.
    releaseDir </> "*" <.> uploadExt %> \out -> do
        let srcFile = dropExtension out
            mUploadLabel =
                if takeExtension srcFile == ascExt
                    then fmap (++ " (GPG signature)") gUploadLabel
                    else gUploadLabel
        uploadToGithubRelease global srcFile mUploadLabel
        copyFileChanged srcFile out
    -- Release check: refuse a dirty tree, then build/test stack with itself.
    releaseCheckDir </> binaryExeFileName %> \out -> do
        need [releaseBinDir </> binaryName </> stackExeFileName]
        Stdout dirty <- cmd "git status --porcelain"
        when (not gAllowDirty && not (null (trim dirty))) $
            error ("Working tree is dirty. Use --" ++ allowDirtyOptName ++ " option to continue anyway.")
        withTempDir $ \tmpDir -> do
            let cmd0 = cmd (releaseBinDir </> binaryName </> stackExeFileName)
                    (stackArgs global)
                    ["--local-bin-path=" ++ tmpDir]
            () <- cmd0 "install --pedantic --haddock --no-haddock-deps"
            () <- cmd0 "install cabal-install"
            let cmd' = cmd (AddPath [tmpDir] []) stackProgName (stackArgs global)
            () <- cmd' "clean"
            () <- cmd' "build --pedantic"
            () <- cmd' "test --pedantic --flag stack:integration-tests"
            return ()
        copyFileChanged (releaseBinDir </> binaryName </> stackExeFileName) out
    -- Zip archive of the staged release files (used on Windows).
    releaseDir </> binaryPkgZipFileName %> \out -> do
        stageFiles <- getBinaryPkgStageFiles
        putNormal $ "zip " ++ out
        liftIO $ do
            entries <- forM stageFiles $ \stageFile -> do
                Zip.readEntry
                    [Zip.OptLocation
                        (dropDirectoryPrefix (releaseStageDir </> binaryName) stageFile)
                        False]
                    stageFile
            let archive = foldr Zip.addEntryToArchive Zip.emptyArchive entries
            L8.writeFile out (Zip.fromArchive archive)
    -- Tarball of the staged release files (non-Windows).
    releaseDir </> binaryPkgTarGzFileName %> \out -> do
        stageFiles <- getBinaryPkgStageFiles
        writeTarGz out releaseStageDir stageFiles
    releaseStageDir </> binaryName </> stackExeFileName %> \out -> do
        copyFileChanged (releaseDir </> binaryExeFileName) out
    releaseStageDir </> (binaryName ++ "//*") %> \out -> do
        copyFileChanged
            (dropDirectoryPrefix (releaseStageDir </> binaryName) out)
            out
    -- Final executable: reject "dirty" builds, then strip (or sign, on
    -- Windows) the installed binary.
    releaseDir </> binaryExeFileName %> \out -> do
        need [releaseBinDir </> binaryName </> stackExeFileName]
        (Stdout versionOut) <- cmd (releaseBinDir </> binaryName </> stackExeFileName) "--version"
        when (not gAllowDirty && "dirty" `isInfixOf` lower versionOut) $
            error ("Refusing continue because 'stack --version' reports dirty. Use --" ++
                   allowDirtyOptName ++ " option to continue anyway.")
        case platformOS of
            Windows -> do
                -- Windows doesn't have or need a 'strip' command, so skip it.
                -- Instead, we sign the executable
                liftIO $ copyFile (releaseBinDir </> binaryName </> stackExeFileName) out
                actionOnException
                    (command_ [] "c:\\Program Files\\Microsoft SDKs\\Windows\\v7.1\\Bin\\signtool.exe"
                        ["sign"
                        ,"/v"
                        ,"/d", synopsis gStackPackageDescription
                        ,"/du", homepage gStackPackageDescription
                        ,"/n", "FP Complete, Corporation"
                        ,"/t", "http://timestamp.verisign.com/scripts/timestamp.dll"
                        ,out])
                    (removeFile out)
            Linux ->
                cmd "strip -p --strip-unneeded --remove-section=.comment -o"
                    [out, releaseBinDir </> binaryName </> stackExeFileName]
            _ ->
                cmd "strip -o"
                    [out, releaseBinDir </> binaryName </> stackExeFileName]
    -- Detached ASCII-armored GPG signature for the package file.
    releaseDir </> binaryPkgSignatureFileName %> \out -> do
        need [out -<.> ""]
        _ <- liftIO $ tryJust (guard . isDoesNotExistError) (removeFile out)
        cmd "gpg --detach-sig --armor"
            [ "-u", gGpgKey
            , dropExtension out ]
    -- Build stack itself into the release bin directory; always re-run so a
    -- stale binary is never shipped.
    releaseBinDir </> binaryName </> stackExeFileName %> \out -> do
        alwaysRerun
        actionOnException
            (cmd stackProgName
                (stackArgs global)
                ["--local-bin-path=" ++ takeDirectory out]
                "install --pedantic")
            (removeFile out)
    debDistroRules ubuntuDistro ubuntuVersions
    debDistroRules debianDistro debianVersions
    rpmDistroRules centosDistro centosVersions
    rpmDistroRules fedoraDistro fedoraVersions
    -- Arch Linux package: upload to S3 and build the tarball.
    archDir </> archPackageFileName <.> uploadExt %> \out -> do
        let pkgFile = dropExtension out
        need [pkgFile]
        () <- cmd "aws s3 cp"
            [ pkgFile
            , "s3://download.fpcomplete.com/archlinux/" ++ takeFileName pkgFile ]
        copyFileChanged pkgFile out
    archDir </> archPackageFileName %> \out -> do
        docFiles <- getDocFiles
        let inputFiles = concat
                [[archStagedExeFile
                 ,archStagedBashCompletionFile]
                ,map (archStagedDocDir </>) docFiles]
        need inputFiles
        putNormal $ "tar gzip " ++ out
        writeTarGz out archStagingDir inputFiles
    archStagedExeFile %> \out -> do
        copyFileChanged (releaseDir </> binaryExeFileName) out
    archStagedBashCompletionFile %> \out -> do
        writeBashCompletion archStagedExeFile archStagingDir out
    archStagedDocDir ++ "//*" %> \out -> do
        let origFile = dropDirectoryPrefix archStagedDocDir out
        copyFileChanged origFile out
  where
    -- Rules for one Debian-style distro: upload via deb-s3, build with fpm,
    -- and stage exe/bash-completion/docs.
    debDistroRules debDistro0 debVersions = do
        let anyVersion0 = anyDistroVersion debDistro0
        distroVersionDir anyVersion0 </> debPackageFileName anyVersion0 <.> uploadExt %> \out -> do
            let DistroVersion{..} = distroVersionFromPath out debVersions
                pkgFile = dropExtension out
            need [pkgFile]
            () <- cmd "deb-s3 upload -b download.fpcomplete.com --preserve-versions"
                [ "--sign=" ++ gGpgKey
                , "--prefix=" ++ dvDistro ++ "/" ++ dvCodeName
                , pkgFile ]
            copyFileChanged pkgFile out
        distroVersionDir anyVersion0 </> debPackageFileName anyVersion0 %> \out -> do
            docFiles <- getDocFiles
            let dv@DistroVersion{..} = distroVersionFromPath out debVersions
                inputFiles = concat
                    [[debStagedExeFile dv
                     ,debStagedBashCompletionFile dv]
                    ,map (debStagedDocDir dv </>) docFiles]
            need inputFiles
            cmd "fpm -f -s dir -t deb"
                "--deb-recommends git --deb-recommends gnupg"
                "-d g++ -d gcc -d libc6-dev -d libffi-dev -d libgmp-dev -d make -d xz-utils -d zlib1g-dev"
                ["-n", stackProgName
                ,"-C", debStagingDir dv
                ,"-v", debPackageVersionStr dv
                ,"-p", out
                ,"-m", maintainer gStackPackageDescription
                ,"--description", synopsis gStackPackageDescription
                ,"--license", display (license gStackPackageDescription)
                ,"--url", homepage gStackPackageDescription]
                (map (dropDirectoryPrefix (debStagingDir dv)) inputFiles)
        debStagedExeFile anyVersion0 %> \out -> do
            copyFileChanged (releaseDir </> binaryExeFileName) out
        debStagedBashCompletionFile anyVersion0 %> \out -> do
            let dv = distroVersionFromPath out debVersions
            writeBashCompletion (debStagedExeFile dv) (debStagingDir dv) out
        debStagedDocDir anyVersion0 ++ "//*" %> \out -> do
            let dv@DistroVersion{..} = distroVersionFromPath out debVersions
                origFile = dropDirectoryPrefix (debStagedDocDir dv) out
            copyFileChanged origFile out
    -- Rules for one RPM-style distro: upload via rpm-s3 (with a temporary
    -- ~/.rpmmacros that is always removed afterwards), build with fpm.
    rpmDistroRules rpmDistro0 rpmVersions = do
        let anyVersion0 = anyDistroVersion rpmDistro0
        distroVersionDir anyVersion0 </> rpmPackageFileName anyVersion0 <.> uploadExt %> \out -> do
            let DistroVersion{..} = distroVersionFromPath out rpmVersions
                pkgFile = dropExtension out
            need [pkgFile]
            let rpmmacrosFile = gHomeDir </> ".rpmmacros"
            rpmmacrosExists <- liftIO $ System.Directory.doesFileExist rpmmacrosFile
            when rpmmacrosExists $
                error ("'" ++ rpmmacrosFile ++ "' already exists. Move it out of the way first.")
            actionFinally
                (do writeFileLines rpmmacrosFile
                        [ "%_signature gpg"
                        , "%_gpg_name " ++ gGpgKey ]
                    () <- cmd "rpm-s3 --verbose --sign --bucket=download.fpcomplete.com"
                        [ "--repopath=" ++ dvDistro ++ "/" ++ dvVersion
                        , pkgFile ]
                    return ())
                (liftIO $ removeFile rpmmacrosFile)
            copyFileChanged pkgFile out
        distroVersionDir anyVersion0 </> rpmPackageFileName anyVersion0 %> \out -> do
            docFiles <- getDocFiles
            let dv@DistroVersion{..} = distroVersionFromPath out rpmVersions
                inputFiles = concat
                    [[rpmStagedExeFile dv
                     ,rpmStagedBashCompletionFile dv]
                    ,map (rpmStagedDocDir dv </>) docFiles]
            need inputFiles
            cmd "fpm -s dir -t rpm"
                "-d perl -d make -d automake -d gcc -d gmp-devel -d libffi -d zlib -d xz -d tar"
                ["-n", stackProgName
                ,"-C", rpmStagingDir dv
                ,"-v", rpmPackageVersionStr dv
                ,"--iteration", rpmPackageIterationStr dv
                ,"-p", out
                ,"-m", maintainer gStackPackageDescription
                ,"--description", synopsis gStackPackageDescription
                ,"--license", display (license gStackPackageDescription)
                ,"--url", homepage gStackPackageDescription]
                (map (dropDirectoryPrefix (rpmStagingDir dv)) inputFiles)
        rpmStagedExeFile anyVersion0 %> \out -> do
            copyFileChanged (releaseDir </> binaryExeFileName) out
        rpmStagedBashCompletionFile anyVersion0 %> \out -> do
            let dv = distroVersionFromPath out rpmVersions
            writeBashCompletion (rpmStagedExeFile dv) (rpmStagingDir dv) out
        rpmStagedDocDir anyVersion0 ++ "//*" %> \out -> do
            let dv@DistroVersion{..} = distroVersionFromPath out rpmVersions
                origFile = dropDirectoryPrefix (rpmStagedDocDir dv) out
            copyFileChanged origFile out
    -- Ask the staged stack binary for its bash-completion script.
    writeBashCompletion stagedStackExeFile stageDir out = do
        need [stagedStackExeFile]
        (Stdout bashCompletionScript) <- cmd [stagedStackExeFile] "--bash-completion-script" ["/" ++ dropDirectoryPrefix stageDir stagedStackExeFile]
        writeFileChanged out bashCompletionScript
    -- All files that go into the binary package (exe + docs), 'need'ed.
    getBinaryPkgStageFiles = do
        docFiles <- getDocFiles
        let stageFiles = concat
                [[releaseStageDir </> binaryName </> stackExeFileName]
                ,map ((releaseStageDir </> binaryName) </>) docFiles]
        need stageFiles
        return stageFiles
    getDocFiles = getDirectoryFiles "." ["LICENSE", "*.md", "doc//*"]
    -- Recover the distro/version from an output path under releaseDir.
    distroVersionFromPath path versions =
        let path' = dropDirectoryPrefix releaseDir path
            version = takeDirectory1 (dropDirectory1 path')
        in DistroVersion (takeDirectory1 path') version (lookupVersionCodeName version versions)
    -- Declare "upload-DISTRO-VERSION"/"build-DISTRO-VERSION" phonies.
    distroPhonies distro0 versions0 makePackageFileName =
        forM_ versions0 $ \(version0,_) -> do
            let dv@DistroVersion{..} = DistroVersion distro0 version0 (lookupVersionCodeName version0 versions0)
            phony (distroUploadPhony dv) $ need [distroVersionDir dv </> makePackageFileName dv <.> uploadExt]
            phony (distroBuildPhony dv) $ need [distroVersionDir dv </> makePackageFileName dv]
    lookupVersionCodeName version versions =
        fromMaybe (error $ "lookupVersionCodeName: could not find " ++ show version ++ " in " ++ show versions) $
            lookup version versions
    -- Phony target names.
    releasePhony = "release"
    checkPhony = "check"
    uploadPhony = "upload"
    cleanPhony = "clean"
    buildPhony = "build"
    distroUploadPhony DistroVersion{..} = "upload-" ++ dvDistro ++ "-" ++ dvVersion
    distroBuildPhony DistroVersion{..} = "build-" ++ dvDistro ++ "-" ++ dvVersion
    archUploadPhony = "upload-" ++ archDistro
    archBuildPhony = "build-" ++ archDistro
    -- Directory and file-name helpers.
    releaseCheckDir = releaseDir </> "check"
    releaseStageDir = releaseDir </> "stage"
    releaseBinDir = releaseDir </> "bin"
    distroVersionDir DistroVersion{..} = releaseDir </> dvDistro </> dvVersion
    binaryPkgFileNames = [binaryPkgFileName, binaryPkgSignatureFileName]
    binaryPkgSignatureFileName = binaryPkgFileName <.> ascExt
    binaryPkgFileName =
        case platformOS of
            Windows -> binaryPkgZipFileName
            _ -> binaryPkgTarGzFileName
    binaryPkgZipFileName = binaryName <.> zipExt
    binaryPkgTarGzFileName = binaryName <.> tarGzExt
    binaryExeFileName = binaryName <.> exe
    -- e.g. "stack-1.0.0-linux-x86_64[-SUFFIX]".
    binaryName =
        concat
            [ stackProgName
            , "-"
            , stackVersionStr global
            , "-"
            , display platformOS
            , "-"
            , display gArch
            , if null gBinarySuffix then "" else "-" ++ gBinarySuffix ]
    stackExeFileName = stackProgName <.> exe
    -- Debian staging layout and package naming.
    debStagedDocDir dv = debStagingDir dv </> "usr/share/doc" </> stackProgName
    debStagedBashCompletionFile dv = debStagingDir dv </> "etc/bash_completion.d/stack"
    debStagedExeFile dv = debStagingDir dv </> "usr/bin/stack"
    debStagingDir dv = distroVersionDir dv </> debPackageName dv
    debPackageFileName dv = debPackageName dv <.> debExt
    debPackageName dv = stackProgName ++ "_" ++ debPackageVersionStr dv ++ "_amd64"
    debPackageVersionStr DistroVersion{..} = stackVersionStr global ++ "-0~" ++ dvCodeName
    -- RPM staging layout and package naming.
    rpmStagedDocDir dv = rpmStagingDir dv </> "usr/share/doc" </> (stackProgName ++ "-" ++ rpmPackageVersionStr dv)
    rpmStagedBashCompletionFile dv = rpmStagingDir dv </> "etc/bash_completion.d/stack"
    rpmStagedExeFile dv = rpmStagingDir dv </> "usr/bin/stack"
    rpmStagingDir dv = distroVersionDir dv </> rpmPackageName dv
    rpmPackageFileName dv = rpmPackageName dv <.> rpmExt
    rpmPackageName dv = stackProgName ++ "-" ++ rpmPackageVersionStr dv ++ "-" ++ rpmPackageIterationStr dv ++ ".x86_64"
    rpmPackageIterationStr DistroVersion{..} = "0." ++ dvCodeName
    rpmPackageVersionStr _ = stackVersionStr global
    -- Arch staging layout and package naming.
    archStagedDocDir = archStagingDir </> "usr/share/doc" </> stackProgName
    archStagedBashCompletionFile = archStagingDir </> "usr/share/bash-completion/completions/stack"
    archStagedExeFile = archStagingDir </> "usr/bin/stack"
    archStagingDir = archDir </> archPackageName
    archPackageFileName = archPackageName <.> tarGzExt
    archPackageName = stackProgName ++ "_" ++ stackVersionStr global ++ "-" ++ "x86_64"
    archDir = releaseDir </> archDistro
    -- Supported distro versions: (numeric version, code name).
    ubuntuVersions =
        [ ("12.04", "precise")
        , ("14.04", "trusty")
        , ("14.10", "utopic")
        , ("15.04", "vivid")
        , ("15.10", "wily") ]
    debianVersions =
        [ ("7", "wheezy")
        , ("8", "jessie") ]
    centosVersions =
        [ ("7", "el7")
        , ("6", "el6") ]
    fedoraVersions =
        [ ("21", "fc21")
        , ("22", "fc22") ]
    ubuntuDistro = "ubuntu"
    debianDistro = "debian"
    centosDistro = "centos"
    fedoraDistro = "fedora"
    archDistro = "arch"
    anyDistroVersion distro = DistroVersion distro "*" "*"
    -- File extensions.
    zipExt = ".zip"
    tarGzExt = tarExt <.> gzExt
    gzExt = ".gz"
    tarExt = ".tar"
    ascExt = ".asc"
    uploadExt = ".upload"
    debExt = ".deb"
    rpmExt = ".rpm"
-- | Upload file to Github release: resolve the target release (by
-- @--github-release-tag@ or the current stack version), expand the release's
-- upload URL template with the file name and optional label, POST the file,
-- and fail unless the API reports the asset as "uploaded".
uploadToGithubRelease :: Global -> FilePath -> Maybe String -> Action ()
uploadToGithubRelease global@Global{..} file mUploadLabel = do
    need [file]
    putNormal $ "Uploading to Github: " ++ file
    GithubRelease{..} <- getGithubRelease
    resp <- liftIO $ callGithubApi global
        [(CI.mk $ S8.pack "Content-Type", defaultMimeLookup (T.pack file))]
        (Just file)
        -- Substitute the "{?name,label}" URI-template suffix of upload_url.
        (replace
            "{?name,label}"
            ("?name=" ++ urlEncodeStr (takeFileName file) ++
             (case mUploadLabel of
                 Nothing -> ""
                 Just uploadLabel -> "&label=" ++ urlEncodeStr uploadLabel))
            relUploadUrl)
    case eitherDecode resp of
        Left e -> error ("Could not parse Github asset upload response (" ++ e ++ "):\n" ++ L8.unpack resp ++ "\n")
        Right (GithubReleaseAsset{..}) ->
            when (assetState /= "uploaded") $
                error ("Invalid asset state after Github asset upload: " ++ assetState)
  where
    urlEncodeStr = S8.unpack . urlEncode True . S8.pack
    -- Find the release whose tag matches, else fail with guidance.
    getGithubRelease = do
        releases <- getGithubReleases
        let tag = fromMaybe ("v" ++ stackVersionStr global) gGithubReleaseTag
        return $ fromMaybe
            (error ("Could not find Github release with tag '" ++ tag ++ "'.\n" ++
                    "Use --" ++ githubReleaseTagOptName ++ " option to specify a different tag."))
            (find (\r -> relTagName r == tag) releases)
    getGithubReleases :: Action [GithubRelease]
    getGithubReleases = do
        resp <- liftIO $ callGithubApi global
            [] Nothing "https://api.github.com/repos/commercialhaskell/stack/releases"
        case eitherDecode resp of
            Left e -> error ("Could not parse Github releases (" ++ e ++ "):\n" ++ L8.unpack resp ++ "\n")
            Right r -> return r
-- | Make a request to the Github API and return the response body.
-- Sends the auth token and a User-Agent header plus any extra @headers@;
-- when @mpostFile@ is given its contents are POSTed as the request body,
-- otherwise a GET is issued.  Status checking is disabled
-- (@checkStatus = ... Nothing@), so non-2xx responses do not throw here —
-- callers detect failure by decoding the body.
callGithubApi :: Global -> RequestHeaders -> Maybe FilePath -> String -> IO L8.ByteString
callGithubApi Global{..} headers mpostFile url = do
    req0 <- parseUrl url
    let authToken =
            fromMaybe
                (error $
                    "Github auth token required.\n" ++
                    "Use " ++ githubAuthTokenEnvVar ++ " environment variable\n" ++
                    "or --" ++ githubAuthTokenOptName ++ " option to specify.")
                gGithubAuthToken
        req1 =
            req0
                { checkStatus = \_ _ _ -> Nothing
                , requestHeaders =
                    [ (CI.mk $ S8.pack "Authorization", S8.pack $ "token " ++ authToken)
                    , (CI.mk $ S8.pack "User-Agent", S8.pack "commercialhaskell/stack") ] ++
                    headers }
    req <- case mpostFile of
        Nothing -> return req1
        Just postFile -> do
            lbs <- L8.readFile postFile
            return $ req1
                { method = S8.pack "POST"
                , requestBody = RequestBodyLBS lbs }
    manager <- newManager tlsManagerSettings
    runResourceT $ do
        res <- http req manager
        responseBody res $$+- CC.sinkLazy
-- | Create a .tar.gz file from files. The paths should be absolute, and will
-- be made relative to the base directory in the tarball.
writeTarGz :: FilePath -> FilePath -> [FilePath] -> Action ()
writeTarGz out baseDir inputFiles = liftIO $ do
    content <- Tar.pack baseDir $ map (dropDirectoryPrefix baseDir) inputFiles
    L8.writeFile out $ GZip.compress $ Tar.write content
-- | Drops a directory prefix from a path. The prefix automatically has a path
-- separator character appended. Fails if the path does not begin with the prefix.
-- Both arguments are normalised with 'toStandard' before comparison.
dropDirectoryPrefix :: FilePath -> FilePath -> FilePath
dropDirectoryPrefix prefix path =
    fromMaybe
        (error ("dropDirectoryPrefix: cannot drop " ++ show prefix ++ " from " ++ show path))
        (stripPrefix (toStandard prefix ++ "/") (toStandard path))
-- | String representation of the stack package version, taken from the
-- parsed @stack.cabal@ stored in the 'Global'.
stackVersionStr :: Global -> String
stackVersionStr =
    display . pkgVersion . package . gStackPackageDescription
-- | Current operating system, from Cabal's 'buildPlatform'.
platformOS :: OS
platformOS =
    let Platform _ os = buildPlatform
    in os
-- | Directory in which to store build and intermediate files.
releaseDir :: FilePath
releaseDir = "_release"

-- | @GITHUB_AUTH_TOKEN@ environment variable name.
githubAuthTokenEnvVar :: String
githubAuthTokenEnvVar = "GITHUB_AUTH_TOKEN"

-- | @--github-auth-token@ command-line option name.
githubAuthTokenOptName :: String
githubAuthTokenOptName = "github-auth-token"

-- | @--github-release-tag@ command-line option name.
githubReleaseTagOptName :: String
githubReleaseTagOptName = "github-release-tag"

-- | @--gpg-key@ command-line option name.
gpgKeyOptName :: String
gpgKeyOptName = "gpg-key"

-- | @--allow-dirty@ command-line option name.
allowDirtyOptName :: String
allowDirtyOptName = "allow-dirty"

-- | @--arch@ command-line option name.
archOptName :: String
archOptName = "arch"

-- | @--binary-variant@ command-line option name.
binaryVariantOptName :: String
binaryVariantOptName = "binary-variant"

-- | @--upload-label@ command-line option name.
uploadLabelOptName :: String
uploadLabelOptName = "upload-label"
-- | Arguments passed to every invocation of 'stack'.
stackArgs :: Global -> [String]
stackArgs global = ["--install-ghc", "--arch=" ++ display (gArch global)]
-- | Name of the 'stack' program (used to locate/invoke the executable).
stackProgName :: FilePath
stackProgName = "stack"
-- | Linux distribution/version combination.
data DistroVersion = DistroVersion
    { dvDistro :: !String   -- ^ distribution name
    , dvVersion :: !String  -- ^ distribution version
    , dvCodeName :: !String -- ^ distribution release code name
    }
-- | A Github release, as returned by the Github API.
data GithubRelease = GithubRelease
    { relUploadUrl :: !String -- ^ @upload_url@ field of the release JSON
    , relTagName :: !String   -- ^ @tag_name@ field of the release JSON
    }
    deriving (Show)
-- Decode a release object from the Github API JSON payload.
instance FromJSON GithubRelease where
    parseJSON = withObject "GithubRelease" $ \o -> do
        uploadUrl <- o .: T.pack "upload_url"
        tagName <- o .: T.pack "tag_name"
        return (GithubRelease uploadUrl tagName)
-- | A Github release asset, as returned by the Github API.
data GithubReleaseAsset = GithubReleaseAsset
    { assetState :: !String -- ^ @state@ field of the asset JSON
    }
    deriving (Show)
-- Decode a release-asset object from the Github API JSON payload.
instance FromJSON GithubReleaseAsset where
    parseJSON = withObject "GithubReleaseAsset" $ \o -> do
        st <- o .: T.pack "state"
        return (GithubReleaseAsset st)
-- | Global values and options shared by all rules.
data Global = Global
    { gStackPackageDescription :: !PackageDescription
    , gGpgKey :: !String
    , gAllowDirty :: !Bool
    , gGithubAuthToken :: !(Maybe String)
    , gGithubReleaseTag :: !(Maybe String)
    , gGitRevCount :: !Int
    , gGitSha :: !String
    , gProjectRoot :: !FilePath
    , gHomeDir :: !FilePath
    , gArch :: !Arch
    , gBinarySuffix :: !String
      -- Made strict like every sibling field; the missing bang (and the
      -- missing space after '::') looked like an oversight.
    , gUploadLabel :: !(Maybe String)
    }
    deriving (Show)
|
mathhun/stack
|
etc/scripts/release.hs
|
Haskell
|
bsd-3-clause
| 28,060
|
-- | Minimal module exporting a single top-level constant.
module B (idd) where

-- | A large fixed 'Int' constant (a CAF) exported for use by another module.
idd :: Int
idd = 100000242418429
|
sdiehl/ghc
|
testsuite/tests/ghci/caf_crash/B.hs
|
Haskell
|
bsd-3-clause
| 56
|
{-@ LIQUID "--notermination" @-}
{-# OPTIONS_GHC -cpp -fglasgow-exts #-}
-- #prune
-- |
-- Module : Data.ByteString.Char8
-- Copyright : (c) Don Stewart 2006
-- License : BSD-style
--
-- Maintainer : dons@cse.unsw.edu.au
-- Stability : experimental
-- Portability : portable
--
-- Manipulate 'ByteString's using 'Char' operations. All Chars will be
-- truncated to 8 bits. It can be expected that these functions will run
-- at identical speeds to their 'Word8' equivalents in "Data.ByteString".
--
-- More specifically these byte strings are taken to be in the
-- subset of Unicode covered by code points 0-255. This covers
-- Unicode Basic Latin, Latin-1 Supplement and C0+C1 Controls.
--
-- See:
--
-- * <http://www.unicode.org/charts/>
--
-- * <http://www.unicode.org/charts/PDF/U0000.pdf>
--
-- * <http://www.unicode.org/charts/PDF/U0080.pdf>
--
-- This module is intended to be imported @qualified@, to avoid name
-- clashes with "Prelude" functions. eg.
--
-- > import qualified Data.ByteString.Char8 as B
--
module Data.ByteString.Char8 (
-- * The @ByteString@ type
ByteString, -- abstract, instances: Eq, Ord, Show, Read, Data, Typeable, Monoid
-- * Introducing and eliminating 'ByteString's
empty, -- :: ByteString
singleton, -- :: Char -> ByteString
pack, -- :: String -> ByteString
unpack, -- :: ByteString -> String
-- * Basic interface
cons, -- :: Char -> ByteString -> ByteString
snoc, -- :: ByteString -> Char -> ByteString
append, -- :: ByteString -> ByteString -> ByteString
head, -- :: ByteString -> Char
uncons, -- :: ByteString -> Maybe (Char, ByteString)
last, -- :: ByteString -> Char
tail, -- :: ByteString -> ByteString
init, -- :: ByteString -> ByteString
null, -- :: ByteString -> Bool
length, -- :: ByteString -> Int
-- * Transforming ByteStrings
map, -- :: (Char -> Char) -> ByteString -> ByteString
reverse, -- :: ByteString -> ByteString
intersperse, -- :: Char -> ByteString -> ByteString
intercalate, -- :: ByteString -> [ByteString] -> ByteString
transpose, -- :: [ByteString] -> [ByteString]
-- * Reducing 'ByteString's (folds)
foldl, -- :: (a -> Char -> a) -> a -> ByteString -> a
foldl', -- :: (a -> Char -> a) -> a -> ByteString -> a
foldl1, -- :: (Char -> Char -> Char) -> ByteString -> Char
foldl1', -- :: (Char -> Char -> Char) -> ByteString -> Char
foldr, -- :: (Char -> a -> a) -> a -> ByteString -> a
foldr', -- :: (Char -> a -> a) -> a -> ByteString -> a
foldr1, -- :: (Char -> Char -> Char) -> ByteString -> Char
foldr1', -- :: (Char -> Char -> Char) -> ByteString -> Char
-- ** Special folds
concat, -- :: [ByteString] -> ByteString
concatMap, -- :: (Char -> ByteString) -> ByteString -> ByteString
any, -- :: (Char -> Bool) -> ByteString -> Bool
all, -- :: (Char -> Bool) -> ByteString -> Bool
maximum, -- :: ByteString -> Char
minimum, -- :: ByteString -> Char
-- * Building ByteStrings
-- ** Scans
scanl, -- :: (Char -> Char -> Char) -> Char -> ByteString -> ByteString
scanl1, -- :: (Char -> Char -> Char) -> ByteString -> ByteString
scanr, -- :: (Char -> Char -> Char) -> Char -> ByteString -> ByteString
scanr1, -- :: (Char -> Char -> Char) -> ByteString -> ByteString
-- ** Accumulating maps
mapAccumL, -- :: (acc -> Char -> (acc, Char)) -> acc -> ByteString -> (acc, ByteString)
mapAccumR, -- :: (acc -> Char -> (acc, Char)) -> acc -> ByteString -> (acc, ByteString)
mapIndexed, -- :: (Int -> Char -> Char) -> ByteString -> ByteString
-- ** Generating and unfolding ByteStrings
replicate, -- :: Int -> Char -> ByteString
unfoldr, -- :: (a -> Maybe (Char, a)) -> a -> ByteString
unfoldrN, -- :: Int -> (a -> Maybe (Char, a)) -> a -> (ByteString, Maybe a)
-- * Substrings
-- ** Breaking strings
take, -- :: Int -> ByteString -> ByteString
drop, -- :: Int -> ByteString -> ByteString
splitAt, -- :: Int -> ByteString -> (ByteString, ByteString)
takeWhile, -- :: (Char -> Bool) -> ByteString -> ByteString
dropWhile, -- :: (Char -> Bool) -> ByteString -> ByteString
span, -- :: (Char -> Bool) -> ByteString -> (ByteString, ByteString)
spanEnd, -- :: (Char -> Bool) -> ByteString -> (ByteString, ByteString)
break, -- :: (Char -> Bool) -> ByteString -> (ByteString, ByteString)
breakEnd, -- :: (Char -> Bool) -> ByteString -> (ByteString, ByteString)
group, -- :: ByteString -> [ByteString]
groupBy, -- :: (Char -> Char -> Bool) -> ByteString -> [ByteString]
inits, -- :: ByteString -> [ByteString]
tails, -- :: ByteString -> [ByteString]
-- ** Breaking into many substrings
split, -- :: Char -> ByteString -> [ByteString]
splitWith, -- :: (Char -> Bool) -> ByteString -> [ByteString]
-- ** Breaking into lines and words
lines, -- :: ByteString -> [ByteString]
words, -- :: ByteString -> [ByteString]
unlines, -- :: [ByteString] -> ByteString
unwords, -- :: [ByteString] -> ByteString
-- * Predicates
isPrefixOf, -- :: ByteString -> ByteString -> Bool
isSuffixOf, -- :: ByteString -> ByteString -> Bool
isInfixOf, -- :: ByteString -> ByteString -> Bool
isSubstringOf, -- :: ByteString -> ByteString -> Bool
-- ** Search for arbitrary substrings
findSubstring, -- :: ByteString -> ByteString -> Maybe Int
findSubstrings, -- :: ByteString -> ByteString -> [Int]
-- * Searching ByteStrings
-- ** Searching by equality
elem, -- :: Char -> ByteString -> Bool
notElem, -- :: Char -> ByteString -> Bool
-- ** Searching with a predicate
find, -- :: (Char -> Bool) -> ByteString -> Maybe Char
filter, -- :: (Char -> Bool) -> ByteString -> ByteString
-- partition -- :: (Char -> Bool) -> ByteString -> (ByteString, ByteString)
-- * Indexing ByteStrings
index, -- :: ByteString -> Int -> Char
elemIndex, -- :: Char -> ByteString -> Maybe Int
elemIndices, -- :: Char -> ByteString -> [Int]
elemIndexEnd, -- :: Char -> ByteString -> Maybe Int
findIndex, -- :: (Char -> Bool) -> ByteString -> Maybe Int
findIndices, -- :: (Char -> Bool) -> ByteString -> [Int]
count, -- :: Char -> ByteString -> Int
-- * Zipping and unzipping ByteStrings
zip, -- :: ByteString -> ByteString -> [(Char,Char)]
zipWith, -- :: (Char -> Char -> c) -> ByteString -> ByteString -> [c]
unzip, -- :: [(Char,Char)] -> (ByteString,ByteString)
-- * Ordered ByteStrings
--LIQUID sort, -- :: ByteString -> ByteString
-- * Reading from ByteStrings
readInt, -- :: ByteString -> Maybe (Int, ByteString)
readInteger, -- :: ByteString -> Maybe (Integer, ByteString)
-- * Low level CString conversions
-- ** Copying ByteStrings
copy, -- :: ByteString -> ByteString
-- ** Packing CStrings and pointers
packCString, -- :: CString -> IO ByteString
packCStringLen, -- :: CStringLen -> IO ByteString
-- ** Using ByteStrings as CStrings
useAsCString, -- :: ByteString -> (CString -> IO a) -> IO a
useAsCStringLen, -- :: ByteString -> (CStringLen -> IO a) -> IO a
-- * I\/O with 'ByteString's
-- ** Standard input and output
getLine, -- :: IO ByteString
getContents, -- :: IO ByteString
putStr, -- :: ByteString -> IO ()
putStrLn, -- :: ByteString -> IO ()
interact, -- :: (ByteString -> ByteString) -> IO ()
-- ** Files
readFile, -- :: FilePath -> IO ByteString
writeFile, -- :: FilePath -> ByteString -> IO ()
appendFile, -- :: FilePath -> ByteString -> IO ()
-- mmapFile, -- :: FilePath -> IO ByteString
-- ** I\/O with Handles
hGetLine, -- :: Handle -> IO ByteString
hGetContents, -- :: Handle -> IO ByteString
hGet, -- :: Handle -> Int -> IO ByteString
hGetNonBlocking, -- :: Handle -> Int -> IO ByteString
hPut, -- :: Handle -> ByteString -> IO ()
hPutStr, -- :: Handle -> ByteString -> IO ()
hPutStrLn, -- :: Handle -> ByteString -> IO ()
-- undocumented deprecated things:
join -- :: ByteString -> [ByteString] -> ByteString
) where
import qualified Prelude as P
import Prelude hiding (reverse,head,tail,last,init,null
,length,map,lines,foldl,foldr,unlines
,concat,any,take,drop,splitAt,takeWhile
,dropWhile,span,break,elem,filter,unwords
,words,maximum,minimum,all,concatMap
,scanl,scanl1,scanr,scanr1
,appendFile,readFile,writeFile
,foldl1,foldr1,replicate
,getContents,getLine,putStr,putStrLn,interact
,zip,zipWith,unzip,notElem)
import qualified Data.ByteString as B
import qualified Data.ByteString.Internal as B
import qualified Data.ByteString.Unsafe as B
-- Listy functions transparently exported
import Data.ByteString (empty,null,length,tail,init,append
,inits,tails,reverse,transpose
,concat,take,drop,splitAt,intercalate
,{-LIQUID sort,-}isPrefixOf,isSuffixOf,isInfixOf,isSubstringOf
,findSubstring,findSubstrings,copy,group
,getLine, getContents, putStr, putStrLn, interact
,hGetContents, hGet, hPut, hPutStr, hPutStrLn
,hGetLine, hGetNonBlocking
,packCString,packCStringLen
,useAsCString,useAsCStringLen
)
import Data.ByteString.Internal (ByteString(PS), c2w, w2c, isSpaceWord8
,inlinePerformIO)
#if defined(__GLASGOW_HASKELL__)
import Data.ByteString.Unsafe (unsafePackAddress) -- for the rule
#endif
import Data.Char ( isSpace )
import qualified Data.List as List (intersperse)
import System.IO (openFile,hClose,hFileSize,IOMode(..))
#ifndef __NHC__
import Control.Exception (bracket)
#else
import IO (bracket)
#endif
import Foreign
#if defined(__GLASGOW_HASKELL__)
import GHC.Base (Char(..),unpackCString#,ord#,int2Word#)
import GHC.Prim (Addr#,writeWord8OffAddr#,plusAddr#)
import GHC.Ptr (Ptr(..))
import GHC.ST (ST(..))
#endif
#define STRICT1(f) f a | a `seq` False = undefined
#define STRICT2(f) f a b | a `seq` b `seq` False = undefined
#define STRICT3(f) f a b c | a `seq` b `seq` c `seq` False = undefined
#define STRICT4(f) f a b c d | a `seq` b `seq` c `seq` d `seq` False = undefined
#if __GLASGOW_HASKELL__ >= 611
import Data.IORef
import GHC.IO.Handle.Internals
import GHC.IO.Handle.Types
import GHC.IO.Buffer
import GHC.IO.BufferedIO as Buffered
import GHC.IO (stToIO, unsafePerformIO)
import Data.Char (ord)
import Foreign.Marshal.Utils (copyBytes)
#else
import System.IO.Error (isEOFError)
import GHC.IOBase
import GHC.Handle
#endif
--LIQUID
import Data.ByteString.Fusion (PairS(..), MaybeS(..))
import System.IO (Handle)
import Foreign.ForeignPtr
import Foreign.Ptr
import Language.Haskell.Liquid.Prelude
------------------------------------------------------------------------
-- | /O(1)/ Build a one-character 'ByteString' from a 'Char'.
singleton :: Char -> ByteString
singleton c = B.singleton (c2w c)
{-# INLINE singleton #-}
-- | /O(n)/ Convert a 'String' into a 'ByteString'
--
-- For applications with large numbers of string literals, pack can be a
-- bottleneck.
pack :: String -> ByteString
#if !defined(__GLASGOW_HASKELL__)
-- Portable path: poke each truncated byte into freshly allocated memory.
pack str = B.unsafeCreate (P.length str) $ \p -> go p str
    where go _ []     = return ()
          go p (x:xs) = poke p (c2w x) >> go (p `plusPtr` 1) xs
#else /* hack away */
-- GHC path: write bytes with raw Addr# primops.
pack str = B.unsafeCreate (P.length str) $ \(Ptr p) -> stToIO (pack_go p str)
    where
        {-@ Decrease pack_go 2 @-}
        -- LiquidHaskell termination hint: the list argument shrinks on
        -- every recursive call.
        pack_go :: Addr# -> [Char] -> ST a ()
        pack_go _ []        = return ()
        pack_go p (C# c:cs) = writeByte p (int2Word# (ord# c)) >> pack_go (p `plusAddr#` 1#) cs

        writeByte p c = ST $ \s# ->
            case writeWord8OffAddr# p 0# c s# of s2# -> (# s2#, () #)
        {-# INLINE writeByte #-}

{-# INLINE [1] pack #-}

-- Rewrite packing of string literals (unpackCString#) straight into a
-- ByteString without building the intermediate [Char].
{-# RULES
"FPS pack/packAddress" forall s .
   pack (unpackCString# s) = inlinePerformIO (B.unsafePackAddress s)
 #-}
#endif
-- | /O(n)/ Convert a 'ByteString' into a 'String'.
unpack :: ByteString -> [Char]
unpack bs = P.map w2c (B.unpack bs)
{-# INLINE unpack #-}
-- | /O(n)/ 'cons' is analogous to (:) for lists, but of different
-- complexity, as it requires a memcpy.
cons :: Char -> ByteString -> ByteString
cons c = B.cons (c2w c)
{-# INLINE cons #-}
-- | /O(n)/ Append a 'Char' to the end of a 'ByteString'.  Like 'cons',
-- this performs a memcpy.
snoc :: ByteString -> Char -> ByteString
snoc bs c = B.snoc bs (c2w c)
{-# INLINE snoc #-}
-- | /O(1)/ Split a 'ByteString' into its head and tail, or 'Nothing'
-- when it is empty.
uncons :: ByteString -> Maybe (Char, ByteString)
uncons bs = fmap (\(w, rest) -> (w2c w, rest)) (B.uncons bs)
{-# INLINE uncons #-}
-- | /O(1)/ Extract the first element of a ByteString, which must be non-empty.
{-@ head :: ByteStringNE -> Char @-}
head :: ByteString -> Char
head bs = w2c (B.head bs)
{-# INLINE head #-}
-- | /O(1)/ Extract the last element of a packed string, which must be non-empty.
{-@ last :: ByteStringNE -> Char @-}
last :: ByteString -> Char
last bs = w2c (B.last bs)
{-# INLINE last #-}
-- | /O(n)/ Apply @f@ to every character of @xs@.
map :: (Char -> Char) -> ByteString -> ByteString
map f = B.map (\w -> c2w (f (w2c w)))
{-# INLINE map #-}
-- | /O(n)/ Intersperse a 'Char' between the characters of a
-- 'ByteString', analogously to 'Data.List.intersperse'.
intersperse :: Char -> ByteString -> ByteString
intersperse c = B.intersperse (c2w c)
{-# INLINE intersperse #-}
-- | Deprecated alias for 'intercalate', kept for backwards compatibility.
join :: ByteString -> [ByteString] -> ByteString
join = intercalate
{-# DEPRECATED join "use intercalate" #-}
-- | 'foldl', applied to a binary operator, a starting value (typically
-- the left-identity of the operator), and a ByteString, reduces the
-- ByteString using the binary operator, from left to right.
foldl :: (a -> Char -> a) -> a -> ByteString -> a
foldl f = B.foldl (\a c -> f a (w2c c))
{-# INLINE foldl #-}

-- | 'foldl\'' is like 'foldl', but strict in the accumulator.
foldl' :: (a -> Char -> a) -> a -> ByteString -> a
foldl' f = B.foldl' (\a c -> f a (w2c c))
{-# INLINE foldl' #-}

-- | 'foldr', applied to a binary operator, a starting value
-- (typically the right-identity of the operator), and a packed string,
-- reduces the packed string using the binary operator, from right to left.
foldr :: (Char -> a -> a) -> a -> ByteString -> a
foldr f = B.foldr (\c a -> f (w2c c) a)
{-# INLINE foldr #-}

-- | 'foldr\'' is a strict variant of 'foldr'.
foldr' :: (Char -> a -> a) -> a -> ByteString -> a
foldr' f = B.foldr' (\c a -> f (w2c c) a)
{-# INLINE foldr' #-}

-- | 'foldl1' is a variant of 'foldl' that has no starting value
-- argument, and thus must be applied to non-empty 'ByteStrings'.
-- The refinement spec enforces the non-emptiness statically.
{-@ foldl1 :: (Char -> Char -> Char) -> ByteStringNE -> Char @-}
foldl1 :: (Char -> Char -> Char) -> ByteString -> Char
foldl1 f ps = w2c (B.foldl1 (\x y -> c2w (f (w2c x) (w2c y))) ps)
{-# INLINE foldl1 #-}

-- | A strict version of 'foldl1'.
{-@ foldl1' :: (Char -> Char -> Char) -> ByteStringNE -> Char @-}
foldl1' :: (Char -> Char -> Char) -> ByteString -> Char
foldl1' f ps = w2c (B.foldl1' (\x y -> c2w (f (w2c x) (w2c y))) ps)
{-# INLINE foldl1' #-}

-- | 'foldr1' is a variant of 'foldr' that has no starting value argument,
-- and thus must be applied to non-empty 'ByteString's.
{-@ foldr1 :: (Char -> Char -> Char) -> ByteStringNE -> Char @-}
foldr1 :: (Char -> Char -> Char) -> ByteString -> Char
foldr1 f ps = w2c (B.foldr1 (\x y -> c2w (f (w2c x) (w2c y))) ps)
{-# INLINE foldr1 #-}

-- | A strict variant of 'foldr1'.
{-@ foldr1' :: (Char -> Char -> Char) -> ByteStringNE -> Char @-}
foldr1' :: (Char -> Char -> Char) -> ByteString -> Char
foldr1' f ps = w2c (B.foldr1' (\x y -> c2w (f (w2c x) (w2c y))) ps)
{-# INLINE foldr1' #-}
-- | Map a function over a 'ByteString' and concatenate the results.
concatMap :: (Char -> ByteString) -> ByteString -> ByteString
concatMap f = B.concatMap (f . w2c)
{-# INLINE concatMap #-}

-- | Applied to a predicate and a ByteString, 'any' determines if
-- any element of the 'ByteString' satisfies the predicate.
any :: (Char -> Bool) -> ByteString -> Bool
any f = B.any (f . w2c)
{-# INLINE any #-}

-- | Applied to a predicate and a 'ByteString', 'all' determines if
-- all elements of the 'ByteString' satisfy the predicate.
all :: (Char -> Bool) -> ByteString -> Bool
all f = B.all (f . w2c)
{-# INLINE all #-}

-- | 'maximum' returns the maximum value from a (non-empty) 'ByteString'.
{-@ maximum :: ByteStringNE -> Char @-}
maximum :: ByteString -> Char
maximum = w2c . B.maximum
{-# INLINE maximum #-}

-- | 'minimum' returns the minimum value from a (non-empty) 'ByteString'.
{-@ minimum :: ByteStringNE -> Char @-}
minimum :: ByteString -> Char
minimum = w2c . B.minimum
{-# INLINE minimum #-}
-- | /O(n)/ Map over the characters together with their position index.
mapIndexed :: (Int -> Char -> Char) -> ByteString -> ByteString
mapIndexed f = B.mapIndexed (\i w -> c2w (f i (w2c w)))
{-# INLINE mapIndexed #-}
-- | The 'mapAccumL' function behaves like a combination of 'map' and
-- 'foldl'; it applies a function to each element of a ByteString,
-- passing an accumulating parameter from left to right, and returning a
-- final value of this accumulator together with the new ByteString.
mapAccumL :: (acc -> Char -> (acc, Char)) -> acc -> ByteString -> (acc, ByteString)
mapAccumL f = B.mapAccumL step
    where step s w = case f s (w2c w) of (s', c) -> (s', c2w c)
-- | The 'mapAccumR' function behaves like a combination of 'map' and
-- 'foldr'; it applies a function to each element of a ByteString,
-- passing an accumulating parameter from right to left, and returning a
-- final value of this accumulator together with the new ByteString.
mapAccumR :: (acc -> Char -> (acc, Char)) -> acc -> ByteString -> (acc, ByteString)
mapAccumR f = B.mapAccumR step
    where step s w = case f s (w2c w) of (s', c) -> (s', c2w c)
-- | 'scanl' is similar to 'foldl', but returns a list of successive
-- reduced values from the left:
--
-- > scanl f z [x1, x2, ...] == [z, z `f` x1, (z `f` x1) `f` x2, ...]
--
-- Note that
--
-- > last (scanl f z xs) == foldl f z xs.
scanl :: (Char -> Char -> Char) -> Char -> ByteString -> ByteString
scanl f z = B.scanl (\x y -> c2w (w2c x `f` w2c y)) (c2w z)
-- | 'scanl1' is a variant of 'scanl' that has no starting value argument:
--
-- > scanl1 f [x1, x2, ...] == [x1, x1 `f` x2, ...]
--
-- The refinement spec requires a non-empty input.
{-@ scanl1 :: (Char -> Char -> Char) -> ByteStringNE -> ByteString @-}
scanl1 :: (Char -> Char -> Char) -> ByteString -> ByteString
scanl1 f = B.scanl1 (\a b -> c2w (f (w2c a) (w2c b)))

-- | 'scanr' is the right-to-left dual of 'scanl'.
scanr :: (Char -> Char -> Char) -> Char -> ByteString -> ByteString
scanr f z = B.scanr (\a b -> c2w (f (w2c a) (w2c b))) (c2w z)

-- | 'scanr1' is a variant of 'scanr' that has no starting value argument.
{-@ scanr1 :: (Char -> Char -> Char) -> ByteStringNE -> ByteString @-}
scanr1 :: (Char -> Char -> Char) -> ByteString -> ByteString
scanr1 f = B.scanr1 (\a b -> c2w (f (w2c a) (w2c b)))
-- | /O(n)/ 'replicate' @n x@ is a ByteString of length @n@ with @x@
-- the value of every element. The following holds:
--
-- > replicate w c = unfoldr w (\u -> Just (u,u)) c
--
-- This implementation uses @memset(3)@.
{-@ replicate :: n:Nat -> Char -> {v:ByteString | (bLength v) = (if n > 0 then n else 0)} @-}
replicate :: Int -> Char -> ByteString
replicate n c = B.replicate n (c2w c)
{-# INLINE replicate #-}
-- | /O(n)/, where /n/ is the length of the result. The 'unfoldr'
-- function is analogous to the List \'unfoldr\'. 'unfoldr' builds a
-- ByteString from a seed value. The function takes the element and
-- returns 'Nothing' if it is done producing the ByteString or returns
-- 'Just' @(a,b)@, in which case, @a@ is the next character in the string,
-- and @b@ is the seed value for further production.
--
-- Examples:
--
-- > unfoldr (\x -> if x <= '9' then Just (x, succ x) else Nothing) '0' == "0123456789"
unfoldr :: (a -> Maybe (Char, a)) -> a -> ByteString
unfoldr f = B.unfoldr step
    where step seed = case f seed of
                          Nothing      -> Nothing
                          Just (c, s') -> Just (c2w c, s')
-- | /O(n)/ Like 'unfoldr', 'unfoldrN' builds a ByteString from a seed
-- value. However, the length of the result is limited by the first
-- argument to 'unfoldrN'. This function is more efficient than 'unfoldr'
-- when the maximum length of the result is known.
--
-- The following equation relates 'unfoldrN' and 'unfoldr':
--
-- > unfoldrN n f s == take n (unfoldr f s)
{-@ unfoldrN :: i:Nat -> (a -> Maybe (Char, a)) -> a -> ({v:ByteString | (bLength v) <= i}, Maybe a) @-}
unfoldrN :: Int -> (a -> Maybe (Char, a)) -> a -> (ByteString, Maybe a)
unfoldrN n f = B.unfoldrN n step
    where step seed = case f seed of
                          Nothing      -> Nothing
                          Just (c, s') -> Just (c2w c, s')
{-# INLINE unfoldrN #-}
-- | 'takeWhile', applied to a predicate @p@ and a ByteString @xs@,
-- returns the longest prefix (possibly empty) of @xs@ of elements that
-- satisfy @p@.
takeWhile :: (Char -> Bool) -> ByteString -> ByteString
takeWhile f = B.takeWhile (f . w2c)
{-# INLINE takeWhile #-}

-- | 'dropWhile' @p xs@ returns the suffix remaining after 'takeWhile' @p xs@.
dropWhile :: (Char -> Bool) -> ByteString -> ByteString
dropWhile f = B.dropWhile (f . w2c)
#if defined(__GLASGOW_HASKELL__)
-- Phase-controlled inline: gives the "dropWhile isSpace -> dropSpace"
-- rewrite rule below a chance to fire first.
{-# INLINE [1] dropWhile #-}
#endif

-- | 'break' @p@ is equivalent to @'span' ('not' . p)@.
break :: (Char -> Bool) -> ByteString -> (ByteString, ByteString)
break f = B.break (f . w2c)
#if defined(__GLASGOW_HASKELL__)
-- Phase-controlled inline: gives the "break isSpace -> breakSpace"
-- rewrite rule below a chance to fire first.
{-# INLINE [1] break #-}
#endif

-- | 'span' @p xs@ breaks the ByteString into two segments. It is
-- equivalent to @('takeWhile' p xs, 'dropWhile' p xs)@
span :: (Char -> Bool) -> ByteString -> (ByteString, ByteString)
span f = B.span (f . w2c)
{-# INLINE span #-}

-- | 'spanEnd' behaves like 'span' but from the end of the 'ByteString'.
-- We have
--
-- > spanEnd (not.isSpace) "x y z" == ("x y ","z")
--
-- and
--
-- > spanEnd (not . isSpace) ps
-- >    ==
-- > let (x,y) = span (not.isSpace) (reverse ps) in (reverse y, reverse x)
--
spanEnd :: (Char -> Bool) -> ByteString -> (ByteString, ByteString)
spanEnd f = B.spanEnd (f . w2c)
{-# INLINE spanEnd #-}

-- | 'breakEnd' behaves like 'break' but from the end of the 'ByteString'
--
-- breakEnd p == spanEnd (not.p)
breakEnd :: (Char -> Bool) -> ByteString -> (ByteString, ByteString)
breakEnd f = B.breakEnd (f . w2c)
{-# INLINE breakEnd #-}
{-
-- | 'breakChar' breaks its ByteString argument at the first occurrence
-- of the specified Char. It is more efficient than 'break' as it is
-- implemented with @memchr(3)@. I.e.
--
-- > break (=='c') "abcd" == breakChar 'c' "abcd"
--
breakChar :: Char -> ByteString -> (ByteString, ByteString)
breakChar = B.breakByte . c2w
{-# INLINE breakChar #-}
-- | 'spanChar' breaks its ByteString argument at the first
-- occurrence of a Char other than its argument. It is more efficient
-- than 'span (==)'
--
-- > span (=='c') "abcd" == spanByte 'c' "abcd"
--
spanChar :: Char -> ByteString -> (ByteString, ByteString)
spanChar = B.spanByte . c2w
{-# INLINE spanChar #-}
-}
-- | /O(n)/ Break a 'ByteString' into pieces separated by the byte
-- argument, consuming the delimiter. I.e.
--
-- > split '\n' "a\nb\nd\ne" == ["a","b","d","e"]
-- > split 'a'  "aXaXaXa"    == ["","X","X","X",""]
-- > split 'x'  "x"          == ["",""]
--
-- and
--
-- > intercalate [c] . split c == id
-- > split == splitWith . (==)
--
-- As for all splitting functions in this library, this function does
-- not copy the substrings, it just constructs new 'ByteStrings' that
-- are slices of the original.
--
{-@ split :: Char -> b:ByteStringNE -> (ByteStringSplit b) @-}
split :: Char -> ByteString -> [ByteString]
split c = B.split (c2w c)
{-# INLINE split #-}
-- | /O(n)/ Splits a 'ByteString' into components delimited by
-- separators, where the predicate returns True for a separator element.
-- The resulting components do not contain the separators. Two adjacent
-- separators result in an empty component in the output. eg.
--
-- > splitWith (=='a') "aabbaca" == ["","","bb","c",""]
--
{-@ splitWith :: (Char -> Bool) -> b:ByteStringNE -> (ByteStringSplit b) @-}
splitWith :: (Char -> Bool) -> ByteString -> [ByteString]
splitWith p = B.splitWith (\w -> p (w2c w))
{-# INLINE splitWith #-}
-- the inline makes a big difference here.
{-
-- | Like 'splitWith', except that sequences of adjacent separators are
-- treated as a single separator. eg.
--
-- > tokens (=='a') "aabbaca" == ["bb","c"]
--
tokens :: (Char -> Bool) -> ByteString -> [ByteString]
tokens f = B.tokens (f . w2c)
{-# INLINE tokens #-}
-}
-- | The 'groupBy' function is the non-overloaded version of 'group'.
groupBy :: (Char -> Char -> Bool) -> ByteString -> [ByteString]
groupBy eq = B.groupBy (\x y -> eq (w2c x) (w2c y))
-- | /O(1)/ 'ByteString' index (subscript) operator, starting from 0.
-- The refinement spec statically requires the index to be in bounds.
{-@ index :: b:ByteString -> {v:Nat | v < (bLength b)} -> Char @-}
index :: ByteString -> Int -> Char
--LIQUID index = (w2c .) . B.index
index b i = w2c $ B.index b i
{-# INLINE index #-}
-- | /O(n)/ The 'elemIndex' function returns the index of the first
-- element in the given 'ByteString' which is equal (by memchr) to the
-- query element, or 'Nothing' if there is no such element.
{-@ elemIndex :: Char -> b:ByteString -> Maybe {v:Nat | v < (bLength b)} @-}
elemIndex :: Char -> ByteString -> Maybe Int
elemIndex = B.elemIndex . c2w
{-# INLINE elemIndex #-}

-- | /O(n)/ The 'elemIndexEnd' function returns the last index of the
-- element in the given 'ByteString' which is equal to the query
-- element, or 'Nothing' if there is no such element. The following
-- holds:
--
-- > elemIndexEnd c xs ==
-- > (-) (length xs - 1) `fmap` elemIndex c (reverse xs)
--
elemIndexEnd :: Char -> ByteString -> Maybe Int
elemIndexEnd = B.elemIndexEnd . c2w
{-# INLINE elemIndexEnd #-}

-- | /O(n)/ The 'elemIndices' function extends 'elemIndex', by returning
-- the indices of all elements equal to the query element, in ascending order.
elemIndices :: Char -> ByteString -> [Int]
elemIndices = B.elemIndices . c2w
{-# INLINE elemIndices #-}

-- | The 'findIndex' function takes a predicate and a 'ByteString' and
-- returns the index of the first element in the ByteString satisfying the predicate.
findIndex :: (Char -> Bool) -> ByteString -> Maybe Int
findIndex f = B.findIndex (f . w2c)
{-# INLINE findIndex #-}

-- | The 'findIndices' function extends 'findIndex', by returning the
-- indices of all elements satisfying the predicate, in ascending order.
findIndices :: (Char -> Bool) -> ByteString -> [Int]
findIndices f = B.findIndices (f . w2c)
-- | count returns the number of times its argument appears in the ByteString
--
-- > count = length . elemIndices
--
-- Also
--
-- > count '\n' == length . lines
--
-- But more efficiently than using length on the intermediate list.
count :: Char -> ByteString -> Int
count ch = B.count (c2w ch)
-- | /O(n)/ 'elem' is the 'ByteString' membership predicate. This
-- implementation uses @memchr(3)@.
elem :: Char -> ByteString -> Bool
elem ch bs = B.elem (c2w ch) bs
{-# INLINE elem #-}
-- | /O(n)/ 'notElem' is the inverse of 'elem'.
notElem :: Char -> ByteString -> Bool
notElem ch bs = B.notElem (c2w ch) bs
{-# INLINE notElem #-}
-- | /O(n)/ 'filter', applied to a predicate and a ByteString,
-- returns a ByteString containing those characters that satisfy the
-- predicate.
filter :: (Char -> Bool) -> ByteString -> ByteString
filter p = B.filter (\w -> p (w2c w))
{-# INLINE [1] filter #-}
-- | /O(n)/ and /O(n\/c) space/ A first order equivalent of /filter .
-- (==)/, for the common case of filtering a single Char. It is more
-- efficient to use /filterChar/ in this case.
--
-- > filterByte == filter . (==)
--
-- filterChar is around 10x faster, and uses much less space, than its
-- filter equivalent
--
-- Implementation note: keeping only bytes equal to @c@ is the same as
-- @c@ repeated as many times as it occurs, hence replicate/count.
filterChar :: Char -> ByteString -> ByteString
filterChar c ps = replicate (count c ps) c
{-# INLINE filterChar #-}

-- Specialise both spellings of the equality section to filterChar.
-- NOTE(review): the two rules share the same name; GHC accepts this but
-- distinct names would make -ddump-rule-firings output clearer.
{-# RULES
"FPS specialise filter (== x)" forall x.
filter ((==) x) = filterChar x
#-}

{-# RULES
"FPS specialise filter (== x)" forall x.
filter (== x) = filterChar x
#-}
-- | /O(n)/ The 'find' function takes a predicate and a ByteString,
-- and returns the first element matching the predicate, or 'Nothing'
-- if there is no such element.
find :: (Char -> Bool) -> ByteString -> Maybe Char
find p bs = fmap w2c (B.find (p . w2c) bs)
{-# INLINE find #-}
{-
-- | /O(n)/ A first order equivalent of /filter . (==)/, for the common
-- case of filtering a single Char. It is more efficient to use
-- filterChar in this case.
--
-- > filterChar == filter . (==)
--
-- filterChar is around 10x faster, and uses much less space, than its
-- filter equivalent
--
filterChar :: Char -> ByteString -> ByteString
filterChar c = B.filterByte (c2w c)
{-# INLINE filterChar #-}
-- | /O(n)/ A first order equivalent of /filter . (\/=)/, for the common
-- case of filtering a single Char out of a list. It is more efficient
-- to use /filterNotChar/ in this case.
--
-- > filterNotChar == filter . (/=)
--
-- filterNotChar is around 3x faster, and uses much less space, than its
-- filter equivalent
--
filterNotChar :: Char -> ByteString -> ByteString
filterNotChar c = B.filterNotByte (c2w c)
{-# INLINE filterNotChar #-}
-}
-- | /O(n)/ 'zip' takes two ByteStrings and returns a list of
-- corresponding pairs of Chars. If one input ByteString is short,
-- excess elements of the longer ByteString are discarded. This is
-- equivalent to a pair of 'unpack' operations, and so space
-- usage may be large for multi-megabyte ByteStrings
{-@ zip :: ByteString -> ByteString -> [(Char,Char)] @-}
zip :: ByteString -> ByteString -> [(Char,Char)]
zip ps qs
    | B.null ps || B.null qs = []
    -- The unsafe head/tail are justified by the null checks above.
    | otherwise = (unsafeHead ps, unsafeHead qs) : zip (B.unsafeTail ps) (B.unsafeTail qs)
-- | 'zipWith' generalises 'zip' by zipping with the function given as
-- the first argument, instead of a tupling function. For example,
-- @'zipWith' (+)@ is applied to two ByteStrings to produce the list
-- of corresponding sums.
zipWith :: (Char -> Char -> a) -> ByteString -> ByteString -> [a]
zipWith f = B.zipWith (\x y -> f (w2c x) (w2c y))
-- | 'unzip' transforms a list of pairs of Chars into a pair of
-- ByteStrings. Note that this performs two 'pack' operations.
unzip :: [(Char,Char)] -> (ByteString,ByteString)
unzip pairs = (pack (P.map fst pairs), pack (P.map snd pairs))
{-# INLINE unzip #-}
-- | A variety of 'head' for non-empty ByteStrings. 'unsafeHead' omits
-- the check for the empty case, which is good for performance, but
-- there is an obligation on the programmer to provide a proof that the
-- ByteString is non-empty.
{-@ unsafeHead :: ByteStringNE -> Char @-}
unsafeHead :: ByteString -> Char
unsafeHead bs = w2c (B.unsafeHead bs)
{-# INLINE unsafeHead #-}
-- ---------------------------------------------------------------------
-- Things that depend on the encoding

-- Rewrite the generic 'break isSpace' to the specialised 'breakSpace'.
{-# RULES
"FPS specialise break -> breakSpace"
break isSpace = breakSpace
#-}

-- | 'breakSpace' returns the pair of ByteStrings when the argument is
-- broken at the first whitespace byte. I.e.
--
-- > break isSpace == breakSpace
--
breakSpace :: ByteString -> (ByteString,ByteString)
breakSpace (PS x s l) = inlinePerformIO $ withForeignPtr x $ \p -> do
    -- Index of the first whitespace byte, or l when there is none.
    i <- firstspace (p `plusPtr` s) 0 l
    return $! case () of {_
        | i == 0    -> (empty, PS x s l)             -- whitespace at the front
        | i == l    -> (PS x s l, empty)             -- no whitespace at all
        | otherwise -> (PS x s i, PS x (s+i) (l-i))  -- split without copying
    }
{-# INLINE breakSpace #-}
-- | Index of the first whitespace byte at or after position @n@ within the
-- first @m@ bytes of @ptr@; returns @m@ when no whitespace is found.
firstspace :: Ptr Word8 -> Int -> Int -> IO Int
STRICT3(firstspace)
--LIQUID GHOST firstspace ptr n m
--LIQUID GHOST | n >= m = return n
--LIQUID GHOST | otherwise = do w <- peekByteOff ptr n
--LIQUID GHOST if (not $ isSpaceWord8 w) then firstspace ptr (n+1) m else return n
-- The extra first argument @d@ of 'go' is a LiquidHaskell termination
-- witness (it decreases on every call); it is otherwise unused.
firstspace ptr n m = go m ptr n m
{- LIQUID WITNESS -}
    where go (d :: Int) ptr n m
              | n >= m = return n
              | otherwise = do w <- peekByteOff ptr n
                               if (not $ isSpaceWord8 w) then go (d-1) ptr (n+1) m else return n
-- Rewrite the generic 'dropWhile isSpace' to the specialised 'dropSpace'.
{-# RULES
"FPS specialise dropWhile isSpace -> dropSpace"
dropWhile isSpace = dropSpace
#-}

-- | 'dropSpace' efficiently returns the 'ByteString' argument with
-- white space Chars removed from the front. It is more efficient than
-- calling dropWhile for removing whitespace. I.e.
--
-- > dropWhile isSpace == dropSpace
--
dropSpace :: ByteString -> ByteString
dropSpace (PS x s l) = inlinePerformIO $ withForeignPtr x $ \p -> do
    -- Index of the first non-space byte, or l when all bytes are spaces.
    i <- firstnonspace (p `plusPtr` s) 0 l
    return $! if i == l then empty else PS x (s+i) (l-i)
{-# INLINE dropSpace #-}
-- | Index of the first non-whitespace byte at or after position @n@ within
-- the first @m@ bytes of @ptr@; returns @m@ when every byte is whitespace.
firstnonspace :: Ptr Word8 -> Int -> Int -> IO Int
STRICT3(firstnonspace)
--LIQUID GHOST firstnonspace ptr n m
--LIQUID GHOST | n >= m = return n
--LIQUID GHOST | otherwise = do w <- peekElemOff ptr n
--LIQUID GHOST if isSpaceWord8 w then firstnonspace ptr (n+1) m else return n
-- The extra first argument @d@ of 'go' is a LiquidHaskell termination
-- witness (it decreases on every call); it is otherwise unused.
firstnonspace ptr n m = go m ptr n m
{- LIQUID WITNESS -}
    where go (d :: Int) ptr n m
              | n >= m = return n
              | otherwise = do w <- peekElemOff ptr n
                               if isSpaceWord8 w then go (d-1) ptr (n+1) m else return n
{-
-- | 'dropSpaceEnd' efficiently returns the 'ByteString' argument with
-- white space removed from the end. I.e.
--
-- > reverse . (dropWhile isSpace) . reverse == dropSpaceEnd
--
-- but it is more efficient than using multiple reverses.
--
dropSpaceEnd :: ByteString -> ByteString
dropSpaceEnd (PS x s l) = inlinePerformIO $ withForeignPtr x $ \p -> do
i <- lastnonspace (p `plusPtr` s) (l-1)
return $! if i == (-1) then empty else PS x s (i+1)
{-# INLINE dropSpaceEnd #-}
lastnonspace :: Ptr Word8 -> Int -> IO Int
STRICT2(lastnonspace)
lastnonspace ptr n
| n < 0 = return n
| otherwise = do w <- peekElemOff ptr n
if isSpaceWord8 w then lastnonspace ptr (n-1) else return n
-}
-- | 'lines' breaks a ByteString up into a list of ByteStrings at
-- newline Chars. The resulting strings do not contain newlines.
--
{-@ lines :: ByteString -> [ByteString] @-}
lines :: ByteString -> [ByteString]
lines ps
    | null ps = []
    | otherwise = case search ps of
        Nothing -> [ps]                              -- no newline: one line
        Just n -> take n ps : lines (drop (n+1) ps)  -- split, dropping the '\n'
    where search = elemIndex '\n'
{-# INLINE lines #-}
{-
-- Just as fast, but more complex. Should be much faster, I thought.
lines :: ByteString -> [ByteString]
lines (PS _ _ 0) = []
lines (PS x s l) = inlinePerformIO $ withForeignPtr x $ \p -> do
let ptr = p `plusPtr` s
STRICT1(loop)
loop n = do
let q = memchr (ptr `plusPtr` n) 0x0a (fromIntegral (l-n))
if q == nullPtr
then return [PS x (s+n) (l-n)]
else do let i = q `minusPtr` ptr
ls <- loop (i+1)
return $! PS x (s+n) (i-n) : ls
loop 0
-}
-- | 'unlines' is an inverse operation to 'lines'. It joins lines,
-- after appending a terminating newline to each.
-- | Join lines, appending a terminating newline to each.
unlines :: [ByteString] -> ByteString
unlines [] = empty
unlines ss = (concat $ List.intersperse nl ss) `append` nl -- half as much space
    where nl = singleton '\n'
-- | 'words' breaks a ByteString up into a list of words, which
-- were delimited by Chars representing white space.
--LIQUID FIXME: splitWith requires non-empty bytestrings for now..
{-@ words :: ByteStringNE -> [ByteString] @-}
-- | Split at white space; empty fragments between adjacent separators
-- are discarded by the filter.
words :: ByteString -> [ByteString]
words = P.filter (not . B.null) . B.splitWith isSpaceWord8
{-# INLINE words #-}
-- | The 'unwords' function is analogous to the 'unlines' function, on words.
-- | Join words with a single space between each pair.
unwords :: [ByteString] -> ByteString
unwords = intercalate (singleton ' ')
{-# INLINE unwords #-}
-- ---------------------------------------------------------------------
-- Reading from ByteStrings
-- | readInt reads an Int from the beginning of the ByteString. If there is no
-- integer at the beginning of the string, it returns Nothing, otherwise
-- it just returns the int read, and the rest of the string.
-- | Parse an optionally signed decimal 'Int' from the front of the
-- string.  @i@ counts digits consumed, @n@ accumulates the magnitude.
-- NOTE(review): overflow on long digit runs wraps silently via Int
-- arithmetic — behavior inherited from the original.
readInt :: ByteString -> Maybe (Int, ByteString)
readInt as
    | null as   = Nothing
    | otherwise =
        case unsafeHead as of
            '-' -> loop True 0 0 (B.unsafeTail as)
            '+' -> loop False 0 0 (B.unsafeTail as)
            _   -> loop False 0 0 as
    where loop :: Bool -> Int -> Int -> ByteString -> Maybe (Int, ByteString)
          {-@ Decrease loop 4 @-}
          STRICT4(loop)
          loop neg i n ps
              | null ps   = end neg i n ps
              | otherwise =
                  case B.unsafeHead ps of
                      w | w >= 0x30
                       && w <= 0x39 -> loop neg (i+1)
                                            (n * 10 + (fromIntegral w - 0x30))
                                            (B.unsafeTail ps)
                        | otherwise -> end neg i n ps
          -- zero digits consumed: a bare sign is not a number
          end _    0 _ _  = Nothing
          end True _ n ps = Just (negate n, ps)
          end _    _ n ps = Just (n, ps)
-- | readInteger reads an Integer from the beginning of the ByteString. If
-- there is no integer at the beginning of the string, it returns Nothing,
-- otherwise it just returns the int read, and the rest of the string.
readInteger :: ByteString -> Maybe (Integer, ByteString)
readInteger as
| null as = Nothing
| otherwise =
case unsafeHead as of
'-' -> first (B.unsafeTail as) >>= \(n, bs) -> return (-n, bs)
'+' -> first (B.unsafeTail as)
_ -> first as
where first ps | null ps = Nothing
| otherwise =
case B.unsafeHead ps of
w | w >= 0x30 && w <= 0x39 -> Just $
loop 1 (fromIntegral w - 0x30) [] (B.unsafeTail ps)
| otherwise -> Nothing
loop :: Int -> Int -> [Integer]
-> ByteString -> (Integer, ByteString)
{-@ Decrease loop 4 @-}
STRICT4(loop)
loop d acc ns ps
| null ps = combine d acc ns empty
| otherwise =
case B.unsafeHead ps of
w | w >= 0x30 && w <= 0x39 ->
if d == 9 then loop 1 (fromIntegral w - 0x30)
(toInteger acc : ns)
(B.unsafeTail ps)
else loop (d+1)
(10*acc + (fromIntegral w - 0x30))
ns (B.unsafeTail ps)
| otherwise -> combine d acc ns ps
combine _ acc [] ps = (toInteger acc, ps)
combine d acc ns ps =
((10^d * combine1 1000000000 ns + toInteger acc), ps)
--LIQUID combine1 _ [n] = n
--LIQUID combine1 b ns = combine1 (b*b) $ combine2 b ns
--LIQUID
--LIQUID combine2 b (n:m:ns) = let t = m*b + n in t `seq` (t : combine2 b ns)
--LIQUID combine2 _ ns = ns
{-@ combine1 :: Integer -> x:{v:[Integer] | (len v) > 0}
-> Integer
@-}
{-@ Decrease combine1 2 @-}
combine1 :: Integer -> [Integer] -> Integer
combine1 _ [] = error "impossible"
combine1 _ [n] = n
combine1 b ns = combine1 (b*b) $ combine2 b ns
{-@ combine2 :: Integer -> x:[Integer]
-> {v:[Integer] | if len x > 1
then (len v < len x && len v > 0)
else (len v <= len x)}
@-}
{-@ Decrease combine2 2 @-}
combine2 :: Integer -> [Integer] -> [Integer]
combine2 b (n:m:ns) = let t = m*b + n in t `seq` (t : combine2 b ns)
combine2 _ ns = ns
-- | Read an entire file strictly into a 'ByteString'. This is far more
-- efficient than reading the characters into a 'String' and then using
-- 'pack'. It also may be more efficient than opening the file and
-- reading it using hGet.
-- | Read an entire file strictly; 'bracket' guarantees the handle is
-- closed even on exceptions.
-- NOTE(review): the read size comes from 'hFileSize' before reading, so
-- a file that grows between the two calls is truncated to that size.
readFile :: FilePath -> IO ByteString
readFile f = bracket (openFile f ReadMode) hClose
    (\h -> hFileSize h >>= hGet h . fromIntegral)
-- | Write a 'ByteString' to a file.
-- | Write a 'ByteString' to a file, truncating any previous contents;
-- the handle is closed via 'bracket' even if 'hPut' throws.
writeFile :: FilePath -> ByteString -> IO ()
writeFile f txt = bracket (openFile f WriteMode) hClose
    (\h -> hPut h txt)
-- | Append a 'ByteString' to a file.
-- | Append a 'ByteString' to the end of a file; handle closed via
-- 'bracket' even on exceptions.
appendFile :: FilePath -> ByteString -> IO ()
appendFile f txt = bracket (openFile f AppendMode) hClose
    (\h -> hPut h txt)
|
mightymoose/liquidhaskell
|
benchmarks/bytestring-0.9.2.1/Data/ByteString/Char8.hs
|
Haskell
|
bsd-3-clause
| 43,150
|
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, BangPatterns, NoImplicitPrelude,
NondecreasingIndentation, MagicHash #-}
module GHC.IO.Encoding.CodePage(
#if defined(mingw32_HOST_OS)
codePageEncoding, mkCodePageEncoding,
localeEncoding, mkLocaleEncoding
#endif
) where
#if !defined(mingw32_HOST_OS)
import GHC.Base () -- Build ordering
#else
import GHC.Base
import GHC.Show
import GHC.Num
import GHC.Enum
import GHC.Word
import GHC.IO (unsafePerformIO)
import GHC.IO.Encoding.Failure
import GHC.IO.Encoding.Types
import GHC.IO.Buffer
import Data.Bits
import Data.Maybe
import Data.OldList (lookup)
import qualified GHC.IO.Encoding.CodePage.API as API
import GHC.IO.Encoding.CodePage.Table
import GHC.IO.Encoding.UTF8 (mkUTF8)
import GHC.IO.Encoding.UTF16 (mkUTF16le, mkUTF16be)
import GHC.IO.Encoding.UTF32 (mkUTF32le, mkUTF32be)
#if defined(mingw32_HOST_OS)
# if defined(i386_HOST_ARCH)
# define WINDOWS_CCONV stdcall
# elif defined(x86_64_HOST_ARCH)
# define WINDOWS_CCONV ccall
# else
# error Unknown mingw32 arch
# endif
#endif
-- note CodePage = UInt which might not work on Win64. But the Win32 package
-- also has this issue.
-- | Code page to use for I/O: the console's own code page when set
-- (non-zero), otherwise the system ANSI code page.
getCurrentCodePage :: IO Word32
getCurrentCodePage = do
    conCP <- getConsoleCP
    if conCP > 0
        then return conCP
        else getACP
-- Since the Win32 package depends on base, we have to import these ourselves:
foreign import WINDOWS_CCONV unsafe "windows.h GetConsoleCP"
getConsoleCP :: IO Word32
foreign import WINDOWS_CCONV unsafe "windows.h GetACP"
getACP :: IO Word32
-- The code page is queried once and cached for the process lifetime
-- (NOINLINE keeps the unsafePerformIO CAF shared); a later change of
-- console code page is not observed.
{-# NOINLINE currentCodePage #-}
currentCodePage :: Word32
currentCodePage = unsafePerformIO getCurrentCodePage

-- | Encoding for the current code page, erroring on coding failures.
localeEncoding :: TextEncoding
localeEncoding = mkLocaleEncoding ErrorOnCodingFailure

-- | Encoding for the current code page with the given failure policy.
mkLocaleEncoding :: CodingFailureMode -> TextEncoding
mkLocaleEncoding cfm = mkCodePageEncoding cfm currentCodePage

-- | Encoding for an explicit code page, erroring on coding failures.
codePageEncoding :: Word32 -> TextEncoding
codePageEncoding = mkCodePageEncoding ErrorOnCodingFailure
-- | Build a 'TextEncoding' for a Windows code page.  The Unicode code
-- pages map to the native UTF codecs; code pages found in 'codePageMap'
-- use the static table-driven codec; anything else falls back to the
-- codec in "GHC.IO.Encoding.CodePage.API".
mkCodePageEncoding :: CodingFailureMode -> Word32 -> TextEncoding
mkCodePageEncoding cfm 65001 = mkUTF8 cfm
mkCodePageEncoding cfm 1200 = mkUTF16le cfm
mkCodePageEncoding cfm 1201 = mkUTF16be cfm
mkCodePageEncoding cfm 12000 = mkUTF32le cfm
mkCodePageEncoding cfm 12001 = mkUTF32be cfm
mkCodePageEncoding cfm cp = maybe (API.mkCodePageEncoding cfm cp) (buildEncoding cfm cp) (lookup cp codePageMap)
-- | Assemble a table-driven 'TextEncoding' for a single-byte code page
-- from its decoder and encoder lookup arrays.
buildEncoding :: CodingFailureMode -> Word32 -> CodePageArrays -> TextEncoding
buildEncoding cfm cp SingleByteCP {decoderArray = dec, encoderArray = enc}
  = TextEncoding {
        textEncodingName = "CP" ++ show cp
      , mkTextDecoder = return $ simpleCodec (recoverDecode cfm) $ decodeFromSingleByte dec
      , mkTextEncoder = return $ simpleCodec (recoverEncode cfm) $ encodeToSingleByte enc
    }
-- | Wrap a coding step and a recovery action into a stateless
-- 'BufferCodec' (unit state, no-op close).
simpleCodec :: (Buffer from -> Buffer to -> IO (Buffer from, Buffer to))
            -> (Buffer from -> Buffer to -> IO (CodingProgress, Buffer from, Buffer to))
            -> BufferCodec from to ()
simpleCodec r f = BufferCodec {
    encode = f,
    recover = r,
    close = return (),
    getState = return (),
    setState = return
  }
-- | Decode bytes to characters by direct table lookup.  A '\0' table
-- entry for a non-zero input byte marks a byte with no mapping in this
-- code page.
decodeFromSingleByte :: ConvArray Char -> DecodeBuffer
decodeFromSingleByte convArr
    input@Buffer { bufRaw=iraw, bufL=ir0, bufR=iw, bufSize=_ }
    output@Buffer { bufRaw=oraw, bufL=_, bufR=ow0, bufSize=os }
  = let
        -- Rewind the input buffer to empty once fully consumed.
        done why !ir !ow = return (why,
                                   if ir==iw then input{ bufL=0, bufR=0}
                                             else input{ bufL=ir},
                                   output {bufR=ow})
        loop !ir !ow
          | ow >= os = done OutputUnderflow ir ow
          | ir >= iw = done InputUnderflow ir ow
          | otherwise = do
              b <- readWord8Buf iraw ir
              let c = lookupConv convArr b
              if c=='\0' && b /= 0 then invalid else do
              ow' <- writeCharBuf oraw ow c
              loop (ir+1) ow'
          where
            invalid = done InvalidSequence ir ow
    in loop ir0 ow0
-- | Encode characters to bytes via the compact two-level table.
-- 'Nothing', or a zero entry for a character other than '\0', marks a
-- character with no representation in this code page.
encodeToSingleByte :: CompactArray Char Word8 -> EncodeBuffer
encodeToSingleByte CompactArray { encoderMax = maxChar,
                                  encoderIndices = indices,
                                  encoderValues = values }
    input@Buffer{ bufRaw=iraw, bufL=ir0, bufR=iw, bufSize=_ }
    output@Buffer{ bufRaw=oraw, bufL=_, bufR=ow0, bufSize=os }
  = let
        -- Rewind the input buffer to empty once fully consumed.
        done why !ir !ow = return (why,
                                   if ir==iw then input { bufL=0, bufR=0 }
                                             else input { bufL=ir },
                                   output {bufR=ow})
        loop !ir !ow
          | ow >= os = done OutputUnderflow ir ow
          | ir >= iw = done InputUnderflow ir ow
          | otherwise = do
              (c,ir') <- readCharBuf iraw ir
              case lookupCompact maxChar indices values c of
                Nothing -> invalid
                Just 0 | c /= '\0' -> invalid
                Just b -> do
                    writeWord8Buf oraw ow b
                    loop ir' (ow+1)
          where
            invalid = done InvalidSequence ir ow
    in
        loop ir0 ow0
--------------------------------------------
-- Array access functions
-- {-# INLINE lookupConv #-}
-- | Character for a byte, by direct indexing into the decode table.
lookupConv :: ConvArray Char -> Word8 -> Char
lookupConv a = indexChar a . fromEnum
{-# INLINE lookupCompact #-}
-- | Two-level table lookup: the character's high bits pick a block via
-- the index array, the low bits pick the byte within that block.
-- Characters above the table maximum have no mapping.
lookupCompact :: Char -> ConvArray Int -> ConvArray Word8 -> Char -> Maybe Word8
lookupCompact maxVal indexes values x
  | x > maxVal = Nothing
  | otherwise = Just $ indexWord8 values $ j + (i .&. mask)
  where
    i = fromEnum x
    mask = (1 `shiftL` n) - 1
    k = i `shiftR` n
    j = indexInt indexes k
    n = blockBitSize
{-# INLINE indexInt #-}
indexInt :: ConvArray Int -> Int -> Int
indexInt (ConvArray p) (I# i) = I# (indexInt16OffAddr# p i)
{-# INLINE indexWord8 #-}
indexWord8 :: ConvArray Word8 -> Int -> Word8
indexWord8 (ConvArray p) (I# i) = W8# (indexWord8OffAddr# p i)
{-# INLINE indexChar #-}
indexChar :: ConvArray Char -> Int -> Char
indexChar (ConvArray p) (I# i) = C# (chr# (indexInt16OffAddr# p i))
#endif
|
ezyang/ghc
|
libraries/base/GHC/IO/Encoding/CodePage.hs
|
Haskell
|
bsd-3-clause
| 6,233
|
module F6 where
-- Arity-analysis test case: 'f6' feeds the binary addition 'f6t'
-- through the higher-order wrapper 'f6f', yielding 3 + 0.
f6f h x = h x 0
f6t y z = y + z
f6 = f6f f6t 3
|
siddhanathan/ghc
|
testsuite/tests/arityanal/f6.hs
|
Haskell
|
bsd-3-clause
| 84
|
module UnitTests.Distribution.Client.Sandbox (
tests
) where
import Distribution.Client.Sandbox (withSandboxBinDirOnSearchPath)
import Test.Tasty
import Test.Tasty.HUnit
import System.FilePath (getSearchPath, (</>))
tests :: [TestTree]
tests = [ testCase "sandboxBinDirOnSearchPath" sandboxBinDirOnSearchPathTest
, testCase "oldSearchPathRestored" oldSearchPathRestoreTest
]
-- | While inside 'withSandboxBinDirOnSearchPath', the sandbox bin
-- directory ("foo/bin" here) must appear on the search path.
sandboxBinDirOnSearchPathTest :: Assertion
sandboxBinDirOnSearchPathTest =
    withSandboxBinDirOnSearchPath "foo" $ do
      r <- getSearchPath
      assertBool "'foo/bin' not on search path" $ ("foo" </> "bin") `elem` r
-- | After 'withSandboxBinDirOnSearchPath' returns, the original search
-- path must be restored unchanged.
oldSearchPathRestoreTest :: Assertion
oldSearchPathRestoreTest = do
    r <- getSearchPath
    withSandboxBinDirOnSearchPath "foo" $ return ()
    r' <- getSearchPath
    assertEqual "Old search path wasn't restored" r r'
|
enolan/cabal
|
cabal-install/tests/UnitTests/Distribution/Client/Sandbox.hs
|
Haskell
|
bsd-3-clause
| 843
|
import Data.Ix
import Data.Int
main = print (index (minBound::Int16,maxBound) maxBound)
|
beni55/ghcjs
|
test/pkg/base/ix001.hs
|
Haskell
|
mit
| 89
|
module RankN where
|
vladfi1/hs-misc
|
RankN.hs
|
Haskell
|
mit
| 23
|
-- | Truncated Maclaurin series for e^x: the sum of x^k / k! for
-- k = 0..9, accumulated left to right exactly as the original chain.
solve :: Double -> Double
solve x = sum [ x ^ k / fromIntegral (product [1 .. k]) | k <- [0 .. 9 :: Integer] ]
-- Read whitespace-separated tokens from stdin, skip the first (the test
-- count), parse the rest as Doubles, and print solve of each, one per line.
main :: IO ()
main = do
  raw <- getContents
  let samples = map (read :: String -> Double) (tail (words raw))
  mapM_ (print . solve) samples
|
JsWatt/Free-Parking
|
hacker_rank/functional_programming/introduction/evaluating_e^x.hs
|
Haskell
|
mit
| 288
|
module Typing.Subtyping
( (<:)
, (\/)
, (/\)
, (//)
, (\\)
) where
import Typing.Types
import Typing.Substitution
import Data.List (intersect, union)
import Data.Maybe (fromJust)
import qualified Data.List ((\\))
-- | Subtyping.  Clauses are tried in order: reflexivity, Top/Bot,
-- function subtyping (contravariant parameters — note p2/p1 swapped —
-- and covariant result), alpha-equivalence for functions, Forall and
-- type constructors, Forall instantiation against an application,
-- congruence for applications, and record width/depth subtyping.
(<:) :: Type -> Type -> Bool
-- S-Refl
t <: u | t == u = True
-- S-Top
_ <: Top = True
-- S-Bot
Bot <: _ = True
-- S-Fun
(Fun v1 p1 t1) <: (Fun v2 p2 t2) | v1 == v2 =
  all (uncurry (<:)) (zip p2 p1) && t1 <: t2
-- α-equivalence of functions
(Fun v1 p1 t1) <: f2@(Fun v2 _ _)
  | map snd v1 == map snd v2 =
    let s = zipSubst (map fst v1) (map (uncurry Var) v2)
     in applySubst s (Fun v2 p1 t1) <: f2
-- α-equivalence of polymorphic types
(Forall v1 t1) <: t2@(Forall v2 _)
  | length v1 == length v2 =
    let s = zipSubst v1 (map (flip Var []) v2)
     in applySubst s (Forall v2 t1) <: t2
-- α-equivalence of type constructors
(TyAbs v1 t1) <: t2@(TyAbs v2 _)
  | length v1 == length v2 =
    let s = zipSubst v1 (map (flip Var []) v2)
     in applySubst s (TyAbs v2 t1) <: t2
(Forall gen t12) <: t2@(TyApp _t21 args) | length gen == length args =
  let t1' = applySubst (zipSubst gen args) t12
   in t1' <: t2
(TyApp t11 t12) <: (TyApp t21 t22) =
  t11 <: t21 && and (zipWith (<:) t12 t22)
-- record width/depth subtyping: every field required by r2 must exist
-- in r1 at a subtype
(Rec r1) <: (Rec r2) =
  all aux r2
    where
      aux (k, t2) = case lookup k r1 of
                      Nothing -> False
                      Just t1 -> t1 <: t2
_ <: _ = False
-- Least Upper Bound
-- | Least upper bound (join): the most specific common supertype,
-- defaulting to Top when the shapes do not line up.
(\/) :: Type -> Type -> Type
s \/ t | s <: t = t
s \/ t | t <: s = s
(Fun x v p) \/ (Fun x' w q) | x == x' =
  -- parameters are contravariant, hence the meet on them
  Fun x (zipWith (/\) v w) (p \/ q)
(Forall xs p) \/ (Forall ys q) | xs == ys =
  Forall xs (p \/ q)
(TyApp p xs) \/ (TyApp q ys) | length xs == length ys =
  TyApp (p \/ q) (zipWith (\/) xs ys)
-- only the common fields survive the join
(Rec f1) \/ (Rec f2) =
  let fields = (fst <$> f1) `intersect` (fst <$> f2)
   in Rec $ map (\f -> (f, fromJust (lookup f f1) \/ fromJust (lookup f f2))) fields
_ \/ _ = Top
-- Greatest Lower Bound
-- | Greatest lower bound (meet): the most general common subtype,
-- defaulting to Bot when the shapes do not line up.
(/\) :: Type -> Type -> Type
s /\ t | s <: t = s
s /\ t | t <: s = t
(Fun x v p) /\ (Fun x' w q) | x == x' =
  -- parameters are contravariant, hence the join on them
  Fun x (zipWith (\/) v w) (p /\ q)
(Forall xs p) /\ (Forall ys q) | xs == ys =
  Forall xs (p /\ q)
(TyApp p xs) /\ (TyApp q ys) | length xs == length ys =
  TyApp (p /\ q) (zipWith (/\) xs ys)
-- the meet keeps the union of fields; missing fields default to Top
(Rec f1) /\ (Rec f2) =
  let fields = (fst <$> f1) `union` (fst <$> f2)
   in Rec $ map (\f -> (f, maybe Top id (lookup f f1) /\ maybe Top id (lookup f f2))) fields
_ /\ _ = Bot
-- VARIABLE ELIMINATION
-- Eliminate Up: S ⇑V T
-- | Eliminate Up (S ⇑V T): approximate the type from above while
-- removing the given variables; covariant occurrences become Top, and
-- parameter (contravariant) positions flip to '(\\)'.
(//) :: [Var] -> Type -> Type
-- VU-Top
_ // Top = Top
-- VU-Bot
_ // Bot = Bot
-- VU-Con
_ // (Con x) = (Con x)
-- VU-Cls
_ // (Cls name) = (Cls name)
v // var@(Var x _)
  -- VU-Var-1
  | x `elem` v = Top
  -- VU-Var-2
  | otherwise = var
-- VU-Fun
v // (Fun x s t) =
  let u = map ((\\) v) s in
  let r = v // t in
  Fun x u r
v // (Rec fields) =
  let fields' = map (\(k, t) -> (k, v // t)) fields
   in Rec fields'
-- binders shadow: bound variables are not eliminated underneath
v // (Forall gen ty) =
  let v' = v Data.List.\\ gen
   in Forall gen (v' // ty)
v // (TyAbs gen ty) =
  let v' = v Data.List.\\ gen
   in TyAbs gen (v' // ty)
v // (TyApp ty args) =
  TyApp (v // ty) (map ((//) v) args)
-- Eliminate Down: S ⇓V T
-- | Eliminate Down (S ⇓V T): dual of '(//)' — covariant occurrences of
-- the eliminated variables become Bot, parameter positions flip to '(//)'.
(\\) :: [Var] -> Type -> Type
-- VD-Top
_ \\ Top = Top
-- VD-Bot
_ \\ Bot = Bot
--
-- VD-Con
_ \\ (Con x) = (Con x)
--
-- VD-Cls
_ \\ (Cls name) = (Cls name)
v \\ var@(Var x _)
  -- VD-Var-1
  | x `elem` v = Bot
  -- VD-Var-2
  | otherwise = var
-- VD-Fun
v \\ (Fun x s t) =
  let u = map ((//) v) s in
  let r = v \\ t in
  Fun x u r
v \\ (Rec fields) =
  let fields' = map (\(k, t) -> (k, v \\ t)) fields
   in Rec fields'
-- binders shadow: bound variables are not eliminated underneath
v \\ (Forall gen ty) =
  let v' = v Data.List.\\ gen
   in Forall gen (v' \\ ty)
v \\ (TyAbs gen ty) =
  let v' = v Data.List.\\ gen
   in TyAbs gen (v' \\ ty)
v \\ (TyApp ty args) =
  TyApp (v \\ ty) (map ((\\) v) args)
|
tadeuzagallo/verve-lang
|
src/Typing/Subtyping.hs
|
Haskell
|
mit
| 3,789
|
-- | The DSL for creating a grammar/tokenizer definition for 'Text.Tokenify.tokenizer'
module Text.Tokenify.DSL where
import Prelude hiding (concat, any)
import qualified Text.Tokenify.Response as Response
import qualified Text.Tokenify.Regex as Regex
import Text.Tokenify.Regex (Regex)
import Text.Tokenify.Types
-- * Response Constructors
-- | Creates a response which will fail on a regex
fails :: Regex s -> Token s a
fails r = (r, Response.Error)
-- | Creates a response which will ignore a regex
ignore :: Regex s -> Token s a
ignore r = (r, Response.Ignore)
-- | Creates a response which consumes the text position
insert :: Regex s -> (Pos -> a) -> Token s a
insert r f = (r, Response.Display f)
-- | Creates a response which consumes the captures 'CharSeq' and the text position
evaluate :: Regex s -> (s -> Pos -> a) -> Token s a
evaluate r f = (r, Response.Process f)
-- * Regex Constructors
-- | Creates a regex that matches a string
string :: s -> Regex s
string = Regex.String
-- | Creates a regex that matches a char
char :: Char -> Regex s
char = Regex.Char
-- | Creates a create that will match a range of characters
range :: Char -> Char -> Regex s
range = Regex.Range
-- | Creates a regex that will attmpt to make the regex on the left, if
-- that fails it will attmpt to match the regex on the right
alt :: Regex s -> Regex s -> Regex s
alt = Regex.Alt
-- | Creates a regex that will attmpt to match a Sequence of regex\'s
-- in a sequencial order
-- | Creates a regex that attempts each regex in sequential order,
-- nesting alternations to the right; an empty list never matches.
any :: [Regex s] -> Regex s
any [] = Regex.NoPass
any rs = foldr1 Regex.Alt rs
-- | Create a regex that appends the result of two regex\'s
append :: Regex s -> Regex s -> Regex s
append = Regex.Append
-- | Create a regex that appends the result of a sequence of regex\'s
-- | Create a regex that appends the results of a sequence of regex\'s,
-- nesting appends to the right; an empty list never matches.
concat :: [Regex s] -> Regex s
concat [] = Regex.NoPass
concat rs = foldr1 Regex.Append rs
-- | Create a regex that may or may not match a regex
option :: Regex s -> Regex s
option = Regex.Option
-- | Create a regex that matches zero or more of a regex
repeat :: Regex s -> Regex s
repeat = Regex.Repeat
-- | Create a regex that matches one or more of a regex
repeat1 :: Regex s -> Regex s
repeat1 = Regex.Repeat1
|
AKST/tokenify
|
src/Text/Tokenify/DSL.hs
|
Haskell
|
mit
| 2,233
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE OverlappingInstances #-}
module Main where
import Prelude hiding (writeFile)
import Data.Map (empty)
import Text.XML
import Text.XML.Generic
import GHC.Generics
data Hobby = Hobby String deriving (Show, Generic)
data User = User {
firstName :: String,
lastName :: String,
age :: Int,
hobbies :: [Hobby]
}
deriving (Show, Generic)
instance ToXml Hobby
instance ToXml User
main :: IO()
main = do
  -- Serialize both users into one pretty-printed XML document.
  writeFile def { rsPretty = True } "users.xml" $ Document (Prologue [] Nothing []) root []
  where
    john = User "John" "Doe" 44 [Hobby "jokes", Hobby "laughing"]
    jane = User "Jane" "Doe" 38 []
    -- NOTE(review): toXml presumably yields a node list per user — the
    -- two lists are concatenated under the <users> root; confirm against
    -- Text.XML.Generic.
    users = (toXml) john ++ (toXml jane)
    root = Element "users" empty users
|
jhedev/xml-conduit-generics
|
examples/Users.hs
|
Haskell
|
mit
| 1,022
|
module Input where
import Data.Vector (Vector)
import qualified Data.Vector as V
import DailyChart
type Input = Vector Double
type SixDailyCharts = (DailyChart, DailyChart, DailyChart, DailyChart, DailyChart, DailyChart)
-- | Turn a window of six consecutive daily charts into one training
-- pair.  The feature vector lists the six open values then the close
-- values, with the last close dropped by 'init' (it determines the
-- label); the label is whether the close rose from day 5 to day 6.
fromDailyCharts :: SixDailyCharts -> (Bool, Input)
fromDailyCharts (d1,d2,d3,d4,d5,d6) = (answer, input)
  where
    input = V.fromList $ init [ fromIntegral (f d) | f <- [open, close], d <- [d1,d2,d3,d4,d5,d6] ]
    answer = close d5 < close d6

-- | Every sliding window of six consecutive days, each converted to a
-- training pair (zip6 truncates at the shortest drop).
makeInputs :: Vector DailyChart -> Vector (Bool, Input)
makeInputs ds = V.map fromDailyCharts $ V.zip6 ds (V.drop 1 ds) (V.drop 2 ds) (V.drop 3 ds) (V.drop 4 ds) (V.drop 5 ds)
|
cohei/stock-value-prediction
|
Input.hs
|
Haskell
|
mit
| 650
|
--The MIT License (MIT)
--
--Copyright (c) 2016-2017 Steffen Michels (mail@steffen-michels.de)
--
--Permission is hereby granted, free of charge, to any person obtaining a copy of
--this software and associated documentation files (the "Software"), to deal in
--the Software without restriction, including without limitation the rights to use,
--copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the
--Software, and to permit persons to whom the Software is furnished to do so,
--subject to the following conditions:
--
--The above copyright notice and this permission notice shall be included in all
--copies or substantial portions of the Software.
--
--THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
--IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
--FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
--COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
--IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
--CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 800
{-# LANGUAGE Strict#-}
#endif
module HPT
( HPT(..)
, HPTLeaf(..)
, HPTLeafFormulas(..)
, CachedSplitPoints(..)
, SplitPoint(..)
, Proof(..)
, Choice(..)
, FNodeType(..)
, LazyNode
, initialHPT
, bounds
, nextLeaf
, addLeaf
, addLeafWithinEvidence
) where
import qualified KnowledgeBase as KB
import Probability
import qualified GroundedAST
import Data.HashMap (Map)
import qualified Data.HashMap as Map
import Data.Text (Text)
import GHC.Generics (Generic)
import Data.Hashable (Hashable)
import qualified Data.Hashable as Hashable
import qualified Data.Set as PQ -- set used as PQ here
import Data.HashSet (Set)
import Control.Monad (when)
type PQ = PQ.Set
-- Hybrid Probability Tree
data HPT = HPT (PQ HPTLeaf) ProbabilityQuadruple (Map HPTLeafFormulas Probability)
data HPTLeaf = HPTLeaf HPTLeafFormulas Probability deriving Eq
data HPTLeafFormulas = MaybeWithinEv LazyNode (KB.NodeRef CachedSplitPoints) Int
| WithinEv (KB.NodeRef CachedSplitPoints) Int
deriving (Eq, Generic)
instance Ord HPTLeafFormulas where
compare WithinEv{} MaybeWithinEv{} = LT
compare (WithinEv qx hx) (WithinEv qy hy) = case compare hx hy of
EQ -> compare qx qy
res -> res
compare (MaybeWithinEv qx ex hx) (MaybeWithinEv qy ey hy) = case compare hx hy of
EQ -> case compare ex ey of
EQ -> compare qx qy -- comparing lazy queries is most expensive
res -> res
res -> res
compare MaybeWithinEv{} WithinEv{} = GT
instance Hashable HPTLeafFormulas where
hashWithSalt salt (MaybeWithinEv _ _ h) = Hashable.hashWithSalt salt h
hashWithSalt salt (WithinEv _ h) = Hashable.hashWithSalt salt h
type LazyNode = (KB.NodeRef CachedSplitPoints, KB.Conditions)
instance Ord HPTLeaf where
HPTLeaf fx px <= HPTLeaf fy py
| px == py = fx <= fy
| otherwise = px <= py
-- CachedSplitPoints "true proofs" "false proofs" "all point [+ scores]"
data CachedSplitPoints = CachedSplitPoints (Set Proof) (Set Proof) FNodeType
data FNodeType = Primitive (Set SplitPoint) | Composed (Map SplitPoint Int)
data SplitPoint = BoolSplit (GroundedAST.PFunc Bool)
| StringSplit (GroundedAST.PFunc Text) (Set Text) -- left branch: all string in this set, right branch: all remaining strings
| ContinuousSplit (GroundedAST.PFunc GroundedAST.RealN) Rational
| ObjectSplit (GroundedAST.PFunc GroundedAST.Object) Integer -- left branch: including this object, right branch: excluding this object
| ObjectIntervSplit (GroundedAST.PFunc GroundedAST.Object) Integer -- left branch: including this object
deriving (Eq, Generic, Ord)
instance Hashable SplitPoint
newtype Proof = Proof (Map SplitPoint Choice) deriving (Eq, Ord, Generic)
instance Hashable Proof
data Choice = Left | Right deriving (Eq, Ord, Generic)
instance Hashable Choice
initialHPT :: KB.NodeRef CachedSplitPoints -> KB.NodeRef CachedSplitPoints -> KB.KBState CachedSplitPoints HPT
initialHPT q e = addLeaf (q, KB.noConditions) e 1.0 $ HPT PQ.empty (ProbabilityQuadruple 0.0 0.0 0.0 0.0) Map.empty
-- | Pop the most probable leaf from the priority queue, removing its
-- mass from the matching quadruple component (unknown mass for
-- maybe-within-evidence leaves, evidence mass for within-evidence ones).
nextLeaf :: HPT -> Maybe (HPTLeaf, HPT)
nextLeaf (HPT leaves (ProbabilityQuadruple t f e u) leafSet) = case PQ.maxView leaves of
    Nothing -> Nothing
    Just (leaf@(HPTLeaf fs p), leaves') -> Just (leaf, HPT leaves' quad $ Map.delete fs leafSet)
        where
        quad = case fs of
            MaybeWithinEv{} -> ProbabilityQuadruple t f e (u - p)
            WithinEv{}      -> ProbabilityQuadruple t f (e - p) u
-- | Add a leaf whose evidence node may still be undecided.  If the
-- evidence is deterministically true, the query is conditioned and
-- delegated to 'addLeafWithinEvidence'; if deterministically false the
-- HPT is unchanged; otherwise the leaf enters the queue with its mass
-- under the "unknown" component.
addLeaf :: LazyNode -> KB.NodeRef CachedSplitPoints -> Probability -> HPT -> KB.KBState CachedSplitPoints HPT
addLeaf qWithConds@(q, qConds) ev p hpt@(HPT leaves (ProbabilityQuadruple t f e u) leafSet) = case KB.deterministicNodeRef ev of
    Just True -> do
        q'  <- KB.augmentWithEntry q
        q'' <- KB.condition q' qConds
        KB.dereference q
        addLeafWithinEvidence (KB.entryRef q'') p hpt
    Just False -> return hpt
    Nothing -> do
        -- on a merge the duplicate references are released
        when merged $ KB.dereference q >> KB.dereference ev
        return $ HPT pq' (ProbabilityQuadruple t f e (u + p)) leafSet'
        where
        (pq', leafSet', merged) = insertIntoPQ
            (MaybeWithinEv qWithConds ev $ Hashable.hashWithSalt (Hashable.hash qWithConds) ev)
            p
            leaves
            leafSet
-- | Add a leaf whose evidence already holds.  Deterministic query nodes
-- move their mass straight to the true/false components; otherwise the
-- leaf enters the queue under the "within evidence" component.
addLeafWithinEvidence :: KB.NodeRef CachedSplitPoints -> Probability -> HPT -> KB.KBState CachedSplitPoints HPT
addLeafWithinEvidence q p (HPT leaves (ProbabilityQuadruple t f e u) leafSet) = case KB.deterministicNodeRef q of
    Just True  -> return $ HPT leaves (ProbabilityQuadruple (t + p) f e u) leafSet
    Just False -> return $ HPT leaves (ProbabilityQuadruple t (f + p) e u) leafSet
    Nothing -> do
        -- on a merge the duplicate reference is released
        when merged $ KB.dereference q
        return $ HPT pq' (ProbabilityQuadruple t f (e + p) u) leafSet'
        where
        (pq', leafSet', merged) = insertIntoPQ (WithinEv q $ Hashable.hash q) p leaves leafSet
-- | Insert a leaf, merging its probability with an existing leaf for
-- the same formulas when present.  The Bool reports whether a merge
-- happened (callers then release duplicate references).
insertIntoPQ :: HPTLeafFormulas
             -> Probability
             -> PQ HPTLeaf
             -> Map HPTLeafFormulas Probability
             -> (PQ HPTLeaf, Map HPTLeafFormulas Probability, Bool)
insertIntoPQ fs p pq leafSet = case Map.lookup fs leafSet of
    Just p' ->
        -- the old queue entry must be deleted and re-inserted, since its
        -- priority (the probability) changes
        let p'' = p + p'
        in (PQ.insert (HPTLeaf fs p'') $ PQ.delete (HPTLeaf fs p') pq, Map.insert fs p'' leafSet, True)
    Nothing -> (PQ.insert (HPTLeaf fs p) pq, Map.insert fs p leafSet, False)
-- Nothing if evidence is inconsistent
bounds :: HPT -> Maybe ProbabilityBounds
-- no mass anywhere: evidence is inconsistent
bounds (HPT _ (ProbabilityQuadruple 0.0 0.0 0.0 0.0) _) = Nothing
bounds (HPT _ (ProbabilityQuadruple t f e u) _) =
    Just $ ProbabilityBounds lo up
    where
    -- lower bound: proven-true mass over all mass
    lo = t / (t + f + e + u)
    -- upper bound: everything not proven false, clamped to 1
    up | upDen == 0.0 = 1.0
       | up' <= 1.0   = up'
       | otherwise    = 1.0
    ~up' = (t + e + u) / upDen -- lazy to prevent division by zero
    upDen = t + f + e
-- (true prob, false prob (within evidence), within evidence, unknown prob)
data ProbabilityQuadruple = ProbabilityQuadruple Probability Probability Probability Probability
|
SteffenMichels/IHPMC
|
src/HPT.hs
|
Haskell
|
mit
| 7,452
|
{-# LANGUAGE OverloadedStrings #-}
module Y2018.M06.D05.Exercise where
{--
Another day, another data structure.
We have a smart-tagging algorithm that saves its results to JSON. We want to
take those results in store them in a database. But before we do that, we need
to parse the JSON into Haskell structures because Haskell structures are ...
... cute?
--}
import Data.Aeson
data Entity a = Entity { name :: String, wiki :: WikiInfo, related :: [a] }
deriving (Eq, Show)
instance FromJSON a => FromJSON (Entity a) where
parseJSON (Object o) = undefined
data WikiInfo = Wiki { wikiname, wikisummary :: String,
wikiimages :: [FilePath], wikilink :: FilePath }
deriving (Eq, Show)
instance FromJSON WikiInfo where
parseJSON (Object o) = undefined
-- the entities are stored here
exDir, entitiesFile :: FilePath
exDir = "Y2018/M06/D05/"
entitiesFile = "smart_tagging.json"
readEntities :: FilePath -> IO [Entity Value]
readEntities file = undefined
-- How many entities are there?
-- What is the name of the entity that has the most related articles?
-- How many related articles does it have?
{-- PART DUEX! --------------------------------------------------------------
Okay. Look at the structure of the JSON.
Why do people do this? That is to say. They have entity information: great.
They have wiki information that they get from a separate call: great.
But why flatten into a pancake the wiki information with the entity information,
commingling the two?
Somebody ought to write a book: "Badly Structured Data, and How to Avoid it!"
or: "Good Data Structures."
Oh, somebody has written a book? Several somebodies?
Huh. It's like people look at JSON and are like: "I know data structures because
I've seen JSON once! Hold my beer!"
This JSON.
So. Output the JSON in well-structured (hierarchical) form.
--}
instance ToJSON a => ToJSON (Entity a) where
toJSON entity = undefined
-- Then rebuke the bad JSON by writing out that good JSON to file
writeEntities :: ToJSON a => FilePath -> [Entity a] -> IO ()
writeEntities output entities = undefined
|
geophf/1HaskellADay
|
exercises/HAD/Y2018/M06/D05/Exercise.hs
|
Haskell
|
mit
| 2,113
|
import Test.Hspec
-- Problem 16
-- Drop every N'th element from a list.
-- | Drop every n'th element from a list (problem 16), counting from 1,
-- e.g. dropEvery "abcdefghik" 3 == "abdeghk".  For n < 1 the counter
-- never reaches 1 and the list is returned unchanged.
dropEvery :: Eq a => [a] -> Int -> [a]
dropEvery xs n = walk xs n
  where
    walk ys counter =
      case ys of
        []     -> []
        (z:zs)
          | counter == 1 -> walk zs n
          | otherwise    -> z : walk zs (counter - 1)
-- | Hspec entry point: checks the worked example from the exercise.
main :: IO()
main = hspec $
  describe "99-exercises.16 = Drop every N'th element from a list" $
    it "should drop each n'th element in a list" $
      dropEvery "abcdefghik" 3 `shouldBe` "abdeghk"
|
baldore/haskell-99-exercises
|
16.hs
|
Haskell
|
mit
| 451
|
{-# LANGUAGE RecordWildCards #-}
import Data.Char (isSpace)
import Data.Foldable (for_)
import Data.Function (on)
import Test.Hspec (Spec, describe, it, shouldBe, shouldMatchList)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import CryptoSquare (encode)
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
-- | One test group per 'Case': the same encoded output is checked three
-- ways with increasing strictness — same characters ignoring spaces and
-- order, same string ignoring spaces, and exact equality with grouping.
specs :: Spec
specs = describe "encode" $ for_ cases test
  where
    test Case{..} = describe description $ do
      let shouldMatchString = shouldBe `on` filter (not . isSpace)
          shouldMatchChars = shouldMatchList `on` filter (not . isSpace)
      it "normalizes the input" $ encode input `shouldMatchChars` expected
      it "reorders the characters" $ encode input `shouldMatchString` expected
      it "groups the output" $ encode input `shouldBe` expected
data Case = Case { description :: String
, input :: String
, expected :: String
}
cases :: [Case]
cases = [ Case { description = "empty plaintext results in an empty ciphertext"
, input = ""
, expected = ""
}
, Case { description = "Lowercase"
, input = "A"
, expected = "a"
}
, Case { description = "Remove spaces"
, input = " b "
, expected = "b"
}
, Case { description = "Remove punctuation"
, input = "@1,%!"
, expected = "1"
}
, Case { description = "9 character plaintext results in 3 chunks of 3 characters"
, input = "This is fun!"
, expected = "tsf hiu isn"
}
, Case { description = "8 character plaintext results in 3 chunks, the last one with a trailing space"
, input = "Chill out."
, expected = "clu hlt io "
}
, Case { description = "54 character plaintext results in 7 chunks, the last two padded with spaces"
, input = "If man was meant to stay on the ground, god would have given us roots."
, expected = "imtgdvs fearwer mayoogo anouuio ntnnlvt wttddes aohghn sseoau "
}
]
-- de97c99c0d129ce1af95e8986917ac3964292f42
|
exercism/xhaskell
|
exercises/practice/crypto-square/test/Tests.hs
|
Haskell
|
mit
| 2,412
|
module Y2016.M07.D19.Solution where
{--
A Trigon, also known in some circles as a 'triangle,' is a three-SIDED shape.
Trigons are have several interesting characteristics. A trigon also defines a
plane (in which it lies), and a set of trigons can be rendered efficiently these
days to represent, e.g. characters in 3 dimensions, such as pokémon, for example
... now that I have your attention.
Look at the figure tri2.gif at this directory or at the URL:
https://github.com/geophf/1HaskellADay/blob/master/exercises/HAD/Y2016/M07/D19/tri2.gif
Today's #haskell exercise is to declare the type Trigon, and then to compute
the number of trigons in that figure. Also compute the total area, because fun.
--}
import Data.Map (Map)
import qualified Data.Map as Map
-- | A trigon (triangle): a three-sided polygon. A bare tag for now;
-- the exercise only needs the concept, not vertex data.
data Trigon = A3SidedPolygon deriving Show

-- | A point in the 2-D plane.
type Point2d = (Float, Float)

-- | A figure: labelled vertices mapped to their plane coordinates.
type Figure = Map Char Point2d
-- | The vertex set of the figure in tri2.gif: thirteen labelled points,
-- paired up label-by-label with their coordinates.
figure2 :: Figure
figure2 = Map.fromList (zip labels coords)
  where
    labels = "abgcdthfjkmnp"
    coords = [ (0,0), (15,10), (25,10), (35,10), (50,0)
             , (35,-10), (25,-10), (15,-10)
             , (15,0), (20,0), (25,0), (30,0), (35,0) ]
-- | Count every trigon in the figure — exercise stub, intentionally
-- left 'undefined' for the solver.
countingTrigons :: Figure -> Int
countingTrigons = undefined
-- hint: it is possible for trigons to overlap or to contain other trigons
-- within them
{-- BONUS -----------------------------------------------------------------
The area of a trigon is its bh / 2
where b = length of the base of the trigon
h = height of the trigon
Of course the area of a 'square' is the square of the length of its side ...
that's why a square is called 'square,' you see.
But I digress ... or do I?
What is the area of the figure?
--}
-- | Total area of the figure — exercise stub, intentionally left
-- 'undefined' for the solver (per the comment above: b*h/2 per trigon).
area :: Figure -> Float
area = undefined
-- BONUS-BONUS: why is area called 'area'? What is its etymology?
-- The figure is figure2 because we'll do a bit of exploration with shapes
-- this week.
|
geophf/1HaskellADay
|
exercises/HAD/Y2016/M07/D19/Solution.hs
|
Haskell
|
mit
| 1,792
|
import Prelude hiding ((++),concat)
-- just for kicks & gigs
-- | List append, re-derived: a right fold that rebuilds the first list
-- with the second list as the base case. Lazily identical to the
-- hand-written recursion.
(++) :: [a] -> [a] -> [a]
xs ++ ys = foldr (:) ys xs
-- | Flatten one level of nesting: append every inner list in order.
-- Expressed as a right fold over (++), which unrolls to exactly the
-- original two-clause recursion.
concat :: [[a]] -> [a]
concat = foldr (++) []
test = concat [["a","b"],["c","d"],["e","f"]]
|
calebgregory/fp101x
|
wk3/concat.hs
|
Haskell
|
mit
| 271
|
module MyLen where
-- | Count the elements of a (finite) list, using a tail-recursive
-- counting loop instead of the naive @1 +@ recursion.
myLen :: [a] -> Int
myLen = go 0
  where
    go acc []       = acc
    go acc (_:rest) = go (acc + 1) rest
|
lpenz/realworldhaskell-exercises
|
ch03/MyLen.hs
|
Haskell
|
mit
| 86
|
-- WeIrD StRiNg CaSe
-- http://www.codewars.com/kata/52b757663a95b11b3d00062d/train/haskell
module WeIrDStRiNgCaSe where
import Data.Char(toLower, toUpper)
-- | WeIrD StRiNg CaSe: within each word, upper-case the characters at
-- even positions and lower-case those at odd positions; words are
-- split and re-joined on spaces.
toWeirdCase :: String -> String
toWeirdCase = unwords . map weirdWord . words
  where
    caseFns     = cycle [toUpper, toLower]
    weirdWord w = zipWith (\f c -> f c) caseFns w
|
gafiatulin/codewars
|
src/6 kyu/WeIrDStRiNgCaSe.hs
|
Haskell
|
mit
| 268
|
module Vacivity.FOV.ShadowCast(
calcFOV
) where
import Prelude hiding (any, all, foldl, concat)
import Control.Applicative ((<$>))
import Data.Foldable hiding (toList)
import qualified Data.Set as Set
import qualified Antiqua.Data.Array2d as A2D
import Antiqua.Common
import Vacivity.FOV.Common
import Vacivity.Utils
import Debug.Trace
-- | True when the point lies on or inside the circle of radius @radius@
-- centred at the origin (compared in squared form, avoiding sqrt).
inRadius :: XY -> Int -> Bool
inRadius (px, py) radius = px * px + py * py <= radius * radius
-- | True when (x, y) lies inside the rectangle given as
-- (x origin, y origin, width, height); right/bottom edges exclusive.
--
-- Fixed the @| cond = True | otherwise = False@ anti-pattern: the
-- boolean condition is the result, so return it directly.
inRange :: XY -> (Int,Int,Int,Int) -> Bool
inRange (x, y) (rx, ry, rw, rh) =
    x >= rx && y >= ry && x < rx + rw && y < ry + rh
-- | A cell is solid when the mask holds False at that coordinate.
-- NOTE(review): @any not@ over the result of 'A2D.get' implies the
-- lookup returns a Foldable (presumably Maybe); an out-of-bounds lookup
-- would then count as /not/ solid — confirm against Antiqua.Data.Array2d.
isSolid :: Mask -> XY -> Bool
isSolid msk c = any not (A2D.get msk c)
-- | State threaded through one octant scan of the shadow caster.
data ShadowArgs = ShadowArgs { s :: Double,   -- ^ current start slope of the scan
                               ns :: Double,  -- ^ candidate next start slope (set from the right slope of a solid cell)
                               b :: Bool,     -- ^ True while inside a blocking span of solid cells
                               lit :: Col XY  -- ^ cells found visible so far
                             }
-- | Collection used to accumulate lit cells; a Set keeps them unique.
type Col a = Set.Set a

-- | Convert the collection to a list (ascending order, per Data.Set).
toList :: Col a -> [a]
toList = Set.toList

-- | Insert an element. (Eta-reduced from @append x = Set.insert x@ —
-- the hand-written eta expansion added nothing.)
append :: Ord a => a -> Col a -> Col a
append = Set.insert

-- | A one-element collection.
single :: a -> Col a
single = Set.singleton

-- | The empty collection.
empty :: Col a
empty = Set.empty
-- | Recursive shadow-casting field of view: all cells visible from the
-- origin within radius @r@ on the mask. The four diagonal directions,
-- each expanded into two scans with transposed axes, cover the eight
-- octants; the origin itself is always lit via 'seed'.
calcFOV :: Mask -> XY -> Int -> [XY]
calcFOV msk@(A2D.Array2d w h _) (sx, sy) r =
    let dirs = [ (-1,1), (1,-1), (-1,-1), (1,1) ] in
    let cast = castLight 1 1.0 0.0 msk in
    let seed = single (sx, sy) in
    -- Each (i, j) spawns two octant scans: one per axis ordering.
    let calls = concat $ (\(i, j) -> [cast i 0 0 j, cast 0 i j 0]) <$> dirs in
    let lsts = ($ empty) <$> calls in
    toList $ Set.unions (seed:lsts)
    -- castLight scans one octant between slopes 'start' and 'end',
    -- starting at scan row 'row', accumulating lit cells in 'l'.
    -- xx/xy/yx/yy form the 2x2 matrix taking scan-space (dx, dy) to
    -- world-space offsets from the origin.
    where castLight row start end mask xx xy yx yy l =
              if start < end
              then l  -- degenerate slope interval: nothing visible here
              else lit $ outer row (ShadowArgs start 0.0 False l)
            where
              -- Restart a narrower scan past a blocking span.
              recast d st ed lit' = castLight d st ed mask xx xy yx yy lit'
              -- Walk rows outward until the radius is exceeded or the
              -- whole row was blocked.
              outer d args =
                  if d > r || b args
                  then args
                  else (outer (d + 1) . inner d (-d) (-d)) args
              -- Walk one row cell by cell (dx from -d up to 0).
              inner d dy dx args =
                  let reinner = inner d dy (dx + 1) in
                  if dx > 0
                  then args
                  else
                    let pos = (sx + dx * xx + dy * xy
                              ,sy + dx * yx + dy * yy) in
                    -- Slopes of the cell's left/right corners.
                    let f sigx sigy = (fromIntegral dx + sigx*0.5)
                                    / (fromIntegral dy + sigy*0.5)
                    in
                    let ls = f (-1) 1 in
                    let rs = f 1 (-1) in
                    if (not . inRange pos) (0, 0, w, h) || s args < rs
                    then reinner args       -- off-map or right of the scan: skip cell
                    else if end > ls
                         then args          -- left of the scan: row done
                         else
                           -- Light the cell if within the radius.
                           let lit' = onlyIf (inRadius (dx, dy) r) (append pos) (lit args) in
                           let solid = isSolid mask pos in
                           let args' = args { lit = lit' } in
                           reinner $ if b args'
                                     then if solid
                                          then args' { ns = rs }  -- still blocked: narrow further
                                          else args' { s = ns args', b = False}
                                     else if solid && d < r
                                          -- Entering a blocking span: recursively
                                          -- scan deeper rows left of the blocker.
                                          then let lit'' = recast (d + 1) (s args') ls lit' in
                                               args' { ns = rs, b = True, lit = lit'' }
                                          else args'
|
olive/vacivity
|
src/Vacivity/FOV/ShadowCast.hs
|
Haskell
|
mit
| 3,298
|
{-
******************************************************************************
* JSHOP *
* *
* Module: AST *
* Purpose: JavaScript Abstract Syntax Tree *
* Authors: Nick Brunt, Henrik Nilsson *
* *
* Based on the HMTC equivalent *
* Copyright (c) Henrik Nilsson, 2006 - 2011 *
* http://www.cs.nott.ac.uk/~nhn/ *
* *
* Revisions for JavaScript *
* Copyright (c) Nick Brunt, 2011 - 2012 *
* *
******************************************************************************
-}
-- | JavaScript Abstract Syntax Tree. Representation of JavaScript programs
-- after parsing.
module AST where
{-
AST (..), -- Not abstract. Instances: HasSrcPos.
Command (..), -- Not abstract. Instances: HasSrcPos.
Expression (..), -- Not abstract. Instances: HasSrcPos.
Declaration (..), -- Not abstract. Instances: HasSrcPos.
TypeDenoter (..) -- Not abstract. Instances: HasSrcPos.
-}
-- JSHOP module imports
--import Name
--import SrcPos
-- Note on Naming Conventions for Constructors and Field Labels
--
-- In Haskell, two (or more) datatypes that are in scope simultaneously
-- must not have any constructors or field labels in common. However,
-- different constructors of the same type may have common field names,
-- provided the fields all have the same type. This is very different
-- from records in languages like Pascal or C, and from objects in OO
-- languages like Java, where sharing names across different records or
-- objects are both possible and common.
--
-- To avoid name clashes, while still making it possible to use similar
-- names for similar things in different type declarations, some simple
-- naming conventions have been adopted:
--
-- * Constructors get prefix which is an abbreviation of the name of
-- the data type. E.g. for 'Command', the prefix is 'Cmd', and a
-- typical constructor name is 'CmdAssign', and for 'TypeDenoter',
-- the prefix is 'TD'.
--
-- * Field names that are common to one or more constructors, get the
-- same prefix as the constructor, but in lower-case.
--
-- * Field names that are specific to a constructor get a lower-case
-- prefix that is an abbreviation of the constructor. E.g. the
-- prefix for 'CmdAssign' is 'ca', and one of its fields is 'caVar'.
-- | Abstract syntax for the syntactic category Program
--data AST = AST { astCmd :: Command }
-- | A whole JavaScript program: a sequence of top-level source elements.
data Program
    = Program [Source]
    deriving Show

-- | A top-level source element: a statement or a function declaration.
data Source
    = Statement Statement
    | SFuncDecl FuncDecl
    deriving Show

-- | Function declaration: optional name, parameter names, and body.
data FuncDecl
    = FuncDecl (Maybe String) [String] [Source]
    deriving Show
-- | Abstract syntax for JavaScript statements.
data Statement
    = EmptyStmt                      -- ^ empty statement
    | IfStmt IfStmt                  -- ^ conditional
    | IterativeStmt IterativeStmt    -- ^ loop
    | ExprStmt Expression            -- ^ expression statement
    | Block [Statement]              -- ^ statement block
    | VarStmt [VarDecl]              -- ^ variable declaration list
    | TryStmt TryStmt                -- ^ try with catch and/or finally
    | ContinueStmt (Maybe String)    -- ^ continue, optional label
    | BreakStmt (Maybe String)       -- ^ break, optional label
    | ReturnStmt (Maybe Expression)  -- ^ return, optional value
    | WithStmt Expression Statement  -- ^ with statement
    | LabelledStmt String Statement  -- ^ labelled statement
    | Switch Switch                  -- ^ switch statement
    | ThrowExpr Expression           -- ^ throw statement
    deriving Show
-- | Conditionals, with and without an else-branch.
data IfStmt
    = IfElse Expression Statement Statement
    | If Expression Statement
-- | If2 Expression
-- | If3
    deriving Show

-- | Loops: do/while, while, the three-clause for (init/test/update),
-- for with var declarations, and for..in.
data IterativeStmt
    = DoWhile Statement Expression
    | While Expression Statement
    | For (Maybe Expression) (Maybe Expression) (Maybe Expression) Statement
    | ForVar [VarDecl] (Maybe Expression) (Maybe Expression) Statement
    | ForIn [VarDecl] Expression Statement
-- | It2 Expression
    deriving Show
-- | try statements: body with catches, body with finally, or all three.
data TryStmt
    = TryBC [Statement] [Catch]
    | TryBF [Statement] [Statement]
    | TryBCF [Statement] [Catch] [Statement]
    deriving Show

-- | A catch clause: bound identifier and handler body, optionally
-- guarded by a condition (CatchIf).
data Catch
    = Catch String [Statement]
    | CatchIf String [Statement] Expression
    deriving Show

-- | A switch statement: scrutinee expression plus its case block.
data Switch
    = SSwitch Expression CaseBlock
    deriving Show

-- | Cases before the default, optional default clause(s), cases after.
data CaseBlock
    = CaseBlock [CaseClause] [DefaultClause] [CaseClause]
    deriving Show

-- | A single case label and its statements.
data CaseClause
    = CaseClause Expression [Statement]
    deriving Show

-- | The default clause's statements.
data DefaultClause
    = DefaultClause [Statement]
    deriving Show
-- | Top of the expression grammar; currently just wraps Assignment.
data Expression
    = Assignment Assignment
    deriving Show

-- | One variable declarator: name and optional initializer.
data VarDecl
    = VarDecl String (Maybe Assignment)
    deriving Show

-- | Abstract syntax for the syntactic category Assignment
data Assignment
    = CondExpr CondExpr                    -- ^ no assignment; fall through
    | Assign LeftExpr AssignOp Assignment  -- ^ target, operator, value
    | AssignFuncDecl FuncDecl              -- ^ function expression
    deriving Show

-- | Assignable left-hand sides.
data LeftExpr
    = NewExpr NewExpr
    | CallExpr CallExpr
    deriving Show
-- | Assignment operators: =, *=, /=, %=, +=, -=.
data AssignOp
    = AssignNormal
    | AssignOpMult
    | AssignOpDiv
    | AssignOpMod
    | AssignOpPlus
    | AssignOpMinus
    deriving Show

-- | Conditional (ternary) expression or fall-through to logical-or.
data CondExpr
    = LogOr LogOr
    | CondIf LogOr Assignment Assignment
    deriving Show

-- | new-expressions, possibly nested.
data NewExpr
    = MemberExpr MemberExpr
    | NewNewExpr NewExpr
    deriving Show

-- | Call expressions: call with arguments, chained calls,
-- bracket indexing, and dot member access.
data CallExpr
    = CallMember MemberExpr [Assignment]
    | CallCall CallExpr [Assignment]
    | CallSquare CallExpr Expression
    | CallDot CallExpr String
    deriving Show

-- | Member expressions: primary, bracket indexing, new-with-args,
-- and dot member access.
data MemberExpr
    = MemExpression PrimaryExpr
    | ArrayExpr MemberExpr Expression
    | MemberNew MemberExpr [Assignment]
    | MemberCall MemberExpr String
    deriving Show
-- | Abstract syntax for the syntactic category PrimaryExpr
data PrimaryExpr
    -- | Literal integer
    = ExpLitInt Integer
    -- | Literal strings
    | ExpLitStr String
    -- | Identifier
    | ExpId String
    -- | Bracketed expression
    | ExpBrackExp Expression
    -- | This (current object)
    | ExpThis
    -- | Regular expression literal
    | ExpRegex String
    -- | Arrays
    | ExpArray ArrayLit
    -- | Objects (property name/value pairs)
    | ExpObject [(PropName, Assignment)]
    deriving Show

-- | Abstract syntax for the syntactic category Array Literal
data ArrayLit
    -- | Simple array
    = ArraySimp [Assignment]
    deriving Show

-- | Object property names: identifier, string, or integer.
data PropName
    = PropNameId String
    | PropNameStr String
    | PropNameInt Integer
    deriving Show
-- The following chain of types mirrors JavaScript's operator-precedence
-- levels, from logical-or (loosest) down to postfix (tightest). Each
-- level either falls through to the next-tighter level or combines a
-- same-level left operand with a tighter right operand (left-assoc).

-- | Logical OR (||) level.
data LogOr
    = LogAnd LogAnd
    | LOLogOr LogOr LogAnd
    deriving Show

-- | Logical AND (&&) level.
data LogAnd
    = BitOR BitOR
    | LALogAnd LogAnd BitOR
    deriving Show

-- | Bitwise OR (|) level.
data BitOR
    = BitXOR BitXOR
    | BOBitOR BitOR BitXOR
    deriving Show

-- | Bitwise XOR (^) level.
data BitXOR
    = BitAnd BitAnd
    | BXBitXOR BitXOR BitAnd
    deriving Show

-- | Bitwise AND (&) level.
data BitAnd
    = EqualExpr EqualExpr
    | BABitAnd BitAnd EqualExpr
    deriving Show

-- | Equality level: ==, !=, ===, !==.
data EqualExpr
    = RelExpr RelExpr
    | Equal EqualExpr RelExpr
    | NotEqual EqualExpr RelExpr
    | EqualTo EqualExpr RelExpr
    | NotEqualTo EqualExpr RelExpr
    deriving Show

-- | Relational level: <, >, <=, >=, instanceof, in.
data RelExpr
    = ShiftExpr ShiftExpr
    | LessThan RelExpr ShiftExpr
    | GreaterThan RelExpr ShiftExpr
    | LessEqual RelExpr ShiftExpr
    | GreaterEqual RelExpr ShiftExpr
    | InstanceOf RelExpr ShiftExpr
    | InObject RelExpr ShiftExpr
    deriving Show

-- | Shift level: <<, >>, >>>.
data ShiftExpr
    = AddExpr AddExpr
    | ShiftLeft ShiftExpr AddExpr
    | ShiftRight ShiftExpr AddExpr
    | ShiftRight2 ShiftExpr AddExpr
    deriving Show

-- | Additive level: +, -.
data AddExpr
    = MultExpr MultExpr
    | Plus AddExpr MultExpr
    | Minus AddExpr MultExpr
    deriving Show

-- | Multiplicative level: *, /, %.
data MultExpr
    = UnaryExpr UnaryExpr
    | Times MultExpr UnaryExpr
    | Div MultExpr UnaryExpr
    | Mod MultExpr UnaryExpr
    deriving Show

-- | Prefix unary operators.
data UnaryExpr
    = PostFix PostFix
    | Delete UnaryExpr
    | Void UnaryExpr
    | TypeOf UnaryExpr
    | PlusPlus UnaryExpr
    | MinusMinus UnaryExpr
    | UnaryPlus UnaryExpr
    | UnaryMinus UnaryExpr
    | Not UnaryExpr
    | BitNot UnaryExpr
    deriving Show

-- | Postfix increment/decrement, or fall-through to a left expression.
data PostFix
    = LeftExpr LeftExpr
    | PostInc LeftExpr
    | PostDec LeftExpr
    deriving Show
{-
instance HasSrcPos AST where
srcPos = cmdSrcPos . astCmd
-}
-- | Abstract syntax for the syntactic category Command
-- For generality, the variable being assigned to, the procedure being
-- called, and the function being applied (currently only operators) are
-- represented by expressions as opposed to just an identifier (for
-- variables, procedures, and functions) or an operator. Consider
-- assignment to an array element, for example, where the RHS (e.g. x[i])
-- really is an expression that gets evaluated to a memory reference
-- (sink). Also, this arrangement facilitates error reporting, as a
-- variable expression has an associated source position, whereas names,
-- currently represented by strings, have not.
{-
data Command
-- | Assignment
= CmdAssign {
caVar :: PrimaryExpr, -- ^ Assigned variable
caVal :: PrimaryExpr, -- ^ Right-hand-side expression
cmdSrcPos :: SrcPos
}
-- | Procedure call
| CmdCall {
ccProc :: PrimaryExpr, -- ^ Called procedure
ccArgs :: [PrimaryExpr], -- ^ Arguments
cmdSrcPos :: SrcPos
}
-- | Command sequence (block)
| CmdSeq {
csCmds :: [Command], -- ^ Commands
cmdSrcPos :: SrcPos
}
{- Original version
-- | Conditional command
| CmdIf {
ciCond :: PrimaryExpr, -- ^ Condition
ciThen :: Command, -- ^ Then-branch
ciElse :: Command, -- ^ Else-branch
cmdSrcPos :: SrcPos
}
-}
-- Extended version
| CmdIf {
ciCondThens :: [(PrimaryExpr, [Command])], -- ^ Conditional branches
ciElse :: [Command], -- ^ Optional else-branch
cmdSrcPos :: SrcPos
}
-- | While-loop
| CmdWhile {
cwCond :: PrimaryExpr, -- ^ Loop-condition
cwBody :: Command, -- ^ Loop-body
cmdSrcPos :: SrcPos
}
-- | Repeat-loop
{- | CmdRepeat {
crBody :: Command, -- ^ Loop-body
crCond :: PrimaryExpr, -- ^ Loop-condition
cmdSrcPos :: SrcPos
}
-}
{-
-- | Let-command
| CmdLet {
clDecls :: [Declaration], -- ^ Declarations
clBody :: Command, -- ^ Let-body
cmdSrcPos :: SrcPos
}
-}
-}
{-
instance HasSrcPos Command where
srcPos = cmdSrcPos
-}
{-
-- | Variable reference
| ExpVar {
evVar :: Name, -- ^ Name of referenced variable
expSrcPos :: SrcPos
}
-- | Function or n-ary operator application
| ExpApp {
eaFun :: PrimaryExpr, -- ^ Applied function or operator
eaArgs :: [PrimaryExpr], -- ^ Arguments
expSrcPos :: SrcPos
}
-- | Conditional expression
| ExpCond {
ecCond :: PrimaryExpr, -- ^ Condition
ecTrue :: PrimaryExpr, -- ^ Value if condition true
ecFalse :: PrimaryExpr, -- ^ Value if condition false
expSrcPos :: SrcPos
}
-}
{-
instance HasSrcPos PrimaryExpr where
srcPos = expSrcPos
-}
-- | Abstract syntax for the syntactic category Declaration
--data Declaration
{-
{-
-- | Constant declaration
= DeclConst {
dcConst :: Name, -- ^ Name of defined constant
dcType :: TypeDenoter, -- ^ Type of defined constant
dcVal :: PrimaryExpr, -- ^ Value of defined constant
declSrcPos :: SrcPos
}
-}
-- | Variable declaration
= DeclVar {
dvVar :: Name, -- ^ Name of declared variable
dvType :: TypeDenoter, -- ^ Type of declared variable
dvMbVal :: Maybe PrimaryExpr, -- ^ Initial value of declared
-- varible, if any
declSrcPos :: SrcPos
}
-}
{-
instance HasSrcPos Declaration where
srcPos = declSrcPos
-}
-- | Abstract syntax for the syntactic category TypeDenoter
-- Right now, the only types are simple base types like Integer and Bool.
-- If MiniTriangle were extended to allow users to express e.g. function
-- types, then this data declaration would have to be extended.
--data TypeDenoter
{-
-- | Base Type
= TDBaseType {
tdbtName :: Name, -- ^ Name of the base type
tdSrcPos :: SrcPos
}
-}
{-
instance HasSrcPos TypeDenoter where
srcPos = tdSrcPos
-}
|
nbrunt/JSHOP
|
src/old/ver2/AST.hs
|
Haskell
|
mit
| 13,397
|
module Chapter17 where
import Data.List (elemIndex)
-- | Add 3 inside the Maybe produced by an association-list lookup.
added :: Maybe Integer
added = fmap (+ 3) (lookup 3 (zip [1, 2, 3] [4, 5, 6]))

y :: Maybe Integer
y = lookup 3 (zip [1, 2, 3] [4, 5, 6])

z :: Maybe Integer
z = lookup 2 (zip [1, 2, 3] [4, 5, 6])

-- | Pair the two lookups; Nothing if either is missing.
tupled :: Maybe (Integer, Integer)
tupled = do
  a <- y
  b <- z
  pure (a, b)

x' :: Maybe Int
x' = elemIndex 3 [1, 2, 3, 4, 5]

y' :: Maybe Int
y' = elemIndex 4 [1, 2, 3, 4, 5]

max' :: Int -> Int -> Int
max' = max

-- | Maximum of the two element indices, applicatively.
maxed :: Maybe Int
maxed = max' <$> x' <*> y'

xs = [1, 2, 3]
ys = [4, 5, 6]

x'' :: Maybe Integer
x'' = lookup 3 (zip xs ys)

y'' :: Maybe Integer
y'' = lookup 2 (zip xs ys)

-- | NOTE: 'sum' over a pair folds only the second component
-- (Foldable ((,) a)), so this yields the y'' value, not x'' + y''.
summed :: Maybe Integer
summed = sum <$> ((,) <$> x'' <*> y'')
|
prt2121/haskell-practice
|
ch6-11-17-25/src/Chapter17.hs
|
Haskell
|
apache-2.0
| 689
|
{-# LANGUAGE ScopedTypeVariables #-}
module Kernel.GPU.Hogbom ( cleanPrepare, cleanKernel ) where
import Control.Monad
import Data.Word
import Foreign.Marshal.Alloc
import Foreign.Storable ( sizeOf, peek )
import Foreign.C.Types
import Data
import Kernel.GPU.Common as CUDA
import Vector
type Peak = (Int, Int, Double)
-- | Prepare the PSF for cleaning: move its data onto the GPU so repeated
-- clean passes do not re-transfer it. The CleanPar argument is unused here.
cleanPrepare :: CleanPar -> Image -> IO Image
cleanPrepare _ psf = do
  -- Transfer PSF to GPU
  psfv <- toDeviceVector (imgData psf)
  let psf' = psf{ imgData = psfv }
  -- TODO: Cache peak?
  return psf'
-- | Hogbom CLEAN minor loop on the GPU: repeatedly find the residual
-- peak, subtract a scaled shifted PSF, and accumulate the subtracted
-- flux into a model image. Returns (residual, model). Stops after
-- 'cleanIter' iterations or when the peak drops below 'cleanThreshold'.
cleanKernel :: CleanPar -> Image -> Image -> IO (Image, Image)
cleanKernel cleanp dirty psf = do
  -- Check configuration - the peak find kernel requires a quadratic
  -- grid without gaps.
  let width = gridWidth (imgPar dirty)
  when (width /= gridPitch (imgPar dirty) || width /= gridHeight (imgPar dirty)) $
    fail "Cleaning kernel assumes quadratic grid without internal padding!"
  -- Furthermore, the reduction requires the field size to be a power of 2.
  let powers = map (2^) [1..32 :: Int]
  when (width `notElem` powers) $
    fail "Cleaning kernel requires a grid size that is a power of two!"
  -- Transfer images, if required
  dirtyv <- toDeviceVector (imgData dirty)
  let dirty' = dirty{ imgData = dirtyv }
  -- Allocate model (host-side; updated via peek/poke below)
  modelv <- allocCVector (2 * imageSize (imgPar dirty))
  let model = Image (imgPar dirty) 0 modelv
  -- Find peak in PSF (used to normalize the subtraction amplitude)
  (psfx, psfy, psfv) <- findPeak psf
  -- Minor cleaning loop; 'fuel' counts down the remaining iterations.
  let loop res 0 = return (res, model)
      loop res fuel = do
        -- Find peak in residual
        (resx, resy, resv) <- findPeak res
        -- Below threshold?
        if abs resv < cleanThreshold cleanp then do
          return (res, model)
        else do
          -- Subtract gain-scaled PSF centred on the residual peak
          let mval = cleanGain cleanp * resv / psfv
          res' <- subtractImg res psf (resx - psfx, resy - psfy) mval
          -- Update model at the peak position, loop
          let ix = resx + resy * width
          mpx <- peekVector modelv ix
          pokeVector modelv ix (mpx + mval)
          loop res' (fuel-1)
  loop dirty' (cleanIter cleanp)
foreign import ccall unsafe findPeak_init :: IO CInt
foreign import ccall unsafe "&" findPeak_512_e2 :: Fun
-- | Number of fixed-size blocks required to cover a data range:
-- ceiling division, assuming a positive block size.
blockCount :: Int -> Int -> Int
blockCount total per = (total + per - 1) `div` per
-- | Finds the position with the highest intensity in the image.
-- Runs the findPeak_512_e2 reduction kernel over the (square) grid and
-- reads the winning linear index and value back from the work area.
findPeak :: Image -> IO Peak
findPeak img = do
  -- Get data (must already live on the device)
  let DeviceVector _ imgp = imgData img
  -- Set up reduction kernel; findPeak_init's CInt is turned into a
  -- status enum and checked via nothingIfOk.
  nothingIfOk . toEnum . fromIntegral =<< findPeak_init
  sync
  -- Run
  let width = gridWidth $ imgPar img
      -- one (index, value) slot per block in the scratch area
      placeSize = sizeOf (undefined :: CULong) + sizeOf (undefined :: Double)
      blocks = blockCount width 1024
  CUDA.allocaArray (placeSize * blocks) $ \(workArea :: DevicePtr Word8) -> do
    launchKernel findPeak_512_e2
      (blocks, 1, 1) (512, 1, 1) (512 * fromIntegral placeSize) Nothing $
      mapArgs imgp workArea (width * width)
    sync
    -- Load final result value(s) from start of work area
    let peekd p = alloca (\p' -> peekArray 1 p p' >> peek p')
    pos <- peekd $ castDevPtr workArea :: IO Word64
    val <- peekd $ castDevPtr (workArea `plusDevPtr` sizeOf (undefined :: CULong))
    -- Decode the linear index into (x, y)
    return (fromIntegral (pos `mod` fromIntegral width),
            fromIntegral (pos `div` fromIntegral width),
            val)
foreign import ccall unsafe "&" subtract_psf_kernel :: Fun
-- | Subtract two images from each other at an offset and
-- multiplier. The first image parameter is the one getting updated.
subtractImg :: Image -> Image -> (Int, Int) -> Double -> IO Image
subtractImg res psf (x,y) gain = do
  -- Calculate data movement distance and amount
  resv@(DeviceVector _ resp) <- toDeviceVector (imgData res)
  let DeviceVector _ psfp = imgData psf
      width = gridWidth (imgPar res)
      diff = x + y * width
      -- NOTE(review): the pointer offsets are commented out; the offset
      -- 'diff' is instead passed to the kernel as an argument — confirm
      -- the kernel applies it internally.
      resp' = resp -- `plusDevPtr` max 0 diff
      psfp' = psfp -- `plusDevPtr` max 0 (-diff)
      -- overlap extents after shifting by (x, y)
      stopx = width - abs x
      stopy = width - abs y
      blockDim = 16
  -- Run the kernel over the overlap region in 16x16 thread blocks
  launchKernel subtract_psf_kernel
    (blockCount stopx blockDim, blockCount stopy blockDim, 1) (blockDim, blockDim, 1) 0 Nothing $
    mapArgs resp' psfp' gain diff stopx stopy width
  sync
  -- Done; result reuses the (updated in place) device vector
  return res{imgData=resv}
|
SKA-ScienceDataProcessor/RC
|
MS4/dna-programs/Kernel/GPU/Hogbom.hs
|
Haskell
|
apache-2.0
| 4,411
|
module Main where
import Network.Libre.TLS.FFI.Internal
main = putStrLn "hello"
|
cartazio/libressl-hs
|
tests/hunit.hs
|
Haskell
|
bsd-2-clause
| 81
|
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QMatrix.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:31
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Core.QMatrix (
QqMatrix(..)
,QqMatrix_nf(..)
,det
,inverted
,isIdentity
,isInvertible
,m11
,m12
,m21
,m22
,Qqmap(..), Qqqmap(..)
,QmapRect(..), QqmapRect(..)
,qmapToPolygon, mapToPolygon
,qMatrix_delete
)
where
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
class QqMatrix x1 where
qMatrix :: x1 -> IO (QMatrix ())
instance QqMatrix (()) where
qMatrix ()
= withQMatrixResult $
qtc_QMatrix
foreign import ccall "qtc_QMatrix" qtc_QMatrix :: IO (Ptr (TQMatrix ()))
instance QqMatrix ((QMatrix t1)) where
qMatrix (x1)
= withQMatrixResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix1 cobj_x1
foreign import ccall "qtc_QMatrix1" qtc_QMatrix1 :: Ptr (TQMatrix t1) -> IO (Ptr (TQMatrix ()))
instance QqMatrix ((Double, Double, Double, Double, Double, Double)) where
qMatrix (x1, x2, x3, x4, x5, x6)
= withQMatrixResult $
qtc_QMatrix2 (toCDouble x1) (toCDouble x2) (toCDouble x3) (toCDouble x4) (toCDouble x5) (toCDouble x6)
foreign import ccall "qtc_QMatrix2" qtc_QMatrix2 :: CDouble -> CDouble -> CDouble -> CDouble -> CDouble -> CDouble -> IO (Ptr (TQMatrix ()))
class QqMatrix_nf x1 where
qMatrix_nf :: x1 -> IO (QMatrix ())
instance QqMatrix_nf (()) where
qMatrix_nf ()
= withObjectRefResult $
qtc_QMatrix
instance QqMatrix_nf ((QMatrix t1)) where
qMatrix_nf (x1)
= withObjectRefResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix1 cobj_x1
instance QqMatrix_nf ((Double, Double, Double, Double, Double, Double)) where
qMatrix_nf (x1, x2, x3, x4, x5, x6)
= withObjectRefResult $
qtc_QMatrix2 (toCDouble x1) (toCDouble x2) (toCDouble x3) (toCDouble x4) (toCDouble x5) (toCDouble x6)
det :: QMatrix a -> (()) -> IO (Double)
det x0 ()
= withDoubleResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_det cobj_x0
foreign import ccall "qtc_QMatrix_det" qtc_QMatrix_det :: Ptr (TQMatrix a) -> IO CDouble
instance Qqdx (QMatrix a) (()) (IO (Double)) where
qdx x0 ()
= withDoubleResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_dx cobj_x0
foreign import ccall "qtc_QMatrix_dx" qtc_QMatrix_dx :: Ptr (TQMatrix a) -> IO CDouble
instance Qqdy (QMatrix a) (()) (IO (Double)) where
qdy x0 ()
= withDoubleResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_dy cobj_x0
foreign import ccall "qtc_QMatrix_dy" qtc_QMatrix_dy :: Ptr (TQMatrix a) -> IO CDouble
inverted :: QMatrix a -> (()) -> IO (QMatrix ())
inverted x0 ()
= withQMatrixResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_inverted cobj_x0
foreign import ccall "qtc_QMatrix_inverted" qtc_QMatrix_inverted :: Ptr (TQMatrix a) -> IO (Ptr (TQMatrix ()))
isIdentity :: QMatrix a -> (()) -> IO (Bool)
isIdentity x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_isIdentity cobj_x0
foreign import ccall "qtc_QMatrix_isIdentity" qtc_QMatrix_isIdentity :: Ptr (TQMatrix a) -> IO CBool
isInvertible :: QMatrix a -> (()) -> IO (Bool)
isInvertible x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_isInvertible cobj_x0
foreign import ccall "qtc_QMatrix_isInvertible" qtc_QMatrix_isInvertible :: Ptr (TQMatrix a) -> IO CBool
m11 :: QMatrix a -> (()) -> IO (Double)
m11 x0 ()
= withDoubleResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_m11 cobj_x0
foreign import ccall "qtc_QMatrix_m11" qtc_QMatrix_m11 :: Ptr (TQMatrix a) -> IO CDouble
m12 :: QMatrix a -> (()) -> IO (Double)
m12 x0 ()
= withDoubleResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_m12 cobj_x0
foreign import ccall "qtc_QMatrix_m12" qtc_QMatrix_m12 :: Ptr (TQMatrix a) -> IO CDouble
m21 :: QMatrix a -> (()) -> IO (Double)
m21 x0 ()
= withDoubleResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_m21 cobj_x0
foreign import ccall "qtc_QMatrix_m21" qtc_QMatrix_m21 :: Ptr (TQMatrix a) -> IO CDouble
m22 :: QMatrix a -> (()) -> IO (Double)
m22 x0 ()
= withDoubleResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_m22 cobj_x0
foreign import ccall "qtc_QMatrix_m22" qtc_QMatrix_m22 :: Ptr (TQMatrix a) -> IO CDouble
class Qqmap x1 xr where
qmap :: QMatrix a -> x1 -> xr
class Qqqmap x1 xr where
qqmap :: QMatrix a -> x1 -> xr
instance Qqmap ((Line)) (IO (Line)) where
qmap x0 (x1)
= withLineResult $ \cline_ret_x1 cline_ret_y1 cline_ret_x2 cline_ret_y2 ->
withObjectPtr x0 $ \cobj_x0 ->
withCLine x1 $ \cline_x1_x1 cline_x1_y1 cline_x1_x2 cline_x1_y2 ->
qtc_QMatrix_map7_qth cobj_x0 cline_x1_x1 cline_x1_y1 cline_x1_x2 cline_x1_y2 cline_ret_x1 cline_ret_y1 cline_ret_x2 cline_ret_y2
foreign import ccall "qtc_QMatrix_map7_qth" qtc_QMatrix_map7_qth :: Ptr (TQMatrix a) -> CInt -> CInt -> CInt -> CInt -> Ptr CInt -> Ptr CInt -> Ptr CInt -> Ptr CInt -> IO ()
instance Qqmap ((LineF)) (IO (LineF)) where
qmap x0 (x1)
= withLineFResult $ \clinef_ret_x1 clinef_ret_y1 clinef_ret_x2 clinef_ret_y2 ->
withObjectPtr x0 $ \cobj_x0 ->
withCLineF x1 $ \clinef_x1_x1 clinef_x1_y1 clinef_x1_x2 clinef_x1_y2 ->
qtc_QMatrix_map6_qth cobj_x0 clinef_x1_x1 clinef_x1_y1 clinef_x1_x2 clinef_x1_y2 clinef_ret_x1 clinef_ret_y1 clinef_ret_x2 clinef_ret_y2
foreign import ccall "qtc_QMatrix_map6_qth" qtc_QMatrix_map6_qth :: Ptr (TQMatrix a) -> CDouble -> CDouble -> CDouble -> CDouble -> Ptr CDouble -> Ptr CDouble -> Ptr CDouble -> Ptr CDouble -> IO ()
instance Qqmap ((Point)) (IO (Point)) where
qmap x0 (x1)
= withPointResult $ \cpoint_ret_x cpoint_ret_y ->
withObjectPtr x0 $ \cobj_x0 ->
withCPoint x1 $ \cpoint_x1_x cpoint_x1_y ->
qtc_QMatrix_map4_qth cobj_x0 cpoint_x1_x cpoint_x1_y cpoint_ret_x cpoint_ret_y
foreign import ccall "qtc_QMatrix_map4_qth" qtc_QMatrix_map4_qth :: Ptr (TQMatrix a) -> CInt -> CInt -> Ptr CInt -> Ptr CInt -> IO ()
instance Qqmap ((PointF)) (IO (PointF)) where
qmap x0 (x1)
= withPointFResult $ \cpointf_ret_x cpointf_ret_y ->
withObjectPtr x0 $ \cobj_x0 ->
withCPointF x1 $ \cpointf_x1_x cpointf_x1_y ->
qtc_QMatrix_map3_qth cobj_x0 cpointf_x1_x cpointf_x1_y cpointf_ret_x cpointf_ret_y
foreign import ccall "qtc_QMatrix_map3_qth" qtc_QMatrix_map3_qth :: Ptr (TQMatrix a) -> CDouble -> CDouble -> Ptr CDouble -> Ptr CDouble -> IO ()
instance Qqqmap ((QLine t1)) (IO (QLine ())) where
qqmap x0 (x1)
= withQLineResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_map7 cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_map7" qtc_QMatrix_map7 :: Ptr (TQMatrix a) -> Ptr (TQLine t1) -> IO (Ptr (TQLine ()))
instance Qqqmap ((QLineF t1)) (IO (QLineF ())) where
qqmap x0 (x1)
= withQLineFResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_map6 cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_map6" qtc_QMatrix_map6 :: Ptr (TQMatrix a) -> Ptr (TQLineF t1) -> IO (Ptr (TQLineF ()))
instance Qqmap ((QPainterPath t1)) (IO (QPainterPath ())) where
qmap x0 (x1)
= withQPainterPathResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_map5 cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_map5" qtc_QMatrix_map5 :: Ptr (TQMatrix a) -> Ptr (TQPainterPath t1) -> IO (Ptr (TQPainterPath ()))
instance Qqqmap ((QPoint t1)) (IO (QPoint ())) where
qqmap x0 (x1)
= withQPointResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_map4 cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_map4" qtc_QMatrix_map4 :: Ptr (TQMatrix a) -> Ptr (TQPoint t1) -> IO (Ptr (TQPoint ()))
instance Qqqmap ((QPointF t1)) (IO (QPointF ())) where
qqmap x0 (x1)
= withQPointFResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_map3 cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_map3" qtc_QMatrix_map3 :: Ptr (TQMatrix a) -> Ptr (TQPointF t1) -> IO (Ptr (TQPointF ()))
instance Qqmap ((QPolygon t1)) (IO (QPolygon ())) where
qmap x0 (x1)
= withQPolygonResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_map2 cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_map2" qtc_QMatrix_map2 :: Ptr (TQMatrix a) -> Ptr (TQPolygon t1) -> IO (Ptr (TQPolygon ()))
instance Qqmap ((QPolygonF t1)) (IO (QPolygonF ())) where
qmap x0 (x1)
= withQPolygonFResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_map1 cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_map1" qtc_QMatrix_map1 :: Ptr (TQMatrix a) -> Ptr (TQPolygonF t1) -> IO (Ptr (TQPolygonF ()))
instance Qqmap ((QRegion t1)) (IO (QRegion ())) where
qmap x0 (x1)
= withQRegionResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_map cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_map" qtc_QMatrix_map :: Ptr (TQMatrix a) -> Ptr (TQRegion t1) -> IO (Ptr (TQRegion ()))
class QmapRect x1 xr where
mapRect :: QMatrix a -> x1 -> xr
class QqmapRect x1 xr where
qmapRect :: QMatrix a -> x1 -> xr
instance QqmapRect ((QRect t1)) (IO (QRect ())) where
qmapRect x0 (x1)
= withQRectResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_mapRect cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_mapRect" qtc_QMatrix_mapRect :: Ptr (TQMatrix a) -> Ptr (TQRect t1) -> IO (Ptr (TQRect ()))
instance QqmapRect ((QRectF t1)) (IO (QRectF ())) where
qmapRect x0 (x1)
= withQRectFResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_mapRect1 cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_mapRect1" qtc_QMatrix_mapRect1 :: Ptr (TQMatrix a) -> Ptr (TQRectF t1) -> IO (Ptr (TQRectF ()))
instance QmapRect ((Rect)) (IO (Rect)) where
mapRect x0 (x1)
= withRectResult $ \crect_ret_x crect_ret_y crect_ret_w crect_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
withCRect x1 $ \crect_x1_x crect_x1_y crect_x1_w crect_x1_h ->
qtc_QMatrix_mapRect_qth cobj_x0 crect_x1_x crect_x1_y crect_x1_w crect_x1_h crect_ret_x crect_ret_y crect_ret_w crect_ret_h
foreign import ccall "qtc_QMatrix_mapRect_qth" qtc_QMatrix_mapRect_qth :: Ptr (TQMatrix a) -> CInt -> CInt -> CInt -> CInt -> Ptr CInt -> Ptr CInt -> Ptr CInt -> Ptr CInt -> IO ()
instance QmapRect ((RectF)) (IO (RectF)) where
mapRect x0 (x1)
= withRectFResult $ \crectf_ret_x crectf_ret_y crectf_ret_w crectf_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
withCRectF x1 $ \crectf_x1_x crectf_x1_y crectf_x1_w crectf_x1_h ->
qtc_QMatrix_mapRect1_qth cobj_x0 crectf_x1_x crectf_x1_y crectf_x1_w crectf_x1_h crectf_ret_x crectf_ret_y crectf_ret_w crectf_ret_h
foreign import ccall "qtc_QMatrix_mapRect1_qth" qtc_QMatrix_mapRect1_qth :: Ptr (TQMatrix a) -> CDouble -> CDouble -> CDouble -> CDouble -> Ptr CDouble -> Ptr CDouble -> Ptr CDouble -> Ptr CDouble -> IO ()
qmapToPolygon :: QMatrix a -> ((QRect t1)) -> IO (QPolygon ())
qmapToPolygon x0 (x1)
= withQPolygonResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QMatrix_mapToPolygon cobj_x0 cobj_x1
foreign import ccall "qtc_QMatrix_mapToPolygon" qtc_QMatrix_mapToPolygon :: Ptr (TQMatrix a) -> Ptr (TQRect t1) -> IO (Ptr (TQPolygon ()))
mapToPolygon :: QMatrix a -> ((Rect)) -> IO (QPolygon ())
mapToPolygon x0 (x1)
= withQPolygonResult $
withObjectPtr x0 $ \cobj_x0 ->
withCRect x1 $ \crect_x1_x crect_x1_y crect_x1_w crect_x1_h ->
qtc_QMatrix_mapToPolygon_qth cobj_x0 crect_x1_x crect_x1_y crect_x1_w crect_x1_h
foreign import ccall "qtc_QMatrix_mapToPolygon_qth" qtc_QMatrix_mapToPolygon_qth :: Ptr (TQMatrix a) -> CInt -> CInt -> CInt -> CInt -> IO (Ptr (TQPolygon ()))
instance Qreset (QMatrix a) (()) (IO ()) where
reset x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_reset cobj_x0
foreign import ccall "qtc_QMatrix_reset" qtc_QMatrix_reset :: Ptr (TQMatrix a) -> IO ()
instance QsetMatrix (QMatrix a) ((Double, Double, Double, Double, Double, Double)) where
setMatrix x0 (x1, x2, x3, x4, x5, x6)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_setMatrix cobj_x0 (toCDouble x1) (toCDouble x2) (toCDouble x3) (toCDouble x4) (toCDouble x5) (toCDouble x6)
foreign import ccall "qtc_QMatrix_setMatrix" qtc_QMatrix_setMatrix :: Ptr (TQMatrix a) -> CDouble -> CDouble -> CDouble -> CDouble -> CDouble -> CDouble -> IO ()
qMatrix_delete :: QMatrix a -> IO ()
qMatrix_delete x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QMatrix_delete cobj_x0
foreign import ccall "qtc_QMatrix_delete" qtc_QMatrix_delete :: Ptr (TQMatrix a) -> IO ()
|
uduki/hsQt
|
Qtc/Core/QMatrix.hs
|
Haskell
|
bsd-2-clause
| 13,057
|
{-# LANGUAGE RecordWildCards #-}
module NLP.Nerf2.Delta.Ref
( delta
, delta'
) where
import Data.List (foldl')
import NLP.Nerf2.Types
import NLP.Nerf2.Forest.Set
import NLP.Nerf2.Forest.Phi
import qualified NLP.Nerf2.Env as Env
-- | Sum of forest potentials over the B forest set for label @x@ at
-- position @i@.
delta :: Env.InSent e => e -> N -> Pos -> LogReal
delta env x = sumForests env . forestSetB env x
-- | Variant of 'delta' computed over the B' forest set.
delta' :: Env.InSent e => e -> N -> Pos -> LogReal
delta' env x = sumForests env . forestSetB' env x
-- | Strictly accumulate the potential of every forest in the list.
sumForests :: Env.InSent e => e -> [Forest] -> LogReal
sumForests env forests = foldl' step 0 forests
  where
    step acc f = acc + phiForest env f
|
kawu/nerf-proto
|
src/NLP/Nerf2/Delta/Ref.hs
|
Haskell
|
bsd-2-clause
| 548
|
{-# LANGUAGE DataKinds,UnboxedTuples,MagicHash,TemplateHaskell,RankNTypes,TupleSections #-}
module HLearn.Data.SpaceTree.Algorithms.NearestNeighbor
(
-- * data types
Neighbor (..)
, ValidNeighbor (..)
, NeighborList (..)
, nlSingleton
, getknnL
, nlMaxDist
, Param_k
, _k
-- * functions
, findAllNeighbors
, findAllNeighbors'
, findNeighborList
, findEpsilonNeighborListWith
)
where
import qualified Prelude as P
import Data.Strict.Tuple (Pair(..))
import SubHask
import SubHask.Algebra.Container
import SubHask.Compatibility.Containers
import SubHask.Compatibility.Vector
import SubHask.Compatibility.Vector.Lebesgue
import SubHask.Monad
import SubHask.TemplateHaskell.Deriving
import HLearn.Data.SpaceTree
import Data.Params
-------------------------------------------------------------------------------
-- | A single candidate result of a nearest-neighbor query: the data
-- point together with its (already computed) distance to the query.
data Neighbor dp = Neighbor
    { neighbor         :: !dp
    , neighborDistance :: !(Scalar dp)
-- { neighbor :: {-#UNPACK#-}!(L2 UnboxedVector Float)
-- , neighborDistance :: {-#UNPACK#-}!Float
    }

-- NOTE(review): the scalar of a Neighbor is declared Bool, not the
-- underlying numeric scalar -- presumably so comparisons yield Bool in
-- the SubHask hierarchy; confirm intent before changing.
type instance Scalar (Neighbor dp) = Bool
type instance Logic (Neighbor dp) = Bool

-- | Constraints a data point must satisfy to take part in knn queries.
type ValidNeighbor dp =
    ( Metric dp
    , Bounded (Scalar dp)
    , CanError (Scalar dp)
    , Logic dp~Bool
--     , dp ~ (L2 UnboxedVector Float)
    )

deriving instance (Read dp, Read (Scalar dp)) => Read (Neighbor dp)
deriving instance (Show dp, Show (Scalar dp)) => Show (Neighbor dp)

-- | Equality compares only the distances; the points themselves are ignored.
instance Eq (Scalar dp) => Eq_ (Neighbor dp) where
    {-# INLINE (==) #-}
    a == b = neighborDistance a == neighborDistance b

-- instance Ord (Scalar dp) => Ord (Neighbor dp) where
--     compare a b = compare (neighborDistance a) (neighborDistance b)

-- | Both fields are strict, so matching the constructor already forces
-- them; rnf therefore does no extra work (deep version kept below).
instance (NFData dp, NFData (Scalar dp)) => NFData (Neighbor dp) where
    rnf (Neighbor _ _) = ()
    -- rnf n = deepseq (neighbor n) $ rnf (neighborDistance n)
------------------------------------------------------------------------------
-- | The best neighbors found so far, ordered by increasing distance.
-- The type-level parameter @k@ bounds how many entries are kept.
data NeighborList (k :: Config Nat) dp
    = NL_Nil
    | NL_Cons {-#UNPACK#-}!(Neighbor dp) !(NeighborList k dp)
-- | NL_Err

-- Generates the Param_k machinery (viewParam / _k) for the k parameter.
mkParams ''NeighborList
-- | Recompute every stored distance relative to a new data point @p@,
-- keeping the stored points themselves unchanged.
resetNL :: ValidNeighbor dp => dp -> NeighborList k dp -> NeighborList k dp
resetNL p = go
    where
        go NL_Nil = NL_Nil
        go (NL_Cons (Neighbor q _) rest) =
            NL_Cons (Neighbor q (distance p q)) (go rest)
type instance Logic (NeighborList k dp) = Bool

deriving instance (Read dp, Read (Scalar dp)) => Read (NeighborList k dp)
deriving instance (Show dp, Show (Scalar dp)) => Show (NeighborList k dp)

-- | Only the spine is forced; elements are left unevaluated (the deep
-- version is kept commented out below).
instance (NFData dp, NFData (Scalar dp)) => NFData (NeighborList k dp) where
    rnf NL_Nil = ()
--     rnf NL_Err = ()
    rnf (NL_Cons n ns) = ()
--     rnf (NL_Cons n ns) = deepseq n $ rnf ns

-- | Structural equality: lists must pair up element by element.
instance (ValidNeighbor dp, Eq_ dp) => Eq_ (NeighborList k dp) where
    (NL_Cons x xs) == (NL_Cons y ys) = x==y && xs==ys
    NL_Nil == NL_Nil = True
--     NL_Err == NL_Err = True
    _ == _ = False
-- | QuickCheck-style invariant: distances in a 'NeighborList' must be
-- strictly increasing from head to tail.
property_orderedNeighborList :: (Logic dp~Bool, Metric dp) => NeighborList k dp -> Bool
property_orderedNeighborList NL_Nil = True
property_orderedNeighborList (NL_Cons _ NL_Nil) = True
property_orderedNeighborList (NL_Cons n (NL_Cons n2 ns)) =
    -- was: if cond then recurse else False; the && form is equivalent
    -- and short-circuits identically
    neighborDistance n < neighborDistance n2
        && property_orderedNeighborList (NL_Cons n2 ns)
{-# INLINE nlSingleton #-}
-- | Build a one-element 'NeighborList'.
nlSingleton ::
    ( ValidNeighbor dp
    ) => Neighbor dp -> NeighborList k dp
nlSingleton !n = NL_Cons n NL_Nil
-- {-# INLINE mkNeighborList #-}
-- mkNeighborList ::
-- ( ValidNeighbor dp
-- ) => dp -> Scalar dp -> NeighborList k dp
-- mkNeighborList !dp !dist = NL_Cons (Neighbor dp dist) NL_Nil
{-# INLINE getknnL #-}
-- | Flatten a 'NeighborList' into an ordinary list, preserving order.
getknnL ::
    ( ValidNeighbor dp
    ) => NeighborList k dp -> [Neighbor dp]
getknnL nl = case nl of
    NL_Nil         -> []
    NL_Cons n rest -> n : getknnL rest
{-# INLINE nlMaxDist #-}
-- | Distance of the worst (last) stored neighbor; 'maxBound' for an
-- empty list so that any real distance improves on it.
nlMaxDist ::
    ( ValidNeighbor dp
    ) => NeighborList k dp -> Scalar dp
nlMaxDist !nl = case nl of
    NL_Nil           -> maxBound
    NL_Cons n NL_Nil -> neighborDistance n
    NL_Cons _ rest   -> nlMaxDist rest
-- | NL_Nil doubles as the error value, so a pruned subtree simply
-- contributes an empty candidate list.
instance CanError (NeighborList k dp) where
    {-# INLINE errorVal #-}
    errorVal = NL_Nil
--     errorVal = NL_Err

    {-# INLINE isError #-}
    isError NL_Nil = True
--     isError NL_Err = True
    isError _ = False
-- | The empty list is the identity for the merge defined in 'Semigroup'.
instance
    -- ( KnownNat k
    ( ViewParam Param_k (NeighborList k dp)
    , Metric dp
    , Eq dp
    , ValidNeighbor dp
    ) => Monoid (NeighborList k dp)
        where
    {-# INLINE zero #-}
    zero = NL_Nil
-- | Merging two NeighborLists keeps the k closest neighbors overall.
-- Both inputs are assumed sorted by increasing distance; the merge walks
-- them in lockstep (like merge sort) and truncates after k elements.
instance
    ( ViewParam Param_k (NeighborList k dp)
    , Metric dp
    , Eq dp
    , ValidNeighbor dp
    ) => Semigroup (NeighborList k dp)
        where
    {-# INLINE (+) #-}
    -- nl1 + NL_Err = nl1
    -- NL_Err + nl2 = nl2
    nl1 + NL_Nil = nl1
    NL_Nil + nl2 = nl2
    nl1 + nl2 = {-# SCC notNiL #-} ret
        where
            ret = go nl1 nl2 (viewParam _k nl1)

            -- the counter hits 0 once k neighbors have been emitted
            go _ _ 0 = NL_Nil
            go (NL_Cons n1 ns1) (NL_Cons n2 ns2) k = if neighborDistance n1 > neighborDistance n2
                then NL_Cons n2 $ go (NL_Cons n1 ns1) ns2 (k-1)
                else NL_Cons n1 $ go ns1 (NL_Cons n2 ns2) (k-1)
            go NL_Nil (NL_Cons n2 ns2) k = NL_Cons n2 $ go NL_Nil ns2 (k-1)
            go (NL_Cons n1 ns1) NL_Nil k = NL_Cons n1 $ go ns1 NL_Nil (k-1)
            go NL_Nil NL_Nil k = NL_Nil
{-# INLINE nlAddNeighbor #-}
-- | Insert one neighbor by merging a singleton list; the Semigroup
-- merge keeps the result sorted and capped at k elements.
nlAddNeighbor :: forall k dp.
    ( ViewParam Param_k (NeighborList k dp)
    , ValidNeighbor dp
    ) => NeighborList k dp -> Neighbor dp -> NeighborList k dp
-- nlAddNeighbor NL_Nil n' = NL_Cons n' NL_Nil
-- nlAddNeighbor (NL_Cons n NL_Nil) n' = if neighborDistance n' > neighborDistance n
--     then NL_Cons n' NL_Nil
--     else NL_Cons n NL_Nil
nlAddNeighbor !nl !n = {-# SCC nlAddNeighbor #-} nl + NL_Cons n NL_Nil
-- mappend (NeighborList (x:.xs) ) (NeighborList (y:.ys) ) = {-# SCC mappend_NeighborList #-} case k of
-- 1 -> if x < y then NeighborList (x:.Strict.Nil) else NeighborList (y:.Strict.Nil)
-- otherwise -> NeighborList $ Strict.take k $ interleave (x:.xs) (y:.ys)
-- where
-- k=fromIntegral $ natVal (Proxy :: Proxy k)
--
-- interleave !xs Strict.Nil = xs
-- interleave Strict.Nil !ys = ys
-- interleave (x:.xs) (y:.ys) = case compare x y of
-- LT -> x:.(interleave xs (y:.ys))
-- GT -> y:.(interleave (x:.xs) ys)
-- EQ -> if neighbor x == neighbor y
-- then x:.interleave xs ys
-- else x:.(y:.(interleave xs ys))
-------------------------------------------------------------------------------
-- single tree
{-# INLINABLE findNeighborList #-}
-- | Exact nearest-neighbor query: no starting candidates and no epsilon
-- slack; see 'findEpsilonNeighborListWith'.
findNeighborList ::
    -- ( KnownNat k
    ( ViewParam Param_k (NeighborList k dp)
    , SpaceTree t dp
    , Eq dp
    , Floating (Scalar dp)
    , CanError (Scalar dp)
    , ValidNeighbor dp
    ) => t dp -> dp -> NeighborList k dp
findNeighborList !t !query = findEpsilonNeighborListWith zero zero t query
{-# INLINABLE findEpsilonNeighborListWith #-}
-- | Core single-tree knn search.  @knn@ seeds the candidate list and
-- @epsilon@ allows approximate answers: pruning bounds are shrunk by
-- @1/(1+epsilon)@, so a larger epsilon prunes more aggressively.
findEpsilonNeighborListWith ::
    -- ( KnownNat k
    ( ViewParam Param_k (NeighborList k dp)
    , SpaceTree t dp
    , Eq dp
    , Floating (Scalar dp)
    , CanError (Scalar dp)
    , ValidNeighbor dp
    ) => NeighborList k dp -> Scalar dp -> t dp -> dp -> NeighborList k dp
findEpsilonNeighborListWith !knn !epsilon !t !query =
    {-# SCC findEpsilonNeighborListWith #-}
    -- alternative traversal strategies kept for reference:
    -- prunefoldC (knn_catadp smudge query) knn t
    -- prunefoldB_CanError_sort query (knn_catadp smudge query) (knn_cata_dist smudge query) knn t
    prunefoldB_CanError (knn_catadp smudge query) (knn_cata smudge query) knn t
    -- prunefoldD (knn_catadp smudge query) (knn_cata2 smudge query) knn t
    where
        smudge = 1/(1+epsilon)
{-# INLINABLE knn_catadp #-}
-- {-# INLINE knn_catadp #-}
-- | Fold step for a single data point: add it to the candidate list
-- unless it is the query itself (dist==0) or farther than the current
-- pruning bound.
knn_catadp :: forall k dp.
    -- ( KnownNat k
    ( ViewParam Param_k (NeighborList k dp)
    , Metric dp
    , Eq dp
    , CanError (Scalar dp)
    , ValidNeighbor dp
    ) => Scalar dp -> dp -> dp -> NeighborList k dp -> NeighborList k dp
knn_catadp !smudge !query !dp !knn = {-# SCC knn_catadp #-}
    -- dist==0 is equivalent to query==dp,
    -- but we have to calculate dist anyways so it's faster
    if dist==0 || dist>bound
    -- if dist==0 || isError dist
        then knn
        else nlAddNeighbor knn $ Neighbor dp dist
    where
        -- distanceUB may stop early once the bound is exceeded
        dist = distanceUB dp query bound
        bound = smudge*nlMaxDist knn
--         dist = isFartherThanWithDistanceCanError dp query
--              $ nlMaxDist knn * smudge
-- {-# INLINABLE knn_cata #-}
{-# INLINE knn_cata #-}
-- | Fold step for a whole subtree node; returns 'errorVal' to signal
-- that the subtree can be pruned entirely.
knn_cata :: forall k t dp.
    ( ViewParam Param_k (NeighborList k dp)
    , SpaceTree t dp
    , Floating (Scalar dp)
    , Eq dp
    , CanError (Scalar dp)
    , ValidNeighbor dp
    ) => Scalar dp -> dp -> t dp -> NeighborList k dp -> NeighborList k dp
knn_cata !smudge !query !t !knn = {-# SCC knn_cata #-}
    if dist==0
        -- the node is the query point itself: seed an empty candidate
        -- list with a maxBound placeholder, otherwise keep what we have
        then if isError knn
            then nlSingleton $ Neighbor (stNode t) maxBound
            else knn
        else if isError dist
            then errorVal
            else nlAddNeighbor knn $ Neighbor (stNode t) dist
    where
        dist = stIsMinDistanceDpFartherThanWithDistanceCanError t query
             $ nlMaxDist knn * smudge
{-# INLINABLE prunefoldB_CanError_sort #-}
-- | Tree fold that visits children in approximately nearest-first order
-- (via 'qsortHalf'), pruning a subtree whenever @f2@ returns an error
-- value.  Leaves are folded with @f1@ before recursing into children.
prunefoldB_CanError_sort ::
    ( SpaceTree t a
    , ValidNeighbor a
    , b ~ NeighborList k a
    , ClassicalLogic a
    , CanError (Scalar a)
    , Bounded (Scalar a)
    ) =>
    a -> (a -> b -> b) -> (Scalar a -> t a -> b -> b) -> b -> t a -> b
prunefoldB_CanError_sort !query !f1 !f2 !b !t = {-# SCC prunefoldB_CanError_sort #-}
    go ( distance (stNode t) query :!: t ) b
    where
        go !( dist :!: t ) !b = if isError res
            then b
            else foldr' go b'' children'
            where
                res = f2 dist t b
                b'' = foldr' f1 res (stLeaves t)

                -- order children so nearer subtrees are explored first;
                -- an approximate order is sufficient here
                children'
                    = {-# SCC children' #-} qsortHalf (\( d1 :!: _ ) ( d2 :!: _ ) -> compare d2 d1)
                    $ map (\x -> ( stIsMinDistanceDpFartherThanWithDistanceCanError x query maxdist
                                 :!: x ))
--                     $ map (\x -> ( distanceUB (stNode x) query (lambda t+maxdist), x ))
--                     $ map (\x -> ( distance (stNode x) query , x ))
                    $ toList
                    $ stChildren t

                maxdist = nlMaxDist b''
-- | This is a version of quicksort that only descends on its lower half.
-- That is, it only "approximately" sorts a list.
-- It is modified from http://en.literateprograms.org/Quicksort_%28Haskell%29
--
-- Elements greater than the pivot are appended unsorted; only the lower
-- partition is recursively processed.
{-# INLINABLE qsortHalf #-}
qsortHalf :: (a -> a -> Ordering) -> [a] -> [a]
qsortHalf !cmp !x = {-# SCC qsortHalf #-} go x []
    where
        go [] !y = y
        go [x] !y = x:y
        -- NOTE: the inner x shadows the outer argument; it is the pivot
        go (x:xs) !y = part xs [] [x] []
            where
                part [] !l !e !g = go l (e ++ g ++ y)
                part (z:zs) !l !e !g = case cmp z x of
                    GT -> part zs l e (z:g)
                    LT -> part zs (z:l) e g
                    EQ -> part zs l (z:e) g
{-# INLINABLE knn_cata_dist #-}
-- | Variant of 'knn_cata' that receives the (possibly error-valued)
-- node distance precomputed by the caller instead of measuring it here.
-- NOTE(review): smudge is only used by the commented-out bound below;
-- it is kept for interface parity with 'knn_cata'.
knn_cata_dist :: forall k t dp.
    ( ViewParam Param_k (NeighborList k dp)
    , SpaceTree t dp
    , Floating (Scalar dp)
    , Eq dp
    , CanError (Scalar dp)
    , ValidNeighbor dp
    ) => Scalar dp -> dp -> Scalar dp -> t dp -> NeighborList k dp -> NeighborList k dp
knn_cata_dist !smudge !query !dist !t !knn = {-# SCC knn_cata #-}
    if dist==0
        then if isError knn
            then nlSingleton $ Neighbor (stNode t) maxBound
            else knn
--         else if dist - lambda t > nlMaxDist knn * smudge -- isError dist
        else if isError dist
            then errorVal
            else nlAddNeighbor knn $ Neighbor (stNode t) dist
--     where
--         dist = stIsMinDistanceDpFartherThanWithDistanceCanError t query
--              $ nlMaxDist knn * smudge
---------------------------------------
{-# INLINABLE findAllNeighbors #-}
-- | Run an (epsilon-approximate) knn query for every point in @qs@
-- against the reference tree, collecting (query, neighbors) pairs.
findAllNeighbors :: forall k dp t.
    ( ViewParam Param_k (NeighborList k dp)
    , SpaceTree t dp
    , NFData (Scalar dp)
    , NFData dp
    , Floating (Scalar dp)
    , CanError (Scalar dp)
    , ValidNeighbor dp
    ) => Scalar dp -> t dp -> [dp] -> Seq (dp,NeighborList k dp)
findAllNeighbors epsilon rtree qs = reduce $ map
    (\dp -> singleton (dp, findEpsilonNeighborListWith zero epsilon rtree dp))
    qs
{-# INLINABLE findAllNeighbors' #-}
-- | Like 'findAllNeighbors' but packaging each result as a 'Labeled''
-- and building the sequence with 'fromList' instead of 'reduce'.
findAllNeighbors' :: forall k dp t.
    ( ViewParam Param_k (NeighborList k dp)
    , SpaceTree t dp
    , NFData (Scalar dp)
    , NFData dp
    , Floating (Scalar dp)
    , CanError (Scalar dp)
    , ValidNeighbor dp
    ) => Scalar dp -> t dp -> [dp] -> Seq (Labeled' dp (NeighborList k dp))
findAllNeighbors' epsilon rtree qs = fromList $ map
    (\dp -> mkLabeled' dp $ findEpsilonNeighborListWith zero epsilon rtree dp)
    qs
-- findAllNeighbors' epsilon rtree qs = reduce $ map
--     (\dp -> singleton $ mkLabeled' dp $ findEpsilonNeighborListWith zero epsilon rtree dp)
--     qs
-- | Pair a label with a value; a bare synonym for the 'Labeled''
-- constructor.
mkLabeled' :: x -> y -> Labeled' x y
mkLabeled' = Labeled'
|
ehlemur/HLearn
|
src/HLearn/Data/SpaceTree/Algorithms/NearestNeighbor.hs
|
Haskell
|
bsd-3-clause
| 13,421
|
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ExistentialQuantification #-}
module TestModule where
-- Product
data MyProd = MyProd Bool Int
type MyProdAlias = MyProd
-- Strict product
data MyStrict = MyStrict !Bool !Int
-- Polymorphic
data MyPoly a = MyPoly a
type MyPolyAlias = MyPoly Int
-- Regular datatype
data List a = Nil | Cons a (List a)
-- Mutual recursive datatypes
data MutRecA a = MRANill a | MutRecA (MutRecB a)
data MutRecB b = MRBNill b | MutRecB (MutRecA b)
-- Nested datatype: the type argument grows at every level of recursion
data Perfect a = Perfect (Perfect (a,a))
-- Existential: the contained type is hidden from consumers
data Exist = forall a. Exist a
-- GADTs: each constructor fixes the index of 'Expr'
data Expr a where
    I   :: Int -> Expr Int
    B   :: Bool -> Expr Bool
    Add :: Expr Int -> Expr Int -> Expr Int
    Mul :: Expr Int -> Expr Int -> Expr Int
    Eq  :: Expr Int -> Expr Int -> Expr Bool
-- Newtype
newtype Foo = Foo Int
-- Assorted shapes: multi-constructor tree, parameterised tree,
-- list-of-pairs wrapper, functor composition, higher-order fixpoint
data Tree = Empty | Leaf Int | Node Tree Tree Tree
data TTree a = Tip Int | Branch (TTree a) a (TTree a)
data Toeplitz a = Toeplitz a [(a,a)]
data Comp f g a = C (f (g a))
data HFix f a = Hln (f (HFix f) a)
|
norm2782/DGG
|
examples/testmodule.hs
|
Haskell
|
bsd-3-clause
| 1,083
|
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE IncoherentInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE UndecidableInstances #-}
module Language.Rsc.Pretty.Types (
) where
import qualified Data.HashMap.Strict as HM
import qualified Data.Map.Strict as M
import Language.Fixpoint.Misc (intersperse)
import qualified Language.Fixpoint.Types as F
import Language.Rsc.Pretty.Common
import Language.Rsc.Program
import Language.Rsc.Typecheck.Subst
import Language.Rsc.Typecheck.Types
import Language.Rsc.Types
import Text.PrettyPrint.HughesPJ
-- | Wrap a document in angle brackets (used for type-argument lists).
angles p = langle <> p <> rangle
langle = char '<'
rangle = char '>'

-- Basic PP instances for primitive types.
instance PP Bool where
    pp True  = text "True"
    pp False = text "False"

instance PP () where
    pp _ = text ""

instance PP a => PP (Maybe a) where
    pp = maybe (text "Nothing") pp

instance PP Char where
    pp = char
-- | Pretty-print the refinement type language; refinements are attached
-- with 'F.ppTy'.  One clause per constructor of 'RTypeQ'.
instance (F.Reftable r, PP r) => PP (RTypeQ q r) where
    pp (TPrim c r)    = F.ppTy r $ pp c
    pp (TVar α r)     = F.ppTy r $ pp α
    -- an empty union renders as bottom
    pp (TOr [] _)     = pp (TPrim TBot ())
    pp (TOr (t:ts) r) = F.ppTy r $ sep $ pp t : map ((text "+" <+>) . pp) ts
    pp (TAnd ts)      = vcat [text "/\\" <+> pp t | t <- ts]
    -- mutability-related references get their own short rendering
    pp t@TRef{} | mutRelated t = ppMut t
    pp (TRef t r)     = F.ppTy r (pp t)
    pp (TObj m ms r)  = parens (pp (toType m)) <+> ppBody ms r
        where
            ppBody ms r = F.ppTy r (hsep [lbrace, nest 2 (pp ms), rbrace])
    pp (TClass t)     = text "class" <+> pp t
    pp (TMod t)       = text "module" <+> pp t
    pp t@(TAll _ _)   = ppArgs angles comma αs <> pp t' where (αs, t') = bkAll t
    pp (TFun xts t _) = ppArgs parens comma xts <+> text "=>" <+> pp t
    pp (TExp e)       = pprint e

-- | Render a mutability modifier as its short two-letter tag; anything
-- unrecognised falls through to the "MUT???" marker.
ppMut t@TRef{} | isUQ t = pp "UQ"
               | isIM t = pp "IM"
               | isAF t = pp "AF"
               | isRO t = pp "RO"
               | isMU t = pp "MU"
ppMut (TVar v _ ) = pp v
ppMut _           = pp "MUT???"
-- | Pretty-print a type-member record: fields, static fields, call and
-- constructor signatures, then the two index signatures, stacked.
instance (F.Reftable r, PP r) => PP (TypeMembersQ q r) where
    pp (TM ms sms cs cts sidx nidx)
        = ppMem ms $+$
          ppSMem sms $+$
          ppCall cs $+$
          ppCtor cts $+$
          ppSIdx sidx $+$
          ppNIdx nidx

ppMem  = sep . map (\(_, f) -> pp f <> semi) . F.toListSEnv
ppSMem = sep . map (\(_, f) -> pp "static" <+> pp f <> semi) . F.toListSEnv

-- call/constructor signatures are optional; print nothing when absent
ppCall optT | Just t <- optT = pp t <> semi | otherwise = empty
ppCtor optT | Just t <- optT = pp "new" <+> pp t <> semi | otherwise = empty

ppSIdx (Just t) = pp "[x: string]:" <+> pp t <> semi
ppSIdx _        = empty
ppNIdx (Just t) = pp "[x: number]:" <+> pp t <> semi
ppNIdx _        = empty
-- | Fields print as @(mut) name?: type@; methods print one line per
-- overload, prefixed with '@' and their mutability tag.
instance PPR r => PP (TypeMemberQ q r) where
    pp (FI s o m t) = parens (pp (toType m)) <+> pp s <> pp o <> colon <+> pp t
    pp (MI s o mts) = vcat (map (\(m,t) -> char '@' <> pp (toType m) <+> pp s <> pp o <> pp t) mts)

instance PP Optionality where
    pp Opt = text "?"
    pp Req = empty

-- | Generic application; the first type argument is the mutability.
instance (F.Reftable r, PP r) => PP (TGenQ q r) where
    pp (Gen x [])     = pp x
    pp (Gen x (m:ts)) = pp x <> angles (intersperse comma (ppMut m : map pp ts))

instance (F.Reftable r, PP r) => PP (BTGenQ q r) where
    pp (BGen x []) = pp x
    pp (BGen x ts) = pp x <> ppArgs angles comma ts

instance PP TVar where
    pp = pprint . F.symbol

-- | Bounded type variables show their upper bound when present.
instance (F.Reftable r, PP r) => PP (BTVarQ q r) where
    pp (BTV v _ (Just t)) = pprint v <+> text "<:" <+> pp t
    pp (BTV v _ _       ) = pprint v
-- | Primitive types render with their TypeScript-style keywords.
instance PP TPrim where
    pp TString     = text "string"
    pp (TStrLit s) = text "\"" <> text s <> text "\""
    pp TNumber     = text "number"
    pp TReal       = text "real"
    pp TBoolean    = text "boolean"
    pp TBV32       = text "bitvector32"
    pp TVoid       = text "void"
    pp TUndefined  = text "undefined"
    pp TNull       = text "null"
    pp TBot        = text "_|_"
    pp TTop        = text "Top"
    pp TAny        = text "any"
    pp TFPBool     = text "_bool_"

-- | A binding prints as @name?: type@ (optionality between name and colon).
instance (PP r, F.Reftable r) => PP (BindQ q r) where
    pp (B x o t) = pp x <> pp o <> colon <+> pp t

instance (PP s, PP t) => PP (M.Map s t) where
    pp m = vcat $ pp <$> M.toList m

instance PP Locality where
    pp Exported = text "Exported"
    pp Local    = text "Local"

instance PP Assignability where
    pp Ambient      = text "Ambient"
    pp RdOnly       = text "ReadOnly"
    pp WriteLocal   = text "WriteLocal"
    pp ForeignLocal = text "ForeignLocal"
    pp WriteGlobal  = text "WriteGlobal"
    pp ReturnVar    = text "ReturnVar"
-- | A type declaration prints its signature, its predicate, and its
-- members inside braces.
instance (PP r, F.Reftable r) => PP (TypeDeclQ q r) where
    pp (TD s p m) = pp s $+$ text "pred" <+> pp p $+$
                    lbrace $+$ nest 4 (pp m) $+$ rbrace

instance (PP r, F.Reftable r) => PP (TypeSigQ q r) where
    pp (TS k n h) = pp k <+> pp n <+> ppHeritage h

instance PP TypeDeclKind where
    pp InterfaceTDK = text "interface"
    pp ClassTDK     = text "class"

ppHeritage (es,is) = ppExtends es <+> ppImplements is

-- only the first extended type is shown even if the list is longer
ppExtends []    = text ""
ppExtends (n:_) = text "extends" <+> pp n

ppImplements [] = text ""
ppImplements ts = text "implements" <+> intersperse comma (pp <$> ts)

instance PP EnumDef where
    pp (EnumDef n m) = pp n <+> braces (pp m)

instance PP IContext where
    pp (IC x) = text "Context: " <+> pp x

-- | Aliases: type- and value-parameter lists are omitted entirely when
-- empty (see the local withnull helper).
instance (PP a, PP s, PP t) => PP (Alias a s t) where
    pp (Alias n αs xs body) = text "alias" <+> pp n <> withnull angles comma αs <>
                              withnull brackets comma xs <+> text "=" <+> pp body
        where
            withnull _ _ [] = empty
            withnull s p xs = s $ intersperse p (map pp xs)
-- | Whole-program printer: alias/invariant sections first, then the code.
instance (PP r, F.Reftable r) => PP (Rsc a r) where
    pp pgm@(Rsc {code = (Src s) }) = extras
                                 $+$ text "\n// CODE"
                                 $+$ pp s
        where
            extras = -- text "\nCONSTANTS" $+$ nest 4 (pp (consts pgm)) $+$
                     text "\nPREDICATE ALIASES" $+$ nest 4 (pp (pAlias pgm))
                 $+$ text "\nTYPE ALIASES" $+$ nest 4 (pp (tAlias pgm))
                 -- $+$ text "\nQUALIFIERS" $+$ nest 4 (vcat (F.toFix <$> take 3 (pQuals pgm)))
                 -- $+$ text "\nQUALIFIERS" $+$ nest 4 (vcat (F.toFix <$> pQuals pgm))
                 -- $+$ text "..."
                 $+$ text "\nINVARIANTS" $+$ nest 4 (vcat (pp <$> invts pgm))

-- | Substitutions: compact on one line when small, vertical otherwise.
instance (F.Reftable r, PP r) => PP (RSubst r) where
    pp (Su m) | HM.null m      = text "empty"
              | HM.size m < 10 = intersperse comma $ (ppBind <$>) $ HM.toList m
              | otherwise      = vcat $ (ppBind <$>) $ HM.toList m

ppBind (x, t) = pp x <+> text ":=" <+> pp t

-- | PP Fixpoint
instance PP F.Sort where
    pp = pprint

instance PP (F.SortedReft) where
    pp (F.RR s b) = braces (pp s <+> text "|" <+> pp b)

instance PP F.Reft where
    pp = pprint

instance PP (F.SubC c) where
    pp s = parens (pp (F.slhs s)) <+> text " => " <+> pp (F.srhs s)
|
UCSD-PL/RefScript
|
src/Language/Rsc/Pretty/Types.hs
|
Haskell
|
bsd-3-clause
| 7,069
|
{-|
Module : Control.Lens.Extra
Description : Extra utility functions for working with lenses.
Copyright : (c) Henry J. Wylde, 2016
License : BSD3
Maintainer : public@hjwylde.com
Extra utility functions for working with lenses.
-}
{-# LANGUAGE Rank2Types #-}
module Control.Lens.Extra (
module Control.Lens,
-- * Folds
is, isn't, hasuse, hasn'tuse,
-- * Traversals
filteredBy,
) where
import Control.Lens hiding (isn't, filteredBy)
import Control.Monad.State
import Data.Monoid
-- | The counter-part to 'isn't', but more general as it takes a 'Getting' instead.
-- (All four definitions below are thin synonyms kept for readability.)
--
-- @'is' = 'has'@
is :: Getting Any s a -> s -> Bool
is = has

-- | A re-write of 'Control.Lens.Prism.isn't' to be more general by taking a 'Getting' instead.
--
-- @'isn't' = 'hasn't'@
isn't :: Getting All s a -> s -> Bool
isn't = hasn't

-- | Check to see if this 'Fold' or 'Traversal' matches 1 or more entries in the current state.
--
-- @'hasuse' = 'gets' . 'has'@
hasuse :: MonadState s m => Getting Any s a -> m Bool
hasuse = gets . has

-- | Check to see if this 'Fold' or 'Traversal' has no matches in the current state.
--
-- @'hasn'tuse' = 'gets' . 'hasn't'@
hasn'tuse :: MonadState s m => Getting All s a -> m Bool
hasn'tuse = gets . hasn't
-- | A companion to 'filtered' that, rather than taking a predicate,
-- targets only those values whose view through the given lens equals
-- the supplied value.
filteredBy :: Eq b => Lens' a b -> b -> Traversal' a a
filteredBy l target = filtered (\s -> view l s == target)
|
hjwylde/werewolf
|
src/Control/Lens/Extra.hs
|
Haskell
|
bsd-3-clause
| 1,491
|
module ETCS.SDM.Intern where
import Control.Lens hiding ((*~), _2)
import ETCS.SDM.Helper
import ETCS.SDM.Types
import Numeric.Units.Dimensional.TF.Prelude
import Prelude ()
-- | Sanity check for the conversion inputs: speed in [0,200] km/h,
-- breaking percentage in (30,250], and train length positive and below
-- the per-break-position maximum (presumably the ERA conversion-model
-- validity limits -- confirm against the spec).
-- NOTE(review): the name keeps the original spelling for API stability.
validConvertion :: (HasConvertingBreakingModelInput i f, RealFloat f) => i -> Bool
validConvertion i =
    let v = i ^. bmiMaxVelocity
        bp = i ^. bmiBreakingPercentage
        l = i ^. bmiTrainLength
        lmax = case (i ^. bmiBreakPosition) of
            PassangerTrainP -> 900 *~ meter
            FreightTrainG -> 1500 *~ meter
            FreightTrainP -> 1500 *~ meter
    in (0 *~ kmh <= v) && (v <= 200 *~ kmh) &&
       (30 *~ one < bp ) && (bp <= 250 *~ one) &&
       (0 *~ meter < l) && (l <= lmax)
-- | Convert a breaking percentage into a concrete breaking model:
-- deceleration curves from 'basicDeceleration' plus build-up times for
-- emergency and service braking at this break position and length.
breakingModelConverter'
    :: (HasConvertingBreakingModelInput i f, RealFloat f, Floating f) => i ->
       ConvertedBreakingModel f
breakingModelConverter' i =
    let (ea, sa) = basicDeceleration $ i ^. bmiBreakingPercentage
        bpos = i ^. bmiBreakPosition
        l = i ^. bmiTrainLength
    in ConvertedBreakingModel {
        _cbmBreakingModelInput = i ^. convertingBreakingModelInput,
        _cbmBreakingModel =
            BreakingModelBase (ea, sa, t_brake_emergency_cm bpos l
                              , t_brake_service_cm bpos l)
        }
-- | Basic deceleration curves (emergency, service) from the breaking
-- percentage lambda.  The service curve caps lambda at 135.  Below
-- 'v_lim' the constant 'ad_0' applies; above it a cubic polynomial in
-- lambda whose coefficients depend on the speed segment n.
basicDeceleration :: (RealFloat f, Floating f) =>
    BreakingPercentage f -> (A_Break f, A_Break f)
basicDeceleration lambda =
    let l0_emergency = lambda
        l0_service = min (135.0 *~ one) lambda
        ad_n l0 v =
            let vlim = v_lim l0
                (a3_n, a2_n, a1_n, a0_n) = a_n_ms n
                n = nfromV vlim v
            in if (v <= vlim) then (ad_0 l0)
               else a3_n * (l0 ** (3 *~ one)) + a2_n * (l0 ** (2 *~ one)) +
                    a1_n * l0 + a0_n
    in ( ad_n l0_emergency, ad_n l0_service )
-- | Service-brake build-up time for the converted model.
t_brake_service_cm :: (RealFloat f, Floating f) =>
    BreakPosition -> Length f -> Velocity f -> Time f
t_brake_service_cm = t_brake_cm t_brake_basic_sb

-- | Emergency-brake build-up time for the converted model.
t_brake_emergency_cm :: (RealFloat f, Floating f) =>
    BreakPosition -> Length f -> Velocity f -> Time f
t_brake_emergency_cm = t_brake_cm t_brake_basic_eb
-- | Base deceleration for speeds at or below 'v_lim': linear in lambda.
ad_0 :: (RealFloat f, Floating f) => BreakingPercentage f -> Acceleration f
ad_0 l0 = a * l0 + b
    where a = 0.0075 *~ ms2
          b = 0.076 *~ ms2

-- | Speed threshold below which 'ad_0' applies; a power law in lambda.
v_lim :: (RealFloat f, Floating f) => BreakingPercentage f -> Velocity f
v_lim l0 = ((l0 ** y)) * x
    where x = 16.85 *~ kmh
          y = 0.428 *~ one

-- | Polynomial coefficients (a3,a2,a1,a0) for speed segment n in 1..5.
-- Partial: any other n is a programming error (see 'nfromV').
a_n_ms :: (RealFloat f, Floating f) => Int -> ( Acceleration f, Acceleration f
                                              , Acceleration f, Acceleration f)
a_n_ms 1 = ((-6.30e-7) *~ ms2, 6.10e-5 *~ ms2, 4.72e-3 *~ ms2, 0.0663 *~ ms2)
a_n_ms 2 = ( 2.73e-7 *~ ms2, (-4.54e-6) *~ ms2, 5.13e-3 *~ ms2, 0.1300 *~ ms2)
a_n_ms 3 = ( 5.58e-8 *~ ms2, (-6.76e-6) *~ ms2, 5.81e-3 *~ ms2, 0.0479 *~ ms2)
a_n_ms 4 = ( 3.00e-8 *~ ms2, (-3.85e-6) *~ ms2, 5.52e-3 *~ ms2, 0.0480 *~ ms2)
a_n_ms 5 = ( 3.23e-9 *~ ms2, 1.66e-6 *~ ms2, 5.06e-3 *~ ms2, 0.0559 *~ ms2)
a_n_ms _ = error "a_n_ms called for undefined n"
-- | Select the polynomial segment n (1..5) from the pair (v_lim, v).
-- The guards tile the (vlim, v) plane by 100/120/150/180 km/h bands;
-- the otherwise-clause fires only for inputs outside every band.
nfromV :: (RealFloat f, Floating f) => Velocity f -> Velocity f -> Int
nfromV vlim v
    | ((vlim < v) && (v <= 100 *~ kmh) &&
       (vlim <= 100 *~ kmh)) = 1
    | ((vlim < v) && (v <= 120 *~ kmh) &&
       (100 *~ kmh < vlim) && (vlim <= 120 *~ kmh)) = 2
    | ((100 *~ kmh < v) && (v <= 120 *~ kmh) &&
       (vlim <= 100 *~ kmh)) = 2
    | ((vlim < v) && (v <= 150 *~ kmh) &&
       (120 *~ kmh < vlim) && (vlim <= 150 *~ kmh)) = 3
    | ((120 *~ kmh < v) && (v <= 150 *~ kmh) &&
       (vlim <= 120 *~ kmh)) = 3
    | ((vlim < v) && (v <= 180 *~ kmh) &&
       (150 *~ kmh < vlim) && (vlim <= 180 *~ kmh)) = 4
    | ((150 *~ kmh < v) && (v <= 180 *~ kmh) &&
       (vlim <= 150 *~ kmh)) = 4
    | ((vlim < v) && (vlim > 180 *~ kmh)) = 5
    | ((180 *~ kmh < v) && (vlim <= 180 *~ kmh)) = 5
    | otherwise = error "nfromV: undefined range"
-- | Quadratic-in-length build-up time: @a + b*l + c*l^2@ where @l@ is
-- the (possibly clamped via @fl@) train length in units of 100 m.
t_brake_basic :: (RealFloat f, Floating f) =>
    (Length f -> Length f) ->
    Length f -> Time f -> Time f -> Time f -> Time f
t_brake_basic fl l' a b c = a + (b * l) + (c * (l ** _2))
    where l = (fl l') / (100 *~ meter)
-- | Emergency build-up time coefficients.  Trains over 900 m switch to
-- a different coefficient set; P-position formulas clamp the length to
-- at least 400 m.
t_brake_basic_eb :: (RealFloat f, Floating f) => BreakPosition -> Length f -> Time f
t_brake_basic_eb PassangerTrainP l =
    t_brake_basic (max (400 *~ meter))
        l (2.30 *~ second) (0 *~ second) (0.17 *~ second)
t_brake_basic_eb FreightTrainP l
    | l <= 900 *~ meter =
        t_brake_basic (max (400 *~ meter))
            l (2.30 *~ second) (0 *~ second) (0.17 *~ second)
    | otherwise =
        t_brake_basic (max (400 *~ meter))
            l ((-0.4) *~ second) (1.6 *~ second) (0.03 *~ second)
t_brake_basic_eb FreightTrainG l
    | l <= 900 *~ meter =
        t_brake_basic id
            l (12.0 *~ second) (0 *~ second) (0.05 *~ second)
    | otherwise =
        t_brake_basic id
            l ((-0.4) *~ second) (1.6 *~ second) (0.03 *~ second)
-- | Service build-up time coefficients, same structure as
-- 't_brake_basic_eb' but with service-brake constants; here the
-- G-position formulas are the ones that clamp to at least 400 m.
t_brake_basic_sb :: (RealFloat f, Floating f) => BreakPosition -> Length f -> Time f
t_brake_basic_sb PassangerTrainP l =
    t_brake_basic id
        l (3.00 *~ second) (1.5 *~ second) (0.1 *~ second)
t_brake_basic_sb FreightTrainP l
    | l <= 900 *~ meter =
        t_brake_basic id
            l (3 *~ second) (2.77 *~ second) (0 *~ second)
    | otherwise =
        t_brake_basic id
            l (10.5 *~ second) (0.32 *~ second) (0.18 *~ second)
t_brake_basic_sb FreightTrainG l
    | l <= 900 *~ meter =
        t_brake_basic (max (400 *~ meter))
            l (3 *~ second) (2.77 *~ second) (0 *~ second)
    | otherwise =
        t_brake_basic (max (400 *~ meter))
            l (10.5 *~ second) (0.32 *~ second) (0.18 *~ second)
-- | Build-up time for braking toward a target speed: the basic time is
-- scaled by 'kto' when the target speed is positive, unchanged when the
-- target is standstill.
t_brake_cm :: (RealFloat f, Floating f) =>
    (BreakPosition -> Length f -> Time f) ->
    BreakPosition -> Length f -> Velocity f -> Time f
t_brake_cm f bp l v = t_brake_cm' f v bp l

-- | Worker with the velocity argument first so the guards can dispatch
-- on it; negative target speeds are rejected.
t_brake_cm' :: (RealFloat f, Floating f) =>
    (BreakPosition -> Length f -> Time f) ->
    Velocity f -> BreakPosition -> Length f -> Time f
t_brake_cm' f v_target
    | (v_target == 0 *~ kmh) = f
    | (v_target > 0 *~ kmh) = (\bp l -> f bp l * (kto bp))
    | otherwise = error $ "t_brake_cm undefined for v_target < 0 m/s"
-- | Correction factor applied to build-up times when braking to a
-- non-zero target speed; depends only on the break position.
kto :: (RealFloat f, Floating f) => BreakPosition -> Dimensionless f
kto bp = case bp of
    FreightTrainG   -> 0.16 *~ one
    FreightTrainP   -> 0.20 *~ one
    PassangerTrainP -> 0.20 *~ one
|
open-etcs/openetcs-sdm
|
src/ETCS/SDM/Intern.hs
|
Haskell
|
bsd-3-clause
| 6,268
|
{- Data/Singletons/Util.hs
(c) Richard Eisenberg 2012
eir@cis.upenn.edu
This file contains helper functions internal to the singletons package.
Users of the package should not need to consult this file.
-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Data.Singletons.Util where
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
import Data.Char
import Data.Maybe
import Data.Data
import Data.List
import Control.Monad
import Control.Monad.Writer
import qualified Data.Map as Map
import Data.Generics
-- reify a declaration, warning the user about splices if the reify fails.
-- The recovery handler explains the TH staging restriction that usually
-- causes the lookup to fail.
reifyWithWarning :: Name -> Q Info
reifyWithWarning name = recover
    (fail $ "Looking up " ++ (show name) ++ " in the list of available " ++
            "declarations failed.\nThis lookup fails if the declaration " ++
            "referenced was made in same Template\nHaskell splice as the use " ++
            "of the declaration. If this is the case, put\nthe reference to " ++
            "the declaration in a new splice.")
    (reify name)
-- check if a string is the name of a tuple, i.e. "()", "(,)", "(,,)", ...
-- A tuple name is exactly an open paren, zero or more commas, and a
-- close paren.
isTupleString :: String -> Bool
isTupleString s =
    length s >= 2 && s == "(" ++ replicate (length s - 2) ',' ++ ")"
-- check if a name is a tuple name (by inspecting its base string)
isTupleName :: Name -> Bool
isTupleName = isTupleString . nameBase
-- extract the degree of a tuple name:
-- "()" has degree 0, "(,)" degree 2, "(,,)" degree 3, ...
tupleDegree :: String -> Int
tupleDegree s
    | s == "()" = 0
    | otherwise = length s - 1
-- reduce the four cases of a 'Con' to just two: monomorphic and polymorphic
-- and convert 'StrictType' to 'Type'
ctorCases :: (Name -> [Type] -> a) -> ([TyVarBndr] -> Cxt -> Con -> a) -> Con -> a
ctorCases genFun forallFun ctor = case ctor of
    NormalC name stypes -> genFun name (map snd stypes)
    RecC name vstypes -> genFun name (map (\(_,_,ty) -> ty) vstypes)
    InfixC (_,ty1) name (_,ty2) -> genFun name [ty1, ty2]
    -- an empty forall carries no information: unwrap and retry
    ForallC [] [] ctor' -> ctorCases genFun forallFun ctor'
    ForallC tvbs cx ctor' -> forallFun tvbs cx ctor'
-- reduce the four cases of a 'Con' to just 1: a polymorphic Con is treated
-- as a monomorphic one (foralls are stripped recursively)
ctor1Case :: (Name -> [Type] -> a) -> Con -> a
ctor1Case mono = ctorCases mono (\_ _ ctor -> ctor1Case mono ctor)

-- extract the name and number of arguments to a constructor
extractNameArgs :: Con -> (Name, Int)
extractNameArgs = ctor1Case (\name tys -> (name, length tys))
-- reinterpret a name. This is useful when a Name has an associated
-- namespace that we wish to forget
reinterpret :: Name -> Name
reinterpret = mkName . nameBase

-- is an identifier uppercase?  Operators starting with ':' count as
-- uppercase (constructor operators).
-- NOTE(review): 'head' assumes nameBase is non-empty, which holds for
-- any Name produced by TH.
isUpcase :: Name -> Bool
isUpcase n = let first = head (nameBase n) in isUpper first || first == ':'
-- make an identifier uppercase: letters get their first character
-- upcased; operator names become constructor operators via a ':' prefix
upcase :: Name -> Name
upcase n =
    let str = nameBase n
        first = head str in
    if isLetter first
        then mkName ((toUpper first) : tail str)
        else mkName (':' : str)

-- make an identifier lowercase (inverse of the convention above)
locase :: Name -> Name
locase n =
    let str = nameBase n
        first = head str in
    if isLetter first
        then mkName ((toLower first) : tail str)
        else mkName (tail str) -- remove the ":"
-- put an uppercase prefix on a name. Takes two prefixes: one for identifiers
-- and one for symbols (symbol names begin with ':')
prefixUCName :: String -> String -> Name -> Name
prefixUCName pre tyPre n = case (nameBase n) of
    (':' : rest) -> mkName (tyPre ++ rest)
    alpha -> mkName (pre ++ alpha)

-- put a lowercase prefix on a name. Takes two prefixes: one for identifiers
-- and one for symbols
prefixLCName :: String -> String -> Name -> Name
prefixLCName pre tyPre n =
    let str = nameBase n
        first = head str in
    if isLetter first
        then mkName (pre ++ str)
        else mkName (tyPre ++ str)
-- extract the name from a TyVarBndr
extractTvbName :: TyVarBndr -> Name
extractTvbName (PlainTV n) = n
extractTvbName (KindedTV n _) = n

-- extract the kind from a TyVarBndr. Returns '*' by default.
extractTvbKind :: TyVarBndr -> Kind
extractTvbKind (PlainTV _) = StarT -- FIXME: This seems wrong.
extractTvbKind (KindedTV _ k) = k
-- apply a type to a list of types (left-nested applications)
foldType :: Type -> [Type] -> Type
foldType = foldl AppT

-- apply an expression to a list of expressions
foldExp :: Exp -> [Exp] -> Exp
foldExp = foldl AppE

-- is a kind a variable?  (Kind is a synonym for Type here, hence VarT)
isVarK :: Kind -> Bool
isVarK (VarT _) = True
isVarK _ = False
-- a monad transformer for writing a monoid alongside returning a Q
type QWithAux m = WriterT m Q

-- run a computation with an auxiliary monoid, discarding the monoid result
evalWithoutAux :: QWithAux m a -> Q a
evalWithoutAux = liftM fst . runWriterT

-- run a computation with an auxiliary monoid, returning only the monoid result
evalForAux :: QWithAux m a -> Q m
evalForAux = execWriterT

-- run a computation with an auxiliary monoid, return both the result
-- of the computation and the monoid result
evalForPair :: QWithAux m a -> Q (a, m)
evalForPair = runWriterT
-- in a computation with an auxiliary map, add a binding to the map
-- (bindings for the same key are combined by the map's monoid)
addBinding :: Ord k => k -> v -> QWithAux (Map.Map k v) ()
addBinding k v = tell (Map.singleton k v)

-- in a computation with an auxiliary list, add an element to the list
addElement :: elt -> QWithAux [elt] ()
addElement elt = tell [elt]
-- does a TH structure contain a (syntactically equal) name?
-- Uses a generic query over the entire structure.
containsName :: Data a => Name -> a -> Bool
containsName n = everything (||) (mkQ False (== n))
|
jonsterling/singletons
|
Data/Singletons/Util.hs
|
Haskell
|
bsd-3-clause
| 5,340
|
module Data.Minecraft.Snapshot15w40b
( module Data.Minecraft.Snapshot15w40b.Protocol
, module Data.Minecraft.Snapshot15w40b.Version
) where
import Data.Minecraft.Snapshot15w40b.Protocol
import Data.Minecraft.Snapshot15w40b.Version
|
oldmanmike/hs-minecraft-protocol
|
src/Data/Minecraft/Snapshot15w40b.hs
|
Haskell
|
bsd-3-clause
| 238
|
{-# LANGUAGE CPP, GeneralizedNewtypeDeriving, FlexibleInstances, MultiParamTypeClasses, UndecidableInstances, TypeFamilies, DeriveDataTypeable #-}
module Web.Scotty.Internal.Types where
import Blaze.ByteString.Builder (Builder)
import Control.Applicative
import qualified Control.Exception as E
import Control.Monad.Base (MonadBase, liftBase, liftBaseDefault)
import Control.Monad.Error.Class
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Trans.Control (MonadBaseControl, StM, liftBaseWith, restoreM, ComposeSt, defaultLiftBaseWith, defaultRestoreM, MonadTransControl, StT, liftWith, restoreT)
import Control.Monad.Trans.Except
import qualified Data.ByteString as BS
import Data.ByteString.Lazy.Char8 (ByteString)
import Data.Default.Class (Default, def)
#if !(MIN_VERSION_base(4,8,0))
import Data.Monoid (mempty)
#endif
import Data.String (IsString(..))
import Data.Text.Lazy (Text, pack)
import Data.Typeable (Typeable)
import Network.HTTP.Types
import Network.Wai hiding (Middleware, Application)
import qualified Network.Wai as Wai
import Network.Wai.Handler.Warp (Settings, defaultSettings, setFdCacheDuration)
import Network.Wai.Parse (FileInfo)
--------------------- Options -----------------------
-- | Runtime configuration for a Scotty server.
data Options = Options { verbose :: Int -- ^ 0 = silent, 1(def) = startup banner
, settings :: Settings -- ^ Warp 'Settings'
-- Note: to work around an issue in warp,
-- the default FD cache duration is set to 0
-- so changes to static files are always picked
-- up. This likely has performance implications,
-- so you may want to modify this for production
-- servers using `setFdCacheDuration`.
}
-- | Defaults: startup banner on, Warp defaults with the FD cache disabled.
instance Default Options where
def = Options 1 (setFdCacheDuration 0 defaultSettings)
----- Transformer Aware Applications/Middleware -----
-- | Like WAI's 'Wai.Middleware', but over an application in an arbitrary monad.
type Middleware m = Application m -> Application m
-- | Like WAI's 'Wai.Application', generalised from 'IO' to a monad @m@.
type Application m = Request -> m Response
--------------- Scotty Applications -----------------
-- | Accumulated server definition: plain WAI middlewares, routes
-- (expressed as transformer-aware middleware), and an optional
-- error handler.
data ScottyState e m =
ScottyState { middlewares :: [Wai.Middleware]
, routes :: [Middleware m]
, handler :: ErrorHandler e m
}
-- | Start with no middleware, no routes and no error handler.
instance Default (ScottyState e m) where
def = ScottyState [] [] Nothing
-- | Push a WAI middleware onto the front of the middleware stack.
addMiddleware :: Wai.Middleware -> ScottyState e m -> ScottyState e m
addMiddleware mw st = st { middlewares = mw : middlewares st }
-- | Push a route (as transformer-aware middleware) onto the route list.
addRoute :: Middleware m -> ScottyState e m -> ScottyState e m
addRoute route st = st { routes = route : routes st }
-- | Replace the state's error handler with the given one.
addHandler :: ErrorHandler e m -> ScottyState e m -> ScottyState e m
addHandler newHandler st = st { handler = newHandler }
-- | The Scotty configuration monad: a 'State' computation that builds
-- up a 'ScottyState'. Unwrap with 'runS'.
newtype ScottyT e m a = ScottyT { runS :: State (ScottyState e m) a }
deriving ( Functor, Applicative, Monad )
------------------ Scotty Errors --------------------
-- | Internal control flow of an action: redirects and route
-- fall-through ('Next') are modelled as errors alongside user
-- errors ('ActionError').
data ActionError e = Redirect Text
| Next
| ActionError e
-- | In order to use a custom exception type (aside from 'Text'), you must
-- define an instance of 'ScottyError' for that type.
class ScottyError e where
stringError :: String -> e
showError :: e -> Text
instance ScottyError Text where
stringError = pack
showError = id
instance ScottyError e => ScottyError (ActionError e) where
stringError = ActionError . stringError
showError (Redirect url) = url
showError Next = pack "Next"
showError (ActionError e) = showError e
-- | Optional handler invoked when an action raises an error.
type ErrorHandler e m = Maybe (e -> ActionT e m ())
------------------ Scotty Actions -------------------
-- | A captured or query parameter: (name, value).
type Param = (Text, Text)
-- | An uploaded file: (field name, file info).
type File = (Text, FileInfo ByteString)
-- | Per-request environment an action can read.
data ActionEnv = Env { getReq :: Request
, getParams :: [Param]
, getBody :: IO ByteString
, getBodyChunk :: IO BS.ByteString
, getFiles :: [File]
}
data RequestBodyState = BodyUntouched
| BodyCached ByteString [BS.ByteString] -- whole body, chunks left to stream
| BodyCorrupted
-- | Exception thrown when the whole body is demanded after chunked
-- streaming has already begun.
data BodyPartiallyStreamed = BodyPartiallyStreamed deriving (Show, Typeable)
instance E.Exception BodyPartiallyStreamed
-- | The three ways a response body can be produced.
data Content = ContentBuilder Builder
| ContentFile FilePath
| ContentStream StreamingBody
-- | Response under construction: status, headers and body content.
data ScottyResponse = SR { srStatus :: Status
, srHeaders :: ResponseHeaders
, srContent :: Content
}
-- | Default response: 200 OK, no headers, empty builder body.
instance Default ScottyResponse where
def = SR status200 [] (ContentBuilder mempty)
-- | The per-request action monad: short-circuiting 'ActionError's over
-- a reader for the request environment over state for the response
-- being built, all on top of the user's monad @m@.
newtype ActionT e m a = ActionT { runAM :: ExceptT (ActionError e) (ReaderT ActionEnv (StateT ScottyResponse m)) a }
deriving ( Functor, Applicative, MonadIO )
-- Hand-written Monad instance so that 'fail' raises a 'stringError'
-- in this stack rather than calling the underlying monad's 'fail'.
instance (Monad m, ScottyError e) => Monad (ActionT e m) where
return = ActionT . return
ActionT m >>= k = ActionT (m >>= runAM . k)
fail = ActionT . throwError . stringError
instance ( Monad m, ScottyError e
#if !(MIN_VERSION_base(4,8,0))
, Functor m
#endif
) => Alternative (ActionT e m) where
empty = mzero
(<|>) = mplus
-- 'mzero' raises 'Next' (fall through to the next route); 'mplus'
-- runs the second action only if the first raised any error.
instance (Monad m, ScottyError e) => MonadPlus (ActionT e m) where
mzero = ActionT . ExceptT . return $ Left Next
ActionT m `mplus` ActionT n = ActionT . ExceptT $ do
a <- runExceptT m
case a of
Left _ -> runExceptT n
Right r -> return $ Right r
instance MonadTrans (ActionT e) where
lift = ActionT . lift . lift . lift
instance (ScottyError e, Monad m) => MonadError (ActionError e) (ActionT e m) where
throwError = ActionT . throwError
catchError (ActionT m) f = ActionT (catchError m (runAM . f))
instance (MonadBase b m, ScottyError e) => MonadBase b (ActionT e m) where
liftBase = liftBaseDefault
-- Mechanical monad-control plumbing: thread lift/restore through the
-- ExceptT / ReaderT / StateT layers one at a time.
instance MonadTransControl (ActionT e) where
type StT (ActionT e) a = StT (StateT ScottyResponse) (StT (ReaderT ActionEnv) (StT (ExceptT (ActionError e)) a))
liftWith = \f ->
ActionT $ liftWith $ \run ->
liftWith $ \run' ->
liftWith $ \run'' ->
f $ run'' . run' . run . runAM
restoreT = ActionT . restoreT . restoreT . restoreT
instance (ScottyError e, MonadBaseControl b m) => MonadBaseControl b (ActionT e m) where
type StM (ActionT e m) a = ComposeSt (ActionT e) m a
liftBaseWith = defaultLiftBaseWith
restoreM = defaultRestoreM
------------------ Scotty Routes --------------------
-- | How a route pattern matches a request path: a capture pattern,
-- a literal path, or an arbitrary matching function.
data RoutePattern = Capture Text
| Literal Text
| Function (Request -> Maybe [Param])
-- | String literals denote capture patterns.
instance IsString RoutePattern where
fromString = Capture . pack
|
beni55/scotty
|
Web/Scotty/Internal/Types.hs
|
Haskell
|
bsd-3-clause
| 7,087
|
-----------------------------------------------------------------------------
--
-- Machine-dependent assembly language
--
-- (c) The University of Glasgow 1993-2004
--
-----------------------------------------------------------------------------
{-# OPTIONS -fno-warn-tabs #-}
-- The above warning suppression flag is a temporary kludge.
-- While working on this module you are encouraged to remove it and
-- detab the module (please do the detabbing in a separate patch). See
-- http://hackage.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces
-- for details
#include "HsVersions.h"
#include "nativeGen/NCG.h"
module SPARC.Instr (
RI(..),
riZero,
fpRelEA,
moveSp,
isUnconditionalJump,
Instr(..),
maxSpillSlots
)
where
import SPARC.Stack
import SPARC.Imm
import SPARC.AddrMode
import SPARC.Cond
import SPARC.Regs
import SPARC.RegPlate
import SPARC.Base
import TargetReg
import Instruction
import RegClass
import Reg
import Size
import CLabel
import BlockId
import OldCmm
import FastString
import FastBool
import Outputable
import Platform
-- | Register or immediate operand, as accepted by most integer
-- instructions' second source position.
data RI
= RIReg Reg
| RIImm Imm
-- | Check whether an 'RI' operand is statically known to be zero:
--   either a literal zero immediate, or register %g0 (real register 0),
--   which always reads as zero.
riZero :: RI -> Bool
riZero ri
 = case ri of
        RIImm (ImmInt 0)                        -> True
        RIImm (ImmInteger 0)                    -> True
        RIReg (RegReal (RealRegSingle 0))       -> True
        _                                       -> False
-- | Calculate the effective address which would be used by the
-- corresponding fpRel sequence. Emits: dst <- %fp + n * wordLength.
fpRelEA :: Int -> Reg -> Instr
fpRelEA n dst
= ADD False False fp (RIImm (ImmInt (n * wordLength))) dst
-- | Code to shift the stack pointer by n words: sp <- sp + n * wordLength.
moveSp :: Int -> Instr
moveSp n
= ADD False False sp (RIImm (ImmInt (n * wordLength))) sp
-- | An instruction that will cause the one after it never to be executed:
-- calls, computed/tabled jumps, and always-taken branches.
isUnconditionalJump :: Instr -> Bool
isUnconditionalJump ii
= case ii of
CALL{} -> True
JMP{} -> True
JMP_TBL{} -> True
BI ALWAYS _ _ -> True
BF ALWAYS _ _ -> True
_ -> False
-- | Wire the SPARC-specific implementations into the generic
-- 'Instruction' interface used by the register allocator.
instance Instruction Instr where
regUsageOfInstr = sparc_regUsageOfInstr
patchRegsOfInstr = sparc_patchRegsOfInstr
isJumpishInstr = sparc_isJumpishInstr
jumpDestsOfInstr = sparc_jumpDestsOfInstr
patchJumpInstr = sparc_patchJumpInstr
mkSpillInstr = sparc_mkSpillInstr
mkLoadInstr = sparc_mkLoadInstr
takeDeltaInstr = sparc_takeDeltaInstr
isMetaInstr = sparc_isMetaInstr
mkRegRegMoveInstr = sparc_mkRegRegMoveInstr
takeRegRegMoveInstr = sparc_takeRegRegMoveInstr
mkJumpInstr = sparc_mkJumpInstr
-- | SPARC instruction set.
-- Not complete. This is only the ones we need.
--
data Instr
-- meta ops --------------------------------------------------
-- comment pseudo-op
= COMMENT FastString
-- some static data spat out during code generation.
-- Will be extracted before pretty-printing.
| LDATA Section CmmStatics
-- Start a new basic block. Useful during codegen, removed later.
-- Preceding instruction should be a jump, as per the invariants
-- for a BasicBlock (see Cmm).
| NEWBLOCK BlockId
-- specify current stack offset for benefit of subsequent passes.
| DELTA Int
-- real instrs -----------------------------------------------
-- Loads and stores.
| LD Size AddrMode Reg -- size, src, dst
| ST Size Reg AddrMode -- size, src, dst
-- Int Arithmetic.
-- x: add/sub with carry bit.
-- In SPARC V9 addx and friends were renamed addc.
--
-- cc: modify condition codes
--
| ADD Bool Bool Reg RI Reg -- x?, cc?, src1, src2, dst
| SUB Bool Bool Reg RI Reg -- x?, cc?, src1, src2, dst
| UMUL Bool Reg RI Reg -- cc?, src1, src2, dst
| SMUL Bool Reg RI Reg -- cc?, src1, src2, dst
-- The SPARC divide instructions perform 64bit by 32bit division
-- The Y register is xored into the first operand.
-- On _some implementations_ the Y register is overwritten by
-- the remainder, so we have to make sure it is 0 each time.
-- dst <- ((Y `shiftL` 32) `or` src1) `div` src2
| UDIV Bool Reg RI Reg -- cc?, src1, src2, dst
| SDIV Bool Reg RI Reg -- cc?, src1, src2, dst
| RDY Reg -- move contents of Y register to reg
| WRY Reg Reg -- Y <- src1 `xor` src2
-- Logic operations.
| AND Bool Reg RI Reg -- cc?, src1, src2, dst
| ANDN Bool Reg RI Reg -- cc?, src1, src2, dst
| OR Bool Reg RI Reg -- cc?, src1, src2, dst
| ORN Bool Reg RI Reg -- cc?, src1, src2, dst
| XOR Bool Reg RI Reg -- cc?, src1, src2, dst
| XNOR Bool Reg RI Reg -- cc?, src1, src2, dst
| SLL Reg RI Reg -- src1, src2, dst
| SRL Reg RI Reg -- src1, src2, dst
| SRA Reg RI Reg -- src1, src2, dst
-- Load immediates.
| SETHI Imm Reg -- src, dst
-- Do nothing.
-- Implemented by the assembler as SETHI 0, %g0, but worth an alias
-- (also used to fill branch delay slots, see sparc_mkJumpInstr).
| NOP
-- Float Arithmetic.
-- Note that we cheat by treating F{ABS,MOV,NEG} of doubles as single
-- instructions right up until we spit them out.
--
| FABS Size Reg Reg -- src dst
| FADD Size Reg Reg Reg -- src1, src2, dst
| FCMP Bool Size Reg Reg -- exception?, src1, src2, dst
| FDIV Size Reg Reg Reg -- src1, src2, dst
| FMOV Size Reg Reg -- src, dst
| FMUL Size Reg Reg Reg -- src1, src2, dst
| FNEG Size Reg Reg -- src, dst
| FSQRT Size Reg Reg -- src, dst
| FSUB Size Reg Reg Reg -- src1, src2, dst
| FxTOy Size Size Reg Reg -- src, dst
-- Jumping around.
| BI Cond Bool BlockId -- cond, annul?, target
| BF Cond Bool BlockId -- cond, annul?, target
| JMP AddrMode -- target
-- With a tabled jump we know all the possible destinations.
-- We also need this info so we can work out what regs are live across the jump.
--
| JMP_TBL AddrMode [Maybe BlockId] CLabel -- target addr, possible dests, table label
| CALL (Either Imm Reg) Int Bool -- target, args, terminal
-- | regUsage returns the sets of src and destination registers used
-- by a particular instruction. Machine registers that are
-- pre-allocated to stgRegs are filtered out, because they are
-- uninteresting from a register allocation standpoint. (We wouldn't
-- want them to end up on the free list!) As far as we are concerned,
-- the fixed registers simply don't exist (for allocation purposes,
-- anyway).
-- regUsage doesn't need to do any trickery for jumps and such. Just
-- state precisely the regs read and written by that insn. The
-- consequences of control flow transfers, as far as register
-- allocation goes, are taken care of by the register allocator.
--
sparc_regUsageOfInstr :: Platform -> Instr -> RegUsage
sparc_regUsageOfInstr _ instr
= case instr of
LD _ addr reg -> usage (regAddr addr, [reg])
ST _ reg addr -> usage (reg : regAddr addr, [])
ADD _ _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
SUB _ _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
UMUL _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
SMUL _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
UDIV _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
SDIV _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
RDY rd -> usage ([], [rd])
WRY r1 r2 -> usage ([r1, r2], [])
AND _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
ANDN _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
OR _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
ORN _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
XOR _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
XNOR _ r1 ar r2 -> usage (r1 : regRI ar, [r2])
SLL r1 ar r2 -> usage (r1 : regRI ar, [r2])
SRL r1 ar r2 -> usage (r1 : regRI ar, [r2])
SRA r1 ar r2 -> usage (r1 : regRI ar, [r2])
SETHI _ reg -> usage ([], [reg])
FABS _ r1 r2 -> usage ([r1], [r2])
FADD _ r1 r2 r3 -> usage ([r1, r2], [r3])
FCMP _ _ r1 r2 -> usage ([r1, r2], [])
FDIV _ r1 r2 r3 -> usage ([r1, r2], [r3])
FMOV _ r1 r2 -> usage ([r1], [r2])
FMUL _ r1 r2 r3 -> usage ([r1, r2], [r3])
FNEG _ r1 r2 -> usage ([r1], [r2])
FSQRT _ r1 r2 -> usage ([r1], [r2])
FSUB _ r1 r2 r3 -> usage ([r1, r2], [r3])
FxTOy _ _ r1 r2 -> usage ([r1], [r2])
JMP addr -> usage (regAddr addr, [])
JMP_TBL addr _ _ -> usage (regAddr addr, [])
CALL (Left _ ) _ True -> noUsage
CALL (Left _ ) n False -> usage (argRegs n, callClobberedRegs)
CALL (Right reg) _ True -> usage ([reg], [])
CALL (Right reg) n False -> usage (reg : (argRegs n), callClobberedRegs)
_ -> noUsage
where
-- Build a RegUsage from (reads, writes), keeping only allocatable regs.
usage (src, dst)
= RU (filter interesting src) (filter interesting dst)
-- Registers read when computing an effective address.
regAddr (AddrRegReg r1 r2) = [r1, r2]
regAddr (AddrRegImm r1 _) = [r1]
-- Registers read by a reg-or-immediate operand.
regRI (RIReg r) = [r]
regRI _ = []
-- | Interesting regs are virtuals, or ones that are allocatable
-- by the register allocator. A real register pair is classified
-- by its first register.
interesting :: Reg -> Bool
interesting reg
= case reg of
RegVirtual _ -> True
RegReal (RealRegSingle r1) -> isFastTrue (freeReg r1)
RegReal (RealRegPair r1 _) -> isFastTrue (freeReg r1)
-- | Apply a given mapping to all the register references in this instruction.
sparc_patchRegsOfInstr :: Instr -> (Reg -> Reg) -> Instr
sparc_patchRegsOfInstr instr env = case instr of
LD sz addr reg -> LD sz (fixAddr addr) (env reg)
ST sz reg addr -> ST sz (env reg) (fixAddr addr)
ADD x cc r1 ar r2 -> ADD x cc (env r1) (fixRI ar) (env r2)
SUB x cc r1 ar r2 -> SUB x cc (env r1) (fixRI ar) (env r2)
UMUL cc r1 ar r2 -> UMUL cc (env r1) (fixRI ar) (env r2)
SMUL cc r1 ar r2 -> SMUL cc (env r1) (fixRI ar) (env r2)
UDIV cc r1 ar r2 -> UDIV cc (env r1) (fixRI ar) (env r2)
SDIV cc r1 ar r2 -> SDIV cc (env r1) (fixRI ar) (env r2)
RDY rd -> RDY (env rd)
WRY r1 r2 -> WRY (env r1) (env r2)
AND b r1 ar r2 -> AND b (env r1) (fixRI ar) (env r2)
ANDN b r1 ar r2 -> ANDN b (env r1) (fixRI ar) (env r2)
OR b r1 ar r2 -> OR b (env r1) (fixRI ar) (env r2)
ORN b r1 ar r2 -> ORN b (env r1) (fixRI ar) (env r2)
XOR b r1 ar r2 -> XOR b (env r1) (fixRI ar) (env r2)
XNOR b r1 ar r2 -> XNOR b (env r1) (fixRI ar) (env r2)
SLL r1 ar r2 -> SLL (env r1) (fixRI ar) (env r2)
SRL r1 ar r2 -> SRL (env r1) (fixRI ar) (env r2)
SRA r1 ar r2 -> SRA (env r1) (fixRI ar) (env r2)
SETHI imm reg -> SETHI imm (env reg)
FABS s r1 r2 -> FABS s (env r1) (env r2)
FADD s r1 r2 r3 -> FADD s (env r1) (env r2) (env r3)
FCMP e s r1 r2 -> FCMP e s (env r1) (env r2)
FDIV s r1 r2 r3 -> FDIV s (env r1) (env r2) (env r3)
FMOV s r1 r2 -> FMOV s (env r1) (env r2)
FMUL s r1 r2 r3 -> FMUL s (env r1) (env r2) (env r3)
FNEG s r1 r2 -> FNEG s (env r1) (env r2)
FSQRT s r1 r2 -> FSQRT s (env r1) (env r2)
FSUB s r1 r2 r3 -> FSUB s (env r1) (env r2) (env r3)
FxTOy s1 s2 r1 r2 -> FxTOy s1 s2 (env r1) (env r2)
JMP addr -> JMP (fixAddr addr)
JMP_TBL addr ids l -> JMP_TBL (fixAddr addr) ids l
CALL (Left i) n t -> CALL (Left i) n t
CALL (Right r) n t -> CALL (Right (env r)) n t
_ -> instr
where
-- Patch registers inside an address mode.
fixAddr (AddrRegReg r1 r2) = AddrRegReg (env r1) (env r2)
fixAddr (AddrRegImm r1 i) = AddrRegImm (env r1) i
-- Patch the register in a reg-or-immediate operand; immediates pass through.
fixRI (RIReg r) = RIReg (env r)
fixRI other = other
--------------------------------------------------------------------------------
-- | Whether this instruction can transfer control flow
--   (branches, jumps, tabled jumps and calls).
sparc_isJumpishInstr :: Instr -> Bool
sparc_isJumpishInstr BI{}       = True
sparc_isJumpishInstr BF{}       = True
sparc_isJumpishInstr JMP{}      = True
sparc_isJumpishInstr JMP_TBL{}  = True
sparc_isJumpishInstr CALL{}     = True
sparc_isJumpishInstr _          = False
-- | The basic blocks this branch instruction may jump to.
--   Non-branch instructions (and computed jumps) have no statically
--   known destinations.
sparc_jumpDestsOfInstr :: Instr -> [BlockId]
sparc_jumpDestsOfInstr (BI _ _ dest)     = [dest]
sparc_jumpDestsOfInstr (BF _ _ dest)     = [dest]
sparc_jumpDestsOfInstr (JMP_TBL _ ids _) = [dest | Just dest <- ids]
sparc_jumpDestsOfInstr _                 = []
-- | Rewrite the destination block ids of a branch instruction.
-- 'JMP' targets an 'AddrMode', not a 'BlockId', so it is left alone.
sparc_patchJumpInstr :: Instr -> (BlockId -> BlockId) -> Instr
sparc_patchJumpInstr insn patchF
= case insn of
BI cc annul id -> BI cc annul (patchF id)
BF cc annul id -> BF cc annul (patchF id)
JMP_TBL n ids l -> JMP_TBL n (map (fmap patchF) ids) l
_ -> insn
--------------------------------------------------------------------------------
-- | Make a spill instruction.
-- On SPARC we spill below frame pointer leaving 2 words/spill
sparc_mkSpillInstr
:: Platform
-> Reg -- ^ register to spill
-> Int -- ^ current stack delta
-> Int -- ^ spill slot to use
-> Instr
sparc_mkSpillInstr platform reg _ slot
= let off = spillSlotToOffset slot
off_w = 1 + (off `div` 4)
-- Store width follows the register's class.
sz = case targetClassOfReg platform reg of
RcInteger -> II32
RcFloat -> FF32
RcDouble -> FF64
_ -> panic "sparc_mkSpillInstr"
in ST sz reg (fpRel (negate off_w))
-- | Make a spill reload instruction: load @reg@ back from its spill
--   slot below the frame pointer. Mirrors 'sparc_mkSpillInstr', which
--   stores to the same @fpRel (negate off_w)@ address.
sparc_mkLoadInstr
        :: Platform
        -> Reg          -- ^ register to load into
        -> Int          -- ^ current stack delta
        -> Int          -- ^ spill slot to use
        -> Instr

sparc_mkLoadInstr platform reg _ slot
 = let  off     = spillSlotToOffset slot
        off_w   = 1 + (off `div` 4)

        -- Load width follows the register's class.
        sz      = case targetClassOfReg platform reg of
                        RcInteger -> II32
                        RcFloat   -> FF32
                        RcDouble  -> FF64
                        _         -> panic "sparc_mkLoadInstr"

   in   LD sz (fpRel (negate off_w)) reg -- 'negate' for consistency with sparc_mkSpillInstr
--------------------------------------------------------------------------------
-- | See if this instruction is telling us the current C stack delta.
sparc_takeDeltaInstr :: Instr -> Maybe Int
sparc_takeDeltaInstr (DELTA i) = Just i
sparc_takeDeltaInstr _         = Nothing
-- | Whether this is one of the pseudo-instructions used only during
--   code generation (they emit no machine code).
sparc_isMetaInstr :: Instr -> Bool
sparc_isMetaInstr COMMENT{}  = True
sparc_isMetaInstr LDATA{}    = True
sparc_isMetaInstr NEWBLOCK{} = True
sparc_isMetaInstr DELTA{}    = True
sparc_isMetaInstr _          = False
-- | Make a reg-reg move instruction.
-- On SPARC v8 there are no instructions to move directly between
-- floating point and integer regs. If we need to do that then we
-- have to go via memory.
--
sparc_mkRegRegMoveInstr
:: Platform
-> Reg
-> Reg
-> Instr
sparc_mkRegRegMoveInstr platform src dst
| srcClass <- targetClassOfReg platform src
, dstClass <- targetClassOfReg platform dst
, srcClass == dstClass
-- Integer moves are encoded as an add of zero (%g0); float moves use FMOV.
= case srcClass of
RcInteger -> ADD False False src (RIReg g0) dst
RcDouble -> FMOV FF64 src dst
RcFloat -> FMOV FF32 src dst
_ -> panic "sparc_mkRegRegMoveInstr"
| otherwise
= panic "SPARC.Instr.mkRegRegMoveInstr: classes of src and dest not the same"
-- | Check whether an instruction represents a reg-reg move.
-- The register allocator attempts to eliminate reg->reg moves whenever it can,
-- by assigning the src and dest temporaries to the same real register.
-- Recognises the same encodings 'sparc_mkRegRegMoveInstr' produces.
--
sparc_takeRegRegMoveInstr :: Instr -> Maybe (Reg,Reg)
sparc_takeRegRegMoveInstr instr
= case instr of
ADD False False src (RIReg src2) dst
| g0 == src2 -> Just (src, dst)
FMOV FF64 src dst -> Just (src, dst)
FMOV FF32 src dst -> Just (src, dst)
_ -> Nothing
-- | Make an unconditional branch instruction.
-- Emits an always-taken branch followed by a NOP for the delay slot.
sparc_mkJumpInstr
:: BlockId
-> [Instr]
sparc_mkJumpInstr id
= [BI ALWAYS False id
, NOP] -- fill the branch delay slot.
|
nomeata/ghc
|
compiler/nativeGen/SPARC/Instr.hs
|
Haskell
|
bsd-3-clause
| 15,521
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module Jenkins where
import Control.Lens hiding (deep) -- lens
import Control.Monad.IO.Class
import Control.Monad.Trans.Control
import Data.Aeson.Lens -- lens-aeson
import Data.Map (empty)
import Data.Text hiding (empty)
import Jenkins.Rest (Jenkins, (-?-), (-=-)) -- libjenkins
import qualified Jenkins.Rest as JR
import Text.Hamlet.XML
import Text.XML
import Prelude hiding (unwords)
{-
data BuildPlan = BuildPlan {
vcsInfo :: Maybe VCSRoot,
buildWith :: Text,
extraParams :: [Text]
} deriving(Eq, Show)
-- | Information about a VCS root.
class VCSInfo v where
vcsTool :: v -> Text
vcsRootUrl :: v -> Text
data VCSRoot = VCSGit VCSInfoGit | VCSSvn VCSInfoSvn deriving (Eq, Show)
data VCSInfoGit = VCSInfoGit {
gitRepoUrl :: Text
} deriving (Eq, Show)
instance VCSInfo VCSInfoGit where
vcsTool _ = "git"
vcsRootUrl = gitRepoUrl
data VCSInfoSvn = VCSInfoSvn {
svnRepoUrl :: Text
} deriving (Eq, Show)
instance VCSInfo VCSInfoSvn where
vcsTool _ = "svn"
vcsRootUrl = svnRepoUrl
-- | The jenkins master (hardcoded for now)
master :: JR.Master
master = JR.defaultMaster &
JR.url .~ "http://192.168.59.103:8080"
-- | Test configuration, describes a job with a single build step that
-- echoes test
testConfig :: Element
testConfig = Element "project" empty [xml|
<actions>
<description>
<keepDependencies>
false
<properties>
<scm class="hudson.scm.NullSCM">
<canRoam>true
<disabled> false
<blockBuildWhenDownstreamBuilding>false
<blockBuildWhenUpstreamBuilding>false
<triggers>
<concurrentBuild>
false
<builders>
<hudson.tasks.Shell>
<command>
echo "test"
<publishers>
<buildWrappers>
|]
testPlan = BuildPlan {
vcsInfo = Just $
VCSGit $
VCSInfoGit "https://github.com/wayofthepie/github-maven-example",
buildWith = "mvn",
extraParams = ["clean", "install"]
}
gitPluginVersion = "git@2.3.1"
-- |
-- To be accurate about plugin values, the api should be queried at
-- http://(jenkins)/pluginManager/api/json?depth=1.
--
-- TODO: Create a converter for plans to xml.
plan2Cfg :: BuildPlan -> Element
plan2Cfg b = Element "project" empty [xml|
<actions>
<description>
<keepDependencies>
false
<properties>
<scm class=hudson.plugins.git.GitSCM>
<configVersion>
2
<userRemoteConfigs>
<hudson.plugins.git.UserRemoteConfig>
<url>#{repoUrl b}
<branches>
<hudson.plugins.git.BranchSpec>
<name>
*/master
<doGenerateSubmoduleConfigurations>
false
<submoduleCfg class="list">
<extensions>
<canRoam>
true
<disabled>
false
<blockBuildWhenDownstreamBuilding>
false
<blockBuildWhenUpstreamBuilding>
false
<triggers>
<concurrentBuild>
false
<builders>
<hudson.tasks.Shell>
<command>#{buildWith b} #{unwords $ extraParams b}
<publishers>
<buildWrappers>
|]
-- | Create a job named __n__ with the config __c__
createJob :: ( MonadBaseControl IO m, MonadIO m ) => Text -> Element -> m ( JR.Result () )
createJob n c =
JR.run (JR.defaultMaster &
JR.url .~ ("http://192.168.59.103:8080/")) $
JR.postXml ("createItem" -?- "name" -=- n) $ e2bs c
where
e2bs xml = renderLBS def $ Document (Prologue [] Nothing []) xml []
-}
|
wayofthepie/riverd
|
src/lib/Jenkins.hs
|
Haskell
|
bsd-3-clause
| 3,688
|
-----------------------------------------------------------------------------
-- |
-- Module : A
-- Copyright : (c) 2008 - 2010 Universiteit Utrecht
-- License : BSD3
--
-- Maintainer : generics@haskell.org
--
-- An example type representation.
-----------------------------------------------------------------------------
-- {-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverlappingInstances #-}
{-# LANGUAGE UndecidableInstances #-}
module A where
import Prelude hiding (Read, Show)
import qualified Prelude as P (Read, Show)
import Data.Generics (Data, Typeable)
import Control.Applicative (Alternative, pure)
import Generics.EMGM.Base
import Generics.EMGM.Functions.Collect
import Generics.EMGM.Functions.Everywhere
import Generics.EMGM.Functions.Meta
-- | An example datatype exercising every constructor shape: plain,
-- multi-argument, record, and (record-)infix constructors.
data A a
= A1 a
| A2 Integer (A a)
| A3 { unA3 :: Double }
| A4 { unA4a :: A a, unA4b :: Int }
| A5 { unA5a :: Char, unA5b :: A a, unA5c :: a }
| A a :^: Float
| (:<>:) { unA7a :: A a, unA7b :: A a }
deriving (P.Show, P.Read, Eq, Ord, Data, Typeable)
infixr 6 :^:
infixl 5 :<>:
-- | Sum-of-products structure representation of 'A': one summand per
-- constructor, in declaration order.
type AS a
{- A1 -} = a
{- A2 -} :+: Integer :*: A a
{- A3 -} :+: Double
{- A4 -} :+: A a :*: Int
{- A5 -} :+: Char :*: A a :*: a
{- :^: -} :+: A a :*: Float
{- :<>: -} :+: A a :*: A a
-- | Convert a value into its structure representation.
fromA :: A a -> AS a
fromA t = case t of
A1 x1 -> L x1
A2 x1 x2 -> R (L (x1 :*: x2))
A3 x1 -> R (R (L x1))
A4 x1 x2 -> R (R (R (L (x1 :*: x2))))
A5 x1 x2 x3 -> R (R (R (R (L (x1 :*: x2 :*: x3)))))
x1 :^: x2 -> R (R (R (R (R (L (x1 :*: x2))))))
x1 :<>: x2 -> R (R (R (R (R (R (x1 :*: x2))))))
-- | Convert a structure representation back into a value; inverse of 'fromA'.
toA :: AS a -> A a
toA s = case s of
L x1 -> A1 x1
R (L (x1 :*: x2)) -> A2 x1 x2
R (R (L x1)) -> A3 x1
R (R (R (L (x1 :*: x2)))) -> A4 x1 x2
R (R (R (R (L (x1 :*: x2 :*: x3))))) -> A5 x1 x2 x3
R (R (R (R (R (L (x1 :*: x2)))))) -> x1 :^: x2
R (R (R (R (R (R (x1 :*: x2)))))) -> x1 :<>: x2
-- | Embedding-projection pair witnessing the 'A'/'AS' isomorphism.
epA :: EP (A a) (AS a)
epA = EP fromA toA
instance HasEP (A a) (AS a) where
epOf _ = epA
-- Constructor descriptors: name, arity, has-record-labels?, fixity.
conA1 = ConDescr "A1" 1 False Prefix
conA2 = ConDescr "A2" 2 False Prefix
conA3 = ConDescr "A3" 1 True Prefix
conA4 = ConDescr "A4" 2 True Prefix
conA5 = ConDescr "A5" 3 True Prefix
conA6 = ConDescr ":^:" 2 False (Infix RightAssoc 6)
conA7 = ConDescr ":<>:" 2 True (Infix LeftAssoc 5)
-- Record label descriptors, one per field selector of 'A'.
lblUnA3 = LblDescr "unA3"
lblUnA4a = LblDescr "unA4a"
lblUnA4b = LblDescr "unA4b"
lblUnA5a = LblDescr "unA5a"
lblUnA5b = LblDescr "unA5b"
lblUnA5c = LblDescr "unA5c"
lblUnA7a = LblDescr "unA7a"
lblUnA7b = LblDescr "unA7b"
-- Representation for arity-1 generic functions: mirrors the layout of
-- 'AS a', attaching constructor and label descriptors.
instance (Generic g, Rep g a, Rep g Char, Rep g Double, Rep g Float, Rep g Integer, Rep g Int) => Rep g (A a) where
rep = rtype epA
$ rcon conA1 rep
`rsum` rcon conA2 (rep `rprod` rep)
`rsum` rcon conA3 (rlbl lblUnA3 rep)
`rsum` rcon conA4 (rlbl lblUnA4a rep `rprod` rlbl lblUnA4b rep)
`rsum` rcon conA5 (rlbl lblUnA5a rep `rprod` rlbl lblUnA5b rep `rprod` rlbl lblUnA5c rep)
`rsum` rcon conA6 (rep `rprod` rep)
`rsum` rcon conA7 (rlbl lblUnA7a rep `rprod` rlbl lblUnA7b rep)
-- Functorial representation: the element representation is passed in
-- explicitly as @ra@, primitive fields use the primitive combinators.
instance (Generic g) => FRep g A where
frep ra = rtype epA
$ rcon conA1 ra
`rsum` rcon conA2 (rinteger `rprod` frep ra)
`rsum` rcon conA3 (rlbl lblUnA3 rdouble)
`rsum` rcon conA4 (rlbl lblUnA4a (frep ra) `rprod` rlbl lblUnA4b rint)
`rsum` rcon conA5 (rlbl lblUnA5a rchar `rprod` rlbl lblUnA5b (frep ra) `rprod` rlbl lblUnA5c ra)
`rsum` rcon conA6 (frep ra `rprod` rfloat)
`rsum` rcon conA7 (rlbl lblUnA7a (frep ra) `rprod` rlbl lblUnA7b (frep ra))
-- Arity-2 variant of 'frep'; identical structure with *2 combinators.
instance (Generic2 g) => FRep2 g A where
frep2 ra = rtype2 epA epA
$ rcon2 conA1 ra
`rsum2` rcon2 conA2 (rinteger2 `rprod2` frep2 ra)
`rsum2` rcon2 conA3 (rlbl2 lblUnA3 rdouble2)
`rsum2` rcon2 conA4 (rlbl2 lblUnA4a (frep2 ra) `rprod2` rlbl2 lblUnA4b rint2)
`rsum2` rcon2 conA5 (rlbl2 lblUnA5a rchar2 `rprod2` rlbl2 lblUnA5b (frep2 ra) `rprod2` rlbl2 lblUnA5c ra)
`rsum2` rcon2 conA6 (frep2 ra `rprod2` rfloat2)
`rsum2` rcon2 conA7 (rlbl2 lblUnA7a (frep2 ra) `rprod2` rlbl2 lblUnA7b (frep2 ra))
-- Arity-3 variant of 'frep'; identical structure with *3 combinators.
instance (Generic3 g) => FRep3 g A where
frep3 ra = rtype3 epA epA epA
$ rcon3 conA1 ra
`rsum3` rcon3 conA2 (rinteger3 `rprod3` frep3 ra)
`rsum3` rcon3 conA3 (rlbl3 lblUnA3 rdouble3)
`rsum3` rcon3 conA4 (rlbl3 lblUnA4a (frep3 ra) `rprod3` rlbl3 lblUnA4b rint3)
`rsum3` rcon3 conA5 (rlbl3 lblUnA5a rchar3 `rprod3` rlbl3 lblUnA5b (frep3 ra) `rprod3` rlbl3 lblUnA5c ra)
`rsum3` rcon3 conA6 (frep3 ra `rprod3` rfloat3)
`rsum3` rcon3 conA7 (rlbl3 lblUnA7a (frep3 ra) `rprod3` rlbl3 lblUnA7b (frep3 ra))
-- Collect an 'A a' value itself when collecting at type 'A a'.
instance (Alternative f) => Rep (Collect f (A a)) (A a) where
rep = Collect pure
-- Apply an everywhere-transformation bottom-up: transform the children
-- first, rebuild the constructor, then apply @f@ to the whole node.
instance (Rep (Everywhere (A a)) a) => Rep (Everywhere (A a)) (A a) where
rep = Everywhere app
where
app f x =
case x of
A1 x1 -> f (A1 (selEverywhere rep f x1))
A2 x1 x2 -> f (A2 (selEverywhere rep f x1) (selEverywhere rep f x2))
A3 x1 -> f (A3 (selEverywhere rep f x1))
A4 x1 x2 -> f (A4 (selEverywhere rep f x1) (selEverywhere rep f x2))
A5 x1 x2 x3 -> f (A5 (selEverywhere rep f x1) (selEverywhere rep f x2) (selEverywhere rep f x3))
x1 :^: x2 -> f (selEverywhere rep f x1 :^: selEverywhere rep f x2)
x1 :<>: x2 -> f (selEverywhere rep f x1 :<>: selEverywhere rep f x2)
-- Top-level-only variant: just apply the function at this node.
instance Rep (Everywhere' (A a)) (A a) where
rep = Everywhere' ($)
-- Sample values covering every constructor of 'A'.
v1 = A1 (5 :: Int)
v2 = A2 37 v1
v3 = A3 9999.9999 :: A Float
v4 = A4 v3 79
v5 = A5 'a' v4 5.0
v6 = v5 :^: 0.12345
v7 = v6 :<>: v6
|
spl/emgm
|
tests/A.hs
|
Haskell
|
bsd-3-clause
| 5,933
|
{-# OPTIONS -fno-warn-unused-imports #-}
#include "HsConfigure.h"
-- #hide
module Data.Time.Calendar.Days
(
-- * Days
Day(..),addDays,diffDays
) where
import Control.DeepSeq
import Data.Ix
import Data.Typeable
#if LANGUAGE_Rank2Types
import Data.Data
#endif
-- | The Modified Julian Day is a standard count of days, with zero being the day 1858-11-17.
--
-- For the 'Read' instance of 'Day',
-- import "Data.Time" or "Data.Time.Format".
-- ('Data'/'Typeable' are only derived when the compiler supports the
-- required extensions, per the CPP guards below.)
newtype Day = ModifiedJulianDay {toModifiedJulianDay :: Integer} deriving (Eq,Ord
#if LANGUAGE_DeriveDataTypeable
#if LANGUAGE_Rank2Types
,Data, Typeable
#endif
#endif
)
-- | Evaluating a 'Day' to normal form evaluates its day count.
instance NFData Day where
rnf (ModifiedJulianDay a) = rnf a
-- necessary because H98 doesn't have "cunning newtype" derivation
-- (each method just unwraps, delegates to 'Integer', and rewraps)
instance Enum Day where
succ (ModifiedJulianDay a) = ModifiedJulianDay (succ a)
pred (ModifiedJulianDay a) = ModifiedJulianDay (pred a)
toEnum = ModifiedJulianDay . toEnum
fromEnum (ModifiedJulianDay a) = fromEnum a
enumFrom (ModifiedJulianDay a) = fmap ModifiedJulianDay (enumFrom a)
enumFromThen (ModifiedJulianDay a) (ModifiedJulianDay b) = fmap ModifiedJulianDay (enumFromThen a b)
enumFromTo (ModifiedJulianDay a) (ModifiedJulianDay b) = fmap ModifiedJulianDay (enumFromTo a b)
enumFromThenTo (ModifiedJulianDay a) (ModifiedJulianDay b) (ModifiedJulianDay c) = fmap ModifiedJulianDay (enumFromThenTo a b c)
-- necessary because H98 doesn't have "cunning newtype" derivation
instance Ix Day where
range (ModifiedJulianDay a,ModifiedJulianDay b) = fmap ModifiedJulianDay (range (a,b))
index (ModifiedJulianDay a,ModifiedJulianDay b) (ModifiedJulianDay c) = index (a,b) c
inRange (ModifiedJulianDay a,ModifiedJulianDay b) (ModifiedJulianDay c) = inRange (a,b) c
rangeSize (ModifiedJulianDay a,ModifiedJulianDay b) = rangeSize (a,b)
-- | Shift a 'Day' by the given (possibly negative) number of days.
addDays :: Integer -> Day -> Day
addDays n day = ModifiedJulianDay (toModifiedJulianDay day + n)
-- | The number of days from the second 'Day' to the first.
diffDays :: Day -> Day -> Integer
diffDays dayA dayB = toModifiedJulianDay dayA - toModifiedJulianDay dayB
|
bergmark/time
|
lib/Data/Time/Calendar/Days.hs
|
Haskell
|
bsd-3-clause
| 2,031
|
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE OverloadedStrings, RecordWildCards #-}
module HW05 where
import Data.ByteString.Lazy (ByteString)
import Data.Map.Strict (Map)
import System.Environment (getArgs)
import Data.Word8 (Word8)
import Data.List
import Data.Ord
import Data.Maybe
import qualified Data.ByteString.Lazy as BS
import qualified Data.Map.Strict as Map
import qualified Data.Bits as Bits
-- import qualified Data.ByteString.Lazy.Char8 as C
import Parser
-- Exercise 1 -----------------------------------------
-- | XOR two files together byte-for-byte and keep only the non-zero
-- bytes; bytes that agree in both files XOR to zero and are dropped.
getSecret :: FilePath -> FilePath -> IO ByteString
getSecret f1 f2 = do
    bytes1 <- BS.readFile f1
    bytes2 <- BS.readFile f2
    let xored = BS.pack (BS.zipWith Bits.xor bytes1 bytes2)
    return (BS.filter (/= 0) xored)
-- Exercise 2 -----------------------------------------
-- | Decrypt @f.enc@ by XOR-ing it against the key (cycled to the
-- length of the ciphertext) and write the result to @f@.
decryptWithKey :: ByteString -> FilePath -> IO ()
decryptWithKey k f = do
    ciphertext <- BS.readFile (f ++ ".enc")
    let keyStream = cycle (BS.unpack k)
        plain     = zipWith Bits.xor keyStream (BS.unpack ciphertext)
    BS.writeFile f (BS.pack plain)
-- Exercise 3 -----------------------------------------
-- | Read a file and JSON-decode it; 'Nothing' on a parse failure.
parseFile :: FromJSON a => FilePath -> IO (Maybe a)
parseFile f = fmap decode (BS.readFile f)
-- Exercise 4 -----------------------------------------
-- | Parse the victim-id file and the transaction file, and keep only
-- the transactions whose id appears in the victim list. 'Nothing' if
-- either file fails to parse.
getBadTs :: FilePath -> FilePath -> IO (Maybe [Transaction])
getBadTs vp tp = do
    maybeVictims <- parseFile vp :: IO (Maybe [TId])
    maybeTrans   <- parseFile tp
    case (maybeVictims, maybeTrans) of
        (Just victims, Just trans) ->
            return (Just [t | t <- trans, tid t `elem` victims])
        _ -> return Nothing
-- case mvs of
-- Nothing -> return mts
-- Just vs -> do
-- case mts of
-- Nothing -> return Nothing
-- Just ts -> return $ Just $ filter(\t -> elem (tid t) vs) ts
-- | Ad-hoc driver: compute the money flow for the bundled sample
-- files. Previously crashed via 'fromJust' with an uninformative
-- message when parsing failed; now fails with a descriptive error.
test :: IO (Map String Integer)
test = do
    mts <- getBadTs "victims.json" "transactions.json"
    case mts of
        Just ts -> return (getFlow ts)
        Nothing -> error "test: could not parse victims.json/transactions.json"
-- Exercise 5 -----------------------------------------
-- | Compute each person's net money flow: every transaction credits
-- its recipient with the amount and debits its sender by the same
-- amount.
getFlow :: [Transaction] -> Map String Integer
getFlow ts' = foldl' applyTransaction Map.empty ts'
  where
    -- Credit the recipient first, then debit the sender, in the order
    -- the transactions are listed.
    applyTransaction acc t =
        let credited = Map.insertWith (+) (to t) (amount t) acc
        in Map.insertWith (+) (from t) (negate (amount t)) credited
-- Exercise 6 -----------------------------------------
-- | The person with the largest net inflow of money.
-- Note: requires a non-empty map ('maximumBy' is partial).
getCriminal :: Map String Integer -> String
getCriminal m = winner
  where
    pairs  = Map.toList m
    winner = fst (maximumBy (comparing snd) pairs)
-- Exercise 7 -----------------------------------------
-- | Build compensating transactions that move money back from net
-- gainers ("payers", positive flow) to net losers ("payees", negative
-- flow), consuming one fresh transaction id per generated transaction.
undoTs :: Map String Integer -> [TId] -> [Transaction]
undoTs m ts = makeT ts (payers m) (payees m)
  where -- People with a positive net flow, largest amount first.
        payers :: Map String Integer -> [(String, Integer)]
        payers m' = reverse $ sort' $ filter (\ x -> (snd x) > 0) (Map.toList m')
        -- People with a negative net flow, most negative first.
        payees :: Map String Integer -> [(String, Integer)]
        payees m'' = sort' $ filter (\ x -> (snd x) < 0) (Map.toList m'')
        -- Ascending sort by the flow amount.
        sort' :: [(String, Integer)] -> [(String, Integer)]
        sort' ls = sortBy (comparing $ snd) ls
        -- Repeatedly match the top payer with the top payee,
        -- transferring the smaller of the two outstanding amounts;
        -- whichever side is not fully settled is pushed back with the
        -- remainder.
        -- NOTE(review): only the ([], []) and (_:_, _:_) cases are
        -- covered -- if one list empties before the other this is a
        -- pattern-match failure. Presumably the flows always balance
        -- (they are built from paired credits/debits); confirm.
        -- Also note 'makeT ts [] []' ignores any leftover ids.
        makeT :: [TId] -> [(String, Integer)] -> [(String, Integer)] -> [Transaction]
        makeT ts [] [] = []
        makeT (t:ts) (p1:p1s) (p2:p2s) = let am = if (snd p1) > (abs $ snd p2) then (abs $ snd p2) else snd p1
                                             nt = Transaction {from = (fst p1), to = (fst p2), amount = am, tid = t}
                                             p1s' = if (snd p1) == am then p1s else (fst p1, (snd p1 - am)) : p1s
                                             p2s' = if (abs $ snd p2) == am then p2s else (fst p2, (snd p2 + am)) : p2s
                                         in (nt : makeT ts p1s' p2s')
-- Exercise 8 -----------------------------------------
-- | Serialise a value to JSON and write it to the given file.
writeJSON :: ToJSON a => FilePath -> a -> IO ()
writeJSON fp = BS.writeFile fp . encode
-- Exercise 9 -----------------------------------------
-- | Full pipeline: recover the key from the two dog images, decrypt
-- the victims file, filter the bad transactions, write the undo
-- transactions to @out@, and return the criminal's name.
doEverything :: FilePath -> FilePath -> FilePath -> FilePath -> FilePath
             -> FilePath -> IO String
doEverything dog1 dog2 trans vict fids out = do
    key <- getSecret dog1 dog2
    decryptWithKey key vict
    mts <- getBadTs vict trans
    ts  <- maybe (error "No Transactions") return mts
    mids <- parseFile fids
    ids  <- maybe (error "No ids") return mids
    let flow = getFlow ts
    writeJSON out (undoTs flow ids)
    return (getCriminal flow)
-- | Entry point: take the six file paths from the command line, or
-- fall back to the bundled sample files, then print the criminal.
main' :: IO ()
main' = do
    args <- getArgs
    crim <- run args
    putStrLn crim
  where
    run (dog1:dog2:trans:vict:ids:out:_) =
        doEverything dog1 dog2 trans vict ids out
    run _ =
        doEverything "dog-original.jpg"
                     "dog.jpg"
                     "transactions.json"
                     "victims.json"
                     "new-ids.json"
                     "new-transactions.json"
|
ImsungChoi/haskell-test
|
src/HW05.hs
|
Haskell
|
bsd-3-clause
| 4,930
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DeriveFunctor #-}
module FreshNames where
import Prelude hiding ((<))
-- | A supply of fresh names, parameterised over the underlying name
-- representation.
newtype PolyFreshNames a = FreshNames { unFreshNames :: a }
  deriving (Show, Eq, Ord, Functor)

-- | The common case: an 'Int'-backed name supply.
type FreshNames = PolyFreshNames Int
-- | Types that can serve as an inexhaustible-in-practice name supply.
class FreshName a where
  -- | Draw one fresh name, returning it and the advanced supply.
  getFreshName :: PolyFreshNames a -> (a, PolyFreshNames a)
  -- | The initial supply.
  defaultNames :: PolyFreshNames a
  -- | Draw @n@ fresh names at once, returning them and the advanced
  -- supply.
  getNames :: Int -> PolyFreshNames a -> ([a], PolyFreshNames a)
-- | An 'Int' supply simply counts upwards from the current value.
instance FreshName Int where
    getFreshName (FreshNames current) = (current, FreshNames (succ current))
    defaultNames = FreshNames 0
    getNames count (FreshNames start) =
        let (taken, next : _) = splitAt count (enumFrom start)
        in (taken, FreshNames next)
|
kajigor/uKanren_transformations
|
src/FreshNames.hs
|
Haskell
|
bsd-3-clause
| 723
|
-- |
-- Module : Database.Monarch
-- Copyright : 2013 Noriyuki OHKAWA
-- License : BSD3
--
-- Maintainer : n.ohkawa@gmail.com
-- Stability : experimental
-- Portability : unknown
--
-- Provide TokyoTyrant monadic access interface.
--
module Database.Monarch
(
Monarch, MonarchT
, Connection, ConnectionPool
, withMonarchConn
, withMonarchPool
, runMonarchConn
, runMonarchPool
, ExtOption(..), RestoreOption(..), MiscOption(..)
, Code(..)
, MonadMonarch(..)
) where
import Database.Monarch.Types hiding (sendLBS, recvLBS)
import Database.Monarch.Action ()
|
notogawa/monarch
|
src/Database/Monarch.hs
|
Haskell
|
bsd-3-clause
| 617
|
{-# LANGUAGE TemplateHaskell #-}
-- | Generate AST types, functions and instances for tuples.
module Database.DSH.Frontend.TupleTypes
( -- * Generate tuple types, functions and instances
mkQAInstances
, mkTAInstances
, mkTupleConstructors
, mkTupleAccessors
, mkTupElemType
, mkTupElemCompile
, mkReifyInstances
, mkTranslateTupleTerm
, mkTranslateType
, mkViewInstances
, mkTupleAstComponents
-- * Helper functions
, innerConst
, outerConst
, tupAccName
, mkTupElemTerm
, mkTupConstTerm
, tupTyConstName
) where
import Data.List
import Text.Printf
import Language.Haskell.TH
import Database.DSH.Common.Impossible
import Database.DSH.Common.TH
import Database.DSH.Common.Nat
import qualified Database.DSH.Common.Type as T
import qualified Database.DSH.CL.Primitives as CP
import qualified Database.DSH.CL.Lang as CL
--------------------------------------------------------------------------------
-- Tuple Accessors
-- | Generate all constructors for a given tuple width.
-- | Generate all accessor constructors for a given tuple width: one
-- per element position, sharing width-1 fresh type variables for the
-- non-accessed components.
mkTupElemCons :: Name -> Name -> Int -> Q [Con]
mkTupElemCons aTyVar bTyVar width = do
    freshVars <- mapM (\n -> newName (printf "t%d" n)) [1 .. width - 1]
    sequence [ mkTupElemCon aTyVar bTyVar freshVars width i | i <- [1 .. width] ]
-- | The tuple type for one accessor: the bound type variables with
-- the element variable spliced in at position 'elemIdx' (1-based).
mkTupType :: Int -> Int -> [Name] -> Name -> Type
mkTupType elemIdx width boundTyVars bTyVar =
    let (before, after) = splitAt (elemIdx - 1) boundTyVars
        elemTys         = map VarT (before ++ [bTyVar] ++ after)
    in foldl' AppT (TupleT width) elemTys
-- | Build one accessor constructor: for element 'elemIdx' of a
-- width-'width' tuple, the type variable 'a' is constrained to a
-- tuple whose 'elemIdx'-th component is 'b' and whose remaining
-- components are the given bound type variables.
mkTupElemCon :: Name -> Name -> [Name] -> Int -> Int -> Q Con
mkTupElemCon aTyVar bTyVar boundTyVars width elemIdx = do
    let binders = map PlainTV boundTyVars
    let tupTy = mkTupType elemIdx width boundTyVars bTyVar
    let con = tupAccName width elemIdx
    -- GADT-style refinement emulated with an equality constraint.
    let ctx = [equalConstrTy (VarT aTyVar) tupTy]
    return $ ForallC binders ctx (NormalC con [])
-- | Generate the complete type of tuple acccessors for all tuple
-- widths.
--
-- @
-- data TupElem a b where
-- Tup2_1 :: TupElem (a, b) a
-- Tup2_2 :: TupElem (a, b) b
-- Tup3_1 :: TupElem (a, b, c) a
-- Tup3_2 :: TupElem (a, b, c) b
-- Tup3_3 :: TupElem (a, b, c) c
-- ...
-- @
--
-- Due to the lack of support for proper GADT syntax in TH, we have
-- to work with explicit universal quantification:
--
-- @
-- data TupElem a b =
-- | forall d. a ~ (b, d) => Tup2_1
-- | forall d. a ~ (d, b) => Tup2_2
--
-- | forall d e. a ~ (b, d, e) => Tup3_1
-- | forall d e. a ~ (d, b, e) => Tup3_2
-- | forall d e. a ~ (d, e, b) => Tup3_3
-- ...
-- @
mkTupElemType :: Int -> Q [Dec]
mkTupElemType maxWidth = do
    let tyName = mkName "TupElem"
    -- 'a' is the whole tuple type, 'b' the accessed element type.
    aTyVar <- newName "a"
    bTyVar <- newName "b"
    let tyVars = map PlainTV [aTyVar, bTyVar]
    -- One constructor group per tuple width from 2 up to maxWidth.
    cons <- concat <$> mapM (mkTupElemCons aTyVar bTyVar) [2..maxWidth]
    return $ [DataD [] tyName tyVars Nothing cons []]
--------------------------------------------------------------------------------
-- Translation of tuple accessors to CL
-- TupElem a b -> Exp a -> Compile CL.Expr
-- \te e ->
-- case te of
-- Tup{2}_{1} -> CP.tupElem (indIndex 1) <$> translate e
-- Tup{2}_{k} -> CP.tupElem (indIndex k) <$> translate e
-- Tup{3}_{1} -> CP.tupElem (indIndex 1) <$> translate e
-- ...
-- Tup{n}_{j} -> CP.tupElem (indIndex j) <$> translate e
-- FIXME mkTupElemCompile does not depend on 'translate'
-- anymore. Therefore, we could inject a regular global binding for
-- the function instead of a lambda.
-- | One case alternative: translate the accessor constructor into a
-- CL tuple-element projection at the corresponding (1-based) index.
mkCompileMatch :: (Name, Int) -> Q Match
mkCompileMatch (con, elemIdx) = do
    let idxLit = return $ LitE $ IntegerL $ fromIntegral elemIdx
    bodyExp <- [| CP.tupElem (intIndex $idxLit) |]
    let body = NormalB $ bodyExp
    return $ Match (ConP con []) body []
-- | Build the lambda that maps every accessor constructor (for all
-- widths up to 'maxWidth') to its CL projection; see the comment
-- above for the generated shape.
mkTupElemCompile :: Int -> Q Exp
mkTupElemCompile maxWidth = do
    let accessors = [ (tupAccName w i, i)
                    | w <- [2 .. maxWidth]
                    , i <- [1 .. w]
                    ]
    scrutinee <- newName "te"
    branches  <- mapM mkCompileMatch accessors
    return (LamE [VarP scrutinee] (CaseE (VarE scrutinee) branches))
--------------------------------------------------------------------------------
-- Reify instances for tuple types
-- | The expression @reify (undefined :: tyName)@, used to obtain the
-- runtime type representation of a type variable.
reifyType :: Name -> Exp
reifyType tyName =
    VarE (mkName "reify") `AppE` SigE (VarE 'undefined) (VarT tyName)
-- | Build the 'reify' method: apply the width-specific tuple type
-- constructor to the reified element types and wrap it in 'TupleT'.
mkReifyFun :: [Name] -> Dec
mkReifyFun tyNames =
    let argTys = map reifyType tyNames
        body = AppE (ConE $ mkName "TupleT")
               $ foldl' AppE (ConE $ tupTyConstName "" $ length tyNames) argTys
    in FunD (mkName "reify") [Clause [WildP] (NormalB body) []]
-- | A 'Reify' instance for one tuple width:
-- instance (Reify t1, ..., Reify tn) => Reify (t1, ..., tn).
mkReifyInstance :: Int -> Dec
mkReifyInstance width =
    let tyNames = map (\i -> mkName $ "t" ++ show i) [1..width]
        instTy = AppT (ConT $ mkName "Reify") $ tupleType $ map VarT tyNames
        reifyCxt = map (\tyName -> nameTyApp (mkName "Reify") (VarT tyName)) tyNames
    in InstanceD Nothing reifyCxt instTy [mkReifyFun tyNames]
-- | 'Reify' instances for all tuple widths from 2 up to 'maxWidth'.
mkReifyInstances :: Int -> Q [Dec]
mkReifyInstances maxWidth = pure [ mkReifyInstance w | w <- [2 .. maxWidth] ]
--------------------------------------------------------------------------------
-- QA instances for tuple types
-- | Build the 'toExp' method: convert every tuple component with
-- 'toExp' and wrap the results in the width-specific AST constructor.
mkToExp :: Int -> [Name] -> Dec
mkToExp width elemNames =
    let toExpVar = VarE $ mkName "toExp"
        elemArgs = map (\n -> AppE toExpVar (VarE n)) elemNames
        body = NormalB $ AppE (ConE $ outerConst "")
                       $ foldl' AppE (ConE $ innerConst "" width) elemArgs
        tupClause = Clause [TupP $ map VarP elemNames] body []
    in FunD (mkName "toExp") [tupClause]
-- | Build the 'frExp' method: match on the tuple AST constructor and
-- convert each component back with 'frExp'; any other shape raises a
-- width-tagged error.
mkFrExp :: Int -> [Name] -> Q Dec
mkFrExp width elemNames = do
    impossibleExpr <- [| error $(litE $ StringL $ printf "frExp %d" width) |]
    let tupPattern = ConP (outerConst "")
                          [ConP (innerConst "" width) (map VarP elemNames) ]
        tupleExpr = TupE $ map (\n -> AppE (VarE $ mkName "frExp") (VarE n))
                          elemNames
        tupleClause = Clause [tupPattern] (NormalB tupleExpr) []
        impossibleClause = Clause [WildP] (NormalB impossibleExpr) []
    return $ FunD (mkName "frExp") [tupleClause, impossibleClause]
-- | Build the 'Rep' type family equation:
-- type Rep (t1, ..., tn) = (Rep t1, ..., Rep tn).
mkRep :: Int -> [Name] -> Type -> Dec
mkRep width tyNames tupTyPat =
    let resTy = foldl' AppT (TupleT width)
                $ map (AppT $ ConT $ mkName "Rep")
                $ map VarT tyNames
    in TySynInstD (mkName "Rep") (TySynEqn [tupTyPat] resTy)
-- | A 'QA' instance for one tuple width, assembling the 'Rep'
-- equation and the 'toExp'/'frExp' methods (see template below).
mkQAInstance :: Int -> Q Dec
mkQAInstance width = do
    let tyNames = map (\i -> mkName $ "t" ++ show i) [1..width]
        tupTy = tupleType $ map VarT tyNames
        instTy = AppT (ConT $ mkName "QA") tupTy
        qaCxt = map (\tyName -> nameTyApp (mkName "QA") (VarT tyName)) tyNames
        rep = mkRep width tyNames tupTy
        toExp = mkToExp width tyNames
    frExp <- mkFrExp width tyNames
    return $ InstanceD Nothing qaCxt instTy [rep, toExp, frExp]
-- | Generate QA instances for tuple types according to the following template:
--
-- @
-- instance (QA t1, ..., QA tn) => QA (t1, ..., tn) where
-- type Rep (t1, ..., tn) = (Rep t1, ..., Rep tn)
-- toExp (v1, ..., vn) = TupleConstE (Tuple<n>E (toExp v1) ... (toExp vn))
-- frExp (TupleConstE (Tuple<n>E v1 ... vn)) = (frExp v1, ... b, frExp vn)
-- frExp _ = $impossible
-- @
mkQAInstances :: Int -> Q [Dec]
mkQAInstances maxWidth = traverse mkQAInstance [2 .. maxWidth]
--------------------------------------------------------------------------------
-- TA instances for tuple types
-- | A (method-less) 'TA' instance for one tuple width, requiring
-- 'BasicType' for every component.
mkTAInstance :: Int -> Dec
mkTAInstance width =
    let tyNames = map (\i -> mkName $ "t" ++ show i) [1..width]
        tupTy = foldl' AppT (TupleT width) $ map VarT tyNames
        instTy = AppT (ConT $ mkName "TA") tupTy
        taCxt = map (\tyName -> nameTyApp (mkName "BasicType") (VarT tyName)) tyNames
    in InstanceD Nothing taCxt instTy []
-- | Generate TA instances for tuple types according to the following template:
--
-- @
-- instance (BasicType t1, ..., BasicType tn) => TA (t1, ..., tn) where
-- @
mkTAInstances :: Int -> Q [Dec]
mkTAInstances maxWidth = pure (map mkTAInstance [2 .. maxWidth])
--------------------------------------------------------------------------------
-- Smart constructors for tuple values
-- | The smart-constructor name for a given tuple width, e.g. @tup3@.
tupConName :: Int -> Name
tupConName width = mkName ("tup" ++ show width)
-- | The function type @domTy -> coDomTy@ as a TH 'Type'.
mkArrowTy :: Type -> Type -> Type
mkArrowTy domTy coDomTy = ArrowT `AppT` domTy `AppT` coDomTy
-- | Signature and body of one smart constructor (see template below):
-- @tup<n> :: (QA t1, ...) => Q t1 -> ... -> Q tn -> Q (t1, ..., tn)@.
mkTupleConstructor :: Int -> [Dec]
mkTupleConstructor width =
    let tyNames = map (\i -> mkName $ "t" ++ show i) [1..width]
        -- Type stuff
        tupTy = AppT (ConT qName) $ foldl' AppT (TupleT width) $ map VarT tyNames
        elemTys = map (AppT (ConT qName)) $ map VarT tyNames
        arrowTy = foldr mkArrowTy tupTy elemTys
        qaConstr = map (\n -> nameTyApp (mkName "QA") (VarT n)) tyNames
        funTy = ForallT (map PlainTV tyNames) qaConstr arrowTy
        -- Term stuff
        qPats = map (\n -> ConP qName [VarP n]) tyNames
        tupConApp = foldl' AppE (ConE $ innerConst "" width) $ map VarE tyNames
        bodyExp = AppE (ConE qName) (AppE (ConE $ outerConst "") tupConApp)
        sig = SigD (tupConName width) funTy
        body = FunD (tupConName width) [Clause qPats (NormalB bodyExp) []]
    in [sig, body]
-- | Construct smart constructors for tuple types according to the
-- following template.
--
-- @
-- tup<n> :: (QA t1, ...,QA tn) => Q t1 -> ... -> Q tn -> Q (t1, ..., tn)
-- tup<n> (Q v1) ... (Q vn)= Q (TupleConstE (Tuple<n>E v1 ... vn))
-- @
mkTupleConstructors :: Int -> Q [Dec]
mkTupleConstructors maxWidth =
    pure (concat [ mkTupleConstructor w | w <- [2 .. maxWidth] ])
--------------------------------------------------------------------------------
-- Tuple accessors
-- | Signature and body of one field accessor (see template below):
-- @tup<n>_<i> :: (QA ...) => Q (t1, ..., tn) -> Q t_i@.
mkTupleAccessor :: Int -> Int -> Q [Dec]
mkTupleAccessor width idx = do
    -- Construct the function type
    fieldTyName <- newName "a"
    otherFieldTyNames <- mapM (\i -> newName $ printf "b%d" i) [1..width-1]
    -- The accessed field's type variable sits at position idx (1-based).
    let elemTyNames = take (idx - 1) otherFieldTyNames
                      ++ [fieldTyName]
                      ++ drop (idx - 1) otherFieldTyNames
        elemTyVars = map VarT elemTyNames
        qaCxt = map (\tyName -> nameTyApp (mkName "QA") (VarT tyName)) elemTyNames
        tupTy = AppT (ConT qName) $ foldl' AppT (TupleT width) elemTyVars
        fieldTy = AppT (ConT qName) (VarT fieldTyName)
        arrowTy = mkArrowTy tupTy fieldTy
        funTy = ForallT (map PlainTV elemTyNames) qaCxt arrowTy
        funSig = SigD (tupAccFunName width idx) funTy
    -- Construct the function equation
    exprName <- newName "e"
    funBody <- appE (conE qName) $ mkTupElemTerm width idx (VarE exprName)
    let qPat = ConP qName [VarP exprName]
        funDef = FunD (tupAccFunName width idx) [Clause [qPat] (NormalB funBody) []]
    return [funSig, funDef]
-- | Construct field accessor functions for tuple types.
--
-- @
-- tup<n>_<i> :: (QA t1, ..., QA t_n) => Q (t_1, ..., t_n) -> Q t_i
-- tup<n>_<i> (Q e) = Q (AppE (TupElem Tup<n>_<i>) e)
-- @
mkTupleAccessors :: Int -> Q [Dec]
mkTupleAccessors maxWidth =
    fmap concat (mapM (uncurry mkTupleAccessor) pairs)
  where
    -- Every (width, index) combination, widths first.
    pairs = [ (w, i) | w <- [2 .. maxWidth], i <- [1 .. w] ]
--------------------------------------------------------------------------------
-- Translation function for tuple constructors in terms
{-
\t -> case t of
Tuple2E a b -> do
a' <- translate a
b' <- translate b
return $ CL.MkTuple (T.TupleT $ map T.typeOf [a', b']) [a', b']
Tuple3E a b c -> ...
-}
-- | The do-statement @resName <- translate argName@.
mkTransBind :: Name -> Name -> Stmt
mkTransBind argName resName =
    BindS (VarP resName) (VarE (mkName "translate") `AppE` VarE argName)
-- | Generate the translation case for a particular tuple value
-- constructor.
-- | Generate the translation case for a particular tuple value
-- constructor: bind a translated sub-term for every component, then
-- build a CL tuple from the results.
mkTranslateTermMatch :: Int -> Q Match
mkTranslateTermMatch width = do
    -- Component names a, b, c, ... and their primed translations.
    let names = map (\c -> [c]) $ take width ['a' .. 'z']
        subTermNames = map mkName names
        transTermNames = map (mkName . (++ "'")) names
        transBinds = zipWith mkTransBind subTermNames transTermNames
        transTerms = listE $ map varE transTermNames
    conStmt <- NoBindS <$>
        [| return $ CL.MkTuple (T.TupleT $ map T.typeOf $transTerms) $transTerms |]
    let matchBody = DoE $ transBinds ++ [conStmt]
        matchPat = ConP (innerConst "" width) (map VarP subTermNames)
    return $ Match matchPat (NormalB matchBody) []
-- | Generate the lambda expression that translates frontend tuple
-- value constructors into CL tuple constructors.
-- | Generate the lambda expression that translates frontend tuple
-- value constructors into CL tuple constructors.
mkTranslateTupleTerm :: Int -> Q Exp
mkTranslateTupleTerm maxWidth = do
    scrutinee <- newName "tupleConst"
    branches  <- mapM mkTranslateTermMatch [2 .. maxWidth]
    return (LamE [VarP scrutinee] (CaseE (VarE scrutinee) branches))
--------------------------------------------------------------------------------
-- Translation function for tuple types
{-
\t -> case t of
Tuple3T t1 t2 t3 -> T.TupleT [translateType t1, translateType t2, translateType t3]
-}
-- | One case alternative translating a tuple type constructor into a
-- CL 'T.TupleT' over the translated component types.
mkTranslateTypeMatch :: Int -> Q Match
mkTranslateTypeMatch width = do
    let subTyNames = map mkName $ map (\c -> [c]) $ take width ['a' .. 'z']
        matchPat = ConP (tupTyConstName "" width) (map VarP subTyNames)
        transElemTys = ListE $ map (\n -> AppE (VarE $ mkName "translateType") (VarE n)) subTyNames
    let matchBody = AppE (ConE 'T.TupleT) transElemTys
    return $ Match matchPat (NormalB matchBody) []
-- | The lambda dispatching over all tuple type constructors up to
-- 'maxWidth' (see the comment above for the generated shape).
mkTranslateType :: Int -> Q Exp
mkTranslateType maxWidth = do
    scrutinee <- newName "typeConst"
    branches  <- mapM mkTranslateTypeMatch [2 .. maxWidth]
    return (LamE [VarP scrutinee] (CaseE (VarE scrutinee) branches))
--------------------------------------------------------------------------------
-- View instances
{-
instance (QA a,QA b,QA c) => View (Q (a,b,c)) where
type ToView (Q (a,b,c)) = (Q a,Q b,Q c)
view (Q e) = ( Q (AppE (TupElem Tup3_1) e)
, Q (AppE (TupElem Tup3_2) e)
, Q (AppE (TupElem Tup3_3) e)
)
-}
-- | The 'ToView' type family equation:
-- type ToView (Q (t1, ..., tn)) = (Q t1, ..., Q tn).
mkToView :: [Name] -> Type -> Dec
mkToView names tupTyPat =
    let qTupPat = AppT (ConT qName) tupTyPat
        resTupTy = tupleType $ map (\n -> AppT (ConT qName) (VarT n)) names
    in TySynInstD (mkName "ToView") (TySynEqn [qTupPat] resTupTy)
-- | Build the 'view' method: project every element of the wrapped
-- tuple with the corresponding accessor term and collect the results.
mkViewFun :: Int -> Q Dec
mkViewFun width = do
    expName <- newName "e"
    let expVar = VarE expName
        qPat = ConP qName [VarP expName]
    viewBodyExp <- TupE <$> mapM (\idx -> appE (conE qName) $ mkTupElemTerm width idx expVar)
                            [1..width]
    let viewClause = Clause [qPat] (NormalB viewBodyExp) []
    return $ FunD (mkName "view") [viewClause]
-- | A 'View' instance for one tuple width (see template above).
mkViewInstance :: Int -> Q Dec
mkViewInstance width = do
    let names = map (\i -> mkName $ "t" ++ show i) [1..width]
        tupTy = tupleType $ map VarT names
        instTy = AppT (ConT $ mkName "View") (AppT (ConT qName) tupTy)
        viewCxt = map (\n -> nameTyApp (mkName "QA") (VarT n)) names
        toViewDec = mkToView names tupTy
    viewDec <- mkViewFun width
    return $ InstanceD Nothing viewCxt instTy [toViewDec, viewDec]
mkViewInstances :: Int -> Q [Dec]
mkViewInstances maxWidth = traverse mkViewInstance [2 .. maxWidth]
--------------------------------------------------------------------------------
-- Generate the 'TupleConst' type
-- | A fresh type-variable name @t<i>@ for tuple element @i@.
tupElemTyName :: Int -> Q Name
tupElemTyName i = newName ("t" ++ show i)
-- | Generate a single constructor for the 'TabTuple' type.
-- | Generate a single constructor for the 'TabTuple' type.
mkTupleCons :: Name -> (Int -> Name) -> (Type -> Type) -> Int -> Q Con
mkTupleCons tupTyName conName elemTyCons width = do
    tupElemTyNames <- mapM tupElemTyName [1..width]
    let tyVarBinders = map PlainTV tupElemTyNames
        -- (t1, ..., t<n>)
        tupTy = foldl' AppT (TupleT width)
                $ map VarT tupElemTyNames
        -- a ~ (t1, ..., t<n>)
        tupConstraint = equalConstrTy (VarT tupTyName) tupTy
        -- Reify t1, ..., Reify t<n>
        reifyConstraints = map (\n -> nameTyApp (mkName "Reify") (VarT n)) tupElemTyNames
        constraints = tupConstraint : reifyConstraints
    let -- '(Exp/Type t1) ... (Exp/Type t<n>)'
        elemTys = [ (strict, elemTyCons (VarT t))
                  | t <- tupElemTyNames
                  ]
    return $ ForallC tyVarBinders constraints
           $ NormalC (conName width) elemTys
  where
    -- All generated constructor fields are marked strict.
    strict = Bang NoSourceUnpackedness SourceStrict
-- | Generate the types for AST type and term tuple constructors: 'TupleConst' and
-- 'TupleType'. The first parameter is the name of the type. The second parameter
-- is the type constructor for element fields and the third parameter generates
-- the constructor name for a given tuple width.
--
-- @
-- data TupleConst a where
-- Tuple<n>E :: (Reify t1, ..., Reify t<n>) => Exp t1
-- -> ...
-- -> Exp t<n>
-- -> TupleConst (t1, ..., t<n>)
-- @
--
-- Because TH does not directly support GADT syntax, we have to
-- emulate it using explicit universal quantification:
--
-- @
-- data TupleConst a =
-- forall t1, ..., t<n>. a ~ (t1, ..., t<n>),
-- Reify t1,
-- ...
-- Reify t<n> =>
-- Exp t1 -> ... -> Exp t<n>
-- @
mkTupleASTTy :: Name -> (Type -> Type) -> (Int -> Name) -> Int -> Q [Dec]
mkTupleASTTy tyName elemTyCons conName maxWidth = do
    -- The single type parameter 'a' is refined per constructor via an
    -- equality constraint (see the comment above).
    tupTyName <- newName "a"
    cons <- mapM (mkTupleCons tupTyName conName elemTyCons) [2..maxWidth]
    return [DataD [] tyName [PlainTV tupTyName] Nothing cons []]
-- | Generate the 'TupleConst' AST type for tuple term construction
mkAstTupleConst :: Int -> Q [Dec]
mkAstTupleConst maxWidth =
    mkTupleASTTy (mkName "TupleConst") expCon (innerConst "") maxWidth
  where
    -- Element fields are wrapped in the term constructor 'Exp'.
    expCon = AppT $ ConT $ mkName "Exp"
-- | Generate the 'TupleType' AST type for tuple type construction
-- (the original comment said 'TupleConst' -- copy-paste slip).
mkAstTupleType :: Int -> Q [Dec]
mkAstTupleType maxWidth =
    mkTupleASTTy (mkName "TupleType") expCon (tupTyConstName "") maxWidth
  where
    -- Element fields are wrapped in the type constructor 'Type'.
    expCon = AppT $ ConT $ mkName "Type"
-- | All tuple AST declarations: the term type followed by the type
-- type.
mkTupleAstComponents :: Int -> Q [Dec]
mkTupleAstComponents maxWidth = do
    constDecs <- mkAstTupleConst maxWidth
    typeDecs  <- mkAstTupleType maxWidth
    return (constDecs ++ typeDecs)
--------------------------------------------------------------------------------
-- Helper functions
-- | The name of the constructor that constructs a tuple construction
-- term.
outerConst :: String -> Name
outerConst "" = mkName "TupleConstE"
outerConst m  = mkName (m ++ ".TupleConstE")
-- | The name of the constructor for a given tuple width.
innerConst :: String -> Int -> Name
innerConst "" width = mkName ("Tuple" ++ show width ++ "E")
innerConst m width  = mkName (m ++ ".Tuple" ++ show width ++ "E")
-- | The name of a tuple access constructor for a given tuple width
-- and element index.
tupAccName :: Int -> Int -> Name
tupAccName width elemIdx =
    mkName ("Tup" ++ show width ++ "_" ++ show elemIdx)
-- | The name of a tuple access function for a given tuple width and element
-- index.
tupAccFunName :: Int -> Int -> Name
tupAccFunName width elemIdx =
    mkName ("tup" ++ show width ++ "_" ++ show elemIdx)
-- | The name of the tuple type constructor for a given tuple width.
tupTyConstName :: String -> Int -> Name
tupTyConstName "" width = mkName ("Tuple" ++ show width ++ "T")
tupTyConstName m width  = mkName (m ++ ".Tuple" ++ show width ++ "T")
-- |
-- | The TH tuple type over the given element types; the width is the
-- number of elements.
tupleType :: [Type] -> Type
tupleType elemTypes =
    foldl' AppT (TupleT (length elemTypes)) elemTypes
-- | The name of the 'Q' wrapper constructor used throughout the
-- generated code.
qName :: Name
qName = mkName "Q"
-- | Construct a DSH term that accesses a specificed tuple element.
-- | Construct a DSH term that accesses a specified tuple element:
-- generates @AppE (TupElem Tup<width>_<idx>) arg@.
mkTupElemTerm :: Int -> Int -> Exp -> Q Exp
mkTupElemTerm width idx arg = do
    let ta = ConE $ tupAccName width idx
    return $ AppE (AppE (ConE $ mkName "AppE") (AppE (ConE $ mkName "TupElem") ta)) arg
-- | From a list of operand terms, construct a DSH tuple term.
-- | From a list of operand terms, construct a DSH tuple term.
-- Only widths up to the maximum generated width of 16 are supported.
mkTupConstTerm :: [Exp] -> Q Exp
mkTupConstTerm ts
    | length ts <= 16 = return $ AppE (ConE $ mkName "TupleConstE")
                               $ foldl' AppE (ConE $ innerConst "" $ length ts) ts
    | otherwise = impossible
|
ulricha/dsh
|
src/Database/DSH/Frontend/TupleTypes.hs
|
Haskell
|
bsd-3-clause
| 20,752
|
import Control.Monad.Logger
import Data.ByteString.Char8 (pack)
import Meadowstalk.Application
import Network.Wai.Handler.Warp
import System.Environment
-------------------------------------------------------------------------------
-- | Read the port and database connection string from the PORT and DB
-- environment variables, build the application, and serve it.
main :: IO ()
main = do
    port    <- fmap read (getEnv "PORT")
    connstr <- fmap pack (getEnv "DB")
    app     <- makeApplication connstr
    runSettings (setPort port defaultSettings) app
|
HalfWayMan/meadowstalk
|
src/Main.hs
|
Haskell
|
bsd-3-clause
| 415
|
module System.Console.Haskeline.Prefs(
Prefs(..),
defaultPrefs,
readPrefs,
CompletionType(..),
BellStyle(..),
EditMode(..),
HistoryDuplicates(..),
lookupKeyBinding
) where
import Control.Monad.Catch (handle)
import Control.Exception (IOException)
import Data.Char(isSpace,toLower)
import Data.List(foldl')
import qualified Data.Map as Map
import System.Console.Haskeline.Key
{- |
'Prefs' allow the user to customize the terminal-style line-editing interface. They are
read by default from @~/.haskeline@; to override that behavior, use
'readPrefs' and @runInputTWithPrefs@.
Each line of a @.haskeline@ file defines
one field of the 'Prefs' datatype; field names are case-insensitive and
unparseable lines are ignored. For example:
> editMode: Vi
> completionType: MenuCompletion
> maxhistorysize: Just 40
-}
data Prefs = Prefs { bellStyle :: !BellStyle,
                     -- ^ How (and whether) to sound the bell.
                     editMode :: !EditMode,
                     -- ^ Emacs- or Vi-style key bindings.
                     maxHistorySize :: !(Maybe Int),
                     -- ^ Cap on stored history entries ('Nothing' = unlimited).
                     historyDuplicates :: HistoryDuplicates,
                     completionType :: !CompletionType,
                     completionPaging :: !Bool,
                        -- ^ When listing completion alternatives, only display
                        -- one screen of possibilities at a time.
                     completionPromptLimit :: !(Maybe Int),
                        -- ^ If more than this number of completion
                        -- possibilities are found, then ask before listing
                        -- them.
                     listCompletionsImmediately :: !Bool,
                        -- ^ If 'False', completions with multiple possibilities
                        -- will ring the bell and only display them if the user
                        -- presses @TAB@ again.
                     customBindings :: Map.Map Key [Key],
                     -- ^ Extra bindings: a key mapped to the key list it
                     -- expands to (see 'lookupKeyBinding').
                     -- (termName, keysequence, key)
                     customKeySequences :: [(Maybe String, String,Key)]
                     }
                        deriving Show
-- | How completion alternatives are presented.
data CompletionType = ListCompletion | MenuCompletion
    deriving (Read,Show)

-- | How (and whether) the bell alerts the user.
data BellStyle = NoBell | VisualBell | AudibleBell
    deriving (Show, Read)

-- | Which editing key map to use.
data EditMode = Vi | Emacs
    deriving (Show,Read)

-- | Policy for duplicate entries in the history.
data HistoryDuplicates = AlwaysAdd | IgnoreConsecutive | IgnoreAll
    deriving (Show,Read)
-- | The default preferences which may be overwritten in the
-- @.haskeline@ file.
defaultPrefs :: Prefs
defaultPrefs = Prefs
    { bellStyle                  = AudibleBell
    , editMode                   = Emacs
    , maxHistorySize             = Just 100
    , historyDuplicates          = AlwaysAdd
    , completionType             = ListCompletion
    , completionPaging           = True
    , completionPromptLimit      = Just 100
    , listCompletionsImmediately = True
    , customBindings             = Map.empty
    , customKeySequences         = []
    }
-- | Lift a field setter into a string-driven updater; values that
-- fail to parse leave the preferences unchanged.
mkSettor :: Read a => (a -> Prefs -> Prefs) -> String -> Prefs -> Prefs
mkSettor f str = case readMaybe str of
    Nothing -> id
    Just x  -> f x
-- | 'read' that returns 'Nothing' instead of crashing when the input
-- has no unambiguous parse.
readMaybe :: Read a => String -> Maybe a
readMaybe str = case reads str of
    [(value, _)] -> Just value
    _            -> Nothing
-- | Table mapping the lower-cased field names accepted in a
-- @.haskeline@ file to the corresponding 'Prefs' updater.
settors :: [(String, String -> Prefs -> Prefs)]
settors = [("bellstyle", mkSettor $ \x p -> p {bellStyle = x})
          ,("editmode", mkSettor $ \x p -> p {editMode = x})
          ,("maxhistorysize", mkSettor $ \x p -> p {maxHistorySize = x})
          ,("completiontype", mkSettor $ \x p -> p {completionType = x})
          ,("completionpaging", mkSettor $ \x p -> p {completionPaging = x})
          ,("completionpromptlimit", mkSettor $ \x p -> p {completionPromptLimit = x})
          ,("listcompletionsimmediately", mkSettor $ \x p -> p {listCompletionsImmediately = x})
          ,("historyduplicates", mkSettor $ \x p -> p {historyDuplicates = x})
          -- "bind" and "keyseq" have bespoke parsers rather than Read.
          ,("bind", addCustomBinding)
          ,("keyseq", addCustomKeySequence)
          ]
-- | Parse a "bind" line: the first key is bound to the remaining
-- sequence of keys. Lines that fail to parse, or contain no keys,
-- are ignored.
addCustomBinding :: String -> Prefs -> Prefs
addCustomBinding str p =
    case mapM parseKey (words str) of
        Just (key : expansion) ->
            p {customBindings = Map.insert key expansion (customBindings p)}
        _ -> p
-- | Parse a "keyseq" line of the form @[term] string key@: map a raw
-- input character sequence (optionally only for terminal type @term@)
-- to a 'Key'. Unparseable lines are ignored.
addCustomKeySequence :: String -> Prefs -> Prefs
addCustomKeySequence str = maybe id addKS maybeParse
  where
    maybeParse :: Maybe (Maybe String, String,Key)
    maybeParse = case words str of
        -- Two words: applies to every terminal type.
        [cstr,kstr] -> parseWords Nothing cstr kstr
        -- Three words: restricted to the named terminal type.
        [term,cstr,kstr] -> parseWords (Just term) cstr kstr
        _ -> Nothing
    -- 'cstr' is read as a Haskell string literal, 'kstr' as a key.
    parseWords mterm cstr kstr = do
        k <- parseKey kstr
        cs <- readMaybe cstr
        return (mterm,cs,k)
    addKS ks p = p {customKeySequences = ks:customKeySequences p}
-- | Look up the custom binding for a key; an unbound key maps to
-- itself.
lookupKeyBinding :: Key -> Prefs -> [Key]
lookupKeyBinding k prefs =
    Map.findWithDefault [k] k (customBindings prefs)
-- | Read 'Prefs' from a given file. If there is an error reading the file,
-- the 'defaultPrefs' will be returned.
readPrefs :: FilePath -> IO Prefs
readPrefs file = handle (\(_::IOException) -> return defaultPrefs) $ do
    ls <- fmap lines $ readFile file
    -- Force the fold so read errors surface inside the handler.
    return $! foldl' applyField defaultPrefs ls
  where
    -- Apply one "name: value" line; unknown names leave prefs as-is.
    applyField p l = case break (==':') l of
        (name,val) -> case lookup (map toLower $ trimSpaces name) settors of
            Nothing -> p
            Just set -> set (drop 1 val) p -- drop initial ":", don't crash if val==""
    -- Strip leading and trailing whitespace.
    trimSpaces = dropWhile isSpace . reverse . dropWhile isSpace . reverse
|
judah/haskeline
|
System/Console/Haskeline/Prefs.hs
|
Haskell
|
bsd-3-clause
| 5,774
|
{-# LANGUAGE FlexibleContexts #-}
module Stencil2 where
import Control.Monad
import Control.Exception
import System.Random.MWC
import Data.Array.Unboxed hiding (Array)
import Data.Array.Accelerate hiding (round, min, max, fromIntegral)
import qualified Data.Array.IArray as IArray
-- | Two-array 3x3 stencil: the vertical neighbours (and centre) come
-- from the first stencil, the horizontal neighbours (and centre) from
-- the second; the average of the two centres is subtracted.
stencil2D2 :: Floating (Exp a) => Stencil3x3 a -> Stencil3x3 a -> Exp a
stencil2D2 ((_,t,_), (_,x,_), (_,b,_))
           ((_,_,_), (l,y,r), (_,_,_)) = t + b + l + r - ((x+y) / 2)
-- | Build a reference (UArray) and an Accelerate computation of the
-- two-array stencil over randomly filled arrays of differing extents;
-- the caller forces either via the returned thunks.
test_stencil2_2D :: Int -> IO (() -> UArray (Int,Int) Float, () -> Acc (Array DIM2 Float))
test_stencil2_2D n2 = withSystemRandom $ \gen -> do
    -- Derive two deliberately different shapes from the element count.
    let n = round $ sqrt (fromIntegral n2 :: Double)
        m = n * 2
        u = m `div` 3
        v = n + m
    m1 <- listArray ((0,0),(n-1,m-1)) `fmap` replicateM (n*m) (uniformR (-1,1) gen) :: IO (UArray (Int,Int) Float)
    m2 <- listArray ((0,0),(u-1,v-1)) `fmap` replicateM (u*v) (uniformR (-1,1) gen) :: IO (UArray (Int,Int) Float)
    -- Force the device-side copies up front so timing excludes transfer.
    m1' <- let m1' = fromIArray m1 in evaluate (m1' `indexArray` (Z:.0:.0)) >> return m1'
    m2' <- let m2' = fromIArray m2 in evaluate (m2' `indexArray` (Z:.0:.0)) >> return m2'
    --
    return (\() -> run_ref m1 m2, \() -> run_acc m1' m2')
  where
    -- Accelerate version: boundary handled by Mirror/Wrap modes.
    run_acc xs ys = stencil2 stencil2D2 Mirror (use xs) Wrap (use ys)
    -- Reference version, computed over the intersection of the extents.
    run_ref xs ys =
        let (_,(n,m)) = bounds xs
            (_,(u,v)) = bounds ys
            sh = ((0,0), (n `min` u, m `min` v))
            -- boundary conditions are placed on the *source* arrays
            --
            get1 (x,y) = xs IArray.! (mirror n x, mirror m y)
            get2 (x,y) = ys IArray.! (wrap u x, wrap v y)
            -- Reflect indices at the edges (matches Mirror).
            mirror sz i
                | i < 0 = -i
                | i > sz = sz - (i-sz)
                | otherwise = i
            -- Wrap indices around (matches Wrap).
            wrap sz i
                | i < 0 = sz + i + 1
                | i > sz = i - sz - 1
                | otherwise = i
            f (ix,iy) = let t = get1 (ix, iy-1)
                            b = get1 (ix, iy+1)
                            x = get1 (ix, iy)
                            l = get2 (ix-1,iy)
                            r = get2 (ix+1,iy)
                            y = get2 (ix, iy)
                        in
                        t + b + l + r - ((x+y) / 2)
        in
        array sh [(ix, f ix) | ix <- range sh]
-- Main
-- ----
-- | Dispatch on the benchmark variant name; only "2D" is supported.
run2D :: String -> Int -> IO (() -> UArray (Int,Int) Float, () -> Acc (Array DIM2 Float))
run2D "2D"    = test_stencil2_2D
run2D variant = error ("unknown variant: " ++ variant)
|
wilbowma/accelerate
|
accelerate-examples/tests/primitives/Stencil2.hs
|
Haskell
|
bsd-3-clause
| 2,440
|
-- -----------------------------------------------------------------------------
--
-- CharSet.hs, part of Alex
--
-- (c) Chris Dornan 1995-2000, Simon Marlow 2003
--
-- An abstract CharSet type for Alex. To begin with we'll use Alex's
-- original definition of sets as functions, then later will
-- transition to something that will work better with Unicode.
--
-- ----------------------------------------------------------------------------}
module CharSet (
setSingleton,
Encoding(..),
Byte,
ByteSet,
byteSetSingleton,
byteRanges,
byteSetRange,
CharSet, -- abstract
emptyCharSet,
charSetSingleton,
charSet,
charSetMinus,
charSetComplement,
charSetRange,
charSetUnion,
charSetQuote,
setUnions,
byteSetToArray,
byteSetElems,
byteSetElem
) where
import Data.Array
import Data.Ranged
import Data.Word
import Data.Maybe (catMaybes)
import Data.Char (chr,ord)
import UTF8
-- | Bytes are unsigned 8-bit values.
type Byte = Word8
-- Implementation as functions
-- | A set of characters, represented as a ranged set.
type CharSet = RSet Char
-- | A set of bytes, represented as a ranged set.
type ByteSet = RSet Byte
-- type Utf8Set = RSet [Byte]
-- | A contiguous span of UTF-8 byte sequences.
type Utf8Range = Span [Byte]
-- | The character encodings Alex can target.
data Encoding = Latin1 | UTF8
emptyCharSet :: CharSet
emptyCharSet = rSetEmpty
byteSetElem :: ByteSet -> Byte -> Bool
byteSetElem = rSetHas
-- | The set containing exactly one character.
charSetSingleton :: Char -> CharSet
charSetSingleton c = rSingleton c

-- | The set containing exactly one element of any ordered type.
setSingleton :: DiscreteOrdered a => a -> RSet a
setSingleton x = rSingleton x

-- | The set of all characters occurring in the given list.
charSet :: [Char] -> CharSet
charSet cs = setUnions [ charSetSingleton c | c <- cs ]
-- | Set difference: characters in the first set but not the second.
charSetMinus :: CharSet -> CharSet -> CharSet
charSetMinus a b = rSetDifference a b

-- | Set union.
charSetUnion :: CharSet -> CharSet -> CharSet
charSetUnion a b = rSetUnion a b

-- | Union of a list of sets; the empty list yields the empty set.
setUnions :: DiscreteOrdered a => [RSet a] -> RSet a
setUnions sets = foldr rSetUnion rSetEmpty sets

-- | Complement with respect to the full character range.
charSetComplement :: CharSet -> CharSet
charSetComplement s = rSetNegation s

-- | The inclusive range of characters from @c1@ to @c2@.
charSetRange :: Char -> Char -> CharSet
charSetRange c1 c2 = makeRangedSet [Range (BoundaryBelow c1) (BoundaryAbove c2)]
-- | Tabulate a byte set as a dense membership array indexed by every
-- byte value.
--
-- The association list always covers the full @0x00..0xff@ range, so
-- the bounds are written out literally instead of being recovered with
-- the partial functions 'head' and 'last' (the original formulation);
-- the result is identical but total.
byteSetToArray :: ByteSet -> Array Byte Bool
byteSetToArray set = array (0x00, 0xff) ass
  where ass = [(c,rSetHas set c) | c <- [0..0xff]]

-- | All bytes that are members of the set, in ascending order.
byteSetElems :: ByteSet -> [Byte]
byteSetElems set = [c | c <- [0 .. 0xff], rSetHas set c]
-- | Convert a character set into byte-sequence spans according to the
-- chosen encoding.  Empty ranges (where 'charRangeToCharSpan' yields
-- 'Nothing') are dropped.
charToRanges :: Encoding -> CharSet -> [Utf8Range]
charToRanges Latin1 cset =
    [ fmap (\c -> [fromIntegral (ord c)]) sp                -- Span [Byte]
    | Just sp <- map (charRangeToCharSpan False) (rSetRanges cset) ]
charToRanges UTF8 cset =
    concat [ toUtfRange (fmap UTF8.encode sp)               -- [Span [Byte]]
           | Just sp <- map (charRangeToCharSpan True) (rSetRanges cset) ]
-- | Turns a range of characters expressed as a pair of UTF-8 byte
-- sequences into a set of ranges, in which each range of the resulting
-- set is between pairs of sequences of the same length.
toUtfRange :: Span [Byte] -> [Span [Byte]]
toUtfRange (Span lo hi) = fix lo hi

-- Split a span whose endpoints have different UTF-8 lengths at the
-- 1/2/3/4-byte sequence-length boundaries.
fix :: [Byte] -> [Byte] -> [Span [Byte]]
fix x y
  | length x == length y = [Span x y]
  | otherwise =
      case length x of
        1 -> Span x [0x7F]           : fix [0xC2,0x80]           y
        2 -> Span x [0xDF,0xBF]      : fix [0xE0,0x80,0x80]      y
        3 -> Span x [0xEF,0xBF,0xBF] : fix [0xF0,0x80,0x80,0x80] y
        _ -> error "fix: incorrect input given"
-- | Project a span back to its pair-of-byte-sequences representation.
byteRangeToBytePair :: Span [Byte] -> ([Byte],[Byte])
byteRangeToBytePair (Span x y) = (x,y)

-- | An interval between two values of the same shape.
data Span a = Span a a -- lower bound inclusive, higher bound exclusive
                       -- (SDM: upper bound inclusive, surely??)

-- Mapping acts on both endpoints.
instance Functor Span where
    fmap f (Span x y) = Span (f x) (f y)
-- Convert one Data.Ranged range into a concrete character span,
-- tightening open boundaries with succ/pred.  Ranges that are empty by
-- construction (starting above everything, or ending below everything)
-- yield Nothing.  The Bool selects the upper limit for an unbounded
-- range: full Unicode (True) or Latin-1 (False).
charRangeToCharSpan :: Bool -> Range Char -> Maybe (Span Char)
charRangeToCharSpan _ (Range BoundaryAboveAll _) = Nothing
charRangeToCharSpan _ (Range _ BoundaryBelowAll) = Nothing
charRangeToCharSpan uni (Range x y) = Just (Span (l x) (h y))
    where l b = case b of
                  BoundaryBelowAll -> '\0'
                  BoundaryBelow a  -> a
                  -- NOTE(review): succ is partial at maxBound, but the
                  -- BoundaryAboveAll case above filters out that range.
                  BoundaryAbove a  -> succ a
                  BoundaryAboveAll -> error "panic: charRangeToCharSpan"
          h b = case b of
                  BoundaryBelowAll -> error "panic: charRangeToCharSpan"
                  -- NOTE(review): pred is partial at minBound — TODO
                  -- confirm empty ranges never reach here.
                  BoundaryBelow a  -> pred a
                  BoundaryAbove a  -> a
                  BoundaryAboveAll | uni -> chr 0x10ffff
                                   | otherwise -> chr 0xff
-- | All byte ranges of the set, encoded per 'charToRanges'.
byteRanges :: Encoding -> CharSet -> [([Byte],[Byte])]
byteRanges enc cset = map byteRangeToBytePair (charToRanges enc cset)

-- | The inclusive range of bytes from @c1@ to @c2@.
byteSetRange :: Byte -> Byte -> ByteSet
byteSetRange c1 c2 = makeRangedSet [Range (BoundaryBelow c1) (BoundaryAbove c2)]

-- | The byte set with a single member.
byteSetSingleton :: Byte -> ByteSet
byteSetSingleton b = rSingleton b
-- | 'DiscreteOrdered' instance for bytes, needed to build @RSet Byte@.
--
-- The original @adjacent x y = x + 1 == y@ wraps around at 'maxBound',
-- so @adjacent 0xff 0x00@ was incorrectly 'True'; guard the overflow.
instance DiscreteOrdered Word8 where
  adjacent x y = x /= maxBound && x + 1 == y
  adjacentBelow 0 = Nothing
  adjacentBelow x = Just (x-1)
-- TODO: More efficient generated code!
-- | Render a character set as the source text of a Haskell predicate
-- @\\c -> ...@.  Each range contributes one @&&@-conjunction; the
-- conjunctions are joined by @||@ with a trailing @False@, exactly as
-- the right fold in the original produced.
charSetQuote :: CharSet -> String
charSetQuote s = "(\\c -> " ++ body ++ ")"
  where
    body = concat [ quoteRange r ++ " || " | r <- rSetRanges s ] ++ "False"
    quoteRange (Range l h) = quoteL l ++ " && " ++ quoteH h
    quoteL (BoundaryAbove a) = "c > " ++ show a
    quoteL (BoundaryBelow a) = "c >= " ++ show a
    quoteL (BoundaryAboveAll) = "False"
    quoteL (BoundaryBelowAll) = "True"
    quoteH (BoundaryAbove a) = "c <= " ++ show a
    quoteH (BoundaryBelow a) = "c < " ++ show a
    quoteH (BoundaryAboveAll) = "True"
    quoteH (BoundaryBelowAll) = "False"
|
beni55/alex
|
src/CharSet.hs
|
Haskell
|
bsd-3-clause
| 5,261
|
{-# OPTIONS_GHC -fno-implicit-prelude -#include "HsBase.h" #-}
#undef DEBUG_DUMP
-----------------------------------------------------------------------------
-- |
-- Module : GHC.IO
-- Copyright : (c) The University of Glasgow, 1992-2001
-- License : see libraries/base/LICENSE
--
-- Maintainer : libraries@haskell.org
-- Stability : internal
-- Portability : non-portable
--
-- String I\/O functions
--
-----------------------------------------------------------------------------
-- #hide
module GHC.IO (
hWaitForInput, hGetChar, hGetLine, hGetContents, hPutChar, hPutStr,
commitBuffer', -- hack, see below
hGetcBuffered, -- needed by ghc/compiler/utils/StringBuffer.lhs
hGetBuf, hGetBufNonBlocking, hPutBuf, hPutBufNonBlocking, slurpFile,
memcpy_ba_baoff,
memcpy_ptr_baoff,
memcpy_baoff_ba,
memcpy_baoff_ptr,
) where
import Foreign
import Foreign.C
import System.IO.Error
import Data.Maybe
import Control.Monad
import System.Posix.Internals
import GHC.Enum
import GHC.Base
import GHC.IOBase
import GHC.Handle -- much of the real stuff is in here
import GHC.Real
import GHC.Num
import GHC.Show
import GHC.List
import GHC.Exception ( ioError, catch )
#ifdef mingw32_HOST_OS
import GHC.Conc
#endif
-- ---------------------------------------------------------------------------
-- Simple input operations
-- If hWaitForInput finds anything in the Handle's buffer, it
-- immediately returns. If not, it tries to read from the underlying
-- OS handle. Notice that for buffered Handles connected to terminals
-- this means waiting until a complete line is available.
-- | Computation 'hWaitForInput' @hdl t@
-- waits until input is available on handle @hdl@.
-- It returns 'True' as soon as input is available on @hdl@,
-- or 'False' if no input is available within @t@ milliseconds.
--
-- If @t@ is less than zero, then @hWaitForInput@ waits indefinitely.
-- NOTE: in the current implementation, this is the only case that works
-- correctly (if @t@ is non-zero, then all other concurrent threads are
-- blocked until data is available).
--
-- This operation may fail with:
--
-- * 'isEOFError' if the end of file has been reached.
hWaitForInput :: Handle -> Int -> IO Bool
hWaitForInput h msecs = do
  wantReadableHandle "hWaitForInput" h $ \ handle_ -> do
    let ref = haBuffer handle_
    buf <- readIORef ref
    -- anything already buffered satisfies the wait immediately
    if not (bufferEmpty buf)
      then return True
      else do
        if msecs < 0
          -- negative timeout: block in a real read until data arrives
          then do buf' <- fillReadBuffer (haFD handle_) True
                            (haIsStream handle_) buf
                  writeIORef ref buf'
                  return True
          -- otherwise poll the OS with the given timeout (see the NOTE
          -- in the haddock above: this blocks other threads)
          else do r <- throwErrnoIfMinus1Retry "hWaitForInput" $
                         inputReady (fromIntegral (haFD handle_))
                                    (fromIntegral msecs) (haIsStream handle_)
                  return (r /= 0)

-- C helper: select()/poll()-style readiness test on a file descriptor.
foreign import ccall safe "inputReady"
  inputReady :: CInt -> CInt -> Bool -> IO CInt
-- ---------------------------------------------------------------------------
-- hGetChar
-- | Computation 'hGetChar' @hdl@ reads a character from the file or
-- channel managed by @hdl@, blocking until a character is available.
--
-- This operation may fail with:
--
-- * 'isEOFError' if the end of file has been reached.
hGetChar :: Handle -> IO Char
hGetChar handle =
  wantReadableHandle "hGetChar" handle $ \handle_ -> do
    let fd  = haFD handle_
        ref = haBuffer handle_
    buf <- readIORef ref
    if not (bufferEmpty buf)
      then hGetcBuffered fd ref buf
      else do
        -- buffer is empty: refill according to the buffering mode
        case haBufferMode handle_ of
          LineBuffering -> do
            new_buf <- fillReadBuffer fd True (haIsStream handle_) buf
            hGetcBuffered fd ref new_buf
          BlockBuffering _ -> do
            new_buf <- fillReadBuffer fd True (haIsStream handle_) buf
            --                          ^^^^
            -- don't wait for a completely full buffer.
            hGetcBuffered fd ref new_buf
          NoBuffering -> do
            -- make use of the minimal buffer we already have
            let raw = bufBuf buf
            r <- readRawBuffer "hGetChar" (fromIntegral fd) (haIsStream handle_) raw 0 1
            if r == 0
              then ioe_EOF
              else do (c,_) <- readCharFromBuffer raw 0
                      return c
-- Take one character from a non-empty read buffer, resetting the
-- buffer pointers when the last buffered character is consumed.
hGetcBuffered fd ref buf@Buffer{ bufBuf=b, bufRPtr=rptr, bufWPtr=wptr }
  = do (c, rptr') <- readCharFromBuffer b rptr
       let buf' | rptr' == wptr = buf{ bufRPtr=0, bufWPtr=0 }
                | otherwise     = buf{ bufRPtr=rptr' }
       writeIORef ref buf'
       return c
-- ---------------------------------------------------------------------------
-- hGetLine
-- ToDo: the unbuffered case is wrong: it doesn't lock the handle for
-- the duration.
-- | Computation 'hGetLine' @hdl@ reads a line from the file or
-- channel managed by @hdl@.
--
-- This operation may fail with:
--
-- * 'isEOFError' if the end of file is encountered when reading
-- the /first/ character of the line.
--
-- If 'hGetLine' encounters end-of-file at any other point while reading
-- in a line, it is treated as a line terminator and the (partial)
-- line is returned.
hGetLine :: Handle -> IO String
hGetLine h = do
  -- Buffered handles are read while the handle lock is held; the
  -- unbuffered path (Nothing) reads char-by-char outside the lock
  -- (see the ToDo above: that path does not keep the handle locked).
  m <- wantReadableHandle "hGetLine" h $ \ handle_ -> do
         case haBufferMode handle_ of
           NoBuffering -> return Nothing
           LineBuffering -> do
             l <- hGetLineBuffered handle_
             return (Just l)
           BlockBuffering _ -> do
             l <- hGetLineBuffered handle_
             return (Just l)
  case m of
    Nothing -> hGetLineUnBuffered h
    Just l  -> return l
-- Entry point for the buffered hGetLine path: start the chunk loop
-- with whatever is currently in the handle's read buffer.
hGetLineBuffered handle_ = do
  let bufRef = haBuffer handle_
  firstBuf <- readIORef bufRef
  hGetLineBufferedLoop handle_ bufRef firstBuf []
-- Accumulate reversed chunks (xss) until a '\n' is found or EOF is hit;
-- EOF mid-line returns the partial line, EOF with nothing read raises.
hGetLineBufferedLoop handle_ ref
        buf@Buffer{ bufRPtr=r, bufWPtr=w, bufBuf=raw } xss =
  let
        -- find the end-of-line character, if there is one
        loop raw r
          | r == w = return (False, w)
          | otherwise = do
                (c,r') <- readCharFromBuffer raw r
                if c == '\n'
                   then return (True, r) -- NB. not r': don't include the '\n'
                   else loop raw r'
  in do
  (eol, off) <- loop raw r
#ifdef DEBUG_DUMP
  puts ("hGetLineBufferedLoop: r=" ++ show r ++ ", w=" ++ show w ++ ", off=" ++ show off ++ "\n")
#endif
  xs <- unpack raw r off
  -- if eol == True, then off is the offset of the '\n'
  -- otherwise off == w and the buffer is now empty.
  if eol
        then do if (w == off + 1)
                        then writeIORef ref buf{ bufRPtr=0, bufWPtr=0 }
                        else writeIORef ref buf{ bufRPtr = off + 1 }
                return (concat (reverse (xs:xss)))
        else do
             maybe_buf <- maybeFillReadBuffer (haFD handle_) True (haIsStream handle_)
                                buf{ bufWPtr=0, bufRPtr=0 }
             case maybe_buf of
                -- Nothing indicates we caught an EOF, and we may have a
                -- partial line to return.
                Nothing -> do
                     writeIORef ref buf{ bufRPtr=0, bufWPtr=0 }
                     let str = concat (reverse (xs:xss))
                     if not (null str)
                        then return str
                        else ioe_EOF
                Just new_buf ->
                     hGetLineBufferedLoop handle_ ref new_buf (xs:xss)
-- Like fillReadBuffer, but maps an EOF IOError to Nothing instead of
-- propagating it; all other IOErrors are re-thrown.
maybeFillReadBuffer fd is_line is_stream buf =
  catch
    (do filled <- fillReadBuffer fd is_line is_stream buf
        return (Just filled))
    (\err -> if isEOFError err then return Nothing else ioError err)
-- Unpack the buffer contents between offset r (inclusive) and len
-- (exclusive) into a String, walking backwards so the list comes out
-- in order.  NB: the first equation returns "" whenever len is 0,
-- regardless of r.
unpack :: RawBuffer -> Int -> Int -> IO [Char]
unpack buf r 0 = return ""
unpack buf (I# r) (I# len) = IO $ \s -> unpack [] (len -# 1#) s
  where
    -- inner worker (deliberately shadows the outer name); primitive
    -- state-token threading over readCharArray#
    unpack acc i s
      | i <# r = (# s, acc #)
      | otherwise =
          case readCharArray# buf i s of
            (# s, ch #) -> unpack (C# ch : acc) (i -# 1#) s
-- Fallback for NoBuffering handles: read one character at a time with
-- hGetChar.  EOF inside a line is treated as a line terminator (the
-- catch handler substitutes '\n').
hGetLineUnBuffered :: Handle -> IO String
hGetLineUnBuffered h = do
  c <- hGetChar h
  if c == '\n' then
     return ""
   else do
    l <- getRest
    return (c:l)
  where
   getRest = do
     c <-
       catch
         (hGetChar h)
         (\ err -> do
            if isEOFError err then
               return '\n'
             else
               ioError err)
     if c == '\n' then
        return ""
      else do
        s <- getRest
        return (c:s)
-- -----------------------------------------------------------------------------
-- hGetContents
-- hGetContents on a DuplexHandle only affects the read side: you can
-- carry on writing to it afterwards.
-- | Computation 'hGetContents' @hdl@ returns the list of characters
-- corresponding to the unread portion of the channel or file managed
-- by @hdl@, which is put into an intermediate state, /semi-closed/.
-- In this state, @hdl@ is effectively closed,
-- but items are read from @hdl@ on demand and accumulated in a special
-- list returned by 'hGetContents' @hdl@.
--
-- Any operation that fails because a handle is closed,
-- also fails if a handle is semi-closed. The only exception is 'hClose'.
-- A semi-closed handle becomes closed:
--
-- * if 'hClose' is applied to it;
--
-- * if an I\/O error occurs when reading an item from the handle;
--
-- * or once the entire contents of the handle has been read.
--
-- Once a semi-closed handle becomes closed, the contents of the
-- associated list becomes fixed. The contents of this final list is
-- only partially specified: it will contain at least all the items of
-- the stream that were evaluated prior to the handle becoming closed.
--
-- Any I\/O errors encountered while a handle is semi-closed are simply
-- discarded.
--
-- This operation may fail with:
--
-- * 'isEOFError' if the end of file has been reached.
hGetContents :: Handle -> IO String
hGetContents handle =
    withHandle "hGetContents" handle $ \handle_ ->
    case haType handle_ of
      ClosedHandle         -> ioe_closedHandle
      SemiClosedHandle     -> ioe_closedHandle
      AppendHandle         -> ioe_notReadable
      WriteHandle          -> ioe_notReadable
      -- mark the handle semi-closed; all further reading happens
      -- lazily, on demand, through 'lazyRead'
      _ -> do xs <- lazyRead handle
              return (handle_{ haType=SemiClosedHandle}, xs )
-- Note that someone may close the semi-closed handle (or change its
-- buffering), so each time these lazy read functions are pulled on,
-- they have to check whether the handle has indeed been closed.
-- One lazily-deferred read step; the handle state is re-checked on
-- every demand because the semi-closed handle may have been closed
-- (or otherwise changed) in the meantime.
lazyRead :: Handle -> IO String
lazyRead handle =
   unsafeInterleaveIO $
        withHandle "lazyRead" handle $ \ handle_ -> do
          case haType handle_ of
            ClosedHandle     -> return (handle_, "")
            SemiClosedHandle -> lazyRead' handle handle_
            _ -> ioException
                    (IOError (Just handle) IllegalOperation "lazyRead"
                        "illegal handle type" Nothing)
-- Refill step for a semi-closed handle.  On EOF the handle is closed
-- via hClose_help and the stream ends with "".
lazyRead' h handle_ = do
  let ref = haBuffer handle_
      fd  = haFD handle_

  -- even a NoBuffering handle can have a char in the buffer...
  -- (see hLookAhead)
  buf <- readIORef ref
  if not (bufferEmpty buf)
     then lazyReadHaveBuffer h handle_ fd ref buf
     else do
       case haBufferMode handle_ of
          NoBuffering      -> do
             -- make use of the minimal buffer we already have
             let raw = bufBuf buf
             r <- readRawBuffer "lazyRead" (fromIntegral fd) (haIsStream handle_) raw 0 1
             if r == 0
                -- note: handle_ is rebound to the closed handle state
                then do handle_ <- hClose_help handle_
                        return (handle_, "")
                else do (c,_) <- readCharFromBuffer raw 0
                        rest <- lazyRead h
                        return (handle_, c : rest)
          LineBuffering    -> lazyReadBuffered h handle_ fd ref buf
          BlockBuffering _ -> lazyReadBuffered h handle_ fd ref buf
-- we never want to block during the read, so we call fillReadBuffer with
-- is_line==True, which tells it to "just read what there is".
lazyReadBuffered h handle_ fd ref buf = do
   catch
        (do buf <- fillReadBuffer fd True{-is_line-} (haIsStream handle_) buf
            lazyReadHaveBuffer h handle_ fd ref buf
        )
        -- all I/O errors are discarded.  Additionally, we close the handle.
        (\e -> do handle_ <- hClose_help handle_
                  return (handle_, "")
        )
-- We have buffered data: obtain the (lazy) rest of the stream first,
-- mark the buffer empty, then unpack this buffer's contents onto the
-- front of it.  'more' is itself produced by unsafeInterleaveIO, so it
-- is only forced on demand.
lazyReadHaveBuffer h handle_ fd ref buf = do
   more <- lazyRead h
   writeIORef ref buf{ bufRPtr=0, bufWPtr=0 }
   s <- unpackAcc (bufBuf buf) (bufRPtr buf) (bufWPtr buf) more
   return (handle_, s)
-- Like 'unpack', but prepends the unpacked characters onto an existing
-- (possibly lazy) tail 'acc'.  When len is 0 the tail is returned as-is.
unpackAcc :: RawBuffer -> Int -> Int -> [Char] -> IO [Char]
unpackAcc buf r 0 acc = return acc
unpackAcc buf (I# r) (I# len) acc = IO $ \s -> unpack acc (len -# 1#) s
  where
    -- walk backwards from len-1 down to r, consing onto acc
    unpack acc i s
      | i <# r = (# s, acc #)
      | otherwise =
          case readCharArray# buf i s of
            (# s, ch #) -> unpack (C# ch : acc) (i -# 1#) s
-- ---------------------------------------------------------------------------
-- hPutChar
-- | Computation 'hPutChar' @hdl ch@ writes the character @ch@ to the
-- file or channel managed by @hdl@. Characters may be buffered if
-- buffering is enabled for @hdl@.
--
-- This operation may fail with:
--
-- * 'isFullError' if the device is full; or
--
-- * 'isPermissionError' if another system resource limit would be exceeded.
hPutChar :: Handle -> Char -> IO ()
hPutChar handle c = do
    c `seq` return ()   -- force the char before taking the handle lock
    wantWritableHandle "hPutChar" handle $ \ handle_  -> do
      let fd = haFD handle_
      case haBufferMode handle_ of
        LineBuffering    -> hPutcBuffered handle_ True  c
        BlockBuffering _ -> hPutcBuffered handle_ False c
        -- unbuffered: write the single byte straight to the fd
        NoBuffering      ->
                with (castCharToCChar c) $ \buf -> do
                  writeRawBufferPtr "hPutChar" (fromIntegral fd) (haIsStream handle_) buf 0 1
                  return ()
-- Append one char to the handle's write buffer, flushing when the
-- buffer becomes full or (in line mode, is_line==True) when the char
-- is a newline.
hPutcBuffered handle_ is_line c = do
  let ref = haBuffer handle_
  buf <- readIORef ref
  let w = bufWPtr buf
  w' <- writeCharIntoBuffer (bufBuf buf) w c
  let new_buf = buf{ bufWPtr = w' }
  if bufferFull new_buf || is_line && c == '\n'
     then do
        flushed_buf <- flushWriteBuffer (haFD handle_) (haIsStream handle_) new_buf
        writeIORef ref flushed_buf
     else do
        writeIORef ref new_buf
-- Write each character of the list in turn (slow path used by the
-- unbuffered case of 'hPutStr').
hPutChars :: Handle -> [Char] -> IO ()
hPutChars handle cs = mapM_ (hPutChar handle) cs
-- ---------------------------------------------------------------------------
-- hPutStr
-- We go to some trouble to avoid keeping the handle locked while we're
-- evaluating the string argument to hPutStr, in case doing so triggers another
-- I/O operation on the same handle which would lead to deadlock. The classic
-- case is
--
-- putStr (trace "hello" "world")
--
-- so the basic scheme is this:
--
-- * copy the string into a fresh buffer,
-- * "commit" the buffer to the handle.
--
-- Committing may involve simply copying the contents of the new
-- buffer into the handle's buffer, flushing one or both buffers, or
-- maybe just swapping the buffers over (if the handle's buffer was
-- empty). See commitBuffer below.
-- | Computation 'hPutStr' @hdl s@ writes the string
-- @s@ to the file or channel managed by @hdl@.
--
-- This operation may fail with:
--
-- * 'isFullError' if the device is full; or
--
-- * 'isPermissionError' if another system resource limit would be exceeded.
hPutStr :: Handle -> String -> IO ()
hPutStr handle str = do
    -- grab a spare buffer while holding the lock, but evaluate the
    -- string into it *outside* the lock (see the commentary above:
    -- this avoids deadlock if forcing str does I/O on this handle)
    buffer_mode <- wantWritableHandle "hPutStr" handle
                        (\ handle_ -> do getSpareBuffer handle_)
    case buffer_mode of
       (NoBuffering, _) -> do
            hPutChars handle str        -- v. slow, but we don't care
       (LineBuffering, buf) -> do
            writeLines handle buf str
       (BlockBuffering _, buf) -> do
            writeBlocks handle buf str
-- Take a spare write buffer from the handle's free list, or allocate a
-- fresh one the same size as the current buffer.  In the NoBuffering
-- case the buffer component is an 'error' thunk; callers on that path
-- never force it.
getSpareBuffer :: Handle__ -> IO (BufferMode, Buffer)
getSpareBuffer Handle__{haBuffer=ref,
                        haBuffers=spare_ref,
                        haBufferMode=mode}
 = do
   case mode of
     NoBuffering -> return (mode, error "no buffer!")
     _ -> do
          bufs <- readIORef spare_ref
          buf  <- readIORef ref
          case bufs of
            BufferListCons b rest -> do
                writeIORef spare_ref rest
                return ( mode, newEmptyBuffer b WriteBuffer (bufSize buf))
            BufferListNil -> do
                new_buf <- allocateBuffer (bufSize buf) WriteBuffer
                return (mode, new_buf)
-- Copy the string into the scratch buffer, committing at every newline
-- (with a flush) and whenever the buffer fills; the final commit
-- releases the buffer back to the handle's free list.
writeLines :: Handle -> Buffer -> String -> IO ()
writeLines hdl Buffer{ bufBuf=raw, bufSize=len } s =
  let
   shoveString :: Int -> [Char] -> IO ()
        -- check n == len first, to ensure that shoveString is strict in n.
   shoveString n cs | n == len = do
        new_buf <- commitBuffer hdl raw len n True{-needs flush-} False
        writeLines hdl new_buf cs
   shoveString n [] = do
        commitBuffer hdl raw len n False{-no flush-} True{-release-}
        return ()
   shoveString n (c:cs) = do
        n' <- writeCharIntoBuffer raw n c
        if (c == '\n')
         then do
              new_buf <- commitBuffer hdl raw len n' True{-needs flush-} False
              writeLines hdl new_buf cs
         else
              shoveString n' cs
  in
  shoveString 0 s
-- Like 'writeLines', but newlines are not significant: commit only
-- when the buffer fills, and release it at the end of the string.
writeBlocks :: Handle -> Buffer -> String -> IO ()
writeBlocks hdl Buffer{ bufBuf=raw, bufSize=len } s =
  let
   shoveString :: Int -> [Char] -> IO ()
        -- check n == len first, to ensure that shoveString is strict in n.
   shoveString n cs | n == len = do
        new_buf <- commitBuffer hdl raw len n True{-needs flush-} False
        writeBlocks hdl new_buf cs
   shoveString n [] = do
        commitBuffer hdl raw len n False{-no flush-} True{-release-}
        return ()
   shoveString n (c:cs) = do
        n' <- writeCharIntoBuffer raw n c
        shoveString n' cs
  in
  shoveString 0 s
-- -----------------------------------------------------------------------------
-- commitBuffer handle buf sz count flush release
--
-- Write the contents of the buffer 'buf' ('sz' bytes long, containing
-- 'count' bytes of data) to handle (handle must be block or line buffered).
--
-- Implementation:
--
-- for block/line buffering,
-- 1. If there isn't room in the handle buffer, flush the handle
-- buffer.
--
-- 2. If the handle buffer is empty,
-- if flush,
-- then write buf directly to the device.
-- else swap the handle buffer with buf.
--
-- 3. If the handle buffer is non-empty, copy buf into the
-- handle buffer. Then, if flush != 0, flush
-- the buffer.
commitBuffer
        :: Handle                       -- handle to commit to
        -> RawBuffer -> Int             -- address and size (in bytes) of buffer
        -> Int                          -- number of bytes of data in buffer
        -> Bool                         -- True <=> flush the handle afterward
        -> Bool                         -- release the buffer?
        -> IO Buffer

-- The @(I# _)@ patterns force sz and count before the handle lock is
-- taken; the real work happens in commitBuffer' below.
commitBuffer hdl raw sz@(I# _) count@(I# _) flush release = do
  wantWritableHandle "commitAndReleaseBuffer" hdl $
     commitBuffer' raw sz count flush release
-- Explicitly lambda-lift this function to subvert GHC's full laziness
-- optimisations, which otherwise tends to float out subexpressions
-- past the \handle, which is really a pessimisation in this case because
-- that lambda is a one-shot lambda.
--
-- Don't forget to export the function, to stop it being inlined too
-- (this appears to be better than NOINLINE, because the strictness
-- analyser still gets to worker-wrapper it).
--
-- This hack is a fairly big win for hPutStr performance. --SDM 18/9/2001
--
-- Worker for commitBuffer; runs with the handle lock held (see the
-- lambda-lifting commentary above for why it is a separate, exported
-- function).
commitBuffer' raw sz@(I# _) count@(I# _) flush release
  handle_@Handle__{ haFD=fd, haBuffer=ref, haBuffers=spare_buf_ref } = do
#ifdef DEBUG_DUMP
      puts ("commitBuffer: sz=" ++ show sz ++ ", count=" ++ show count
            ++ ", flush=" ++ show flush ++ ", release=" ++ show release ++"\n")
#endif
      old_buf@Buffer{ bufBuf=old_raw, bufRPtr=r, bufWPtr=w, bufSize=size }
          <- readIORef ref
      buf_ret <-
        -- enough room in handle buffer?
        if (not flush && (size - w > count))
                -- The > is to be sure that we never exactly fill
                -- up the buffer, which would require a flush.  So
                -- if copying the new data into the buffer would
                -- make the buffer full, we just flush the existing
                -- buffer and the new data immediately, rather than
                -- copying before flushing.

                -- not flushing, and there's enough room in the buffer:
                -- just copy the data in and update bufWPtr.
            then do memcpy_baoff_ba old_raw w raw (fromIntegral count)
                    writeIORef ref old_buf{ bufWPtr = w + count }
                    return (newEmptyBuffer raw WriteBuffer sz)

                -- else, we have to flush
            else do flushed_buf <- flushWriteBuffer fd (haIsStream handle_) old_buf
                    let this_buf =
                            Buffer{ bufBuf=raw, bufState=WriteBuffer,
                                    bufRPtr=0, bufWPtr=count, bufSize=sz }
                    -- if: (a) we don't have to flush, and
                    --     (b) size(new buffer) == size(old buffer), and
                    --     (c) new buffer is not full,
                    -- we can just just swap them over...
                    if (not flush && sz == size && count /= sz)
                        then do
                          writeIORef ref this_buf
                          return flushed_buf
                        -- otherwise, we have to flush the new data too,
                        -- and start with a fresh buffer
                        else do
                          -- (result of this flush is deliberately dropped)
                          flushWriteBuffer fd (haIsStream handle_) this_buf
                          writeIORef ref flushed_buf
                          -- if the sizes were different, then allocate
                          -- a new buffer of the correct size.
                          if sz == size
                             then return (newEmptyBuffer raw WriteBuffer sz)
                             else allocateBuffer size WriteBuffer
      -- release the buffer if necessary
      case buf_ret of
        Buffer{ bufSize=buf_ret_sz, bufBuf=buf_ret_raw } -> do
          -- only re-pool buffers of the handle's current size
          if release && buf_ret_sz == size
            then do
              spare_bufs <- readIORef spare_buf_ref
              writeIORef spare_buf_ref
                         (BufferListCons buf_ret_raw spare_bufs)
              return buf_ret
            else
              return buf_ret
-- ---------------------------------------------------------------------------
-- Reading/writing sequences of bytes.
-- ---------------------------------------------------------------------------
-- hPutBuf
-- | 'hPutBuf' @hdl buf count@ writes @count@ 8-bit bytes from the
-- buffer @buf@ to the handle @hdl@. It returns ().
--
-- This operation may fail with:
--
-- * 'ResourceVanished' if the handle is a pipe or socket, and the
-- reading end is closed. (If this is a POSIX system, and the program
-- has not asked to ignore SIGPIPE, then a SIGPIPE may be delivered
-- instead, whose default action is to terminate the program).
hPutBuf :: Handle  -- handle to write to
        -> Ptr a   -- address of buffer
        -> Int     -- number of bytes of data in buffer
        -> IO ()
hPutBuf h ptr count = do
  _ <- hPutBuf' h ptr count True
  return ()

hPutBufNonBlocking
        :: Handle  -- handle to write to
        -> Ptr a   -- address of buffer
        -> Int     -- number of bytes of data in buffer
        -> IO Int  -- returns: number of bytes written
hPutBufNonBlocking h ptr count = hPutBuf' h ptr count False
-- Shared implementation of hPutBuf/hPutBufNonBlocking; zero-byte
-- writes are a no-op and negative counts are rejected.
hPutBuf':: Handle                       -- handle to write to
        -> Ptr a                        -- address of buffer
        -> Int                          -- number of bytes of data in buffer
        -> Bool                         -- allow blocking?
        -> IO Int
hPutBuf' handle ptr count can_block
  | count == 0 = return 0
  | count <  0 = illegalBufferSize handle "hPutBuf" count
  | otherwise =
     wantWritableHandle "hPutBuf" handle $
        \ handle_@Handle__{ haFD=fd, haBuffer=ref, haIsStream=is_stream } ->
          bufWrite fd ref is_stream ptr count can_block
-- Copy into the handle buffer if it fits; otherwise flush, then either
-- loop (data smaller than the buffer) or write directly to the fd.
bufWrite fd ref is_stream ptr count can_block =
  seq count $ seq fd $ do  -- strictness hack
    old_buf@Buffer{ bufBuf=old_raw, bufRPtr=r, bufWPtr=w, bufSize=size }
       <- readIORef ref

    -- enough room in handle buffer?
    if (size - w > count)
          -- There's enough room in the buffer:
          -- just copy the data in and update bufWPtr.
          then do memcpy_baoff_ptr old_raw w ptr (fromIntegral count)
                  writeIORef ref old_buf{ bufWPtr = w + count }
                  return count

          -- else, we have to flush
          else do flushed_buf <- flushWriteBuffer fd is_stream old_buf
                          -- TODO: we should do a non-blocking flush here
                  writeIORef ref flushed_buf
                  -- if we can fit in the buffer, then just loop
                  if count < size
                     then bufWrite fd ref is_stream ptr count can_block
                     else if can_block
                             then do writeChunk fd is_stream (castPtr ptr) count
                                     return count
                             else writeChunkNonBlocking fd is_stream ptr count
-- Blocking write loop: keep writing until every byte has been sent.
writeChunk :: FD -> Bool -> Ptr CChar -> Int -> IO ()
writeChunk fd is_stream ptr bytes = loop 0 bytes
 where
  loop :: Int -> Int -> IO ()
  loop _   bytes | bytes <= 0 = return ()
  loop off bytes = do
    r <- fromIntegral `liftM`
           writeRawBufferPtr "writeChunk" (fromIntegral fd) is_stream ptr
                             off (fromIntegral bytes)
    -- write can't return 0
    loop (off + r) (bytes - r)
-- Non-blocking write loop: returns the number of bytes written; stops
-- early (returning the partial count) on EAGAIN/EWOULDBLOCK.  The
-- Windows branch goes through the async I/O manager instead.
writeChunkNonBlocking :: FD -> Bool -> Ptr a -> Int -> IO Int
writeChunkNonBlocking fd is_stream ptr bytes = loop 0 bytes
 where
  loop :: Int -> Int -> IO Int
  loop off bytes | bytes <= 0 = return off
  loop off bytes = do
#ifndef mingw32_HOST_OS
    ssize <- c_write (fromIntegral fd) (ptr `plusPtr` off) (fromIntegral bytes)
    let r = fromIntegral ssize :: Int
    if (r == -1)
      then do errno <- getErrno
              if (errno == eAGAIN || errno == eWOULDBLOCK)
                 then return off
                 else throwErrno "writeChunk"
      else loop (off + r) (bytes - r)
#else
    (ssize, rc) <- asyncWrite fd (fromIntegral $ fromEnum is_stream)
                      (fromIntegral bytes)
                      (ptr `plusPtr` off)
    let r = fromIntegral ssize :: Int
    if r == (-1)
      then ioError (errnoToIOError "hPutBufNonBlocking" (Errno (fromIntegral rc)) Nothing Nothing)
      else loop (off + r) (bytes - r)
#endif
-- ---------------------------------------------------------------------------
-- hGetBuf
-- | 'hGetBuf' @hdl buf count@ reads data from the handle @hdl@
-- into the buffer @buf@ until either EOF is reached or
-- @count@ 8-bit bytes have been read.
-- It returns the number of bytes actually read. This may be zero if
-- EOF was reached before any data was read (or if @count@ is zero).
--
-- 'hGetBuf' never raises an EOF exception, instead it returns a value
-- smaller than @count@.
--
-- If the handle is a pipe or socket, and the writing end
-- is closed, 'hGetBuf' will behave as if EOF was reached.
hGetBuf :: Handle -> Ptr a -> Int -> IO Int
hGetBuf h ptr count
  | count == 0 = return 0
  | count <  0 = illegalBufferSize h "hGetBuf" count
  | otherwise =
      wantReadableHandle "hGetBuf" h $
        \ handle_@Handle__{ haFD=fd, haBuffer=ref, haIsStream=is_stream } -> do
           -- so_far starts at 0; bufRead does all the work
           bufRead fd ref is_stream ptr 0 count
-- small reads go through the buffer, large reads are satisfied by
-- taking data first from the buffer and then direct from the file
-- descriptor.
-- 'so_far' counts the bytes already delivered into ptr.
bufRead fd ref is_stream ptr so_far count =
  seq fd $ seq so_far $ seq count $ do -- strictness hack
    buf@Buffer{ bufBuf=raw, bufWPtr=w, bufRPtr=r, bufSize=sz } <- readIORef ref
    if bufferEmpty buf
      then if count > sz    -- request larger than the buffer: bypass it
             then do rest <- readChunk fd is_stream ptr count
                     return (so_far + rest)
             else do mb_buf <- maybeFillReadBuffer fd True is_stream buf
                     case mb_buf of
                       Nothing -> return so_far -- got nothing, we're done
                       Just buf' -> do
                         writeIORef ref buf'
                         bufRead fd ref is_stream ptr so_far count
      else do
        let avail = w - r
        if (count == avail)
          then do
            -- buffered data satisfies the request exactly: empty the buffer
            memcpy_ptr_baoff ptr raw r (fromIntegral count)
            writeIORef ref buf{ bufWPtr=0, bufRPtr=0 }
            return (so_far + count)
          else do
            if (count < avail)
              then do
                -- partial drain: just advance the read pointer
                memcpy_ptr_baoff ptr raw r (fromIntegral count)
                writeIORef ref buf{ bufRPtr = r + count }
                return (so_far + count)
              else do
                -- take everything buffered, then go back for the rest
                memcpy_ptr_baoff ptr raw r (fromIntegral avail)
                writeIORef ref buf{ bufWPtr=0, bufRPtr=0 }
                let remaining = count - avail
                    so_far' = so_far + avail
                    ptr' = ptr `plusPtr` avail
                if remaining < sz
                  then bufRead fd ref is_stream ptr' so_far' remaining
                  else do
                    rest <- readChunk fd is_stream ptr' remaining
                    return (so_far' + rest)
-- Read straight from the fd until the request is satisfied or a read
-- returns 0 (EOF); returns the number of bytes actually read.
readChunk :: FD -> Bool -> Ptr a -> Int -> IO Int
readChunk fd is_stream ptr bytes = loop 0 bytes
 where
  loop :: Int -> Int -> IO Int
  loop off bytes | bytes <= 0 = return off
  loop off bytes = do
    r <- fromIntegral `liftM`
           readRawBufferPtr "readChunk" (fromIntegral fd) is_stream
                            (castPtr ptr) off (fromIntegral bytes)
    if r == 0
        then return off
        else loop (off + r) (bytes - r)
-- | 'hGetBufNonBlocking' @hdl buf count@ reads data from the handle @hdl@
-- into the buffer @buf@ until either EOF is reached, or
-- @count@ 8-bit bytes have been read, or there is no more data available
-- to read immediately.
--
-- 'hGetBufNonBlocking' is identical to 'hGetBuf', except that it will
-- never block waiting for data to become available, instead it returns
-- only whatever data is available. To wait for data to arrive before
-- calling 'hGetBufNonBlocking', use 'hWaitForInput'.
--
-- If the handle is a pipe or socket, and the writing end
-- is closed, 'hGetBufNonBlocking' will behave as if EOF was reached.
--
hGetBufNonBlocking :: Handle -> Ptr a -> Int -> IO Int
hGetBufNonBlocking h ptr count
  | count == 0 = return 0
  | count <  0 = illegalBufferSize h "hGetBufNonBlocking" count
  | otherwise =
      wantReadableHandle "hGetBufNonBlocking" h $
        \ handle_@Handle__{ haFD=fd, haBuffer=ref, haIsStream=is_stream } -> do
           -- so_far starts at 0; bufReadNonBlocking does all the work
           bufReadNonBlocking fd ref is_stream ptr 0 count
-- Non-blocking analogue of bufRead: at most one refill attempt, via
-- fillReadBufferWithoutBlocking; returns however many bytes were
-- immediately available.
bufReadNonBlocking fd ref is_stream ptr so_far count =
  seq fd $ seq so_far $ seq count $ do -- strictness hack
    buf@Buffer{ bufBuf=raw, bufWPtr=w, bufRPtr=r, bufSize=sz } <- readIORef ref
    if bufferEmpty buf
      then if count > sz  -- large read?
             then do rest <- readChunkNonBlocking fd is_stream ptr count
                     return (so_far + rest)
             else do buf' <- fillReadBufferWithoutBlocking fd is_stream buf
                     case buf' of { Buffer{ bufWPtr=w }  ->
                       if (w == 0)
                         then return so_far
                         else do writeIORef ref buf'
                                 bufReadNonBlocking fd ref is_stream ptr
                                        so_far (min count w)
                                  -- NOTE: new count is 'min count w'
                                  -- so we will just copy the contents of the
                                  -- buffer in the recursive call, and not
                                  -- loop again.
                     }
      else do
        let avail = w - r
        if (count == avail)
          then do
            -- buffered data satisfies the request exactly
            memcpy_ptr_baoff ptr raw r (fromIntegral count)
            writeIORef ref buf{ bufWPtr=0, bufRPtr=0 }
            return (so_far + count)
          else do
            if (count < avail)
              then do
                -- partial drain: just advance the read pointer
                memcpy_ptr_baoff ptr raw r (fromIntegral count)
                writeIORef ref buf{ bufRPtr = r + count }
                return (so_far + count)
              else do
                -- take everything buffered first
                memcpy_ptr_baoff ptr raw r (fromIntegral avail)
                writeIORef ref buf{ bufWPtr=0, bufRPtr=0 }
                let remaining = count - avail
                    so_far' = so_far + avail
                    ptr' = ptr `plusPtr` avail
                -- we haven't attempted to read anything yet if we get to here.
                if remaining < sz
                  then bufReadNonBlocking fd ref is_stream ptr' so_far' remaining
                  else do
                    rest <- readChunkNonBlocking fd is_stream ptr' remaining
                    return (so_far' + rest)
-- Single non-blocking read attempt straight from the fd;
-- EAGAIN/EWOULDBLOCK is reported as 0 bytes read rather than an error.
-- The Windows branch goes through the async I/O manager instead.
readChunkNonBlocking :: FD -> Bool -> Ptr a -> Int -> IO Int
readChunkNonBlocking fd is_stream ptr bytes = do
#ifndef mingw32_HOST_OS
    ssize <- c_read (fromIntegral fd) (castPtr ptr) (fromIntegral bytes)
    let r = fromIntegral ssize :: Int
    if (r == -1)
      then do errno <- getErrno
              if (errno == eAGAIN || errno == eWOULDBLOCK)
                 then return 0
                 else throwErrno "readChunk"
      else return r
#else
    (ssize, rc) <- asyncRead fd (fromIntegral $ fromEnum is_stream)
                       (fromIntegral bytes) ptr
    let r = fromIntegral ssize :: Int
    if r == (-1)
      then ioError (errnoToIOError "hGetBufNonBlocking" (Errno (fromIntegral rc)) Nothing Nothing)
      else return r
#endif
-- | Read an entire file into a freshly malloc'd buffer, returning the
-- buffer and the number of bytes actually read.  The caller owns (and
-- must eventually free) the returned pointer.
-- NOTE(review): if 'hGetBuf' throws, the handle and the malloc'd chunk
-- are not released here — confirm callers treat that as fatal.
slurpFile :: FilePath -> IO (Ptr (), Int)
slurpFile fname = do
  handle <- openFile fname ReadMode
  sz <- hFileSize handle
  -- Reject files whose size does not fit in an Int before allocating.
  if sz > fromIntegral (maxBound::Int) then
    ioError (userError "slurpFile: file too big")
   else do
    let sz_i = fromIntegral sz
    if sz_i == 0 then return (nullPtr, 0) else do
      chunk <- mallocBytes sz_i
      r <- hGetBuf handle chunk sz_i
      hClose handle
      return (chunk, r)
-- ---------------------------------------------------------------------------
-- memcpy wrappers
foreign import ccall unsafe "__hscore_memcpy_src_off"
memcpy_ba_baoff :: RawBuffer -> RawBuffer -> Int -> CSize -> IO (Ptr ())
foreign import ccall unsafe "__hscore_memcpy_src_off"
memcpy_ptr_baoff :: Ptr a -> RawBuffer -> Int -> CSize -> IO (Ptr ())
foreign import ccall unsafe "__hscore_memcpy_dst_off"
memcpy_baoff_ba :: RawBuffer -> Int -> RawBuffer -> CSize -> IO (Ptr ())
foreign import ccall unsafe "__hscore_memcpy_dst_off"
memcpy_baoff_ptr :: RawBuffer -> Int -> Ptr a -> CSize -> IO (Ptr ())
-----------------------------------------------------------------------------
-- Internal Utils
-- | Raise an 'InvalidArgument' 'IOError' for a nonsensical buffer size
-- passed to the named operation @fn@ on the given handle.
illegalBufferSize :: Handle -> String -> Int -> IO a
illegalBufferSize handle fn (sz :: Int) =
    let msg = "illegal buffer size " ++ showsPrec 9 sz []
    in ioException (IOError (Just handle) InvalidArgument fn msg Nothing)
|
FranklinChen/hugs98-plus-Sep2006
|
packages/base/GHC/IO.hs
|
Haskell
|
bsd-3-clause
| 31,744
|
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE RankNTypes #-}
module Diagrams.Backend.OpenGL.TwoD.Attributes
( GLRenderM, GLRenderState (..), withStyleState , initialGLRenderState)
where
-- General Haskell
import Control.Monad.State
import Control.Lens (op, Lens', (.~))
import Control.Lens.TH
-- From Diagrams
import Diagrams.Prelude as D hiding (Attribute)
import Diagrams.TwoD.Path
import Diagrams.Backend.OpenGL.TwoD.Outlines (trlVertices, Convex(..))
import Diagrams.Backend.OpenGL.TwoD.Tesselate
-- | Rendering computations thread a 'GLRenderState' through 'State'.
type GLRenderM a = State GLRenderState a
-- | The drawing attributes currently in effect.  Lenses (the field
-- names minus the leading underscore) are generated by 'makeLenses'.
data GLRenderState =
  GLRenderState{ _currentLineColor :: AlphaColour Double
               , _currentFillColor :: AlphaColour Double
               , _currentOpacity   :: Double
               , _currentLineWidth :: Double
               , _currentLineCap   :: LineCap
               , _currentLineJoin  :: LineJoin
               , _currentFillRule  :: TessWinding
               , _currentDashing   :: Dashing
               , _currentClip      :: [Convex]
               }
makeLenses ''GLRenderState
-- | Attribute values in effect before any 'Style' has been applied:
-- opaque black hairlines, transparent fill, no dashing, no clipping.
initialGLRenderState :: GLRenderState
initialGLRenderState =
  GLRenderState { _currentLineColor = opaque black
                , _currentFillColor = transparent
                , _currentOpacity   = 1
                , _currentLineWidth = 0.01
                , _currentLineCap   = LineCapButt
                , _currentLineJoin  = LineJoinMiter
                , _currentFillRule  = TessWindingNonzero
                , _currentDashing   = Dashing [] 0
                , _currentClip      = []
                }
{- Style changes -}
-- | Run @act@ with every attribute present in style @s@ applied to the
-- render state, restoring the previous state afterwards.
withStyleState :: Style R2 -> GLRenderM a -> GLRenderM a
withStyleState s act = do
  prev <- get
  -- Each element is a setter; for attributes absent from the style the
  -- setter is 'id', so only attributes actually present change state.
  modify . foldr1 (.) . map ($ s) $
    [ changeWith (toAlphaColour . getLineColor) currentLineColor
    , changeWith (toAlphaColour . getFillColor) currentFillColor
    , changeWith getOpacity currentOpacity
    , changeWith getLineWidth currentLineWidth
    , changeWith getLineCap currentLineCap
    , changeWith getLineJoin currentLineJoin
    , changeWith (fr . getFillRule) currentFillRule
    , changeWith getDashing currentDashing
    , changeClip
    ]
  r <- act
  put prev -- TODO restore only changed values?
  return r
-- | @changeWith project field sty@ is @id@ when @sty@ does not carry
-- the 'Attribute' selected by @project@; otherwise it is a state update
-- writing the projected value through the supplied lens.
changeWith :: AttributeClass a =>
              (a -> b) -> (Lens' GLRenderState b) -> Style R2 -> GLRenderState -> GLRenderState
changeWith project field sty = maybe id (field .~) (project <$> getAttr sty)
-- | Install the style's clip region, tessellated into convex pieces,
-- if one is present; otherwise leave the state untouched.
changeClip :: Style R2 -> GLRenderState -> GLRenderState
changeClip sty =
  case op Clip <$> getAttr sty of
    Just (Path trs : _) ->
      currentClip .~ tessRegion TessWindingNonzero (map trlVertices trs)
    _ -> id
-- | Translate a diagrams 'FillRule' into the tessellator's winding rule.
fr :: FillRule -> TessWinding
fr rule =
  case rule of
    Winding -> TessWindingNonzero
    EvenOdd -> TessWindingOdd
|
bergey/diagrams-opengl
|
src/Diagrams/Backend/OpenGL/TwoD/Attributes.hs
|
Haskell
|
bsd-3-clause
| 3,142
|
{-# Language RankNTypes, ViewPatterns, PatternSynonyms, TypeOperators, ScopedTypeVariables,
KindSignatures, PolyKinds, DataKinds, TypeFamilies, TypeInType, GADTs #-}
module T14552 where
import Data.Kind
import Data.Proxy
data family Sing a
type a --> b = (a, b) -> Type
type family F (f::a --> b) (x::a) :: b
newtype Limit :: (k --> Type) -> Type where
Limit :: (forall xx. Proxy xx -> F f xx) -> Limit f
data Exp :: [Type] -> Type -> Type where
TLam :: (forall aa. Proxy aa -> Exp xs (F w aa))
-> Exp xs (Limit w)
pattern FOO f <- TLam (($ Proxy) -> f)
{-
TLam :: forall (xs::[Type]) (b::Type). -- Universal
forall k (w :: k --> Type). -- Existential
(b ~ Limit w) =>
=> (forall (aa :: k). Proxy aa -> Exp xs (F w aa))
-> Exp xs b
-}
{-
mfoo :: Exp xs b
-> (forall k (w :: k --> Type).
(b ~ Limit w)
=> Exp xs (F w aa)
-> r)
-> r
mfoo scrut k = case srcut of
TLam g -> k (g Proxy)
-}
|
shlevy/ghc
|
testsuite/tests/patsyn/should_fail/T14552.hs
|
Haskell
|
bsd-3-clause
| 1,019
|
-- | Compatibility shim: re-exports the Fay FFI under the historical
-- module name @FFI@.
module FFI
  (module Fay.FFI)
  where
import Fay.FFI
|
fpco/fay-base
|
src/FFI.hs
|
Haskell
|
bsd-3-clause
| 54
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Test suite for GHCi like applications including both GHCi and Intero.
module Stack.GhciSpec where
import qualified Data.ByteString.Lazy as LBS
import qualified Data.Map as M
import qualified Data.Set as S
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Distribution.License (License (BSD3))
import qualified Distribution.ModuleName as ModuleName
import Stack.Types.Package
import Stack.Types.PackageName
import Stack.Types.Version
import Test.Hspec
import NeatInterpolation
import Path
import Path.Extra (pathToText)
import qualified System.FilePath as FP
import Stack.Ghci
import Stack.Ghci.Script (scriptToLazyByteString)
import Stack.Ghci.PortableFakePaths
-- | Encode strict 'Text' as UTF-8 and lift the bytes into a lazy
-- 'LBS.ByteString'.
textToLazy :: Text -> LBS.ByteString
textToLazy t = LBS.fromStrict (T.encodeUtf8 t)
-- | Assert a lazy ByteString equals a Text expectation, after dropping
-- carriage returns from the expectation (crude line-ending
-- normalization; both sides are converted to the same type).
-- Workaround for
-- https://github.com/nikita-volkov/neat-interpolation/issues/14.
shouldBeLE :: LBS.ByteString -> Text -> Expectation
shouldBeLE actual expected =
  actual `shouldBe` textToLazy (T.filter (/= '\r') expected)
baseProjDir, projDirA, projDirB :: Path Abs Dir
baseProjDir = $(mkAbsDir $ defaultDrive FP.</> "Users" FP.</> "someone" FP.</> "src")
projDirA = baseProjDir </> $(mkRelDir "project-a")
projDirB = baseProjDir </> $(mkRelDir "project-b")
relFile :: Path Rel File
relFile = $(mkRelFile $ "exe" FP.</> "Main.hs")
absFile :: Path Abs File
absFile = projDirA </> relFile
projDirAT, projDirBT, relFileT, absFileT :: Text
projDirAT = pathToText projDirA
projDirBT = pathToText projDirB
relFileT = pathToText relFile
absFileT = pathToText absFile
spec :: Spec
spec = do
describe "GHCi" $ do
describe "Script rendering" $ do
describe "should render GHCi scripts" $ do
it "with one library package" $ do
let res = scriptToLazyByteString $ renderScriptGhci packages_singlePackage Nothing
res `shouldBeLE` ghciScript_projectWithLib
it "with one main package" $ do
let res = scriptToLazyByteString $ renderScriptGhci []
(Just absFile)
res `shouldBeLE` ghciScript_projectWithMain
it "with one library and main package" $ do
let res = scriptToLazyByteString $ renderScriptGhci packages_singlePackage
(Just absFile)
res `shouldBeLE` ghciScript_projectWithLibAndMain
it "with multiple library packages" $ do
let res = scriptToLazyByteString $ renderScriptGhci packages_multiplePackages Nothing
res `shouldBeLE` ghciScript_multipleProjectsWithLib
describe "should render intero scripts" $ do
it "with one library package" $ do
let res = scriptToLazyByteString $ renderScriptIntero packages_singlePackage Nothing
res `shouldBeLE` interoScript_projectWithLib
it "with one main package" $ do
let res = scriptToLazyByteString $ renderScriptIntero packages_singlePackage
(Just absFile)
res `shouldBeLE` interoScript_projectWithMain
it "with one library and main package" $ do
let res = scriptToLazyByteString $ renderScriptIntero packages_singlePackage
(Just absFile)
res `shouldBeLE` interoScript_projectWithLibAndMain
it "with multiple library packages" $ do
let res = scriptToLazyByteString $ renderScriptIntero packages_multiplePackages Nothing
res `shouldBeLE` interoScript_multipleProjectsWithLib
-- Expected Intero scripts
interoScript_projectWithLib :: Text
interoScript_projectWithLib = [text|
:cd-ghc $projDirAT
:add Lib.A
:module + Lib.A
|]
interoScript_projectWithMain :: Text
interoScript_projectWithMain = [text|
:cd-ghc $projDirAT
:add Lib.A
:cd-ghc $projDirAT
:add $absFileT
:module + Lib.A
|]
interoScript_projectWithLibAndMain :: Text
interoScript_projectWithLibAndMain = [text|
:cd-ghc $projDirAT
:add Lib.A
:cd-ghc $projDirAT
:add $absFileT
:module + Lib.A
|]
interoScript_multipleProjectsWithLib :: Text
interoScript_multipleProjectsWithLib = [text|
:cd-ghc $projDirAT
:add Lib.A
:cd-ghc $projDirBT
:add Lib.B
:module + Lib.A Lib.B
|]
-- Expected GHCi Scripts
ghciScript_projectWithLib :: Text
ghciScript_projectWithLib = [text|
:add Lib.A
:module + Lib.A
|]
ghciScript_projectWithMain :: Text
ghciScript_projectWithMain = [text|
:add $absFileT
:module +
|]
ghciScript_projectWithLibAndMain :: Text
ghciScript_projectWithLibAndMain = [text|
:add Lib.A
:add $absFileT
:module + Lib.A
|]
ghciScript_multipleProjectsWithLib :: Text
ghciScript_multipleProjectsWithLib = [text|
:add Lib.A
:add Lib.B
:module + Lib.A Lib.B
|]
-- Expected Legacy GHCi scripts
ghciLegacyScript_projectWithMain :: Text
ghciLegacyScript_projectWithMain = [text|
:add
:add $absFileT
:module +
|]
ghciLegacyScript_projectWithLibAndMain :: Text
ghciLegacyScript_projectWithLibAndMain = [text|
:add Lib.A
:add $absFileT
:module + Lib.A
|]
ghciLegacyScript_multipleProjectsWithLib :: Text
ghciLegacyScript_multipleProjectsWithLib = [text|
:add Lib.A Lib.B
:module + Lib.A Lib.B
|]
-- Sample GHCi load configs
packages_singlePackage :: [GhciPkgInfo]
packages_singlePackage =
[ GhciPkgInfo
{ ghciPkgModules = S.fromList [ModuleName.fromString "Lib.A"]
, ghciPkgDir = projDirA
, ghciPkgName = $(mkPackageName "package-a")
, ghciPkgOpts = []
, ghciPkgModFiles = S.empty
, ghciPkgCFiles = S.empty
, ghciPkgMainIs = M.empty
, ghciPkgTargetFiles = Nothing
, ghciPkgPackage =
Package
{ packageName = $(mkPackageName "package-a")
, packageVersion = $(mkVersion "0.1.0.0")
, packageLicense = BSD3
, packageFiles = GetPackageFiles undefined
, packageDeps = M.empty
, packageTools = []
, packageAllDeps = S.empty
, packageGhcOptions = []
, packageFlags = M.empty
, packageDefaultFlags = M.empty
, packageHasLibrary = True
, packageTests = M.empty
, packageBenchmarks = S.empty
, packageExes = S.empty
, packageOpts = GetPackageOpts undefined
, packageHasExposedModules = True
, packageSimpleType = True
, packageSetupDeps = Nothing
}
}
]
packages_multiplePackages :: [GhciPkgInfo]
packages_multiplePackages =
[ GhciPkgInfo
{ ghciPkgModules = S.fromList [ModuleName.fromString "Lib.A"]
, ghciPkgDir = projDirA
, ghciPkgName = $(mkPackageName "package-a")
, ghciPkgOpts = []
, ghciPkgModFiles = S.empty
, ghciPkgCFiles = S.empty
, ghciPkgMainIs = M.empty
, ghciPkgTargetFiles = Nothing
, ghciPkgPackage =
Package
{ packageName = $(mkPackageName "package-a")
, packageVersion = $(mkVersion "0.1.0.0")
, packageLicense = BSD3
, packageFiles = GetPackageFiles undefined
, packageDeps = M.empty
, packageTools = []
, packageAllDeps = S.empty
, packageGhcOptions = []
, packageFlags = M.empty
, packageDefaultFlags = M.empty
, packageHasLibrary = True
, packageTests = M.empty
, packageBenchmarks = S.empty
, packageExes = S.empty
, packageOpts = GetPackageOpts undefined
, packageHasExposedModules = True
, packageSimpleType = True
, packageSetupDeps = Nothing
}
}
, GhciPkgInfo
{ ghciPkgModules = S.fromList [ModuleName.fromString "Lib.B"]
, ghciPkgDir = projDirB
, ghciPkgName = $(mkPackageName "package-b")
, ghciPkgOpts = []
, ghciPkgModFiles = S.empty
, ghciPkgCFiles = S.empty
, ghciPkgMainIs = M.empty
, ghciPkgTargetFiles = Nothing
, ghciPkgPackage =
Package
{ packageName = $(mkPackageName "package-b")
, packageVersion = $(mkVersion "0.1.0.0")
, packageLicense = BSD3
, packageFiles = GetPackageFiles undefined
, packageDeps = M.empty
, packageTools = []
, packageAllDeps = S.empty
, packageGhcOptions = []
, packageFlags = M.empty
, packageDefaultFlags = M.empty
, packageHasLibrary = True
, packageTests = M.empty
, packageBenchmarks = S.empty
, packageExes = S.empty
, packageOpts = GetPackageOpts undefined
, packageHasExposedModules = True
, packageSimpleType = True
, packageSetupDeps = Nothing
}
}
]
|
AndreasPK/stack
|
src/test/Stack/GhciSpec.hs
|
Haskell
|
bsd-3-clause
| 8,765
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-
Created : 2015 Aug 26 (Wed) 11:56:37 by Harold Carr.
Last Modified : 2015 Sep 12 (Sat) 11:39:39 by Harold Carr.
-}
module Msg where
import Data.Aeson (FromJSON, ToJSON)
import GHC.Generics
-- | Display name of the message sender.
type Name = String
-- | Numeric message identifier.
type MsgId = Int
-- | A chat message; JSON encoding and decoding are derived generically
-- via the 'ToJSON' / 'FromJSON' instances below.
data Msg = Msg { name :: Name, msgId :: MsgId, txt :: String } deriving (Generic, Show)
instance ToJSON Msg
instance FromJSON Msg
|
splodingsocks/utah-haskell
|
infrastructure/src/Msg.hs
|
Haskell
|
apache-2.0
| 464
|
{-# LANGUAGE StandaloneKindSignatures #-}
{-# LANGUAGE PolyKinds, ExplicitForAll #-}
module SAKS_015 where
import Data.Kind (Type)
type T :: forall k -> k -> Type
data T (k :: Type) (a :: k)
|
sdiehl/ghc
|
testsuite/tests/saks/should_compile/saks015.hs
|
Haskell
|
bsd-3-clause
| 194
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pt-BR">
<title>All In One Notes Add-On</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/allinonenotes/src/main/javahelp/org/zaproxy/zap/extension/allinonenotes/resources/help_pt_BR/helpset_pt_BR.hs
|
Haskell
|
apache-2.0
| 968
|
{-# OPTIONS -fno-warn-redundant-constraints #-}
{-# LANGUAGE TypeFamilies, GeneralizedNewtypeDeriving, StandaloneDeriving, FlexibleInstances #-}
-- Test #2856
module T2856 where
import Data.Ratio
----------------------
class C a where
data D a
instance C Bool where
newtype D Bool = DInt Int deriving (Eq, Show, Num)
instance C a => C [a] where
newtype D [a] = DList (Ratio a) deriving (Eq, Show, Num)
----------------------
data family W a
newtype instance W Bool = WInt Int deriving( Eq, Show )
newtype instance W [a] = WList (Ratio a) deriving( Eq, Show )
deriving instance Num (W Bool)
deriving instance (Integral a, Num a) => Num (W [a])
-- Integral needed because superclass Eq needs it,
-- because of the stupid context on Ratio
|
sdiehl/ghc
|
testsuite/tests/deriving/should_compile/T2856.hs
|
Haskell
|
bsd-3-clause
| 761
|
module WhereIn7 where
--A definition can be demoted to the local 'where' binding of a friend declaration,
--if it is only used by this friend declaration.
--Demoting a definition narrows down the scope of the definition.
--In this example, demote the top level 'sq' to 'sumSquares'
--This example also aims to test the split of type signature.
sumSquares x y = sq x + sq y
sq,anotherFun :: Int -> Int
sq 0 = 0
sq z = z^pow
where pow=2
anotherFun x = x^2
|
kmate/HaRe
|
test/testdata/Demote/WhereIn7.hs
|
Haskell
|
bsd-3-clause
| 472
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeFamilies #-}
module Distribution.Types.ComponentLocalBuildInfo (
ComponentLocalBuildInfo(..),
componentIsIndefinite,
maybeComponentInstantiatedWith,
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.ModuleName
import Distribution.Backpack
import Distribution.Compat.Graph
import Distribution.Types.ComponentId
import Distribution.Types.MungedPackageId
import Distribution.Types.UnitId
import Distribution.Types.ComponentName
import Distribution.Types.MungedPackageName
import Distribution.PackageDescription
import qualified Distribution.InstalledPackageInfo as Installed
-- | The first five fields are common across all algebraic variants.
data ComponentLocalBuildInfo
= LibComponentLocalBuildInfo {
-- | It would be very convenient to store the literal Library here,
-- but if we do that, it will get serialized (via the Binary)
-- instance twice. So instead we just provide the ComponentName,
-- which can be used to find the Component in the
-- PackageDescription. NB: eventually, this will NOT uniquely
-- identify the ComponentLocalBuildInfo.
componentLocalName :: ComponentName,
-- | The computed 'ComponentId' of this component.
componentComponentId :: ComponentId,
-- | The computed 'UnitId' which uniquely identifies this
-- component. Might be hashed.
componentUnitId :: UnitId,
-- | Is this an indefinite component (i.e. has unfilled holes)?
componentIsIndefinite_ :: Bool,
-- | How the component was instantiated
componentInstantiatedWith :: [(ModuleName, OpenModule)],
-- | Resolved internal and external package dependencies for this component.
-- The 'BuildInfo' specifies a set of build dependencies that must be
-- satisfied in terms of version ranges. This field fixes those dependencies
-- to the specific versions available on this machine for this compiler.
componentPackageDeps :: [(UnitId, MungedPackageId)],
-- | The set of packages that are brought into scope during
-- compilation, including a 'ModuleRenaming' which may used
-- to hide or rename modules. This is what gets translated into
-- @-package-id@ arguments. This is a modernized version of
-- 'componentPackageDeps', which is kept around for BC purposes.
componentIncludes :: [(OpenUnitId, ModuleRenaming)],
componentExeDeps :: [UnitId],
-- | The internal dependencies which induce a graph on the
-- 'ComponentLocalBuildInfo' of this package. This does NOT
-- coincide with 'componentPackageDeps' because it ALSO records
-- 'build-tool' dependencies on executables. Maybe one day
-- @cabal-install@ will also handle these correctly too!
componentInternalDeps :: [UnitId],
-- | Compatibility "package key" that we pass to older versions of GHC.
componentCompatPackageKey :: String,
-- | Compatibility "package name" that we register this component as.
componentCompatPackageName :: MungedPackageName,
-- | A list of exposed modules (either defined in this component,
-- or reexported from another component.)
componentExposedModules :: [Installed.ExposedModule],
-- | Convenience field, specifying whether or not this is the
-- "public library" that has the same name as the package.
componentIsPublic :: Bool
}
-- TODO: refactor all these duplicates
| FLibComponentLocalBuildInfo {
componentLocalName :: ComponentName,
componentComponentId :: ComponentId,
componentUnitId :: UnitId,
componentPackageDeps :: [(UnitId, MungedPackageId)],
componentIncludes :: [(OpenUnitId, ModuleRenaming)],
componentExeDeps :: [UnitId],
componentInternalDeps :: [UnitId]
}
| ExeComponentLocalBuildInfo {
componentLocalName :: ComponentName,
componentComponentId :: ComponentId,
componentUnitId :: UnitId,
componentPackageDeps :: [(UnitId, MungedPackageId)],
componentIncludes :: [(OpenUnitId, ModuleRenaming)],
componentExeDeps :: [UnitId],
componentInternalDeps :: [UnitId]
}
| TestComponentLocalBuildInfo {
componentLocalName :: ComponentName,
componentComponentId :: ComponentId,
componentUnitId :: UnitId,
componentPackageDeps :: [(UnitId, MungedPackageId)],
componentIncludes :: [(OpenUnitId, ModuleRenaming)],
componentExeDeps :: [UnitId],
componentInternalDeps :: [UnitId]
}
| BenchComponentLocalBuildInfo {
componentLocalName :: ComponentName,
componentComponentId :: ComponentId,
componentUnitId :: UnitId,
componentPackageDeps :: [(UnitId, MungedPackageId)],
componentIncludes :: [(OpenUnitId, ModuleRenaming)],
componentExeDeps :: [UnitId],
componentInternalDeps :: [UnitId]
}
deriving (Generic, Read, Show)
instance Binary ComponentLocalBuildInfo
-- | Components form a graph keyed by 'UnitId'; the edges are the
-- in-package dependencies recorded in 'componentInternalDeps'.
instance IsNode ComponentLocalBuildInfo where
    type Key ComponentLocalBuildInfo = UnitId
    nodeKey = componentUnitId
    nodeNeighbors = componentInternalDeps
-- | Whether this component still has unfilled Backpack holes.  Only
-- library components can be indefinite; every other kind is definite.
componentIsIndefinite :: ComponentLocalBuildInfo -> Bool
componentIsIndefinite clbi =
  case clbi of
    LibComponentLocalBuildInfo { componentIsIndefinite_ = indef } -> indef
    _ -> False
-- | The module instantiations of a library component, or 'Nothing' for
-- component kinds that cannot be instantiated.
maybeComponentInstantiatedWith :: ComponentLocalBuildInfo -> Maybe [(ModuleName, OpenModule)]
maybeComponentInstantiatedWith clbi =
  case clbi of
    LibComponentLocalBuildInfo { componentInstantiatedWith = insts } -> Just insts
    _ -> Nothing
|
mydaum/cabal
|
Cabal/Distribution/Types/ComponentLocalBuildInfo.hs
|
Haskell
|
bsd-3-clause
| 5,430
|
{-# LANGUAGE CPP #-}
#include "MachDeps.h"
module Main where
import Data.Bits
#if WORD_SIZE_IN_BITS != 64 && WORD_SIZE_IN_BITS != 32
# error unsupported WORD_SIZE_IN_BITS config
#endif
-- a negative integer spanning GMP_LIMB_BITS*2 bits
negativeBigInteger :: Integer
negativeBigInteger = 1 - (1 `shiftL` (64 * 2))
main = do
  -- right shift by GMP_LIMB_BITS
  print $ negativeBigInteger `shiftR` 64
|
ezyang/ghc
|
testsuite/tests/numeric/should_run/T12136.hs
|
Haskell
|
bsd-3-clause
| 406
|
module Geometry.SpatialHash where
import Algebra.Vector as V
import Data.List as List
import Data.List.Extensions as ListExt
import Data.Map as Map
import Data.Ratio as Ratio
import Data.Ratio.Extensions as RatioExt
import Data.Tuple.Extensions as TupleExt
import Geometry.AABB as AABB
-- | A spatial hash: (bin contents keyed by hashed position, origin of
-- the hash grid, dimensions of a single bin).
type SpatialHash a = (Map Vector a, Vector, Vector)
-- Accessors and setters for the triple, via the tuple-extension helpers.
positionMap = first3
origin = second3
binDimensions = third3
setPositionMap = setFirst3
setOrigin = setSecond3
setBinDimensions = setThird3
-- | Hash a point to its bin coordinate: divide componentwise by the
-- bin dimensions, then snap each coordinate to whole-bin precision.
pointHash :: Vector -> Vector -> Vector
pointHash bin_dimensions point =
  let scaled  = ListExt.map2 (/) (V.toList point) (V.toList bin_dimensions)
      snapped = List.map (RatioExt.setPrecision 1) scaled
  in V.fromList snapped
-- | All bin coordinates overlapped by an axis-aligned bounding box:
-- hash both corners and enumerate every bin coordinate in between.
aabbHash :: Vector -> AABB -> [Vector]
aabbHash bin_dimensions aabb =
  let lo = V.toList (pointHash bin_dimensions (minCorner aabb))
      hi = V.toList (pointHash bin_dimensions (maxCorner aabb))
      -- Step through one axis in unit (bin-sized) increments, with a
      -- half-bin overshoot so the upper corner's bin is included.
      axisRange = \mn mx -> (ListExt.uniformSequence 1 mn ((+) mx ((%) 1 2)))
      ranges = ListExt.map2 axisRange lo hi
  in List.map V.fromList (ListExt.crossProducts ranges)
-- | Hash a point expressed relative to the hash's origin.
-- Fixes: removed a vacuous @let ... in@ with no bindings; added the
-- missing type signature (interface unchanged).
centeredPointHash :: SpatialHash a -> Vector -> Vector
centeredPointHash spatial_hash point =
  pointHash (binDimensions spatial_hash) (V.subtract point (origin spatial_hash))
-- | Hash a bounding box expressed relative to the hash's origin.
centeredAABBHash spatial_hash aabb =
  let shifted = AABB.translate aabb (V.negate (origin spatial_hash))
  in aabbHash (binDimensions spatial_hash) shifted
-- | Insert @bin@ at the bin containing @position@, replacing any
-- previous entry for that bin.
insert position bin spatial_hash =
  let key = centeredPointHash spatial_hash position
  in setPositionMap spatial_hash (Map.insert key bin (positionMap spatial_hash))
-- | Look up the bin containing @position@.
-- Fixes: removed a vacuous @let ... in@ with no bindings.
-- Partial: uses 'Map.!', so it errors if the bin was never inserted.
lookupPoint position spatial_hash =
  (!) (positionMap spatial_hash) (centeredPointHash spatial_hash position)
-- | The contents of every bin overlapped by @aabb@.  Partial like
-- 'lookupPoint': each overlapped bin must already exist in the map.
lookupAABB aabb spatial_hash =
  let hashes = (centeredAABBHash spatial_hash aabb) :: [Vector]
  in List.map ((!) (positionMap spatial_hash)) hashes
|
stevedonnelly/haskell
|
code/Geometry/SpatialHash.hs
|
Haskell
|
mit
| 1,967
|
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Warning! This module is considered internal and may have breaking changes
module Routes.TH.Types
( -- * Data types
Resource (..)
, ResourceTree (..)
, Piece (..)
, Dispatch (..)
, CheckOverlap
, FlatResource (..)
-- ** Helper functions
, resourceMulti
, resourceTreePieces
, resourceTreeName
, flatten
) where
import Language.Haskell.TH.Syntax
data ResourceTree typ
= ResourceLeaf (Resource typ)
| ResourceParent String CheckOverlap [Piece typ] [ResourceTree typ]
deriving Functor
-- | The route pieces introduced at this node of the resource tree.
resourceTreePieces :: ResourceTree typ -> [Piece typ]
resourceTreePieces tree =
  case tree of
    ResourceLeaf r -> resourcePieces r
    ResourceParent _ _ pieces _ -> pieces
-- | The name attached to this node of the resource tree.
resourceTreeName :: ResourceTree typ -> String
resourceTreeName tree =
  case tree of
    ResourceLeaf r -> resourceName r
    ResourceParent nm _ _ _ -> nm
instance Lift t => Lift (ResourceTree t) where
lift (ResourceLeaf r) = [|ResourceLeaf $(lift r)|]
lift (ResourceParent a b c d) = [|ResourceParent $(lift a) $(lift b) $(lift c) $(lift d)|]
data Resource typ = Resource
{ resourceName :: String
, resourcePieces :: [Piece typ]
, resourceDispatch :: Dispatch typ
, resourceAttrs :: [String]
, resourceCheck :: CheckOverlap
}
deriving (Show, Functor)
type CheckOverlap = Bool
instance Lift t => Lift (Resource t) where
lift (Resource a b c d e) = [|Resource a b c d e|]
data Piece typ = Static String | Dynamic typ
deriving Show
instance Functor Piece where
fmap _ (Static s) = Static s
fmap f (Dynamic t) = Dynamic (f t)
instance Lift t => Lift (Piece t) where
lift (Static s) = [|Static $(lift s)|]
lift (Dynamic t) = [|Dynamic $(lift t)|]
data Dispatch typ =
Methods
{ methodsMulti :: Maybe typ -- ^ type of the multi piece at the end
, methodsMethods :: [String] -- ^ supported request methods
}
| Subsite
{ subsiteType :: typ
, subsiteFunc :: String
}
deriving Show
instance Functor Dispatch where
fmap f (Methods a b) = Methods (fmap f a) b
fmap f (Subsite a b) = Subsite (f a) b
instance Lift t => Lift (Dispatch t) where
lift (Methods Nothing b) = [|Methods Nothing $(lift b)|]
lift (Methods (Just t) b) = [|Methods (Just $(lift t)) $(lift b)|]
lift (Subsite t b) = [|Subsite $(lift t) $(lift b)|]
-- | The type of the trailing multi-piece of a 'Methods' resource, if
-- it has one; 'Nothing' for subsites or methods without a multi-piece.
resourceMulti :: Resource typ -> Maybe typ
resourceMulti res =
  case resourceDispatch res of
    Methods (Just t) _ -> Just t
    _                  -> Nothing
data FlatResource a = FlatResource
{ frParentPieces :: [(String, [Piece a])]
, frName :: String
, frPieces :: [Piece a]
, frDispatch :: Dispatch a
, frCheck :: Bool
}
-- | Flatten a resource tree into a list of leaf routes, accumulating
-- the ancestor (name, pieces) prefix as a difference list in @front@
-- and AND-ing overlap-check flags down from every ancestor.
flatten :: [ResourceTree a] -> [FlatResource a]
flatten =
    concatMap (go id True)
  where
    -- @check'@ is the conjunction of all ancestor overlap flags so far.
    go front check' (ResourceLeaf (Resource a b c _ check)) = [FlatResource (front []) a b c (check' && check)]
    go front check' (ResourceParent name check pieces children) =
        concatMap (go (front . ((name, pieces):)) (check && check')) children
|
ajnsit/snap-routes
|
src/Routes/TH/Types.hs
|
Haskell
|
mit
| 3,114
|
module Lambency.Shader.Var where
--------------------------------------------------------------------------------
import Lambency.Shader.Base
import Linear
--------------------------------------------------------------------------------
-- Typed tags pairing a Haskell value type with its shader variable
-- representation (each wraps an untyped tag from Lambency.Shader.Base).
matrix2Ty :: ShaderVarTy (M22 Float)
matrix2Ty = ShaderVarTy Matrix2Ty
matrix3Ty :: ShaderVarTy (M33 Float)
matrix3Ty = ShaderVarTy Matrix3Ty
matrix4Ty :: ShaderVarTy (M44 Float)
matrix4Ty = ShaderVarTy Matrix4Ty
vector2fTy :: ShaderVarTy (V2 Float)
vector2fTy = ShaderVarTy Vector2Ty
vector3fTy :: ShaderVarTy (V3 Float)
vector3fTy = ShaderVarTy Vector3Ty
vector4fTy :: ShaderVarTy (V4 Float)
vector4fTy = ShaderVarTy Vector4Ty
-- NOTE(review): the integer vector tags reuse the same VectorNTy
-- constructors as the float vectors — confirm the GLSL emitter does
-- not require distinct ivec tags here.
vector2iTy :: ShaderVarTy (V2 Int)
vector2iTy = ShaderVarTy Vector2Ty
vector3iTy :: ShaderVarTy (V3 Int)
vector3iTy = ShaderVarTy Vector3Ty
vector4iTy :: ShaderVarTy (V4 Int)
vector4iTy = ShaderVarTy Vector4Ty
intTy :: ShaderVarTy Int
intTy = ShaderVarTy IntTy
floatTy :: ShaderVarTy Float
floatTy = ShaderVarTy FloatTy
sampler1DTy :: ShaderVarTy Sampler1D
sampler1DTy = ShaderVarTy Sampler1DTy
sampler2DTy :: ShaderVarTy Sampler2D
sampler2DTy = ShaderVarTy Sampler2DTy
sampler3DTy :: ShaderVarTy Sampler3D
sampler3DTy = ShaderVarTy Sampler3DTy
shadow2DTy :: ShaderVarTy Shadow2D
shadow2DTy = ShaderVarTy Shadow2DTy
|
Mokosha/Lambency
|
lib/Lambency/Shader/Var.hs
|
Haskell
|
mit
| 1,299
|
module Physie.List(
maximumByNeighbors
, boolToList
) where
import Control.Lens (view, _2)
import Data.Function (on)
import Data.List (maximumBy)
-- | The element maximizing @f@ (compared on the middle component of the
-- zipped triples) together with its cyclic predecessor and successor:
-- the result is (previous, maximum, next) with wrap-around at the ends.
-- Partial: 'maximumBy' errors on an empty input list.
maximumByNeighbors :: Ord a => (a -> a -> Ordering) -> [a] -> (a,a,a)
maximumByNeighbors f ls = let cls = cycle ls
                          -- Dropping (length ls - 1) aligns each element
                          -- with its cyclic predecessor.
                          in maximumBy (f `on` view _2) $ zip3 (drop (length ls - 1) cls) ls (drop 1 cls)
-- | @boolToList b a@ is @[a]@ when @b@ holds and @[]@ otherwise.
boolToList :: Bool -> a -> [a]
boolToList b a = if b then [a] else []
|
pmiddend/physie
|
src/Physie/List.hs
|
Haskell
|
mit
| 469
|
import Utils
-- Project Euler 40: digits of Champernowne's constant 0.123456789101112...
-- Count of n-digit numbers (9, 90, 900, ...).
nNumbers nDig = 9 * 10^(nDig-1)
lens = map nNumbers [1..]
-- Digits contributed by all n-digit numbers: count * width.
relativeShifts = zipWith (*) lens [1..]
-- Pairs (digit width, total digits before numbers of that width start).
absoluteShifts = zip [1..] $ scanl (+) 0 relativeShifts
-- | The n-th digit (1-based) of the concatenated digit string.
nthDigit n = digit
--nthDigit n = (nDigits, shift, numberShift, nAsDigits, digitShift, digit)
  where (nDigits, shift) = last $ takeWhile (\(a,b) -> b<n) absoluteShifts
        numberShift = ((n - shift - 1) `div` nDigits) + 1
        -- numberToDigits is imported from Utils (definition not shown).
        nAsDigits = numberToDigits $ 10^(nDigits-1) + numberShift - 1
        digitShift = (n - shift - 1) `mod` nDigits
        digit = nAsDigits !! (fromIntegral digitShift)
answer = product digits
digits = map nthDigit [1, 10, 100, 1000, 10000, 100000, 1000000]
|
arekfu/project_euler
|
p0040/p0040.hs
|
Haskell
|
mit
| 696
|
{-# LANGUAGE RankNTypes #-}
{- |
Module : Orville.PostgreSQL.Connection
Copyright : Flipstone Technology Partners 2016-2021
License : MIT
-}
module Orville.PostgreSQL.Connection
( Connection,
Pool,
ConnectionUsedAfterCloseError,
ConnectionError,
SqlExecutionError (..),
NoticeReporting (EnableNoticeReporting, DisableNoticeReporting),
createConnectionPool,
executeRaw,
executeRawVoid,
escapeStringLiteral,
)
where
import Control.Concurrent (threadWaitRead, threadWaitWrite)
import Control.Concurrent.MVar (MVar, newMVar, tryReadMVar, tryTakeMVar)
import Control.Exception (Exception, mask, throwIO)
import Control.Monad (void)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as B8
import Data.Maybe (fromMaybe)
import Data.Pool (Pool, createPool)
import qualified Data.Text as T
import qualified Data.Text.Encoding as Enc
import Data.Time (NominalDiffTime)
import qualified Database.PostgreSQL.LibPQ as LibPQ
import Orville.PostgreSQL.Internal.PgTextFormatValue (NULByteFoundError (NULByteFoundError), PgTextFormatValue, toBytesForLibPQ)
{- |
 An option for 'createConnectionPool' that indicates whether LibPQ should
print notice reports for warnings to the console
-}
data NoticeReporting
= EnableNoticeReporting
| DisableNoticeReporting
{- |
 'createConnectionPool' allocates a pool of connections to a PostgreSQL server.
-}
createConnectionPool ::
  -- | Whether or not notice reporting from LibPQ should be enabled
  NoticeReporting ->
  -- | Number of stripes in the connection pool
  Int ->
  -- | Linger time before closing an idle connection
  NominalDiffTime ->
  -- | Max number of connections to allocate per stripe
  Int ->
  -- | A PostgreSQL connection string
  BS.ByteString ->
  IO (Pool Connection)
createConnectionPool noticeReporting stripes linger maxRes connectionString =
  -- Connections are opened on demand by Data.Pool and torn down with 'close'.
  createPool (connect noticeReporting connectionString) close stripes linger maxRes
{- |
 'executeRaw' runs a SQL statement and hands back the raw libpq result.
 Stepping through the result set is entirely the caller's job, so
 connections can stay busy far longer than expected unless results are
 consumed immediately *and* the data copied out.
 Use with caution.
-}
executeRaw ::
  Connection ->
  BS.ByteString ->
  [Maybe PgTextFormatValue] ->
  IO LibPQ.Result
executeRaw connection bs params =
  case traverse (traverse toBytesForLibPQ) params of
    Left err@NULByteFoundError ->
      -- A parameter contained a NUL byte, which libpq cannot transmit.
      throwIO err
    Right paramBytes ->
      underlyingExecute bs paramBytes connection
{- |
 'executeRawVoid' is 'executeRaw' with the result discarded entirely.
 Errors still surface as exceptions.
 Use with caution.
-}
executeRawVoid :: Connection -> BS.ByteString -> [Maybe PgTextFormatValue] -> IO ()
executeRawVoid connection bs params =
  void (executeRaw connection bs params)
{- |
The basic connection interface.

The underlying libpq connection lives inside an 'MVar'; the MVar is only
ever emptied by 'close', so an empty MVar marks a closed connection.
-}
newtype Connection = Connection (MVar LibPQ.Connection)
{- |
'connect' is the internal, primitive connection function.

This should not be exposed to end users, but instead wrapped in something to
create a pool. Note that handling the libpq connection with the polling is
described at
<https://hackage.haskell.org/package/postgresql-libpq-0.9.4.2/docs/Database-PostgreSQL-LibPQ.html>.
-}
connect :: NoticeReporting -> BS.ByteString -> IO Connection
connect noticeReporting connectionString =
  let -- Wait on the connection's socket (for reading or writing, as
      -- instructed by 'connectPoll'), then poll again.
      checkSocketAndThreadWait conn threadWaitFn = do
        fd <- LibPQ.socket conn
        case fd of
          Nothing -> do
            -- No file descriptor available: the connection is unusable.
            throwConnectionError "connect: failed to get file descriptor for socket" conn
          Just fd' -> do
            threadWaitFn fd'
            poll conn
      -- Drive the non-blocking connection attempt to completion.
      poll conn = do
        pollStatus <- LibPQ.connectPoll conn
        case pollStatus of
          LibPQ.PollingFailed -> do
            throwConnectionError "connect: polling failed while connecting to database server" conn
          LibPQ.PollingReading ->
            checkSocketAndThreadWait conn threadWaitRead
          LibPQ.PollingWriting ->
            checkSocketAndThreadWait conn threadWaitWrite
          LibPQ.PollingOk -> do
            -- Connected: wrap the libpq handle in an MVar so that 'close'
            -- can take it exactly once.
            connectionHandle <- newMVar conn
            pure (Connection connectionHandle)
   in do
        connection <- LibPQ.connectStart connectionString
        case noticeReporting of
          DisableNoticeReporting -> LibPQ.disableNoticeReporting connection
          EnableNoticeReporting -> LibPQ.enableNoticeReporting connection
        poll connection
{- |
'close' has many subtleties to it.

First note that async exceptions are masked. 'mask' though, only works for
things that are not interruptible
<https://www.stackage.org/haddock/lts-16.15/base-4.13.0.0/Control-Exception.html#g:13>

From the previous link, 'tryTakeMVar' is not interruptible, whereas 'takeMVar'
*is*. So by using 'tryTakeMVar' along with 'mask', we should be safe from
async exceptions causing us to not finish an underlying connection. Notice
that the only place the MVar is ever taken is here, so 'tryTakeMVar' gives us
both the non-blocking semantics to protect from async exceptions with 'mask'
_and_ should never truly return an empty result unless two threads were
racing to close the connection, in which case one of them will close the
connection.
-}
close :: Connection -> IO ()
close (Connection handle') =
  let underlyingFinish :: (forall a. IO a -> IO a) -> IO (Maybe ())
      underlyingFinish restore = do
        -- Taking (not merely reading) the MVar marks the connection closed
        -- for every other function in this module.
        underlyingConnection <- tryTakeMVar handle'
        -- 'traverse' makes finishing a no-op when the MVar was already empty.
        restore (traverse LibPQ.finish underlyingConnection)
   in void $ mask underlyingFinish
{- |
'underlyingExecute' is the internal, primitive execute function.

This is not intended to be directly exposed to end users, but instead wrapped
in something using a pool. Note there are potential dragons here in that
this calls 'tryReadMVar' and then returns an error if the MVar is not full.
The intent is to never expose the ability to empty the 'MVar' outside of this
module, so unless a connection has been closed it *should* never be empty.
And a connection should be closed upon removal from a resource pool (in which
case it can't be used for this function in the first place).
-}
underlyingExecute ::
  BS.ByteString ->
  [Maybe BS.ByteString] ->
  Connection ->
  IO LibPQ.Result
underlyingExecute bs params connection = do
  libPQConn <- readLibPQConnectionOrFailIfClosed connection
  -- All parameters are sent in text format with an inferred type (Oid 0).
  mbResult <-
    LibPQ.execParams libPQConn bs (map mkInferredTextParam params) LibPQ.Text
  case mbResult of
    Nothing -> do
      -- libpq returns Nothing when it could not even produce a result
      -- object (e.g. the connection is in a bad state).
      throwConnectionError "No result returned from exec by libpq" libPQConn
    Just result -> do
      execStatus <- LibPQ.resultStatus result
      if isRowReadableStatus execStatus
        then pure result
        else do
          throwLibPQResultError result execStatus bs
{- |
Escapes a string for use as a literal within a SQL command that will be
executed on the given connection. This uses the @PQescapeStringConn@ function
from libpq, which takes the character encoding of the connection into
account. Only the characters themselves are escaped for use in a string
literal -- the surrounding quotes are not added.
-}
escapeStringLiteral :: Connection -> BS.ByteString -> IO BS.ByteString
escapeStringLiteral connection unescaped = do
  libPQConn <- readLibPQConnectionOrFailIfClosed connection
  escaped <- LibPQ.escapeStringConn libPQConn unescaped
  maybe
    (throwConnectionError "Error while escaping string literal" libPQConn)
    pure
    escaped
-- | Reads the underlying libpq connection out of the 'Connection' wrapper,
-- throwing 'ConnectionUsedAfterCloseError' when the MVar has been emptied
-- (which only 'close' ever does).
readLibPQConnectionOrFailIfClosed :: Connection -> IO LibPQ.Connection
readLibPQConnectionOrFailIfClosed (Connection handle) =
  tryReadMVar handle
    >>= maybe (throwIO ConnectionUsedAfterCloseError) pure
-- | Builds a 'ConnectionError' from the given message plus whatever error
-- message libpq currently reports for the connection, then throws it.
throwConnectionError :: String -> LibPQ.Connection -> IO a
throwConnectionError message conn = do
  libPQMessage <- LibPQ.errorMessage conn
  throwIO $
    ConnectionError
      { connectionErrorMessage = message
      , connectionErrorLibPQMessage = libPQMessage
      }
-- | Builds a 'SqlExecutionError' from a failed libpq result and throws it,
-- falling back to a placeholder message when libpq provides none.
throwLibPQResultError ::
  LibPQ.Result ->
  LibPQ.ExecStatus ->
  BS.ByteString ->
  IO a
throwLibPQResultError result execStatus queryBS = do
  errorMessage <- LibPQ.resultErrorMessage result
  sqlState <- LibPQ.resultErrorField result LibPQ.DiagSqlstate
  let fallback = B8.pack "No error message available from LibPQ"
  throwIO
    SqlExecutionError
      { sqlExecutionErrorExecStatus = execStatus
      , sqlExecutionErrorMessage = fromMaybe fallback errorMessage
      , sqlExecutionErrorSqlState = sqlState
      , sqlExecutionErrorSqlQuery = queryBS
      }
-- | True for exec statuses whose result can be read for rows and fields.
--
-- Readable: 'CommandOk', 'TuplesOk' (successful query, possibly with 0
-- rows) and 'SingleTuple' (single-row mode). Everything else is not
-- row-readable: empty queries, the COPY sub-protocols ('CopyOut',
-- 'CopyIn', 'CopyBoth' -- the latter only used for streaming
-- replication), 'BadResponse' and 'FatalError'. ('NonfatalError' is never
-- returned from LibPQ query execution functions; libpq passes those to
-- the notice processor instead.)
isRowReadableStatus :: LibPQ.ExecStatus -> Bool
isRowReadableStatus status =
  status `elem` [LibPQ.CommandOk, LibPQ.TuplesOk, LibPQ.SingleTuple]
{- |
Packages a bytestring parameter value (which is assumed to be a value encoded
as text that the database can use) as a parameter for executing a query.

This uses Oid 0 to cause the database to infer the type of the parameter and
explicitly marks the parameter as being in Text format. A 'Nothing' input
stays 'Nothing' (i.e. a missing parameter value).
-}
mkInferredTextParam :: Maybe BS.ByteString -> Maybe (LibPQ.Oid, BS.ByteString, LibPQ.Format)
mkInferredTextParam =
  fmap (\value -> (LibPQ.Oid 0, value, LibPQ.Text))
-- | An error at the connection level (as opposed to statement execution),
-- e.g. while connecting or escaping.
data ConnectionError = ConnectionError
  { connectionErrorMessage :: String
    -- ^ A description of the operation that failed
  , connectionErrorLibPQMessage :: Maybe BS.ByteString
    -- ^ The error message reported by libpq, if any
  }

instance Show ConnectionError where
  show err =
    let libPQErrorMsg =
          case connectionErrorLibPQMessage err of
            Nothing ->
              -- Note: fixed typo in this user-facing message
              -- ("underying" -> "underlying").
              "<no underlying error available>"
            Just libPQMsg ->
              case Enc.decodeUtf8' libPQMsg of
                Right decoded ->
                  T.unpack decoded
                Left decodingErr ->
                  "Error decoding libPQ messages as utf8: " <> show decodingErr
     in connectionErrorMessage err <> ": " <> libPQErrorMsg

instance Exception ConnectionError
-- | An error reported while executing a SQL statement.
data SqlExecutionError = SqlExecutionError
  { sqlExecutionErrorExecStatus :: LibPQ.ExecStatus
    -- ^ The libpq exec status of the failed result
  , sqlExecutionErrorMessage :: BS.ByteString
    -- ^ The error message from libpq (or a placeholder when none is given)
  , sqlExecutionErrorSqlState :: Maybe BS.ByteString
    -- ^ The SQLSTATE error field, when provided
  , sqlExecutionErrorSqlQuery :: BS.ByteString
    -- ^ The SQL that was being executed
  }
  deriving (Show)

instance Exception SqlExecutionError
-- | Thrown when an operation is attempted on a 'Connection' whose
-- underlying libpq handle has already been finished by 'close'.
data ConnectionUsedAfterCloseError
  = ConnectionUsedAfterCloseError
  deriving (Show)

instance Exception ConnectionUsedAfterCloseError
|
flipstone/orville
|
orville-postgresql-libpq/src/Orville/PostgreSQL/Connection.hs
|
Haskell
|
mit
| 11,198
|
{-
Copyright (c) 2008, 2013
Russell O'Connor
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-}
-- |Defines the Y'CbCr and Y'PbPr colour spaces in accordance with
-- ITU-R Recommendation BT.601 used for 625-line (PAL) standard
-- definition television (SDTV).
--
-- For high definition television (HDTV) see "Data.Colour.HDTV".
module Data.Colour.SDTV625
{-
(Colour
,luma
,y'PbPr, toY'PbPr
,y'CbCr, toY'CbCr
)
-}
where
import Data.Word
import Data.Colour.RGBSpace
import Data.Colour.SRGB (sRGBSpace)
import Data.Colour.CIE.Illuminant (d65)
import Data.Colour.CIE
import Data.Colour.SDTV
import qualified Data.Colour.Luma as L
-- | The RGB space for 625-line SDTV: the gamut below uses the 625-line
-- chromaticity primaries with the D65 white point, and reuses the sRGB
-- transfer function for the non-linear encoding.
space :: (Ord a, Floating a) => RGBSpace a
space = mkRGBSpace gamut transfer
 where
  gamut = mkRGBGamut (RGB (mkChromaticity 0.64 0.33)
                          (mkChromaticity 0.29 0.60)
                          (mkChromaticity 0.15 0.06))
                     d65
  transfer = transferFunction sRGBSpace
{- rec 601 luma -}
-- |Luma (Y') approximates the 'Data.Colour.CIE.lightness' of a 'Colour'.
-- Delegates to 'Data.Colour.Luma' using this module's luma coefficients
-- and RGB 'space'.
luma :: (Ord a, Floating a) => Colour a -> a
luma = L.luma lumaCoef space
-- |Construct a 'Colour' from Y'PbPr coordinates
-- (delegating to 'Data.Colour.Luma' with this module's 'space').
y'PbPr :: (Ord a, Floating a) => a -> a -> a -> Colour a
y'PbPr = L.y'PbPr lumaCoef space
-- |Returns the Y'PbPr coordinates of a 'Colour'
-- (delegating to 'Data.Colour.Luma' with this module's 'space').
toY'PbPr :: (Ord a, Floating a) => Colour a -> (a, a, a)
toY'PbPr = L.toY'PbPr lumaCoef space
-- |Construct a 'Colour' from Y'CbCr studio 8-bit coordinates.
y'CbCr :: (Floating a, RealFrac a) => Word8 -> Word8 -> Word8 -> Colour a
y'CbCr = L.y'CbCr lumaCoef space
-- |Returns the Y'CbCr studio 8-bit coordinates of a 'Colour'.
toY'CbCr :: (Floating a, RealFrac a) => Colour a -> (Word8, Word8, Word8)
toY'CbCr = L.toY'CbCr lumaCoef space
-- |Construct a 'Colour' from R'G'B' studio 8-bit coordinates.
r'g'b' :: (Floating a, RealFrac a) => Word8 -> Word8 -> Word8 -> Colour a
r'g'b' = L.r'g'b' space
-- |Returns the R'G'B' studio 8-bit coordinates of a 'Colour'.
toR'G'B' :: (Floating a, RealFrac a) => Colour a -> RGB Word8
toR'G'B' = L.toR'G'B' space
|
haasn/colour
|
Data/Colour/SDTV625.hs
|
Haskell
|
mit
| 3,037
|
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
-- | This module creates a class for automating the construction and
-- destruction of JSON objects
module JSON where
import Haste
import Haste.Graphics.Canvas
import Haste.JSON
import Prelude hiding (head, tail, init, last, read, (!!))
import Safe (atMay,headMay)
-- | Values which can be converted back and forth from JSON. The main
-- class law is that
--
-- > fromJSON . toJSON = Just
--
-- (i.e. a round trip through JSON always succeeds and recovers the
-- original value). Note that
--
-- > toJSON . fromJSON == id
--
-- does *NOT* hold, as that would imply preserving whitespace and
-- ordering of generic JSON files.
class JSONable a where
  toJSON :: a -> JSON -- ^ Convert the value into JSON
  fromJSON :: JSON -> Maybe a -- ^ Extract a value from JSON or return Nothing on a failure
-- | Turns a Double into a generic JSON number; parsing accepts only
-- numeric JSON values.
instance JSONable Double where
  toJSON = Num
  fromJSON (Num x) = Just x
  fromJSON _ = Nothing
-- | Turns a string into a generic JSON string. Note that it doesn't
-- always work as expected, since String = [Char], so Haskell can't
-- distinguish this instance from a [Char], even though [Char] is not an
-- instance of JSONable on its own.
instance JSONable String where
  toJSON x = Str $ toJSString x
  fromJSON (Str x) = Just $ fromJSStr x
  fromJSON _ = Nothing
-- | Turns a list of JSONable objects into a JSON array; parsing fails if
-- any element fails to parse.
instance JSONable a => JSONable [a] where
  toJSON xs = Arr (map toJSON xs)
  fromJSON (Arr xs) = traverse fromJSON xs
  fromJSON _ = Nothing
-- | Turns a Point into a two element JSON array.
instance JSONable Point where
  toJSON (x,y) = Arr . map toJSON $ [x,y]
  -- Reads the first two array elements; yields Nothing when the array is
  -- too short or either element fails to parse.
  fromJSON (Arr ps) = (,) <$> (headMay ps >>= fromJSON)
                          <*> (ps `atMay` 1 >>= fromJSON)
  fromJSON _ = Nothing
-- | Pull a value from a JSON object by key, returning Nothing when the
-- key is absent or the value fails to parse.
(~~>) :: (JSONable a) => JSON -> JSString -> Maybe a
d ~~> key = fromJSON =<< (d ~> key)
|
rprospero/PhotoAlign
|
JSON.hs
|
Haskell
|
mit
| 1,880
|
-- Double all integers in a list.
module Double where
-- | Double every integer in the given list.
double :: [Integer] -> [Integer]
double = map (2 *)

{- GHCi>
double []
double [1]
double [1, 1]
-}
-- []
-- [2]
-- [2, 2]
|
pascal-knodel/haskell-craft
|
Examples/· Recursion/· Primitive Recursion/Lists/Double.hs
|
Haskell
|
mit
| 266
|
-- Core.hs: The core λ calculus of simpl.
-- Copyright 2014 Jack Pugmire
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module Simpl.Core where
import Data.List (find)
-- | Names of globals and type variables.
type Name = String

-- | Computations that may fail with an error message.
type Result a = Either String a

-- | A named top-level value together with its type.
data Global = Global { gName :: Name
                     , gValue :: Value
                     , gType :: Type
                     }

-- | The evaluation environment is simply a list of globals.
type Env = [Global]
-- | Terms of the core lambda calculus.
data Term = EAnn Term Type -- ^ A term annotated with its type
          | EApp Term Term -- ^ Application
          | EGlobal Name   -- ^ Reference to a global by name
          | ELam Term      -- ^ Lambda abstraction (body only; bound
                           -- variables are referenced via 'EVar')
          | EVar Int       -- ^ Variable, referenced by integer index
          | EUnit Bool     -- ^ Literal of type 'TUnit' carrying a 'Bool'
-- | Types: the unit type, function types and type variables.
data Type = TUnit
          | TFun Type Type
          | TVar Name
  deriving (Eq)

-- | Renders types using arrow notation. A function type appearing on the
-- left of an arrow is parenthesised, reflecting right-associativity.
instance Show Type where
  show TUnit = "Unit"
  show (TFun t t') = lhs t ++ " -> " ++ show t'
    where
      lhs f@(TFun _ _) = "(" ++ show f ++ ")"
      lhs other = show other
  show (TVar n) = n
-- | Runtime values: unit/boolean values and functions. Functions are
-- represented semantically, as Haskell functions that may fail.
data Value = VUnit Bool
           | VLam (Value -> Result Value)

-- | Unit values render as T/F; function bodies cannot be shown, so a
-- lambda marker is printed instead.
instance Show Value where
  show (VUnit True) = "T"
  show (VUnit False) = "F"
  show (VLam _) = "λ"
-- Look up the value of a global.
-- Signature now uses the 'Name' alias (identical to 'String') for
-- consistency with 'lookupGlobalT'; fails with a descriptive Left when no
-- global with that name exists.
lookupGlobal :: Name -> Env -> Result Value
lookupGlobal n = f . find (\(Global n' _ _) -> n == n')
  where f = maybe (Left ("No such global " ++ show n)) (Right . gValue)
-- Look up the type of a global; fails with a descriptive Left when no
-- global with that name exists.
lookupGlobalT :: Name -> Env -> Result Type
lookupGlobalT n env =
  case find ((n ==) . gName) env of
    Nothing -> Left ("No such global " ++ show n)
    Just g -> Right (gType g)
|
jepugs/simpl
|
Simpl/Core.hs
|
Haskell
|
apache-2.0
| 1,932
|
{- |
Module : Bio.Motions.Utils.Geometry
Description : Utility geometry functions.
License : Apache
Stability : experimental
Portability : unportable
-}
{-# LANGUAGE RecordWildCards #-}
module Bio.Motions.Utils.Geometry where
import Control.Monad
import Control.Applicative
import Data.Maybe
import Linear
-- | Points are integer 3D vectors.
type Point = V3 Int

-- | A triangle given by its three vertices.
data Triangle = Triangle
    { p1 :: Point
    , p2 :: Point
    , p3 :: Point
    }

-- | A segment between two endpoints.
data Segment = Segment
    { p :: Point
    , q :: Point
    }

-- | An angle described by its two arm vectors.
data Angle = Angle
    { v1 :: V3 Int
    , v2 :: V3 Int
    }

-- | A ray from an origin point along a direction vector.
data Ray = Ray
    { o :: Point
    , dir :: V3 Int
    }

-- | An axis-aligned box given by two opposite corners.
data Cube = Cube
    { minCorner :: Point
    , maxCorner :: Point
    }
-- |Tests whether a segment intersects a triangle in 3D.
-- Gives correct results when the triangle is actually a segment.
-- Works well with all nondegenerate edge cases, e.g. intersection at a vertex.
-- Gives unspecified results in more degenerate cases, i.e. the triangle or the segment is a point.
-- Warning: possible integer overflows with large distances.
--
-- All arithmetic is exact and integral: w and s are scalar triple
-- products (signed volumes) of the segment's endpoints against the
-- triangle's plane, and t, u, v act as unnormalised barycentric-style
-- quantities whose signs decide containment.
intersectsTriangle :: Triangle -> Segment -> Bool
intersectsTriangle tri@Triangle{..} seg
  -- Endpoint 'p seg' is off the plane: the endpoints must straddle the
  -- plane and the crossing must fall within the triangle.
  | w /= 0 = isJust $ do
      let sgn = signum w
      guard $ sgn * s <= 0
      let w2 = a `cross` d
          t = w2 `dot` c
      guard $ sgn * t >= 0
      let u = - w2 `dot` b
      guard $ sgn * u >= 0
      let v = w - s - t - u
      guard $ sgn * v >= 0
  -- 'p seg' lies in the plane but 'q seg' does not: same test with the
  -- endpoints' roles swapped.
  | s /= 0 = isJust $ do
      let sgn = signum s
      let w2 = d `cross` a
          t = w2 `dot` c
      guard $ sgn * t >= 0
      let u = - w2 `dot` b
      guard $ sgn * u >= 0
      let v = s - w - t - u
      guard $ sgn * v >= 0
  -- Both endpoints are coplanar with the triangle: reduce to segment/edge
  -- intersections, or the segment lying wholly inside the triangle.
  | otherwise = any (intersectsSegment seg) [Segment p1 p2, Segment p1 p3, Segment p2 p3]
      || pointInsideTriangle (p seg) tri
  where
    a = p seg - p3
    b = p1 - p3
    c = p2 - p3
    d = q seg - p3
    w1 = b `cross` c -- unnormalised normal of the triangle
    w = a `dot` w1   -- signed volume of 'p seg' against the plane
    s = d `dot` w1   -- signed volume of 'q seg' against the plane
-- |Tests whether two segments in 3D intersect.
-- Returns true for all nondegenerate edge cases, e.g. intersection at a vertex.
-- Gives unspecified results in degenerate cases.
-- Warning: possible integer overflows with large distances.
intersectsSegment :: Segment -> Segment -> Bool
intersectsSegment s1 s2 =
  let r = q s1 - p s1
      s = q s2 - p s2
      rxs = r `cross` s
      p1p2 = p s2 - p s1
  in if rxs == 0 && p1p2 `cross` r == 0 then
       -- Collinear case: project s2's endpoints onto s1's direction and
       -- check the parameter intervals overlap. Parameters are scaled by
       -- |r|^2 ('dr') so everything stays in integer arithmetic.
       let t0' = p1p2 `dot` r
           t1' = t0' + s `dot` r
           (t0, t1) = if s `dot` r < 0 then (t1', t0') else (t0', t1')
           dr = r `dot` r
       in (0 <= t0 && t0 <= dr) || (0 <= t1 && t1 <= dr) || (t0 <= 0 && dr <= t1)
     else
       -- General case: solve for the scaled intersection parameters d1,
       -- d2 (each in [0, drxs]); the final vector equality rejects skew
       -- (non-coplanar) segments, where no exact solution exists.
       let d1 = (p1p2 `cross` s) `dot` rxs
           d2 = (p1p2 `cross` r) `dot` rxs
           drxs = rxs `dot` rxs
       in rxs /= 0 && 0 <= d1 && d1 <= drxs && 0 <= d2 && d2 <= drxs
          && drxs *^ p1p2 == d1 *^ r - d2 *^ s
-- |Tests whether a segment goes through a point.
-- Assumes that the segment is nondegenerate, i.e. it is not a point.
-- Warning: possible integer overflows with large distances.
pointInsideSegment :: Point -> Segment -> Bool
pointInsideSegment v seg =
  collinear && 0 <= proj && proj <= len2
  where
    pv = v - p seg
    pq = q seg - p seg
    collinear = pv `cross` pq == 0 -- v lies on the segment's line
    proj = pv `dot` pq             -- scaled position of v along the line
    len2 = pq `dot` pq             -- squared segment length (scale factor)
-- |Tests whether a point is (not necessarily strictly) inside a triangle.
-- Assumes that the three points defining the triangle are pairwise different.
-- Works well when the triangle is actually a segment.
-- Warning: possible integer overflows with large distances.
pointInsideTriangle :: Point -> Triangle -> Bool
pointInsideTriangle p t@Triangle{..} =
  -- The determinant being zero means the point is coplanar with the
  -- triangle; the 2D test then decides containment within that plane.
  det33 (V3 (p2 - p1) (p3 - p1) (p - p1)) == 0 && pointInsideTriangle2D p t
-- |Tests whether a point is (not necessarily strictly) inside a triangle.
-- Assumes that the three points defining the triangle are pairwise different.
-- Works well when the triangle is actually a segment.
-- Assumes that the point and the triangle are coplanar.
-- Warning: possible integer overflows with large distances.
--
-- The point is inside iff, at every vertex, the vector towards the point
-- lies inside the interior angle formed by the two adjacent edges.
pointInsideTriangle2D :: Point -> Triangle -> Bool
pointInsideTriangle2D p Triangle{..} =
  vectorInsideAngle p1p (Angle p1p2 p1p3) &&
  vectorInsideAngle p2p (Angle p2p1 p2p3) &&
  vectorInsideAngle p3p (Angle p3p1 p3p2)
  where
    -- vectors from each vertex to the query point
    p1p = p - p1
    p2p = p - p2
    p3p = p - p3
    -- edge vectors between vertices (and their opposites)
    p1p2 = p2 - p1
    p1p3 = p3 - p1
    p2p3 = p3 - p2
    p2p1 = - p1p2
    p3p1 = - p1p3
    p3p2 = - p2p3
-- |Tests whether a vector is (not necessarily strictly) inside an angle.
-- When any of the angle vectors is zero or the angle vectors have opposing directions,
-- returns true for all vectors.
-- Warning: possible integer overflows with large distances.
vectorInsideAngle :: V3 Int -> Angle -> Bool
vectorInsideAngle v a@Angle{..} = det33 (V3 v v1 v2) == 0 && vectorInsideAngle2D v a
-- |Tests whether a vector is (not necessarily strictly) inside an angle.
-- When any of the angle vectors is zero or the angle vectors have opposing directions,
-- returns true for all vectors.
-- Assumes that the vector and the angle are coplanar.
-- Warning: possible integer overflows with large distances.
--
-- The sign comparisons of cross products check that v lies on the inner
-- side of both arms; the final disjunct handles the straight-angle case
-- (v1cv2 == 0) via dot products.
vectorInsideAngle2D :: V3 Int -> Angle -> Bool
vectorInsideAngle2D v Angle{..} =
  v1cv2 * v1cv >= 0 && v2cv1 * v2cv >= 0 && v1cv * v2cv <= 0 &&
  (v1cv2 /= 0 || v `dot` v1 >= 0 || v `dot` v2 >= 0)
  where
    v1cv2 = v1 `cross` v2
    v1cv = v1 `cross` v
    v2cv = v2 `cross` v
    v2cv1 = - v1cv2
-- |Tests whether the given ray goes through the given point.
-- Assumes that the ray's vector is nonzero.
-- Warning: possible integer overflows with large distances.
pointInsideRay :: Point -> Ray -> Bool
pointInsideRay pt ray = collinear && forward
  where
    op = pt - o ray
    collinear = op `cross` dir ray == 0 -- point lies on the ray's line
    forward = op `dot` dir ray >= 0     -- and not behind the origin
-- |The bounding cube for a set of points.
-- The input list must be non-empty (uses 'foldr1').
-- Warning: possible integer overflows with large distances.
boundingCube :: [Point] -> Cube
boundingCube points = Cube (corner min) (corner max)
  where
    -- componentwise minimum/maximum across all points
    corner pick = foldr1 (liftA2 pick) points
-- |A cube extended in each direction by the given radius.
-- Warning: possible integer overflows with large distances.
extendedCube :: Int -> Cube -> Cube
extendedCube radius cube =
  Cube (subtract radius <$> minCorner cube) ((+ radius) <$> maxCorner cube)
|
Motions/motions
|
src/Bio/Motions/Utils/Geometry.hs
|
Haskell
|
apache-2.0
| 6,405
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Application
( makeApplication
, getApplicationDev
, makeFoundation
) where
import Import
import Settings
import Yesod.Auth
import Yesod.Default.Config
import Yesod.Default.Main
import Yesod.Default.Handlers
import Network.Wai.Middleware.RequestLogger (logStdout, logStdoutDev)
import qualified Database.Persist.Store
import Network.HTTP.Conduit (newManager, def)
import Database.Persist.GenericSql (runMigration)
import Data.HashMap.Strict as H
import Data.Aeson.Types as AT
#ifndef DEVELOPMENT
import qualified Web.Heroku
#endif
-- Import all relevant handler modules here.
-- Don't forget to add new modules to your cabal file!
import Handler.Home
import Handler.Feedings
import Handler.User
-- This line actually creates our YesodDispatch instance. It is the second half
-- of the call to mkYesodData which occurs in Foundation.hs. Please see the
-- comments there for more details.
-- (Template Haskell splice; the handler modules must be imported above.)
mkYesodDispatch "App" resourcesApp
-- This function allocates resources (such as a database connection pool),
-- performs initialization and creates a WAI application. This is also the
-- place to put your migrate statements to have automatic database
-- migrations handled by Yesod.
makeApplication :: AppConfig DefaultEnv Extra -> IO Application
makeApplication conf = do
  foundation <- makeFoundation conf
  -- Wrap the WAI app in a request logger appropriate for the environment.
  logWare <$> toWaiAppPlain foundation
  where
    logWare
      | development = logStdoutDev
      | otherwise = logStdout
{- commented out for heroku
makeFoundation :: AppConfig DefaultEnv Extra -> IO App
makeFoundation conf = do
manager <- newManager def
s <- staticSite
dbconf <- withYamlEnvironment "config/mongoDB.yml" (appEnv conf)
Database.Persist.Store.loadConfig >>=
Database.Persist.Store.applyEnv
p <- Database.Persist.Store.createPoolConfig (dbconf :: Settings.PersistConfig)
return $ App conf s p manager dbconf
-}
-- for yesod devel
getApplicationDev :: IO (Int, Application)
getApplicationDev =
  defaultDevelApp loader makeApplication
  where
    -- Development config extended with this app's extra-settings parser.
    loader = loadConfig (configSettings Development)
      { csParseExtra = parseExtra
      }
-- Allocates the foundation: HTTP manager, static site, and a persistent
-- database pool configured from config/postgresql.yml merged with
-- Heroku-provided connection parameters. Runs migrations before returning.
makeFoundation :: AppConfig DefaultEnv Extra -> IO App
makeFoundation conf = do
  manager <- newManager def
  s <- staticSite
  hconfig <- loadHerokuConfig
  -- Heroku settings take precedence over the YAML file (left-biased merge
  -- in combineMappings).
  dbconf <- withYamlEnvironment "config/postgresql.yml" (appEnv conf)
            (Database.Persist.Store.loadConfig . combineMappings hconfig) >>=
            Database.Persist.Store.applyEnv
  p <- Database.Persist.Store.createPoolConfig (dbconf :: Settings.PersistConfig)
  Database.Persist.Store.runPool dbconf (runMigration migrateAll) p
  return $ App conf s p manager dbconf
#ifndef DEVELOPMENT
-- Heroku reports the database name under the key "dbname"; persistent
-- expects the key "database".
canonicalizeKey :: (Text, val) -> (Text, val)
canonicalizeKey ("dbname", val) = ("database", val)
canonicalizeKey pair = pair

-- Build an aeson object out of textual key/value pairs.
toMapping :: [(Text, Text)] -> AT.Value
toMapping xs = AT.Object $ H.fromList $ Import.map (\(key, val) -> (key, AT.String val)) xs
#endif
-- Merge two JSON objects (left-biased via 'H.union'); any non-object
-- arguments are a programming error.
combineMappings :: AT.Value -> AT.Value -> AT.Value
combineMappings (AT.Object m1) (AT.Object m2) = AT.Object $ m1 `H.union` m2
combineMappings _ _ = error "Data.Object is not a Mapping."
-- In development there are no Heroku-provided settings, so an empty
-- object is returned; otherwise read database connection parameters from
-- the Heroku environment and canonicalize their keys.
loadHerokuConfig :: IO AT.Value
loadHerokuConfig = do
#ifdef DEVELOPMENT
    return $ AT.Object H.empty
#else
    Web.Heroku.dbConnParams >>= return . toMapping . Import.map canonicalizeKey
#endif
|
svdberg/yesod-milk
|
Application.hs
|
Haskell
|
bsd-2-clause
| 3,427
|
module Main where
import System.Console.CmdArgs
import Network.HTTP.Neon.ProgType
import Network.HTTP.Neon.Command
-- | Entry point: print a banner, parse command-line arguments, and hand
-- them off to the command processor.
main :: IO ()
main = do
  putStrLn "hneon"
  cmdArgs mode >>= commandLineProcess
|
wavewave/hneon
|
exe/hneon.hs
|
Haskell
|
bsd-2-clause
| 214
|
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-missing-import-lists #-}
{-# OPTIONS_GHC -fno-warn-implicit-prelude #-}
module Paths_ChatServer (
version,
getBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
#if defined(VERSION_base)
#if MIN_VERSION_base(4,0,0)
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
#else
catchIO :: IO a -> (Exception.Exception -> IO a) -> IO a
#endif
#else
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
#endif
-- Run an IO action, handling exceptions with the given handler. (The CPP
-- above selects the exception type matching the base library version.)
catchIO = Exception.catch
-- The package version recorded by Cabal (the tags list is unused).
version :: Version
version = Version [0,1,0,0] []
-- Compile-time install locations recorded by Cabal for this sandbox
-- build; used as fallbacks when the corresponding "ChatServer_*"
-- environment variables are not set.
bindir, libdir, dynlibdir, datadir, libexecdir, sysconfdir :: FilePath
bindir = "C:\\Users\\diarm\\IdeaProjects\\ChatServer\\.cabal-sandbox\\bin"
libdir = "C:\\Users\\diarm\\IdeaProjects\\ChatServer\\.cabal-sandbox\\x86_64-windows-ghc-8.2.1\\ChatServer-0.1.0.0-I7LJzhEVoAY7jb523Kzp1o"
dynlibdir = "C:\\Users\\diarm\\IdeaProjects\\ChatServer\\.cabal-sandbox\\x86_64-windows-ghc-8.2.1"
datadir = "C:\\Users\\diarm\\IdeaProjects\\ChatServer\\.cabal-sandbox\\x86_64-windows-ghc-8.2.1\\ChatServer-0.1.0.0"
libexecdir = "C:\\Users\\diarm\\IdeaProjects\\ChatServer\\.cabal-sandbox\\ChatServer-0.1.0.0-I7LJzhEVoAY7jb523Kzp1o\\x86_64-windows-ghc-8.2.1\\ChatServer-0.1.0.0"
sysconfdir = "C:\\Users\\diarm\\IdeaProjects\\ChatServer\\.cabal-sandbox\\etc"
-- Each getter prefers the "ChatServer_<dir>" environment variable and
-- falls back to the compiled-in path when the variable is unset.
getBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
getBinDir = catchIO (getEnv "ChatServer_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "ChatServer_libdir") (\_ -> return libdir)
getDynLibDir = catchIO (getEnv "ChatServer_dynlibdir") (\_ -> return dynlibdir)
getDataDir = catchIO (getEnv "ChatServer_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "ChatServer_libexecdir") (\_ -> return libexecdir)
getSysconfDir = catchIO (getEnv "ChatServer_sysconfdir") (\_ -> return sysconfdir)
-- Resolve a data file name relative to the (possibly overridden) data
-- directory, joining with a Windows path separator.
getDataFileName :: FilePath -> IO FilePath
getDataFileName name =
  fmap (\dir -> dir ++ "\\" ++ name) getDataDir
|
mcdonndi/ChatServer
|
dist/dist-sandbox-a117d482/build/ChatServer/autogen/Paths_ChatServer.hs
|
Haskell
|
bsd-3-clause
| 2,170
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.