code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE MultiParamTypeClasses
, FunctionalDependencies
, NoMonomorphismRestriction
, FlexibleInstances
, UndecidableInstances #-}
module Graphics.Colorful.Colorful where
import Control.Applicative((<$>))
import Control.Monad.Random.Class
import Data.Fixed (mod')
import Graphics.Colorful.Utils
-- | Draw one random colour: each of the three channels is an
-- independent uniform sample from [0, 1].
singleUniform :: (MonadRandom m, Color c) => m c
singleUniform = do
    r <- getRandomR (0.0, 1.0)
    g <- getRandomR (0.0, 1.0)
    b <- getRandomR (0.0, 1.0)
    return (mkColor r g b)
-- | Generate @n@ colours, each drawn independently via 'singleUniform'.
uniform :: (MonadRandom m, Color c) => Int -> m [c]
uniform n = generate singleUniform n
-- | Perturb a seed colour: scale all of its channels by a common
-- factor chosen so the channel average moves by a random amount in
-- [-offset, +offset].
singleOffset :: (MonadRandom m, Color c)
             => Double
             -> c
             -> m c
singleOffset maxShift seed = do
    t <- getRandomR (0.0, 1.0)
    let base    = average seed
        shifted = base + 2*t * maxShift - maxShift
        -- the small epsilon keeps the division defined when base is 0
        scale   = shifted / (base + 0.00001)
    return (mapColor (*scale) seed)
-- | Generate @n@ colours, each an independent perturbation of the
-- seed colour (see 'singleOffset').
offset :: (MonadRandom m, Color c) => Double -> c -> Int -> m [c]
offset maxShift seed n = generate (singleOffset maxShift seed) n
-- | Mix the given seed colours with random, normalised weights.  The
-- weight of one randomly chosen seed is additionally scaled by the
-- "grey control" factor @g@ before normalisation.
randomMix :: (Functor m, MonadRandom m, Color c)
          => [c]
          -> Double {- | "grey control" -}
          -> m c
randomMix cs g = do
  -- pick which seed's weight receives the grey-control scaling
  index <- getRandomR (0, length cs - 1)
  rands <- take (length cs) <$> getRandomRs (0.0, 1.0)
  -- mapIth (from Graphics.Colorful.Utils) presumably applies (*g) only
  -- at position 'index' and 'id' elsewhere — TODO confirm
  let ratios = mapIth index id (*g) rands
  let sum' = sum ratios
  -- (<$$>) is presumably flipped fmap (Utils); normalise the weights
  let divRatios = ratios <$$> (/sum')
  -- weighted sum of one channel (selected by f) across all seeds
  let mult f = sum $ zipWith (*) (f <$> cs) divRatios
  return $ mkColor (mult getX) (mult getY) (mult getZ)
-- | Random mix of exactly three seed colours; the final argument is
-- the "grey control" factor passed through to 'randomMix'.
triad :: (Functor m, MonadRandom m, Color c)
      => c
      -> c
      -> c
      -> Double
      -> m c
triad c0 c1 c2 greyControl = randomMix [c0, c1, c2] greyControl
-- | One colour of a hue-harmony scheme.  A random point is drawn from
-- the combined range [0, r0+r1+r2); points falling in the second or
-- third sub-range are shifted by the angular offsets o1 / o2, and the
-- result is added to the reference angle (wrapped into [0, 1)).
singleHarmony :: (Functor m, MonadRandom m)
              => Double {- some random angle -}
              -> Double
              -> Double
              -> Double
              -> Double
              -> Double
              -> Double
              -> Double
              -> m ColorHSL
singleHarmony ref o1 o2 r0 r1 r2 sat lum = do
    u <- (* (r0 + r1 + r2)) <$> getRandomR (0.0, 1.0)
    let spread
          | u <= r0     = u          -- first range: no offset
          | u < r0 + r1 = u + o1     -- second range
          | otherwise   = u + o2     -- third range
    return (mkColorHSL ((ref + spread) `mod'` 1.0) sat lum)
-- | 'singleHarmony' with a uniformly random reference angle.
harmony :: (Functor m, MonadRandom m)
        => Double
        -> Double
        -> Double
        -> Double
        -> Double
        -> Double
        -> Double
        -> m ColorHSL
harmony o1 o2 r0 r1 r2 sat lum =
    getRandomR (0.0, 1.0) >>= \ref -> singleHarmony ref o1 o2 r0 r1 r2 sat lum
-- | The i-th of n evenly spaced hues between hStart and hEnd, shifted
-- by the cell offset @co@.  No randomness is actually consumed here.
singleRainbow :: MonadRandom m
              => Double
              -> Double
              -> Double
              -> Double
              -> Double
              -> Int
              -> Int
              -> m ColorHSL
singleRainbow hStart hEnd co sat lum i n =
    return (mkColorHSL hue sat lum)
  where
    width = (hEnd - hStart) / fromIntegral n
    hue   = width * fromIntegral i + co + hStart
-- FIXME: the hue step should be baked relative to the number of colors?
-- | Generate a rainbow of evenly spaced hues via 'singleRainbow'.
rainbow :: MonadRandom m
        => Double {- | Start hue in [0,1) -}
        -> Double {- | End hue in [0,1), must be >= start hue -}
        -> Double {- | Cell offset = Random double in [0, hue step) -}
        -> Double {- | Sat -}
        -> Double {- | Lum -}
        -> Int {- | Number of colors to generate-}
        -> m [ColorHSL]
rainbow hStart hEnd co sat lum =
    generateC2 (singleRainbow hStart hEnd co sat lum)
-- | The golden-ratio conjugate (1/phi); used by 'singleGoldenRatio' as
-- the per-index hue increment.
goldenRatioConjugate :: Double
goldenRatioConjugate = 0.618033988749895
-- | The i-th colour of the golden-ratio hue walk: starting from the
-- base hue, advance by the golden-ratio conjugate i times and wrap
-- into [0, 1).
--
-- Fix: the original computed the stepped hue @hue'@ but then returned
-- @hue@, so every generated colour had the identical base hue and the
-- binding @hue'@ was dead — the walk never advanced.
singleGoldenRatio :: MonadRandom m
                  => Double
                  -> Double
                  -> Double
                  -> Int
                  -> m ColorHSL
singleGoldenRatio sat lum hue i = do
    let hue' = (hue + goldenRatioConjugate * (fromIntegral i)) `mod'` 1.0
    return $ mkColorHSL hue' sat lum
{- | Generates a number of colors that are in high contrast to each other.
Note, this is not very useful for more than 5-6 colors. -}
goldenRatioRainbow :: MonadRandom m
                   => Double {- | The saturation of the resulting colors. -}
                   -> Double {- | The lightness of the resulting colors. -}
                   -> Int {- | The number of desired colors. -}
                   -> m [ColorHSL]
goldenRatioRainbow sat lum n =
    -- pick a random starting hue, then walk from it
    getRandomR (0.0, 1.0) >>= \startHue -> generateC (singleGoldenRatio sat lum startHue) n
-- | Run an index-independent generator @n@ times.
generate :: (MonadRandom m)
         => m c
         -> Int
         -> m [c]
generate gen = generateC (const gen)
-- | Run a generator that sees its own index but not the total count.
generateC :: (MonadRandom m)
          => (Int -> m c)
          -> Int
          -> m [c]
generateC gen = generateC2 (const gen)
-- | Generate @n@ values by running @gen i n@ for each index @i@ in
-- [0 .. n-1].  Note: results are accumulated by consing, so the
-- returned list is in reverse index order (index n-1 first).
generateC2 :: (MonadRandom m)
           => (Int -> Int -> m c)
           -> Int
           -> m [c]
generateC2 gen n =
    helper [] 0 gen
  where helper acc i ma
          | i >= n = return acc
          | otherwise = do m <- ma i n
                           helper (m : acc) (i + 1) ma
| deweyvm/colorful | src/Graphics/Colorful/Colorful.hs | mit | 5,184 | 0 | 14 | 1,991 | 1,590 | 808 | 782 | 145 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Concurrent (threadDelay)
import Control.Monad (forever, when, void)
import Data.Int (Int64)
import Data.Maybe (fromMaybe)
import Control.Monad.Trans (liftIO)
import qualified Data.Map as M
import Data.Text (Text)
import Data.Time (formatTime, getCurrentTime)
import qualified Network.MPD as MPD
import Network.Lastfm
import qualified Network.Lastfm.Track as Track
import Loh.Client (send)
import System.Locale (defaultTimeLocale)
-- | Snapshot of a song MPD played, plus the unix time it started.
data TrackInfo = TrackInfo
  { _artist :: Text
  , _album :: Text
  , _track :: Text
  , _duration :: Int64  -- length from MPD.sgLength (seconds — TODO confirm)
  , _timestamp :: Int64 -- unix time (seconds) the track started playing
  }
-- | Last.fm API key; replace the placeholder with your own.
ak :: Request f a APIKey
ak = apiKey "__YOUR_API_KEY__"
-- | Last.fm session key; replace the placeholder with your own.
sk :: Request f Sign SessionKey
sk = sessionKey "__YOUR_SESSION_KEY__"
-- | Last.fm API secret used to sign every request.
secret :: Secret
secret = "__YOUR_SECRET__"
-- | Forever: watch MPD, setting "now playing" for each new track and
-- scrobbling the one that just finished; wait a second between
-- (re)connections to MPD.
main :: IO ()
main = forever $ do
    _ <- eventLoop (\finished current -> setNowPlaying current >> scrobble finished)
    threadDelay 1000000
-- | Connect to MPD and poll the player once a second.  Whenever the
-- current song differs from the last one seen (or the last one must
-- already have finished), call @handler previous current@.  The loop
-- ends — and the MPD session closes — as soon as the player is not in
-- the Playing state.
eventLoop :: (TrackInfo -> TrackInfo -> IO a) -> IO (MPD.Response ())
eventLoop handler = MPD.withMPD $ do
  -- current unix time in seconds, via the "%s" format
  ts <- read . formatTime defaultTimeLocale "%s" <$> liftIO getCurrentTime
  loop (MPD.defaultSong "") ts
  where
    -- ls/lts: last song seen and the time it started
    loop ls lts = do
      liftIO $ threadDelay 1000000
      maybeSong <- MPD.currentSong
      state <- MPD.stState <$> MPD.status
      nts <- read . formatTime defaultTimeLocale "%s" <$> liftIO getCurrentTime
      -- when the player is paused/stopped this 'when' is a no-op and
      -- the loop (hence the whole withMPD block) terminates
      when (state == MPD.Playing) $
        case maybeSong of
          Just s ->
            case () of
              _ | s /= ls || nts - lts > fromIntegral (MPD.sgLength ls) -> do
                    -- song changed, or the old one has been playing
                    -- longer than its length: report and restart timer
                    liftIO $ handler (trackInfo ls lts) (trackInfo s nts)
                    loop s nts
                | otherwise -> loop s lts
          Nothing -> loop ls lts
      where
        -- Build a TrackInfo from an MPD song and its start time.
        trackInfo song ts = TrackInfo
          { _artist = MPD.toText getArtist
          , _album = MPD.toText getAlbum
          , _track = MPD.toText getTrack
          , _duration = fromIntegral $ MPD.sgLength song
          , _timestamp = fromIntegral ts
          }
          where
            info = MPD.sgTags song
            -- fromMaybe covers an absent tag; 'head' assumes a present
            -- tag list is non-empty — TODO confirm MPD guarantees this
            getTrack = fromMaybe "No Title" $ head <$> M.lookup MPD.Title info
            getArtist = fromMaybe "No Artist" $ head <$> M.lookup MPD.Artist info
            getAlbum = fromMaybe "No Album" $ head <$> M.lookup MPD.Album info
-- | Send a signed Track.updateNowPlaying request for the given track
-- to the loh daemon on localhost:9114.  NOTE(review): (<*>) vs (<*)
-- presumably distinguishes required from optional request parameters
-- in liblastfm's builder — confirm against liblastfm docs.
setNowPlaying :: TrackInfo -> IO ()
setNowPlaying TrackInfo { _artist = ar, _track = t, _album = al, _duration = d } =
  void $ send "localhost" 9114 . sign secret $
    Track.updateNowPlaying <*> artist ar <*> track t <* album al <* duration d <*> ak <*> sk
-- | Scrobble a finished track via the loh daemon on localhost:9114,
-- but only when more than half of the track's duration has elapsed
-- since it started playing.
scrobble :: TrackInfo -> IO ()
scrobble TrackInfo { _artist = ar, _track = t, _album = al, _duration = d, _timestamp = ts } = do
  nts <- read . formatTime defaultTimeLocale "%s" <$> getCurrentTime
  when (nts - ts > fromIntegral (d `div` 2)) $
    void $ send "localhost" 9114 . sign secret $
      Track.scrobble <*> artist ar <*> track t <*> timestamp nts <* album al <*> ak <*> sk
| dmalikov/loh | examples/scrobbler.hs | mit | 2,943 | 0 | 23 | 732 | 1,001 | 517 | 484 | 68 | 2 |
module Probability.Distribution.OnTree where
import Probability.Random
import BAliPhy.ATModel
import Tree
import SModel
import Bio.Sequence -- for sequence_to_indices
import Bio.Alignment
import Bio.Alphabet -- for type Alphabet
import Data.Matrix
-- FIXME: need polymorphism.
-- This needs to be after weighted_frequency_matrix.
-- Because we have no polymorphism, wfm needs to be defined after MixtureModel and MixtureModels.
-- | Bundle of per-likelihood observables registered under the
-- "properties" property by the annotated likelihood functions below.
data CTMCOnTreeProperties = CTMCOnTreeProperties {
    subst_root :: Int,
    transition_ps :: Array Int (EVector (Matrix Double)),
    cond_likes :: Array Int (EVector ()),
    anc_seqs :: EVector (),
    likelihood :: LogDouble,
    taxa :: [ CPPString ],
    get_weighted_frequency_matrix :: Matrix Double,
    smap :: EVector Int,
    leaf_sequences :: Array Int (EVector Int),
    alphabet :: Alphabet,
    as :: Array Int PairwiseAlignment,
    n_states :: Int,
    n_base_models :: Int
  }
-- | Annotated substitution likelihood of sequences on a tree with a
-- variable alignment: registers machine edges/properties and returns
-- the likelihood as a singleton list.
annotated_subst_like_on_tree tree alignment smodel sequences = do
  let subst_root = modifiable (numNodes tree - 1)
  let n_leaves = numLeaves tree
      as = pairwise_alignments alignment
      taxa = get_labels tree
      leaf_sequences = listArray' $ map (sequence_to_indices alphabet) $ reorder_sequences taxa sequences
      alphabet = getAlphabet smodel
      smap = stateLetters smodel
      smodel_on_tree = SingleBranchLengthModel tree smodel
      transition_ps = transition_p_index smodel_on_tree
      f = weighted_frequency_matrix smodel
      -- per-branch conditional likelihoods, cached
      cls = cached_conditional_likelihoods
              tree
              leaf_sequences
              as
              alphabet
              transition_ps
              f
              smap
      -- 1- and 2-leaf trees need no internal peeling
      likelihood = if n_leaves == 1 then
                     peel_likelihood_1 (leaf_sequences ! 0) alphabet f
                   else if n_leaves == 2 then
                     peel_likelihood_2 (leaf_sequences ! 0) (leaf_sequences ! 1) alphabet (as ! 0) (transition_ps ! 0) f
                   else
                     peel_likelihood tree cls as (weighted_frequency_matrix smodel) subst_root
      -- NOTE(review): for 1- and 2-leaf trees the ancestral sequences
      -- are the placeholder 0 rather than a vector — confirm consumers
      -- of "anc_seqs" tolerate this.
      ancestral_sequences = if n_leaves == 1 then
                              0
                            else if n_leaves == 2 then
                              0
                            else
                              array_to_vector $ sample_ancestral_sequences tree subst_root leaf_sequences as alphabet transition_ps f cls smap
  in_edge "tree" tree
  in_edge "alignment" alignment
  in_edge "smodel" smodel
  property "subst_root" subst_root
  property "transition_ps" transition_ps
  property "cond_likes" cls
  property "anc_seqs" ancestral_sequences
  property "likelihood" likelihood
  property "taxa" (map list_to_string taxa)
  property "weighted_frequency_matrix" f
  property "smap" smap
  property "leaf_sequences" leaf_sequences
  property "alphabet" alphabet
  property "as" as
  property "n_states" (SModel.nStates smodel)
  property "n_base_models" (SModel.nBaseModels smodel)
  property "properties" (CTMCOnTreeProperties subst_root transition_ps cls ancestral_sequences likelihood (listArray' $ map list_to_string taxa) f smap leaf_sequences alphabet as (SModel.nStates smodel) (SModel.nBaseModels smodel) )
  return [likelihood]
-- | Distribution of sequences evolving on 'tree' under 'smodel' with a
-- variable alignment; density from 'annotated_subst_like_on_tree'.
-- No quantile function is available.
ctmc_on_tree tree alignment smodel =
  Distribution "ctmc_on_tree" (annotated_subst_like_on_tree tree alignment smodel) (no_quantile "ctmc_on_tree") () ()
-- | Annotated substitution likelihood for a FIXED alignment: the
-- alignment is built from the sequences, column-compressed (SEV), and
-- peeled with the SEV variants.  Registers machine edges/properties
-- and returns the likelihood as a singleton list.
annotated_subst_likelihood_fixed_A tree smodel sequences = do
  let subst_root = modifiable (numNodes tree - 1)
  let a0 = alignment_from_sequences alphabet sequences
      -- collapse identical columns; column_counts restores multiplicity
      (compressed_alignment',column_counts,mapping) = compress_alignment $ a0
      n_leaves = numLeaves tree
      taxa = get_labels tree
      compressed_alignment = reorder_alignment taxa compressed_alignment'
      alphabet = getAlphabet smodel
      smap = stateLetters smodel
      smodel_on_tree = SingleBranchLengthModel tree smodel
      leaf_sequences = listArray' $ sequences_from_alignment compressed_alignment
      transition_ps = transition_p_index smodel_on_tree
      f = weighted_frequency_matrix smodel
      cls = cached_conditional_likelihoods_SEV
              tree
              leaf_sequences
              alphabet
              transition_ps
              f
              compressed_alignment
              smap
      likelihood = if n_leaves == 1 then
                     peel_likelihood_1_SEV compressed_alignment alphabet f column_counts
                   else if n_leaves == 2 then
                     peel_likelihood_2_SEV compressed_alignment alphabet (transition_ps!0) f column_counts
                   else
                     peel_likelihood_SEV tree cls f subst_root column_counts
      -- This also needs the map from columns to compressed columns:
      ancestral_sequences = if n_leaves == 1 then
                              a0
                            else if n_leaves == 2 then
                              a0
                            else
                              let ancestral_states = array_to_vector $ sample_ancestral_sequences_SEV
                                                       tree
                                                       subst_root
                                                       leaf_sequences
                                                       alphabet
                                                       transition_ps
                                                       f
                                                       cls
                                                       smap
                                                       mapping
                              in ancestral_sequence_alignment a0 ancestral_states smap
  in_edge "tree" tree
  in_edge "smodel" smodel
  property "subst_root" subst_root
  property "transition_ps" transition_ps
  property "cond_likes" cls
  property "anc_seqs" ancestral_sequences
  property "likelihood" likelihood
  property "taxa" (map list_to_string taxa)
  property "weighted_frequency_matrix" f
  property "smap" smap
  property "leaf_sequences" leaf_sequences
  property "alphabet" alphabet
  property "n_states" (SModel.nStates smodel)
  property "n_base_models" (SModel.nBaseModels smodel)
  -- How about stuff related to alignment compression?
  -- NOTE(review): here CTMCOnTreeProperties gets Nothing for the 'as'
  -- field and a plain list for 'taxa', whereas the variable-alignment
  -- variant passes an array for both — confirm this asymmetry is
  -- intentional (the record declares as :: Array Int PairwiseAlignment).
  property "properties" (CTMCOnTreeProperties subst_root transition_ps cls ancestral_sequences likelihood (map list_to_string taxa) f smap leaf_sequences alphabet Nothing (SModel.nStates smodel) (SModel.nBaseModels smodel) )
  return [likelihood]
-- | Fixed-alignment counterpart of 'ctmc_on_tree'; density from
-- 'annotated_subst_likelihood_fixed_A', no quantile function.
ctmc_on_tree_fixed_A tree smodel =
  Distribution "ctmc_on_tree_fixed_A" (annotated_subst_likelihood_fixed_A tree smodel) (no_quantile "ctmc_on_tree_fixed_A") () ()
| bredelings/BAli-Phy | haskell/Probability/Distribution/OnTree.hs | gpl-2.0 | 6,620 | 0 | 17 | 2,026 | 1,323 | 652 | 671 | 131 | 5 |
module Main where
import System.Environment
import qualified Server as S
import qualified Network.Socket as S
import HTTPWorker
import Proxy
import ProxyAuth
import Data.Default.Class
import Data.Maybe
import System.Exit
import Control.Monad
-- | Proxy configuration assembled from the command line by 'parseArgs'.
data Settings = Settings { bindAddress :: String      -- address to bind on
                         , bufferSize :: Int          -- socket buffer size (bytes)
                         , authentication :: String   -- proxy-auth credentials; "" disables auth
                         , realm :: String            -- proxy-auth realm
                         , https :: Maybe S.HTTPS     -- HTTPS listener, if any
                         , http :: Maybe S.HTTP       -- HTTP listener, if any
                         } deriving (Show)
-- | Defaults: bind everywhere, 256 KiB buffers, no auth, no listeners
-- (main requires at least one of --http/--https to be supplied).
instance Default Settings where
    def = Settings { bindAddress = "0.0.0.0"
                   , bufferSize = 2^18
                   , authentication = ""
                   , realm = ""
                   , https = Nothing
                   , http = Nothing
                   }
-- | Parse the command line, validate the listener configuration, and
-- start the proxy server.
main = do
  args <- getArgs
  let settings = parseArgs args def :: Settings
  -- mirror the relevant fields into the server's own settings record
  let servSett = def { S.bindAddress = bindAddress settings
                     , S.bufferSize = bufferSize settings
                     , S.http = http settings
                     , S.https = https settings
                     } :: S.ServerSettings
  -- https requested but key or cert missing -> refuse to start
  when ((isJust . https) settings &&
        ((null . S.key . fromJust . https) settings ||
         (null . S.cert . fromJust . https) settings)) $ do
    print "You must specify --key and --cert for https to work"
    exitFailure
  when ((isNothing . http) settings && (isNothing . https) settings) $ do
    print "You must specify at least one of --http or --https parameters"
    exitFailure
  -- wrap the request handler in proxy authentication when configured
  let handler = if null (authentication settings) then
                  handleRequest
                else
                  proxyAuth (authentication settings) (realm settings) handleRequest
  -- NB: the '.' below is function composition (application binds
  -- tighter), i.e. S.server servSett (httpWorker handler (Nothing, []))
  S.server servSett.httpWorker handler $ (Nothing, [])
-- | Fold the raw command-line arguments into 'Settings'.
--
-- Short flags are rewritten to their long forms and re-dispatched.
-- Fixes over the original: @--cert@ and @--key@ no longer crash with
-- 'fromJust' when they precede @--https@ (they now start from default
-- HTTPS settings), repeated HTTPS-related flags accumulate instead of
-- resetting, and unknown options abort with a descriptive error
-- instead of a bare non-exhaustive-pattern crash.
--
-- NOTE(review): "-p" is rewritten to "--port" but no "--port" handler
-- exists; it now reports "Unrecognized option" — confirm whether a
-- port option was ever intended.
parseArgs :: [String] -> Settings -> Settings
parseArgs [] s = s
parseArgs ("-p":as) s = parseArgs ("--port":as) s
parseArgs ("-b":as) s = parseArgs ("--bindaddr":as) s
parseArgs ("-a":as) s = parseArgs ("--auth":as) s
parseArgs ("--bindaddr":as) s = case as of
  [] -> error "Please specify bind address in front of --bindaddr"
  (b:rest) -> parseArgs rest $ s { bindAddress = b }
parseArgs ("--auth":as) s = case as of
  [] -> error "Please specify authentication in front of --auth"
  (a:rest) -> parseArgs rest $ s { authentication = a }
parseArgs ("--realm":as) s = case as of
  [] -> error "Please specify realm in front of --realm"
  (r:rest) -> parseArgs rest $ s { realm = r }
parseArgs ("--http":as) s = case as of
  [] -> error "Please specify http port in front of --http"
  (r:rest) -> parseArgs rest $ s { http = Just (def { S.httpPort = r }) }
parseArgs ("--https":as) s = case as of
  [] -> error "Please specify https port in front of --https"
  (r:rest) -> parseArgs rest $ s { https = Just ((httpsOrDefault s) { S.httpsPort = r }) }
parseArgs ("--cert":as) s = case as of
  [] -> error "Please specify certificate path in front of --cert"
  (r:rest) -> parseArgs rest $ s { https = Just ((httpsOrDefault s) { S.cert = r }) }
parseArgs ("--key":as) s = case as of
  [] -> error "Please specify key path in front of --key"
  (r:rest) -> parseArgs rest $ s { https = Just ((httpsOrDefault s) { S.key = r }) }
parseArgs (a:_) _ = error ("Unrecognized option: " ++ a)

-- | HTTPS settings accumulated so far, or defaults when none yet.
httpsOrDefault :: Settings -> S.HTTPS
httpsOrDefault = fromMaybe def . https
| amir-sabbaghi/proxy | app/Main.hs | gpl-3.0 | 3,350 | 0 | 18 | 1,096 | 1,076 | 571 | 505 | 71 | 8 |
-- Module : Network.Metric.Sink.Graphite
-- Copyright : (c) 2012-2015 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module Network.Metric.Sink.Graphite (
-- * Sink Functions
Sink(..)
, open
-- * Re-exports
, Group
, Bucket
, Metric(..)
) where
import Data.Time.Clock.POSIX (POSIXTime, getPOSIXTime)
import Network.Metric.Internal
import Network.Socket (SocketType (..))
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as BL
-- | A handle to a Graphite sink: a socket 'Handle' to the carbon
-- daemon plus an optional 'Host' used to prefix metric keys (see 'put').
data Graphite = Graphite (Maybe Host) Handle deriving (Show)
-- Each measurement is rendered as the plaintext "key value timestamp\n"
-- line that carbon expects: 'put' produces the chunks, 'flat' joins
-- them with single spaces, and a newline terminates the record.
instance Sink Graphite where
    push (Graphite host hd) m = do
        time <- getPOSIXTime
        mapM_ (hPush hd . (`BL.append` "\n") . flat . enc time) (measure m)
      where
        -- join the lazy chunks from 'put' with spaces
        flat s = BL.fromChunks [BS.intercalate " " $ BL.toChunks s]
        -- all three metric kinds encode identically for Graphite
        enc t (Counter g b v) = put host g b v t
        enc t (Timer g b v) = put host g b v t
        enc t (Gauge g b v) = put host g b v t
    close (Graphite _ hd) = hClose hd
-- | Open a new Graphite sink over a stream (TCP) connection.
open :: Maybe Host -> HostName -> PortNumber -> IO AnySink
open host hostname port = fOpen (Graphite host) Stream hostname port
-- | Encode a metric into the Graphite format: the key (host/group/
-- bucket), the encoded value, and a whole-second timestamp, as three
-- chunks that the caller joins with spaces (see the Sink instance).
put :: Encodable a
    => Maybe Host
    -> Group
    -> Bucket
    -> a
    -> POSIXTime
    -> BL.ByteString
put host group bucket value time =
    BL.fromChunks [key host (safe group) bucket, encode value, timestamp]
  where
    -- POSIXTime truncated to integral seconds
    timestamp = BS.pack $ show (truncate time :: Integer)
-- | Replace spaces with dots so the group forms a valid segment of a
-- Graphite metric path.
safe :: BS.ByteString -> BS.ByteString
safe = BS.map (\c -> if c == ' ' then '.' else c)
| brendanhay/network-metrics | src/Network/Metric/Sink/Graphite.hs | mpl-2.0 | 2,035 | 0 | 13 | 571 | 536 | 290 | 246 | 37 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.LibraryAgent.Shelves.Books.Borrow
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Borrow a book from the library. Returns the book if it is borrowed
-- successfully. Returns NOT_FOUND if the book does not exist in the
-- library. Returns quota exceeded error if the amount of books borrowed
-- exceeds allocation quota in any dimensions.
--
-- /See:/ <https://cloud.google.com/docs/quota Library Agent API Reference> for @libraryagent.shelves.books.borrow@.
module Network.Google.Resource.LibraryAgent.Shelves.Books.Borrow
(
-- * REST Resource
ShelvesBooksBorrowResource
-- * Creating a Request
, shelvesBooksBorrow
, ShelvesBooksBorrow
-- * Request Lenses
, sbbXgafv
, sbbUploadProtocol
, sbbAccessToken
, sbbUploadType
, sbbName
, sbbCallback
) where
import Network.Google.LibraryAgent.Types
import Network.Google.Prelude
-- | A resource alias for @libraryagent.shelves.books.borrow@ method which the
-- 'ShelvesBooksBorrow' request conforms to.
-- (Auto-generated servant-style route: POST to the captured book name
-- with the standard Google API query parameters.)
type ShelvesBooksBorrowResource =
     "v1" :>
       CaptureMode "name" "borrow" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "access_token" Text :>
               QueryParam "uploadType" Text :>
                 QueryParam "callback" Text :>
                   QueryParam "alt" AltJSON :>
                     Post '[JSON] GoogleExampleLibraryagentV1Book
-- | Borrow a book from the library. Returns the book if it is borrowed
-- successfully. Returns NOT_FOUND if the book does not exist in the
-- library. Returns quota exceeded error if the amount of books borrowed
-- exceeds allocation quota in any dimensions.
--
-- /See:/ 'shelvesBooksBorrow' smart constructor.
-- (Field meanings are documented on the corresponding lenses.)
data ShelvesBooksBorrow =
  ShelvesBooksBorrow'
    { _sbbXgafv :: !(Maybe Xgafv)
    , _sbbUploadProtocol :: !(Maybe Text)
    , _sbbAccessToken :: !(Maybe Text)
    , _sbbUploadType :: !(Maybe Text)
    , _sbbName :: !Text
    , _sbbCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ShelvesBooksBorrow' with the minimum fields required to make a request.
-- All optional fields start as 'Nothing'.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sbbXgafv'
--
-- * 'sbbUploadProtocol'
--
-- * 'sbbAccessToken'
--
-- * 'sbbUploadType'
--
-- * 'sbbName'
--
-- * 'sbbCallback'
shelvesBooksBorrow
    :: Text -- ^ 'sbbName'
    -> ShelvesBooksBorrow
shelvesBooksBorrow pSbbName_ =
  ShelvesBooksBorrow'
    { _sbbXgafv = Nothing
    , _sbbUploadProtocol = Nothing
    , _sbbAccessToken = Nothing
    , _sbbUploadType = Nothing
    , _sbbName = pSbbName_
    , _sbbCallback = Nothing
    }
-- Lenses over the request's fields:

-- | V1 error format.
sbbXgafv :: Lens' ShelvesBooksBorrow (Maybe Xgafv)
sbbXgafv = lens _sbbXgafv (\ s a -> s{_sbbXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
sbbUploadProtocol :: Lens' ShelvesBooksBorrow (Maybe Text)
sbbUploadProtocol
  = lens _sbbUploadProtocol
      (\ s a -> s{_sbbUploadProtocol = a})
-- | OAuth access token.
sbbAccessToken :: Lens' ShelvesBooksBorrow (Maybe Text)
sbbAccessToken
  = lens _sbbAccessToken
      (\ s a -> s{_sbbAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
sbbUploadType :: Lens' ShelvesBooksBorrow (Maybe Text)
sbbUploadType
  = lens _sbbUploadType
      (\ s a -> s{_sbbUploadType = a})
-- | Required. The name of the book to borrow.
sbbName :: Lens' ShelvesBooksBorrow Text
sbbName = lens _sbbName (\ s a -> s{_sbbName = a})
-- | JSONP
sbbCallback :: Lens' ShelvesBooksBorrow (Maybe Text)
sbbCallback
  = lens _sbbCallback (\ s a -> s{_sbbCallback = a})
-- Wires the request record to the route above; requires the
-- cloud-platform OAuth scope.
instance GoogleRequest ShelvesBooksBorrow where
        type Rs ShelvesBooksBorrow =
             GoogleExampleLibraryagentV1Book
        type Scopes ShelvesBooksBorrow =
             '["https://www.googleapis.com/auth/cloud-platform"]
        requestClient ShelvesBooksBorrow'{..}
          = go _sbbName _sbbXgafv _sbbUploadProtocol
              _sbbAccessToken
              _sbbUploadType
              _sbbCallback
              (Just AltJSON)
              libraryAgentService
          where go
                  = buildClient
                      (Proxy :: Proxy ShelvesBooksBorrowResource)
                      mempty
| brendanhay/gogol | gogol-libraryagent/gen/Network/Google/Resource/LibraryAgent/Shelves/Books/Borrow.hs | mpl-2.0 | 4,996 | 0 | 15 | 1,137 | 703 | 413 | 290 | 102 | 1 |
module Main where
--
import Graphics.UI.WX
import Graphics.UI.WXCore
import System.Process
-- | Start the wxHaskell event loop with the demo frame.
main = start gui

-- | Build a frame with a taskbar icon and a close button; the taskbar
-- icon gets a right-click popup menu via 'onTaskBarEvt'.
gui = do f <- frame [text := "Main Window"]
         icn <-iconCreateDefault
         tbi <- taskBarIconCreate
         taskBarIconSetIcon tbi icn "Application Icon"
         evtHandlerOnTaskBarIconEvent tbi (onTaskBarEvt f tbi)
         -- the Close button removes the taskbar icon before closing,
         -- otherwise the icon would outlive the frame
         btClose <- button f [text := "Close",
                              on command := do taskBarIconDelete tbi
                                               close f]
         set f [layout := margin 5 $
                          hfloatRight $ widget btClose ]
-- | Taskbar-icon event handler: right-click shows a popup menu that can
-- re-show the main window or (demo only) display a quit dialog; all
-- other taskbar events are ignored.
onTaskBarEvt f tbi TaskBarIconRightDown =
  do
    popmenu <- menuPane []
    m1 <- menuItem popmenu [text := "Show main Window",
                            on command := set f [visible := True]]
    m2 <- menuItem popmenu [text := "Quit",
                            on command := infoDialog f "Dialog" "Quit pressed"]
    taskBarIconPopupMenu tbi popmenu
    return ()
onTaskBarEvt _ _ _ = return ()
module Main(main) where
import Test.Tasty(defaultMain, testGroup)
import qualified RPM.Version.Tests
-- | Run the tasty test tree; exits non-zero when any test fails.
main :: IO ()
main = defaultMain $ testGroup "Tests"
  [ RPM.Version.Tests.vercmpTests ]
| dashea/bdcs | haskell-rpm/tests/Main.hs | lgpl-2.1 | 204 | 0 | 8 | 38 | 60 | 36 | 24 | 6 | 1 |
module ViperVM.VirtualPlatform.MetaObjects.Matrix where
import ViperVM.VirtualPlatform.MetaObject
-- | A two-dimensional arrangement of meta objects, stored as a list of
-- lists (presumably rows — TODO confirm orientation at use sites).
newtype MetaMatrix = MetaMatrix [[MetaObject]]
| hsyl20/HViperVM | lib/ViperVM/VirtualPlatform/MetaObjects/Matrix.hs | lgpl-3.0 | 148 | 0 | 7 | 13 | 28 | 19 | 9 | 3 | 0 |
{-# LANGUAGE TypeOperators #-}
import Data.Array.Repa (Z(..), (:.)(..), (!))
import qualified Data.Array.Repa as R
-- import Data.List (foldl')
-- | Build a 1024x1024 unboxed repa array with element x+y at (x, y)
-- and print the parallel sum of all its elements.  The commented-out
-- code is leftover experiments with insertion and a sequential fold.
main :: IO ()
main = do
  let t :: R.Array R.U R.DIM2 Int
      t = R.fromListUnboxed (Z :. 1024 :. 1024)
            [x+y | x <- [0..1023], y <- [0..1023]]
  -- s az (x,y,z) = insert z (x,y) az
  -- c = [(x,y,x+y) | x <- [0..1023], y <- [0..1023]]
  -- t' = foldl' s t c
  -- t' = insertRange 1 ((43,27),(1954,2004)) t
  -- v = foldl' (+) 0 [t ! (Z :. x :. y) | x <- [0..1023], y <- [0..1023]]
  -- v' = (abs (1954 - 43) + 1) * (abs (2004 - 27) + 1)
  v <- R.foldAllP (+) 0 t
  -- print t'
  print v
  -- print v'
| bflyblue/quadtree | trepa.hs | unlicense | 711 | 0 | 14 | 224 | 166 | 97 | 69 | 10 | 1 |
{-# LANGUAGE OverloadedStrings #-}
--compile with `cabal exec ghc -- examples/DynamicDatabaseContextFunctions.hs -package project-m36`
--load with `loaddatabasecontextfunctions "DynamicDatabaseContextFunctions" "someDBCFunctions" "examples/DynamicDatabaseContextFunctions.o"`
module DynamicDatabaseContextFunctions where
import ProjectM36.Base
import ProjectM36.Relation
import ProjectM36.DatabaseContextFunction
import ProjectM36.DatabaseContextFunctionError
import qualified ProjectM36.Attribute as A
import qualified Data.Map as M
-- | Database context functions exported by this module for dynamic
-- loading.  "addtestrel" takes no arguments and inserts a relation
-- variable "testRel" with a single Text attribute "word" holding one
-- tuple ("nice").
someDBCFunctions :: [DatabaseContextFunction]
someDBCFunctions = [Function {
                     funcName = "addtestrel",
                     funcType = [],
                     funcBody = externalDatabaseContextFunction addTestRel
                     }]
  where
    addTestRel _ ctx = do
      let attrExprs = [NakedAttributeExpr (Attribute "word" TextAtomType)]
          newRelExpr = MakeRelationFromExprs (Just attrExprs) (TupleExprs UncommittedContextMarker [TupleExpr (M.singleton "word" (NakedAtomExpr (TextAtom "nice")))])
      pure $ ctx { relationVariables =
                     M.insert "testRel" newRelExpr (relationVariables ctx) }
| agentm/project-m36 | examples/DynamicDatabaseContextFunctions.hs | unlicense | 1,205 | 0 | 22 | 250 | 201 | 113 | 88 | 18 | 1 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE GADTs #-}
module Lycopene.Core.Store
( Change
, Volatile
, Stored
) where
import Lycopene.Core.Stage (Stage(..))
-- | An endomorphism applied when updating a stored value.
type Change a = a -> a
-- NOTE(review): 'Modification' is neither exported nor used anywhere in
-- this module — candidate for removal.
type Modification b = b -> b
-- | Operations for store, indexed by the 'Stage' of the value they
-- produce: adding persists a transient value, removing makes a
-- persistent value transient again.
data Store :: Stage -> * -> * where
  Add :: Store 'Transient a -> Store 'Persistent a
  Update :: Change a -> Store 'Persistent a -> Store 'Persistent a
  Fetch :: Store 'Persistent a
  Remove :: Store 'Persistent a -> Store 'Transient a
-- | A store operation yielding a not-yet-persisted value.
type Volatile = Store 'Transient
-- | A store operation yielding a persisted value.
type Stored = Store 'Persistent
| utky/lycopene | src/Lycopene/Core/Store.hs | apache-2.0 | 629 | 0 | 9 | 140 | 187 | 106 | 81 | 18 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Topical.Text.RegexSpec where
import Data.Monoid
import Test.Hspec
import Topical.Text.Regex
-- | Specs for the regex replace helpers: single vs. global
-- replacement, capture-group references, and the textual
-- replacement-template parser.
spec :: Spec
spec = describe "Topical.Text.Regex" $ do
  -- 'them' contains one capture group around the trailing m
  let the = "the"
      them = "the(m)"
      xyzzy = "xyzzy"
  describe "replace" $ do
    it "should replace one instance of a regular expression in a string." $
      replace the "a" "the boy and the girl" `shouldBe`
        "a boy and the girl"
    it "should return the original string if there were no matches." $
      replace xyzzy "!!!" "the boy and the girl" `shouldBe`
        "the boy and the girl"
  describe "replaceAll" $ do
    it "should replace all instances of a regular expression in a string." $
      replaceAll the "a" "the boy and the girl" `shouldBe`
        "a boy and a girl"
    it "should return the original string if there were no matches." $
      replaceAll xyzzy "!!!" "the boy and the girl" `shouldBe`
        "the boy and the girl"
  describe "rgroup" $ do
    it "should pull out a captured group." $
      replace them (rgroup 1) "them boys and those girls" `shouldBe`
        "m boys and those girls"
    it "should combine with other replacements." $
      replace them ("<" <> rgroup 1 <> ">") "them boys and those girls" `shouldBe`
        "<m> boys and those girls"
  describe "parseReplace" $ do
    it "should handle raw strings." $
      replace them (parseReplace "those") "them boys and those girls" `shouldBe`
        "those boys and those girls"
    it "should handle $N groups." $
      replace them (parseReplace "$1") "them boys and those girls" `shouldBe`
        "m boys and those girls"
    it "should handle ${N} groups." $
      replace them (parseReplace "${1}") "them boys and those girls" `shouldBe`
        "m boys and those girls"
    it "should handle interpolating $N groups and raw strings." $
      replace them (parseReplace "l $1 n") "them boys and those girls" `shouldBe`
        "l m n boys and those girls"
    it "should handle interpolating ${N} groups and raw strings." $
      replace them (parseReplace "0${1}2") "them boys and those girls" `shouldBe`
        "0m2 boys and those girls"
    it "should interpolate the whole match for $0." $
      replace them (parseReplace "<$0>") "them boys and those girls" `shouldBe`
        "<them> boys and those girls"
    it "should interpolate an empty string for $N, where N > the number of groups." $
      replace them (parseReplace "<$10>") "them boys and those girls" `shouldBe`
        "<> boys and those girls"
| erochest/topical | specs/Topical/Text/RegexSpec.hs | apache-2.0 | 2,819 | 0 | 17 | 924 | 461 | 225 | 236 | 53 | 1 |
{-# LANGUAGE DeriveDataTypeable, GeneralizedNewtypeDeriving #-}
-- |
-- Module: Network.Riak.JSON
-- Copyright: (c) 2011 MailRank, Inc.
-- License: Apache
-- Maintainer: Bryan O'Sullivan <bos@mailrank.com>
-- Stability: experimental
-- Portability: portable
--
-- This module allows storage and retrieval of JSON-encoded data.
--
-- The functions in this module do not perform any conflict resolution.
module Network.Riak.JSON
(
JSON
, json
, plain
, get
, getMany
, put
, put_
, putMany
, putMany_
) where
import Control.Applicative ((<$>))
import Control.Arrow (first)
import Data.Aeson.Types (FromJSON(..), ToJSON(..))
import Data.Monoid (Monoid)
import Data.Typeable (Typeable)
import Network.Riak.Types.Internal
import qualified Network.Riak.Value as V
-- | Wrapper marking a value for JSON encoding/decoding when converted
-- to/from Riak 'Content' (see the 'V.IsContent' instance below).
newtype JSON a = J {
      plain :: a -- ^ Unwrap a 'JSON'-wrapped value.
    } deriving (Eq, Ord, Show, Read, Bounded, Typeable, Monoid)
-- | Wrap up a value so that it will be encoded and decoded as JSON
-- when converted to/from 'Content'.
json :: (FromJSON a, ToJSON a) => a -> JSON a
json = J
{-# INLINE json #-}

instance Functor JSON where
    fmap f (J a) = J (f a)
    {-# INLINE fmap #-}

-- Content conversion simply round-trips through aeson's Value.
instance (FromJSON a, ToJSON a) => V.IsContent (JSON a) where
    parseContent b c = J `fmap` (V.parseContent b c >>= parseJSON)
    {-# INLINE parseContent #-}

    toContent (J a) = V.toContent (toJSON a)
    {-# INLINE toContent #-}
-- | Retrieve a value. This may return multiple conflicting siblings.
-- Choosing among them is your responsibility.
get :: (FromJSON c, ToJSON c) => Connection -> Bucket -> Key -> R
    -> IO (Maybe ([c], VClock))
get conn bucket key r = fmap convert <$> V.get conn bucket key r

-- | Retrieve many values at once; each result may carry conflicting
-- siblings, as with 'get'.
getMany :: (FromJSON c, ToJSON c) => Connection -> Bucket -> [Key] -> R
        -> IO [Maybe ([c], VClock)]
getMany conn bucket ks r = map (fmap convert) <$> V.getMany conn bucket ks r
-- | Store a single value. This may return multiple conflicting
-- siblings. Choosing among them, and storing a new value, is your
-- responsibility.
--
-- You should /only/ supply 'Nothing' as a 'T.VClock' if you are sure
-- that the given bucket+key combination does not already exist. If
-- you omit a 'T.VClock' but the bucket+key /does/ exist, your value
-- will not be stored.
put :: (FromJSON c, ToJSON c) =>
       Connection -> Bucket -> Key -> Maybe VClock -> c
    -> W -> DW -> IO ([c], VClock)
put conn bucket key mvclock val w dw =
  convert <$> V.put conn bucket key mvclock (json val) w dw

-- | Store a single value, without the possibility of conflict
-- resolution.
--
-- You should /only/ supply 'Nothing' as a 'T.VClock' if you are sure
-- that the given bucket+key combination does not already exist. If
-- you omit a 'T.VClock' but the bucket+key /does/ exist, your value
-- will not be stored, and you will not be notified.
put_ :: (FromJSON c, ToJSON c) =>
        Connection -> Bucket -> Key -> Maybe VClock -> c
     -> W -> DW -> IO ()
put_ conn bucket key mvclock val w dw =
  V.put_ conn bucket key mvclock (json val) w dw
-- | Store many values. This may return multiple conflicting siblings
-- for each value stored. Choosing among them, and storing a new
-- value in each case, is your responsibility.
--
-- You should /only/ supply 'Nothing' as a 'T.VClock' if you are sure
-- that the given bucket+key combination does not already exist. If
-- you omit a 'T.VClock' but the bucket+key /does/ exist, your value
-- will not be stored.
putMany :: (FromJSON c, ToJSON c) =>
           Connection -> Bucket -> [(Key, Maybe VClock, c)]
        -> W -> DW -> IO [([c], VClock)]
putMany conn bucket puts w dw =
  map convert <$> V.putMany conn bucket (map f puts) w dw
  where f (k,v,c) = (k,v,json c) -- wrap each payload for JSON encoding

-- | Store many values, without the possibility of conflict
-- resolution.
--
-- You should /only/ supply 'Nothing' as a 'T.VClock' if you are sure
-- that the given bucket+key combination does not already exist. If
-- you omit a 'T.VClock' but the bucket+key /does/ exist, your value
-- will not be stored, and you will not be notified.
putMany_ :: (FromJSON c, ToJSON c) =>
            Connection -> Bucket -> [(Key, Maybe VClock, c)]
         -> W -> DW -> IO ()
putMany_ conn bucket puts w dw = V.putMany_ conn bucket (map f puts) w dw
  where f (k,v,c) = (k,v,json c) -- wrap each payload for JSON encoding
-- | Strip the 'JSON' wrappers from a sibling list, keeping the vclock.
convert :: ([JSON a], VClock) -> ([a], VClock)
convert (siblings, vclock) = (map plain siblings, vclock)
| bumptech/riak-haskell-client | src/Network/Riak/JSON.hs | apache-2.0 | 4,415 | 0 | 15 | 954 | 1,070 | 600 | 470 | 62 | 1 |
--------------------------------------------------------------------
-- |
-- Copyright : (c) Edward Kmett and Dan Doel 2012-2013
-- License : BSD3
-- Maintainer: Edward Kmett <ekmett@gmail.com>
-- Stability : experimental
-- Portability: non-portable
--
--------------------------------------------------------------------
module Ermine.Console.Completion
( settings
) where
import Control.Lens
import Data.Char
import Data.List
import Data.Set as Set
import Data.Set.Lens
import Data.Monoid
import Ermine.Parser.Keywords
import Ermine.Console.Command
import Ermine.Console.State
import System.Console.Haskeline
-- | Completion word sets: 'startingKeywordSet' is offered for the first
-- word of a line (keywords that may begin a declaration, plus the
-- @:@-prefixed console commands); 'keywordSet' is offered elsewhere.
startingKeywordSet, keywordSet :: Set String
startingKeywordSet = setOf folded startingKeywords
                  <> setOf (folded.cmdName.to (':':)) commands
keywordSet = setOf folded keywords
-- | Recognise a @:load@ console command: after dropping leading
-- whitespace, the first word must start with @:l@ and be a prefix of
-- @:load@ (so @:l@, @:lo@, ... @:load@ all match).
loading :: String -> Bool
loading input = ":l" `isPrefixOf` cmd && cmd `isPrefixOf` ":load"
  where cmd = takeWhile (not . isSpace) (dropWhile isSpace input)
-- | Completion dispatcher.  The first component is the text to the
-- left of the cursor; @rls@ below reverses it, which matches
-- haskeline's reversed-left-context convention -- TODO confirm.
-- First word: command/starting keywords; after a @:load@ command:
-- filenames; otherwise: ordinary keywords.
completed :: (String,String) -> Console (String, [Completion])
completed (ls, rs)
  | ' ' `notElem` ls = completeWith startingKeywordSet (ls, rs)
  | loading rls = completeFilename (ls, rs)
  | otherwise = completeWith keywordSet (ls, rs)
  where rls = reverse ls
-- | Complete the current word from the given keyword set plus any
-- identifiers recorded in the console state ('consoleIds'), offering
-- every entry that the typed prefix matches.  The second argument to
-- 'completeWord' lists the characters that delimit a word.
completeWith :: Set String -> CompletionFunc Console
completeWith kws = completeWord Nothing " ,()[]{}" $ \s -> do
  strs <- use consoleIds
  return $ (strs <> kws)^..folded.filtered (s `isPrefixOf`).to (\o -> Completion o o True)
-- | Haskeline settings for the console: tab completion via 'completed'
-- and persistent history in @.ermine_history@.
settings :: Settings Console
settings = setComplete completed defaultSettings
  { historyFile = Just ".ermine_history"
  }
| ekmett/ermine | src/Ermine/Console/Completion.hs | bsd-2-clause | 1,595 | 0 | 13 | 256 | 427 | 234 | 193 | -1 | -1 |
import System.Directory
import System.IO (hPutStrLn, stderr)
import Options.Applicative
import qualified Data.List as L
import qualified Data.Map.Strict as M
-- import qualified System.IO.Strict as St
import NLP.General
import NLP.Freq
import NLP.Tools
-- | @-p@/@--percent@: percentage of text withheld for testing
-- (default 10).
percent :: Parser Int
percent = option auto (long "percent"
                       <> short 'p'
                       <> metavar "PERCENT"
                       <> value 10
                       <> showDefault
                       <> help h)
  where h = "Percentage of text to be withheld for testing, as \
            \a number in the range [0,100]"

-- | @-o@/@--output@: filename of the database to create.
dbname :: Parser String
dbname = strOption (long "output"
                    <> short 'o'
                    <> metavar "FILENAME"
                    <> value "nlp.db"
                    <> showDefault
                    <> help h)
  where h = "Filename of the database to be built"

-- | @-s@/@--source@: root directory of the source language corpus.
rootdir :: Parser String
rootdir = strOption (long "source"
                     <> short 's'
                     <> metavar "DIRECTORY"
                     <> value "/data/crubadan"
                     <> showDefault
                     <> help h)
  where h = "Root directory of source language data"

-- | Program description/header shown in @--help@ output.
desc = fullDesc
       <> progDesc "Build database for NLP Tools from \
                   \language data on the filesystem"
       <> header "builddb - build nlp database"
-- | Parsed command-line options: database filename, corpus root
-- directory, and the withheld-for-testing percentage.
data Opts = Opts String String Int

-- | Combine the individual option parsers (field order matches 'Opts').
parser = Opts <$> dbname <*> rootdir <*> percent

-- | Run the option parser (with @--help@ support) on the command line.
execOpts = execParser (info (helper <*> parser) desc)

-- | Entry point: parse options, then build the database.
main = execOpts >>= mkdatabase
-- | Create the database file and populate it with the trigram profiles
-- of every language directory found under @dataroot@.
mkdatabase (Opts dbname dataroot prc) =
  do dirs <- datadirs dataroot
     let files = datafilenames dataroot dirs
     createDB dbname
     db <- connectDB dbname
     sequence_ (fmap (processFile db prc) files)
     disconnectDB db

-- | Read one language's sample sentences, split them @p@/@100-p@ by
-- line count, and store trigram frequency profiles for the full text
-- ("dataAll") and each split part ("dataA"/"dataB").
processFile :: Database -> Int -> (String, String) -> IO ()
processFile db p (l,fn) =
  do sents <- datafile (l,fn)
     let (aSents, bSents) = splitLang p sents
         f = smap ftrig
         allData = f sents
         aData = f aSents
         bData = f bSents
     store db "dataAll" allData
     store db "dataA" aData
     store db "dataB" bData
     hPutStrLn stderr $ "Inserted lang <" ++ l ++ "> ..."

-- | Insert one (language, trigram profile) pair into the named data
-- set, together with its total-length row.
store :: Database -> String -> (String, FreqList TriGram) -> IO ()
store db set d = do insertTriGrams db (mkTGRows set d)
                    insertLengths db [(mkLRow set (smap len d))]
-- | Expand a (language, trigram profile) pair into database rows of
-- the form (set, language, trigram, frequency, rank), ranked from
-- most to least frequent via 'tSort'.
mkTGRows :: String
         -> (String, FreqList TriGram)
         -> [(String,String,TriGram,Int,Int)]
mkTGRows set (lang,fl) = let toRow ((tg,fr),n) = (set,lang,tg,fr,n)
                         in fmap toRow (zip ((tSort . M.toList)
                                            (freqMap fl)) [0,1..])
tSort = L.sortBy (\(l1,f1) (l2,f2) -> compare f2 f1)
-- | Build the (set, language, length) row stored alongside a profile.
mkLRow :: String -> (String, Double) -> (String, String, Double)
mkLRow dataset (lang, total) = (dataset, lang, total)
-- | Pair each language directory name with the path of its SAMPSENTS
-- sample-sentence file under the given root.
datafilenames :: String -> [String] -> [(String,String)]
datafilenames root = map withPath
  where withPath d = (d, root ++ "/" ++ d ++ "/SAMPSENTS")
-- | List a directory's entries, dropping the "." and ".." entries.
-- NOTE(review): 'L.delete' removes only the first occurrence of each,
-- which suffices since a directory listing contains each exactly once.
datadirs = fmap (L.delete ".")
         . fmap (L.delete "..")
         . getDirectoryContents
-- | Read a corpus file (lazily, as in 'Prelude.readFile') and tag its
-- contents with the language name.
datafile :: (String, String) -> IO (String, String)
datafile (lang, path) = do
    txt <- readFile path
    return (lang, txt)
-- | Extract a trigram frequency profile from raw text (monomorphic
-- specialisation of the polymorphic 'features').
ftrig :: String -> FreqList TriGram
ftrig = features
-- | Split a language's text by line count: the first part receives
-- @p@ percent of the lines (rounded down), the second the remainder.
-- Newline characters are not preserved in either part.
splitLang :: Int
          -> (String, String)
          -> ((String, String), (String, String))
splitLang p (lang, txt) = ((lang, concat front), (lang, concat back))
  where
    ls = lines txt
    cut = (length ls * p) `div` 100
    (front, back) = splitAt cut ls
| RoboNickBot/nlp-tools | src/BuildDB.hs | bsd-2-clause | 3,599 | 0 | 14 | 1,224 | 1,186 | 622 | 564 | 88 | 1 |
{-# LANGUAGE TemplateHaskell #-}
--------------------------------------------------------------------
-- |
-- Copyright : (c) Edward Kmett and Dan Doel 2012-2013
-- License : BSD3
-- Maintainer: Edward Kmett <ekmett@gmail.com>
-- Stability : experimental
-- Portability: non-portable
--
--------------------------------------------------------------------
module Ermine.Console.State
( Console
, ConsoleState(..)
, HasConsoleState(..)
) where
import Control.Lens
import Control.Monad.State.Strict
import Data.Default
import Data.Set as Set
import Data.Monoid
-- | The monad in which we perform our console interactions
type Console = StateT ConsoleState IO

-- | The extra state we need in order to perform auto-completion in the console
newtype ConsoleState = ConsoleState
  { _consoleIds :: Set String -- ^ The set of extra names that are in scope for auto-completion.
  }

-- | Start with no extra identifiers in scope.
instance Default ConsoleState where
  def = ConsoleState mempty

-- Generates the 'HasConsoleState' class and the 'consoleIds' lens
-- that the export list above re-exports.
makeClassy ''ConsoleState
| ekmett/ermine | src/Ermine/Console/State.hs | bsd-2-clause | 983 | 0 | 7 | 147 | 119 | 77 | 42 | 16 | 0 |
-- Project Euler 113: how many numbers below a googol (10^100) are not
-- "bouncy", i.e. have monotonically non-decreasing or non-increasing
-- digits?  Expected answer: 51161058134250

-- | Count numbers with at most this many digits.
nn = 100

-- | Count the non-bouncy numbers with exactly @n@ digits.
--
-- For a leading digit @x@, the increasing/decreasing counts are given
-- by (n-1)-figurate numbers, which the two rising products below
-- compute (both divided by the common (n-1)! factor @f@).  The 9
-- constant-digit numbers of length @n@ are counted in both families,
-- hence the final @- 9@.
countNonBouncy n = sum [n1 + n2 | x <- [1..9],
                       let n1 = product [10-x..8-x+n],
                       let n2 = product [x+1..x+n-1]] `div` f - 9
    where f = product [1..n-1]

-- | Total non-bouncy numbers with between 1 and @n@ digits.
totalNonBouncy n = sum (map countNonBouncy [1..n])

main = print (totalNonBouncy nn)
| higgsd/euler | hs/113.hs | bsd-2-clause | 532 | 0 | 16 | 145 | 169 | 89 | 80 | 7 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
-- necessary for using the Vector instances,
-- which we want to get specialized mapM_ etc
{-# LANGUAGE OverlappingInstances #-}
module Data.Foldable.Mono (
MFoldable (..)
) where
import Prelude hiding (foldl, foldl1, foldr, foldr1)
import Data.Maybe
import Data.Monoid
-- for instances
import qualified Data.Foldable as Fold
import qualified Data.ByteString as B
import qualified Data.ByteString.Internal as B
import qualified Data.ByteString.Unsafe as B
import Data.Word (Word8)
import Foreign.Storable (Storable (..))
import System.IO.Unsafe (unsafePerformIO)
import Control.Monad.ST (ST)
import qualified Data.Text as T
import qualified Data.Vector as V
import qualified Data.Vector.Storable as VS
import qualified Data.Vector.Unboxed as VU
{- | Monomorphic data structures that can be folded
Minimal complete definition: 'foldMap' or 'foldr'
-}
class MFoldable t where
    -- | The (monomorphic) element type of the container.
    type (Elem t) :: *
    {- | Map each element to a monoid and combine the results -}
    foldMap :: Monoid m => (Elem t -> m) -> t -> m
    foldMap f = foldr (mappend . f) mempty
    {- | Left-associative fold -}
    foldl :: (a -> Elem t -> a) -> a -> t -> a
    foldl f z t = appEndo (getDual (foldMap (Dual . Endo . flip f) t)) z
    {- | Strict version of 'foldl'. -}
    foldl' :: (a -> Elem t -> a) -> a -> t -> a
    -- This implementation from Data.Foldable
    foldl' f a xs = foldr f' id xs a
      where f' x k z = k $! f z x
    -- | A variant of 'foldl' with no base case.  Requires at least 1
    -- list element.
    foldl1 :: (Elem t -> Elem t -> Elem t) -> t -> Elem t
    -- This implementation from Data.Foldable
    -- NOTE(review): the error message says "fold1"; "foldl1" was
    -- probably intended.
    foldl1 f xs = fromMaybe (error "fold1: empty structure")
                  (foldl mf Nothing xs)
      where mf Nothing y = Just y
            mf (Just x) y = Just (f x y)
    {- | Right-associative fold -}
    foldr :: (Elem t -> b -> b) -> b -> t -> b
    foldr f z t = appEndo (foldMap (Endo . f) t) z
    -- | Strict version of 'foldr'
    foldr' :: (Elem t -> b -> b) -> b -> t -> b
    -- This implementation from Data.Foldable
    foldr' f a xs = foldl f' id xs a
      where f' k x z = k $! f x z
    -- | Like 'foldr', but with no starting value
    foldr1 :: (Elem t -> Elem t -> Elem t) -> t -> Elem t
    -- This implementation from Data.Foldable
    foldr1 f xs = fromMaybe (error "foldr1: empty structure")
                  (foldr mf Nothing xs)
      where mf x Nothing = Just x
            mf x (Just y) = Just (f x y)
    -- | Monadic left fold.
    -- NOTE(review): the default goes via 'foldr' with a function
    -- accumulator; instances with a native fold (e.g. Vector)
    -- override it.
    foldM :: (Monad m, MFoldable t) => (a -> Elem t -> m a) -> a -> t -> m a
    foldM f z xs = foldr (\x rest a -> f a x >>= rest) return xs z
    -- | Monadic map, discarding results
    mapM_ :: (MFoldable t, Monad m) => (Elem t -> m b) -> t -> m ()
    mapM_ f = foldr ((>>) . f) (return ())
-- Any polymorphic 'Fold.Foldable' container is trivially 'MFoldable'.
-- NOTE(review): this catch-all instance is why the module enables
-- UndecidableInstances and (deprecated) OverlappingInstances; the
-- concrete instances below overlap with it.
instance (Fold.Foldable t) => MFoldable (t a) where
    type Elem (t a) = a
    foldMap = Fold.foldMap
    foldr = Fold.foldr
    foldr' = Fold.foldr'
    foldr1 = Fold.foldr1
    foldl = Fold.foldl
    foldl' = Fold.foldl'
    foldl1 = Fold.foldl1

-- Strict ByteStrings fold over their bytes.
instance MFoldable B.ByteString where
    type Elem B.ByteString = Word8
    foldr = B.foldr
    foldr' = B.foldr'
    foldr1 = B.foldr1
    foldl = B.foldl
    foldl' = B.foldl'
    foldl1 = B.foldl1
    mapM_ = bsMapM_gen

{-# SPECIALISE bsMapM_gen :: (Word8 -> IO a) -> B.ByteString -> IO () #-}
{-# SPECIALISE bsMapM_gen :: (Word8 -> ST s a) -> B.ByteString -> ST s () #-}
-- | 'mapM_' for ByteString via direct pointer reads.
-- NOTE(review): relies on 'unsafePerformIO' + 'B.inlinePerformIO';
-- presumably safe because the buffer is immutable and each byte is
-- forced (the bang) before being handed to @f@ -- TODO confirm this
-- holds under optimisation.
bsMapM_gen :: Monad m => (Word8 -> m a) -> B.ByteString -> m ()
bsMapM_gen f s = unsafePerformIO $ B.unsafeUseAsCStringLen s mapp
  where
    mapp (ptr, len) = return $ go 0
      where
        go i | i == len = return ()
             | otherwise = let !b = B.inlinePerformIO $
                                    peekByteOff ptr i
                           in f b >> go (i+1)

-- Text folds over its characters.  Note: no foldr'/foldM/mapM_
-- overrides here, so those fall back to the class defaults.
instance MFoldable T.Text where
    type Elem T.Text = Char
    foldr = T.foldr
    foldr1 = T.foldr1
    foldl = T.foldl
    foldl' = T.foldl'
    foldl1 = T.foldl1

-- Boxed vectors: delegate everything, including foldM/mapM_.
instance MFoldable (V.Vector a) where
    type Elem (V.Vector a) = a
    foldr = V.foldr
    foldr' = V.foldr'
    foldr1 = V.foldr1
    foldl = V.foldl
    foldl' = V.foldl'
    foldl1 = V.foldl1
    foldM = V.foldM
    mapM_ = V.mapM_

-- Storable vectors.
instance (Storable a) => MFoldable (VS.Vector a) where
    type Elem (VS.Vector a) = a
    foldr = VS.foldr
    foldr' = VS.foldr'
    foldr1 = VS.foldr1
    foldl = VS.foldl
    foldl' = VS.foldl'
    foldl1 = VS.foldl1
    foldM = VS.foldM
    mapM_ = VS.mapM_

-- Unboxed vectors.
instance (VU.Unbox a) => MFoldable (VU.Vector a) where
    type Elem (VU.Vector a) = a
    foldr = VU.foldr
    foldr' = VU.foldr'
    foldr1 = VU.foldr1
    foldl = VU.foldl
    foldl' = VU.foldl'
    foldl1 = VU.foldl1
    foldM = VU.foldM
    mapM_ = VU.mapM_
mapM_ = VU.mapM_
| JohnLato/mono-foldable | src/Data/Foldable/Mono.hs | bsd-3-clause | 4,975 | 0 | 15 | 1,468 | 1,584 | 854 | 730 | 114 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Tests.P1Tests
( p1Tests
) where
import Test.Hspec
import Test.QuickCheck
import Haskell99Pointfree.P1
import Data.Maybe
import Control.Exception
import Data.Either
-- | Spec covering every implementation of problem 1 ("last element"):
-- concrete example cases plus QuickCheck properties over non-empty
-- (or, where supported, arbitrary) lists.
-- NOTE(review): "porperties" (two occurrences) is a typo in the test
-- descriptions; left untouched since descriptions are runtime output.
p1Tests :: SpecWith ()
p1Tests = describe "Problem 1: return the last element of a list" $ do
  describe "testing version p1" $ do
    it "p1 with [1,2,3,4]" $
      p1 [1,2,3,4] `shouldBe` Just 4
    it "p1 with []" $
      p1 ([] :: [Int]) `shouldBe` Nothing
  describe "testing porperties of p1" $
    it "p1 with nonempty list" $ property
      (\(x::[Int]) -> not (null x) ==> (p1 x == (Just $ last x)))
  describe "testing version p1\'" $ do
    it "p1\' with [1,2,3,4]" $
      p1' [1,2,3,4] `shouldBe` 4
    it "p1\' with []" $
      evaluate (p1' []) `shouldThrow` anyException
  describe "testing properties of p1\'" $
    it "p1\' with nonempty list" $ property
      (\(x::[Bool]) -> not (null x) ==> (p1' x == last x))
  describe "testing version p1\'\'" $ do
    it "p1\'\' with [1,2,3,4]" $
      p1'' [1,2,3,4] `shouldBe` 4
    it "p1\'\' with []" $
      evaluate (p1'' []) `shouldThrow` anyException
  describe "testing properties of p1\'\'" $
    it "p1\'' with nonempty list" $ property
      (\(x::String) -> not (null x) ==> (p1'' x == last x))
  describe "testing version p1'''" $ do
    it "p1''' with [1,2,3,4]" $
      p1''' [1,2,3,4] `shouldBe` Right 4
    it "p1''' with []" $
      p1''' ([] :: [Int]) `shouldBe` Left "no last element for empty lists"
  describe "testing properties of p1'''" $
    it "p1''' with arbitrary lists" $ property
      (\(x::String) -> (null x && isLeft (p1''' []) ) || (p1''' x == Right (last x)))
  describe "testing version p1''''" $ do
    it "p1'''' with [1,2,3,4] 5" $
      p1'''' [1,2,3,4] 5 `shouldBe` 4
    it "p1'''' with [] 5" $
      p1'''' [] 5 `shouldBe` 5
  describe "testing properties of p1''''" $
    it "p1'''' with arbitrary lists" $ property
      (\(x::[Int]) (y::Int) -> if null x then p1'''' x y == y else p1'''' x y == last x )
  describe "testing version p1_5" $ do
    it "p1_5 with [1,2,3,4]" $
      p1_5 [1,2,3,4] `shouldBe` Just 4
    it "p1_5 with []" $
      p1_5 ([] :: [Int]) `shouldBe` Nothing
  describe "testing porperties of p1_5" $
    it "p1_5 with nonempty list" $ property
      (\(x::[Int]) -> (null x && isNothing (p1_5 x) ) || (p1_5 x == Just (last x)))
| SvenWille/Haskell99Pointfree | test/Tests/P1Tests.hs | bsd-3-clause | 2,389 | 0 | 18 | 596 | 925 | 464 | 461 | 59 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
module Common where
import Control.Monad
import Control.Monad.Free
import Control.Monad.Trans.Demarcate
import Control.Monad.Trans.Class
-- | Instructions for a toy language.
data ProgramF next
  = Output String next -- output something
  | Input (String -> next) -- get something

-- Map over the continuation held in each instruction.
-- NOTE(review): this is exactly what DeriveFunctor would generate.
instance Functor ProgramF where
  fmap f (Output s x) = Output s (f x)
  fmap f (Input g) = Input (f . g)
-- | Low-level program is just a Free monad over ProgramF.
type Program = Free ProgramF

-- | Output command, lifted through an arbitrary monad transformer.
output :: (MonadFree ProgramF m, MonadTrans t) => String -> t m ()
output s = lift . liftF $ Output s ()

-- | Input command, lifted through an arbitrary monad transformer.
input :: (MonadFree ProgramF m, MonadTrans t) => t m String
input = lift . liftF $ Input id

-- | A demarcated 'Program' under some transformer @t@.
type P t = Demarcate t Program
-- | Interpreter for a low-level program: 'Output' prints the string to
-- stdout, 'Input' reads a line from stdin.
runProgram :: Program a -> IO a
runProgram = iterM runProgramF
  where
    runProgramF (Output s next) = putStrLn s >> next
    runProgramF (Input next) = getLine >>= next

-- | Run a demarcated program, given a way to collapse the transformer
-- layer back into a plain 'Program'.
runP :: (Monad (t Program), MonadTrans t) => (t Program a -> Program b) -> P t a -> IO b
runP runT = runProgram . runT . execDemarcate
-- | A hacking transformation: run the extra program @h@ and prepend a
-- \"[hacked] \" tag before every low-level 'Output' instruction.
-- All other commands are left untouched.
hack :: P t () -> P t a -> P t a
hack h = transformDemarcateFree hackF
  where
    -- Rewrite Output, resuming with the original continuation.
    hackF (Output s next) = do
      h
      output $ "[hacked] " ++ s
      next
    -- Everything else passes through unchanged.
    hackF cmd =
      wrapT cmd
| fizruk/demarcate | examples/Common.hs | bsd-3-clause | 1,511 | 0 | 10 | 342 | 477 | 249 | 228 | 33 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
-- |
-- Copyright : (c) 2016 Harendra Kumar
--
-- License : BSD-3-Clause
-- Maintainer : harendra.kumar@gmail.com
-- Stability : experimental
-- Portability : GHC
--
import Control.Monad (when)
import Data.Char (chr, isSpace, ord, toUpper)
#if MIN_VERSION_base(4,8,0)
import Data.Function ((&))
#endif
import Data.List (intercalate, isPrefixOf)
import Data.List.Split (splitOn)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Normalize (NormalizationMode(NFD, NFKD, NFC, NFKC), normalize)
import Text.Printf (printf)
#if !MIN_VERSION_base(4,8,0)
-- | Reverse application, backfilled for old @base@ (Data.Function
-- exports it from base 4.8 onwards).
(&) :: a -> (a -> b) -> b
x & f = f x
#endif
-- | Render a character's code point as uppercase hex, zero-padded to
-- at least four digits (e.g. \'A\' -> "0041", U+1F600 -> "1F600").
-- Uses printf's uppercase @%X@ conversion directly instead of
-- lowercasing with @%x@ and then mapping 'toUpper'.
chrToHex :: Char -> [Char]
chrToHex = printf "%.4X" . ord

-- | Hex-encode every character of a string, separated by spaces.
strToHex :: [Char] -> String
strToHex = unwords . map chrToHex
-- | Check that applying @op@ to @c2@ yields @c1@.  On mismatch, print
-- the operation name and the hex code points of input, actual and
-- expected values, and return False.
checkEqual :: String -> (Text -> Text) -> (Text, Text) -> IO Bool
checkEqual opName op (c1, c2) =
    if c1 /= op c2 then do
        putStrLn $ opName ++ " " ++ txtToHex c2
                 ++ " = " ++ txtToHex (op c2)
                 ++ "; Expected: " ++ txtToHex c1
        return False
    else return True
    where
        txtToHex = strToHex . T.unpack
-- | Normalize the second element of every pair under the given mode
-- and check that it equals the first element; True only when every
-- pair matches.  (Replaces the redundant @all (== True)@ with 'and'.)
checkOp :: String -> NormalizationMode -> [(Text, Text)] -> IO Bool
checkOp name op pairs = fmap and (mapM (checkEqual name (normalize op)) pairs)
-- The five fields of a NormalizationTest.txt line are
-- (source, NFC, NFD, NFKC, NFKD) = (c1, c2, c3, c4, c5).  Each checker
-- below encodes the UAX #15 conformance invariants for one normal
-- form; for NFC these are:
--   c2 == toNFC(c1) == toNFC(c2) == toNFC(c3), and
--   c4 == toNFC(c4) == toNFC(c5).
checkNFC :: (Text, Text, Text, Text, Text) -> IO Bool
checkNFC (c1, c2, c3, c4, c5) =
    checkOp "toNFC" NFC $
        concat [ map (c2,) [c1, c2, c3]
               , map (c4,) [c4, c5]
               ]

checkNFD :: (Text, Text, Text, Text, Text) -> IO Bool
checkNFD (c1, c2, c3, c4, c5) =
    checkOp "toNFD" NFD $
        concat [ map (c3,) [c1, c2, c3]
               , map (c5,) [c4, c5]
               ]

checkNFKC :: (Text, Text, Text, Text, Text) -> IO Bool
checkNFKC (c1, c2, c3, c4, c5) =
    checkOp "toNFKC" NFKC $ map (c4,) [c1, c2, c3, c4, c5]

checkNFKD :: (Text, Text, Text, Text, Text) -> IO Bool
checkNFKD (c1, c2, c3, c4, c5) =
    checkOp "toNFKD" NFKD $ map (c5,) [c1, c2, c3, c4, c5]
-- | Parse one data line (five semicolon-separated code point fields)
-- and run all four normal-form checkers on it, aborting with context
-- on the first failure.
checkAllTestCases :: Int -> String -> IO ()
checkAllTestCases lineno line = do
    case splitOn ";" line of
        c1 : c2 : c3 : c4 : c5 : _ -> do
            let cps = map cpToText [c1, c2, c3, c4, c5]
            mapM_ (checkOneTestCase cps)
                  [checkNFD, checkNFKD, checkNFC, checkNFKC]
        _ -> error $ "Unrecognized line: " ++ line
    where
        -- A field is a space-separated list of hex code points.
        -- NOTE(review): uses partial 'read' (via "0x" prefixing); a
        -- malformed field aborts the whole run with a read error.
        cpToText xs = T.pack $ map (chr . read . ("0x" ++)) (words xs)
        -- On failure, re-print the failing line (raw and decoded)
        -- before bailing out.
        checkOneTestCase cps f = do
            res <- f (tuplify cps)
            when (not res) $ do
                putStrLn ("Failed at line: " ++ show lineno)
                putStrLn line
                putStrLn $ codes ++ "; # (" ++ txt
                error "Bailing out"
            where
                strs = map T.unpack cps
                codes = intercalate ";" $ map strToHex strs
                txt = intercalate "; " (map T.unpack cps)
        tuplify [c1, c2, c3, c4, c5] = (c1, c2, c3, c4, c5)
        tuplify _ = error "tuplify bad arguments"
-- | Dispatch one numbered line of the test file: lines beginning with
-- \'@\' mark a test block and are simply echoed; everything else is
-- treated as a test case.
checkLine :: (Int, String) -> IO ()
checkLine (lineno, line)
    | "@" `isPrefixOf` line = putStrLn line
    | otherwise = checkAllTestCases lineno line
-- | Run every test case in the given NormalizationTest-format file,
-- skipping blank lines and @#@ comments.
-- NOTE(review): uses lazy 'readFile'; acceptable for a test driver.
testNormalize :: FilePath -> IO ()
testNormalize file = do
    contents <- readFile file
    let ls = lines contents -- split into lines
             & map (dropWhile isSpace) -- trim leading spaces
             & zip [1..] -- add numbering
             & filter ((/= []) . snd) -- remove blank lines
             & filter (not . ("#" `isPrefixOf`) . snd) -- remove comments
    checkAll ls
    where
        checkAll (x:xs) = checkLine x >> checkAll xs
        checkAll [] = return ()
-- | Run the official Unicode conformance suite, then the extra cases.
main :: IO ()
main = do
    testNormalize "unicode-data/ucd/NormalizationTest.txt"
    -- Additional test cases not in the unicode standard suite
    testNormalize "unicode-data/extra/NormalizationTest.txt"
| harendra-kumar/unicode-transforms | test/NormalizationTest.hs | bsd-3-clause | 4,219 | 0 | 16 | 1,359 | 1,435 | 784 | 651 | 87 | 3 |
{-# language ForeignFunctionInterface, CPP #-}
-- | This module can be used to get and set
-- STICKYKEYS.SKF_HOTKEYACTIVE on windows.
-- If set to True, pressing shift five
-- times will result in a window popping up
-- asking whether to activate the sticky keys
-- feature.
--
-- On other platforms, all functions have no
-- effect but can be used riskless.
module System.Win32.StickyKeysHotKey where
import Control.Applicative
import Control.Exception
#ifdef mingw32_HOST_OS
-- | Returns the current state of STICKYKEYS.SKF_HOTKEYACTIVE.
foreign import ccall unsafe "c_getHotKeyActive" getHotKeyActive
    :: IO Bool
-- | Sets the current state of STICKYKEYS.SKF_HOTKEYACTIVE.
foreign import ccall unsafe "c_setHotKeyActive" setHotKeyActive
    :: Bool -> IO ()
-- | Sets STICKYKEYS.SKF_HOTKEYACTIVE to False during the
-- execution of the given command. Resets the original state
-- afterwards -- 'bracket' guarantees the reset even if the command
-- throws.
withHotKeyDeactivated :: IO a -> IO a
withHotKeyDeactivated cmd =
    bracket
        (getHotKeyActive <* setHotKeyActive False)
        setHotKeyActive
        (const cmd)
#else
-- Non-Windows stubs: report inactive, set nothing, wrap nothing.
-- | Returns the current state of STICKYKEYS.SKF_HOTKEYACTIVE.
getHotKeyActive :: IO Bool
getHotKeyActive = return False
-- | Sets the current state of STICKYKEYS.SKF_HOTKEYACTIVE.
setHotKeyActive :: Bool -> IO ()
setHotKeyActive = const $ return ()
-- | Sets STICKYKEYS.SKF_HOTKEYACTIVE to False during the
-- execution of the given command. Resets the original state
-- afterwards
withHotKeyDeactivated :: IO a -> IO a
withHotKeyDeactivated = id
#endif
| nikki-and-the-robots/stickyKeysHotKey | src/System/Win32/StickyKeysHotKey.hs | bsd-3-clause | 1,601 | 0 | 8 | 320 | 121 | 72 | 49 | 10 | 1 |
module Main (
main
) where
import System.Environment (getArgs)
import Parser
import Render
import Controller
import Geometry
import Environment
import SampleInput
-- | Parse rover input and either render the parse error or run the
-- simulation and render its result, in both cases as a String.
run s = case parseInput s of
  Left err -> show err
  Right parsedInput -> render . runOverallInput $ parsedInput
-- | Print usage information for the three command-line modes.
printHelpMessage = putStrLn (unlines helpLines)
  where
    helpLines =
      [ "rover --interactive"
      , "   accept rover input on standard input and print output to standard out"
      , "rover --run"
      , "   run the sample rover input and print the results to standard out"
      , "rover --parse"
      , "   accept rover input on standard input and show parsed version on standard out"
      ]
-- | Dispatch on the first command-line argument; any unrecognized
-- argument list prints the help text.
main = do args <- getArgs
          case args of
            ["--interactive"] -> interact run
            ["--run"] -> putStr $ run overallInput
            ["--parse"] -> print $ parseInput overallInput
            _ -> printHelpMessage
| garethrowlands/marsrover | console/Main.hs | bsd-3-clause | 964 | 0 | 11 | 311 | 181 | 97 | 84 | 25 | 4 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-|
Module : Numeric.AERN.IVP.Solver.Events.Bisection
Description : hybrid system simulation
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : mikkonecny@gmail.com
Stability : experimental
Portability : portable
Hybrid system simulation with time bisection.
-}
module Numeric.AERN.IVP.Solver.Events.Bisection
(
solveHybridIVP_UsingPicardAndEventTree_Bisect
)
where
import Numeric.AERN.IVP.Solver.Events.EventTree
import Numeric.AERN.IVP.Specification.Hybrid
import Numeric.AERN.IVP.Solver.Bisection
import Numeric.AERN.RmToRn.Domain
import Numeric.AERN.RmToRn.New
import Numeric.AERN.RmToRn.Evaluation
import Numeric.AERN.RmToRn.Integration
import qualified Numeric.AERN.RealArithmetic.RefinementOrderRounding as ArithInOut
--import Numeric.AERN.RealArithmetic.RefinementOrderRounding.OpsImplicitEffort
import Numeric.AERN.RealArithmetic.Measures
--import Numeric.AERN.RealArithmetic.ExactOps
import qualified Numeric.AERN.NumericOrder as NumOrd
--import Numeric.AERN.NumericOrder.OpsDefaultEffort
import qualified Numeric.AERN.RefinementOrder as RefOrd
--import Numeric.AERN.RefinementOrder.OpsImplicitEffort
import Numeric.AERN.Basics.SizeLimits
import Numeric.AERN.Basics.Consistency
import qualified Data.Map as Map
--import qualified Data.Set as Set
--import qualified Data.List as List
import Numeric.AERN.Misc.Debug
_ = unsafePrint
-- | Solve a hybrid IVP by interval Picard iteration with event trees,
-- bisecting the time domain (via 'solveHybridIVPByBisectingT') until
-- the enclosure improvement falls below @eps@ or the minimum step
-- size is reached.  Returns the (possibly unknown) final state and
-- the bisection tree with per-segment solving information.
solveHybridIVP_UsingPicardAndEventTree_Bisect ::
    (CanAddVariables f,
     CanRenameVariables f,
     CanEvaluate f,
     CanCompose f,
     CanPartiallyEvaluate f,
     HasProjections f,
     HasConstFns f,
     RefOrd.IntervalLike f,
     HasAntiConsistency f,
     NumOrd.RefinementRoundedLattice f,
     RefOrd.PartialComparison f,
     RoundedIntegration f,
     ArithInOut.RoundedAdd f,
     ArithInOut.RoundedSubtr f,
     ArithInOut.RoundedMultiply f,
     ArithInOut.RoundedMixedAdd f (Domain f),
     ArithInOut.RoundedReal (Domain f),
     RefOrd.IntervalLike (Domain f),
     HasAntiConsistency (Domain f),
     Domain f ~ Imprecision (Domain f),
     solvingInfo ~ (Domain f, Maybe (HybridSystemUncertainState (Domain f)), [(HybSysMode, EventInfo f)]),
     Show f, Show (Domain f), Show (Var f), Eq (Var f))
    =>
    SizeLimits f {-^ size limits for all function -} ->
    PartialEvaluationEffortIndicator f ->
    CompositionEffortIndicator f ->
    EvaluationEffortIndicator f ->
    IntegrationEffortIndicator f ->
    RefOrd.PartialCompareEffortIndicator f ->
    ArithInOut.AddEffortIndicator f ->
    ArithInOut.MultEffortIndicator f ->
    ArithInOut.MixedAddEffortIndicator f (Domain f) ->
    ArithInOut.RoundedRealEffortIndicator (Domain f) ->
    Domain f {-^ initial widening @delta@ -} ->
    Int {-^ @m@ -} ->
    Var f {-^ @t0@ - the initial time variable -} ->
    Domain f {-^ min step size @s@ -} ->
    Domain f {-^ max step size @s@ -} ->
    Imprecision (Domain f) {-^ split improvement threshold @eps@ -} ->
    HybridIVP f
    ->
    (
     Maybe (HybridSystemUncertainState (Domain f))
    ,
     (
      BisectionInfo solvingInfo (solvingInfo, Maybe (Imprecision (Domain f)))
     )
    )
solveHybridIVP_UsingPicardAndEventTree_Bisect
        sizeLimits effPEval effCompose effEval effInteg effInclFn effAddFn effMultFn effAddFnDom effDom
        delta m t0Var minStepSize maxStepSize splitImprovementThreshold
        hybivpG
    = solve hybivpG
    where
    solve hybivp =
        solveHybridIVPByBisectingT
            directSolver
            effDom splitImprovementThreshold minStepSize maxStepSize
            hybivp
    -- One direct (non-bisected) solve on a subinterval; the @depth@
    -- of the bisection increases the event-tree effort (10 + 2*depth).
    directSolver depth hybivp =
        (maybeFinalStateWithInvariants, (tEnd, maybeFinalStateWithInvariants, modeEventInfoList))
        where
        tEnd = hybivp_tEnd hybivp
        -- Re-apply each mode's invariant to the state handed to the
        -- next segment; a failing invariant here indicates an internal
        -- inconsistency, hence the hard error.
        maybeFinalStateWithInvariants
            = fmap filterInvariants maybeFinalState
            where
            filterInvariants st =
                Map.mapWithKey filterInvariantsVec st
                where
                filterInvariantsVec mode vec =
                    case invariant vec of
                        Just res -> res
                        _ -> error $
                            "mode invariant failed on a value passed between two segments:"
                            ++ "\n mode = " ++ show mode
                            ++ "\n vec = " ++ show vec
                    where
                    Just invariant =
                        Map.lookup mode modeInvariants
                modeInvariants = hybsys_modeInvariants $ hybivp_system hybivp
        (maybeFinalState, modeEventInfoList) =
            solveHybridIVP_UsingPicardAndEventTree
                sizeLimits effPEval effCompose effEval effInteg effInclFn effAddFn effMultFn effAddFnDom effDom
                (10 + 2 * depth)
                delta m
                t0Var
                hybivp
| michalkonecny/aern | aern-ivp/src/Numeric/AERN/IVP/Solver/Events/Bisection.hs | bsd-3-clause | 5,098 | 0 | 31 | 1,357 | 899 | 484 | 415 | 101 | 2 |
{-# OPTIONS_GHC -Wall #-}
{-# Language DoAndIfThenElse #-}
module Classy.State ( System(..)
, StateT
, State
, Identity
-- * scalars
, addParam, addAction, addCoord, addSpeed
, derivIsSpeed
, setDeriv
-- * bases
, newtonianBases
, rotXYZ, rotX, rotY, rotZ
, basesWithAngVel
-- * bodies
, addParticle
, addRigidBody
, addForce
, addForceAtCm
, addMoment
, kanes
-- * monad transformer utils
, getSystem
, getSystemT
, liftIO
, run
) where
import Control.Monad.Identity ( Identity, runIdentity )
import Control.Monad.State.Lazy ( StateT, State, get, put, execStateT )
import Control.Monad.Trans ( liftIO )
import Data.HashMap.Lazy ( HashMap )
import qualified Data.HashMap.Lazy as HM
import Dvda ( symDependent, sym )
import qualified Classy.Convenience as CC
import Classy.Differentiation ( ddt )
import Classy.System
import Classy.Types
import Classy.VectorMath ( scaleBasis )
import Classy.OrderedHashSet ( OrderedHashSet )
import qualified Classy.OrderedHashSet as HS
-- NOTE(review): 'DCM' is not exported and appears unused in this
-- module -- candidate for removal.
data DCM = SimpleRot XYZ Sca deriving Show

-- | Accumulated state of a system description being built.
data System = System { csCoords :: OrderedHashSet Sca -- ^ generalized coordinates
                     , csSpeeds :: OrderedHashSet Sca -- ^ generalized speeds
                     , csParams :: OrderedHashSet Sca -- ^ constant parameters
                     , csActions :: OrderedHashSet Sca -- ^ action (input) variables
                     , csCoordDerivs :: HashMap Sca Sca -- ^ coordinate -> its declared derivative
                     , csBases :: OrderedHashSet Bases -- ^ all bases defined so far
                     , csDots :: HashMap (Basis, Basis) Sca -- ^ cached basis dot products (DCM entries)
                     , csBodies :: HashMap Body (Forces,Moments) -- ^ bodies with applied forces/moments
                     , csNewtonianBases :: Maybe Bases -- ^ the unique newtonian bases, once set
                     } deriving Show
-- | Run a pure system-building computation and return the final
-- 'System'.
getSystem :: StateT System Identity a -> System
getSystem = runIdentity . getSystemT

-- | Monad-transformer version of 'getSystem': run the builder over an
-- empty initial system.
getSystemT :: Monad a => StateT System a b -> a System
getSystemT = flip execStateT emptySystem
  where
    emptySystem = System { csCoords = HS.empty
                         , csSpeeds = HS.empty
                         , csParams = HS.empty
                         , csActions = HS.empty
                         , csCoordDerivs = HM.empty
                         , csBases = HS.empty
                         , csDots = HM.empty
                         , csBodies = HM.empty
                         , csNewtonianBases = Nothing
                         }
-- | the one unique newtonian frame which all other frame are defined relative to
-- Calling this a second time is an error: the newtonian bases may
-- only be set once per system.
newtonianBases :: Monad a => StateT System a Bases
newtonianBases = do
  cs <- get
  case csNewtonianBases cs of
    Just _ -> error "newtonianBases: already set!"
    Nothing -> do
      let nb = NewtonianBases
      put $ cs{ csNewtonianBases = Just nb }
      addBases nb
      return nb
-- | Introduce a named constant parameter.
addParam :: Monad a => String -> StateT System a Sca
addParam name = do
  cs <- get
  let p = SExpr (sym name) Nothing
  put $ cs{ csParams = HS.insert p (csParams cs) }
  return p

-- | Introduce a named action (input) variable.
addAction :: Monad a => String -> StateT System a Sca
addAction name = do
  cs <- get
  let u = SExpr (sym name) Nothing
  put $ cs{ csActions = HS.insert u (csActions cs) }
  return u

-- | Introduce a generalized coordinate: a time-dependent symbol with
-- derivative order 0.
addCoord :: Monad a => String -> StateT System a Sca
addCoord name = do
  cs <- get
  let c = SExpr (symDependent name time) (Just 0)
  put $ cs{ csCoords = HS.insert c (csCoords cs) }
  return c

-- | Introduce a generalized speed: a time-dependent symbol with
-- derivative order 1.
addSpeed :: Monad a => String -> StateT System a Sca
addSpeed name = do
  cs <- get
  let s = SExpr (symDependent name time) (Just 1)
  put $ cs{ csSpeeds = HS.insert s (csSpeeds cs) }
  return s
-- | Record @c'@ as the derivative of coordinate @c@; errors if @c@ is
-- not a generalized coordinate or already has a derivative set.
setDeriv :: Monad a => Sca -> Sca -> StateT System a ()
setDeriv c c' =
  if not (isCoord c)
  then error "you can only set the derivative of a coordinate with setDeriv"
  else do
    cs <- get
    let err = error $ "setDeriv: the derivative of " ++ show c ++ "is already set"
        newCoordDerivs = HM.insertWith err c c' (csCoordDerivs cs)
    put $ cs{ csCoordDerivs = newCoordDerivs }

-- | Declare that a coordinate's time derivative is itself a
-- generalized speed; registers and returns that speed.
derivIsSpeed :: Monad a => Sca -> StateT System a Sca
derivIsSpeed c = do
  let s = ddt c
  if not (isCoord c)
  then error $ "derivIsSpeed given \"" ++ show s ++ "\" which is not a generalized coordinate"
  else do
    cs <- get
    let newSpeeds = if HS.member s (csSpeeds cs)
                    then error $ "derivIsSpeed: " ++ show s ++ "is already a generalized speed"
                    else HS.insert s (csSpeeds cs)
    put $ cs{ csSpeeds = newSpeeds }
    setDeriv c s
    return s
-- | Add a point mass at the given position; errors if an identical
-- particle has already been added.
addParticle :: Monad a => Sca -> Point -> StateT System a Body
addParticle mass position = do
  cs <- get
  let err = error $ "error: you tried to add an existing particle with \"addParticle\"\n"++show p
      p = Particle mass position
  put $ cs{ csBodies = HM.insertWith err p (Forces [], Moments []) (csBodies cs) }
  return p

-- | Add a rigid body with mass, inertia dyadic, position and
-- body-fixed bases; errors on duplicates.
addRigidBody :: Monad a => Sca -> Dyadic -> Point -> Bases -> StateT System a Body
addRigidBody mass dyadic position bases = do
  cs <- get
  let err = error $ "error: you tried to add an existing rigid body with \"addRigidBody\"\n"++show rb
      rb = RigidBody mass dyadic position bases
  put $ cs{ csBodies = HM.insertWith err rb (Forces [], Moments []) (csBodies cs) }
  return rb
-- | add a force to a rigidy body to be applied at a given point
-- (also accepts particles; the two clauses differ only in their
-- error messages for unknown bodies).
addForce :: Monad a => Body -> Point -> Vec -> StateT System a ()
addForce p@(Particle _ _) pos force = do
  cs <- get
  let newForcesMoments = case HM.lookup p (csBodies cs) of
        Nothing -> error $ "addForce: called on unknown particle: " ++ show p
        Just (Forces fs,ts) -> (Forces ((pos,force):fs), ts)
  put $ cs{ csBodies = HM.insert p newForcesMoments (csBodies cs) }
addForce rb@(RigidBody{}) pos force = do
  cs <- get
  let newForcesMoments = case HM.lookup rb (csBodies cs) of
        Nothing -> error $ "addForce: called on unknown rigid body: " ++ show rb
        Just (Forces fs, ts) -> (Forces ((pos,force):fs), ts)
  put $ cs{ csBodies = HM.insert rb newForcesMoments (csBodies cs) }
-- | add a force to a rigidy body to be applied at the body's center of mass
addForceAtCm :: Monad a => Body -> Vec -> StateT System a ()
addForceAtCm b = addForce b (getCMPos b)

-- | Apply a pure moment to a rigid body; calling this on a particle
-- is an error.
addMoment :: Monad a => Body -> Vec -> StateT System a ()
addMoment p@(Particle _ _) _ =
  error $ "addMoment: called on particle: " ++ show p ++ ", can only call addMoment on a rigid body"
addMoment rb@(RigidBody{}) moment = do
  cs <- get
  let newForcesMoments = case HM.lookup rb (csBodies cs) of
        Nothing -> error $ "addMoment: called on unknown rigid body: " ++ show rb
        Just (fs, Moments ts) -> (fs, Moments (moment:ts))
  put $ cs{ csBodies = HM.insert rb newForcesMoments (csBodies cs) }
-- | Internal helper: record a bases object, rejecting duplicates.
addBases :: Monad a => Bases -> StateT System a ()
addBases b = do
  cs <- get
  let newBases =
        if HS.member b (csBases cs)
        then error $ "error: you've defined a bases that already exists: " ++ show b
        else HS.insert b (csBases cs)
  put $ cs{ csBases = newBases }
-- should check for generalized speeds/coords
-- | define a new frame as x, y or z rotation about given frame, providing the name of the new frame
-- Also caches the nine direction-cosine entries between the old and
-- new bases in 'csDots' for later simplification.
rotXYZ :: Monad a => XYZ -> Bases -> Sca -> String -> StateT System a Bases
rotXYZ xyz b0 q name = do
  cs <- get
  let newBases' =
        if name `elem` (["N","n"] ++ map show (HS.toList (csBases cs)))
        then error $ "rot" ++ show xyz ++ " error: basis name \""++name++"\" has already been used"
        else RotatedBases b0 (RotCoord (scaleBasis q rotationBasis)) name
        where
          rotationBasis = Basis b0 xyz
      newBases =
        if HS.member newBases' (csBases cs)
        then error $ "rot" ++ show xyz ++ " error: you've defined a bases that already exists"
        else newBases'
      a = Basis b0
      b = Basis newBases
      -- sine/cosine of the rotation angle; only symbolic (SExpr)
      -- angles are supported.
      (se,ce) = case q of (SExpr expr _) -> (SExpr (sin expr) Nothing, SExpr (cos expr) Nothing)
                          s -> error $ "rotXYZ got non SExpr value: " ++ show s
      -- Direction cosine table between the old bases (a) and the new
      -- bases (b) for a rotation by q about the chosen axis.
      newDots = HM.fromList $ case xyz of
        X -> [ ((a X, b X), 1), ((a Y, b X), 0), ((a Z, b X), 0)
             , ((a X, b Y), 0), ((a Y, b Y), ce), ((a Z, b Y), se)
             , ((a X, b Z), 0), ((a Y, b Z), -se), ((a Z, b Z), ce)
             ]
        Y -> [ ((a X, b X), ce), ((a Y, b X), 0), ((a Z, b X), -se)
             , ((a X, b Y), 0), ((a Y, b Y), 1), ((a Z, b Y), 0)
             , ((a X, b Z), se), ((a Y, b Z), 0), ((a Z, b Z), ce)
             ]
        Z -> [ ((a X, b X), ce), ((a Y, b X), se), ((a Z, b X), 0)
             , ((a X, b Y), -se), ((a Y, b Y), ce), ((a Z, b Y), 0)
             , ((a X, b Z), 0), ((a Y, b Z), 0), ((a Z, b Z), 1)
             ]
      err = error "rotXyz adding dcm compenents that already exist: "
  put $ cs { csDots = HM.unionWith err newDots (csDots cs) }
  addBases newBases
  return newBases

-- | convenience functions for calling rotXYZ
rotX,rotY,rotZ :: Monad a => Bases -> Sca -> String -> StateT System a Bases
rotX = rotXYZ X
rotY = rotXYZ Y
rotZ = rotXYZ Z
-- | @c = frameWithAngVel n (wx,wy,wz) name@ defines a new frame @c@ named @name@
-- which is defined as having angular velocity @wx*cx>+ wy*cy> + wz*cz>@ with respect to frame @n@
-- The angular velocity components must not be generalized coordinates.
basesWithAngVel :: Monad a => Bases -> (Sca,Sca,Sca) -> String -> StateT System a Bases
basesWithAngVel f0 (wx,wy,wz) name
  | any isCoord [wx,wy,wz] =
    error $ "frameWithAngVel can't be given generalized coordinates " ++
            show (filter isCoord [wx,wy,wz]) ++ " as speeds"
  | otherwise = do
    let rb = RotatedBases f0 (RotSpeed (wx,wy,wz)) name
    addBases rb
    return rb
-- | Rewrite a scalar expression, replacing every basis dot product that is
-- recorded in the DCM table (in either orientation, preferring @(b0,b1)@)
-- with its known value.  All other leaves are left untouched.
simplifyDcms :: HashMap (Basis,Basis) Sca -> Sca -> Sca
simplifyDcms hm = go
  where
    go s@(SDot (b0,b1) x) =
      case HM.lookup (b0,b1) hm of
        Just dotted -> x*dotted
        Nothing -> case HM.lookup (b1,b0) hm of
                     Just dotted -> x*dotted
                     Nothing -> s
    go (SNeg x) = negate (go x)
    go (SAdd x y) = go x + go y
    go (SSub x y) = go x - go y
    go (SMul x y) = go x * go y
    go (SDiv x y) = go x / go y
    go s@(SExpr _ _) = s
-- | Form Kane's equations for the system's bodies, forces and moments,
-- then simplify every basis dot product using the system's recorded DCM
-- entries.
kanes :: System -> [Equation Sca]
kanes cs = mapEqs (simplifyDcms (csDots cs)) unsimplifiedEqs
  where
    mapEqs :: (a -> b) -> [Equation a] -> [Equation b]
    mapEqs f' = map (fmapEq f')
      where
        fmapEq f (Equation x c y) = Equation (f x) c (f y)
    unsimplifiedEqs = kaneEqs bodiesForcesMoments speeds
    -- flatten the body -> (forces, torques) map into triples
    bodiesForcesMoments = map (\(body, (fs,ts)) -> (body,fs,ts)) (HM.toList $ csBodies cs)
    speeds = HS.toOrderedList (csSpeeds cs)
-- | Example system: a particle of mass @m@ at distance @r@ along the z axis
-- of bases @B@ (rotated by coordinate @q@ about the newtonian y axis),
-- under gravity along newtonian z and a tension force along B's z axis;
-- returns its Kane's equations.
run :: [Equation Sca]
run = kanes $ getSystem $ do
  n <- newtonianBases
  q <- addCoord "q"
  r <- addCoord "r"
  _ <- derivIsSpeed q
  _ <- derivIsSpeed r
  mass <- addParam "m"
  g <- addParam "g"
  tension <- addParam "T"
  b <- rotY n q "B"
  let r_b_n0 = CC.relativePoint N0 (CC.zVec r b)
  basket <- addParticle mass r_b_n0
  addForceAtCm basket (CC.zVec (mass*g) n)
  addForceAtCm basket (CC.zVec (-tension) b)
| ghorn/classy-dvda | src/Classy/State.hs | bsd-3-clause | 11,307 | 0 | 18 | 3,461 | 4,134 | 2,136 | 1,998 | 239 | 6 |
{-# LANGUAGE OverloadedStrings #-}
module Mapnik.Util (
bracketed
, sepByCommas
, commaWs
, stripWs
, cppDouble
) where
import Data.Attoparsec.Text
import Data.Char (isDigit)
import Data.Monoid
-- | Run @p@ between parentheses, skipping whitespace before the opening
-- parenthesis, around the inner parser, and before the closing parenthesis.
bracketed :: Parser a -> Parser a
bracketed p = do
  skipSpace
  _ <- char '('
  skipSpace
  result <- p
  skipSpace
  _ <- char ')'
  return result
-- | Zero or more occurrences of @p@ separated by commas that may carry
-- surrounding whitespace.
sepByCommas :: Parser a -> Parser [a]
sepByCommas p = p `sepBy'` commaWs
-- | A comma, allowing whitespace on either side.
commaWs :: Parser Char
commaWs = stripWs (char ',')
-- | Run a parser after skipping leading whitespace, then skip any trailing
-- whitespace before returning its result.
stripWs :: Parser a -> Parser a
stripWs p = do
  skipSpace
  result <- p
  skipSpace
  return result
-- | Parse a double, additionally accepting the C++-style form that omits
-- the leading zero (e.g. @.5@), which attoparsec's stock 'double' rejects.
-- The digits are re-parsed with a \"0\" prefixed.
cppDouble :: Parser Double
cppDouble = do
  c <- peekChar'
  if c /= '.' then double else do
    s <- takeWhile1 (\x -> isDigit x || x=='.')
    reparseWith double ("0"<>s)
  -- Fix: 'either' needs a (String -> Parser a) function for the Left case,
  -- so the fail action must be wrapped in 'const'.
  where reparseWith p = either (const (fail "invalid double")) return . parseOnly p
| albertov/hs-mapnik | pure/src/Mapnik/Util.hs | bsd-3-clause | 759 | 0 | 16 | 147 | 283 | 147 | 136 | 25 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE RankNTypes #-}
-- |
-- Module : Data.Binary.Serialise.CBOR.Decoding
-- Copyright : (c) Duncan Coutts 2015
-- License : BSD3-style (see LICENSE.txt)
--
-- Maintainer : duncan@community.haskell.org
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Lorem ipsum...
--
module Data.Binary.Serialise.CBOR.Decoding
( -- * Decode primitive operations
Decoder
, DecodeAction(..)
, getDecodeAction
-- ** Read input tokens
, decodeWord
, decodeWord8
, decodeWord16
, decodeWord32
, decodeWord64
, decodeNegWord
, decodeNegWord64
, decodeInt
, decodeInt8
, decodeInt16
, decodeInt32
, decodeInt64
, decodeInteger
, decodeFloat
, decodeDouble
, decodeBytes
, decodeBytesIndef
, decodeString
, decodeStringIndef
, decodeListLen
, decodeListLenIndef
, decodeMapLen
, decodeMapLenIndef
, decodeTag
, decodeTag64
, decodeBool
, decodeNull
, decodeSimple
-- ** Specialised Read input token operations
, decodeWordOf
, decodeListLenOf
-- ** Branching operations
--, decodeBytesOrIndef
--, decodeStringOrIndef
, decodeListLenOrIndef
, decodeMapLenOrIndef
, decodeBreakOr
-- ** Inspecting the token type
, peekTokenType
, TokenType(..)
-- ** Special operations
--, ignoreTerms
--, decodeTrace
-- * Sequence operations
, decodeSequenceLenIndef
, decodeSequenceLenN
) where
#include "cbor.h"
import GHC.Exts
import GHC.Word
import GHC.Int
import Data.Text (Text)
import Data.ByteString (ByteString)
import Control.Applicative
import Prelude hiding (decodeFloat)
-- | A CBOR decoder in continuation-passing style: given a continuation for
-- the decoded value it yields the 'DecodeAction' program to run.
data Decoder a = Decoder {
       runDecoder :: forall r. (a -> DecodeAction r) -> DecodeAction r
     }
-- | The flattened instruction set a decoder compiles down to.  Each
-- @Consume*@ constructor requests one token of the given kind and carries
-- the continuation to apply to its (mostly unboxed) value; 'Fail' and
-- 'Done' terminate the program.
data DecodeAction a
    = ConsumeWord    (Word# -> DecodeAction a)
    | ConsumeWord8   (Word# -> DecodeAction a)
    | ConsumeWord16  (Word# -> DecodeAction a)
    | ConsumeWord32  (Word# -> DecodeAction a)
    | ConsumeNegWord (Word# -> DecodeAction a)
    | ConsumeInt     (Int#  -> DecodeAction a)
    | ConsumeInt8    (Int#  -> DecodeAction a)
    | ConsumeInt16   (Int#  -> DecodeAction a)
    | ConsumeInt32   (Int#  -> DecodeAction a)
    | ConsumeListLen (Int#  -> DecodeAction a)
    | ConsumeMapLen  (Int#  -> DecodeAction a)
    | ConsumeTag     (Word# -> DecodeAction a)
-- 64bit variants for 32bit machines
#if defined(ARCH_32bit)
    | ConsumeWord64    (Word64# -> DecodeAction a)
    | ConsumeNegWord64 (Word64# -> DecodeAction a)
    | ConsumeInt64     (Int64#  -> DecodeAction a)
    | ConsumeListLen64 (Int64#  -> DecodeAction a)
    | ConsumeMapLen64  (Int64#  -> DecodeAction a)
    | ConsumeTag64     (Word64# -> DecodeAction a)
#endif
    | ConsumeInteger (Integer    -> DecodeAction a)
    | ConsumeFloat   (Float#     -> DecodeAction a)
    | ConsumeDouble  (Double#    -> DecodeAction a)
    | ConsumeBytes   (ByteString -> DecodeAction a)
    | ConsumeString  (Text       -> DecodeAction a)
    | ConsumeBool    (Bool       -> DecodeAction a)
    | ConsumeSimple  (Word#      -> DecodeAction a)
    | ConsumeBytesIndef   (DecodeAction a)
    | ConsumeStringIndef  (DecodeAction a)
    | ConsumeListLenIndef (DecodeAction a)
    | ConsumeMapLenIndef  (DecodeAction a)
    | ConsumeNull         (DecodeAction a)
    | ConsumeListLenOrIndef (Int# -> DecodeAction a)
    | ConsumeMapLenOrIndef  (Int# -> DecodeAction a)
    | ConsumeBreakOr        (Bool -> DecodeAction a)
    | PeekTokenType  (TokenType -> DecodeAction a)
    | Fail String
    | Done a
-- | The kinds of tokens that can appear next in the input stream, as
-- reported by 'peekTokenType'.
data TokenType = TypeUInt
               | TypeUInt64
               | TypeNInt
               | TypeNInt64
               | TypeInteger
               | TypeFloat16
               | TypeFloat32
               | TypeFloat64
               | TypeBytes
               | TypeBytesIndef
               | TypeString
               | TypeStringIndef
               | TypeListLen
               | TypeListLen64
               | TypeListLenIndef
               | TypeMapLen
               | TypeMapLen64
               | TypeMapLenIndef
               | TypeTag
               | TypeTag64
               | TypeBool
               | TypeNull
               | TypeUndef
               | TypeSimple
               | TypeBreak
               | TypeInvalid
  deriving (Eq, Ord, Enum, Bounded, Show)
-- Standard CPS functor/applicative/monad instances; everything is written
-- as a lambda binding and INLINEd so the continuation plumbing disappears
-- after optimisation.
instance Functor Decoder where
    {-# INLINE fmap #-}
    fmap f = \d -> Decoder $ \k -> runDecoder d (k . f)
instance Applicative Decoder where
    {-# INLINE pure #-}
    pure = \x -> Decoder $ \k -> k x
    {-# INLINE (<*>) #-}
    (<*>) = \df dx -> Decoder $ \k ->
                        runDecoder df (\f -> runDecoder dx (\x -> k (f x)))
instance Monad Decoder where
    return = pure
    {-# INLINE (>>=) #-}
    (>>=) = \dm f -> Decoder $ \k -> runDecoder dm (\m -> runDecoder (f m) k)
    {-# INLINE (>>) #-}
    (>>) = \dm dn -> Decoder $ \k -> runDecoder dm (\_ -> runDecoder dn k)
    -- decoding failure maps onto the 'Fail' action, dropping the continuation
    fail msg = Decoder $ \_ -> Fail msg
-- | Extract the 'DecodeAction' program from a decoder by supplying 'Done'
-- as the final continuation.
getDecodeAction :: Decoder a -> DecodeAction a
getDecodeAction (Decoder k) = k Done
---------------------------------------
-- Read input tokens of various types
--
-- Each primitive decoder below emits the matching 'DecodeAction', boxing
-- the unboxed result before handing it to the continuation.  The CPP
-- branches exist because on 64-bit hosts a Word64/Int64 fits in a plain
-- machine word, so the plain Consume action is reused; 32-bit hosts use
-- the dedicated 64-bit actions.
{-# INLINE decodeWord #-}
decodeWord :: Decoder Word
decodeWord = Decoder (\k -> ConsumeWord (\w# -> k (W# w#)))
{-# INLINE decodeWord8 #-}
decodeWord8 :: Decoder Word8
decodeWord8 = Decoder (\k -> ConsumeWord8 (\w# -> k (W8# w#)))
{-# INLINE decodeWord16 #-}
decodeWord16 :: Decoder Word16
decodeWord16 = Decoder (\k -> ConsumeWord16 (\w# -> k (W16# w#)))
{-# INLINE decodeWord32 #-}
decodeWord32 :: Decoder Word32
decodeWord32 = Decoder (\k -> ConsumeWord32 (\w# -> k (W32# w#)))
{-# INLINE decodeWord64 #-}
decodeWord64 :: Decoder Word64
decodeWord64 =
#if defined(ARCH_64bit)
  Decoder (\k -> ConsumeWord (\w# -> k (W64# w#)))
#else
  Decoder (\k -> ConsumeWord64 (\w64# -> k (W64# w64#)))
#endif
{-# INLINE decodeNegWord #-}
decodeNegWord :: Decoder Word
decodeNegWord = Decoder (\k -> ConsumeNegWord (\w# -> k (W# w#)))
{-# INLINE decodeNegWord64 #-}
decodeNegWord64 :: Decoder Word64
decodeNegWord64 =
#if defined(ARCH_64bit)
  Decoder (\k -> ConsumeNegWord (\w# -> k (W64# w#)))
#else
  Decoder (\k -> ConsumeNegWord64 (\w64# -> k (W64# w64#)))
#endif
{-# INLINE decodeInt #-}
decodeInt :: Decoder Int
decodeInt = Decoder (\k -> ConsumeInt (\n# -> k (I# n#)))
{-# INLINE decodeInt8 #-}
decodeInt8 :: Decoder Int8
decodeInt8 = Decoder (\k -> ConsumeInt8 (\w# -> k (I8# w#)))
{-# INLINE decodeInt16 #-}
decodeInt16 :: Decoder Int16
decodeInt16 = Decoder (\k -> ConsumeInt16 (\w# -> k (I16# w#)))
{-# INLINE decodeInt32 #-}
decodeInt32 :: Decoder Int32
decodeInt32 = Decoder (\k -> ConsumeInt32 (\w# -> k (I32# w#)))
{-# INLINE decodeInt64 #-}
decodeInt64 :: Decoder Int64
decodeInt64 =
#if defined(ARCH_64bit)
  Decoder (\k -> ConsumeInt (\n# -> k (I64# n#)))
#else
  Decoder (\k -> ConsumeInt64 (\n64# -> k (I64# n64#)))
#endif
{-# INLINE decodeInteger #-}
decodeInteger :: Decoder Integer
decodeInteger = Decoder (\k -> ConsumeInteger (\n -> k n))
{-# INLINE decodeFloat #-}
decodeFloat :: Decoder Float
decodeFloat = Decoder (\k -> ConsumeFloat (\f# -> k (F# f#)))
{-# INLINE decodeDouble #-}
decodeDouble :: Decoder Double
decodeDouble = Decoder (\k -> ConsumeDouble (\f# -> k (D# f#)))
{-# INLINE decodeBytes #-}
decodeBytes :: Decoder ByteString
decodeBytes = Decoder (\k -> ConsumeBytes (\bs -> k bs))
{-# INLINE decodeBytesIndef #-}
decodeBytesIndef :: Decoder ()
decodeBytesIndef = Decoder (\k -> ConsumeBytesIndef (k ()))
{-# INLINE decodeString #-}
decodeString :: Decoder Text
decodeString = Decoder (\k -> ConsumeString (\str -> k str))
{-# INLINE decodeStringIndef #-}
decodeStringIndef :: Decoder ()
decodeStringIndef = Decoder (\k -> ConsumeStringIndef (k ()))
{-# INLINE decodeListLen #-}
decodeListLen :: Decoder Int
decodeListLen = Decoder (\k -> ConsumeListLen (\n# -> k (I# n#)))
{-# INLINE decodeListLenIndef #-}
decodeListLenIndef :: Decoder ()
decodeListLenIndef = Decoder (\k -> ConsumeListLenIndef (k ()))
{-# INLINE decodeMapLen #-}
decodeMapLen :: Decoder Int
decodeMapLen = Decoder (\k -> ConsumeMapLen (\n# -> k (I# n#)))
{-# INLINE decodeMapLenIndef #-}
decodeMapLenIndef :: Decoder ()
decodeMapLenIndef = Decoder (\k -> ConsumeMapLenIndef (k ()))
{-# INLINE decodeTag #-}
decodeTag :: Decoder Word
decodeTag = Decoder (\k -> ConsumeTag (\w# -> k (W# w#)))
{-# INLINE decodeTag64 #-}
decodeTag64 :: Decoder Word64
decodeTag64 =
#if defined(ARCH_64bit)
  Decoder (\k -> ConsumeTag (\w# -> k (W64# w#)))
#else
  Decoder (\k -> ConsumeTag64 (\w64# -> k (W64# w64#)))
#endif
{-# INLINE decodeBool #-}
decodeBool :: Decoder Bool
decodeBool = Decoder (\k -> ConsumeBool (\b -> k b))
{-# INLINE decodeNull #-}
decodeNull :: Decoder ()
decodeNull = Decoder (\k -> ConsumeNull (k ()))
{-# INLINE decodeSimple #-}
decodeSimple :: Decoder Word8
decodeSimple = Decoder (\k -> ConsumeSimple (\w# -> k (W8# w#)))
--------------------------------------------------------------
-- Specialised read operations: expect a token with a specific value
--
{-# INLINE decodeWordOf #-}
-- | Decode a word token and require it to equal the expected value @n@.
decodeWordOf :: Word -> Decoder ()
decodeWordOf n = do
  actual <- decodeWord
  if actual == n
    then return ()
    else fail $ "expected word " ++ show n
{-# INLINE decodeListLenOf #-}
-- | Decode a list-length token and require it to equal @len@.
decodeListLenOf :: Int -> Decoder ()
decodeListLenOf len = do
  actual <- decodeListLen
  if actual == len
    then return ()
    else fail $ "expected list of length " ++ show len
--------------------------------------------------------------
-- Branching operations
{-# INLINE decodeListLenOrIndef #-}
-- | Decode a list length, returning 'Nothing' for an indefinite-length
-- list.  The underlying action encodes \"indefinite\" as a negative length.
decodeListLenOrIndef :: Decoder (Maybe Int)
decodeListLenOrIndef =
    Decoder (\k -> ConsumeListLenOrIndef (\n# ->
               if I# n# >= 0
                 then k (Just (I# n#))
                 else k Nothing))
{-# INLINE decodeMapLenOrIndef #-}
-- | Decode a map length, returning 'Nothing' for an indefinite-length map
-- (again signalled by a negative length from the underlying action).
decodeMapLenOrIndef :: Decoder (Maybe Int)
decodeMapLenOrIndef =
    Decoder (\k -> ConsumeMapLenOrIndef (\n# ->
               if I# n# >= 0
                 then k (Just (I# n#))
                 else k Nothing))
{-# INLINE decodeBreakOr #-}
-- | Consume a break token if one is next, returning whether it was consumed.
decodeBreakOr :: Decoder Bool
decodeBreakOr = Decoder (\k -> ConsumeBreakOr k)
--------------------------------------------------------------
-- Special operations
{-# INLINE peekTokenType #-}
-- | Inspect the type of the next input token without consuming it.
peekTokenType :: Decoder TokenType
peekTokenType = Decoder (\k -> PeekTokenType k)
{-
expectExactly :: Word -> Decoder (Word :#: s) s
expectExactly n = expectExactly_ n done
expectAtLeast :: Word -> Decoder (Word :#: s) (Word :#: s)
expectAtLeast n = expectAtLeast_ n done
ignoreTrailingTerms :: Decoder (a :*: Word :#: s) (a :*: s)
ignoreTrailingTerms = IgnoreTerms done
-}
------------------------------------------------------------------------------
-- Special combinations for sequences
--
{-# INLINE decodeSequenceLenIndef #-}
-- | Decode an indefinite-length sequence: repeatedly decode elements with
-- @get@, folding each into the accumulator with @f@ (strictly, via bang
-- patterns, to avoid thunk build-up) until a break token is seen, then
-- finish the accumulator with @g@.
decodeSequenceLenIndef :: (r -> a -> r)
                       -> r
                       -> (r -> r')
                       -> Decoder a
                       -> Decoder r'
decodeSequenceLenIndef f z g get =
    go z
  where
    go !acc = do
      stop <- decodeBreakOr
      if stop then return $! g acc
              else do !x <- get; go (f acc x)
{-# INLINE decodeSequenceLenN #-}
-- | Decode exactly @c@ elements with @get@, folding them strictly into the
-- accumulator with @f@ and finishing with @g@.
decodeSequenceLenN :: (r -> a -> r)
                   -> r
                   -> (r -> r')
                   -> Int
                   -> Decoder a
                   -> Decoder r'
decodeSequenceLenN f z g c get =
    go z c
  where
    go !acc 0 = return $! g acc
    go !acc n = do !x <- get; go (f acc x) (n-1)
| arianvp/binary-serialise-cbor | Data/Binary/Serialise/CBOR/Decoding.hs | bsd-3-clause | 11,695 | 0 | 17 | 2,973 | 2,998 | 1,644 | 1,354 | 274 | 2 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE MultiParamTypeClasses #-}
-- |
-- Module : Basement.Cast
-- License : BSD-style
-- Maintainer : Haskell Foundation
--
module Basement.Cast
( Cast(..)
) where
#include "MachDeps.h"
import qualified Basement.Block.Base as Block
import Basement.Compat.Base
import Basement.Compat.Natural
import Basement.Numerical.Number
import Basement.Numerical.Conversion
import Basement.PrimType
import Data.Proxy (Proxy(..))
import GHC.Int
import GHC.Prim
import GHC.Types
import GHC.ST
import GHC.Word
-- | `Cast` an object of type a to b.
--
-- Do not add instance of this class if the source type is not of the same
-- size of the destination type. Also keep in mind this is casting a value
-- of a given type into a destination type. The value won't be changed to
-- fit the destination represention.
--
-- If you wish to convert a value of a given type into another type, look at
-- `From` and `TryFrom`.
--
-- @
-- cast (-10 :: Int) :: Word === 18446744073709551606
-- @
--
class Cast source destination where
    cast :: source -> destination
    -- Default: reinterpret the bits via a one-element block, valid only
    -- when source and destination have the same primitive size.
    default cast :: ( PrimType source
                    , PrimType destination
                    , PrimSize source ~ PrimSize destination
                    )
                  => source -> destination
    cast a = runST $ do
        mba <- Block.new 1
        Block.unsafeWrite mba 0 a
        Block.unsafeRead (Block.unsafeRecast mba) 0
-- Signed <-> unsigned casts between types of equal width: reinterpret the
-- bit pattern using the primitive word/int conversions (with narrowing for
-- the sub-word sizes).
instance Cast Int8 Word8 where
    cast (I8# i) = W8# (narrow8Word# (int2Word# i))
instance Cast Int16 Word16 where
    cast (I16# i) = W16# (narrow16Word# (int2Word# i))
instance Cast Int32 Word32 where
    cast (I32# i) = W32# (narrow32Word# (int2Word# i))
instance Cast Int64 Word64 where
    cast = int64ToWord64
instance Cast Int Word where
    cast (I# i) = W# (int2Word# i)
instance Cast Word8 Int8 where
    cast (W8# i) = I8# (narrow8Int# (word2Int# i))
instance Cast Word16 Int16 where
    cast (W16# i) = I16# (narrow16Int# (word2Int# i))
instance Cast Word32 Int32 where
    cast (W32# i) = I32# (narrow32Int# (word2Int# i))
instance Cast Word64 Int64 where
    cast = word64ToInt64
instance Cast Word Int where
    cast (W# w) = I# (word2Int# w)
-- Casts between the native Word/Int and the fixed-width type matching the
-- host word size: 64-bit types on 64-bit hosts, 32-bit types otherwise.
#if WORD_SIZE_IN_BITS == 64
instance Cast Word Word64 where
    cast (W# w) = W64# w
instance Cast Word64 Word where
    cast (W64# w) = W# w
instance Cast Word Int64 where
    cast (W# w) = I64# (word2Int# w)
instance Cast Int64 Word where
    cast (I64# i) = W# (int2Word# i)
instance Cast Int Int64 where
    cast (I# i) = I64# i
instance Cast Int64 Int where
    cast (I64# i) = I# i
instance Cast Int Word64 where
    cast (I# i) = W64# (int2Word# i)
instance Cast Word64 Int where
    cast (W64# w) = I# (word2Int# w)
#else
instance Cast Word Word32 where
    cast (W# w) = W32# w
instance Cast Word32 Word where
    cast (W32# w) = W# w
instance Cast Word Int32 where
    cast (W# w) = I32# (word2Int# w)
instance Cast Int32 Word where
    cast (I32# i) = W# (int2Word# i)
instance Cast Int Int32 where
    cast (I# i) = I32# i
instance Cast Int32 Int where
    cast (I32# i) = I# i
instance Cast Int Word32 where
    cast (I# i) = W32# (int2Word# i)
instance Cast Word32 Int where
    cast (W32# w) = I# (word2Int# w)
#endif
-- A block of any element type can be viewed as a block of bytes without
-- copying: the underlying byte array is reused as-is.
instance Cast (Block.Block a) (Block.Block Word8) where
    cast (Block.Block ba) = Block.Block ba
| vincenthz/hs-foundation | basement/Basement/Cast.hs | bsd-3-clause | 3,651 | 0 | 13 | 984 | 852 | 444 | 408 | 68 | 0 |
{-# language PackageImports #-}
-- | This module re-exports <https://hackage.haskell.org/package/indentation-trifecta/docs/Text-Trifecta-Indentation.html Text.Trifecta.Indentation> from <https://hackage.haskell.org/package/indentation-trifecta indentation-trifecta>.
module Text.Trifecta.Indentation (module Impl) where
import "indentation-trifecta" Text.Trifecta.Indentation as Impl hiding (Token)
| lambdageek/indentation | indentation/src/Text/Trifecta/Indentation.hs | bsd-3-clause | 400 | 0 | 5 | 27 | 30 | 22 | 8 | 3 | 0 |
module VariableFunctions where
import Tokenizer
import Evaluator
-- | Builtin variable-related functions exported by this module, keyed by
-- their PHP-visible name.
functions = [("var_dump", phpVarDump)]
-- | Implementation of PHP's @var_dump@: print a type-and-value description
-- of each argument, one per line, and return NULL.
phpVarDump :: PHPFunctionType
phpVarDump args = (output $ unlines $ map describe args) >> return PHPNull
    where describe value = case value of
            PHPInt i -> "int(" ++ show i ++ ")"
            PHPFloat f -> "float(" ++ show f ++ ")"
            PHPString s -> "string(" ++ show (length s) ++ ") \"" ++ s ++ "\""
            PHPBool b -> "bool(" ++ show b ++ ")"
            PHPNull -> "NULL"
| jhartikainen/hs-language-php | VariableFunctions.hs | bsd-3-clause | 506 | 0 | 13 | 139 | 194 | 101 | 93 | 11 | 5 |
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
module Handler.Issue
( getCurrentScheduleR
, getScheduleR
, getTaskR
, postExportCsvR
, getProjectListR
, getAssignListR
, getStatusListR
, getCrossSearchR
, postCrossSearchR
, getIssueListR
, getNewIssueR
, postNewIssueR
, getIssueR
, postCommentR
, getAttachedFileR
, postReadCommentR
, deleteReadCommentR
, getCommentReadersR
) where
import Import hiding (intercalate, intersperse, groupBy)
import Database.Persist.Sql
import BISocie.Helpers.Util
import Data.List (intercalate, intersperse, nub, groupBy)
import Data.Maybe (fromJust)
import Data.Time
import Data.Time.Calendar.WeekDate
import qualified Data.Text.Lazy as L
import qualified Data.Text.Lazy.Encoding as LE
import qualified Data.Text as T
import Handler.S3
import Network.Mail.Mime
import Text.Blaze.Internal (preEscapedText)
import Text.Blaze.Html.Renderer.Utf8 (renderHtml)
import Text.Shakespeare.Text (stext)
import Yesod.Goodies.PNotify
-- | Redirect to the schedule page for the current year and month.
getCurrentScheduleR :: Handler Html
getCurrentScheduleR = do
  now <- liftIO getCurrentTime
  let (y, m, _) = toGregorian (utctDay now)
  redirect $ ScheduleR y m
-- | Days of the week in ISO order; converted from the ISO weekday number
-- (Monday = 1) by @toWeekDay@ in 'getScheduleR'.  'Show' output is used as
-- a CSS class name.
data WeekDay = Monday | Tuesday | Wednesday | Thursday | Friday | Saturday | Sunday
             deriving (Show, Eq, Ord, Enum)
-- | Render the month-view schedule page for the given year and month.
-- The grid covers whole ISO weeks from the week containing the 1st through
-- the week containing the last day of the month.  Several where-bindings
-- (taskUri, showDay, prevMonth, ...) are referenced only from the spliced
-- "schedule" template, as is the bound user @u@.
getScheduleR :: Year -> Month -> Handler Html
getScheduleR year month = do
  u <- requireAuth
  today <- liftIO $ fmap utctDay getCurrentTime
  -- one inner list per calendar week; each cell is (day, css classes)
  let days = map (map (ywd2cell today))
             $ groupBy ((==) `on` snd3)
             $ [toWeekDate d | d <- [fromWeekDate fy fm 1 .. fromWeekDate ly lm 7]]
  defaultLayout $ do
    setTitle $ preEscapedText $ showText year +++ "年" +++ showText month +++ "月のスケジュール"
    $(widgetFile "schedule")
  where
    ywd2cell c (y,w,d) = let d' = fromWeekDate y w d in (d', classOf d' d c)
    fday = fromGregorian year month 1
    lday = fromGregorian year month $ gregorianMonthLength year month
    (fy, fm, _) = toWeekDate fday
    (ly, lm, _) = toWeekDate lday
    -- CSS classes for a day cell: weekday name plus today/current-month markers
    classOf :: Day -> Int -> Day -> String
    classOf day d today = intercalate " "
      $ ["schedule-day-cell", toWeekName d]
      ++ (if today == day then ["today"] else [])
      ++ (if currentMonth day then ["currentMonth"] else ["otherMonth"])
    taskUri :: Day -> Route App
    taskUri d = let (y', m', d') = toGregorian d in TaskR y' m' d'
    showDay :: Day -> String
    showDay = show . thd3 . toGregorian
    currentMonth :: Day -> Bool
    currentMonth d = let (y', m', _) = toGregorian d in year == y' && month == m'
    monthmove n cm = let (y', m', _) = toGregorian $ addGregorianMonthsClip n cm
                     in ScheduleR y' m'
    prevMonth = monthmove (-1)
    nextMonth = monthmove 1
    prevYear = monthmove (-12)
    nextYear = monthmove 12
    toWeekName :: Int -> String
    toWeekName = show . toWeekDay
    toWeekDay :: Int -> WeekDay
    toWeekDay n = toEnum (n-1)
-- | JSON list of tasks (issues) whose limit date is the given day,
-- restricted to projects the requesting user participates in, ordered by
-- limit time.
getTaskR :: Year -> Month -> Date -> Handler Value
getTaskR y m d = do
  (uid, r) <- (,) <$> requireAuthId <*> getUrlRender
  issues <- runDB $ do
    ptcpts <- selectList [ParticipantsUser ==. uid] []
    selectList
      [ IssueLimitdate ==. Just day
      , IssueProject <-. map (participantsProject.entityVal) ptcpts
      ]
      [Asc IssueLimittime]
  returnJson $ object ["tasks" .= array (map (go r) issues)]
  where
    day = fromGregorian y m d
    go r (Entity iid issue) =
      object [ "id" .= show iid
             , "subject" .= issueSubject issue
             , "uri" .= r (IssueR (issueProject issue) (issueNumber issue))
             , "limittime" .= showLimittime issue
             ]
-- | Export the user's upcoming tasks (limit date >= today, across projects
-- they participate in) as a downloadable CSV.
--
-- NOTE(review): mkRecord uses 'fromJust' on both limitdate and limittime.
-- The query guarantees limitdate is present (@>=. Just today@) but places
-- no constraint on limittime, so an issue with a date but no time would
-- crash here — confirm limittime is always set for dated issues.
-- NOTE(review): 'escape' backslash-escapes commas/quotes rather than using
-- RFC 4180 doubled quotes; presumably targets a specific CSV consumer.
postExportCsvR :: UserId -> Handler (RepCsv Text)
postExportCsvR uid = do
  today <- liftIO $ fmap utctDay getCurrentTime
  issues <- runDB $ do
    ptcpts <- selectList [ParticipantsUser ==. uid] []
    selectList
      [ IssueLimitdate >=. Just today
      , IssueProject <-. map (participantsProject.entityVal) ptcpts
      ]
      [Asc IssueLimitdate, Asc IssueLimittime]
  download "task-schedule.csv" (CSV $ generateCsv issues)
  where
    generateCsv :: [Entity Issue] -> ([Text], [[Text]])
    generateCsv iss = (header, map (mkRecord.entityVal) iss)
    header :: [Text]
    header = ["Subject","Start Date","Start Time","End Date","End Time"]
    mkRecord :: Issue -> [Text]
    mkRecord is = sbj:sd:st:ed:et:[]
      where
        sbj = escape $ issueSubject is
        sd = toText $ fromJust $ issueLimitdate is
        st = toText $ fromJust $ issueLimittime is
        -- start and end are exported as the same instant
        (ed, et) = (sd, st)
    escape t = "\"" <> T.foldr ((<>).rep) T.empty t <> "\""
    rep :: Char -> Text
    rep '\\' = "\\\\"
    rep ',' = "\\,"
    rep '"' = "\\\""
    rep x = T.singleton x
-- | Paged JSON project listing.  GET parameters: @includeterminated@,
-- @project_name@ (substring filter), @user_ident_or_name@ (filter to
-- projects with a matching participant), @page@, @order@.  Non-admin users
-- only see projects they participate in.
getProjectListR :: Handler Value
getProjectListR = do
  (u, r) <- (,) <$> requireAuth <*> getUrlRender
  (includeTerminated, project_name, user_ident_or_name, mpage, ordName) <-
    (,,,,) <$> fmap isJust (lookupGetParam "includeterminated")
           <*> lookupGetParam "project_name"
           <*> lookupGetParam "user_ident_or_name"
           <*> fmap (fmap readText) (lookupGetParam "page")
           <*> fmap (fromMaybe "DescProjectUdate") (lookupGetParam "order")
  let tf = if includeTerminated then [] else [ProjectTerminated ==. False]
  let order = [textToOrder ordName]
  prjs' <- runDB $ do
    -- restrict by participant match (all projects when no user filter given)
    pats <- case user_ident_or_name of
      Nothing -> selectList [] []
      Just q -> do
        users <- selectList [] []
        let uids = map entityKey $ filter (userIdentOrName q.entityVal) users
        selectList [ParticipantsUser <-. uids] []
    let pf = [ProjectId <-. map (participantsProject.entityVal) pats]
    if isAdmin (entityVal u)
      then selectList (tf++pf) order
      else do
        ps <- selectList [ParticipantsUser ==. entityKey u] []
        selectList (tf ++ pf ++ [ProjectId <-. (map (participantsProject.entityVal) ps)]) order
  -- name filter and paging are applied in memory after the DB query
  let allprjs = case project_name of
        Just pn -> filter (T.isInfixOf pn . projectName . entityVal) prjs'
        Nothing -> prjs'
      pageLength = ceiling (fromIntegral (length allprjs) / fromIntegral projectListLimit)
      prjs = case mpage of
        Nothing -> allprjs
        Just n -> drop (n*projectListLimit) $ take ((n+1)*projectListLimit) allprjs
  returnJson $ object [ "projects" .= array (map (go r) prjs)
                      , "page" .= maybe 0 id mpage
                      , "order" .= ordName
                      , "pageLength" .= (pageLength :: Int)
                      ]
  where
    go r (Entity pid p) = object [ "pid" .= show pid
                                 , "name" .= projectName p
                                 , "description" .= unTextarea (projectDescription p)
                                 , "cdate" .= showDate (projectCdate p)
                                 , "udate" .= showDate (projectUdate p)
                                 , "issuelistUri" .= r (IssueListR pid)
                                 , "projectUri" .= r (ProjectR pid)
                                 ]
-- | JSON list of users assignable within the given projects (@projectid@
-- GET params).  Non-admins are limited to projects they participate in.
getAssignListR :: Handler Value
getAssignListR = do
  u <- requireAuth
  pids <- fmap (fmap readText) $ lookupGetParams "projectid"
  users <- runDB $ do
    ps <- if isAdmin (entityVal u)
          then selectList [ParticipantsProject <-. pids] []
          else do
            ps' <- selectList [ParticipantsUser ==. entityKey u, ParticipantsProject <-. pids] []
            selectList [ParticipantsProject <-. (map (participantsProject.entityVal) ps')] []
    selectList [UserId <-. (map (participantsUser.entityVal) ps)] []
  cacheSeconds 10 -- FIXME
  returnJson $ object ["assigns" .= array (map go users)]
  where
    go (Entity uid u) = object [ "uid" .= show uid
                               , "name" .= userFullName u
                               ]
-- | JSON list of the distinct status names configured across the given
-- projects (@projectid@ GET params), respecting participation for
-- non-admins.
--
-- NOTE(review): the irrefutable @(Right es)@ pattern crashes on a project
-- whose status field fails to parse — confirm statuses are validated on
-- write.
getStatusListR :: Handler Value
getStatusListR = do
  u <- requireAuth
  pids <- fmap (fmap readText) $ lookupGetParams "projectid"
  stss <- runDB $ do
    prjs <- if isAdmin (entityVal u)
            then selectList [ProjectId <-. pids] []
            else do
              ps <- selectList [ParticipantsUser ==. entityKey u, ParticipantsProject <-. pids] []
              selectList [ProjectId <-. (map (participantsProject.entityVal) ps)] []
    return $ nub $ concatMap (\(Entity _ prj) ->
                                let (Right es) = parseStatuses $ projectStatuses prj
                                in map fst3 es) prjs
  cacheSeconds 10 -- FIXME
  returnJson $ object ["statuses" .= array stss]
-- | Render the cross-project search page, pre-populated with the user's
-- (non-terminated) projects.
getCrossSearchR :: Handler Html
getCrossSearchR = do
  u <- requireAuth
  prjs <- runDB $ do
    -- initial GET, so terminated projects are excluded
    prjs' <- if isAdmin (entityVal u)
             then selectList [ProjectTerminated ==. False] []
             else do
               ps <- selectList [ParticipantsUser ==. entityKey u] []
               selectList [ ProjectTerminated ==. False
                          , ProjectId <-. (map (participantsProject.entityVal) ps)] []
    return $ map toProjectBis prjs'
  defaultLayout $ do
    setTitle "クロスサーチ"
    $(widgetFile "crosssearch")
-- | Execute a cross-project issue search.  POST parameters select
-- projects, statuses and assignees, plus limit-date and updated-date
-- ranges (the \"to\" bounds are made exclusive by adding one day) and a
-- page number.  Returns one page of matching issues as JSON, decorated
-- with each project's status colour/effect.
postCrossSearchR :: Handler Value
postCrossSearchR = do
  (u, r) <- (,) <$> requireAuth <*> getUrlRender
  (ps, ss, as) <-
    (,,) <$> fmap (fmap readText) (lookupPostParams "projectid")
         <*> lookupPostParams "status"
         <*> fmap (fmap (Just . readText)) (lookupPostParams "assign")
  (lf, lt) <- uncurry (liftM2 (,))
              (fmap (fmap (Just . readText)) $ lookupPostParam "limitdatefrom",
               fmap (fmap (Just . addDays 1 . readText)) $ lookupPostParam "limitdateto")
  (uf, ut) <- uncurry (liftM2 (,))
              (fmap (fmap (localDayToUTC . readText)) $ lookupPostParam "updatedfrom",
               fmap (fmap (localDayToUTC . addDays 1 . readText)) $ lookupPostParam "updatedto")
  page <- fmap (max 0 . fromMaybe 0 . fmap readText) $ lookupPostParam "page"
  issues <- runDB $ do
    prjs <- if isAdmin (entityVal u)
            then selectList [ProjectId <-. ps] []
            else do
              ps' <- selectList [ParticipantsUser ==. entityKey u, ParticipantsProject <-. ps] []
              selectList [ProjectId <-. (map (participantsProject.entityVal) ps')] []
    -- empty selections generate no filter at all (match everything)
    let (pS, sS, aS) = (toInFilter (IssueProject <-.) $ map entityKey prjs,
                        toInFilter (IssueStatus <-.) ss,
                        toInFilter (IssueAssign <-.) as)
        (lF, lT, uF, uT) = (maybeToFilter (IssueLimitdate >=.) lf,
                            maybeToFilter (IssueLimitdate <.) lt,
                            maybeToFilter (IssueUdate >=.) uf,
                            maybeToFilter (IssueUdate <.) ut)
    issues' <- selectList (pS ++ sS ++ aS ++ lF ++ lT ++ uF ++ uT) [Desc IssueUdate, LimitTo issueListLimit, OffsetBy (page*issueListLimit)]
    forM issues' $ \(Entity id' i) -> do
      cu <- get404 $ issueCuser i
      uu <- get404 $ issueUuser i
      mau <- case issueAssign i of
        Nothing -> return Nothing
        Just auid -> get auid
      let (Just prj) = lookupProjectBis (issueProject i) $ map toProjectBis prjs
      return $ (prj, IssueBis id' i cu uu mau)
  cacheSeconds 10 -- FIXME
  returnJson $ object ["issues" .= array (map (go r) issues)]
  where
    colorAndEffect s es = case lookupStatus s es of
      Nothing -> ("", "")
      Just (_, c, e) -> (fromMaybe "" c, fromMaybe "" (fmap show e))
    go r (p, i) =
      let (c, e) = colorAndEffect (issueStatus $ issueBisIssue i) (projectBisStatuses p)
          projectRoute = IssueListR $ projectBisId p
          issueRoute = IssueR (projectBisId p) (issueNumber $ issueBisIssue i)
      in
       object [ "id" .= show (issueBisId i)
              , "effect" .= e
              , "color" .= c
              , "project" .= projectBisName p
              , "projecturi" .= r (projectRoute)
              , "no" .= issueNumber (issueBisIssue i)
              , "subject" .= issueSubject (issueBisIssue i)
              , "issueuri" .= r issueRoute
              , "status" .= issueStatus (issueBisIssue i)
              , "assign" .= showmaybe (fmap userFullName (issueBisAssign i))
              , "limitdate" .= showLimitdate (issueBisIssue i)
              , "limittime" .= showLimittime (issueBisIssue i)
              , "creator" .= userFullName (issueBisCreator i)
              , "updator" .= userFullName (issueBisUpdator i)
              , "updated" .= showDate (issueUdate (issueBisIssue i))
              ]
-- | Render one page of a project's issue list, newest-updated first, with
-- status colour/effect decoration and pagination controls.  Several of the
-- let-bindings are consumed only by the spliced "issuelist"/"paging"
-- templates.
getIssueListR :: ProjectId -> Handler Html
getIssueListR pid = do
  page' <- lookupGetParam "page"
  let page = max 0 $ fromMaybe 0 $ fmap readText $ page'
  (alliis, issues'', prj, es) <- runDB $ do
    prj' <- get404 pid
    let (Right es) = parseStatuses $ projectStatuses prj'
        prj = ProjectBis { projectBisId=pid
                         , projectBisName=projectName prj'
                         , projectBisDescription=projectDescription prj'
                         , projectBisStatuses=es
                         }
    issues <- selectList [IssueProject ==. pid] [Desc IssueUdate]
    let issues' = take issueListLimit $ drop (page*issueListLimit) issues
    issues'' <- forM issues' $ \(Entity id' i) -> do
      cu <- get404 $ issueCuser i
      uu <- get404 $ issueUuser i
      mau <- case issueAssign i of
        Nothing -> return Nothing
        Just auid -> get auid
      return $ IssueBis id' i cu uu mau
    return (issues, issues'', prj, es)
  -- alternate odd/even row classes for the template
  let issues = zip (concat $ repeat ["odd","even"]::[String]) issues''
      colorOf = \s ->
        case lookupStatus s es of
          Nothing -> ""
          Just (_, c, _) -> fromMaybe "" c
      effectOf = \s ->
        case lookupStatus s es of
          Nothing -> ""
          Just (_, _, e) -> fromMaybe "" (fmap show e)
      -- pagenate
      maxpage = ceiling (fromIntegral (length alliis) / fromIntegral issueListLimit) - 1
      prevExist = page > 0
      nextExist = page < maxpage
      prevPage = (IssueListR pid, [("page", showText $ max 0 (page-1))])
      nextPage = (IssueListR pid, [("page", showText $ max 0 (page+1))])
      pagenate = intersperse [] $ map (map pageN) $ mkPagenate fillGapWidth pagenateWidth page maxpage
      pageN = \n -> (n, (IssueListR pid, [("page", showText n)]))
      isCurrent = (==page)
      needPaging = maxpage > 0
      inc = (+1)
      colspan = 8
      paging = $(widgetFile "paging")
  defaultLayout $ do
    setTitle $ preEscapedText $ projectBisName prj +++ "タスク一覧"
    $(widgetFile "issuelist")
-- | Render the new-issue form for a project; an optional @parent@ GET
-- parameter pre-links the new issue to a parent issue.
--
-- NOTE(review): irrefutable @(Right stss)@ crashes on unparsable project
-- statuses — same pattern as 'getStatusListR'.
getNewIssueR :: ProjectId -> Handler Html
getNewIssueR pid = do
  mparent <- lookupGetParam "parent"
  (ptcpts, stss, prj) <- runDB $ do
    prj <- get404 pid
    ptcpts <- selectParticipants pid
    let (Right stss) = parseStatuses $ projectStatuses prj
    return (ptcpts, stss, prj)
  defaultLayout $ do
    setTitle "新規タスク作成"
    $(widgetFile "newissue")
-- | Create a new issue from the posted form: bump the project's issue
-- counter, store the issue with its initial comment (and optional attached
-- file), then — when the comment has content and the project has mail
-- recipients — send a notification mail (plain + HTML parts) before
-- redirecting to the new issue's page.
postNewIssueR :: ProjectId -> Handler Html
postNewIssueR pid = do
  (uid, r, now) <-
    (,,) <$> requireAuthId <*> getUrlRender <*> liftIO getCurrentTime
  issue <- runInputPost $ Issue pid (-1) uid now uid now
           <$> ireq textField "subject"
           <*> fmap (fmap readText) (iopt textField "assign")
           <*> ireq textField "status"
           <*> iopt dayField "limitdate"
           <*> iopt timeFieldTypeText "limittime"
           <*> iopt dayField "reminderdate"
           <*> fmap (fmap readText) (iopt hiddenField "parent")
  -- placeholder issue key; the real key is filled in after the insert below
  comment <- runInputPost $ Comment pid (IssueKey (SqlBackendKey 0)) (Textarea "") Nothing uid now
             <$> iopt textareaField "content"
             <*> fmap (fmap readText) (iopt textField "assign")
             <*> ireq textField "status"
             <*> iopt dayField "limitdate"
             <*> iopt timeFieldTypeText "limittime"
             <*> iopt dayField "reminderdate"
             <*> ireq boolField "checkreader"
  mfi <- lookupFile "attached"
  ino <- runDB $ do
    update pid [ProjectIssuecounter +=. 1, ProjectUdate =. now]
    prj <- get404 pid
    let ino = projectIssuecounter prj
    mfh <- storeAttachedFile uid mfi
    iid <- insert $ issue {issueNumber=ino}
    cid <- insert $ comment { commentIssue=iid
                            , commentAttached=fmap fst mfh
                            , commentAutomemo=Textarea "init."
                            }
    bcc <- selectMailAddresses pid
    let msgid = toMessageId iid cid now mailMessageIdDomain
        fragment = "#" +++ toPathPiece cid
        url = r (IssueR pid ino) <> fragment
        mfurl = fmap (r . AttachedFileR cid . fst) mfh
    -- recipients go on Bcc so participants do not see each other's addresses
    when (isJust (commentContent comment) && not (null bcc)) $
      liftIO $ renderSendMail Mail
        { mailFrom = fromEmailAddress
        , mailTo = []
        , mailCc = []
        , mailBcc = bcc
        , mailHeaders =
          [ ("Subject", issueSubject issue)
          , ("Message-ID", msgid)
          , (mailXHeader, toPathPiece pid)
          ]
        , mailParts =
          [[ Part "text/plain; charset=utf-8" QuotedPrintableText Nothing []
             $ LE.encodeUtf8 $ mkTextPart prj issue comment url mfurl
           , Part "text/html; charset=utf-8" QuotedPrintableText Nothing []
             $ LE.encodeUtf8 $ mkHtmlPart prj issue comment url mfurl
           ]]
        }
    return ino
  redirect $ IssueR pid ino
-- | Plain-text body for issue notification mail: project, subject, status,
-- the comment text, the issue URL and (when present) the attached-file URL.
-- FIXME: near-duplicate of mkMail on Root.hs; consider sharing.
-- NOTE(review): uses 'fromJust' on commentContent — safe only because the
-- caller guards on @isJust (commentContent comment)@ before sending.
mkTextPart p i c url mfUrl = [stext|
プロジェクト: #{projectName p}
タスク: #{issueSubject i}
ステータス: #{issueStatus i}
#{unTextarea $ fromJust $ commentContent c}
* このメールに直接返信せずにこちらのページから投稿してください.
URL: #{url}
|] <> if isNothing mfUrl then "" else [stext|
添付ファイル: #{fromJust mfUrl}
|]
-- | HTML body for issue notification mail; mirrors 'mkTextPart' with the
-- comment rendered line-by-line.  Same 'fromJust'-on-commentContent caveat
-- as the text part.
mkHtmlPart p i c url mfUrl = LE.decodeUtf8 $ renderHtml [shamlet|
<p>
  <dl>
    <dt>プロジェクト
    <dd>#{projectName p}
    <dt>タスク
    <dd>#{issueSubject i}
    <dt>ステータス
    <dd>#{issueStatus i}
<p>
  $forall ln <- T.lines $ unTextarea $ fromJust $ commentContent c
    #{ln}<br>
<p> * このメールに直接返信せずにこちらのページから投稿してください.
<p>
  <dl>
    <dt>URL
    <dd>#{url}
    $maybe furl <- mfUrl
      <dt>添付ファイル
      <dd>#{furl}
|]
-- | Render a single issue page: the issue itself, its comments
-- (newest-first, each with attachment info and whether the current user
-- has marked it read), project participants, and parent/child issues.
getIssueR :: ProjectId -> IssueNo -> Handler Html
getIssueR pid ino = do
  selfid <- requireAuthId
  (prj, ptcpts, iid, issue, comments, mparent, children) <-
    runDB $ do
      (Entity iid issue) <- getBy404 $ UniqueIssue pid ino
      cs <- selectList [CommentIssue ==. iid] [Desc CommentCdate]
      comments <- forM cs $ \(Entity cid c) -> do
        let uid = commentCuser c
        u <- get404 uid
        mf <- case commentAttached c of
          Nothing -> return Nothing
          Just fid -> do
            f <- get404 fid
            return $ Just (fid, f)
        -- has the current user recorded a read receipt for this comment?
        mreadP <- getBy $ UniqueReader cid selfid
        return $ (cid,
                  CommentBis { commentBisId=cid
                             , commentBisContent=commentContent c
                             , commentBisStatus=commentStatus c
                             , commentBisAutomemo=commentAutomemo c
                             , commentBisAttached=mf
                             , commentBisCheckReader=commentCheckReader c
                             , commentBisCuser=(uid, u)
                             , commentBisCdate=commentCdate c
                             }
                 ,isJust mreadP)
      prj <- get404 pid
      ptcpts <- selectParticipants pid
      mparent <- maybe (return Nothing) get $ issueParentIssue issue
      children <- selectList [IssueParentIssue ==. Just iid] []
      return (prj, ptcpts, iid, issue, comments, mparent, children)
  let (Right stss) = parseStatuses $ projectStatuses prj
      isAssign = case issueAssign issue of
        Nothing -> const False
        Just uid -> (==uid)
      isStatus = (==issueStatus issue)
  defaultLayout $ do
    setTitle $ preEscapedText $ issueSubject issue
    $(widgetFile "issue")
-- | Post a new comment on an issue.  In a single DB transaction this
-- updates the issue's mutable fields from the comment, generates an
-- automatic change memo, inserts the comment, and (when the comment has
-- user-supplied content) sends a notification mail threaded onto the
-- previous comment's Message-ID.  Finally redirects back to the issue.
postCommentR :: ProjectId -> IssueNo -> Handler Html
postCommentR pid ino = do
  uid <- requireAuthId
  now <- liftIO getCurrentTime
  -- The constructor is partially applied with placeholder issue key and
  -- empty content; the real values are filled in before insertion below.
  comment <- runInputPost $ Comment pid (IssueKey (SqlBackendKey 0)) (Textarea "") Nothing uid now
             <$> iopt textareaField "content"
             <*> fmap (fmap readText) (iopt textField "assign")
             <*> ireq textField "status"
             <*> iopt dayField "limitdate"
             <*> iopt timeFieldTypeText "limittime"
             <*> iopt dayField "reminderdate"
             <*> ireq boolField "checkreader"
  mfi <- lookupFile "attached"
  runDB $ do
    r <- lift getUrlRender
    (Entity iid issue) <- getBy404 $ UniqueIssue pid ino
    -- NOTE(review): partial pattern -- assumes every issue already has
    -- at least one comment (the "init." comment created with the
    -- issue); TODO confirm that invariant.
    Just (Entity lastCid lastC') <- selectFirst [CommentIssue ==. iid] [Desc CommentCdate]
    mfh <- storeAttachedFile uid mfi
    amemo <- generateAutomemo comment issue mfh
    replace iid issue { issueUuser = uid
                      , issueUdate = now
                      , issueLimitdate = commentLimitdate comment
                      , issueLimittime = commentLimittime comment
                      , issueReminderdate = commentReminderdate comment
                      , issueAssign = commentAssign comment
                      , issueStatus = commentStatus comment
                      }
    -- Reject comments that carry neither content nor any change memo.
    when (isNothing (commentContent comment) && T.null (unTextarea amemo)) $ do
      lift $ do
        r' <- getMessageRender
        setPNotify $ PNotify JqueryUI Error "invalid input" $ r' MsgInvalidCommentPosted
      redirect $ IssueR pid ino
    cid <- insert $ comment { commentIssue=iid
                            , commentAttached=fmap fst mfh
                            , commentAutomemo=amemo
                            }
    prj <- get404 pid
    emails <- selectMailAddresses pid
    let msgid = toMessageId iid cid now mailMessageIdDomain
        refid = toMessageId iid lastCid (commentCdate lastC') mailMessageIdDomain
        fragment = "#" +++ toPathPiece cid
        url = r (IssueR pid ino) <> fragment
        mfurl = fmap (r . AttachedFileR cid . fst) mfh
    -- Mail only real (content-carrying) comments, and only when there
    -- is someone to notify.  Recipients go in Bcc.
    when (isJust (commentContent comment) && not (null emails)) $
      liftIO $ renderSendMail Mail
        { mailFrom = fromEmailAddress
        , mailBcc = emails
        , mailTo = []
        , mailCc = []
        , mailHeaders =
            [ ("Subject", issueSubject issue)
            , ("Message-ID", msgid)
            , ("References", refid)
            , ("In-Reply-To", refid)
            , (mailXHeader, toPathPiece pid)
            ]
        , mailParts =
            [[ Part "text/plain; charset=utf-8" QuotedPrintableText Nothing []
               $ LE.encodeUtf8 $ mkTextPart prj issue comment url mfurl
             , Part "text/html; charset=utf-8" QuotedPrintableText Nothing []
               $ LE.encodeUtf8 $ mkHtmlPart prj issue comment url mfurl
             ]]
        }
  redirect $ IssueR pid ino
-- | Serve a file attached to a comment.  The comment id only shapes the
-- route; the file is looked up by its own key and delegated to 'getFileR'.
getAttachedFileR :: CommentId -> FileHeaderId -> Handler ()
getAttachedFileR _ fid =
  runDB (get404 fid) >>= \fh -> getFileR (fileHeaderCreator fh) fid
-- | Mark a comment as read by the current user and return a JSON
-- description of the reader.  Idempotent: a second call finds the
-- existing Reader row and responds identically ("added" either way).
postReadCommentR :: CommentId -> Handler Value
postReadCommentR cid = do
  (Entity uid u) <- requireAuth
  r <- getUrlRender
  ret <- runDB $ do
    mr <- getBy $ UniqueReader cid uid
    case mr of
      Just _ -> return "added"  -- already recorded; nothing to insert
      Nothing -> do
        now <- liftIO getCurrentTime
        _ <- insert $ Reader cid uid now
        return "added"
  cacheSeconds 10 -- FIXME
  returnJson $ object [ "status" .= (ret :: Text)
                      , "read" .=
                        object [ "comment" .= show cid
                               , "reader" .=
                                 object [ "id" .= show uid
                                        , "ident" .= userIdent u
                                        , "name" .= userFullName u
                                        , "uri" .= r (ProfileR uid)
                                        , "avatar" .= r (AvatarImageR uid)
                                        ]
                               ]
                      ]
-- | Remove the current user's read-mark from a comment and return a
-- JSON description of the (former) reader.  Deleting a mark that does
-- not exist is harmless; the response is "deleted" in either case.
deleteReadCommentR :: CommentId -> Handler Value
deleteReadCommentR cid = do
  (Entity uid u) <- requireAuth
  r <- getUrlRender
  ret <- runDB $ do
    deleteBy $ UniqueReader cid uid
    return "deleted"
  cacheSeconds 10 -- FIXME
  let readerJson = object
        [ "id" .= show uid
        , "ident" .= userIdent u
        , "name" .= userFullName u
        , "uri" .= r (ProfileR uid)
        , "avatar" .= r (AvatarImageR uid)
        ]
  returnJson $ object
    [ "status" .= (ret :: Text)
    , "read" .= object [ "comment" .= show cid
                       , "reader" .= readerJson
                       ]
    ]
-- | List everyone who has marked the comment as read, ordered by when
-- they read it, as JSON records with profile and avatar URLs.
getCommentReadersR :: CommentId -> Handler Value
getCommentReadersR cid = do
  r <- getUrlRender
  readers <- runDB $ do
    rds' <- selectList [ReaderComment ==. cid] [Asc ReaderCheckdate]
    forM rds' $ \(Entity _ rd') -> do
      let uid' = readerReader rd'
          ra = AvatarImageR uid'
      -- NOTE(review): partial pattern -- fails if a Reader row points
      -- at a deleted User.  Presumably users are never deleted; confirm.
      Just u <- get uid'
      return (uid', u, ra)
  cacheSeconds 10 -- FIXME
  returnJson $ object ["readers" .= array (map (go r) readers)]
  where
    -- Render one reader tuple as a JSON object.
    go r (uid, u, ra) = object [ "id" .= show uid
                               , "ident" .= userIdent u
                               , "name" .= userFullName u
                               , "uri" .= r (ProfileR uid)
                               , "avatar" .= r ra
                               ]
-- selectParticipants :: (Failure ErrorResponse m, MonadTrans t, PersistBackend t m) =>
--                       Key t (ProjectGeneric t) -> t m [(Key t User, User)]
-- | Look up every participant of the given project together with the
-- full User record for each participant.
selectParticipants pid = do
  mapM (p2u.entityVal) =<< selectList [ParticipantsProject ==. pid] []
  where
    -- Resolve a Participants row to its (user key, User) pair.
    p2u p = do
      let uid = participantsUser p
      u <- get404 uid
      return (uid, u)
-- | Upload an optional form file for the given user, keeping only the
-- first two components of the five-tuple that 'upload' returns.
-- 'Nothing' in means 'Nothing' out (no file was attached).
storeAttachedFile _ Nothing = return Nothing
storeAttachedFile uid (Just fi) = do
  stored <- upload uid fi
  return (fmap firstTwo stored)
  where
    firstTwo (k, h, _, _, _) = (k, h)
-- | Build the automatic change memo for a comment: one line (Japanese)
-- per field that differs between the issue's old state and the comment's
-- new values -- status, deadline, reminder date, attachment, assignee.
-- Returns an empty Textarea when nothing changed.
generateAutomemo c i f = do
  let -- Status change line, if any.
      st = if issueStatus i == commentStatus c
           then []
           else [[stext|ステータス #{issueStatus i} から #{commentStatus c} に変更.|]]
      -- Deadline set / cleared / changed.
      lm = case (issueLimitDatetime i, commentLimitDatetime c) of
        (Nothing, Nothing) -> []
        (Just x , Nothing) -> [[stext|期限 #{showDate x} を期限なしに変更.|]]
        (Nothing, Just y ) -> [[stext|期限を #{showDate y} に設定.|]]
        (Just x , Just y ) -> if x == y
                              then []
                              else [[stext|期限を #{showDate x} から #{showDate y} に変更.|]]
      -- Reminder date set / cleared / changed.
      rm = case (issueReminderdate i, commentReminderdate c) of
        (Nothing, Nothing) -> []
        (Just x , Nothing) -> [[stext|通知日 #{showText x} を通知なしに変更.|]]
        (Nothing, Just y ) -> [[stext|通知日を #{showText y} に設定.|]]
        (Just x , Just y ) -> if x == y
                              then []
                              else [[stext|通知日を #{showText x} から #{showText y} に変更.|]]
      -- Attachment line, if a file was stored.
      af = case f of
        Nothing -> []
        Just (_, fname) -> [[stext|ファイル #{fname} を添付.|]]
  -- Assignee change needs DB lookups to show user names.
  as <- case (issueAssign i, commentAssign c) of
    (Nothing, Nothing) -> return []
    (Just x , Nothing) -> do
      x' <- get404 x
      return [[stext|担当者 #{userFullName x'} を担当者なしに変更.|]]
    (Nothing, Just y ) -> do
      y' <- get404 y
      return [[stext|担当者を #{userFullName y'} に設定.|]]
    (Just x , Just y ) -> do
      x' <- get404 x
      y' <- get404 y
      if x' == y'
        then return []
        else return [[stext|担当者を #{userFullName x'} から #{userFullName y'} に変更.|]]
  return $ Textarea $ fromLazy $ L.intercalate "\n" $ st <> as <> lm <> rm <> af
  where
    -- Convert lazy Text back to strict for the Textarea wrapper.
    fromLazy = T.pack . L.unpack
| cutsea110/BISocie | Handler/Issue.hs | bsd-3-clause | 28,326 | 0 | 24 | 9,077 | 8,697 | 4,389 | 4,308 | -1 | -1 |
module Set.Utils where
import Data.List (tails)
import System.Random (newStdGen, RandomGen, randomR)
-------------------------------------------------------------------------------
-- List utilities--------------------------------------------------------------
-------------------------------------------------------------------------------
-- | 'groups' breaks a list into sublists of the given size. The final
-- resulting group may contain fewer elements than the given size.
-- Property: For all positive n. concat (groups n xs) == xs
--
-- The size must be positive for non-empty input; a non-positive size
-- previously looped forever, and now fails fast with an 'error'.
groups :: Int -> [a] -> [[a]]
groups _ [] = []
groups n xs
  | n <= 0    = error "Set.Utils.groups: group size must be positive"
  | otherwise = as : groups n bs
  where
    (as,bs) = splitAt n xs
-- | Remove the first occurrence of the given element from the list.
-- Yields 'Nothing' when the element does not occur at all.
delete1 :: Eq a => a -> [a] -> Maybe [a]
delete1 _ [] = Nothing
delete1 x (y:ys)
  | x == y    = Just ys
  | otherwise = case delete1 x ys of
      Nothing   -> Nothing
      Just rest -> Just (y : rest)
-- | Look up the element at a 0-based position.  Yields 'Nothing' for a
-- negative position or a position past the end of the list.
index :: Int -> [a] -> Maybe a
index n xs
  | n < 0     = Nothing
  | otherwise = go n xs
  where
    go 0 (x:_)  = Just x
    go k (_:ys) = go (k-1) ys
    go _ []     = Nothing
-- | Pull out the element at the given 0-based position, returning it
-- together with the remaining elements in their original order.
-- Calls 'error' when the position is beyond the end of the list.
select :: Int -> [a] -> (a,[a])
select _ [] = error "select: index too large"
select n (x:xs)
  | n == 0    = (x, xs)
  | otherwise = let (picked, rest) = select (n-1) xs
                in (picked, x : rest)
-- | Like 'init' but total: the empty list is returned unchanged
-- instead of raising an error.
init' :: [a] -> [a]
init' xs = zipWith const xs (drop 1 xs)
-------------------------------------------------------------------------------
-- Other utilities ------------------------------------------------------------
-------------------------------------------------------------------------------
-- | All unordered pairs of distinct positions, each pair appearing
-- once with its components in original list order.
chooseTwo :: [a] -> [(a,a)]
chooseTwo xs = concatMap pairsWithHead (tails xs)
  where
    pairsWithHead (a:as) = map ((,) a) as
    pairsWithHead []     = []
-- | Convert whole seconds to the microseconds 'threadDelay' expects.
seconds :: Int -> Int
seconds = (1000000 *)
-------------------------------------------------------------------------------
-- List shuffling utilities ---------------------------------------------------
-------------------------------------------------------------------------------
-- | Run 'shuffle' with a fresh generator obtained from 'newStdGen'.
shuffleIO :: [a] -> IO [a]
shuffleIO xs = do
  gen <- newStdGen
  return (fst (shuffle xs gen))
-- | Shuffle a list with the supplied generator, returning the shuffled
-- list together with the generator's final state.
shuffle :: RandomGen g => [a] -> g -> ([a], g)
shuffle xs gen = shuffle' (length xs) xs gen
-- | Worker for 'shuffle': @n@ is the length of the remaining list.
-- Each step draws a uniform index with 'randomR', extracts that element
-- via 'select', and recurses on the rest, threading the generator.
shuffle' :: RandomGen g => Int -> [a] -> g -> ([a], g)
shuffle' _ [] g = ([], g)
shuffle' n xs g =
  let remaining       = n - 1
      (i, g')         = randomR (0, remaining) g
      (chosen, rest)  = select i xs
      (shuffled, g'') = shuffle' remaining rest g'
  in (chosen : shuffled, g'')
-------------------------------------------------------------------------------
-- Text manipulation utilities ------------------------------------------------
-------------------------------------------------------------------------------
-- | Centre a string within a field of the given width, padding with
-- spaces; when the slack is odd the extra space goes on the right.
-- Strings longer than the width are returned unpadded.
centerText :: Int -> String -> String
centerText width xs = pad leftGap ++ xs ++ pad rightGap
  where
    slack    = width - length xs
    leftGap  = slack `div` 2
    rightGap = (slack + 1) `div` 2
    pad n    = replicate n ' '
-- | 'leftPadText' right-aligns the given string in a field of the given
-- width by padding on the left with spaces.
leftPadText :: Int -> String -> String
leftPadText width xs = padding ++ xs
  where
    padding = replicate (width - length xs) ' '
-- | 'rightPadText' left-aligns the given string in a field of the given
-- width by padding on the right with spaces.
rightPadText :: Int -> String -> String
rightPadText width xs = xs ++ replicate padCount ' '
  where
    padCount = width - length xs
| glguy/set-game | src/Set/Utils.hs | bsd-3-clause | 3,768 | 0 | 12 | 723 | 968 | 527 | 441 | 47 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Parsers where
import Control.Applicative ((*>), (<*), (<$>), (<*>))
import Data.Char
import Data.Monoid ((<>))
import Text.Parsec
import Text.Parsec.String (Parser)
import Types
import Parsers.Color
import Parsers.Selector
{----------------------------------------------------------------------------------------------------{
| Primitives
}----------------------------------------------------------------------------------------------------}
-- | One CSS property name: one or more letters and hyphens.
property :: Parser String
property = many1 (satisfy isPropertyChar)
    <?> "CSS property (eg. `display` or `-moz-border-radius`)"
  where
    isPropertyChar c = c == '-' || isAlpha c
--value :: Parser [Value]
{----------------------------------------------------------------------------------------------------{
| Selectors
}----------------------------------------------------------------------------------------------------}
{-
Selectors:
* element: body, p
* class: .foo
* id: #bar
* descendant: body p
* direct descendant: body > p
* adjacent sibling: p + p
* sibling: p ~ p
* attribute: input[type], input[type="text"]
* pseudoclass: p:last-child, p:nth(2n+1), p:last-of-type, input:focus, p:first-letter, div:empty
-}
--selector :: Parser ByteString
{----------------------------------------------------------------------------------------------------{
| Media Queries
}----------------------------------------------------------------------------------------------------}
{-
@media not screen and (min-width: 30em), (max-width: 30em) {
.foo {
color: red;
}
}
Rules:
* Comma is used to delimit individual queries (your `or` operator)
* The `and` keyword must be used to join query information together
* The `not` keyword can only be used at the beginning and negates the entire query
	* Parentheses must surround comparative values like min-width, pixel-density, etc.
-}
{----------------------------------------------------------------------------------------------------{
| Feature Queries
}----------------------------------------------------------------------------------------------------}
{-
http://www.w3.org/TR/css3-conditional/#at-supports
@supports ((display: flex) or (display: -webkit-flex) or (display: -ms-flexbox)) and (background: red) {
.foo {
color: red
}
}
Rules:
* All property/value pairs must be enclosed in parentheses, the value information cannot be omitted
* The `and` and `or` operators cannot be mixed, you must use parentheses to remove ambiguity (per the spec)
-} | cimmanon/classi | src/Parsers.hs | bsd-3-clause | 2,749 | 0 | 12 | 555 | 132 | 85 | 47 | 13 | 1 |
{-
(c) The University of Glasgow 2006
(c) The AQUA Project, Glasgow University, 1998
This module contains definitions for the IdInfo for things that
have a standard form, namely:
- data constructors
- record selectors
- method and superclass selectors
- primitive operations
-}
{-# LANGUAGE CPP #-}
module MkId (
mkDictFunId, mkDictFunTy, mkDictSelId, mkDictSelRhs,
mkPrimOpId, mkFCallId,
wrapNewTypeBody, unwrapNewTypeBody,
wrapFamInstBody, unwrapFamInstScrut,
wrapTypeUnbranchedFamInstBody, unwrapTypeUnbranchedFamInstScrut,
DataConBoxer(..), mkDataConRep, mkDataConWorkId,
-- And some particular Ids; see below for why they are wired in
wiredInIds, ghcPrimIds,
unsafeCoerceName, unsafeCoerceId, realWorldPrimId,
voidPrimId, voidArgId,
nullAddrId, seqId, lazyId, lazyIdKey, runRWId,
coercionTokenId, magicDictId, coerceId,
proxyHashId,
-- Re-export error Ids
module PrelRules
) where
#include "HsVersions.h"
import Rules
import TysPrim
import TysWiredIn
import PrelRules
import Type
import FamInstEnv
import Coercion
import TcType
import MkCore
import CoreUtils ( exprType, mkCast )
import CoreUnfold
import Literal
import TyCon
import CoAxiom
import Class
import NameSet
import VarSet
import Name
import PrimOp
import ForeignCall
import DataCon
import Id
import IdInfo
import Demand
import CoreSyn
import Unique
import UniqSupply
import PrelNames
import BasicTypes hiding ( SuccessFlag(..) )
import Util
import Pair
import DynFlags
import Outputable
import FastString
import ListSetOps
import qualified GHC.LanguageExtensions as LangExt
import Data.Maybe ( maybeToList )
{-
************************************************************************
* *
\subsection{Wired in Ids}
* *
************************************************************************
Note [Wired-in Ids]
~~~~~~~~~~~~~~~~~~~
There are several reasons why an Id might appear in the wiredInIds:
(1) The ghcPrimIds are wired in because they can't be defined in
    Haskell at all, although they can be defined in Core.  They have
compulsory unfoldings, so they are always inlined and they have
no definition site. Their home module is GHC.Prim, so they
also have a description in primops.txt.pp, where they are called
'pseudoops'.
(2) The 'error' function, eRROR_ID, is wired in because we don't yet have
a way to express in an interface file that the result type variable
is 'open'; that is can be unified with an unboxed type
[The interface file format now carry such information, but there's
no way yet of expressing at the definition site for these
error-reporting functions that they have an 'open'
result type. -- sof 1/99]
(3) Other error functions (rUNTIME_ERROR_ID) are wired in (a) because
the desugarer generates code that mentions them directly, and
(b) for the same reason as eRROR_ID
(4) lazyId is wired in because the wired-in version overrides the
strictness of the version defined in GHC.Base
In cases (2-4), the function has a definition in a library module, and
can be called; but the wired-in version means that the details are
never read from that module's interface file; instead, the full definition
is right here.
-}
-- | All Ids wired into the compiler: the magic Ids with compulsory or
-- special unfoldings, plus the error Ids.  See Note [Wired-in Ids].
wiredInIds :: [Id]
wiredInIds
  = [lazyId, dollarId, oneShotId, runRWId]
  ++ errorIds           -- Defined in MkCore
  ++ ghcPrimIds
-- These Ids are exported from GHC.Prim
-- | The subset of wired-in Ids whose home module is GHC.Prim; they
-- cannot be written in Haskell source (case (1) of Note [Wired-in Ids]).
ghcPrimIds :: [Id]
ghcPrimIds
  = [   -- These can't be defined in Haskell, but they have
        -- perfectly reasonable unfoldings in Core
    realWorldPrimId,
    voidPrimId,
    unsafeCoerceId,
    nullAddrId,
    seqId,
    magicDictId,
    coerceId,
    proxyHashId
    ]
{-
************************************************************************
* *
\subsection{Data constructors}
* *
************************************************************************
The wrapper for a constructor is an ordinary top-level binding that evaluates
any strict args, unboxes any args that are going to be flattened, and calls
the worker.
We're going to build a constructor that looks like:
data (Data a, C b) => T a b = T1 !a !Int b
T1 = /\ a b ->
\d1::Data a, d2::C b ->
\p q r -> case p of { p ->
case q of { q ->
Con T1 [a,b] [p,q,r]}}
Notice that
* d2 is thrown away --- a context in a data decl is used to make sure
one *could* construct dictionaries at the site the constructor
is used, but the dictionary isn't actually used.
* We have to check that we can construct Data dictionaries for
the types a and Int. Once we've done that we can throw d1 away too.
* We use (case p of q -> ...) to evaluate p, rather than "seq" because
all that matters is that the arguments are evaluated. "seq" is
very careful to preserve evaluation order, which we don't need
to be here.
You might think that we could simply give constructors some strictness
info, like PrimOps, and let CoreToStg do the let-to-case transformation.
But we don't do that because in the case of primops and functions strictness
is a *property* not a *requirement*. In the case of constructors we need to
do something active to evaluate the argument.
Making an explicit case expression allows the simplifier to eliminate
it in the (common) case where the constructor arg is already evaluated.
Note [Wrappers for data instance tycons]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In the case of data instances, the wrapper also applies the coercion turning
the representation type into the family instance type to cast the result of
the wrapper. For example, consider the declarations
data family Map k :: * -> *
data instance Map (a, b) v = MapPair (Map a (Pair b v))
The tycon to which the datacon MapPair belongs gets a unique internal
name of the form :R123Map, and we call it the representation tycon.
In contrast, Map is the family tycon (accessible via
tyConFamInst_maybe). A coercion allows you to move between
representation and family type. It is accessible from :R123Map via
tyConFamilyCoercion_maybe and has kind
Co123Map a b v :: {Map (a, b) v ~ :R123Map a b v}
The wrapper and worker of MapPair get the types
-- Wrapper
$WMapPair :: forall a b v. Map a (Map a b v) -> Map (a, b) v
$WMapPair a b v = MapPair a b v `cast` sym (Co123Map a b v)
-- Worker
MapPair :: forall a b v. Map a (Map a b v) -> :R123Map a b v
This coercion is conditionally applied by wrapFamInstBody.
It's a bit more complicated if the data instance is a GADT as well!
data instance T [a] where
T1 :: forall b. b -> T [Maybe b]
Hence we translate to
-- Wrapper
$WT1 :: forall b. b -> T [Maybe b]
$WT1 b v = T1 (Maybe b) b (Maybe b) v
`cast` sym (Co7T (Maybe b))
-- Worker
T1 :: forall c b. (c ~ Maybe b) => b -> :R7T c
-- Coercion from family type to representation type
Co7T a :: T [a] ~ :R7T a
Note [Newtype datacons]
~~~~~~~~~~~~~~~~~~~~~~~
The "data constructor" for a newtype should always be vanilla. At one
point this wasn't true, because the newtype arising from
class C a => D a
looked like
newtype T:D a = D:D (C a)
so the data constructor for T:C had a single argument, namely the
predicate (C a). But now we treat that as an ordinary argument, not
part of the theta-type, so all is well.
************************************************************************
* *
\subsection{Dictionary selectors}
* *
************************************************************************
Selecting a field for a dictionary. If there is just one field, then
there's nothing to do.
Dictionary selectors may get nested forall-types. Thus:
class Foo a where
op :: forall b. Ord b => a -> b -> b
Then the top-level type for op is
op :: forall a. Foo a =>
forall b. Ord b =>
a -> b -> b
This is unlike ordinary record selectors, which have all the for-alls
at the outside. When dealing with classes it's very convenient to
recover the original type signature from the class op selector.
-}
-- | Make the Id for a dictionary selector (superclass or method) of the
-- given class.  For newtype classes the selector gets an always-inline
-- unfolding; otherwise it gets a built-in rule that selects directly
-- from a visible dictionary application.
mkDictSelId :: Name          -- Name of one of the *value* selectors
                             -- (dictionary superclass or method)
            -> Class -> Id
mkDictSelId name clas
  = mkGlobalId (ClassOpId clas) name sel_ty info
  where
    tycon      = classTyCon clas
    sel_names  = map idName (classAllSelIds clas)
    new_tycon  = isNewTyCon tycon
    [data_con] = tyConDataCons tycon
    tyvars     = dataConUnivTyVars data_con
    arg_tys    = dataConRepArgTys data_con  -- Includes the dictionary superclasses
    -- Position of this selector among all of the class's selectors.
    val_index  = assoc "MkId.mkDictSelId" (sel_names `zip` [0..]) name

    sel_ty = mkSpecForAllTys tyvars (mkFunTy (mkClassPred clas (mkTyVarTys tyvars))
                                             (getNth arg_tys val_index))

    base_info = noCafIdInfo
                `setArityInfo`      1
                `setStrictnessInfo` strict_sig

    info | new_tycon
         = base_info `setInlinePragInfo` alwaysInlinePragma
                     `setUnfoldingInfo`  mkInlineUnfolding (Just 1) (mkDictSelRhs clas val_index)
                   -- See Note [Single-method classes] in TcInstDcls
                   -- for why alwaysInlinePragma
         | otherwise
         = base_info `setRuleInfo` mkRuleInfo [rule]
                   -- Add a magic BuiltinRule, but no unfolding
                   -- so that the rule is always available to fire.
                   -- See Note [ClassOp/DFun selection] in TcInstDcls

    n_ty_args = length tyvars

    -- This is the built-in rule that goes
    --      op (dfT d1 d2) --->  opT d1 d2
    rule = BuiltinRule { ru_name = fsLit "Class op " `appendFS`
                                     occNameFS (getOccName name)
                       , ru_fn    = name
                       , ru_nargs = n_ty_args + 1
                       , ru_try   = dictSelRule val_index n_ty_args }

        -- The strictness signature is of the form U(AAAVAAAA) -> T
        -- where the V depends on which item we are selecting
        -- It's worth giving one, so that absence info etc is generated
        -- even if the selector isn't inlined

    strict_sig = mkClosedStrictSig [arg_dmd] topRes
    arg_dmd | new_tycon = evalDmd
            | otherwise = mkManyUsedDmd $
                          mkProdDmd [ if name == sel_name then evalDmd else absDmd
                                    | sel_name <- sel_names ]
-- | Build the right-hand side for a dictionary selector: a lambda over
-- the class tyvars and the dictionary, selecting the indexed component
-- (by unwrapping for a newtype class, or by a case for a data class).
mkDictSelRhs :: Class
             -> Int         -- 0-indexed selector among (superclasses ++ methods)
             -> CoreExpr
mkDictSelRhs clas val_index
  = mkLams tyvars (Lam dict_id rhs_body)
  where
    tycon      = classTyCon clas
    new_tycon  = isNewTyCon tycon
    [data_con] = tyConDataCons tycon
    tyvars     = dataConUnivTyVars data_con
    arg_tys    = dataConRepArgTys data_con  -- Includes the dictionary superclasses

    the_arg_id = getNth arg_ids val_index
    pred       = mkClassPred clas (mkTyVarTys tyvars)
    dict_id    = mkTemplateLocal 1 pred
    arg_ids    = mkTemplateLocalsNum 2 arg_tys

    rhs_body | new_tycon = unwrapNewTypeBody tycon (mkTyVarTys tyvars) (Var dict_id)
             | otherwise = Case (Var dict_id) dict_id (idType the_arg_id)
                                [(DataAlt data_con, arg_ids, varToCoreExpr the_arg_id)]
                                -- varToCoreExpr needed for equality superclass selectors
                                --   sel a b d = case x of { MkC _ (g:a~b) _ -> CO g }
-- | The built-in rule attached to each dictionary selector.
-- Tries to persuade the argument to look like a constructor
-- application, using exprIsConApp_maybe, and then selects
-- from it
--       sel_i t1..tk (D t1..tk op1 ... opm) = opi
--
dictSelRule :: Int -> Arity -> RuleFun
dictSelRule val_index n_ty_args _ id_unf _ args
  | (dict_arg : _) <- drop n_ty_args args
  , Just (_, _, con_args) <- exprIsConApp_maybe id_unf dict_arg
  = Just (getNth con_args val_index)
  | otherwise
  = Nothing
{-
************************************************************************
* *
Data constructors
* *
************************************************************************
-}
-- | Make the worker Id for a data constructor.  For a newtype the
-- "worker" is really a coercion-wrapping wrapper with a compulsory
-- unfolding; for an ordinary data type it is the real curried worker.
mkDataConWorkId :: Name -> DataCon -> Id
mkDataConWorkId wkr_name data_con
  | isNewTyCon tycon
  = mkGlobalId (DataConWrapId data_con) wkr_name nt_wrap_ty nt_work_info
  | otherwise
  = mkGlobalId (DataConWorkId data_con) wkr_name alg_wkr_ty wkr_info
  where
    tycon = dataConTyCon data_con

        ----------- Workers for data types --------------
    alg_wkr_ty = dataConRepType data_con
    wkr_arity  = dataConRepArity data_con
    wkr_info   = noCafIdInfo
                 `setArityInfo`      wkr_arity
                 `setStrictnessInfo` wkr_sig
                 `setUnfoldingInfo`  evaldUnfolding  -- Record that it's evaluated,
                                                    -- even if arity = 0

    wkr_sig = mkClosedStrictSig (replicate wkr_arity topDmd) (dataConCPR data_con)
        --      Note [Data-con worker strictness]
        -- Notice that we do *not* say the worker is strict
        -- even if the data constructor is declared strict
        --      e.g.    data T = MkT !(Int,Int)
        -- Why?  Because the *wrapper* is strict (and its unfolding has case
        -- expressions that do the evals) but the *worker* itself is not.
        -- If we pretend it is strict then when we see
        --      case x of y -> $wMkT y
        -- the simplifier thinks that y is "sure to be evaluated" (because
        -- $wMkT is strict) and drops the case.  No, $wMkT is not strict.
        --
        -- When the simplifier sees a pattern
        --      case e of MkT x -> ...
        -- it uses the dataConRepStrictness of MkT to mark x as evaluated;
        -- but that's fine... dataConRepStrictness comes from the data con
        -- not from the worker Id.

        ----------- Workers for newtypes --------------
    (nt_tvs, _, nt_arg_tys, _) = dataConSig data_con
    res_ty_args  = mkTyVarTys nt_tvs
    nt_wrap_ty   = dataConUserType data_con
    nt_work_info = noCafIdInfo          -- The NoCaf-ness is set by noCafIdInfo
                   `setArityInfo` 1     -- Arity 1
                   `setInlinePragInfo` alwaysInlinePragma
                   `setUnfoldingInfo`  newtype_unf
    id_arg1      = mkTemplateLocal 1 (head nt_arg_tys)
    newtype_unf  = ASSERT2( isVanillaDataCon data_con &&
                            isSingleton nt_arg_tys, ppr data_con )
                              -- Note [Newtype datacons]
                   mkCompulsoryUnfolding $
                   mkLams nt_tvs $ Lam id_arg1 $
                   wrapNewTypeBody tycon res_ty_args (Var id_arg1)
-- | The constructed-product-result information for a data constructor's
-- worker: a CPR result for smallish constructors of real data types
-- with no existentials, and the top result otherwise.
dataConCPR :: DataCon -> DmdResult
dataConCPR con
  | isDataTyCon tycon          -- Real data types only; that is,
                               -- not unboxed tuples or newtypes
  , null (dataConExTyVars con) -- No existentials
  , wkr_arity > 0
  , wkr_arity <= mAX_CPR_SIZE
  = if is_prod then vanillaCprProdRes (dataConRepArity con)
               else cprSumRes (dataConTag con)
  | otherwise
  = topRes
  where
    is_prod   = isProductTyCon tycon
    tycon     = dataConTyCon con
    wkr_arity = dataConRepArity con
-- | Upper bound on constructor arity for which we record CPR info.
mAX_CPR_SIZE :: Arity
mAX_CPR_SIZE = 10
-- We do not treat very big tuples as CPR-ish:
--      a) for a start we get into trouble because there aren't
--         "enough" unboxed tuple types (a tiresome restriction,
--         but hard to fix),
--      b) more importantly, big unboxed tuples get returned mainly
--         on the stack, and are often then allocated in the heap
--         by the caller.  So doing CPR for them may in fact make
--         things worse.
{-
-------------------------------------------------
-- Data constructor representation
--
-- This is where we decide how to wrap/unwrap the
-- constructor fields
--
--------------------------------------------------
-}
-- | Unbox: bind rep vars by decomposing src var.
type Unboxer = Var -> UniqSM ([Var], CoreExpr -> CoreExpr)

-- | Box: build src arg using these rep vars.  'UnitBox' means the
-- source variable is already in representation form.
data Boxer = UnitBox | Boxer (TCvSubst -> UniqSM ([Var], CoreExpr))

-- | Bind these src-level vars, returning the
-- rep-level vars to bind in the pattern.
newtype DataConBoxer = DCB ([Type] -> [Var] -> UniqSM ([Var], [CoreBind]))
-- | Decide the wrapper/worker representation for a data constructor:
-- whether a wrapper is required at all and, if so, build the wrapper Id
-- (with its unfolding), the boxer used when pattern-matching, and the
-- representation argument types and strictness.
mkDataConRep :: DynFlags
             -> FamInstEnvs
             -> Name
             -> Maybe [HsImplBang]
                -- See Note [Bangs on imported data constructors]
             -> DataCon
             -> UniqSM DataConRep
mkDataConRep dflags fam_envs wrap_name mb_bangs data_con
  | not wrapper_reqd
  = return NoDataConRep

  | otherwise
  = do { wrap_args <- mapM newLocal wrap_arg_tys
       ; wrap_body <- mk_rep_app (wrap_args `zip` dropList eq_spec unboxers)
                                 initial_wrap_app

       ; let wrap_id = mkGlobalId (DataConWrapId data_con) wrap_name wrap_ty wrap_info
             wrap_info = noCafIdInfo
                         `setArityInfo`      wrap_arity
                             -- It's important to specify the arity, so that partial
                             -- applications are treated as values
                         `setInlinePragInfo` alwaysInlinePragma
                         `setUnfoldingInfo`  wrap_unf
                         `setStrictnessInfo` wrap_sig
                             -- We need to get the CAF info right here because TidyPgm
                             -- does not tidy the IdInfo of implicit bindings (like the wrapper)
                             -- so it does not make sure that the CAF info is sane

             wrap_sig = mkClosedStrictSig wrap_arg_dmds (dataConCPR data_con)
             wrap_arg_dmds = map mk_dmd arg_ibangs
             mk_dmd str | isBanged str = evalDmd
                        | otherwise    = topDmd
                 -- The Cpr info can be important inside INLINE rhss, where the
                 -- wrapper constructor isn't inlined.
                 -- And the argument strictness can be important too; we
                 -- may not inline a constructor when it is partially applied.
                 -- For example:
                 --      data W = C !Int !Int !Int
                 --      ...(let w = C x in ...(w p q)...)...
                 -- we want to see that w is strict in its two arguments

             wrap_unf = mkInlineUnfolding (Just wrap_arity) wrap_rhs
             wrap_tvs = (univ_tvs `minusList` map eqSpecTyVar eq_spec) ++ ex_tvs
             wrap_rhs = mkLams wrap_tvs $
                        mkLams wrap_args $
                        wrapFamInstBody tycon res_ty_args $
                        wrap_body

       ; return (DCR { dcr_wrap_id = wrap_id
                     , dcr_boxer   = mk_boxer boxers
                     , dcr_arg_tys = rep_tys
                     , dcr_stricts = rep_strs
                     , dcr_bangs   = arg_ibangs }) }

  where
    (univ_tvs, ex_tvs, eq_spec, theta, orig_arg_tys, _orig_res_ty)
      = dataConFullSig data_con
    res_ty_args = substTyVars (mkTopTCvSubst (map eqSpecPair eq_spec)) univ_tvs

    tycon       = dataConTyCon data_con  -- The representation TyCon (not family)
    wrap_ty     = dataConUserType data_con
    ev_tys      = eqSpecPreds eq_spec ++ theta
    all_arg_tys = ev_tys ++ orig_arg_tys
    -- Evidence arguments are never strict/unpacked.
    ev_ibangs   = map (const HsLazy) ev_tys
    orig_bangs  = dataConSrcBangs data_con

    wrap_arg_tys = theta ++ orig_arg_tys
    wrap_arity   = length wrap_arg_tys
             -- The wrap_args are the arguments *other than* the eq_spec
             -- Because we are going to apply the eq_spec args manually in the
             -- wrapper

    -- Use recorded decisions from the interface file when available;
    -- see Note [Bangs on imported data constructors].
    arg_ibangs =
      case mb_bangs of
        Nothing    -> zipWith (dataConSrcToImplBang dflags fam_envs)
                              orig_arg_tys orig_bangs
        Just bangs -> bangs

    (rep_tys_w_strs, wrappers)
      = unzip (zipWith dataConArgRep all_arg_tys (ev_ibangs ++ arg_ibangs))

    (unboxers, boxers)  = unzip wrappers
    (rep_tys, rep_strs) = unzip (concat rep_tys_w_strs)

    wrapper_reqd = not (isNewTyCon tycon)  -- Newtypes have only a worker
                && (any isBanged (ev_ibangs ++ arg_ibangs)
                      -- Some forcing/unboxing (includes eq_spec)
                    || isFamInstTyCon tycon  -- Cast result
                    || (not $ null eq_spec)) -- GADT

    initial_wrap_app = Var (dataConWorkId data_con)
                       `mkTyApps`  res_ty_args
                       `mkVarApps` ex_tvs
                       `mkCoApps`  map (mkReflCo Nominal . eqSpecType) eq_spec

    -- Build the boxer used when desugaring a pattern match on this
    -- constructor: re-boxes the representation vars into source vars.
    mk_boxer :: [Boxer] -> DataConBoxer
    mk_boxer boxers = DCB (\ ty_args src_vars ->
                      do { let (ex_vars, term_vars) = splitAtList ex_tvs src_vars
                               subst1 = mkTopTCvSubst (univ_tvs `zip` ty_args)
                               subst2 = extendTCvSubstList subst1 ex_tvs
                                                           (mkTyVarTys ex_vars)
                         ; (rep_ids, binds) <- go subst2 boxers term_vars
                         ; return (ex_vars ++ rep_ids, binds) } )

    go _ [] src_vars = ASSERT2( null src_vars, ppr data_con ) return ([], [])
    go subst (UnitBox : boxers) (src_var : src_vars)
      = do { (rep_ids2, binds) <- go subst boxers src_vars
           ; return (src_var : rep_ids2, binds) }
    go subst (Boxer boxer : boxers) (src_var : src_vars)
      = do { (rep_ids1, arg)   <- boxer subst
           ; (rep_ids2, binds) <- go subst boxers src_vars
           ; return (rep_ids1 ++ rep_ids2, NonRec src_var arg : binds) }
    go _ (_:_) [] = pprPanic "mk_boxer" (ppr data_con)

    -- Build the wrapper body: unbox each wrapper argument and apply the
    -- resulting representation vars to the worker application.
    mk_rep_app :: [(Id,Unboxer)] -> CoreExpr -> UniqSM CoreExpr
    mk_rep_app [] con_app
      = return con_app
    mk_rep_app ((wrap_arg, unboxer) : prs) con_app
      = do { (rep_ids, unbox_fn) <- unboxer wrap_arg
           ; expr <- mk_rep_app prs (mkVarApps con_app rep_ids)
           ; return (unbox_fn expr) }
{-
Note [Bangs on imported data constructors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We pass Maybe [HsImplBang] to mkDataConRep to make use of HsImplBangs
from imported modules.
- Nothing <=> use HsSrcBangs
- Just bangs <=> use HsImplBangs
For imported types we can't work it all out from the HsSrcBangs,
because we want to be very sure to follow what the original module
(where the data type was declared) decided, and that depends on what
flags were enabled when it was compiled. So we record the decisions in
the interface file.
The HsImplBangs passed are in 1-1 correspondence with the
dataConOrigArgTys of the DataCon.
-}
-------------------------
-- | Allocate a fresh system local (or coercion variable) of the given
-- type, with the fixed occurrence name "dt".
newLocal :: Type -> UniqSM Var
newLocal ty =
  getUniqueM >>= \uniq ->
    return (mkSysLocalOrCoVar (fsLit "dt") uniq ty)
-- | Unpack/Strictness decisions from source module.
-- Turns the user-written 'HsSrcBang' on a constructor field into the
-- 'HsImplBang' decision actually used for the field's representation.
dataConSrcToImplBang
   :: DynFlags
   -> FamInstEnvs
   -> Type         -- Original field type
   -> HsSrcBang    -- What the user wrote
   -> HsImplBang   -- What we decided

-- No source strictness annotation: the decision is driven entirely by
-- the StrictData extension (re-dispatch as if SrcStrict was written).
dataConSrcToImplBang dflags fam_envs arg_ty
                     (HsSrcBang ann unpk NoSrcStrict)
  | xopt LangExt.StrictData dflags -- StrictData => strict field
  = dataConSrcToImplBang dflags fam_envs arg_ty
                         (HsSrcBang ann unpk SrcStrict)
  | otherwise -- no StrictData => lazy field
  = HsLazy

-- An explicit source laziness annotation always yields a lazy field.
dataConSrcToImplBang _ _ _ (HsSrcBang _ _ SrcLazy)
  = HsLazy

-- Strict field: choose between plain strictness and unpacking.
dataConSrcToImplBang dflags fam_envs arg_ty
                     (HsSrcBang _ unpk_prag SrcStrict)
  | not (gopt Opt_OmitInterfacePragmas dflags) -- Don't unpack if -fomit-iface-pragmas
          -- Don't unpack if we aren't optimising; rather arbitrarily,
          -- we use -fomit-iface-pragmas as the indication
  , let mb_co   = topNormaliseType_maybe fam_envs arg_ty
                  -- Unwrap type families and newtypes
        arg_ty' = case mb_co of { Just (_,ty) -> ty; Nothing -> arg_ty }
  , isUnpackableType dflags fam_envs arg_ty'
  , (rep_tys, _) <- dataConArgUnpack arg_ty'
  , case unpk_prag of
      -- Without a user pragma, consult the -funbox-* flags
      NoSrcUnpack ->
        gopt Opt_UnboxStrictFields dflags
        || (gopt Opt_UnboxSmallStrictFields dflags
            && length rep_tys <= 1) -- See Note [Unpack one-wide fields]
      srcUnpack -> isSrcUnpacked srcUnpack
  = case mb_co of
      Nothing     -> HsUnpack Nothing
      Just (co,_) -> HsUnpack (Just co)  -- remember the unwrap coercion
  | otherwise -- Record the strict-but-no-unpack decision
  = HsStrict
-- | Wrappers/Workers and representation following Unpack/Strictness
-- decisions.  Given a field's type and the 'HsImplBang' decision for it,
-- produce the worker-side representation types (with strictness marks)
-- and the unboxer/boxer pair for the wrapper.
dataConArgRep
   :: Type
   -> HsImplBang
   -> ([(Type,StrictnessMark)]   -- Rep types
      ,(Unboxer,Boxer))

-- Lazy field: a single rep argument, passed through untouched.
dataConArgRep arg_ty HsLazy
  = ([(arg_ty, NotMarkedStrict)], (unitUnboxer, unitBoxer))

-- Strict but not unpacked: the unboxer forces the argument.
dataConArgRep arg_ty HsStrict
  = ([(arg_ty, MarkedStrict)], (seqUnboxer, unitBoxer))

-- Unpacked, no coercion: flatten into the field's own rep arguments.
dataConArgRep arg_ty (HsUnpack Nothing)
  | (rep_tys, wrappers) <- dataConArgUnpack arg_ty
  = (rep_tys, wrappers)

-- Unpacked through a newtype/family coercion: unpack the coerced
-- representation type and wrap the (un)boxers with the casts.
dataConArgRep _ (HsUnpack (Just co))
  | let co_rep_ty = pSnd (coercionKind co)
  , (rep_tys, wrappers) <- dataConArgUnpack co_rep_ty
  = (rep_tys, wrapCo co co_rep_ty wrappers)
-------------------------
-- | Wrap an unboxer/boxer pair with casts along the given coercion,
-- so the pair can be used on the pre-coercion argument type.
wrapCo :: Coercion -> Type -> (Unboxer, Boxer) -> (Unboxer, Boxer)
wrapCo co rep_ty (unbox_rep, box_rep)  -- co :: arg_ty ~ rep_ty
  = (unboxer, boxer)
  where
    -- Unboxing: first cast the source argument over to the rep type
    -- (binding the cast result), then run the underlying unboxer on it.
    unboxer arg_id = do { rep_id <- newLocal rep_ty
                        ; (rep_ids, rep_fn) <- unbox_rep rep_id
                        ; let co_bind = NonRec rep_id (Var arg_id `Cast` co)
                        ; return (rep_ids, Let co_bind . rep_fn) }
    -- Boxing: rebuild the rep-typed value, then cast back with sym co
    -- (substituted, since the boxer runs under a type substitution).
    boxer = Boxer $ \ subst ->
            do { (rep_ids, rep_expr)
                    <- case box_rep of
                         UnitBox -> do { rep_id <- newLocal (TcType.substTy subst rep_ty)
                                       ; return ([rep_id], Var rep_id) }
                         Boxer boxer -> boxer subst
               ; let sco = substCo subst co
               ; return (rep_ids, rep_expr `Cast` mkSymCo sco) }
------------------------
-- | Unboxer for a strict-but-not-unpacked field: wraps the body in a
-- default-only case on the field, forcing its evaluation first.
seqUnboxer :: Unboxer
seqUnboxer v = return ([v], force)
  where
    force body = Case (Var v) v (exprType body) [(DEFAULT, [], body)]

-- | Unboxer for a field needing no work: the field is its own rep arg.
unitUnboxer :: Unboxer
unitUnboxer v = return ([v], id)

-- | Boxer for a field that needs no reboxing at all.
unitBoxer :: Boxer
unitBoxer = UnitBox
-------------------------
-- | Compute the flattened representation of an UNPACKed field: the rep
-- types (with strictness) of the field's single data constructor, plus
-- the unboxer/boxer pair that takes the field apart and rebuilds it.
dataConArgUnpack
   :: Type
   ->  ( [(Type, StrictnessMark)]   -- Rep types
       , (Unboxer, Boxer) )
dataConArgUnpack arg_ty
  | Just (tc, tc_args) <- splitTyConApp_maybe arg_ty
  , Just con <- tyConSingleAlgDataCon_maybe tc
      -- NB: check for an *algebraic* data type
      -- A recursive newtype might mean that
      -- 'arg_ty' is a newtype
  , let rep_tys = dataConInstArgTys con tc_args
  = ASSERT( isVanillaDataCon con )
    ( rep_tys `zip` dataConRepStrictness con
    ,( \ arg_id ->
       -- Unboxer: case-scrutinise the field to expose its components
       do { rep_ids <- mapM newLocal rep_tys
          ; let unbox_fn body
                  = Case (Var arg_id) arg_id (exprType body)
                         [(DataAlt con, rep_ids, body)]
          ; return (rep_ids, unbox_fn) }
     , Boxer $ \ subst ->
       -- Boxer: re-apply the worker of 'con' to fresh rep binders
       do { rep_ids <- mapM (newLocal . TcType.substTyUnchecked subst) rep_tys
          ; return (rep_ids, Var (dataConWorkId con)
                             `mkTyApps` (substTys subst tc_args)
                             `mkVarApps` rep_ids ) } ) )
  | otherwise
  = pprPanic "dataConArgUnpack" (ppr arg_ty)
    -- An interface file specified Unpacked, but we couldn't unpack it
isUnpackableType :: DynFlags -> FamInstEnvs -> Type -> Bool
-- True if we can unpack the UNPACK'd argument type
-- See Note [Recursive unboxing]
-- We look "deeply" inside rather than relying on the DataCons
-- we encounter on the way, because otherwise we might well
-- end up relying on ourselves!
isUnpackableType dflags fam_envs ty
  | Just (tc, _) <- splitTyConApp_maybe ty
  , Just con <- tyConSingleAlgDataCon_maybe tc
  , isVanillaDataCon con
  = ok_con_args (unitNameSet (getName tc)) con
  | otherwise
  = False
  where
    -- 'tcs' is the set of tycon Names already seen on this path; it
    -- breaks loops when a type (transitively) mentions itself.
    ok_arg tcs (ty, bang) = not (attempt_unpack bang) || ok_ty tcs norm_ty
        where
          norm_ty = topNormaliseType fam_envs ty
    ok_ty tcs ty
      | Just (tc, _) <- splitTyConApp_maybe ty
      , let tc_name = getName tc
      =  not (tc_name `elemNameSet` tcs)
      && case tyConSingleAlgDataCon_maybe tc of
           Just con | isVanillaDataCon con
                   -> ok_con_args (tcs `extendNameSet` getName tc) con
           _ -> True
      | otherwise
      = True

    -- A constructor is OK if every field we might try to unpack is OK.
    ok_con_args tcs con
       = all (ok_arg tcs) (dataConOrigArgTys con `zip` dataConSrcBangs con)
         -- NB: dataConSrcBangs gives the *user* request;
         -- We'd get a black hole if we used dataConImplBangs

    -- Would the flags/pragmas make us even consider unpacking this field?
    attempt_unpack (HsSrcBang _ SrcUnpack NoSrcStrict)
      = xopt LangExt.StrictData dflags
    attempt_unpack (HsSrcBang _ SrcUnpack SrcStrict)
      = True
    attempt_unpack (HsSrcBang _ NoSrcUnpack SrcStrict)
      = True -- Be conservative
    attempt_unpack (HsSrcBang _ NoSrcUnpack NoSrcStrict)
      = xopt LangExt.StrictData dflags -- Be conservative
    attempt_unpack _ = False
{-
Note [Unpack one-wide fields]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The flag UnboxSmallStrictFields ensures that any field that can
(safely) be unboxed to a word-sized unboxed field, should be so unboxed.
For example:
data A = A Int#
newtype B = B A
data C = C !B
data D = D !C
data E = E !()
data F = F !D
data G = G !F !F
All of these should have an Int# as their representation, except
G which should have two Int#s.
However
data T = T !(S Int)
data S a = S !a
Here we can represent T with an Int#.
Note [Recursive unboxing]
~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data R = MkR {-# UNPACK #-} !S Int
data S = MkS {-# UNPACK #-} !Int
The representation arguments of MkR are the *representation* arguments
of S (plus Int); the rep args of MkS are Int#. This is all fine.
But be careful not to try to unbox this!
data T = MkT {-# UNPACK #-} !T Int
Because then we'd get an infinite number of arguments.
Here is a more complicated case:
data S = MkS {-# UNPACK #-} !T Int
data T = MkT {-# UNPACK #-} !S Int
Each of S and T must decide independently whether to unpack
and they had better not both say yes. So they must both say no.
Also behave conservatively when there is no UNPACK pragma
data T = MkS !T Int
with -funbox-strict-fields or -funbox-small-strict-fields
we need to behave as if there was an UNPACK pragma there.
But it's the *argument* type that matters. This is fine:
data S = MkS S !Int
because Int is non-recursive.
************************************************************************
* *
Wrapping and unwrapping newtypes and type families
* *
************************************************************************
-}
wrapNewTypeBody :: TyCon -> [Type] -> CoreExpr -> CoreExpr
-- The wrapper for the data constructor for a newtype looks like this:
--      newtype T a = MkT (a,Int)
--      MkT :: forall a. (a,Int) -> T a
--      MkT = /\a. \(x:(a,Int)). x `cast` sym (CoT a)
-- where CoT is the coercion TyCon associated with the newtype
--
-- The call (wrapNewTypeBody T [a] e) returns the
-- body of the wrapper, namely
--      e `cast` (CoT [a])
--
-- If a coercion constructor is provided in the newtype, then we use
-- it, otherwise the wrap/unwrap are both no-ops
--
-- If we are dealing with a newtype *instance*, we have a second coercion
-- identifying the family instance with the constructor of the newtype
-- instance.  This coercion is applied in any case (ie, composed with the
-- coercion constructor of the newtype or applied by itself).
wrapNewTypeBody tycon args result_expr
  = ASSERT( isNewTyCon tycon )
    wrapFamInstBody tycon args $
    mkCast result_expr (mkSymCo co)
  where
    -- Axiom instantiated at 'args'; sym'd to go rep type -> newtype.
    co = mkUnbranchedAxInstCo Representational (newTyConCo tycon) args []
-- When unwrapping, we do *not* apply any family coercion, because this will
-- be done via a CoPat by the type checker.  We have to do it this way as
-- computing the right type arguments for the coercion requires more than just
-- a splitting operation (cf, TcPat.tcConPat).
unwrapNewTypeBody :: TyCon -> [Type] -> CoreExpr -> CoreExpr
unwrapNewTypeBody tycon args result_expr
  = ASSERT( isNewTyCon tycon )
    -- Cast from the newtype to its representation type (no sym here).
    mkCast result_expr (mkUnbranchedAxInstCo Representational (newTyConCo tycon) args [])
-- If the type constructor is a representation type of a data instance, wrap
-- the expression into a cast adjusting the expression type, which is an
-- instance of the representation type, to the corresponding instance of the
-- family instance type.
-- See Note [Wrappers for data instance tycons]
wrapFamInstBody :: TyCon -> [Type] -> CoreExpr -> CoreExpr
wrapFamInstBody tycon args body
  | Just co_con <- tyConFamilyCoercion_maybe tycon
  = mkCast body (mkSymCo (mkUnbranchedAxInstCo Representational co_con args []))
  | otherwise
  = body   -- Not a data-instance tycon: nothing to do

-- Same as `wrapFamInstBody`, but for type family instances, which are
-- represented by a `CoAxiom`, and not a `TyCon`
wrapTypeFamInstBody :: CoAxiom br -> Int -> [Type] -> [Coercion]
                    -> CoreExpr -> CoreExpr
wrapTypeFamInstBody axiom ind args cos body
  = mkCast body (mkSymCo (mkAxInstCo Representational axiom ind args cos))

-- Specialisation of 'wrapTypeFamInstBody' to unbranched axioms, which
-- always have exactly one branch (index 0).
wrapTypeUnbranchedFamInstBody :: CoAxiom Unbranched -> [Type] -> [Coercion]
                              -> CoreExpr -> CoreExpr
wrapTypeUnbranchedFamInstBody axiom
  = wrapTypeFamInstBody axiom 0

-- Dual of 'wrapFamInstBody': cast a scrutinee from the family type to
-- its representation type (note: no sym on the coercion here).
unwrapFamInstScrut :: TyCon -> [Type] -> CoreExpr -> CoreExpr
unwrapFamInstScrut tycon args scrut
  | Just co_con <- tyConFamilyCoercion_maybe tycon
  = mkCast scrut (mkUnbranchedAxInstCo Representational co_con args []) -- data instances only
  | otherwise
  = scrut

-- Dual of 'wrapTypeFamInstBody' (again, no sym on the coercion).
unwrapTypeFamInstScrut :: CoAxiom br -> Int -> [Type] -> [Coercion]
                       -> CoreExpr -> CoreExpr
unwrapTypeFamInstScrut axiom ind args cos scrut
  = mkCast scrut (mkAxInstCo Representational axiom ind args cos)

-- Specialisation of 'unwrapTypeFamInstScrut' to unbranched axioms.
unwrapTypeUnbranchedFamInstScrut :: CoAxiom Unbranched -> [Type] -> [Coercion]
                                 -> CoreExpr -> CoreExpr
unwrapTypeUnbranchedFamInstScrut axiom
  = unwrapTypeFamInstScrut axiom 0
{-
************************************************************************
* *
\subsection{Primitive operations}
* *
************************************************************************
-}
-- | Make the wired-in Id for a primitive operation.  Type, arity and
-- strictness all come from the primop's signature ('primOpSig').
mkPrimOpId :: PrimOp -> Id
mkPrimOpId prim_op
  = id
  where
    (tyvars,arg_tys,res_ty, arity, strict_sig) = primOpSig prim_op
    ty   = mkSpecForAllTys tyvars (mkFunTys arg_tys res_ty)
    -- NB: 'name' mentions 'id' and 'id' mentions 'name'; the knot is
    -- tied by laziness.
    name = mkWiredInName gHC_PRIM (primOpOcc prim_op)
                         (mkPrimOpIdUnique (primOpTag prim_op))
                         (AnId id) UserSyntax
    id   = mkGlobalId (PrimOpId prim_op) name ty info

    info = noCafIdInfo
           `setRuleInfo`       mkRuleInfo (maybeToList $ primOpRules name prim_op)
           `setArityInfo`      arity
           `setStrictnessInfo` strict_sig
           `setInlinePragInfo` neverInlinePragma
               -- We give PrimOps a NOINLINE pragma so that we don't
               -- get silly warnings from Desugar.dsRule (the inline_shadows_rule
               -- test) about a RULE conflicting with a possible inlining
               -- cf Trac #7287
-- For each ccall we manufacture a separate CCallOpId, giving it
-- a fresh unique, a type that is correct for this particular ccall,
-- and a CCall structure that gives the correct details about calling
-- convention etc.
--
-- The *name* of this Id is a local name whose OccName gives the full
-- details of the ccall, type and all.  This means that the interface
-- file reader can reconstruct a suitable Id
mkFCallId :: DynFlags -> Unique -> ForeignCall -> Type -> Id
mkFCallId dflags uniq fcall ty
  = ASSERT( isEmptyVarSet (tyCoVarsOfType ty) )
    -- A CCallOpId should have no free type variables;
    -- when doing substitutions won't substitute over it
    mkGlobalId (FCallId fcall) name ty info
  where
    occ_str = showSDoc dflags (braces (ppr fcall <+> ppr ty))
    -- The "occurrence name" of a ccall is the full info about the
    -- ccall; it is encoded, but may have embedded spaces etc!

    name = mkFCallName uniq occ_str

    info = noCafIdInfo
           `setArityInfo`      arity
           `setStrictnessInfo` strict_sig

    -- Arity counts the value binders only, not the type binders.
    (bndrs, _) = tcSplitPiTys ty
    arity      = count isIdLikeBinder bndrs

    strict_sig = mkClosedStrictSig (replicate arity topDmd) topRes
    -- the call does not claim to be strict in its arguments, since they
    -- may be lifted (foreign import prim) and the called code doesn't
    -- necessarily force them. See Trac #11076.
{-
************************************************************************
* *
\subsection{DictFuns and default methods}
* *
************************************************************************
Note [Dict funs and default methods]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Dict funs and default methods are *not* ImplicitIds. Their definition
involves user-written code, so we can't figure out their strictness etc
based on fixed info, as we can for constructors and record selectors (say).
NB: See also Note [Exported LocalIds] in Id
-}
mkDictFunId :: Name          -- Name to use for the dict fun;
            -> [TyVar]       -- Quantified type variables
            -> ThetaType     -- Context of the dfun
            -> Class         -- Class of the instance
            -> [Type]        -- Instance types
            -> Id
-- Implements the DFun Superclass Invariant (see TcInstDcls)
-- See Note [Dict funs and default methods]
mkDictFunId dfun_name tvs theta clas tys
  = mkExportedLocalId (DFunId is_nt)
                      dfun_name
                      dfun_ty
  where
    is_nt = isNewTyCon (classTyCon clas)  -- True <=> class tycon is a newtype
    dfun_ty = mkDictFunTy tvs theta clas tys

-- | The type of a dictionary function: forall tvs. theta => clas tys
mkDictFunTy :: [TyVar] -> ThetaType -> Class -> [Type] -> Type
mkDictFunTy tvs theta clas tys
  = mkSpecSigmaTy tvs theta (mkClassPred clas tys)
{-
************************************************************************
* *
\subsection{Un-definable}
* *
************************************************************************
These Ids can't be defined in Haskell. They could be defined in
unfoldings in the wired-in GHC.Prim interface file, but we'd have to
ensure that they were definitely, definitely inlined, because there is
no curried identifier for them. That's what mkCompulsoryUnfolding
does. If we had a way to get a compulsory unfolding from an interface
file, we could do that, but we don't right now.
unsafeCoerce# isn't so much a PrimOp as a phantom identifier, that
just gets expanded into a type coercion wherever it occurs. Hence we
add it as a built-in Id with an unfolding here.
The type variables we use here are "open" type variables: this means
they can unify with both unlifted and lifted types. Hence we provide
another gun with which to shoot yourself in the foot.
-}
-- Names for the wired-in Ids defined in this section.  Each one ties
-- an OccName in its home module (gHC_PRIM, gHC_MAGIC or gHC_BASE) to a
-- fixed unique and to the corresponding Id defined below.
lazyIdName, unsafeCoerceName, nullAddrName, seqName,
   realWorldName, voidPrimIdName, coercionTokenName,
   magicDictName, coerceName, proxyName, dollarName, oneShotName,
   runRWName :: Name
unsafeCoerceName  = mkWiredInIdName gHC_PRIM  (fsLit "unsafeCoerce#")  unsafeCoerceIdKey  unsafeCoerceId
nullAddrName      = mkWiredInIdName gHC_PRIM  (fsLit "nullAddr#")      nullAddrIdKey      nullAddrId
seqName           = mkWiredInIdName gHC_PRIM  (fsLit "seq")            seqIdKey           seqId
realWorldName     = mkWiredInIdName gHC_PRIM  (fsLit "realWorld#")     realWorldPrimIdKey realWorldPrimId
voidPrimIdName    = mkWiredInIdName gHC_PRIM  (fsLit "void#")          voidPrimIdKey      voidPrimId
lazyIdName        = mkWiredInIdName gHC_MAGIC (fsLit "lazy")           lazyIdKey          lazyId
coercionTokenName = mkWiredInIdName gHC_PRIM  (fsLit "coercionToken#") coercionTokenIdKey coercionTokenId
magicDictName     = mkWiredInIdName gHC_PRIM  (fsLit "magicDict")      magicDictKey       magicDictId
coerceName        = mkWiredInIdName gHC_PRIM  (fsLit "coerce")         coerceKey          coerceId
proxyName         = mkWiredInIdName gHC_PRIM  (fsLit "proxy#")         proxyHashKey       proxyHashId
dollarName        = mkWiredInIdName gHC_BASE  (fsLit "$")              dollarIdKey        dollarId
oneShotName       = mkWiredInIdName gHC_MAGIC (fsLit "oneShot")        oneShotKey         oneShotId
runRWName         = mkWiredInIdName gHC_MAGIC (fsLit "runRW#")         runRWKey           runRWId
dollarId :: Id  -- Note [dollarId magic]
-- ($) is wired in so that its result type can be open-kinded (see the
-- Note); the unfolding is the obvious  \f x -> f x.
dollarId = pcMiscPrelId dollarName ty
             (noCafIdInfo `setUnfoldingInfo` unf)
  where
    fun_ty = mkFunTy alphaTy openBetaTy
    ty     = mkSpecForAllTys [levity2TyVar, alphaTyVar, openBetaTyVar] $
             mkFunTy fun_ty fun_ty
    unf    = mkInlineUnfolding (Just 2) rhs
    [f,x]  = mkTemplateLocals [fun_ty, alphaTy]
    rhs    = mkLams [levity2TyVar, alphaTyVar, openBetaTyVar, f, x] $
             App (Var f) (Var x)
------------------------------------------------
-- proxy# :: forall a. Proxy# a
proxyHashId :: Id
proxyHashId
  = pcMiscPrelId proxyName ty
       (noCafIdInfo `setUnfoldingInfo` evaldUnfolding) -- Note [evaldUnfoldings]
  where
    -- kv is a kind variable; tv is a type variable of kind kv
    ty   = mkSpecForAllTys [kv, tv] (mkProxyPrimTy k t)
    kv   = kKiVar
    k    = mkTyVarTy kv
    [tv] = mkTemplateTyVars [k]
    t    = mkTyVarTy tv
------------------------------------------------
-- unsafeCoerce# :: forall a b. a -> b
unsafeCoerceId :: Id
unsafeCoerceId
  = pcMiscPrelId unsafeCoerceName ty info
  where
    info = noCafIdInfo `setInlinePragInfo` alwaysInlinePragma
                       `setUnfoldingInfo`  mkCompulsoryUnfolding rhs
    -- Levity-polymorphic in both the argument and result types
    ty = mkSpecForAllTys [ levity1TyVar, levity2TyVar
                         , openAlphaTyVar, openBetaTyVar ]
                         (mkFunTy openAlphaTy openBetaTy)
    [x] = mkTemplateLocals [openAlphaTy]
    -- The compulsory unfolding is just an unsafe representational cast
    rhs = mkLams [ levity1TyVar, levity2TyVar
                 , openAlphaTyVar, openBetaTyVar
                 , x] $
          Cast (Var x) (mkUnsafeCo Representational openAlphaTy openBetaTy)
------------------------------------------------
nullAddrId :: Id
-- nullAddr# :: Addr#
-- The reason it is here is because we don't provide
-- a way to write this literal in Haskell.
nullAddrId = pcMiscPrelId nullAddrName addrPrimTy info
  where
    info = noCafIdInfo `setInlinePragInfo` alwaysInlinePragma
                       `setUnfoldingInfo`  mkCompulsoryUnfolding (Lit nullAddrLit)
------------------------------------------------
seqId :: Id     -- See Note [seqId magic]
seqId = pcMiscPrelId seqName ty info
  where
    info = noCafIdInfo `setInlinePragInfo` inline_prag
                       `setUnfoldingInfo`  mkCompulsoryUnfolding rhs
                       `setRuleInfo`       mkRuleInfo [seq_cast_rule]
    inline_prag
         = alwaysInlinePragma `setInlinePragmaActivation` ActiveAfter "0" 0
                  -- Make 'seq' not inline-always, so that simpleOptExpr
                  -- (see CoreSubst.simple_app) won't inline 'seq' on the
                  -- LHS of rules.  That way we can have rules for 'seq';
                  -- see Note [seqId magic]

    ty  = mkSpecForAllTys [alphaTyVar,betaTyVar]
          (mkFunTy alphaTy (mkFunTy betaTy betaTy))

    [x,y] = mkTemplateLocals [alphaTy, betaTy]
    -- The unfolding: \a b x y -> case x of _ -> y
    rhs = mkLams [alphaTyVar,betaTyVar,x,y] (Case (Var x) x betaTy [(DEFAULT, [], Var y)])

    -- See Note [Built-in RULES for seq]
    -- NB: ru_nargs = 3, not 4, to match the code in
    --     Simplify.rebuildCase which tries to apply this rule
    seq_cast_rule = BuiltinRule { ru_name  = fsLit "seq of cast"
                                , ru_fn    = seqName
                                , ru_nargs = 3
                                , ru_try   = match_seq_of_cast }
match_seq_of_cast :: RuleFun
    -- See Note [Built-in RULES for seq]
    -- Rewrites  seq @_ @res_ty (scrut |> co)  ==>  seq-like case on scrut
match_seq_of_cast _ _ _ [Type _, Type res_ty, Cast scrut co]
  = Just (fun `App` scrut)
  where
    fun      = Lam x $ Lam y $
               Case (Var x) x res_ty [(DEFAULT,[],Var y)]
               -- Generate a Case directly, not a call to seq, which
               -- might be ill-kinded if res_ty is unboxed
    [x,y]    = mkTemplateLocals [scrut_ty, res_ty]
    scrut_ty = pFst (coercionKind co)

-- Any other argument shape: the rule does not fire.
match_seq_of_cast _ _ _ _ = Nothing
------------------------------------------------
lazyId :: Id    -- See Note [lazyId magic]
-- Deliberately bare IdInfo (no strictness, no unfolding): everything
-- the compiler knows about 'lazy' comes from right here.
lazyId = pcMiscPrelId lazyIdName ty info
  where
    info = noCafIdInfo
    ty   = mkSpecForAllTys [alphaTyVar] (mkFunTy alphaTy alphaTy)
oneShotId :: Id -- See Note [The oneShot function]
oneShotId = pcMiscPrelId oneShotName ty info
  where
    info = noCafIdInfo `setInlinePragInfo` alwaysInlinePragma
                       `setUnfoldingInfo`  mkCompulsoryUnfolding rhs
    ty  = mkSpecForAllTys [ levity1TyVar, levity2TyVar
                          , openAlphaTyVar, openBetaTyVar ]
                          (mkFunTy fun_ty fun_ty)
    fun_ty = mkFunTy alphaTy betaTy
    [body, x] = mkTemplateLocals [fun_ty, openAlphaTy]
    -- x' is x with the one-shot flag set; that flag is the whole point
    -- of oneShot (see the Note)
    x' = setOneShotLambda x
    rhs = mkLams [ levity1TyVar, levity2TyVar
                 , openAlphaTyVar, openBetaTyVar
                 , body, x'] $
          Var body `App` Var x
runRWId :: Id -- See Note [runRW magic] in this module
-- NOINLINE until CorePrep, to stop floating out of its argument's body.
runRWId = pcMiscPrelId runRWName ty info
  where
    info = noCafIdInfo `setInlinePragInfo` neverInlinePragma
                       `setStrictnessInfo` strict_sig
                       `setArityInfo`      1
    strict_sig = mkClosedStrictSig [strictApply1Dmd] topRes
      -- Important to express its strictness,
      -- since it is not inlined until CorePrep
      -- Also see Note [runRW arg] in CorePrep

    -- State# RealWorld
    stateRW = mkTyConApp statePrimTyCon [realWorldTy]
    -- (# State# RealWorld, o #)
    ret_ty  = mkTupleTy Unboxed [stateRW, openAlphaTy]
    -- State# RealWorld -> (# State# RealWorld, o #)
    arg_ty  = stateRW `mkFunTy` ret_ty
    -- (State# RealWorld -> (# State# RealWorld, o #))
    --   -> (# State# RealWorld, o #)
    ty      = mkSpecForAllTys [levity1TyVar, openAlphaTyVar] $
              arg_ty `mkFunTy` ret_ty
--------------------------------------------------------------------------------
magicDictId :: Id  -- See Note [magicDictId magic]
-- A placeholder with the totally polymorphic type  forall a. a;
-- it is replaced by a built-in rule (see Note, prelude/PrelRules.hs).
magicDictId = pcMiscPrelId magicDictName ty info
  where
    info = noCafIdInfo `setInlinePragInfo` neverInlinePragma
    ty   = mkSpecForAllTys [alphaTyVar] alphaTy
--------------------------------------------------------------------------------
coerceId :: Id
-- coerce :: forall a b. Coercible * a b => a -> b
-- Its compulsory unfolding scrutinises the Coercible evidence and
-- casts the argument by the coercion found inside.
coerceId = pcMiscPrelId coerceName ty info
  where
    info = noCafIdInfo `setInlinePragInfo` alwaysInlinePragma
                       `setUnfoldingInfo`  mkCompulsoryUnfolding rhs
    eqRTy     = mkTyConApp coercibleTyCon  [ liftedTypeKind
                                           , alphaTy, betaTy ]
    eqRPrimTy = mkTyConApp eqReprPrimTyCon [ liftedTypeKind
                                           , liftedTypeKind
                                           , alphaTy, betaTy ]
    ty        = mkSpecForAllTys [alphaTyVar, betaTyVar] $
                mkFunTys [eqRTy, alphaTy] betaTy

    [eqR,x,eq] = mkTemplateLocals [eqRTy, alphaTy, eqRPrimTy]
    rhs = mkLams [alphaTyVar, betaTyVar, eqR, x] $
          mkWildCase (Var eqR) eqRTy betaTy $
          [(DataAlt coercibleDataCon, [eq], Cast (Var x) (mkCoVarCo eq))]
{-
Note [dollarId magic]
~~~~~~~~~~~~~~~~~~~~~
The only reason that ($) is wired in is so that its type can be
forall (a:*, b:Open). (a->b) -> a -> b
That is, the return type can be unboxed. E.g. this is OK
foo $ True where foo :: Bool -> Int#
because ($) doesn't inspect or move the result of the call to foo.
See Trac #8739.
There is a special typing rule for ($) in TcExpr, so the type of ($)
isn't looked at there, BUT Lint subsequently (and rightly) complains
if sees ($) applied to Int# (say), unless we give it a wired-in type
as we do here.
Note [Unsafe coerce magic]
~~~~~~~~~~~~~~~~~~~~~~~~~~
We define a *primitive*
GHC.Prim.unsafeCoerce#
and then in the base library we define the ordinary function
Unsafe.Coerce.unsafeCoerce :: forall (a:*) (b:*). a -> b
unsafeCoerce x = unsafeCoerce# x
Notice that unsafeCoerce has a civilized (albeit still dangerous)
polymorphic type, whose type args have kind *. So you can't use it on
unboxed values (unsafeCoerce 3#).
In contrast unsafeCoerce# is even more dangerous because you *can* use
it on unboxed things, (unsafeCoerce# 3#) :: Int. Its type is
forall (a:OpenKind) (b:OpenKind). a -> b
Note [seqId magic]
~~~~~~~~~~~~~~~~~~
'GHC.Prim.seq' is special in several ways.
a) In source Haskell its second arg can have an unboxed type
x `seq` (v +# w)
But see Note [Typing rule for seq] in TcExpr, which
explains why we give seq itself an ordinary type
seq :: forall a b. a -> b -> b
and treat it as a language construct from a typing point of view.
b) Its fixity is set in LoadIface.ghcPrimIface
c) It has quite a bit of desugaring magic.
See DsUtils.hs Note [Desugaring seq (1)] and (2) and (3)
d) There is some special rule handing: Note [User-defined RULES for seq]
Note [User-defined RULES for seq]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Roman found situations where he had
case (f n) of _ -> e
where he knew that f (which was strict in n) would terminate if n did.
Notice that the result of (f n) is discarded. So it makes sense to
transform to
case n of _ -> e
Rather than attempt some general analysis to support this, I've added
enough support that you can do this using a rewrite rule:
RULE "f/seq" forall n. seq (f n) = seq n
You write that rule. When GHC sees a case expression that discards
its result, it mentally transforms it to a call to 'seq' and looks for
a RULE. (This is done in Simplify.rebuildCase.) As usual, the
correctness of the rule is up to you.
VERY IMPORTANT: to make this work, we give the RULE an arity of 1, not 2.
If we wrote
RULE "f/seq" forall n e. seq (f n) e = seq n e
with rule arity 2, then two bad things would happen:
- The magical desugaring done in Note [seqId magic] item (c)
for saturated application of 'seq' would turn the LHS into
a case expression!
- The code in Simplify.rebuildCase would need to actually supply
the value argument, which turns out to be awkward.
Note [Built-in RULES for seq]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We also have the following built-in rule for seq
seq (x `cast` co) y = seq x y
This eliminates unnecessary casts and also allows other seq rules to
match more often. Notably,
seq (f x `cast` co) y --> seq (f x) y
and now a user-defined rule for seq (see Note [User-defined RULES for seq])
may fire.
Note [lazyId magic]
~~~~~~~~~~~~~~~~~~~
lazy :: forall a?. a? -> a? (i.e. works for unboxed types too)
Used to lazify pseq: pseq a b = a `seq` lazy b
Also, no strictness: by being a built-in Id, all the info about lazyId comes from here,
not from GHC.Base.hi. This is important, because the strictness
analyser will spot it as strict!
Also no unfolding in lazyId: it gets "inlined" by a HACK in CorePrep.
It's very important to do this inlining *after* unfoldings are exposed
in the interface file. Otherwise, the unfolding for (say) pseq in the
interface file will not mention 'lazy', so if we inline 'pseq' we'll totally
miss the very thing that 'lazy' was there for in the first place.
See Trac #3259 for a real world example.
lazyId is defined in GHC.Base, so we don't *have* to inline it. If it
appears un-applied, we'll end up just calling it.
Note [runRW magic]
~~~~~~~~~~~~~~~~~~
Some definitions, for instance @runST@, must have careful control over float out
of the bindings in their body. Consider this use of @runST@,
f x = runST ( \ s -> let (a, s') = newArray# 100 [] s
(_, s'') = fill_in_array_or_something a x s'
in freezeArray# a s'' )
If we inline @runST@, we'll get:
f x = let (a, s') = newArray# 100 [] realWorld#{-NB-}
(_, s'') = fill_in_array_or_something a x s'
in freezeArray# a s''
And now if we allow the @newArray#@ binding to float out to become a CAF,
we end up with a result that is totally and utterly wrong:
f = let (a, s') = newArray# 100 [] realWorld#{-NB-} -- YIKES!!!
in \ x ->
let (_, s'') = fill_in_array_or_something a x s'
in freezeArray# a s''
All calls to @f@ will share a {\em single} array! Clearly this is nonsense and
must be prevented.
This is what @runRW#@ gives us: by being inlined extremely late in the
optimization (right before lowering to STG, in CorePrep), we can ensure that
no further floating will occur. This allows us to safely inline things like
@runST@, which are otherwise needlessly expensive (see #10678 and #5916).
While the definition of @GHC.Magic.runRW#@, we override its type in @MkId@
to be open-kinded,
runRW# :: forall (lev :: Levity). (o :: TYPE lev)
=> (State# RealWorld -> (# State# RealWorld, o #))
-> (# State# RealWorld, o #)
Note [The oneShot function]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
In the context of making left-folds fuse somewhat okish (see ticket #7994
and Note [Left folds via right fold]) it was determined that it would be useful
if library authors could explicitly tell the compiler that a certain lambda is
called at most once. The oneShot function allows that.
'oneShot' is open kinded, i.e. the type variables can refer to unlifted
types as well (Trac #10744); e.g.
oneShot (\x:Int# -> x +# 1#)
Like most magic functions it has a compulsory unfolding, so there is no need
for a real definition somewhere. We have one in GHC.Magic for the convenience
of putting the documentation there.
It uses `setOneShotLambda` on the lambda's binder. That is the whole magic:
A typical call looks like
oneShot (\y. e)
after unfolding the definition `oneShot = \f \x[oneshot]. f x` we get
(\f \x[oneshot]. f x) (\y. e)
--> \x[oneshot]. ((\y.e) x)
--> \x[oneshot] e[x/y]
which is what we want.
It is only effective if the one-shot info survives as long as possible; in
particular it must make it into the interface in unfoldings. See Note [Preserve
OneShotInfo] in CoreTidy.
Also see https://ghc.haskell.org/trac/ghc/wiki/OneShot.
Note [magicDictId magic]
~~~~~~~~~~~~~~~~~~~~~~~~~
The identifier `magicDict` is just a place-holder, which is used to
implement a primitive that we cannot define in Haskell but we can write
in Core. It is declared with a place-holder type:
magicDict :: forall a. a
The intention is that the identifier will be used in a very specific way,
to create dictionaries for classes with a single method. Consider a class
like this:
class C a where
f :: T a
We are going to use `magicDict`, in conjunction with a built-in Prelude
rule, to cast values of type `T a` into dictionaries for `C a`. To do
this, we define a function like this in the library:
data WrapC a b = WrapC (C a => Proxy a -> b)
withT :: (C a => Proxy a -> b)
-> T a -> Proxy a -> b
withT f x y = magicDict (WrapC f) x y
The purpose of `WrapC` is to avoid having `f` instantiated.
Also, it avoids impredicativity, because `magicDict`'s type
cannot be instantiated with a forall. The field of `WrapC` contains
a `Proxy` parameter which is used to link the type of the constraint,
`C a`, with the type of the `Wrap` value being made.
Next, we add a built-in Prelude rule (see prelude/PrelRules.hs),
which will replace the RHS of this definition with the appropriate
definition in Core. The rewrite rule works as follows:
magicDict @t (wrap @a @b f) x y
---->
f (x `cast` co a) y
The `co` coercion is the newtype-coercion extracted from the type-class.
The type class is obtained by looking at the type of wrap.
-------------------------------------------------------------
@realWorld#@ used to be a magic literal, \tr{void#}. If things get
nasty as-is, change it back to a literal (@Literal@).
voidArgId is a Local Id used simply as an argument in functions
where we just want an arg to avoid having a thunk of unlifted type.
E.g.
x = \ void :: Void# -> (# p, q #)
This comes up in strictness analysis
Note [evaldUnfoldings]
~~~~~~~~~~~~~~~~~~~~~~
The evaldUnfolding makes it look that some primitive value is
evaluated, which in turn makes Simplify.interestingArg return True,
which in turn makes INLINE things applied to said value likely to be
inlined.
-}
realWorldPrimId :: Id   -- :: State# RealWorld
-- The token threaded through IO; marked one-shot for the state hack.
realWorldPrimId = pcMiscPrelId realWorldName realWorldStatePrimTy
                     (noCafIdInfo `setUnfoldingInfo` evaldUnfolding    -- Note [evaldUnfoldings]
                                  `setOneShotInfo` stateHackOneShot)

voidPrimId :: Id     -- Global constant :: Void#
voidPrimId  = pcMiscPrelId voidPrimIdName voidPrimTy
                (noCafIdInfo `setUnfoldingInfo` evaldUnfolding)    -- Note [evaldUnfoldings]

voidArgId :: Id       -- Local lambda-bound :: Void#
-- Used only as an argument binder; see the commentary above.
voidArgId = mkSysLocal (fsLit "void") voidArgIdKey voidPrimTy

coercionTokenId :: Id         -- :: () ~ ()
coercionTokenId -- Used to replace Coercion terms when we go to STG
  = pcMiscPrelId coercionTokenName
                 (mkTyConApp eqPrimTyCon [liftedTypeKind, liftedTypeKind, unitTy, unitTy])
                 noCafIdInfo

-- | Build one of the miscellaneous wired-in Ids above: a vanilla
-- global with exactly the given name, type and IdInfo.
pcMiscPrelId :: Name -> Type -> IdInfo -> Id
pcMiscPrelId name ty info
  = mkVanillaGlobalWithInfo name ty info
    -- We lie and say the thing is imported; otherwise, we get into
    -- a mess with dependency analysis; e.g., core2stg may heave in
    -- random calls to GHCbase.unpackPS__.  If GHCbase is the module
    -- being compiled, then it's just a matter of luck if the definition
    -- will be in "the right place" to be in scope.
| gridaphobe/ghc | compiler/basicTypes/MkId.hs | bsd-3-clause | 58,900 | 0 | 21 | 16,331 | 7,335 | 4,004 | 3,331 | 602 | 6 |
{- CIS 194 HW 10
due Monday, 1 April
-}
module AParser
(
Parser (..),
satisfy,
char,
posInt,
abParser,
abParser_,
intPair,
intOrUppercase
) where
import Control.Applicative
import Data.Char
-- A parser for a value of type a is a function which takes a String
-- represnting the input to be parsed, and succeeds or fails; if it
-- succeeds, it returns the parsed value along with the remainder of
-- the input.
-- | Running the parser either fails ('Nothing') or yields the parsed
-- value together with the unconsumed remainder of the input.
newtype Parser a = Parser { runParser :: String -> Maybe (a, String) }
-- For example, 'satisfy' takes a predicate on Char, and constructs a
-- parser which succeeds only if it sees a Char that satisfies the
-- predicate (which it then returns). If it encounters a Char that
-- does not satisfy the predicate (or an empty input), it fails.
-- | Parser that consumes exactly one character matching the predicate;
-- fails on empty input or a non-matching first character.
satisfy :: (Char -> Bool) -> Parser Char
satisfy p = Parser f
where
f [] = Nothing -- fail on the empty input
f (x:xs) -- check if x satisfies the predicate
-- if so, return x along with the remainder
-- of the input (that is, xs)
| p x = Just (x, xs)
| otherwise = Nothing -- otherwise, fail
-- Using satisfy, we can define the parser 'char c' which expects to
-- see exactly the character c, and fails otherwise.
-- | Parser that expects exactly the given character.
char :: Char -> Parser Char
char c = satisfy (== c)
{- For example:
*Parser> runParser (satisfy isUpper) "ABC"
Just ('A',"BC")
*Parser> runParser (satisfy isUpper) "abc"
Nothing
*Parser> runParser (char 'x') "xyz"
Just ('x',"yz")
-}
-- For convenience, we've also provided a parser for positive
-- integers.
-- | Parse a run of one or more leading digits as a positive 'Integer';
-- fails if the input does not start with a digit.
posInt :: Parser Integer
posInt = Parser f
where
f xs
| null ns = Nothing
| otherwise = Just (read ns, rest)
where (ns, rest) = span isDigit xs
------------------------------------------------------------
-- Your code goes below here
------------------------------------------------------------
-- Exercise 1
-- First, you’ll need to implement a Functor instance for Parser.
-- Hint: You may find it useful to implement a function
-- first :: (a -> b) -> (a,c) -> (b,c)
-- | Apply a function to the first component of a pair, leaving the
-- second component unchanged.
first :: (a -> b) -> (a, c) -> (b, c)
first g (x, y) = (g x, y)
-- | Map a function over a parser's result, leaving the unconsumed input
-- untouched.
--
-- Simplified from the original, which rebuilt @Parser a@ only to
-- immediately call 'runParser' on it and then re-cased on the 'Maybe';
-- mapping 'first' over the 'Maybe' result is the direct formulation.
instance Functor Parser where
  fmap f (Parser run) = Parser (fmap (first f) . run)
-- Exercise 2
-- implement an Applicative instance for Parser
--
-- 1. pure a represents the parser which consumes no input and successfully
-- returns a result of a.
--
-- 2. p1 <*> p2 represents the parser which first runs p1 (which will
-- consume some input and produce a function), then passes the
-- remaining input to p2 (which consumes more input and produces
-- some value), then returns the result of applying the function to the
-- value. However, if either p1 or p2 fails then the whole thing should
-- also fail (put another way, p1 <*> p2 only succeeds if both p1 and
-- p2 succeed).
-- | * 'pure' consumes no input and always succeeds with the given value.
--   * @p1 '<*>' p2@ runs @p1@ to obtain a function, feeds the leftover
--     input to @p2@ for an argument, then applies the function; if either
--     parser fails, the combination fails.
instance Applicative Parser where
  pure x = Parser $ \input -> Just (x, input)
  Parser pf <*> Parser px = Parser $ \input -> do
    (f, rest) <- pf input
    (x, rest') <- px rest
    Just (f x, rest')
-- Exercise 3
-- We can also test your Applicative instance using other simple
-- applications of functions to multiple parsers. You should implement
-- each of the following exercises using the Applicative interface to put
-- together simpler parsers into more complex ones. Do not implement
-- them using the low-level definition of a Parser! In other words, pretend
-- that you do not have access to the Parser constructor or even
-- know how the Parser type is defined.
-- | Expect the character \'a\' followed by \'b\', returning them as a pair.
abParser :: Parser (Char, Char)
abParser = (,) <$> char 'a' <*> char 'b'
-- | Expect \'a\' followed by \'b\', discarding the parsed characters.
abParser_ :: Parser ()
abParser_ = char 'a' *> char 'b' *> pure ()
-- | Parse two positive integers separated by a single space, returned
-- as a two-element list.
intPair :: Parser [Integer]
intPair = (\n m -> [n, m]) <$> (posInt <* char ' ') <*> posInt
-- Exercise 4
-- Applicative by itself can be used to make parsers for simple, fixed
-- formats. But for any format involving choice (e.g. “. . . after the colon
-- there can be a number or a word or parentheses. . . ”) Applicative is
-- not quite enough. To handle choice we turn to the Alternative class,
-- defined (essentially) as follows:
-- ```
-- class Applicative f => Alternative f where
-- empty :: f a
-- (<|>) :: f a -> f a -> f a
-- ```
-- (<|>) is intended to represent choice: that is, f1 <|> f2 represents
-- a choice between f1 and f2. empty should be the identity element for
-- (<|>), and often represents failure.
-- | * 'empty' is the parser that fails on every input.
--   * @p1 '<|>' p2@ returns @p1@'s result when it succeeds; only if @p1@
--     fails is @p2@ tried on the same input.
--
-- Cleanups from the original: 'empty' uses 'const' instead of binding an
-- argument it never uses, and '<|>' delegates to 'Maybe''s own
-- 'Alternative' instance, which has exactly the required left-biased
-- semantics (@Just x <|> _ = Just x@; @Nothing <|> r = r@).
instance Alternative Parser where
  empty = Parser (const Nothing)
  Parser p <|> Parser q = Parser $ \input -> p input <|> q input
-- Exercise 5
-- parses either an integer value or an uppercase character, and fails otherwise.
-- | Succeed on either a positive integer or a single uppercase letter,
-- discarding whichever value was parsed.
intOrUppercase :: Parser ()
intOrUppercase = (() <$ posInt) <|> (() <$ satisfy isUpper)
| wangwangwar/cis194 | src/ch10/AParser.hs | bsd-3-clause | 5,368 | 0 | 16 | 1,290 | 884 | 494 | 390 | 57 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE BangPatterns #-}
module Handlers (
handleMessages,
handlePresences,
handleExit
) where
import Network.Xmpp hiding (session)
import Network.Xmpp.IM
import Control.Concurrent.STM
import Control.Monad hiding (forM)
import Data.Traversable
import qualified Users
import Data.Maybe
import qualified Data.Text as Text
import qualified Data.XML.Types as Xml
import qualified XmlUtils
import Logs
import Common
import Data.Char(isSpace)
import Data.Attoparsec.Text
import qualified Data.List as List
import Control.Applicative
import Prelude hiding (takeWhile)
import qualified Data.Map as M
import System.Random
import Control.Concurrent
import qualified Control.Exception as E
import System.Posix.Signals
import System.Exit
import qualified Config
--The message handler takes in messages and broadcasts them to everyone on the roster.
-- | Main message loop: wait for chat messages, sanitise their text, and
-- either dispatch a bot command (messages starting with \'!\') or
-- broadcast the text to everyone on the roster.
handleMessages :: BotData -> IO ()
handleMessages bd@BotData {session=sess, users=us, logs=ls, botJid=bj} = forever $ do
!msg <- waitForMessage (\m -> isJust (messageFrom m) && isJust (XmlUtils.unwrapMessage (messagePayload m))) sess
-- Replace '<' so users cannot inject markup into the rebroadcast XML.
let filterText = Text.replace "<" "≺"
let (Just !sender) = messageFrom msg
let (Just !payload) = fmap (fmap $ XmlUtils.mapNodeText filterText) $ XmlUtils.unwrapMessage (messagePayload msg)
let !s = XmlUtils.nodesToString payload
u <- Users.getUser sender us
let alias = Users.alias u
let !broadcastMsg = XmlUtils.boldText alias : XmlUtils.text ": " : payload
let msgLength = XmlUtils.messageLength payload
--putStrLn (show payload)
--Check if command
-- NOTE(review): 'head s' is partial - an empty rendered payload would
-- crash this loop; presumably the waitForMessage filter rules that
-- out - TODO confirm.
if head s == '!'
then parseCommand s sender
else if msgLength <= Config.messageCharacterLimit
then sendSquelchableMessageToAllFrom sender bd broadcastMsg
else sendMessageTo sender bd $ [XmlUtils.italicsText $ "Error: Message too long by " ++ show (msgLength - Config.messageCharacterLimit) ++ " characters."]
where
-- Parse and execute a single "!command" sent by the given user.
parseCommand :: String -> Jid -> IO ()
parseCommand !s !sender = case parseOnly parser (Text.pack s) of
Left e -> sendMessageTo sender bd $ [XmlUtils.italicsText $ "Incorrect command syntax."]
Right Help -> sendMessageTo sender bd $ [XmlUtils.italicsText "This bot is here to help! Commands: roll <num>d<num>, help, log <number>, ping, alias <name>, list, multicast"]
Right (GetLogs i) -> do
lastLogs <- getLastLogs i ls
sendMessageTo sender bd $ List.intercalate [XmlUtils.newline] $ [XmlUtils.boldText "Last logs:"] : lastLogs
Right Ping -> sendMessageTo sender bd $ [XmlUtils.italicsText "PONG!"]
-- Aliases are capped at 20 characters and announced to the room.
Right (Alias a) -> do
if length a > 0 && length a <= 20
then do
u <- Users.getUser sender us
let oldAlias = Users.alias u
let u' = u {Users.alias = a}
Users.setUser u' us
sendSquelchableMessageToAll sender bd $ [XmlUtils.italicsNode [XmlUtils.boldText oldAlias, XmlUtils.text " is now known as ", XmlUtils.boldText a, XmlUtils.text "."]]
else sendMessageTo sender bd $ [XmlUtils.italicsText "Error: You must enter an alias between 1-20 characters, eg. !alias fel."]
-- List every available peer except the bot itself.
Right (List) -> do
ps <- atomically $ getAvailablePeers sess
ls <- forM (filter (/=bj) ps) (\j -> do
u <- Users.getUser j us
return [XmlUtils.boldText (Users.alias u), XmlUtils.text $ " (" ++ Text.unpack (jidToText . toBare $ Users.jid u) ++ ")"])
sendMessageTo sender bd $ List.intercalate [XmlUtils.newline] $ [XmlUtils.boldText "Users in this chat:"] : ls
Right (Roll numDice numSides) -> do
rolls <- replicateM numDice $ randomRIO (1, numSides)
alias <- fmap Users.alias $ Users.getUser sender us
sendSquelchableMessageToAll sender bd $ [XmlUtils.italicsNode [XmlUtils.boldText alias, XmlUtils.text (" rolls " ++ show numDice ++ "d" ++ show numSides ++ ". "), XmlUtils.boldText "Result: ", XmlUtils.text (show rolls)]]
-- Toggle the sender's multicast flag.
Right (Multicast) -> do
u <- Users.getUser sender us
let m = not $ Users.multicast u
let u' = u {Users.multicast = m}
Users.setUser u' us
sendMessageTo sender bd $ [XmlUtils.italicsText "Your multicast is now toggled to ", XmlUtils.boldText (show m), XmlUtils.italicsText "."]
-- Add a JID to the sender's squelch list (messages from it are muted).
Right (Squelch rawJid) -> do
case jidFromText (Text.pack rawJid) of
Just j -> do
u <- Users.getUser sender us
let u' = u {Users.squelchList = j : Users.squelchList u}
Users.setUser u' us
sendMessageTo sender bd $ [XmlUtils.italicsText "You have squelched ", XmlUtils.boldText rawJid, XmlUtils.italicsText $ ". Current squelch list: " ++ (show $ fmap (Text.unpack . jidToText) (Users.squelchList u'))]
Nothing -> do
sendMessageTo sender bd $ [XmlUtils.italicsText "Invalid JID entered."]
Right (Unsquelch rawJid) -> do
case jidFromText (Text.pack rawJid) of
Just j -> do
u <- Users.getUser sender us
let u' = u {Users.squelchList = List.delete j $ Users.squelchList u}
Users.setUser u' us
sendMessageTo sender bd $ [XmlUtils.italicsText "You have unsquelched ", XmlUtils.boldText rawJid, XmlUtils.italicsText $ ". Current squelch list: " ++ (show $ fmap (Text.unpack . jidToText) (Users.squelchList u'))]
Nothing -> do
sendMessageTo sender bd $ [XmlUtils.italicsText "Invalid JID entered."]
-- Attoparsec grammar for the supported "!" commands; dice rolls are
-- clamped to at most 100 dice of at most 10000 sides.
parser :: Parser BotCommand
parser = do
char '!'
(string "help" >> return Help)
<|> (string "log" >> takeWhile isSpace >> decimal >>= return . GetLogs)
<|> (string "ping" >> return Ping)
<|> (string "alias" >> takeWhile isSpace >> takeWhile (const True) >>= return . Alias . Text.unpack)
<|> (string "list" >> return List)
<|> (string "roll" >> takeWhile isSpace >> decimal >>= \d1 -> string "d" >> decimal >>= \d2 -> return $ Roll (min 100 d1) (min 10000 d2))
<|> (string "multicast" >> return Multicast)
<|> (string "squelch" >> takeWhile isSpace >> takeWhile (const True) >>= return . Squelch . Text.unpack)
<|> (string "unsquelch" >> takeWhile isSpace >> takeWhile (const True) >>= return . Unsquelch . Text.unpack)
-- | The set of commands the bot understands.
data BotCommand = GetLogs Int | Help | Ping | Alias String | List | Roll Int Int | Multicast | Squelch String | Unsquelch String
--The presence handler takes in presences and looks for subscription requests. Upon finding one, it subscribes them back
--and adds them to the roster.
-- | Presence loop: answer subscription requests by subscribing back and
-- mirror unsubscriptions; the server maintains the roster in both cases.
handlePresences :: BotData -> IO ()
handlePresences bd@BotData{session=sess} = forever $ do
!pres <- waitForPresence (\p -> isJust (presenceFrom p)) sess --only deal with presences who have a 'from'
let Just sender = presenceFrom pres
case presenceType pres of
Subscribe -> do
sendPresence (presenceSubscribe sender) sess --subscribe in turn, server handles roster
sendPresence (presenceSubscribed sender) sess
sendMessageTo sender bd $ [XmlUtils.italicsText "You have subscribed to the bot. In order to receive messages, you must accept the subscription request sent to you. If you accidentally decline, unsubscribe from this bot and then resubscribe."]
Unsubscribe -> do
sendPresence (presenceUnsubscribe sender) sess --unsub in turn, server handles roster
sendPresence (presenceUnsubscribed sender) sess
return ()
-- NOTE(review): 'return undefined' is never forced (the result is
-- discarded by 'forever'), so this is effectively a no-op for all
-- other presence types - but a plain 'return ()' would be clearer.
_ -> do
return undefined
-- | Handler for an interrupt signal: announce shutdown and throw
-- 'ExitSuccess' at the given (main) thread so the process exits cleanly.
-- The 'Logs' argument is currently unused; it is kept for interface
-- compatibility with existing callers.
handleExit :: ThreadId -> Logs -> IO ()
handleExit tid _logs = do
  putStrLn "Interrupt detected, exiting..."
  E.throwTo tid ExitSuccess -- exit code handled in the main thread
| DrewBarclay/XMPP-Chatbot | src/Handlers.hs | bsd-3-clause | 7,615 | 31 | 25 | 1,671 | 2,295 | 1,142 | 1,153 | 129 | 15 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Tests for the lock data structure
-}
{-
Copyright (C) 2014 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Test.Ganeti.Locking.Locks (testLocking_Locks) where
import Control.Applicative ((<$>), (<*>), liftA2)
import Control.Monad (liftM)
import System.Posix.Types (CPid)
import Test.QuickCheck
import Text.JSON
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Test.Ganeti.Types ()
import Ganeti.Locking.Locks
import Ganeti.Locking.Types
-- | Generator covering every 'GanetiLocks' constructor; the constructors
-- that carry an entity identifier are filled with 'genUUID'.
instance Arbitrary GanetiLocks where
arbitrary = oneof [ return BGL
, return ClusterLockSet
, return InstanceLockSet
, Instance <$> genUUID
, return NodeGroupLockSet
, NodeGroup <$> genUUID
, return NAL
, return NodeAllocLockSet
, return NodeResLockSet
, NodeRes <$> genUUID
, return NodeLockSet
, Node <$> genUUID
, return NetworkLockSet
, Network <$> genUUID
]
-- | Verify that readJSON . showJSON = Ok
prop_ReadShow :: Property
prop_ReadShow =
  forAll (arbitrary :: Gen GanetiLocks) $ \lock ->
    readJSON (showJSON lock) ==? Ok lock
-- | Verify the implied locks are earlier in the lock order.
-- Only locks that actually imply something are interesting here, hence
-- the 'suchThat' restriction on the generator.
prop_ImpliedOrder :: Property
prop_ImpliedOrder =
forAll ((arbitrary :: Gen GanetiLocks)
`suchThat` (not . null . lockImplications)) $ \b ->
printTestCase "Implied locks must be earlier in the lock order"
. flip all (lockImplications b) $ \a ->
a < b
-- | Verify the interval property of the locks: anything ordered between
-- a group lock and one of its members also belongs to the group.
prop_ImpliedIntervall :: Property
prop_ImpliedIntervall =
forAll ((arbitrary :: Gen GanetiLocks)
`suchThat` (not . null . lockImplications)) $ \b ->
forAll (elements $ lockImplications b) $ \a ->
forAll (arbitrary `suchThat` liftA2 (&&) (a <) (<= b)) $ \x ->
printTestCase ("Locks between a group and a member of the group"
++ " must also belong to the group")
$ a `elem` lockImplications x
-- | Pick uniformly from all lock levels, starting at 'LevelCluster'.
instance Arbitrary LockLevel where
arbitrary = elements [LevelCluster ..]
-- | Verify that readJSON . showJSON = Ok for lock levels
prop_ReadShowLevel :: Property
prop_ReadShowLevel = forAll (arbitrary :: Gen LockLevel) $ \a ->
readJSON (showJSON a) ==? Ok a
instance Arbitrary ClientType where
  arbitrary = oneof [ fmap ClientOther arbitrary
                    , fmap ClientJob arbitrary
                    ]

-- | Verify that readJSON . showJSON = Ok for ClientType
prop_ReadShow_ClientType :: Property
prop_ReadShow_ClientType =
  forAll (arbitrary :: Gen ClientType) $ \ct ->
    readJSON (showJSON ct) ==? Ok ct
instance Arbitrary CPid where
  -- CPid has no Arbitrary instance upstream; derive one from Integer.
  arbitrary = fromIntegral <$> (arbitrary :: Gen Integer)

instance Arbitrary ClientId where
  arbitrary = ClientId <$> arbitrary
                       <*> arbitrary
                       <*> arbitrary

-- | Verify that readJSON . showJSON = Ok for ClientId
prop_ReadShow_ClientId :: Property
prop_ReadShow_ClientId =
  forAll (arbitrary :: Gen ClientId) $ \cid ->
    readJSON (showJSON cid) ==? Ok cid
-- Collect all properties above into the "Locking/Locks" test suite
-- (Template Haskell splice from Test.Ganeti.TestHelper).
testSuite "Locking/Locks"
[ 'prop_ReadShow
, 'prop_ImpliedOrder
, 'prop_ImpliedIntervall
, 'prop_ReadShowLevel
, 'prop_ReadShow_ClientType
, 'prop_ReadShow_ClientId
]
| kawamuray/ganeti | test/hs/Test/Ganeti/Locking/Locks.hs | gpl-2.0 | 3,991 | 0 | 15 | 955 | 750 | 419 | 331 | 72 | 1 |
-- | Minimal class with two type parameters (exercises the
-- MultiParamTypeClasses extension; no methods are needed).
class MultiParam a b
| roberth/uu-helium | test/typeClassesStatic/MultiParam.hs | gpl-3.0 | 26 | 0 | 5 | 9 | 10 | 4 | 6 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ViewPatterns #-}
-- Module : Network.AWS.Internal.Signing
-- Copyright : (c) 2013 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module Network.AWS.Internal.Signing
( sign
, version2
, version3
, version4
, versionS3
, presignS3
) where
import Control.Applicative
import Control.Monad.IO.Class
import qualified Crypto.Hash.SHA1 as SHA1
import qualified Crypto.Hash.SHA256 as SHA256
import qualified Crypto.MAC.HMAC as HMAC
import Data.ByteString (ByteString)
import qualified Data.ByteString.Base16 as Base16
import qualified Data.ByteString.Base64 as Base64
import qualified Data.ByteString.Char8 as BS
import Data.CaseInsensitive (CI)
import qualified Data.CaseInsensitive as Case
import Data.Default
import Data.Function (on)
import Data.List (groupBy, nub, sort, sortBy, find)
import Data.Maybe
import Data.Monoid
import Data.Ord
import Data.Time (UTCTime, getCurrentTime)
import Data.Time.Clock.POSIX
import Network.AWS.Headers
import Network.AWS.Internal.String
import Network.AWS.Internal.Time
import Network.AWS.Internal.Types
import Network.HTTP.Conduit
import Network.HTTP.Types (Header, StdMethod, urlEncode)
-- | Per-request metadata shared by all the signing algorithms; built by
-- 'common' from the raw request and the target region.
data Common = Common
{ _service :: !ByteString -- ^ Service name ('svcName').
, _version :: !ByteString -- ^ Service API version ('svcVersion').
, _host :: !ByteString -- ^ Endpoint host for the chosen region.
, _query :: [(ByteString, Maybe ByteString)] -- ^ Query parameters, sorted.
}
-- | Sign a raw request: resolve credentials, region and the current time,
-- prepend the Host header, and hand everything to the service's signer.
sign :: Raw -> AWS Request
sign raw@Raw{..} = do
auth <- getAuth
reg <- region rqService
time <- liftIO getCurrentTime
let sig = svcSigner rqService
hs = hHost (endpoint rqService reg) : rqHeaders
return $! sig (raw { rqHeaders = hs }) auth reg time
-- | AWS Signature Version 2: HMAC-SHA256 over method, host, path and the
-- sorted encoded query string; the signature is appended to the query.
version2 :: Signer
version2 raw@Raw{..} auth reg time =
signed rqMethod _host rqPath query headers rqBody
where
Common{..} = common raw reg
query = encoded <> "&Signature=" <> urlEncode True signature
signature = Base64.encode
. hmacSHA256 (secretAccessKey auth)
$ BS.intercalate "\n"
[ BS.pack $ show rqMethod
, _host
, rqPath
, encoded
]
-- renderQuery sorts the parameters, which V2 signing requires.
encoded = renderQuery $ _query
++ [ ("Version", Just _version)
, ("SignatureVersion", Just "2")
, ("SignatureMethod", Just "HmacSHA256")
, ("Timestamp", Just $ formatISO8601 time)
, ("AWSAccessKeyId", Just $ accessKeyId auth)
]
++ maybeToList ((\x -> ("SecurityToken", Just x)) <$> securityToken auth)
headers = hDate (formatISO8601 time) : rqHeaders
-- | AWS Signature Version 3 (AWS3-HTTPS): only the RFC822 date is signed
-- with HMAC-SHA256; the credentials travel in the header added by
-- 'hAMZAuth'.
version3 :: Signer
version3 raw@Raw{..} auth reg time =
signed rqMethod _host rqPath query headers rqBody
where
Common{..} = common raw reg
query = renderQuery _query
headers = hDate (formatRFC822 time)
: hAMZAuth authorisation
: maybeToList (hAMZToken <$> securityToken auth)
++ rqHeaders
authorisation = "AWS3-HTTPS AWSAccessKeyId="
<> accessKeyId auth
<> ", Algorithm=HmacSHA256, Signature="
<> Base64.encode (hmacSHA256 (secretAccessKey auth) $ formatRFC822 time)
-- | AWS Signature Version 4: a canonical request is hashed into a string
-- to sign, which is HMAC'd with a key derived from the secret key and
-- the date/region/service scope.
version4 :: Signer
version4 raw@Raw{..} auth reg time =
signed rqMethod _host rqPath query (hAuth authorisation : headers) rqBody
where
Common{..} = common raw reg
query = renderQuery . sort $ ("Version", Just _version) : _query
headers = hAMZDate time
: maybeToList (hAMZToken <$> securityToken auth)
++ rqHeaders
authorisation = mconcat
[ algorithm
, " Credential="
, accessKeyId auth
, "/"
, credentialScope
, ", SignedHeaders="
, signedHeaders
, ", Signature="
, signature
]
signature = Base16.encode $ hmacSHA256 signingKey stringToSign
-- Derived key: chained HMACs over date, region, service, "aws4_request".
signingKey = foldl1 hmacSHA256 $ ("AWS4" <> secretAccessKey auth) : scope
stringToSign = BS.intercalate "\n"
[ algorithm
, formatAWS time
, credentialScope
, Base16.encode $ SHA256.hash canonicalRequest
]
credentialScope = BS.intercalate "/" scope
algorithm = "AWS4-HMAC-SHA256"
scope = [formatBasic time, BS.pack $ show reg, _service, "aws4_request"]
canonicalRequest = BS.intercalate "\n"
[ BS.pack $ show rqMethod
, rqPath
, query
, canonicalHeaders
, signedHeaders
, bodySHA256
]
canonicalHeaders = mconcat $ map flattenValues grouped
signedHeaders = BS.intercalate ";" . nub $
map (Case.foldedCase . fst) grouped
grouped = groupHeaders headers
-- NOTE(review): the payload hash is taken over the empty string, so the
-- request body is not covered by the signature - confirm this is the
-- intended behaviour for streaming bodies.
bodySHA256 = Base16.encode $ SHA256.hash ""
-- sinkHash :: (Monad m, Hash ctx d) => Consumer ByteString m SHA256
-- sinkHash :: (Monad m, Hash ctx d) => Consumer ByteString m SHA256
-- | Legacy S3 request signing: HMAC-SHA1 over method, selected headers,
-- date, the @x-amz-*@ headers and the canonical resource, carried in an
-- @AWS access:signature@ Authorization header.
versionS3 :: ByteString -> Signer
versionS3 bucket raw@Raw{..} auth reg time =
signed rqMethod _host rqPath query (authorisation : headers) rqBody
where
Common{..} = common raw reg
query = renderQuery _query
authorisation = hAuth $ "AWS " <> accessKeyId auth <> ":" <> signature
signature = Base64.encode $ hmacSHA1 (secretAccessKey auth) stringToSign
stringToSign = BS.concat
[ BS.pack $ show rqMethod
, "\n"
, optionalHeader "content-md5"
, "\n"
, optionalHeader "content-type"
, "\n"
, date
, "\n"
, canonicalHeaders
, canonicalResource
]
optionalHeader = fromMaybe "" . (`lookupHeader` headers)
-- Only the x-amz-* headers participate in the canonical header block.
canonicalHeaders = BS.concat
. map flattenValues
. filter (BS.isPrefixOf "x-amz-" . Case.foldedCase . fst)
$ groupHeaders headers
headers = hDate date
: maybeToList (hAMZToken <$> securityToken auth)
++ rqHeaders
date = formatRFC822 time
canonicalResource = wrap '/' bucket <> stripPrefix "/" rqPath <> subResource
-- NOTE(review): 'find' includes only the first matching sub-resource
-- from the query; requests carrying several of these keys at once would
-- not canonicalise them all - confirm this never happens in practice.
subResource = maybe "" (mappend "?" . f) $ find ((`elem` keys) . fst) _query
where
f (k, Just v) = k <> "=" <> v
f (k, _) = k
-- Sub-resource keys that must be included in the string to sign.
keys =
[ "acl"
, "cors"
, "defaultObjectAcl"
, "location"
, "logging"
, "partNumber"
, "policy"
, "requestPayment"
, "torrent"
, "versioning"
, "versionId"
, "versions"
, "website"
, "uploads"
, "uploadId"
, "response-content-type"
, "response-content-language"
, "response-expires"
, "response-cache-control"
, "response-content-disposition"
, "response-content-encoding"
, "delete"
, "lifecycle"
, "tagging"
, "restore"
, "storageClass"
, "notification"
]
-- | Build a pre-signed S3 URL for the given method, bucket and key that
-- stays valid until the supplied expiry time (encoded as POSIX seconds).
presignS3 :: StdMethod
-> ByteString
-> ByteString
-> UTCTime
-> AWS ByteString
presignS3 meth (strip '/' -> bucket) (strip '/' -> key) expires = do
auth <- getAuth
let access = accessKeyId auth
secret = secretAccessKey auth
return $! mconcat
[ "https://"
, bucket
, ".s3.amazonaws.com/"
, key
, "?AWSAccessKeyId=" <> access
, "&Expires=" <> expiry
, "&Signature=" <> signature secret
]
where
signature = urlEncode True
. Base64.encode
. (`hmacSHA1` stringToSign)
-- The two empty lines stand for Content-MD5 and Content-Type.
stringToSign = BS.intercalate "\n"
[ BS.pack $ show meth
, ""
, ""
, expiry
, "/" <> bucket <> "/" <> key
]
expiry = BS.pack $ show (truncate $ utcTimeToPOSIXSeconds expires :: Integer)
-- | Collect the request metadata shared by every signing algorithm.
common :: Raw -> Region -> Common
common raw reg = Common
    { _service = svcName svc
    , _version = svcVersion svc
    , _host    = endpoint svc reg
    , _query   = sort (rqQuery raw)
    }
  where
    svc = rqService raw
-- | Assemble the final HTTPS 'Request' (port 443) from the signed pieces.
-- 'checkStatus' always yields Nothing, so non-2xx responses do not throw.
signed :: StdMethod
-> ByteString
-> ByteString
-> ByteString
-> [Header]
-> RequestBody
-> Request
signed meth host path qs hs body = def
{ secure = True
, method = BS.pack $ show meth
, host = host
, port = 443
, path = path
, queryString = qs
, requestHeaders = hs
, requestBody = body
, checkStatus = \_ _ _ -> Nothing
}
-- | Keyed HMAC-SHA1 with the standard 64-byte block size.
hmacSHA1 :: ByteString -> ByteString -> ByteString
hmacSHA1 = HMAC.hmac SHA1.hash 64
-- | Keyed HMAC-SHA256 with the standard 64-byte block size.
hmacSHA256 :: ByteString -> ByteString -> ByteString
hmacSHA256 = HMAC.hmac SHA256.hash 64
-- | Canonicalise headers: merge all values sharing a (case-insensitive)
-- name into one header with comma-separated, sorted values, returning the
-- groups ordered by name.
--
-- Fix: 'groupBy' only merges /adjacent/ equal keys, so the list must be
-- sorted by header name first; previously duplicate headers that were not
-- adjacent stayed separate, producing an invalid canonical header block.
groupHeaders :: [Header] -> [Header]
groupHeaders = map merge . groupBy ((==) `on` fst) . sortBy (comparing fst)
  where
    merge hs@(h:_) = (fst h, BS.intercalate "," . sort $ map snd hs)
    merge []       = ("", "") -- unreachable: groupBy yields no empty groups
-- | Look up a header value by name, case-insensitively.
lookupHeader :: ByteString -> [Header] -> Maybe ByteString
lookupHeader name hs = lookup (Case.mk name) hs
-- | Render one grouped header as @folded-key:stripped-value\n@.
flattenValues :: IsByteString a => (CI ByteString, a) -> ByteString
flattenValues (key, val) =
    Case.foldedCase key <> ":" <> strip ' ' val <> "\n"
-- | Ensures the querystring is sorted - very important!
renderQuery :: [(ByteString, Maybe ByteString)] -> ByteString
renderQuery = BS.intercalate "&" . map f . sortBy (comparing fst)
where
f (k, Just v) = mconcat [k, "=", urlEncode True v]
f (k, _) = k
| brendanhay/amazonka-limited | src/Network/AWS/Internal/Signing.hs | mpl-2.0 | 9,907 | 0 | 14 | 3,134 | 2,524 | 1,379 | 1,145 | 253 | 2 |
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable #-}
--------------------------------------------------------------------
-- |
-- Copyright : (c) Edward Kmett and Dan Doel 2012-2013
-- License : BSD2
-- Maintainer: Edward Kmett <ekmett@gmail.com>
-- Stability : experimental
-- Portability: non-portable
--
--------------------------------------------------------------------
module Ermine.Console.Options
( MonitorOptions(..)
, HasMonitorOptions(..)
, Options(..)
, HasOptions(..)
, parseOptions
) where
import Control.Lens hiding (argument)
import Data.Data
import Ermine.Monitor
import Options.Applicative
import Paths_ermine
-- | All command line options.
data Options = Options
{ _optionsMonitorOptions :: MonitorOptions -- ^ Settings for the monitoring subsystem.
, _libdir :: FilePath -- ^ Location of the ermine library (defaults to the data dir).
, _files :: [FilePath] -- ^ Files given as positional arguments.
} deriving (Eq,Ord,Show,Read,Data,Typeable)
-- Generates the 'HasOptions' class plus lenses for the fields above.
makeClassy ''Options
instance HasMonitorOptions Options where
monitorOptions = optionsMonitorOptions
-- | Generate the command line option parser
-- | Generate the command line option parser; the library directory
-- defaults to the package's installed data directory.
parseOptions :: IO (Parser Options)
parseOptions = do
  dd <- getDataDir
  return $ Options
    <$> parseMonitorOptions
    -- Fix: use 'strOption' rather than 'option auto'. 'auto' parses via
    -- 'Read', so a bare path such as @--libdir /usr/lib@ would be
    -- rejected unless the user quoted it Haskell-style.
    <*> strOption (long "libdir" <> short 'l' <> help "location of the ermine library" <> metavar "DIR" <> action "directory" <> value dd)
    <*> many (argument str $ help "files" <> metavar "FILE" <> action "file")
| PipocaQuemada/ermine | src/Ermine/Console/Options.hs | bsd-2-clause | 1,342 | 0 | 16 | 215 | 280 | 156 | 124 | 27 | 1 |
{-
(c) The University of Glasgow 2006-2012
(c) The GRASP Project, Glasgow University, 1992-2002
Various types used during typechecking, please see TcRnMonad as well for
operations on these types. You probably want to import it, instead of this
module.
All the monads exported here are built on top of the same IOEnv monad. The
monad functions like a Reader monad in the way it passes the environment
around. This is done to allow the environment to be manipulated in a stack
like fashion when entering expressions... ect.
For state that is global and should be returned at the end (e.g not part
of the stack mechanism), you should use an TcRef (= IORef) to store them.
-}
{-# LANGUAGE CPP, ExistentialQuantification, GeneralizedNewtypeDeriving,
ViewPatterns #-}
module TcRnTypes(
TcRnIf, TcRn, TcM, RnM, IfM, IfL, IfG, -- The monad is opaque outside this module
TcRef,
-- The environment types
Env(..),
TcGblEnv(..), TcLclEnv(..),
IfGblEnv(..), IfLclEnv(..),
tcVisibleOrphanMods,
-- Frontend types (shouldn't really be here)
FrontendResult(..),
-- Renamer types
ErrCtxt, RecFieldEnv,
ImportAvails(..), emptyImportAvails, plusImportAvails,
WhereFrom(..), mkModDeps,
-- Typechecker types
TcTypeEnv, TcIdBinderStack, TcIdBinder(..),
TcTyThing(..), PromotionErr(..),
IdBindingInfo(..),
IsGroupClosed(..),
SelfBootInfo(..),
pprTcTyThingCategory, pprPECategory,
-- Desugaring types
DsM, DsLclEnv(..), DsGblEnv(..), PArrBuiltin(..),
DsMetaEnv, DsMetaVal(..),
-- Template Haskell
ThStage(..), SpliceType(..), PendingStuff(..),
topStage, topAnnStage, topSpliceStage,
ThLevel, impLevel, outerLevel, thLevel,
-- Arrows
ArrowCtxt(..),
-- TcSigInfo
TcSigInfo(..), TcIdSigInfo(..),
TcIdSigInst(..), TcPatSynInfo(..),
isPartialSig,
-- Canonical constraints
Xi, Ct(..), Cts, emptyCts, andCts, andManyCts, pprCts,
singleCt, listToCts, ctsElts, consCts, snocCts, extendCtsList,
isEmptyCts, isCTyEqCan, isCFunEqCan,
isPendingScDict, superClassesMightHelp,
isCDictCan_Maybe, isCFunEqCan_maybe,
isCIrredEvCan, isCNonCanonical, isWantedCt, isDerivedCt,
isGivenCt, isHoleCt, isOutOfScopeCt, isExprHoleCt, isTypeHoleCt,
isUserTypeErrorCt, getUserTypeErrorMsg,
ctEvidence, ctLoc, setCtLoc, ctPred, ctFlavour, ctEqRel, ctOrigin,
mkTcEqPredLikeEv,
mkNonCanonical, mkNonCanonicalCt, mkGivens,
ctEvPred, ctEvLoc, ctEvOrigin, ctEvEqRel,
ctEvTerm, ctEvCoercion, ctEvId,
tyCoVarsOfCt, tyCoVarsOfCts,
tyCoVarsOfCtList, tyCoVarsOfCtsList,
WantedConstraints(..), insolubleWC, emptyWC, isEmptyWC,
andWC, unionsWC, mkSimpleWC, mkImplicWC,
addInsols, getInsolubles, addSimples, addImplics,
tyCoVarsOfWC, dropDerivedWC, dropDerivedSimples, dropDerivedInsols,
tyCoVarsOfWCList,
isDroppableDerivedLoc, insolubleImplic,
arisesFromGivens,
Implication(..), ImplicStatus(..), isInsolubleStatus, isSolvedStatus,
SubGoalDepth, initialSubGoalDepth, maxSubGoalDepth,
bumpSubGoalDepth, subGoalDepthExceeded,
CtLoc(..), ctLocSpan, ctLocEnv, ctLocLevel, ctLocOrigin,
ctLocTypeOrKind_maybe,
ctLocDepth, bumpCtLocDepth,
setCtLocOrigin, setCtLocEnv, setCtLocSpan,
CtOrigin(..), exprCtOrigin, matchesCtOrigin, grhssCtOrigin,
ErrorThing(..), mkErrorThing, errorThingNumArgs_maybe,
TypeOrKind(..), isTypeLevel, isKindLevel,
pprCtOrigin, pprCtLoc,
pushErrCtxt, pushErrCtxtSameOrigin,
SkolemInfo(..), pprSigSkolInfo, pprSkolInfo,
termEvidenceAllowed,
CtEvidence(..), TcEvDest(..),
mkGivenLoc, mkKindLoc, toKindLoc,
isWanted, isGiven, isDerived, isGivenOrWDeriv,
ctEvRole,
-- Constraint solver plugins
TcPlugin(..), TcPluginResult(..), TcPluginSolver,
TcPluginM, runTcPluginM, unsafeTcPluginTcM,
getEvBindsTcPluginM,
CtFlavour(..), ShadowInfo(..), ctEvFlavour,
CtFlavourRole, ctEvFlavourRole, ctFlavourRole,
eqCanRewriteFR, eqMayRewriteFR,
eqCanDischarge,
funEqCanDischarge, funEqCanDischargeF,
-- Pretty printing
pprEvVarTheta,
pprEvVars, pprEvVarWithType,
-- Misc other types
TcId, TcIdSet,
Hole(..), holeOcc,
NameShape(..)
) where
#include "HsVersions.h"
import HsSyn
import CoreSyn
import HscTypes
import TcEvidence
import Type
import Class ( Class )
import TyCon ( TyCon )
import Coercion ( Coercion, mkHoleCo )
import ConLike ( ConLike(..) )
import DataCon ( DataCon, dataConUserType, dataConOrigArgTys )
import PatSyn ( PatSyn, pprPatSynType )
import Id ( idType, idName )
import FieldLabel ( FieldLabel )
import TcType
import Annotations
import InstEnv
import FamInstEnv
import PmExpr
import IOEnv
import RdrName
import Name
import NameEnv
import NameSet
import Avail
import Var
import FV
import VarEnv
import Module
import SrcLoc
import VarSet
import ErrUtils
import UniqDFM
import UniqSupply
import BasicTypes
import Bag
import DynFlags
import Outputable
import ListSetOps
import FastString
import qualified GHC.LanguageExtensions as LangExt
import Fingerprint
import Util
import Control.Monad (ap, liftM, msum)
#if __GLASGOW_HASKELL__ > 710
import qualified Control.Monad.Fail as MonadFail
#endif
import Data.Set ( Set )
import Data.Map ( Map )
import Data.Dynamic ( Dynamic )
import Data.Typeable ( TypeRep )
import GHCi.Message
import GHCi.RemoteTypes
import qualified Language.Haskell.TH as TH
-- | A 'NameShape' is a substitution on 'Name's that can be used
-- to refine the identities of a hole while we are renaming interfaces
-- (see 'RnModIface'). Specifically, a 'NameShape' for
-- 'ns_module_name' @A@, defines a mapping from @{A.T}@
-- (for some 'OccName' @T@) to some arbitrary other 'Name'.
--
-- The most intruiging thing about a 'NameShape', however, is
-- how it's constructed. A 'NameShape' is *implied* by the
-- exported 'AvailInfo's of the implementor of an interface:
-- if an implementor of signature @<H>@ exports @M.T@, you implicitly
-- define a substitution from @{H.T}@ to @M.T@. So a 'NameShape'
-- is computed from the list of 'AvailInfo's that are exported
-- by the implementation of a module, or successively merged
-- together by the export lists of signatures which are joining
-- together.
--
-- It's not the most obvious way to go about doing this, but it
-- does seem to work!
--
-- NB: Can't boot this and put it in NameShape because then we
-- start pulling in too many DynFlags things.
data NameShape = NameShape {
ns_mod_name :: ModuleName, -- ^ The module whose holes this shape refines.
ns_exports :: [AvailInfo], -- ^ The exports that imply the substitution.
ns_map :: OccEnv Name -- ^ The substitution itself, keyed by 'OccName'.
}
{-
************************************************************************
* *
Standard monad definition for TcRn
All the combinators for the monad can be found in TcRnMonad
* *
************************************************************************
The monad itself has to be defined here, because it is mentioned by ErrCtxt
-}
-- | The shared monad stack: a reader of @'Env' gbl lcl@ over IO, with the
-- specialisations below fixing the global/local environment types.
type TcRnIf a b = IOEnv (Env a b)
type TcRn = TcRnIf TcGblEnv TcLclEnv -- Type inference
type IfM lcl = TcRnIf IfGblEnv lcl -- Iface stuff
type IfG = IfM () -- Top level
type IfL = IfM IfLclEnv -- Nested
type DsM = TcRnIf DsGblEnv DsLclEnv -- Desugaring
-- TcRn is the type-checking and renaming monad: the main monad that
-- most type-checking takes place in. The global environment is
-- 'TcGblEnv', which tracks all of the top-level type-checking
-- information we've accumulated while checking a module, while the
-- local environment is 'TcLclEnv', which tracks local information as
-- we move inside expressions.
-- | Historical "renaming monad" (now it's just 'TcRn').
type RnM = TcRn
-- | Historical "type-checking monad" (now it's just 'TcRn').
type TcM = TcRn
-- We 'stack' these envs through the Reader like monad infrastructure
-- as we move into an expression (although the change is focused in
-- the lcl type).
data Env gbl lcl
  = Env {
        env_top :: HscEnv,  -- Top-level stuff that never changes
                            -- Includes all info about imported things

        env_us  :: {-# UNPACK #-} !(IORef UniqSupply),
                            -- Unique supply for local variables

        env_gbl :: gbl,     -- Info about things defined at the top level
                            -- of the module being compiled

        env_lcl :: lcl      -- Nested stuff; changes as we go into
                            -- an expression
    }
instance ContainsDynFlags (Env gbl lcl) where
    -- The session 'DynFlags' live in the top-level 'HscEnv'.
    extractDynFlags = hsc_dflags . env_top
instance ContainsModule gbl => ContainsModule (Env gbl lcl) where
    -- Delegate to whatever module the global environment carries.
    extractModule = extractModule . env_gbl
{-
************************************************************************
* *
The interface environments
Used when dealing with IfaceDecls
* *
************************************************************************
-}
data IfGblEnv
  = IfGblEnv {
        -- Some information about where this environment came from;
        -- useful for debugging.
        if_doc :: SDoc,

        -- The type environment for the module being compiled,
        -- in case the interface refers back to it via a reference that
        -- was originally a hi-boot file.
        -- We need the module name so we can test when it's appropriate
        -- to look in this env.
        -- See Note [Tying the knot] in TcIface
        if_rec_types :: Maybe (Module, IfG TypeEnv)
                -- Allows a read effect, so it can be in a mutable
                -- variable; c.f. handling the external package type env
                -- Nothing => interactive stuff, no loops possible
    }
data IfLclEnv
  = IfLclEnv {
        -- The module for the current IfaceDecl
        -- So if we see   f = \x -> x
        -- it means M.f = \x -> x, where M is the if_mod
        -- NB: This is a semantic module, see
        -- Note [Identity versus semantic module]
        if_mod :: Module,

        -- Whether or not the IfaceDecl came from a boot
        -- file or not; we'll use this to choose between
        -- NoUnfolding and BootUnfolding
        if_boot :: Bool,

        -- The field is used only for error reporting
        -- if (say) there's a Lint error in it
        if_loc :: SDoc,
                -- Where the interface came from:
                --      .hi file, or GHCi state, or ext core
                -- plus which bit is currently being examined

        -- Substitution for refining hole names while renaming
        -- interfaces; Nothing when no renaming is in progress.
        if_nsubst :: Maybe NameShape,

        if_tv_env :: FastStringEnv TyVar,   -- Nested tyvar bindings
        if_id_env :: FastStringEnv Id       -- Nested id binding
    }
{-
************************************************************************
* *
Desugarer monad
* *
************************************************************************
Now the mondo monad magic (yes, @DsM@ is a silly name)---carry around
a @UniqueSupply@ and some annotations, which
presumably include source-file location information:
-}
-- If '-XParallelArrays' is given, the desugarer populates this table with the corresponding
-- variables found in 'Data.Array.Parallel'.
--
data PArrBuiltin
  = PArrBuiltin
    { lengthPVar         :: Var   -- ^ lengthP
    , replicatePVar      :: Var   -- ^ replicateP
    , singletonPVar      :: Var   -- ^ singletonP
    , mapPVar            :: Var   -- ^ mapP
    , filterPVar         :: Var   -- ^ filterP
    , zipPVar            :: Var   -- ^ zipP
    , crossMapPVar       :: Var   -- ^ crossMapP
    , indexPVar          :: Var   -- ^ (!:)
    , emptyPVar          :: Var   -- ^ emptyP
    , appPVar            :: Var   -- ^ (+:+)
    , enumFromToPVar     :: Var   -- ^ enumFromToP
    , enumFromThenToPVar :: Var   -- ^ enumFromThenToP
    }
data DsGblEnv
  = DsGblEnv
    { ds_mod          :: Module             -- For SCC profiling
    , ds_fam_inst_env :: FamInstEnv         -- Like tcg_fam_inst_env
    , ds_unqual       :: PrintUnqualified
    , ds_msgs         :: IORef Messages     -- Warning messages
    , ds_if_env       :: (IfGblEnv, IfLclEnv) -- Used for looking up global,
                                              -- possibly-imported things
    , ds_dph_env      :: GlobalRdrEnv       -- exported entities of 'Data.Array.Parallel.Prim'
                                            -- iff '-fvectorise' flag was given as well as
                                            -- exported entities of 'Data.Array.Parallel' iff
                                            -- '-XParallelArrays' was given; otherwise, empty
    , ds_parr_bi      :: PArrBuiltin        -- desugarer names for '-XParallelArrays'
    }
instance ContainsModule DsGblEnv where
    -- The desugarer's "current module" is the module being compiled (ds_mod).
    extractModule = ds_mod
data DsLclEnv = DsLclEnv {
        dsl_meta    :: DsMetaEnv,   -- Template Haskell bindings
        dsl_loc     :: RealSrcSpan, -- To put in pattern-matching error msgs
        dsl_dicts   :: Bag EvVar,   -- Constraints from GADT pattern-matching
        dsl_tm_cs   :: Bag SimpleEq,  -- presumably term-level equalities for the
                                      -- pattern-match checker (judging by the
                                      -- type and neighbouring fields -- confirm)
        dsl_pm_iter :: IORef Int    -- no. of iterations for pmcheck
    }
-- Inside [| |] brackets, the desugarer looks
-- up variables in the DsMetaEnv
type DsMetaEnv = NameEnv DsMetaVal

data DsMetaVal
  = DsBound Id          -- Bound by a pattern inside the [| |].
                        -- Will be dynamically alpha renamed.
                        -- The Id has type THSyntax.Var

  | DsSplice (HsExpr Id) -- These bindings are introduced by
                         -- the PendingSplices on a HsBracketOut
{-
************************************************************************
* *
Global typechecker environment
* *
************************************************************************
-}
-- | 'FrontendResult' describes the result of running the
-- frontend of a Haskell module. Usually, you'll get
-- a 'FrontendTypecheck', since running the frontend involves
-- typechecking a program, but for an hs-boot merge you'll
-- just get a ModIface, since no actual typechecking occurred.
--
-- This data type really should be in HscTypes, but it needs
-- to have a TcGblEnv which is only defined here.
data FrontendResult
        = FrontendTypecheck TcGblEnv
          -- ^ The frontend ran the typechecker; here is its global result.
-- Note [Identity versus semantic module]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- When typechecking an hsig file, it is convenient to keep track
-- of two different "this module" identifiers:
--
-- - The IDENTITY module is simply thisPackage + the module
-- name; i.e. it uniquely *identifies* the interface file
-- we're compiling. For example, p[A=<A>]:A is an
-- identity module identifying the requirement named A
-- from library p.
--
-- - The SEMANTIC module, which is the actual module that
-- this signature is intended to represent (e.g. if
-- we have a identity module p[A=base:Data.IORef]:A,
-- then the semantic module is base:Data.IORef)
--
-- Which one should you use?
--
-- - In the desugarer and later phases of compilation,
-- identity and semantic modules coincide, since we never compile
-- signatures (we just generate blank object files for
-- hsig files.)
--
-- A corollary of this is that the following invariant holds at any point
-- past desugaring,
--
-- if I have a Module, this_mod, in hand representing the module
-- currently being compiled,
-- then moduleUnitId this_mod == thisPackage dflags
--
-- - For any code involving Names, we want semantic modules.
-- See lookupIfaceTop in IfaceEnv, mkIface and addFingerprints
-- in MkIface, and tcLookupGlobal in TcEnv
--
-- - When reading interfaces, we want the identity module to
-- identify the specific interface we want (such interfaces
-- should never be loaded into the EPS). However, if a
-- hole module <A> is requested, we look for A.hi
-- in the home library we are compiling. (See LoadIface.)
-- Similarly, in RnNames we check for self-imports using
-- identity modules, to allow signatures to import their implementor.
-- | 'TcGblEnv' describes the top-level of the module at the
-- point at which the typechecker has finished its work.
-- It is this structure that is handed on to the desugarer.
-- For state that needs to be updated during the typechecking
-- phase and returned at end, use a 'TcRef' (= 'IORef').
data TcGblEnv
  = TcGblEnv {
        tcg_mod          :: Module,   -- ^ Module being compiled
        tcg_semantic_mod :: Module,   -- ^ If a signature, the backing module
            -- See also Note [Identity versus semantic module]
        tcg_src :: HscSource,
          -- ^ What kind of module (regular Haskell, hs-boot, hsig)

        tcg_rdr_env :: GlobalRdrEnv,  -- ^ Top level envt; used during renaming
        tcg_default :: Maybe [Type],
          -- ^ Types used for defaulting. @Nothing@ => no @default@ decl

        tcg_fix_env   :: FixityEnv,   -- ^ Just for things in this module
        tcg_field_env :: RecFieldEnv, -- ^ Just for things in this module
                                      -- See Note [The interactive package] in HscTypes

        tcg_type_env :: TypeEnv,
          -- ^ Global type env for the module we are compiling now.  All
          -- TyCons and Classes (for this module) end up in here right away,
          -- along with their derived constructors, selectors.
          --
          -- (Ids defined in this module start in the local envt, though they
          --  move to the global envt during zonking)
          --
          -- NB: for what "things in this module" means, see
          -- Note [The interactive package] in HscTypes

        tcg_type_env_var :: TcRef TypeEnv,
                -- Used only to initialise the interface-file
                -- typechecker in initIfaceTcRn, so that it can see stuff
                -- bound in this module when dealing with hi-boot recursions
                -- Updated at intervals (e.g. after dealing with types and classes)

        tcg_inst_env :: InstEnv,
          -- ^ Instance envt for all /home-package/ modules;
          -- Includes the dfuns in tcg_insts
        tcg_fam_inst_env :: FamInstEnv, -- ^ Ditto for family instances
        tcg_ann_env      :: AnnEnv,     -- ^ And for annotations

        -- | Family instances we have to check for consistency.
        -- Invariant: each FamInst in the list's fi_fam matches the
        -- key of the entry in the 'NameEnv'.  This gets consumed
        -- by 'checkRecFamInstConsistency'.
        -- See Note [Don't check hs-boot type family instances too early]
        tcg_pending_fam_checks :: NameEnv [([FamInst], FamInstEnv)],

                -- Now a bunch of things about this module that are simply
                -- accumulated, but never consulted until the end.
                -- Nevertheless, it's convenient to accumulate them along
                -- with the rest of the info from this module.
        tcg_exports :: [AvailInfo],     -- ^ What is exported
        tcg_imports :: ImportAvails,
          -- ^ Information about what was imported from where, including
          -- things bound in this module. Also store Safe Haskell info
          -- here about transitive trusted package requirements.

        tcg_dus       :: DefUses,  -- ^ What is defined in this module and what is used.
        tcg_used_gres :: TcRef [GlobalRdrElt],  -- ^ Records occurrences of imported entities
          -- See Note [Tracking unused binding and imports]

        tcg_keep :: TcRef NameSet,
          -- ^ Locally-defined top-level names to keep alive.
          --
          -- "Keep alive" means give them an Exported flag, so that the
          -- simplifier does not discard them as dead code, and so that they
          -- are exposed in the interface file (but not to export to the
          -- user).
          --
          -- Some things, like dict-fun Ids and default-method Ids are "born"
          -- with the Exported flag on, for exactly the above reason, but some
          -- we only discover as we go.  Specifically:
          --
          --   * The to/from functions for generic data types
          --
          --   * Top-level variables appearing free in the RHS of an orphan
          --     rule
          --
          --   * Top-level variables appearing free in a TH bracket

        tcg_th_used :: TcRef Bool,
          -- ^ @True@ <=> Template Haskell syntax used.
          --
          -- We need this so that we can generate a dependency on the
          -- Template Haskell package, because the desugarer is going
          -- to emit loads of references to TH symbols.  The reference
          -- is implicit rather than explicit, so we have to zap a
          -- mutable variable.

        tcg_th_splice_used :: TcRef Bool,
          -- ^ @True@ <=> A Template Haskell splice was used.
          --
          -- Splices disable recompilation avoidance (see #481)

        tcg_th_top_level_locs :: TcRef (Set RealSrcSpan),
          -- ^ Locations of the top-level splices; used for providing details on
          -- scope in error messages for out-of-scope variables

        tcg_dfun_n :: TcRef OccSet,
          -- ^ Allows us to choose unique DFun names.

        tcg_merged :: [(Module, Fingerprint)],
          -- ^ The requirements we merged with; we always have to recompile
          -- if any of these changed.

        -- The next fields accumulate the payload of the module
        -- The binds, rules and foreign-decl fields are collected
        -- initially in un-zonked form and are finally zonked in tcRnSrcDecls

        tcg_rn_exports :: Maybe [Located (IE Name)],
                -- Nothing <=> no explicit export list
                -- Is always Nothing if we don't want to retain renamed
                -- exports

        tcg_rn_imports :: [LImportDecl Name],
                -- Keep the renamed imports regardless.  They are not
                -- voluminous and are needed if you want to report unused imports

        tcg_rn_decls :: Maybe (HsGroup Name),
          -- ^ Renamed decls, maybe.  @Nothing@ <=> Don't retain renamed
          -- decls.

        tcg_dependent_files :: TcRef [FilePath], -- ^ dependencies from addDependentFile

        tcg_th_topdecls :: TcRef [LHsDecl RdrName],
          -- ^ Top-level declarations from addTopDecls

        tcg_th_topnames :: TcRef NameSet,
          -- ^ Exact names bound in top-level declarations in tcg_th_topdecls

        tcg_th_modfinalizers :: TcRef [TcM ()],
          -- ^ Template Haskell module finalizers.
          --
          -- They are computations in the @TcM@ monad rather than @Q@ because we
          -- set them to use particular local environments.

        tcg_th_state        :: TcRef (Map TypeRep Dynamic),
        tcg_th_remote_state :: TcRef (Maybe (ForeignRef (IORef QState))),
          -- ^ Template Haskell state

        tcg_ev_binds :: Bag EvBind,     -- Top-level evidence bindings

        -- Things defined in this module, or (in GHCi)
        -- in the declarations for a single GHCi command.
        -- For the latter, see Note [The interactive package] in HscTypes
        tcg_tr_module :: Maybe Id,  -- Id for $trModule :: GHC.Types.Module
                                    -- for which every module has a top-level defn
                                    -- except in GHCi in which case we have Nothing
        tcg_binds     :: LHsBinds Id,       -- Value bindings in this module
        tcg_sigs      :: NameSet,           -- ...Top-level names that *lack* a signature
        tcg_imp_specs :: [LTcSpecPrag],     -- ...SPECIALISE prags for imported Ids
        tcg_warns     :: Warnings,          -- ...Warnings and deprecations
        tcg_anns      :: [Annotation],      -- ...Annotations
        tcg_tcs       :: [TyCon],           -- ...TyCons and Classes
        tcg_insts     :: [ClsInst],         -- ...Instances
        tcg_fam_insts :: [FamInst],         -- ...Family instances
        tcg_rules     :: [LRuleDecl Id],    -- ...Rules
        tcg_fords     :: [LForeignDecl Id], -- ...Foreign import & exports
        tcg_vects     :: [LVectDecl Id],    -- ...Vectorisation declarations
        tcg_patsyns   :: [PatSyn],          -- ...Pattern synonyms

        tcg_doc_hdr :: Maybe LHsDocString, -- ^ Maybe Haddock header docs
        tcg_hpc :: AnyHpcUsage,            -- ^ @True@ if any part of the
                                           -- prog uses hpc instrumentation.

        tcg_self_boot :: SelfBootInfo,     -- ^ Whether this module has a
                                           -- corresponding hi-boot file

        tcg_main :: Maybe Name,            -- ^ The Name of the main
                                           -- function, if this module is
                                           -- the main module.

        tcg_safeInfer :: TcRef (Bool, WarningMessages),
          -- ^ Has the typechecker inferred this module as -XSafe (Safe Haskell)
          -- See Note [Safe Haskell Overlapping Instances Implementation],
          -- although this is used for more than just that failure case.

        tcg_tc_plugins :: [TcPluginSolver],
          -- ^ A list of user-defined plugins for the constraint solver.

        tcg_top_loc :: RealSrcSpan,
          -- ^ The RealSrcSpan this module came from

        tcg_static_wc :: TcRef WantedConstraints
          -- ^ Wanted constraints of static forms.
          -- See Note [Constraints in static forms].
    }
-- NB: topModIdentity, not topModSemantic!
-- Definition sites of orphan identities will be identity modules, not semantic
-- modules.
-- Note [Constraints in static forms]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- When a static form produces constraints like
--
-- f :: StaticPtr (Bool -> String)
-- f = static show
--
-- we collect them in tcg_static_wc and resolve them at the end
-- of type checking. They need to be resolved separately because
-- we don't want to resolve them in the context of the enclosing
-- expression. Consider
--
-- g :: Show a => StaticPtr (a -> String)
-- g = static show
--
-- If the @Show a0@ constraint that the body of the static form produces was
-- resolved in the context of the enclosing expression, then the body of the
-- static form wouldn't be closed because the Show dictionary would come from
-- g's context instead of coming from the top level.
-- | The modules whose orphan instances are visible while typechecking:
-- the module being compiled plus every orphan module brought in by
-- its imports.
tcVisibleOrphanMods :: TcGblEnv -> ModuleSet
tcVisibleOrphanMods tcg_env =
    mkModuleSet (this_mod : orphan_mods)
  where
    this_mod    = tcg_mod tcg_env
    orphan_mods = imp_orphs (tcg_imports tcg_env)
instance ContainsModule TcGblEnv where
    -- Use the semantic module; see Note [Identity versus semantic module].
    extractModule = tcg_semantic_mod
type RecFieldEnv = NameEnv [FieldLabel]
        -- Maps a constructor name *in this module*
        -- to the fields for that constructor.
        -- This is used when dealing with ".." notation in record
        -- construction and pattern matching.
        -- The FieldEnv deals *only* with constructors defined in *this*
        -- module.  For imported modules, we get the same info from the
        -- TypeEnv
data SelfBootInfo
  = NoSelfBoot    -- No corresponding hi-boot file
  | SelfBoot
       { sb_mds :: ModDetails   -- There was a hi-boot file,
       , sb_tcs :: NameSet }    -- defining these TyCons,
-- What is sb_tcs used for?  See Note [Extra dependencies from .hs-boot files]
-- in RnSource
{- Note [Tracking unused binding and imports]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We gather two sorts of usage information
* tcg_dus (defs/uses)
Records *defined* Names (local, top-level)
and *used* Names (local or imported)
Used (a) to report "defined but not used"
(see RnNames.reportUnusedNames)
(b) to generate version-tracking usage info in interface
files (see MkIface.mkUsedNames)
This usage info is mainly gathered by the renamer's
gathering of free-variables
* tcg_used_gres
Used only to report unused import declarations
Records each *occurrence* of an *imported* (not locally-defined) entity.
The occurrence is recorded by keeping a GlobalRdrElt for it.
This is not the GRE that is in the GlobalRdrEnv; rather it
is recorded *after* the filtering done by pickGREs. So it reflects
/how that occurrence is in scope/. See Note [GRE filtering] in
RdrName.
************************************************************************
* *
The local typechecker environment
* *
************************************************************************
Note [The Global-Env/Local-Env story]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
During type checking, we keep in the tcg_type_env
* All types and classes
* All Ids derived from types and classes (constructors, selectors)
At the end of type checking, we zonk the local bindings,
and as we do so we add to the tcg_type_env
* Locally defined top-level Ids
Why? Because they are now Ids not TcIds. This final GlobalEnv is
a) fed back (via the knot) to typechecking the
unfoldings of interface signatures
b) used in the ModDetails of this module
-}
data TcLclEnv           -- Changes as we move inside an expression
                        -- Discarded after typecheck/rename; not passed on to desugarer
  = TcLclEnv {
        tcl_loc   :: RealSrcSpan,    -- Source span
        tcl_ctxt  :: [ErrCtxt],      -- Error context, innermost on top
        tcl_tclvl :: TcLevel,        -- Birthplace for new unification variables

        tcl_th_ctxt  :: ThStage,     -- Template Haskell context
        tcl_th_bndrs :: ThBindEnv,   -- Binding level of in-scope Names
                                     -- defined in this module (not imported)

        tcl_arrow_ctxt :: ArrowCtxt, -- Arrow-notation context

        tcl_rdr :: LocalRdrEnv,      -- Local name envt
                -- Maintained during renaming, of course, but also during
                -- type checking, solely so that when renaming a Template-Haskell
                -- splice we have the right environment for the renamer.
                --
                --   Does *not* include global name envt; may shadow it
                --   Includes both ordinary variables and type variables;
                --   they are kept distinct because tyvar have a different
                --   occurrence constructor (Name.TvOcc)
                -- We still need the unsullied global name env so that
                --   we can look up record field names

        tcl_env :: TcTypeEnv,        -- The local type environment:
                                     -- Ids and TyVars defined in this module

        tcl_bndrs :: TcIdBinderStack, -- Used for reporting relevant bindings

        tcl_tidy :: TidyEnv,         -- Used for tidying types; contains all
                                     -- in-scope type variables (but not term variables)

        tcl_tyvars :: TcRef TcTyVarSet, -- The "global tyvars"
                -- Namely, the in-scope TyVars bound in tcl_env,
                -- plus the tyvars mentioned in the types of Ids bound
                -- in tcl_lenv.
                -- Why mutable? see notes with tcGetGlobalTyCoVars

        tcl_lie  :: TcRef WantedConstraints, -- Place to accumulate type constraints
        tcl_errs :: TcRef Messages           -- Place to accumulate errors
    }
-- | Local type environment: what each in-scope Name is known to be.
type TcTypeEnv = NameEnv TcTyThing

type ThBindEnv = NameEnv (TopLevelFlag, ThLevel)
   -- Domain = all Ids bound in this module (ie not imported)
   -- The TopLevelFlag tells if the binding is syntactically top level.
   -- We need to know this, because the cross-stage persistence story allows
   -- cross-stage at arbitrary types if the Id is bound at top level.
   --
   -- Nota bene: a ThLevel of 'outerLevel' is *not* the same as being
   -- bound at top level!  See Note [Template Haskell levels] in TcSplice
{- Note [Given Insts]
~~~~~~~~~~~~~~~~~~
Because of GADTs, we have to pass inwards the Insts provided by type signatures
and existential contexts. Consider
data T a where { T1 :: b -> b -> T [b] }
f :: Eq a => T a -> Bool
f (T1 x y) = [x]==[y]
The constructor T1 binds an existential variable 'b', and we need Eq [b].
Well, we have it, because Eq a refines to Eq [b], but we can only spot that if we
pass it inwards.
-}
-- | Type alias for 'IORef'; the convention is we'll use this for mutable
-- bits of data in 'TcGblEnv' which are updated during typechecking and
-- returned at the end.
type TcRef a = IORef a

-- ToDo: when should I refer to it as a 'TcId' instead of an 'Id'?
type TcId    = Id
type TcIdSet = IdSet
---------------------------
-- The TcIdBinderStack
---------------------------

type TcIdBinderStack = [TcIdBinder]
   -- This is a stack of locally-bound ids, innermost on top
   -- Used only in error reporting (relevantBindings in TcError)
   -- We can't use the tcl_env type environment, because it doesn't
   --   keep track of the nesting order
data TcIdBinder
  = TcIdBndr
       TcId
       TopLevelFlag    -- Tells whether the binding is syntactically top-level
                       -- (The monomorphic Ids for a recursive group count
                       --  as not-top-level for this purpose.)

  | TcIdBndr_ExpType  -- Variant that allows the type to be specified as
                      -- an ExpType
       Name
       ExpType
       TopLevelFlag
instance Outputable TcIdBinder where
    -- Show the binder's Id/Name followed by its top-level flag in brackets.
    ppr binder = case binder of
      TcIdBndr id top_lvl           -> ppr id <> brackets (ppr top_lvl)
      TcIdBndr_ExpType id _ top_lvl -> ppr id <> brackets (ppr top_lvl)
instance HasOccName TcIdBinder where
    -- Extract the OccName of whichever name the binder carries.
    occName binder = case binder of
      TcIdBndr id _           -> occName (idName id)
      TcIdBndr_ExpType nm _ _ -> occName nm
---------------------------
-- Template Haskell stages and levels
---------------------------

-- | The flavour of a Template Haskell splice: typed or untyped.
data SpliceType = Typed | Untyped
data ThStage    -- See Note [Template Haskell state diagram] in TcSplice
  = Splice SpliceType -- Inside a top-level splice
                      -- This code will be run *at compile time*;
                      --   the result replaces the splice
                      -- Binding level = 0

  | RunSplice (TcRef [ForeignRef (TH.Q ())])
      -- Set when running a splice, i.e. NOT when renaming or typechecking the
      -- Haskell code for the splice. See Note [RunSplice ThLevel].
      --
      -- Contains a list of mod finalizers collected while executing the splice.
      --
      -- 'addModFinalizer' inserts finalizers here, and from here they are taken
      -- to construct an @HsSpliced@ annotation for untyped splices. See Note
      -- [Delaying modFinalizers in untyped splices] in "RnSplice".
      --
      -- For typed splices, the typechecker takes finalizers from here and
      -- inserts them in the list of finalizers in the global environment.
      --
      -- See Note [Collecting modFinalizers in typed splices] in "TcSplice".

  | Comp        -- Ordinary Haskell code
                -- Binding level = 1

  | Brack       -- Inside brackets
      ThStage   --   Enclosing stage
      PendingStuff
data PendingStuff
  = RnPendingUntyped             -- Renaming the inside of an *untyped* bracket
      (TcRef [PendingRnSplice])  -- Pending splices in here

  | RnPendingTyped               -- Renaming the inside of a *typed* bracket

  | TcPending                    -- Typechecking the inside of a typed bracket
      (TcRef [PendingTcSplice])  --   Accumulate pending splices here
      (TcRef WantedConstraints)  --     and type constraints here
-- | Initial stages: ordinary code starts in 'Comp'; annotation payloads
-- and top-level splices start as untyped splices.
topStage, topAnnStage, topSpliceStage :: ThStage
topStage       = Comp
topAnnStage    = Splice Untyped
topSpliceStage = Splice Untyped
instance Outputable ThStage where
    -- Constructor name only; for a bracket, also show the enclosing stage.
    ppr stage = case stage of
      Splice _    -> text "Splice"
      RunSplice _ -> text "RunSplice"
      Comp        -> text "Comp"
      Brack s _   -> text "Brack" <> parens (ppr s)
type ThLevel = Int
        -- NB: see Note [Template Haskell levels] in TcSplice
        -- Incremented when going inside a bracket,
        -- decremented when going inside a splice
        -- NB: ThLevel is one greater than the 'n' in Fig 2 of the
        -- original "Template meta-programming for Haskell" paper

impLevel, outerLevel :: ThLevel
impLevel   = 0  -- Imported things; they can be used inside a top level splice
outerLevel = 1  -- Things defined outside brackets
-- | The Template Haskell binding level of a stage.
-- See Note [Template Haskell levels] in TcSplice.
thLevel :: ThStage -> ThLevel
thLevel stage = case stage of
  Splice _    -> 0
  Comp        -> 1
  Brack s _   -> 1 + thLevel s
  RunSplice _ ->
    -- See Note [RunSplice ThLevel]: the level is never consulted
    -- while a splice is actually executing.
    panic "thLevel: called when running a splice"
{- Note [RunSplice ThLevel]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The 'RunSplice' stage is set when executing a splice, and only when running a
splice. In particular it is not set when the splice is renamed or typechecked.
'RunSplice' is needed to provide a reference where 'addModFinalizer' can insert
the finalizer (see Note [Delaying modFinalizers in untyped splices]), and
'addModFinalizer' runs when doing Q things. Therefore, it doesn't make sense to
set 'RunSplice' when renaming or typechecking the splice, where 'Splice', 'Brack'
or 'Comp' are used instead.
-}
---------------------------
-- Arrow-notation context
---------------------------
{- Note [Escaping the arrow scope]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In arrow notation, a variable bound by a proc (or enclosed let/kappa)
is not in scope to the left of an arrow tail (-<) or the head of (|..|).
For example
proc x -> (e1 -< e2)
Here, x is not in scope in e1, but it is in scope in e2. This can get
a bit complicated:
let x = 3 in
proc y -> (proc z -> e1) -< e2
Here, x and z are in scope in e1, but y is not.
We implement this by
recording the environment when passing a proc (using newArrowScope),
and returning to that (using escapeArrowScope) on the left of -< and the
head of (|..|).
All this can be dealt with by the *renamer*. But the type checker needs
to be involved too. Example (arrowfail001)
class Foo a where foo :: a -> ()
data Bar = forall a. Foo a => Bar a
get :: Bar -> ()
get = proc x -> case x of Bar a -> foo -< a
Here the call of 'foo' gives rise to a (Foo a) constraint that should not
be captured by the pattern match on 'Bar'. Rather it should join the
constraints from further out. So we must capture the constraint bag
from further out in the ArrowCtxt that we push inwards.
-}
data ArrowCtxt   -- Note [Escaping the arrow scope]
  = NoArrowCtxt  -- Not inside arrow notation
  | ArrowCtxt LocalRdrEnv (TcRef WantedConstraints)
               -- Environment and constraint bag captured on entering a proc,
               -- restored on the left of -< and the head of (|..|)
---------------------------
-- TcTyThing
---------------------------
-- | A typecheckable thing available in a local context. Could be
-- 'AGlobal' 'TyThing', but also lexically scoped variables, etc.
-- See 'TcEnv' for how to retrieve a 'TyThing' given a 'Name'.
data TcTyThing
  = AGlobal TyThing             -- Used only in the return type of a lookup

  | ATcId {                     -- Ids defined in this module; may not be fully zonked
      tct_id   :: TcId,
      tct_info :: IdBindingInfo }  -- See Note [Bindings with closed types]

  | ATyVar Name TcTyVar         -- The type variable to which the lexically scoped type
                                -- variable is bound. We only need the Name
                                -- for error-message purposes; it is the corresponding
                                -- Name in the domain of the envt

  | ATcTyCon TyCon              -- Used temporarily, during kind checking, for the
                                -- tycons and classes in this recursive group
                                -- The TyCon is always a TcTyCon.  Its kind
                                -- can be a mono-kind or a poly-kind; in TcTyClsDcls see
                                -- Note [Type checking recursive type and class declarations]

  | APromotionErr PromotionErr  -- The thing cannot be promoted; remember why
data PromotionErr
  = TyConPE        -- TyCon used in a kind before we are ready
                   --     data T :: T -> * where ...
  | ClassPE        -- Ditto Class

  | FamDataConPE   -- Data constructor for a data family
                   -- See Note [AFamDataCon: not promoting data family constructors] in TcRnDriver
  | PatSynPE       -- Pattern synonyms
                   -- See Note [Don't promote pattern synonyms] in TcEnv

  | RecDataConPE   -- Data constructor in a recursive loop
                   -- See Note [ARecDataCon: recursion and promoting data constructors] in TcTyClsDecls
  | NoDataKindsTC  -- -XDataKinds not enabled (for a tycon)
  | NoDataKindsDC  -- -XDataKinds not enabled (for a datacon)
  | NoTypeInTypeTC -- -XTypeInType not enabled (for a tycon)
  | NoTypeInTypeDC -- -XTypeInType not enabled (for a datacon)
instance Outputable TcTyThing where    -- Debugging only
    ppr thing = case thing of
      AGlobal g         -> ppr g
      elt@(ATcId {})    -> text "Identifier" <>
                           brackets (ppr (tct_id elt) <> dcolon
                                     <> ppr (varType (tct_id elt)) <> comma
                                     <+> ppr (tct_info elt))
      ATyVar n tv       -> text "Type variable" <+> quotes (ppr n)
                           <+> equals <+> ppr tv
      ATcTyCon tc       -> text "ATcTyCon" <+> ppr tc
      APromotionErr err -> text "APromotionErr" <+> ppr err
-- | Describes how an Id is bound.
--
-- It is used for the following purposes:
--
-- a) for static forms in TcExpr.checkClosedInStaticForm and
-- b) to figure out when a nested binding can be generalised (in
-- TcBinds.decideGeneralisationPlan).
--
-- See Note [Meaning of IdBindingInfo].
data IdBindingInfo
  = NotLetBound
  | ClosedLet
  | NonClosedLet NameSet Bool

-- Note [Meaning of IdBindingInfo]
--
-- @NotLetBound@ means that the Id is not let-bound (e.g. it is bound in a
-- lambda-abstraction or in a case pattern).
--
-- @ClosedLet@ means that the Id is let-bound, it is closed and its type is
-- closed as well.
--
-- @NonClosedLet fvs type-closed@ means that the Id is let-bound but it is not
-- closed. The @fvs@ set contains the free variables of the rhs. The type-closed
-- flag indicates if the type of Id is closed.
instance Outputable IdBindingInfo where
    -- NB: both let-bound cases deliberately print as "TopLevelLet".
    ppr info = case info of
      NotLetBound                 -> text "NotLetBound"
      ClosedLet                   -> text "TopLevelLet"
      NonClosedLet fvs closed_ty  -> text "TopLevelLet"
                                     <+> ppr fvs <+> ppr closed_ty
-- | Tells if a group of binders is closed.
--
-- When it is not closed, it provides a map of binder ids to the free vars
-- in their right-hand sides.
--
data IsGroupClosed = ClosedGroup
                   | NonClosedGroup (NameEnv NameSet)
instance Outputable PromotionErr where
    -- Print the constructor name verbatim (debugging output).
    ppr err = case err of
      ClassPE        -> text "ClassPE"
      TyConPE        -> text "TyConPE"
      PatSynPE       -> text "PatSynPE"
      FamDataConPE   -> text "FamDataConPE"
      RecDataConPE   -> text "RecDataConPE"
      NoDataKindsTC  -> text "NoDataKindsTC"
      NoDataKindsDC  -> text "NoDataKindsDC"
      NoTypeInTypeTC -> text "NoTypeInTypeTC"
      NoTypeInTypeDC -> text "NoTypeInTypeDC"
-- | A short human-readable category for a 'TcTyThing', used in messages.
pprTcTyThingCategory :: TcTyThing -> SDoc
pprTcTyThingCategory thing = case thing of
  AGlobal g        -> pprTyThingCategory g
  ATyVar {}        -> text "Type variable"
  ATcId {}         -> text "Local identifier"
  ATcTyCon {}      -> text "Local tycon"
  APromotionErr pe -> pprPECategory pe
-- | The category of the thing that failed to promote, for error messages.
pprPECategory :: PromotionErr -> SDoc
pprPECategory err = case err of
  ClassPE        -> text "Class"
  TyConPE        -> text "Type constructor"
  PatSynPE       -> text "Pattern synonym"
  FamDataConPE   -> text "Data constructor"
  RecDataConPE   -> text "Data constructor"
  NoDataKindsTC  -> text "Type constructor"
  NoDataKindsDC  -> text "Data constructor"
  NoTypeInTypeTC -> text "Type constructor"
  NoTypeInTypeDC -> text "Data constructor"
{- Note [Bindings with closed types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f x = let g ys = map not ys
in ...
Can we generalise 'g' under the OutsideIn algorithm? Yes,
because all g's free variables are top-level; that is they themselves
have no free type variables, and it is the type variables in the
environment that makes things tricky for OutsideIn generalisation.
Definition:
A variable is "closed", and has tct_info set to TopLevel,
iff
a) all its free variables are imported, or are let-bound and closed
b) generalisation is not restricted by the monomorphism restriction
Invariant: a closed variable has no free type variables in its type.
Why? Assume (induction hypothesis) that closed variables have closed
types, and that we have a new binding f = e, satisfying (a) and (b).
Then since monomorphism restriction does not apply, and there are no
free type variables, we can fully generalise, so its type will be closed.
Under OutsideIn we are free to generalise a closed let-binding.
This is an extension compared to the JFP paper on OutsideIn, which
used "top-level" as a proxy for "closed". (It's not a good proxy
anyway -- the MR can make a top-level binding with a free type
variable.)
Note that:
* A top-level binding may not be closed, if it suffers from the MR
* A nested binding may be closed (eg 'g' in the example we started with)
Indeed, that's the point; whether a function is defined at top level
or nested is orthogonal to the question of whether or not it is closed
* A binding may be non-closed because it mentions a lexically scoped
*type variable* Eg
f :: forall a. blah
f x = let g y = ...(y::a)...
-}
-- | One layer of error context.  The function renders the context message
-- after tidying, and the 'Bool' marks a landmark frame.
type ErrCtxt = (Bool, TidyEnv -> TcM (TidyEnv, MsgDoc))
        -- Monadic so that we have a chance
        -- to deal with bound type variables just before error
        -- message construction

        -- Bool:  True <=> this is a landmark context; do not
        --                 discard it when trimming for display
{-
************************************************************************
* *
Operations over ImportAvails
* *
************************************************************************
-}
-- | 'ImportAvails' summarises what was imported from where, irrespective of
-- whether the imported things are actually used or not. It is used:
--
-- * when processing the export list,
--
-- * when constructing usage info for the interface file,
--
-- * to identify the list of directly imported modules for initialisation
-- purposes and for optimised overlap checking of family instances,
--
-- * when figuring out what things are really unused
--
data ImportAvails
   = ImportAvails {
        imp_mods :: ImportedMods,
          --      = ModuleEnv [ImportedModsVal],
          -- ^ Domain is all directly-imported modules
          --
          -- See the documentation on ImportedModsVal in HscTypes for the
          -- meaning of the fields.
          --
          -- We need a full ModuleEnv rather than a ModuleNameEnv here,
          -- because we might be importing modules of the same name from
          -- different packages. (currently not the case, but might be in the
          -- future).

        imp_dep_mods :: DModuleNameEnv (ModuleName, IsBootInterface),
          -- ^ Home-package modules needed by the module being compiled
          --
          -- It doesn't matter whether any of these dependencies
          -- are actually /used/ when compiling the module; they
          -- are listed if they are below it at all.  For
          -- example, suppose M imports A which imports X.  Then
          -- compiling M might not need to consult X.hi, but X
          -- is still listed in M's dependencies.

        imp_dep_pkgs :: [InstalledUnitId],
          -- ^ Packages needed by the module being compiled, whether directly,
          -- or via other modules in this package, or via modules imported
          -- from other packages.

        imp_trust_pkgs :: [InstalledUnitId],
          -- ^ This is strictly a subset of imp_dep_pkgs and records the
          -- packages the current module needs to trust for Safe Haskell
          -- compilation to succeed. A package is required to be trusted if
          -- we are dependent on a trustworthy module in that package.
          -- While perhaps making imp_dep_pkgs a tuple of (UnitId, Bool)
          -- where True for the bool indicates the package is required to be
          -- trusted is the more logical  design, doing so complicates a lot
          -- of code not concerned with Safe Haskell.
          -- See Note [RnNames . Tracking Trust Transitively]

        imp_trust_own_pkg :: Bool,
          -- ^ Do we require that our own package is trusted?
          -- This is to handle efficiently the case where a Safe module imports
          -- a Trustworthy module that resides in the same package as it.
          -- See Note [RnNames . Trust Own Package]

        imp_orphs :: [Module],
          -- ^ Orphan modules below us in the import tree (and maybe including
          -- us for imported modules)

        imp_finsts :: [Module]
          -- ^ Family instance modules below us in the import tree (and maybe
          -- including us for imported modules)
      }
-- | Build a module-name-keyed env from a dependency list.  If the same
-- module name occurs twice, the later entry wins (addToUDFM overwrites).
mkModDeps :: [(ModuleName, IsBootInterface)]
          -> DModuleNameEnv (ModuleName, IsBootInterface)
mkModDeps = foldl (\env dep@(mod_nm, _) -> addToUDFM env mod_nm dep) emptyUDFM
-- | An 'ImportAvails' describing a module with no imports at all.
emptyImportAvails :: ImportAvails
emptyImportAvails = ImportAvails { imp_mods          = emptyModuleEnv,
                                   imp_dep_mods      = emptyUDFM,
                                   imp_dep_pkgs      = [],
                                   imp_trust_pkgs    = [],
                                   imp_trust_own_pkg = False,
                                   imp_orphs         = [],
                                   imp_finsts        = [] }
-- | Union two ImportAvails
--
-- This function is a key part of Import handling, basically
-- for each import we create a separate ImportAvails structure
-- and then union them all together with this function.
plusImportAvails :: ImportAvails -> ImportAvails -> ImportAvails
plusImportAvails
  (ImportAvails { imp_mods = mods1,
                  imp_dep_mods = dmods1, imp_dep_pkgs = dpkgs1,
                  imp_trust_pkgs = tpkgs1, imp_trust_own_pkg = tself1,
                  imp_orphs = orphs1, imp_finsts = finsts1 })
  (ImportAvails { imp_mods = mods2,
                  imp_dep_mods = dmods2, imp_dep_pkgs = dpkgs2,
                  imp_trust_pkgs = tpkgs2, imp_trust_own_pkg = tself2,
                  imp_orphs = orphs2, imp_finsts = finsts2 })
  -- Every field is unioned; nothing from either side is dropped.
  = ImportAvails { imp_mods          = plusModuleEnv_C (++) mods1 mods2,
                   imp_dep_mods      = plusUDFM_C plus_mod_dep dmods1 dmods2,
                   imp_dep_pkgs      = dpkgs1 `unionLists` dpkgs2,
                   imp_trust_pkgs    = tpkgs1 `unionLists` tpkgs2,
                   imp_trust_own_pkg = tself1 || tself2,
                   imp_orphs         = orphs1 `unionLists` orphs2,
                   imp_finsts        = finsts1 `unionLists` finsts2 }
  where
    plus_mod_dep (m1, boot1) (m2, boot2)
      = WARN( not (m1 == m2), (ppr m1 <+> ppr m2) $$ (ppr boot1 <+> ppr boot2) )
        -- Check mod-names match
        (m1, boot1 && boot2) -- If either side can "see" a non-hi-boot interface, use that
{-
************************************************************************
* *
\subsection{Where from}
* *
************************************************************************
The @WhereFrom@ type controls where the renamer looks for an interface file
-}
-- | Where a request to load an interface file came from.
data WhereFrom
  = ImportByUser IsBootInterface -- Ordinary user import (perhaps {-# SOURCE #-})
  | ImportBySystem               -- Non user import.
  | ImportByPlugin               -- Importing a plugin;
                                 -- See Note [Care with plugin imports] in LoadIface
instance Outputable WhereFrom where
  -- Plain user imports print as nothing at all; the other cases mimic
  -- the pragma-style syntax a user could have written.
  ppr (ImportByUser is_boot) = if is_boot then text "{- SOURCE -}" else empty
  ppr ImportBySystem         = text "{- SYSTEM -}"
  ppr ImportByPlugin         = text "{- PLUGIN -}"
{- *********************************************************************
* *
Type signatures
* *
********************************************************************* -}
-- These data types need to be here only because
-- TcSimplify uses them, and TcSimplify is fairly
-- low down in the module hierarchy
-- | A renamed/typechecked view of a user type signature: either for an
-- ordinary Id or for a pattern synonym.
data TcSigInfo = TcIdSig     TcIdSigInfo
               | TcPatSynSig TcPatSynInfo

data TcIdSigInfo   -- See Note [Complete and partial type signatures]
  = CompleteSig    -- A complete signature with no wildcards,
                   -- so the complete polymorphic type is known.
      { sig_bndr :: TcId          -- The polymorphic Id with that type

      , sig_ctxt :: UserTypeCtxt  -- In the case of type-class default methods,
                                  -- the Name in the FunSigCtxt is not the same
                                  -- as the TcId; the former is 'op', while the
                                  -- latter is '$dmop' or some such

      , sig_loc  :: SrcSpan       -- Location of the type signature
      }

  | PartialSig     -- A partial type signature (i.e. includes one or more
                   -- wildcards). In this case it doesn't make sense to give
                   -- the polymorphic Id, because we are going to /infer/ its
                   -- type, so we can't make the polymorphic Id ab-initio
      { psig_name  :: Name              -- Name of the function; used when report wildcards
      , psig_hs_ty :: LHsSigWcType Name -- The original partial signature in HsSyn form
      , sig_ctxt   :: UserTypeCtxt
      , sig_loc    :: SrcSpan           -- Location of the type signature
      }
{- Note [Complete and partial type signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A type signature is partial when it contains one or more wildcards
(= type holes). The wildcard can either be:
* A (type) wildcard occurring in sig_theta or sig_tau. These are
stored in sig_wcs.
f :: Bool -> _
g :: Eq _a => _a -> _a -> Bool
* Or an extra-constraints wildcard, stored in sig_cts:
h :: (Num a, _) => a -> a
A type signature is a complete type signature when there are no
wildcards in the type signature, i.e. iff sig_wcs is empty and
sig_extra_cts is Nothing.
-}
-- | A 'TcIdSigInfo' after instantiation: fresh skolems, instantiated
-- theta/tau, and the wildcard bookkeeping for partial signatures.
data TcIdSigInst
  = TISI { sig_inst_sig :: TcIdSigInfo

         , sig_inst_skols :: [(Name, TcTyVar)]
               -- Instantiated type and kind variables SKOLEMS
               -- The Name is the Name that the renamer chose;
               --   but the TcTyVar may come from instantiating
               --   the type and hence have a different unique.
               -- No need to keep track of whether they are truly lexically
               --   scoped because the renamer has named them uniquely
               -- See Note [Binding scoped type variables] in TcSigs

         , sig_inst_theta :: TcThetaType
               -- Instantiated theta.  In the case of a
               -- PartialSig, sig_theta does not include
               -- the extra-constraints wildcard

         , sig_inst_tau :: TcSigmaType   -- Instantiated tau
               -- See Note [sig_inst_tau may be polymorphic]

         -- Relevant for partial signature only
         , sig_inst_wcs :: [(Name, TcTyVar)]
               -- Like sig_inst_skols, but for wildcards.  The named
               -- wildcards scope over the binding, and hence their
               -- Names may appear in type signatures in the binding

         , sig_inst_wcx :: Maybe TcTyVar
               -- Extra-constraints wildcard to fill in, if any
         }
{- Note [sig_inst_tau may be polymorphic]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Note that "sig_inst_tau" might actually be a polymorphic type,
if the original function had a signature like
forall a. Eq a => forall b. Ord b => ....
But that's ok: tcMatchesFun (called by tcRhs) can deal with that
It happens, too! See Note [Polymorphic methods] in TcClassDcl.
Note [Wildcards in partial signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The wildcards in psig_wcs may stand for a type mentioning
the universally-quantified tyvars of psig_ty
E.g. f :: forall a. _ -> a
f x = x
We get sig_inst_skols = [a]
sig_inst_tau = _22 -> a
sig_inst_wcs = [_22]
and _22 in the end is unified with the type 'a'
Moreover the kind of a wildcard in sig_inst_wcs may mention
the universally-quantified tyvars sig_inst_skols
e.g. f :: t a -> t _
Here we get
sig_inst_skols = [k:*, (t::k ->*), (a::k)]
sig_inst_tau = t a -> t _22
sig_inst_wcs = [ _22::k ]
-}
-- | The typechecked form of a pattern-synonym signature, split into its
-- universal/existential binders and required/provided contexts.
data TcPatSynInfo
  = TPSI {
        patsig_name           :: Name,
        patsig_implicit_bndrs :: [TyVarBinder], -- Implicitly-bound kind vars (Inferred) and
                                                -- implicitly-bound type vars (Specified)
          -- See Note [The pattern-synonym signature splitting rule] in TcPatSyn
        patsig_univ_bndrs     :: [TyVar],       -- Bound by explicit user forall
        patsig_req            :: TcThetaType,
        patsig_ex_bndrs       :: [TyVar],       -- Bound by explicit user forall
        patsig_prov           :: TcThetaType,
        patsig_body_ty        :: TcSigmaType
    }
instance Outputable TcSigInfo where
  ppr (TcIdSig     idsi) = ppr idsi
  ppr (TcPatSynSig tpsi) = text "TcPatSynInfo" <+> ppr tpsi

instance Outputable TcIdSigInfo where
  -- A complete signature prints as "x :: <type of x>"; a partial one
  -- shows the original HsSyn signature, flagged with "psig".
  ppr (CompleteSig { sig_bndr = bndr })
      = ppr bndr <+> dcolon <+> ppr (idType bndr)
  ppr (PartialSig { psig_name = name, psig_hs_ty = hs_ty })
      = text "psig" <+> ppr name <+> dcolon <+> ppr hs_ty

instance Outputable TcIdSigInst where
  ppr (TISI { sig_inst_sig = sig, sig_inst_skols = skols
            , sig_inst_theta = theta, sig_inst_tau = tau })
      = hang (ppr sig) 2 (vcat [ ppr skols, ppr theta <+> darrow <+> ppr tau ])

instance Outputable TcPatSynInfo where
  ppr (TPSI{ patsig_name = name}) = ppr name
-- | Is this the instantiation of a partial (wildcard-containing)
-- type signature?
isPartialSig :: TcIdSigInst -> Bool
isPartialSig inst = case sig_inst_sig inst of
                      PartialSig {} -> True
                      _             -> False
{-
************************************************************************
* *
* Canonical constraints *
* *
* These are the constraints the low-level simplifier works with *
* *
************************************************************************
-}
-- The syntax of xi (ξ) types:
-- xi ::= a | T xis | xis -> xis | ... | forall a. tau
-- Two important notes:
-- (i) No type families, unless we are under a ForAll
-- (ii) Note that xi types can contain unexpanded type synonyms;
-- however, the (transitive) expansions of those type synonyms
-- will not contain any type functions, unless we are under a ForAll.
-- We enforce the structure of Xi types when we flatten (TcCanonical)
type Xi = Type       -- In many comments, "xi" ranges over Xi

-- | A bag of constraints, canonical or otherwise.
type Cts = Bag Ct
-- Canonical (and not-yet-canonical) constraints; see the section header
-- above and the invariants documented on each constructor.
data Ct
  -- Atomic canonical constraints
  = CDictCan {  -- e.g.  Num xi
      cc_ev     :: CtEvidence, -- See Note [Ct/evidence invariant]

      cc_class  :: Class,
      cc_tyargs :: [Xi],       -- cc_tyargs are function-free, hence Xi

      cc_pend_sc :: Bool       -- True <=> (a) cc_class has superclasses
                               --          (b) we have not (yet) added those
                               --              superclasses as Givens
           -- NB: cc_pend_sc is used for G/W/D.  For W/D the reason
           --     we need superclasses is to expose possible improvement
           --     via fundeps
    }

  | CIrredEvCan {  -- These stand for yet-unusable predicates
      cc_ev :: CtEvidence   -- See Note [Ct/evidence invariant]
        -- The ctev_pred of the evidence is
        -- of form   (tv xi1 xi2 ... xin)
        --      or   (tv1 ~ ty2)   where the CTyEqCan  kind invariant fails
        --      or   (F tys ~ ty)  where the CFunEqCan kind invariant fails
        -- See Note [CIrredEvCan constraints]
    }

  | CTyEqCan {  -- tv ~ rhs
       -- Invariants:
       --   * See Note [Applying the inert substitution] in TcFlatten
       --   * tv not in tvs(rhs)   (occurs check)
       --   * If tv is a TauTv, then rhs has no foralls
       --       (this avoids substituting a forall for the tyvar in other types)
       --   * typeKind ty `tcEqKind` typeKind tv
       --   * rhs may have at most one top-level cast
       --   * rhs (perhaps under the one cast) is not necessarily function-free,
       --       but it has no top-level function.
       --     E.g. a ~ [F b]  is fine
       --     but  a ~ F b    is not
       --   * If the equality is representational, rhs has no top-level newtype
       --     See Note [No top-level newtypes on RHS of representational
       --     equalities] in TcCanonical
       --   * If rhs (perhaps under the cast) is also a tv, then it is oriented
       --     to give best chance of
       --     unification happening; eg if rhs is touchable then lhs is too
      cc_ev     :: CtEvidence, -- See Note [Ct/evidence invariant]
      cc_tyvar  :: TcTyVar,
      cc_rhs    :: TcType,     -- Not necessarily function-free (hence not Xi)
                               -- See invariants above
      cc_eq_rel :: EqRel       -- INVARIANT: cc_eq_rel = ctEvEqRel cc_ev
    }

  | CFunEqCan {  -- F xis ~ fsk
       -- Invariants:
       --   * isTypeFamilyTyCon cc_fun
       --   * typeKind (F xis) = tyVarKind fsk
       --   * always Nominal role
      cc_ev     :: CtEvidence, -- See Note [Ct/evidence invariant]
      cc_fun    :: TyCon,      -- A type function

      cc_tyargs :: [Xi],       -- cc_tyargs are function-free (hence Xi)
        -- Either under-saturated or exactly saturated
        --    *never* over-saturated (because if so
        --    we should have decomposed)

      cc_fsk    :: TcTyVar  -- [Given]  always a FlatSkol skolem
                            -- [Wanted] always a FlatMetaTv unification variable
        -- See Note [The flattening story] in TcFlatten
    }

  | CNonCanonical {  -- See Note [NonCanonical Semantics] in TcSMonad
      cc_ev :: CtEvidence
    }

  | CHoleCan {  -- See Note [Hole constraints]
                -- Treated as an "insoluble" constraint
                -- See Note [Insoluble constraints]
      cc_ev   :: CtEvidence,
      cc_hole :: Hole
    }
-- | An expression or type hole
data Hole = ExprHole UnboundVar
            -- ^ Either an out-of-scope variable or a "true" hole in an
            -- expression (TypedHoles)
          | TypeHole OccName
            -- ^ A hole in a type (PartialTypeSignatures)
-- | The 'OccName' of a hole: the unbound variable's occurrence name for
-- an expression hole, or the wildcard's name for a type hole.
holeOcc :: Hole -> OccName
holeOcc hole = case hole of
                 ExprHole uv  -> unboundVarOcc uv
                 TypeHole occ -> occ
{-
Note [Hole constraints]
~~~~~~~~~~~~~~~~~~~~~~~
CHoleCan constraints are used for two kinds of holes,
distinguished by cc_hole:
* For holes in expressions (including variables not in scope)
e.g. f x = g _ x
* For holes in type signatures
e.g. f :: _ -> _
f x = [x,True]
Note [CIrredEvCan constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
CIrredEvCan constraints are used for constraints that are "stuck"
- we can't solve them (yet)
- we can't use them to solve other constraints
- but they may become soluble if we substitute for some
of the type variables in the constraint
Example 1: (c Int), where c :: * -> Constraint. We can't do anything
with this yet, but if later c := Num, *then* we can solve it
Example 2: a ~ b, where a :: *, b :: k, where k is a kind variable
We don't want to use this to substitute 'b' for 'a', in case
'k' is subsequently unified with (say) *->*, because then
we'd have ill-kinded types floating about. Rather we want
to defer using the equality altogether until 'k' get resolved.
Note [Ct/evidence invariant]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If ct :: Ct, then extra fields of 'ct' cache precisely the ctev_pred field
of (cc_ev ct), and is fully rewritten wrt the substitution. Eg for CDictCan,
ctev_pred (cc_ev ct) = (cc_class ct) (cc_tyargs ct)
This holds by construction; look at the unique place where CDictCan is
built (in TcCanonical).
In contrast, the type of the evidence *term* (ccev_evtm or ctev_evar/dest) in
the evidence may *not* be fully zonked; we are careful not to look at it
during constraint solving. See Note [Evidence field of CtEvidence]
-}
-- | Wrap evidence in a (not-yet-canonicalised) constraint.
mkNonCanonical :: CtEvidence -> Ct
mkNonCanonical = CNonCanonical
-- | Forget a constraint's canonical structure, keeping only its evidence.
mkNonCanonicalCt :: Ct -> Ct
mkNonCanonicalCt = mkNonCanonical . cc_ev
-- | Make one non-canonical Given constraint per evidence Id, all placed
-- at the same location.
mkGivens :: CtLoc -> [EvId] -> [Ct]
mkGivens loc ev_ids
  = [ mkNonCanonical (CtGiven { ctev_evar = ev_id
                              , ctev_pred = evVarPred ev_id
                              , ctev_loc  = loc })
    | ev_id <- ev_ids ]
-- | The evidence carried by a constraint.
ctEvidence :: Ct -> CtEvidence
ctEvidence = cc_ev

-- | Where the constraint arose.
ctLoc :: Ct -> CtLoc
ctLoc = ctEvLoc . ctEvidence

-- | Replace the location stored inside the constraint's evidence.
setCtLoc :: Ct -> CtLoc -> Ct
setCtLoc ct loc = ct { cc_ev = (cc_ev ct) { ctev_loc = loc } }

-- | The origin recorded in the constraint's location.
ctOrigin :: Ct -> CtOrigin
ctOrigin = ctLocOrigin . ctLoc

ctPred :: Ct -> PredType
-- See Note [Ct/evidence invariant]
ctPred ct = ctEvPred (cc_ev ct)
-- | Makes a new equality predicate with the same role as the given
-- evidence.
mkTcEqPredLikeEv :: CtEvidence -> TcType -> TcType -> TcType
mkTcEqPredLikeEv ev =
  case predTypeEqRel (ctEvPred ev) of
    NomEq  -> mkPrimEqPred
    ReprEq -> mkReprPrimEqPred
-- | Get the flavour of the given 'Ct' (Given/Wanted/Derived).
ctFlavour :: Ct -> CtFlavour
ctFlavour = ctEvFlavour . ctEvidence

-- | Get the equality relation for the given 'Ct' (nominal/representational).
ctEqRel :: Ct -> EqRel
ctEqRel = ctEvEqRel . ctEvidence
instance Outputable Ct where
  -- Print the evidence, then the constructor name in parentheses, so the
  -- canonical "shape" of the constraint is visible in debug output.
  ppr ct = ppr (cc_ev ct) <+> parens pp_sort
    where
      pp_sort = case ct of
         CTyEqCan {}      -> text "CTyEqCan"
         CFunEqCan {}     -> text "CFunEqCan"
         CNonCanonical {} -> text "CNonCanonical"
         CDictCan { cc_pend_sc = pend_sc }
            | pend_sc     -> text "CDictCan(psc)"
            | otherwise   -> text "CDictCan"
         CIrredEvCan {}   -> text "CIrredEvCan"
         CHoleCan { cc_hole = hole } -> text "CHoleCan:" <+> ppr (holeOcc hole)
{-
************************************************************************
* *
Simple functions over evidence variables
* *
************************************************************************
-}
---------------- Getting free tyvars -------------------------

-- | Returns free variables of constraints as a non-deterministic set
tyCoVarsOfCt :: Ct -> TcTyCoVarSet
tyCoVarsOfCt = fvVarSet . tyCoFVsOfCt

-- | Returns free variables of constraints as a deterministically ordered
-- list. See Note [Deterministic FV] in FV.
tyCoVarsOfCtList :: Ct -> [TcTyCoVar]
tyCoVarsOfCtList = fvVarList . tyCoFVsOfCt

-- | Returns free variables of constraints as a composable FV computation.
-- See Note [Deterministic FV] in FV.
-- NB: the order of the unionFVs below fixes the deterministic ordering
-- of the result; do not rearrange casually.
tyCoFVsOfCt :: Ct -> FV
tyCoFVsOfCt (CTyEqCan { cc_tyvar = tv, cc_rhs = xi })
  = tyCoFVsOfType xi `unionFV` FV.unitFV tv
                     `unionFV` tyCoFVsOfType (tyVarKind tv)
tyCoFVsOfCt (CFunEqCan { cc_tyargs = tys, cc_fsk = fsk })
  = tyCoFVsOfTypes tys `unionFV` FV.unitFV fsk
                       `unionFV` tyCoFVsOfType (tyVarKind fsk)
tyCoFVsOfCt (CDictCan { cc_tyargs = tys }) = tyCoFVsOfTypes tys
tyCoFVsOfCt (CIrredEvCan { cc_ev = ev }) = tyCoFVsOfType (ctEvPred ev)
tyCoFVsOfCt (CHoleCan { cc_ev = ev }) = tyCoFVsOfType (ctEvPred ev)
tyCoFVsOfCt (CNonCanonical { cc_ev = ev }) = tyCoFVsOfType (ctEvPred ev)
-- | Returns free variables of a bag of constraints as a non-deterministic
-- set. See Note [Deterministic FV] in FV.
tyCoVarsOfCts :: Cts -> TcTyCoVarSet
tyCoVarsOfCts = fvVarSet . tyCoFVsOfCts

-- | Returns free variables of a bag of constraints as a deterministically
-- ordered list. See Note [Deterministic FV] in FV.
tyCoVarsOfCtsList :: Cts -> [TcTyCoVar]
tyCoVarsOfCtsList = fvVarList . tyCoFVsOfCts

-- | Returns free variables of a bag of constraints as a composable FV
-- computation. See Note [Deterministic FV] in FV.
tyCoFVsOfCts :: Cts -> FV
tyCoFVsOfCts = foldrBag (unionFV . tyCoFVsOfCt) emptyFV
-- | Returns free variables of WantedConstraints as a non-deterministic
-- set. See Note [Deterministic FV] in FV.
tyCoVarsOfWC :: WantedConstraints -> TyCoVarSet
-- Only called on *zonked* things, hence no need to worry about flatten-skolems
tyCoVarsOfWC = fvVarSet . tyCoFVsOfWC

-- | Returns free variables of WantedConstraints as a deterministically
-- ordered list. See Note [Deterministic FV] in FV.
tyCoVarsOfWCList :: WantedConstraints -> [TyCoVar]
-- Only called on *zonked* things, hence no need to worry about flatten-skolems
tyCoVarsOfWCList = fvVarList . tyCoFVsOfWC

-- | Returns free variables of WantedConstraints as a composable FV
-- computation. See Note [Deterministic FV] in FV.
tyCoFVsOfWC :: WantedConstraints -> FV
-- Only called on *zonked* things, hence no need to worry about flatten-skolems
tyCoFVsOfWC (WC { wc_simple = simple, wc_impl = implic, wc_insol = insol })
  = tyCoFVsOfCts simple `unionFV`
    tyCoFVsOfBag tyCoFVsOfImplic implic `unionFV`
    tyCoFVsOfCts insol

-- | Returns free variables of Implication as a composable FV computation.
-- See Note [Deterministic FV] in FV.
tyCoFVsOfImplic :: Implication -> FV
-- Only called on *zonked* things, hence no need to worry about flatten-skolems
-- The implication's skolems scope over its body, so they are deleted
-- from the free variables of the wanteds and the givens.
tyCoFVsOfImplic (Implic { ic_skols = skols
                        , ic_given = givens, ic_wanted = wanted })
  = FV.delFVs (mkVarSet skols)
      (tyCoFVsOfWC wanted `unionFV` tyCoFVsOfTypes (map evVarPred givens))

tyCoFVsOfBag :: (a -> FV) -> Bag a -> FV
tyCoFVsOfBag tvs_of = foldrBag (unionFV . tvs_of) emptyFV
--------------------------
dropDerivedSimples :: Cts -> Cts
-- Drop all Derived constraints, but make [W] back into [WD],
-- so that if we re-simplify these constraints we will get all
-- the right derived constraints re-generated.  Forgetting this
-- step led to #12936
dropDerivedSimples simples = mapMaybeBag dropDerivedCt simples

dropDerivedCt :: Ct -> Maybe Ct
-- Keep Wanteds (restoring the [WD] shadow status); drop Deriveds.
dropDerivedCt ct
  = case ctEvFlavour ev of
      Wanted WOnly -> Just (ct { cc_ev = ev_wd })
      Wanted _     -> Just ct
      _            -> ASSERT( isDerivedCt ct ) Nothing
                      -- simples are all Wanted or Derived
  where
    ev    = ctEvidence ct
    ev_wd = ev { ctev_nosh = WDeriv }
dropDerivedInsols :: Cts -> Cts
-- See Note [Dropping derived constraints]
dropDerivedInsols insols = filterBag keep insols
  where
    -- insols can include Given; non-Derived constraints are always kept,
    -- Derived ones only when their location says they must be reported.
    keep ct = not (isDerivedCt ct && isDroppableDerivedLoc (ctLoc ct))
isDroppableDerivedLoc :: CtLoc -> Bool
-- Note [Dropping derived constraints]
-- The listed origins are the ones whose Derived constraints we retain.
isDroppableDerivedLoc loc
  = case ctLocOrigin loc of
      HoleOrigin {}    -> False
      KindEqOrigin {}  -> False
      GivenOrigin {}   -> False
      FunDepOrigin1 {} -> False
      FunDepOrigin2 {} -> False
      _                -> True
-- | True if the constraint arises purely from Given constraints,
-- chasing fundep-generated Derived origins recursively.
arisesFromGivens :: Ct -> Bool
arisesFromGivens ct
  = case ctEvidence ct of
      CtGiven {}                   -> True
      CtWanted {}                  -> False
      CtDerived { ctev_loc = loc } -> from_given loc
  where
    from_given :: CtLoc -> Bool
    from_given loc = from_given_origin (ctLocOrigin loc)

    from_given_origin :: CtOrigin -> Bool
    from_given_origin (GivenOrigin {})          = True
    from_given_origin (FunDepOrigin1 _ l1 _ l2) = from_given l1 && from_given l2
    from_given_origin (FunDepOrigin2 _ o1 _ _)  = from_given_origin o1
    from_given_origin _                         = False
{- Note [Dropping derived constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general we discard derived constraints at the end of constraint solving;
see dropDerivedWC. For example
* If we have an unsolved [W] (Ord a), we don't want to complain about
an unsolved [D] (Eq a) as well.
* If we have [W] a ~ Int, [W] a ~ Bool, improvement will generate
[D] Int ~ Bool, and we don't want to report that because it's incomprehensible.
That is why we don't rewrite wanteds with wanteds!
But (tiresomely) we do keep *some* Derived insolubles:
* Insoluble kind equalities (e.g. [D] * ~ (* -> *)) may arise from
a type equality a ~ Int#, say. In future they'll be Wanted, not Derived,
but at the moment they are Derived.
* Insoluble derived equalities (e.g. [D] Int ~ Bool) may arise from
functional dependency interactions, either between Givens or
Wanteds. It seems sensible to retain these:
- For Givens they reflect unreachable code
- For Wanteds it is arguably better to get a fundep error than
a no-instance error (Trac #9612)
* Type holes are derived constraints because they have no evidence
and we want to keep them so we get the error report
Moreover, we keep *all* derived insolubles under some circumstances:
* They are looked at by simplifyInfer, to decide whether to
generalise. Example: [W] a ~ Int, [W] a ~ Bool
We get [D] Int ~ Bool, and indeed the constraints are insoluble,
and we want simplifyInfer to see that, even though we don't
ultimately want to generate an (inexplicable) error message from
To distinguish these cases we use the CtOrigin.
************************************************************************
* *
CtEvidence
The "flavor" of a canonical constraint
* *
************************************************************************
-}
-- Flavour predicates, delegating to the constraint's evidence.
isWantedCt :: Ct -> Bool
isWantedCt = isWanted . cc_ev

isGivenCt :: Ct -> Bool
isGivenCt = isGiven . cc_ev

isDerivedCt :: Ct -> Bool
isDerivedCt = isDerived . cc_ev
-- | Is this constraint a canonical type-variable equality?
-- (The original's explicit CFunEqCan equation was subsumed by the
-- catch-all; behaviour is unchanged.)
isCTyEqCan :: Ct -> Bool
isCTyEqCan ct = case ct of
                  CTyEqCan {} -> True
                  _           -> False
-- Shape predicates / projections over canonical constraints.
isCDictCan_Maybe :: Ct -> Maybe Class
isCDictCan_Maybe (CDictCan {cc_class = cls }) = Just cls
isCDictCan_Maybe _                            = Nothing

isCIrredEvCan :: Ct -> Bool
isCIrredEvCan (CIrredEvCan {}) = True
isCIrredEvCan _                = False

isCFunEqCan_maybe :: Ct -> Maybe (TyCon, [Type])
isCFunEqCan_maybe (CFunEqCan { cc_fun = tc, cc_tyargs = xis }) = Just (tc, xis)
isCFunEqCan_maybe _ = Nothing

isCFunEqCan :: Ct -> Bool
isCFunEqCan (CFunEqCan {}) = True
isCFunEqCan _ = False

isCNonCanonical :: Ct -> Bool
isCNonCanonical (CNonCanonical {}) = True
isCNonCanonical _ = False
-- | Is this a hole constraint (expression hole or type hole)?
isHoleCt :: Ct -> Bool
isHoleCt ct = case ct of
                CHoleCan {} -> True
                _           -> False
isOutOfScopeCt :: Ct -> Bool
-- We treat expression holes representing out-of-scope variables a bit
-- differently when it comes to error reporting
isOutOfScopeCt (CHoleCan { cc_hole = ExprHole (OutOfScope {}) }) = True
isOutOfScopeCt _ = False

-- | Is this an expression hole (including out-of-scope variables)?
isExprHoleCt :: Ct -> Bool
isExprHoleCt (CHoleCan { cc_hole = ExprHole {} }) = True
isExprHoleCt _ = False

-- | Is this a hole in a (partial) type signature?
isTypeHoleCt :: Ct -> Bool
isTypeHoleCt (CHoleCan { cc_hole = TypeHole {} }) = True
isTypeHoleCt _ = False
{- Note [Custom type errors in constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When GHC reports a type-error about an unsolved-constraint, we check
to see if the constraint contains any custom-type errors, and if so
we report them. Here are some examples of constraints containing type
errors:
TypeError msg -- The actual constraint is a type error
TypError msg ~ Int -- Some type was supposed to be Int, but ended up
-- being a type error instead
Eq (TypeError msg) -- A class constraint is stuck due to a type error
F (TypeError msg) ~ a -- A type function failed to evaluate due to a type err
It is also possible to have constraints where the type error is nested deeper,
for example see #11990, and also:
Eq (F (TypeError msg)) -- Here the type error is nested under a type-function
-- call, which failed to evaluate because of it,
-- and so the `Eq` constraint was unsolved.
-- This may happen when one function calls another
-- and the called function produced a custom type error.
-}
-- | A constraint is considered to be a custom type error, if it contains
-- custom type errors anywhere in it.
-- See Note [Custom type errors in constraints]
getUserTypeErrorMsg :: Ct -> Maybe Type
getUserTypeErrorMsg ct = findUserTypeError (ctPred ct)
  where
  -- msum picks the leftmost (outermost-first) TypeError found.
  findUserTypeError t = msum ( userTypeError_maybe t
                             : map findUserTypeError (subTys t)
                             )

  -- The immediate sub-types of t.
  -- NB: the inner bindings of 't' deliberately shadow the parameter.
  subTys t = case splitAppTys t of
               (t,[]) ->
                  case splitTyConApp_maybe t of
                    Nothing     -> []
                    Just (_,ts) -> ts
               (t,ts) -> t : ts
-- | True iff the constraint's predicate contains a custom 'TypeError'
-- anywhere inside it.  See Note [Custom type errors in constraints].
isUserTypeErrorCt :: Ct -> Bool
isUserTypeErrorCt ct =
  case getUserTypeErrorMsg ct of
    Nothing -> False
    Just _  -> True
isPendingScDict :: Ct -> Maybe Ct
-- Says whether cc_pend_sc is True, AND if so flips the flag
-- (returning the constraint with superclasses marked as already added)
isPendingScDict ct@(CDictCan { cc_pend_sc = True })
                  = Just (ct { cc_pend_sc = False })
isPendingScDict _ = Nothing
superClassesMightHelp :: Ct -> Bool
-- ^ True if taking superclasses of givens, or of wanteds (to perhaps
-- expose more equalities or functional dependencies) might help to
-- solve this constraint.  See Note [When superclasses help]
superClassesMightHelp ct
  = isWantedCt ct && not (is_ip ct)
  where
    -- Implicit-parameter dictionaries never benefit (see the Note)
    is_ip (CDictCan { cc_class = cls }) = isIPClass cls
    is_ip _                             = False
{- Note [When superclasses help]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
First read Note [The superclass story] in TcCanonical.
We expand superclasses and iterate only if there is an unsolved wanted
for which expansion of superclasses (e.g. from given constraints)
might actually help. The function superClassesMightHelp tells if
doing this superclass expansion might help solve this constraint.
Note that
* Superclasses help only for Wanted constraints. Derived constraints
are not really "unsolved" and we certainly don't want them to
trigger superclass expansion. This was a good part of the loop
in Trac #11523
* Even for Wanted constraints, we say "no" for implicit parameters.
we have [W] ?x::ty, expanding superclasses won't help:
- Superclasses can't be implicit parameters
- If we have a [G] ?x:ty2, then we'll have another unsolved
[D] ty ~ ty2 (from the functional dependency)
which will trigger superclass expansion.
It's a bit of a special case, but it's easy to do. The runtime cost
is low because the unsolved set is usually empty anyway (errors
aside), and the first non-implicit-parameter will terminate the search.
The special case is worth it (Trac #11480, comment:2) because it
applies to CallStack constraints, which aren't type errors. If we have
f :: (C a) => blah
f x = ...undefined...
we'll get a CallStack constraint. If that's the only unsolved
constraint it'll eventually be solved by defaulting. So we don't
want to emit warnings about hitting the simplifier's iteration
limit. A CallStack constraint really isn't an unsolved
constraint; it can always be solved by defaulting.
-}
-- Thin synonyms over Bag operations, kept for readability at call sites.
singleCt :: Ct -> Cts
singleCt = unitBag

andCts :: Cts -> Cts -> Cts
andCts = unionBags

listToCts :: [Ct] -> Cts
listToCts = listToBag

ctsElts :: Cts -> [Ct]
ctsElts = bagToList

consCts :: Ct -> Cts -> Cts
consCts = consBag

snocCts :: Cts -> Ct -> Cts
snocCts = snocBag

extendCtsList :: Cts -> [Ct] -> Cts
extendCtsList cts xs | null xs   = cts   -- avoid a needless union
                     | otherwise = cts `unionBags` listToBag xs

andManyCts :: [Cts] -> Cts
andManyCts = unionManyBags

emptyCts :: Cts
emptyCts = emptyBag

isEmptyCts :: Cts -> Bool
isEmptyCts = isEmptyBag

pprCts :: Cts -> SDoc
pprCts cts = vcat (map ppr (bagToList cts))
{-
************************************************************************
* *
Wanted constraints
These are forced to be in TcRnTypes because
TcLclEnv mentions WantedConstraints
WantedConstraint mentions CtLoc
CtLoc mentions ErrCtxt
ErrCtxt mentions TcM
* *
v%************************************************************************
-}
-- | The constraints gathered while typechecking; simples, nested
-- implications, and constraints already known to be insoluble.
data WantedConstraints
  = WC { wc_simple :: Cts              -- Unsolved constraints, all wanted
       , wc_impl   :: Bag Implication
       , wc_insol  :: Cts              -- Insoluble constraints, can be
                                       -- wanted, given, or derived
                                       -- See Note [Insoluble constraints]
       }
-- | A 'WantedConstraints' with nothing in it.
emptyWC :: WantedConstraints
emptyWC = WC { wc_simple = emptyBag, wc_impl = emptyBag, wc_insol = emptyBag }
-- | A 'WantedConstraints' holding only the given simple wanteds, each
-- wrapped as a non-canonical constraint.
mkSimpleWC :: [CtEvidence] -> WantedConstraints
mkSimpleWC cts = emptyWC { wc_simple = listToBag (map mkNonCanonical cts) }
-- | A 'WantedConstraints' holding only the given implications.
mkImplicWC :: Bag Implication -> WantedConstraints
mkImplicWC implic = emptyWC { wc_impl = implic }
-- | True iff all three collections (simples, implications, insolubles)
-- are empty.
isEmptyWC :: WantedConstraints -> Bool
isEmptyWC (WC { wc_simple = simples, wc_impl = implics, wc_insol = insols })
  = isEmptyBag simples && isEmptyBag implics && isEmptyBag insols
-- | Field-wise union of two 'WantedConstraints'.
andWC :: WantedConstraints -> WantedConstraints -> WantedConstraints
andWC (WC { wc_simple = f1, wc_impl = i1, wc_insol = n1 })
      (WC { wc_simple = f2, wc_impl = i2, wc_insol = n2 })
  = WC { wc_simple = f1 `unionBags` f2
       , wc_impl   = i1 `unionBags` i2
       , wc_insol  = n1 `unionBags` n2 }

-- | Union of a whole list of 'WantedConstraints'.
unionsWC :: [WantedConstraints] -> WantedConstraints
unionsWC = foldr andWC emptyWC
-- | Add simple wanted constraints to a 'WantedConstraints'.
addSimples :: WantedConstraints -> Bag Ct -> WantedConstraints
addSimples wc cts
  = wc { wc_simple = wc_simple wc `unionBags` cts }
    -- Consider: Put the new constraints at the front, so they get solved first

-- | Add implication constraints to a 'WantedConstraints'.
addImplics :: WantedConstraints -> Bag Implication -> WantedConstraints
addImplics wc implic = wc { wc_impl = wc_impl wc `unionBags` implic }

-- | Add insoluble constraints to a 'WantedConstraints'.
addInsols :: WantedConstraints -> Bag Ct -> WantedConstraints
addInsols wc cts
  = wc { wc_insol = wc_insol wc `unionBags` cts }

-- | Project the insoluble constraints.
getInsolubles :: WantedConstraints -> Cts
getInsolubles = wc_insol
dropDerivedWC :: WantedConstraints -> WantedConstraints
-- See Note [Dropping derived constraints]
dropDerivedWC wc@(WC { wc_simple = simples, wc_insol = insols })
  = wc { wc_simple = dropDerivedSimples simples
       , wc_insol  = dropDerivedInsols insols }
    -- The wc_impl implications are already (recursively) filtered
-- | Is this implication fully solved?
isSolvedStatus :: ImplicStatus -> Bool
isSolvedStatus status = case status of
  IC_Solved {} -> True
  _            -> False
-- | Does this implication contain at least one insoluble constraint?
isInsolubleStatus :: ImplicStatus -> Bool
isInsolubleStatus status = case status of
  IC_Insoluble -> True
  _            -> False
-- | Is the implication's status insoluble?
insolubleImplic :: Implication -> Bool
insolubleImplic = isInsolubleStatus . ic_status
-- | True when the constraints contain a truly-insoluble simple
-- constraint, or any insoluble nested implication.
insolubleWC :: WantedConstraints -> Bool
insolubleWC wc
  =  anyBag trulyInsoluble  (wc_insol wc)
  || anyBag insolubleImplic (wc_impl  wc)
trulyInsoluble :: Ct -> Bool
-- Constraints in the wc_insol set which are NOT treated as truly
-- insoluble:
--   a) type holes, arising from PartialTypeSignatures,
--   b) "true" expression holes arising from TypedHoles
--
-- An expression hole or type hole constraint isn't really an error
-- at all; it's a report saying "_ :: Int" here.  But an out-of-scope
-- variable masquerading as an expression hole IS treated as truly
-- insoluble, so that it trumps other errors during error reporting.
-- Yuk!
trulyInsoluble ct
  | isHoleCt ct = isOutOfScopeCt ct
  | otherwise   = True
instance Outputable WantedConstraints where
  -- Empty bags are suppressed entirely by ppr_bag.
  ppr (WC { wc_simple = simples, wc_impl = implics, wc_insol = insols })
    = text "WC" <+> braces (vcat
        [ ppr_bag (text "wc_simple") simples
        , ppr_bag (text "wc_insol")  insols
        , ppr_bag (text "wc_impl")   implics ])
-- | Render @label = elem1 $$ elem2 $$ ...@, or nothing if the bag
-- is empty.
ppr_bag :: Outputable a => SDoc -> Bag a -> SDoc
ppr_bag lbl bag
  | isEmptyBag bag = empty
  | otherwise      = hang (lbl <+> equals)
                        2 (foldrBag (($$) . ppr) empty bag)
{-
************************************************************************
* *
Implication constraints
* *
************************************************************************
-}
-- | An implication constraint: @forall skols. givens => wanteds@,
-- together with the environment and bookkeeping needed to solve
-- and report it.
data Implication
  = Implic {
      ic_tclvl :: TcLevel,       -- TcLevel of unification variables
                                 -- allocated /inside/ this implication

      ic_skols :: [TcTyVar],     -- Introduced skolems
      ic_info  :: SkolemInfo,    -- See Note [Skolems in an implication]
                                 -- See Note [Shadowing in a constraint]

      ic_given :: [EvVar],       -- Given evidence variables
                                 --   (order does not matter)
                                 -- See Invariant (GivenInv) in TcType

      ic_no_eqs :: Bool,         -- True  <=> ic_givens have no equalities, for sure
                                 -- False <=> ic_givens might have equalities

      ic_env :: TcLclEnv,        -- Gives the source location and error context
                                 -- for the implication, and hence for all the
                                 -- given evidence variables

      ic_wanted :: WantedConstraints,  -- The wanted

      ic_binds :: EvBindsVar,    -- Points to the place to fill in the
                                 -- abstraction and bindings.

      ic_needed :: VarSet,       -- Union of the ics_need fields of any /discarded/
                                 -- solved implications in ic_wanted

      ic_status :: ImplicStatus  -- Solved / insoluble / unsolved; see below
    }
-- | How far the solver got with an 'Implication'.
data ImplicStatus
  = IC_Solved     -- All wanteds in the tree are solved, all the way down
       { ics_need :: VarSet     -- Evidence variables bound further out,
                                -- but needed by this solved implication
       , ics_dead :: [EvVar] }  -- Subset of ic_given that are not needed
         -- See Note [Tracking redundant constraints] in TcSimplify

  | IC_Insoluble  -- At least one insoluble constraint in the tree

  | IC_Unsolved   -- Neither of the above; might go either way
instance Outputable Implication where
  -- Show every field except ic_env (the local environment is too big
  -- to be useful in a debug dump).
  ppr (Implic { ic_tclvl = tclvl, ic_skols = skols
              , ic_given = given, ic_no_eqs = no_eqs
              , ic_wanted = wanted, ic_status = status
              , ic_binds = binds, ic_needed = needed , ic_info = info })
   = hang (text "Implic" <+> lbrace)
        2 (sep [ text "TcLevel =" <+> ppr tclvl
               , text "Skolems =" <+> pprTyVars skols
               , text "No-eqs =" <+> ppr no_eqs
               , text "Status =" <+> ppr status
               , hang (text "Given =")  2 (pprEvVars given)
               , hang (text "Wanted =") 2 (ppr wanted)
               , text "Binds =" <+> ppr binds
               , text "Needed =" <+> ppr needed
               , pprSkolInfo info ] <+> rbrace)
instance Outputable ImplicStatus where
  ppr IC_Insoluble = text "Insoluble"
  ppr IC_Unsolved  = text "Unsolved"
  ppr (IC_Solved { ics_need = needed, ics_dead = dead_givens })
    = text "Solved" <+>
      braces (vcat [ text "Dead givens =" <+> ppr dead_givens
                   , text "Needed ="      <+> ppr needed ])
{-
Note [Needed evidence variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The ic_need_evs field holds the free vars of ic_binds, and all the
ic_binds in nested implications.
* Main purpose: if one of the ic_givens is not mentioned in here, it
is redundant.
* solveImplication may drop an implication altogether if it has no
remaining 'wanteds'. But we still track the free vars of its
evidence binds, even though it has now disappeared.
Note [Shadowing in a constraint]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We assume NO SHADOWING in a constraint. Specifically
* The unification variables are all implicitly quantified at top
level, and are all unique
* The skolem variables bound in ic_skols are all fresh when the
implication is created.
So we can safely substitute. For example, if we have
forall a. a~Int => ...(forall b. ...a...)...
we can push the (a~Int) constraint inwards in the "givens" without
worrying that 'b' might clash.
Note [Skolems in an implication]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The skolems in an implication are not there to perform a skolem escape
check. That happens because all the environment variables are in the
untouchables, and therefore cannot be unified with anything at all,
let alone the skolems.
Instead, ic_skols is used only when considering floating a constraint
outside the implication in TcSimplify.floatEqualities or
TcSimplify.approximateImplications
Note [Insoluble constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Some of the errors that we get during canonicalization are best
reported when all constraints have been simplified as much as
possible. For instance, assume that during simplification the
following constraints arise:
[Wanted] F alpha ~ uf1
[Wanted] beta ~ uf1 beta
When canonicalizing the wanted (beta ~ uf1 beta), if we eagerly fail
we will simply see a message:
'Can't construct the infinite type beta ~ uf1 beta'
and the user has no idea what the uf1 variable is.
Instead our plan is that we will NOT fail immediately, but:
(1) Record the "frozen" error in the ic_insols field
(2) Isolate the offending constraint from the rest of the inerts
(3) Keep on simplifying/canonicalizing
At the end, we will hopefully have substituted uf1 := F alpha, and we
will be able to report a more informative error:
'Can't construct the infinite type beta ~ F alpha beta'
Insoluble constraints *do* include Derived constraints. For example,
a functional dependency might give rise to [D] Int ~ Bool, and we must
report that. If insolubles did not contain Deriveds, reportErrors would
never see it.
************************************************************************
* *
Pretty printing
* *
************************************************************************
-}
-- | Print evidence variables, one per line, each with its type.
pprEvVars :: [EvVar] -> SDoc
pprEvVars = vcat . map pprEvVarWithType
-- | Print just the predicates of some evidence variables, as a theta.
pprEvVarTheta :: [EvVar] -> SDoc
pprEvVarTheta = pprTheta . map evVarPred
-- | Print @var :: pred@ for a single evidence variable.
pprEvVarWithType :: EvVar -> SDoc
pprEvVarWithType ev = ppr ev <+> dcolon <+> pprType (evVarPred ev)
{-
************************************************************************
* *
CtEvidence
* *
************************************************************************
Note [Evidence field of CtEvidence]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
During constraint solving we never look at the type of ctev_evar/ctev_dest;
instead we look at the ctev_pred field. The evtm/evar field
may be un-zonked.
Note [Bind new Givens immediately]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For Givens we make new EvVars and bind them immediately. Two main reasons:
* Gain sharing. E.g. suppose we start with g :: C a b, where
class D a => C a b
class (E a, F a) => D a
If we generate all g's superclasses as separate EvTerms we might
get selD1 (selC1 g) :: E a
selD2 (selC1 g) :: F a
selC1 g :: D a
which we could do more economically as:
g1 :: D a = selC1 g
g2 :: E a = selD1 g1
g3 :: F a = selD2 g1
* For *coercion* evidence we *must* bind each given:
class (a~b) => C a b where ....
f :: C a b => ....
Then in f's Givens we have g:(C a b) and the superclass sc(g,0):a~b.
But that superclass selector can't (yet) appear in a coercion
(see evTermCoercion), so the easy thing is to bind it to an Id.
So a Given has EvVar inside it rather than (as previously) an EvTerm.
-}
-- | A place for type-checking evidence to go after it is generated.
-- Wanted equalities are always HoleDest; other wanteds are always
-- EvVarDest.
data TcEvDest
  = EvVarDest EvVar         -- ^ bind this var to the evidence
  | HoleDest  CoercionHole  -- ^ fill in this hole with the evidence
              -- See Note [Coercion holes] in TyCoRep
-- | The evidence (or lack of it) attached to a constraint, together
-- with the constraint's predicate and origin.
data CtEvidence
  = CtGiven    -- Truly given, not depending on subgoals
               -- NB: Spontaneous unifications belong here
      { ctev_pred :: TcPredType   -- See Note [Ct/evidence invariant]
      , ctev_evar :: EvVar        -- See Note [Evidence field of CtEvidence]
      , ctev_loc  :: CtLoc }

  | CtWanted   -- Wanted goal
      { ctev_pred :: TcPredType   -- See Note [Ct/evidence invariant]
      , ctev_dest :: TcEvDest     -- Where the evidence should go
      , ctev_nosh :: ShadowInfo   -- See Note [Constraint flavours]
      , ctev_loc  :: CtLoc }

  | CtDerived  -- A goal that we don't really have to solve and can't
               -- immediately rewrite anything other than a derived
               -- (there's no evidence!) but if we do manage to solve
               -- it may help in solving other goals.
      { ctev_pred :: TcPredType
      , ctev_loc  :: CtLoc }
ctEvPred :: CtEvidence -> TcPredType
-- The predicate of a flavor
ctEvPred ev = ctev_pred ev
-- | Where the constraint came from.
ctEvLoc :: CtEvidence -> CtLoc
ctEvLoc ev = ctev_loc ev
-- | The 'CtOrigin' recorded in the constraint's location.
ctEvOrigin :: CtEvidence -> CtOrigin
ctEvOrigin ev = ctLocOrigin (ctEvLoc ev)
-- | Get the equality relation relevant for a 'CtEvidence'
ctEvEqRel :: CtEvidence -> EqRel
ctEvEqRel ev = predTypeEqRel (ctEvPred ev)
-- | Get the role relevant for a 'CtEvidence'
ctEvRole :: CtEvidence -> Role
ctEvRole ev = eqRelRole (ctEvEqRel ev)
-- | Turn the evidence into an 'EvTerm': a coercion for equality
-- wanteds (whose destination is a hole), an evidence Id otherwise.
ctEvTerm :: CtEvidence -> EvTerm
ctEvTerm ev = case ev of
  CtWanted { ctev_dest = HoleDest _ } -> EvCoercion (ctEvCoercion ev)
  _                                   -> EvId (ctEvId ev)
-- | Extract coercion evidence from an equality 'CtEvidence'.
-- Panics on non-equality predicates and on Derived evidence
-- (Deriveds carry no evidence at all).
ctEvCoercion :: CtEvidence -> Coercion
ctEvCoercion ev@(CtWanted { ctev_dest = HoleDest hole, ctev_pred = pred })
  = case getEqPredTys_maybe pred of
      Just (role, ty1, ty2) -> mkHoleCo hole role ty1 ty2
      _                     -> pprPanic "ctEvCoercion" (ppr ev)
        -- Panic tag was "ctEvTerm" (copy-paste slip); it should name
        -- this function so the panic message points at the right place
ctEvCoercion (CtGiven { ctev_evar = ev_id }) = mkTcCoVarCo ev_id
ctEvCoercion ev = pprPanic "ctEvCoercion" (ppr ev)
-- | The evidence Id of a Given, or of a Wanted whose destination is
-- an evidence variable.  Panics otherwise (hole-dest Wanteds and
-- Deriveds have no Id).
ctEvId :: CtEvidence -> TcId
ctEvId (CtWanted { ctev_dest = EvVarDest var }) = var
ctEvId (CtGiven  { ctev_evar = var })           = var
ctEvId ev = pprPanic "ctEvId:" (ppr ev)
instance Outputable TcEvDest where
  ppr (HoleDest hole) = text "hole" <> ppr hole
  ppr (EvVarDest var) = ppr var
instance Outputable CtEvidence where
  -- Prints e.g.  [W] d {2}:: Eq a   -- flavour, evidence, depth, predicate
  ppr ev = ppr (ctEvFlavour ev)
           <+> pp_ev
           <+> braces (ppr (ctl_depth (ctEvLoc ev))) <> dcolon
                  -- Show the sub-goal depth too
           <+> ppr (ctEvPred ev)
    where
      pp_ev = case ev of
             CtGiven { ctev_evar = v } -> ppr v
             CtWanted {ctev_dest = d } -> ppr d
             CtDerived {}              -> text "_"
-- | Is this a Wanted constraint?
isWanted :: CtEvidence -> Bool
isWanted ev = case ev of
  CtWanted {} -> True
  _           -> False
-- | Is this a Given constraint?
isGiven :: CtEvidence -> Bool
isGiven ev = case ev of
  CtGiven {} -> True
  _          -> False
-- | Is this a Derived constraint?
isDerived :: CtEvidence -> Bool
isDerived ev = case ev of
  CtDerived {} -> True
  _            -> False
{-
%************************************************************************
%* *
CtFlavour
%* *
%************************************************************************
Note [Constraint flavours]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Constraints come in four flavours:
* [G] Given: we have evidence
* [W] Wanted WOnly: we want evidence
* [D] Derived: any solution must satisfy this constraint, but
we don't need evidence for it. Examples include:
- superclasses of [W] class constraints
- equalities arising from functional dependencies
or injectivity
* [WD] Wanted WDeriv: a single constraint that represents
both [W] and [D]
We keep them paired as one both for efficiency, and because
when we have a finite map F tys -> CFunEqCan, it's inconvenient
to have two CFunEqCans in the range
The ctev_nosh field of a Wanted distinguishes between [W] and [WD]
Wanted constraints are born as [WD], but are split into [W] and its
"shadow" [D] in TcSMonad.maybeEmitShadow.
See Note [The improvement story and derived shadows] in TcSMonad
-}
-- | The flavour of a constraint: [G], [W], [WD] or [D].
data CtFlavour  -- See Note [Constraint flavours]
  = Given
  | Wanted ShadowInfo   -- distinguishes [W] from [WD]
  | Derived
  deriving Eq
-- | Whether a Wanted has had its Derived shadow split off.
data ShadowInfo
  = WDeriv   -- [WD] This Wanted constraint has no Derived shadow,
             -- so it behaves like a pair of a Wanted and a Derived
  | WOnly    -- [W] It has a separate derived shadow
             -- See Note [Derived shadows]
  deriving( Eq )
-- | True for [G] and [WD] flavours only.
isGivenOrWDeriv :: CtFlavour -> Bool
isGivenOrWDeriv fl = case fl of
  Given         -> True
  Wanted WDeriv -> True
  _             -> False
instance Outputable CtFlavour where
  ppr fl = case fl of
    Given         -> text "[G]"
    Wanted WDeriv -> text "[WD]"
    Wanted WOnly  -> text "[W]"
    Derived       -> text "[D]"
-- | The flavour of a piece of constraint evidence.
ctEvFlavour :: CtEvidence -> CtFlavour
ctEvFlavour ev = case ev of
  CtWanted { ctev_nosh = nosh } -> Wanted nosh
  CtGiven {}                    -> Given
  CtDerived {}                  -> Derived
-- | Whether or not one 'Ct' can rewrite another is determined by its
-- flavour and its equality relation. See also
-- Note [Flavours with roles] in TcSMonad
type CtFlavourRole = (CtFlavour, EqRel)
-- | Extract the flavour and role from a 'CtEvidence'
ctEvFlavourRole :: CtEvidence -> CtFlavourRole
ctEvFlavourRole ev = (fl, rel)
  where
    fl  = ctEvFlavour ev
    rel = ctEvEqRel ev
-- | Extract the flavour and role from a 'Ct'
ctFlavourRole :: Ct -> CtFlavourRole
ctFlavourRole ct = ctEvFlavourRole (cc_ev ct)
{- Note [eqCanRewrite]
~~~~~~~~~~~~~~~~~~~~~~
(eqCanRewrite ct1 ct2) holds if the constraint ct1 (a CTyEqCan of form
tv ~ ty) can be used to rewrite ct2. It must satisfy the properties of
a can-rewrite relation, see Definition [Can-rewrite relation] in
TcSMonad.
With the solver handling Coercible constraints like equality constraints,
the rewrite conditions must take role into account, never allowing
a representational equality to rewrite a nominal one.
Note [Wanteds do not rewrite Wanteds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We don't allow Wanteds to rewrite Wanteds, because that can give rise
to very confusing type error messages. A good example is Trac #8450.
Here's another
f :: a -> Bool
f x = ( [x,'c'], [x,True] ) `seq` True
Here we get
[W] a ~ Char
[W] a ~ Bool
but we do not want to complain about Bool ~ Char!
Note [Deriveds do rewrite Deriveds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
However we DO allow Deriveds to rewrite Deriveds, because that's how
improvement works; see Note [The improvement story] in TcInteract.
However, for now at least I'm only letting (Derived,NomEq) rewrite
(Derived,NomEq) and not doing anything for ReprEq. If we have
eqCanRewriteFR (Derived, NomEq) (Derived, _) = True
then we lose property R2 of Definition [Can-rewrite relation]
in TcSMonad
R2. If f1 >= f, and f2 >= f,
then either f1 >= f2 or f2 >= f1
Consider f1 = (Given, ReprEq)
f2 = (Derived, NomEq)
f = (Derived, ReprEq)
I thought maybe we could never get Derived ReprEq constraints, but
we can; straight from the Wanteds during improvement. And from a Derived
ReprEq we could conceivably get a Derived NomEq improvement (by decomposing
a type constructor with Nominal role), and hence unify.
-}
eqCanRewriteFR :: CtFlavourRole -> CtFlavourRole -> Bool
-- Can fr1 actually rewrite fr2?
-- Very important function!
-- See Note [eqCanRewrite]
-- See Note [Wanteds do not rewrite Wanteds]
-- See Note [Deriveds do rewrite Deriveds]
-- NB: the equation order matters; earlier (more permissive) cases
-- shadow the catch-all False at the bottom.
eqCanRewriteFR (Given,         NomEq)  (_,       _)      = True
eqCanRewriteFR (Given,         ReprEq) (_,       ReprEq) = True
eqCanRewriteFR (Wanted WDeriv, NomEq)  (Derived, NomEq)  = True
eqCanRewriteFR (Derived,       NomEq)  (Derived, NomEq)  = True
eqCanRewriteFR _                       _                 = False
eqMayRewriteFR :: CtFlavourRole -> CtFlavourRole -> Bool
-- Is it /possible/ that fr1 can rewrite fr2?
-- This is used when deciding which inerts to kick out,
-- at which time a [WD] inert may be split into [W] and [D].
-- Hence the two extra True cases beyond eqCanRewriteFR.
eqMayRewriteFR (Wanted WDeriv, NomEq) (Wanted WDeriv, NomEq) = True
eqMayRewriteFR (Derived,       NomEq) (Wanted WDeriv, NomEq) = True
eqMayRewriteFR fr1 fr2 = eqCanRewriteFR fr1 fr2
-----------------
{- Note [funEqCanDischarge]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have two CFunEqCans with the same LHS:
(x1:F ts ~ f1) `funEqCanDischarge` (x2:F ts ~ f2)
Can we drop x2 in favour of x1, either unifying
f2 (if it's a flatten meta-var) or adding a new Given
(f1 ~ f2), if x2 is a Given?
Answer: yes if funEqCanDischarge is true.
-}
-- | Decide which of two CFunEqCans with the same LHS can discharge
-- the other; see Note [funEqCanDischarge].
funEqCanDischarge
  :: CtEvidence -> CtEvidence
  -> ( SwapFlag   -- NotSwapped => lhs can discharge rhs
                  -- Swapped    => rhs can discharge lhs
     , Bool)      -- True <=> upgrade non-discharded one
                  --          from [W] to [WD]
-- See Note [funEqCanDischarge]
funEqCanDischarge ev1 ev2
  = ASSERT2( ctEvEqRel ev1 == NomEq, ppr ev1 )
    ASSERT2( ctEvEqRel ev2 == NomEq, ppr ev2 )
    -- CFunEqCans are all Nominal, hence asserts
    funEqCanDischargeF (ctEvFlavour ev1) (ctEvFlavour ev2)
-- | Flavour-level truth table for 'funEqCanDischarge'.
-- Equation order matters: Given beats everything, then [WD], etc.
funEqCanDischargeF :: CtFlavour -> CtFlavour -> (SwapFlag, Bool)
funEqCanDischargeF Given           _               = (NotSwapped, False)
funEqCanDischargeF _               Given           = (IsSwapped,  False)
funEqCanDischargeF (Wanted WDeriv) _               = (NotSwapped, False)
funEqCanDischargeF _               (Wanted WDeriv) = (IsSwapped,  True)
funEqCanDischargeF (Wanted WOnly)  (Wanted WOnly)  = (NotSwapped, False)
funEqCanDischargeF (Wanted WOnly)  Derived         = (NotSwapped, True)
funEqCanDischargeF Derived         (Wanted WOnly)  = (IsSwapped,  True)
funEqCanDischargeF Derived         Derived         = (NotSwapped, False)
{- Note [eqCanDischarge]
~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have two identical CTyEqCan equality constraints
(i.e. both LHS and RHS are the same)
(x1:a~t) `eqCanDischarge` (xs:a~t)
Can we just drop x2 in favour of x1?
Answer: yes if eqCanDischarge is true.
Note that we do /not/ allow Wanted to discharge Derived.
We must keep both. Why? Because the Derived may rewrite
other Deriveds in the model whereas the Wanted cannot.
However a Wanted can certainly discharge an identical Wanted. So
eqCanDischarge does /not/ define a can-rewrite relation in the
sense of Definition [Can-rewrite relation] in TcSMonad.
We /do/ say that a [W] can discharge a [WD]. In evidence terms it
certainly can, and the /caller/ arranges that the otherwise-lost [D]
is spat out as a new Derived. -}
eqCanDischarge :: CtEvidence -> CtEvidence -> Bool
-- See Note [eqCanDischarge]
eqCanDischarge ev1 ev2 = fr1 `eqCanDischargeFR` fr2
  where
    fr1 = ctEvFlavourRole ev1
    fr2 = ctEvFlavourRole ev2
-- | Role check for 'eqCanDischarge': a representational equality can
-- never discharge a nominal one; otherwise defer to the flavours.
eqCanDischargeFR :: CtFlavourRole -> CtFlavourRole -> Bool
eqCanDischargeFR (_, ReprEq) (_, NomEq) = False
eqCanDischargeFR (f1,_)      (f2, _)    = eqCanDischargeF f1 f2
-- | Flavour-level truth table for 'eqCanDischarge'.
-- Note: a plain Wanted may NOT discharge a Derived (only [WD] may);
-- see Note [eqCanDischarge].
eqCanDischargeF :: CtFlavour -> CtFlavour -> Bool
eqCanDischargeF Given           _          = True
eqCanDischargeF (Wanted _)      (Wanted _) = True
eqCanDischargeF (Wanted WDeriv) Derived    = True
eqCanDischargeF Derived         Derived    = True
eqCanDischargeF _               _          = False
{-
************************************************************************
* *
SubGoalDepth
* *
************************************************************************
Note [SubGoalDepth]
~~~~~~~~~~~~~~~~~~~
The 'SubGoalDepth' takes care of stopping the constraint solver from looping.
The counter starts at zero and increases. It includes dictionary constraints,
equality simplification, and type family reduction. (Why combine these? Because
it's actually quite easy to mistake one for another, in sufficiently involved
scenarios, like ConstraintKinds.)
The flag -fcontext-stack=n (not very well named!) fixes the maximum
level.
* The counter includes the depth of type class instance declarations. Example:
[W] d{7} : Eq [Int]
That is d's dictionary-constraint depth is 7. If we use the instance
$dfEqList :: Eq a => Eq [a]
to simplify it, we get
d{7} = $dfEqList d'{8}
where d'{8} : Eq Int, and d' has depth 8.
For civilised (decidable) instance declarations, each increase of
depth removes a type constructor from the type, so the depth never
gets big; i.e. is bounded by the structural depth of the type.
* The counter also increments when resolving
equalities involving type functions. Example:
Assume we have a wanted at depth 7:
[W] d{7} : F () ~ a
If there is a type function equation "F () = Int", this would be rewritten to
[W] d{8} : Int ~ a
and remembered as having depth 8.
Again, without UndecidableInstances, this counter is bounded, but without it
can resolve things ad infinitum. Hence there is a maximum level.
* Lastly, every time an equality is rewritten, the counter increases. Again,
rewriting an equality constraint normally makes progress, but it's possible
the "progress" is just the reduction of an infinitely-reducing type family.
Hence we need to track the rewrites.
When compiling a program requires a greater depth, then GHC recommends turning
off this check entirely by setting -freduction-depth=0. This is because the
exact number that works is highly variable, and is likely to change even between
minor releases. Because this check is solely to prevent infinite compilation
times, it seems safe to disable it when a user has ascertained that their program
doesn't loop at the type level.
-}
-- | See Note [SubGoalDepth]
-- A counter bounding the depth of constraint-solver recursion.
newtype SubGoalDepth = SubGoalDepth Int
  deriving (Eq, Ord, Outputable)
-- | Constraints start at depth zero.
initialSubGoalDepth :: SubGoalDepth
initialSubGoalDepth = SubGoalDepth 0
-- | Increase the depth by one.
bumpSubGoalDepth :: SubGoalDepth -> SubGoalDepth
bumpSubGoalDepth (SubGoalDepth d) = SubGoalDepth (d + 1)
-- | The deeper of two depths.
maxSubGoalDepth :: SubGoalDepth -> SubGoalDepth -> SubGoalDepth
maxSubGoalDepth (SubGoalDepth d1) (SubGoalDepth d2)
  = SubGoalDepth (max d1 d2)
-- | Has this depth gone past the limit set by -freduction-depth?
-- (A limit of 0/infinity disables the check.)
subGoalDepthExceeded :: DynFlags -> SubGoalDepth -> Bool
subGoalDepthExceeded dflags (SubGoalDepth depth)
  = mkIntWithInf depth > reductionDepth dflags
{-
************************************************************************
* *
CtLoc
* *
************************************************************************
The 'CtLoc' gives information about where a constraint came from.
This is important for decent error message reporting because
dictionaries don't appear in the original source code.
type will evolve...
-}
-- | Where a constraint came from: its origin, the local environment
-- at its birthplace, whether it is a type- or kind-level constraint,
-- and its solver depth.
data CtLoc = CtLoc { ctl_origin :: CtOrigin
                   , ctl_env    :: TcLclEnv
                   , ctl_t_or_k :: Maybe TypeOrKind  -- OK if we're not sure
                   , ctl_depth  :: !SubGoalDepth }
  -- The TcLclEnv includes particularly
  --    source location:  tcl_loc   :: RealSrcSpan
  --    context:          tcl_ctxt  :: [ErrCtxt]
  --    binder stack:     tcl_bndrs :: TcIdBinderStack
  --    level:            tcl_tclvl :: TcLevel
-- | Make a 'CtLoc' for a Given constraint, at the supplied TcLevel.
mkGivenLoc :: TcLevel -> SkolemInfo -> TcLclEnv -> CtLoc
mkGivenLoc tclvl skol_info env
  = CtLoc { ctl_origin = GivenOrigin skol_info
          , ctl_env    = env { tcl_tclvl = tclvl }
          , ctl_t_or_k = Nothing    -- this only matters for error msgs
          , ctl_depth  = initialSubGoalDepth }
-- | Move a 'CtLoc' to the kind level, recording that the kind
-- equality arose from comparing these two types.
mkKindLoc :: TcType -> TcType   -- original *types* being compared
          -> CtLoc -> CtLoc
mkKindLoc s1 s2 loc = setCtLocOrigin (toKindLoc loc)
                        (KindEqOrigin s1 (Just s2) (ctLocOrigin loc)
                                      (ctLocTypeOrKind_maybe loc))
-- | Take a CtLoc and move it to the kind level
toKindLoc :: CtLoc -> CtLoc
toKindLoc loc = loc { ctl_t_or_k = Just KindLevel }
-- | The local environment at the constraint's birthplace.
ctLocEnv :: CtLoc -> TcLclEnv
ctLocEnv loc = ctl_env loc
-- | The TcLevel at which the constraint was born.
ctLocLevel :: CtLoc -> TcLevel
ctLocLevel = tcl_tclvl . ctLocEnv
-- | The constraint's solver depth.
ctLocDepth :: CtLoc -> SubGoalDepth
ctLocDepth loc = ctl_depth loc
-- | Why the constraint arose.
ctLocOrigin :: CtLoc -> CtOrigin
ctLocOrigin loc = ctl_origin loc
-- | Source span of the constraint, taken from its local environment.
ctLocSpan :: CtLoc -> RealSrcSpan
ctLocSpan (CtLoc { ctl_env = env }) = tcl_loc env
-- | Is this a type- or kind-level constraint, if known?
ctLocTypeOrKind_maybe :: CtLoc -> Maybe TypeOrKind
ctLocTypeOrKind_maybe loc = ctl_t_or_k loc
-- | Overwrite the source span stored in the CtLoc's environment.
setCtLocSpan :: CtLoc -> RealSrcSpan -> CtLoc
setCtLocSpan ctl span
  = setCtLocEnv ctl ((ctLocEnv ctl) { tcl_loc = span })
-- | Increase the solver depth recorded in the CtLoc by one.
bumpCtLocDepth :: CtLoc -> CtLoc
bumpCtLocDepth loc = loc { ctl_depth = bumpSubGoalDepth (ctl_depth loc) }
-- | Replace the constraint's origin.
setCtLocOrigin :: CtLoc -> CtOrigin -> CtLoc
setCtLocOrigin loc new_orig = loc { ctl_origin = new_orig }
-- | Replace the constraint's local environment.
setCtLocEnv :: CtLoc -> TcLclEnv -> CtLoc
setCtLocEnv loc new_env = loc { ctl_env = new_env }
-- | Set a new origin and push an error-context frame onto the CtLoc.
pushErrCtxt :: CtOrigin -> ErrCtxt -> CtLoc -> CtLoc
pushErrCtxt new_orig ctxt loc@(CtLoc { ctl_env = env })
  = loc { ctl_origin = new_orig
        , ctl_env    = env { tcl_ctxt = ctxt : tcl_ctxt env } }
pushErrCtxtSameOrigin :: ErrCtxt -> CtLoc -> CtLoc
-- Just add information w/o updating the origin!
pushErrCtxtSameOrigin ctxt loc@(CtLoc { ctl_env = env })
  = loc { ctl_env = env { tcl_ctxt = ctxt : tcl_ctxt env } }
{-
************************************************************************
* *
SkolemInfo
* *
************************************************************************
-}
-- SkolemInfo gives the origin of *given* constraints
--   a) type variables are skolemised
--   b) an implication constraint is generated
data SkolemInfo
  = SigSkol UserTypeCtxt  -- A skolem that is created by instantiating
            TcType        -- a programmer-supplied type signature
                          -- Location of the binding site is on the TyVar

  | ClsSkol Class         -- Bound at a class decl

  | DerivSkol Type        -- Bound by a 'deriving' clause;
                          -- the type is the instance we are trying to derive

  | InstSkol              -- Bound at an instance decl
  | InstSC TypeSize       -- A "given" constraint obtained by superclass selection.
                          -- If (C ty1 .. tyn) is the largest class from
                          --    which we made a superclass selection in the chain,
                          --    then TypeSize = sizeTypes [ty1, .., tyn]
                          -- See Note [Solving superclass constraints] in TcInstDcls

  | DataSkol              -- Bound at a data type declaration
  | FamInstSkol           -- Bound at a family instance decl
  | PatSkol               -- An existential type variable bound by a pattern for
      ConLike             -- a data constructor with an existential type.
      (HsMatchContext Name)
             -- e.g.   data T = forall a. Eq a => MkT a
             --        f (MkT x) = ...
             -- The pattern MkT x will allocate an existential type
             -- variable for 'a'.

  | ArrowSkol             -- An arrow form (see TcArrows)

  | IPSkol [HsIPName]     -- Binding site of an implicit parameter

  | RuleSkol RuleName     -- The LHS of a RULE

  | InferSkol [(Name,TcType)]
                          -- We have inferred a type for these (mutually-recursive)
                          -- polymorphic Ids, and are now checking that their RHS
                          -- constraints are satisfied.

  | BracketSkol           -- Template Haskell bracket

  | UnifyForAllSkol       -- We are unifying two for-all types
       TcType             -- The instantiated type *inside* the forall

  | UnkSkol               -- Unhelpful info (until I improve it)
instance Outputable SkolemInfo where
  ppr info = pprSkolInfo info
termEvidenceAllowed :: SkolemInfo -> Bool
-- Whether an implication constraint with this SkolemInfo
-- is permitted to have term-level evidence.  There is
-- only one that is not, associated with unifying
-- forall-types
termEvidenceAllowed info = case info of
  UnifyForAllSkol {} -> False
  _                  -> True
pprSkolInfo :: SkolemInfo -> SDoc
-- Complete the sentence "is a rigid type variable bound by..."
pprSkolInfo (SigSkol ctxt ty) = pprSigSkolInfo ctxt ty
pprSkolInfo (IPSkol ips)      = text "the implicit-parameter binding" <> plural ips <+> text "for"
                                <+> pprWithCommas ppr ips
pprSkolInfo (ClsSkol cls)     = text "the class declaration for" <+> quotes (ppr cls)
pprSkolInfo (DerivSkol pred)  = text "the deriving clause for" <+> quotes (ppr pred)
pprSkolInfo InstSkol          = text "the instance declaration"
pprSkolInfo (InstSC n)        = text "the instance declaration" <> ifPprDebug (parens (ppr n))
pprSkolInfo DataSkol          = text "a data type declaration"
pprSkolInfo FamInstSkol       = text "a family instance declaration"
pprSkolInfo BracketSkol       = text "a Template Haskell bracket"
pprSkolInfo (RuleSkol name)   = text "the RULE" <+> pprRuleName name
pprSkolInfo ArrowSkol         = text "an arrow form"
pprSkolInfo (PatSkol cl mc)   = sep [ pprPatSkolInfo cl
                                    , text "in" <+> pprMatchContext mc ]
pprSkolInfo (InferSkol ids)   = sep [ text "the inferred type of"
                                    , vcat [ ppr name <+> dcolon <+> ppr ty
                                           | (name,ty) <- ids ]]
pprSkolInfo (UnifyForAllSkol ty) = text "the type" <+> ppr ty

-- UnkSkol
-- For type variables the others are dealt with by pprSkolTvBinding.
-- For Insts, these cases should not happen
pprSkolInfo UnkSkol = WARN( True, text "pprSkolInfo: UnkSkol" ) text "UnkSkol"
-- | Print the origin of a signature-bound skolem, specialising the
-- message for function signatures and pattern synonyms.
pprSigSkolInfo :: UserTypeCtxt -> TcType -> SDoc
pprSigSkolInfo ctxt ty
  = case ctxt of
       FunSigCtxt f _ -> vcat [ text "the type signature for:"
                              , nest 2 (pprPrefixOcc f <+> dcolon <+> ppr ty) ]
       PatSynCtxt {}  -> pprUserTypeCtxt ctxt  -- See Note [Skolem info for pattern synonyms]
       _              -> vcat [ pprUserTypeCtxt ctxt <> colon
                              , nest 2 (ppr ty) ]
-- | Print the origin of a pattern-bound existential skolem, for
-- real data constructors and pattern synonyms.
pprPatSkolInfo :: ConLike -> SDoc
pprPatSkolInfo (RealDataCon dc)
  = sep [ text "a pattern with constructor:"
        , nest 2 $ ppr dc <+> dcolon
          <+> pprType (dataConUserType dc) <> comma ]
  -- pprType prints forall's regardless of -fprint-explicit-foralls
  -- which is what we want here, since we might be saying
  -- type variable 't' is bound by ...

pprPatSkolInfo (PatSynCon ps)
  = sep [ text "a pattern with pattern synonym:"
        , nest 2 $ ppr ps <+> dcolon
                   <+> pprPatSynType ps <> comma ]
{- Note [Skolem info for pattern synonyms]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For pattern synonym SkolemInfo we have
SigSkol (PatSynCtxt p) ty
but the type 'ty' is not very helpful. The full pattern-synonym type
has the provided and required pieces, which it is inconvenient to
record and display here. So we simply don't display the type at all,
contenting ourselves with just the name of the pattern synonym, which
is fine. We could do more, but it doesn't seem worth it.
************************************************************************
* *
CtOrigin
* *
************************************************************************
-}
-- | Why a constraint arose: used only for error-message generation.
data CtOrigin
  = GivenOrigin SkolemInfo

  -- All the others are for *wanted* constraints

  | OccurrenceOf Name              -- Occurrence of an overloaded identifier
  | OccurrenceOfRecSel RdrName     -- Occurrence of a record selector
  | AppOrigin                      -- An application of some kind

  | SpecPragOrigin UserTypeCtxt    -- Specialisation pragma for
                                   -- function or instance

  | TypeEqOrigin { uo_actual   :: TcType
                 , uo_expected :: TcType
                 , uo_thing    :: Maybe ErrorThing
                       -- ^ The thing that has type "actual"
                 }

  | KindEqOrigin
      TcType (Maybe TcType)     -- A kind equality arising from unifying these two types
      CtOrigin                  -- originally arising from this
      (Maybe TypeOrKind)        -- the level of the eq this arises from

  | IPOccOrigin  HsIPName       -- Occurrence of an implicit parameter
  | OverLabelOrigin FastString  -- Occurrence of an overloaded label

  | LiteralOrigin (HsOverLit Name)      -- Occurrence of a literal
  | NegateOrigin                        -- Occurrence of syntactic negation

  | ArithSeqOrigin (ArithSeqInfo Name) -- [x..], [x..y] etc
  | PArrSeqOrigin  (ArithSeqInfo Name) -- [:x..y:] and [:x,y..z:]
  | SectionOrigin
  | TupleOrigin                        -- (..,..)
  | ExprSigOrigin       -- e :: ty
  | PatSigOrigin        -- p :: ty
  | PatOrigin           -- Instantiating a polytyped pattern at a constructor
  | ProvCtxtOrigin      -- The "provided" context of a pattern synonym signature
        (PatSynBind Name Name) -- Information about the pattern synonym, in particular
                               -- the name and the right-hand side
  | RecordUpdOrigin
  | ViewPatOrigin

  | ScOrigin TypeSize   -- Typechecking superclasses of an instance declaration
                        -- If the instance head is C ty1 .. tyn
                        --    then TypeSize = sizeTypes [ty1, .., tyn]
                        -- See Note [Solving superclass constraints] in TcInstDcls

  | DerivOrigin         -- Typechecking deriving
  | DerivOriginDC DataCon Int
                        -- Checking constraints arising from this data con and field index
  | DerivOriginCoerce Id Type Type
                        -- DerivOriginCoerce id ty1 ty2: Trying to coerce class method `id` from
                        -- `ty1` to `ty2`.
  | StandAloneDerivOrigin -- Typechecking stand-alone deriving
  | DefaultOrigin       -- Typechecking a default decl
  | DoOrigin            -- Arising from a do expression
  | DoPatOrigin (LPat Name) -- Arising from a failable pattern in
                            -- a do expression
  | MCompOrigin         -- Arising from a monad comprehension
  | MCompPatOrigin (LPat Name) -- Arising from a failable pattern in a
                               -- monad comprehension
  | IfOrigin            -- Arising from an if statement
  | ProcOrigin          -- Arising from a proc expression
  | AnnOrigin           -- An annotation

  | FunDepOrigin1       -- A functional dependency from combining
        PredType CtLoc      -- This constraint arising from ...
        PredType CtLoc      -- and this constraint arising from ...

  | FunDepOrigin2       -- A functional dependency from combining
        PredType CtOrigin   -- This constraint arising from ...
        PredType SrcSpan    -- and this top-level instance
        -- We only need a CtOrigin on the first, because the location
        -- is pinned on the entire error message

  | HoleOrigin
  | UnboundOccurrenceOf OccName
  | ListOrigin          -- An overloaded list
  | StaticOrigin        -- A static form
  | FailablePattern (LPat TcId) -- A failable pattern in do-notation for the
                                -- MonadFail Proposal (MFP). Obsolete when
                                -- actual desugaring to MonadFail.fail is live.
  | Shouldn'tHappenOrigin String
                            -- the user should never see this one,
                            -- unless ImpredicativeTypes is on, where all
                            -- bets are off
  | InstProvidedOrigin Module ClsInst
        -- Skolem variable arose when we were testing if an instance
        -- is solvable or not.
-- | A thing that can be stored for error message generation only.
-- It is stored with a function to zonk and tidy the thing.
data ErrorThing
  = forall a. Outputable a => ErrorThing a
                                         (Maybe Arity)  -- # of args, if known
                                         (TidyEnv -> a -> TcM (TidyEnv, a))
                                         -- ^ zonk/tidy action run just before printing

-- | Flag to see whether we're type-checking terms or kind-checking types
data TypeOrKind = TypeLevel | KindLevel
  deriving Eq
-- Debug rendering: prints the constructor name verbatim.
instance Outputable TypeOrKind where
  ppr TypeLevel = text "TypeLevel"
  ppr KindLevel = text "KindLevel"
-- | True exactly for 'TypeLevel'.
isTypeLevel :: TypeOrKind -> Bool
isTypeLevel = (== TypeLevel)

-- | True exactly for 'KindLevel'.
isKindLevel :: TypeOrKind -> Bool
isKindLevel = (== KindLevel)
-- | Make an 'ErrorThing' that doesn't need tidying or zonking:
-- the stored zonk/tidy action is the identity in 'TcM'.
mkErrorThing :: Outputable a => a -> ErrorThing
mkErrorThing thing = ErrorThing thing Nothing (curry return)

-- | Retrieve the # of arguments in the error thing, if known
errorThingNumArgs_maybe :: ErrorThing -> Maybe Arity
errorThingNumArgs_maybe (ErrorThing _ mArity _) = mArity
instance Outputable CtOrigin where
  ppr = pprCtOrigin

-- Only the wrapped thing is shown; arity and the zonk/tidy function are not.
instance Outputable ErrorThing where
  ppr (ErrorThing thing _ _) = ppr thing

-- | Herald prefixed to most constraint-origin messages.
ctoHerald :: SDoc
ctoHerald = text "arising from"
-- | Extract a suitable CtOrigin from a HsExpr.
-- Wrapper-like forms recurse into the wrapped expression; forms that should
-- not need an origin here map to 'Shouldn'tHappenOrigin'; forms that only
-- exist after renaming/typechecking panic, as they must not be seen here.
exprCtOrigin :: HsExpr Name -> CtOrigin
exprCtOrigin (HsVar (L _ name)) = OccurrenceOf name
exprCtOrigin (HsUnboundVar uv) = UnboundOccurrenceOf (unboundVarOcc uv)
exprCtOrigin (HsRecFld f) = OccurrenceOfRecSel (rdrNameAmbiguousFieldOcc f)
exprCtOrigin (HsOverLabel l) = OverLabelOrigin l
exprCtOrigin (HsIPVar ip) = IPOccOrigin ip
exprCtOrigin (HsOverLit lit) = LiteralOrigin lit
exprCtOrigin (HsLit {}) = Shouldn'tHappenOrigin "concrete literal"
exprCtOrigin (HsLam matches) = matchesCtOrigin matches
exprCtOrigin (HsLamCase ms) = matchesCtOrigin ms
exprCtOrigin (HsApp (L _ e1) _) = exprCtOrigin e1
exprCtOrigin (HsAppType (L _ e1) _) = exprCtOrigin e1
exprCtOrigin (HsAppTypeOut {}) = panic "exprCtOrigin HsAppTypeOut"
exprCtOrigin (OpApp _ (L _ op) _ _) = exprCtOrigin op
exprCtOrigin (NegApp (L _ e) _) = exprCtOrigin e
exprCtOrigin (HsPar (L _ e)) = exprCtOrigin e
exprCtOrigin (SectionL _ _) = SectionOrigin
exprCtOrigin (SectionR _ _) = SectionOrigin
exprCtOrigin (ExplicitTuple {}) = Shouldn'tHappenOrigin "explicit tuple"
exprCtOrigin ExplicitSum{} = Shouldn'tHappenOrigin "explicit sum"
exprCtOrigin (HsCase _ matches) = matchesCtOrigin matches
-- An if with a rebindable-syntax function: the origin comes from that function.
exprCtOrigin (HsIf (Just syn) _ _ _) = exprCtOrigin (syn_expr syn)
exprCtOrigin (HsIf {}) = Shouldn'tHappenOrigin "if expression"
exprCtOrigin (HsMultiIf _ rhs) = lGRHSCtOrigin rhs
exprCtOrigin (HsLet _ (L _ e)) = exprCtOrigin e
exprCtOrigin (HsDo _ _ _) = DoOrigin
exprCtOrigin (ExplicitList {}) = Shouldn'tHappenOrigin "list"
exprCtOrigin (ExplicitPArr {}) = Shouldn'tHappenOrigin "parallel array"
exprCtOrigin (RecordCon {}) = Shouldn'tHappenOrigin "record construction"
exprCtOrigin (RecordUpd {}) = Shouldn'tHappenOrigin "record update"
exprCtOrigin (ExprWithTySig {}) = ExprSigOrigin
exprCtOrigin (ExprWithTySigOut {}) = panic "exprCtOrigin ExprWithTySigOut"
exprCtOrigin (ArithSeq {}) = Shouldn'tHappenOrigin "arithmetic sequence"
exprCtOrigin (PArrSeq {}) = Shouldn'tHappenOrigin "parallel array sequence"
exprCtOrigin (HsSCC _ _ (L _ e))= exprCtOrigin e
exprCtOrigin (HsCoreAnn _ _ (L _ e)) = exprCtOrigin e
exprCtOrigin (HsBracket {}) = Shouldn'tHappenOrigin "TH bracket"
exprCtOrigin (HsRnBracketOut {})= Shouldn'tHappenOrigin "HsRnBracketOut"
exprCtOrigin (HsTcBracketOut {})= panic "exprCtOrigin HsTcBracketOut"
exprCtOrigin (HsSpliceE {}) = Shouldn'tHappenOrigin "TH splice"
exprCtOrigin (HsProc {}) = Shouldn'tHappenOrigin "proc"
exprCtOrigin (HsStatic {}) = Shouldn'tHappenOrigin "static expression"
exprCtOrigin (HsArrApp {}) = panic "exprCtOrigin HsArrApp"
exprCtOrigin (HsArrForm {}) = panic "exprCtOrigin HsArrForm"
exprCtOrigin (HsTick _ (L _ e)) = exprCtOrigin e
exprCtOrigin (HsBinTick _ _ (L _ e)) = exprCtOrigin e
exprCtOrigin (HsTickPragma _ _ _ (L _ e)) = exprCtOrigin e
exprCtOrigin EWildPat = panic "exprCtOrigin EWildPat"
exprCtOrigin (EAsPat {}) = panic "exprCtOrigin EAsPat"
exprCtOrigin (EViewPat {}) = panic "exprCtOrigin EViewPat"
exprCtOrigin (ELazyPat {}) = panic "exprCtOrigin ELazyPat"
exprCtOrigin (HsWrap {}) = panic "exprCtOrigin HsWrap"
-- | Extract a suitable CtOrigin from a MatchGroup.
-- Only a single-alternative match group can yield a precise origin;
-- anything else is reported as a "multi-way match".
matchesCtOrigin :: MatchGroup Name (LHsExpr Name) -> CtOrigin
matchesCtOrigin (MG { mg_alts = alts })
  | L _ [L _ match] <- alts
  , Match { m_grhss = grhss } <- match
  = grhssCtOrigin grhss

  | otherwise
  = Shouldn'tHappenOrigin "multi-way match"

-- | Extract a suitable CtOrigin from guarded RHSs
grhssCtOrigin :: GRHSs Name (LHsExpr Name) -> CtOrigin
grhssCtOrigin (GRHSs { grhssGRHSs = lgrhss }) = lGRHSCtOrigin lgrhss

-- | Extract a suitable CtOrigin from a list of guarded RHSs.
-- Precise only for a single, guard-free RHS.
lGRHSCtOrigin :: [LGRHS Name (LHsExpr Name)] -> CtOrigin
lGRHSCtOrigin [L _ (GRHS _ (L _ e))] = exprCtOrigin e
lGRHSCtOrigin _ = Shouldn'tHappenOrigin "multi-way GRHS"
-- | Render a constraint location as "arising from ... at ...".
pprCtLoc :: CtLoc -> SDoc
-- "arising from ... at ..."
-- Not an instance of Outputable because of the "arising from" prefix
pprCtLoc (CtLoc { ctl_origin = o, ctl_env = lcl})
  = sep [ pprCtOrigin o
        , text "at" <+> ppr (tcl_loc lcl)]
-- | Render a constraint origin as "arising from ...".
-- Origins needing a multi-line story are handled here explicitly;
-- everything else falls through to the one-liners in 'pprCtO'.
pprCtOrigin :: CtOrigin -> SDoc
-- "arising from ..."
-- Not an instance of Outputable because of the "arising from" prefix
pprCtOrigin (GivenOrigin sk) = ctoHerald <+> ppr sk

pprCtOrigin (SpecPragOrigin ctxt)
  = case ctxt of
      FunSigCtxt n _ -> text "a SPECIALISE pragma for" <+> quotes (ppr n)
      SpecInstCtxt   -> text "a SPECIALISE INSTANCE pragma"
      _              -> text "a SPECIALISE pragma"  -- Never happens I think

pprCtOrigin (FunDepOrigin1 pred1 loc1 pred2 loc2)
  = hang (ctoHerald <+> text "a functional dependency between constraints:")
       2 (vcat [ hang (quotes (ppr pred1)) 2 (pprCtLoc loc1)
               , hang (quotes (ppr pred2)) 2 (pprCtLoc loc2) ])

pprCtOrigin (FunDepOrigin2 pred1 orig1 pred2 loc2)
  = hang (ctoHerald <+> text "a functional dependency between:")
       2 (vcat [ hang (text "constraint" <+> quotes (ppr pred1))
                    2 (pprCtOrigin orig1)
               , hang (text "instance" <+> quotes (ppr pred2))
                    2 (text "at" <+> ppr loc2) ])

pprCtOrigin (KindEqOrigin t1 (Just t2) _ _)
  = hang (ctoHerald <+> text "a kind equality arising from")
       2 (sep [ppr t1, char '~', ppr t2])

pprCtOrigin (KindEqOrigin t1 Nothing _ _)
  = hang (ctoHerald <+> text "a kind equality when matching")
       2 (ppr t1)

pprCtOrigin (UnboundOccurrenceOf name)
  = ctoHerald <+> text "an undeclared identifier" <+> quotes (ppr name)

pprCtOrigin (DerivOriginDC dc n)
  = hang (ctoHerald <+> text "the" <+> speakNth n
          <+> text "field of" <+> quotes (ppr dc))
       2 (parens (text "type" <+> quotes (ppr ty)))
  where
    -- Fields are reported 1-based, hence (n-1) into the constructor's arg types.
    ty = dataConOrigArgTys dc !! (n-1)

pprCtOrigin (DerivOriginCoerce meth ty1 ty2)
  = hang (ctoHerald <+> text "the coercion of the method" <+> quotes (ppr meth))
       2 (sep [ text "from type" <+> quotes (ppr ty1)
              , nest 2 $ text "to type" <+> quotes (ppr ty2) ])

pprCtOrigin (DoPatOrigin pat)
    = ctoHerald <+> text "a do statement"
      $$
      text "with the failable pattern" <+> quotes (ppr pat)

pprCtOrigin (MCompPatOrigin pat)
    = ctoHerald <+> hsep [ text "the failable pattern"
           , quotes (ppr pat)
           , text "in a statement in a monad comprehension" ]

pprCtOrigin (FailablePattern pat)
    = ctoHerald <+> text "the failable pattern" <+> quotes (ppr pat)
      $$
      text "(this will become an error in a future GHC release)"

pprCtOrigin (Shouldn'tHappenOrigin note)
  = sdocWithDynFlags $ \dflags ->
    if xopt LangExt.ImpredicativeTypes dflags
    then text "a situation created by impredicative types"
    else
    vcat [ text "<< This should not appear in error messages. If you see this"
         , text "in an error message, please report a bug mentioning" <+> quotes (text note) <+> text "at"
         , text "https://ghc.haskell.org/trac/ghc/wiki/ReportABug >>" ]

pprCtOrigin (ProvCtxtOrigin PSB{ psb_id = (L _ name) })
  = hang (ctoHerald <+> text "the \"provided\" constraints claimed by")
       2 (text "the signature of" <+> quotes (ppr name))

pprCtOrigin (InstProvidedOrigin mod cls_inst)
  = vcat [ text "arising when attempting to show that"
         , ppr cls_inst
         , text "is provided by" <+> quotes (ppr mod)]

-- Fall-through: all remaining origins have one-line renderings.
pprCtOrigin simple_origin
  = ctoHerald <+> pprCtO simple_origin
-- | Short one-liners.
-- The final catch-all panics: it must only ever be reached for origins
-- that 'pprCtOrigin' already handled with a multi-line rendering.
pprCtO :: CtOrigin -> SDoc
pprCtO (OccurrenceOf name)   = hsep [text "a use of", quotes (ppr name)]
pprCtO (OccurrenceOfRecSel name) = hsep [text "a use of", quotes (ppr name)]
pprCtO AppOrigin             = text "an application"
pprCtO (IPOccOrigin name)    = hsep [text "a use of implicit parameter", quotes (ppr name)]
pprCtO (OverLabelOrigin l)   = hsep [text "the overloaded label"
                                    ,quotes (char '#' <> ppr l)]
pprCtO RecordUpdOrigin       = text "a record update"
pprCtO ExprSigOrigin         = text "an expression type signature"
pprCtO PatSigOrigin          = text "a pattern type signature"
pprCtO PatOrigin             = text "a pattern"
pprCtO ViewPatOrigin         = text "a view pattern"
pprCtO IfOrigin              = text "an if statement"
pprCtO (LiteralOrigin lit)   = hsep [text "the literal", quotes (ppr lit)]
pprCtO (ArithSeqOrigin seq)  = hsep [text "the arithmetic sequence", quotes (ppr seq)]
pprCtO (PArrSeqOrigin seq)   = hsep [text "the parallel array sequence", quotes (ppr seq)]
pprCtO SectionOrigin         = text "an operator section"
pprCtO TupleOrigin           = text "a tuple"
pprCtO NegateOrigin          = text "a use of syntactic negation"
-- The TypeSize is only shown in debug output.
pprCtO (ScOrigin n)          = text "the superclasses of an instance declaration"
                               <> ifPprDebug (parens (ppr n))
pprCtO DerivOrigin           = text "the 'deriving' clause of a data type declaration"
pprCtO StandAloneDerivOrigin = text "a 'deriving' declaration"
pprCtO DefaultOrigin         = text "a 'default' declaration"
pprCtO DoOrigin              = text "a do statement"
pprCtO MCompOrigin           = text "a statement in a monad comprehension"
pprCtO ProcOrigin            = text "a proc expression"
pprCtO (TypeEqOrigin t1 t2 _)= text "a type equality" <+> sep [ppr t1, char '~', ppr t2]
pprCtO AnnOrigin             = text "an annotation"
pprCtO HoleOrigin            = text "a use of" <+> quotes (text "_")
pprCtO ListOrigin            = text "an overloaded list"
pprCtO StaticOrigin          = text "a static form"
pprCtO _                     = panic "pprCtOrigin"
{-
Constraint Solver Plugins
-------------------------
-}
-- | The type of a constraint-solver plugin's solve function: it receives
-- the given, derived, and wanted constraints and produces a 'TcPluginResult'.
type TcPluginSolver = [Ct]    -- given
                   -> [Ct]    -- derived
                   -> [Ct]    -- wanted
                   -> TcPluginM TcPluginResult

-- | A 'TcM' computation with access to the current evidence-bindings variable.
newtype TcPluginM a = TcPluginM (EvBindsVar -> TcM a)

instance Functor TcPluginM where
  fmap = liftM

instance Applicative TcPluginM where
  pure x = TcPluginM (const $ pure x)
  (<*>) = ap

instance Monad TcPluginM where
  fail x = TcPluginM (const $ fail x)
  -- Bind threads the shared EvBindsVar through both computations.
  TcPluginM m >>= k =
    TcPluginM (\ ev -> do a <- m ev
                          runTcPluginM (k a) ev)

#if __GLASGOW_HASKELL__ > 710
instance MonadFail.MonadFail TcPluginM where
  fail x = TcPluginM (const $ fail x)
#endif

-- | Unwrap and run a 'TcPluginM' action against the given evidence bindings.
runTcPluginM :: TcPluginM a -> EvBindsVar -> TcM a
runTcPluginM (TcPluginM m) = m
-- | This function provides an escape for direct access to
-- the 'TcM` monad.  It should not be used lightly, and
-- the provided 'TcPluginM' API should be favoured instead.
unsafeTcPluginTcM :: TcM a -> TcPluginM a
unsafeTcPluginTcM = TcPluginM . const

-- | Access the 'EvBindsVar' carried by the 'TcPluginM' during
-- constraint solving.  Returns 'Nothing' if invoked during
-- 'tcPluginInit' or 'tcPluginStop'.
getEvBindsTcPluginM :: TcPluginM EvBindsVar
getEvBindsTcPluginM = TcPluginM return

-- | A constraint-solver plugin, packaged with its private state type,
-- which is existentially hidden from the rest of the type-checker.
data TcPlugin = forall s. TcPlugin
  { tcPluginInit  :: TcPluginM s
    -- ^ Initialize plugin, when entering type-checker.

  , tcPluginSolve :: s -> TcPluginSolver
    -- ^ Solve some constraints.
    -- TODO: WRITE MORE DETAILS ON HOW THIS WORKS.

  , tcPluginStop  :: s -> TcPluginM ()
    -- ^ Clean up after the plugin, when exiting the type-checker.
  }
-- | The outcome of one invocation of a plugin's solve function.
data TcPluginResult
  = TcPluginContradiction [Ct]
    -- ^ The plugin found a contradiction.
    -- The returned constraints are removed from the inert set,
    -- and recorded as insoluble.

  | TcPluginOk [(EvTerm,Ct)] [Ct]
    -- ^ The first field is for constraints that were solved.
    -- These are removed from the inert set,
    -- and the evidence for them is recorded.
    -- The second field contains new work, that should be processed by
    -- the constraint solver.
| olsner/ghc | compiler/typecheck/TcRnTypes.hs | bsd-3-clause | 135,780 | 0 | 16 | 38,309 | 16,776 | 9,496 | 7,280 | 1,356 | 6 |
------------------------------------------------------------------------------
-- File: Haikubot/Settings.hs
-- Creation Date: Dec 30 2012 [03:40:24]
-- Last Modified: Dec 31 2012 [09:03:43]
-- Created By: Samuli Thomasson [SimSaladin] samuli.thomassonAtpaivola.fi
------------------------------------------------------------------------------
-- | Configuration and settings.
module Haikubot.Settings where
import System.Environment (getArgs)
import Haikubot.Core
import Haikubot.Commands
import Haikubot.Logging
-- | Read and parse the .rc
-- XXX: should be "read command line arguments"
readRC :: Handler ()
readRC = do
    args <- liftIO getArgs
    case args of
        (filename:_) -> do
            -- Treat the first argument as a command file to source.
            logInfo' $ "sourcing file `" ++ filename ++ "`..."
            cmdsFromFile filename
        _ -> return ()
| SimSaladin/haikubot | Haikubot/Settings.hs | bsd-3-clause | 839 | 0 | 14 | 165 | 116 | 65 | 51 | 11 | 2 |
module Main where
import Test.Framework.Providers.DocTest
import Test.Framework
main = docTest ["src/Test/Framework/Providers/DocTest.hs"] ["-optP-include", "-optPdist/build/autogen/cabal_macros.h"] >>= defaultMain . return
| sakari/test-framework-doctest | tests/Main.hs | bsd-3-clause | 226 | 0 | 8 | 19 | 43 | 26 | 17 | 4 | 1 |
{-# LANGUAGE ParallelArrays #-}
{-# OPTIONS -fvectorise #-}
module Vectorised (evensPA) where
import Data.Array.Parallel
import Data.Array.Parallel.Prelude.Int as I
import Data.Array.Parallel.Prelude.Bool
import qualified Prelude as P
-- | Keep only the even elements of a parallel array.
evens :: [:Int:] -> [:Int:]
evens ints = filterP (\x -> x `mod` 2 I.== 0) ints

-- | Entry point exposed to non-vectorised code, converting to and from
-- the flat 'PArray' representation.
-- NOINLINE keeps the wrapper intact across module boundaries —
-- presumably required by the DPH vectoriser; confirm against dph docs.
evensPA :: PArray Int -> PArray Int
{-# NOINLINE evensPA #-}
evensPA arr = toPArrayP (evens (fromPArrayP arr))
| mainland/dph | dph-examples/examples/smoke/prims/Evens/dph/Vectorised.hs | bsd-3-clause | 430 | 6 | 9 | 65 | 136 | 81 | 55 | 12 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE TypeFamilies #-}
{- |
Module : Data.ML.Index
Description : Key-value map model.
Copyright : (c) Paweł Nowak
License : MIT
Maintainer : pawel834@gmail.com
Stability : experimental
-}
module Data.ML.Index where
import Control.Applicative
import Data.Bytes.Serial
import Data.Key
import Data.ML.Internal.Compose
import Data.ML.Model
import Linear
-- | A map from some domain dom to g.
newtype Index dom g a = Index' (Compose dom g a)
    deriving (Functor, Applicative, Foldable, Traversable, Additive, Metric)

-- Convenience pattern hiding the internal 'Compose' wrapper.
-- NOTE(review): relies on the PatternSynonyms extension, which is not in
-- this file's pragma list — presumably enabled project-wide; confirm.
pattern Index m = Index' (Compose m)

-- Serialization delegates to the underlying 'Compose' structure.
instance (Serial1 dom, Serial1 g) => Serial1 (Index dom g) where
    serializeWith f (Index' m) = serializeWith f m
    deserializeWith f = Index' <$> deserializeWith f

-- | The model's input is a key of the domain; prediction looks up the
-- corresponding g-value in the stored map.
instance (Indexable dom, Functor g) => Model (Index dom g) where
    type Input (Index dom g) = Const (Key dom)
    type Output (Index dom g) = g
    predict (Const i) (Index m) = index m i
| bitemyapp/machine-learning | src/Data/ML/Index.hs | mit | 1,086 | 0 | 8 | 207 | 282 | 153 | 129 | 22 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Css.Draw
(drawStyles) where
import Clay hiding (map, repeat, id)
import Prelude hiding ((**))
import Css.Constants
import qualified Data.Text as T
{- drawStyles
- Generates all CSS for the draw page. -}
-- | All CSS for the draw page, emitted in declaration order.
drawStyles :: Css
drawStyles = sequence_
    [ colourTableCSS
    , mainCSS
    , titleDiv
    , canvasCSS
    , panelCSS
    , modeButtonsCSS
    , clickedButtonsCSS
    , simpleButton
    , inputCSS
    , textButtonCSS
    , nodeLabelCSS
    , elbowCSS
    , finishRegionCSS
    ]
{- The colour table. -}
colourTableCSS :: Css
colourTableCSS =
"#colour-table" ? do
height (px 40)
width (px 200)
mapM_ makeRule [0..length colours - 1]
where
makeRule i =
let colour = colours !! i
(row, col) = divMod i 5
in
tr # nthChild (T.pack $ show $ row + 1) **
td # nthChild (T.pack $ show $ col + 1) ? background colour
colours = [
pastelRed, pastelYellow, pastelBlue, pastelPink, white,
pastelOrange, pastelGreen, pastelPurple, pastelBrown, pastelGrey
]
{- The wrapping around the canvas elements. -}
mainCSS :: Css
mainCSS = "#main" ? do
height (pct 85)
width (pct 85)
float floatRight
position relative
"border-radius" -: "8px"
border solid (px 2) black
titleDiv :: Css
titleDiv = "#about-div" ? do
fontSize (em 1.2)
margin 0 0 0 (px 10)
{- The SVG canvas and the grid background. -}
canvasCSS :: Css
canvasCSS = do
"#background" ? do
height100
width100
"background-image" -: "url(/static/res/backgrounds/draw-background.png)"
"background-size" -: "8px"
opacity 0.3
"#mySVG" ? do
height100
width100
position absolute
top nil
left nil
{- The side panel. -}
panelCSS :: Css
panelCSS = do
"#side-panel-wrap" ? do
height (pct 85)
width (pct 15)
float floatLeft
padding (px 5) 0 (px 5) 0
border solid (px 2) black
roundCorners
backgroundColor $ parse "#008080"
overflowY auto
{- The mode buttons. -}
modeButtonsCSS :: Css
modeButtonsCSS = ".mode" ? do
width (pct 93)
padding 0 0 0 (px 5)
margin 0 0 0 (px 5)
roundCorners
fontSize (em 0.75)
border solid (px 2) "#008080"
"-webkit-transition" -: "all 0.2s"
"-moz-transition" -: "all 0.2s"
"-ms-transition" -: "all 0.2s"
"-o-transition" -: "all 0.2s"
"transition" -: "all 0.2s"
":hover" & do
"background-color" -: "#28B0A2 !important"
"color" -: "#DCDCDC !important"
cursor pointer
".clicked" & do
"background-color" -: "#28B0A2 !important"
clickedButtonsCSS :: Css
clickedButtonsCSS = ".clicked" ? do
"color" -: "#DCDCDC !important"
border solid (px 2) black
{- The input field. -}
inputCSS :: Css
inputCSS = "input" ? do
fontSize (px 16)
border solid (px 2) "#DCDCDC"
roundCorners
margin (px 5) (px 0) (px 5) (px 5)
padding0
":focus" & do
{-border solid (px 2) "#FFD700"-}
"box-shadow" -: "0 0 3px 1px #FFD700"
{- Style for simple buttons. -}
simpleButton :: Css
simpleButton = ".button" ? do
width (pct 40)
margin (px 5) (px 5) (px 5) (px 5)
padding0
roundCorners
alignCenter
fontSize (em 0.75)
border solid (px 2) "#008080"
border solid (px 2) black
"-webkit-transition" -: "all 0.2s"
"-moz-transition" -: "all 0.2s"
"-ms-transition" -: "all 0.2s"
"-o-transition" -: "all 0.2s"
"transition" -: "all 0.2s"
":hover" & do
"background-color" -: "black !important"
"color" -: "#DCDCDC !important"
cursor pointer
{- The add button. -}
textButtonCSS :: Css
textButtonCSS = "#add-text" ? do
"display" -: "inline"
margin (px 5) (px 5) (px 5) (px 5)
padding (px 2) (px 5) (px 2) (px 5)
width (pct 45)
{- The labels for a node. -}
nodeLabelCSS :: Css
nodeLabelCSS = ".mylabel" ? do
alignCenter
"stroke" -: "none"
userSelect none
"-webkit-touch-callout" -: "none"
"-webkit-user-select" -: "none"
"-khtml-user-select" -: "none"
"-moz-user-select" -: "none"
"-ms-user-select" -: "none"
"text-anchor" -: "middle"
"dominant-baseline" -: "central"
{- The invisible elbow nodes. -}
elbowCSS :: Css
elbowCSS = do
".elbow" ? do
opacity 0
":hover" & do
cursor pointer
opacity 1
".rElbow" ? do
opacity 0
":hover" & do
cursor pointer
opacity 1
{- The finish button -}
finishRegionCSS :: Css
finishRegionCSS = "#finish-region" ? do
width (pct 40)
margin (px 5) (px 5) (px 5) (px 5)
padding0
backgroundColor $ parse "#DCDCDC"
-- border solid (px 2) black
border solid (px 2) $ parse "#008080"
| miameng/courseography | app/Css/Draw.hs | gpl-3.0 | 4,871 | 0 | 18 | 1,449 | 1,440 | 671 | 769 | 161 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.SNS.RemovePermission
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Removes a statement from a topic's access control policy.
--
-- <http://docs.aws.amazon.com/sns/latest/api/API_RemovePermission.html>
module Network.AWS.SNS.RemovePermission
(
-- * Request
RemovePermission
-- ** Request constructor
, removePermission
-- ** Request lenses
, rpLabel
, rpTopicArn
-- * Response
, RemovePermissionResponse
-- ** Response constructor
, removePermissionResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.SNS.Types
import qualified GHC.Exts
-- | Parameters of the SNS RemovePermission request.
data RemovePermission = RemovePermission
    { _rpLabel    :: Text
    , _rpTopicArn :: Text
    } deriving (Eq, Ord, Read, Show)

-- | 'RemovePermission' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rpLabel' @::@ 'Text'
--
-- * 'rpTopicArn' @::@ 'Text'
--
removePermission :: Text -- ^ 'rpTopicArn'
                 -> Text -- ^ 'rpLabel'
                 -> RemovePermission
removePermission p1 p2 = RemovePermission
    { _rpTopicArn = p1
    , _rpLabel    = p2
    }

-- | The unique label of the statement you want to remove.
rpLabel :: Lens' RemovePermission Text
rpLabel = lens _rpLabel (\s a -> s { _rpLabel = a })

-- | The ARN of the topic whose access control policy you wish to modify.
rpTopicArn :: Lens' RemovePermission Text
rpTopicArn = lens _rpTopicArn (\s a -> s { _rpTopicArn = a })
-- | RemovePermission returns no payload.
data RemovePermissionResponse = RemovePermissionResponse
    deriving (Eq, Ord, Read, Show, Generic)

-- | 'RemovePermissionResponse' constructor.
removePermissionResponse :: RemovePermissionResponse
removePermissionResponse = RemovePermissionResponse

instance ToPath RemovePermission where
    toPath = const "/"

-- Query-string serialisation of the two request fields.
instance ToQuery RemovePermission where
    toQuery RemovePermission{..} = mconcat
        [ "Label"    =? _rpLabel
        , "TopicArn" =? _rpTopicArn
        ]

instance ToHeaders RemovePermission

instance AWSRequest RemovePermission where
    type Sv RemovePermission = SNS
    type Rs RemovePermission = RemovePermissionResponse

    request  = post "RemovePermission"
    -- Empty-body response: just acknowledge success.
    response = nullResponse RemovePermissionResponse
| romanb/amazonka | amazonka-sns/gen/Network/AWS/SNS/RemovePermission.hs | mpl-2.0 | 3,193 | 0 | 9 | 734 | 393 | 240 | 153 | 52 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
module Web.Offset.Splices where
import Control.Monad.State
import Control.Applicative ((<|>))
import Control.Lens hiding (children)
import Control.Concurrent.MVar
import Data.Aeson hiding (decode, encode, json, object)
import qualified Data.Attoparsec.Text as A
import Data.Char (toUpper)
import qualified Data.HashMap.Strict as M
import Data.List (lookup)
import qualified Data.Map as Map
import Data.IntSet (IntSet)
import qualified Data.IntSet as IntSet
import Data.Maybe (fromJust, fromMaybe, catMaybes)
import Data.Monoid
import Data.Scientific (floatingOrInteger)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Vector as V
import Web.Larceny
import Web.Offset.Field
import Web.Offset.Posts
import Web.Offset.Queries
import Web.Offset.Date
import Web.Offset.Types
import Web.Offset.Utils
-- | The full substitution table exposed to templates: binds every wp* tag
-- to its fill, sharing the wordpress handle, extra fields, URI accessor
-- and (optional) duplicate-tracking post-id set.
wordpressSubs :: Wordpress b
              -> [Field s]
              -> StateT s IO Text
              -> WPLens b s
              -> Maybe (MVar (Maybe IntSet))
              -> Substitutions s
wordpressSubs wp extraFields getURI wpLens ids =
  subs [ ("wpPosts", wpPostsFill wp extraFields wpLens ids)
       , ("wpPostsAggregate", wpPostsAggregateFill wp extraFields wpLens ids)
       , ("wpPostByPermalink", wpPostByPermalinkFill extraFields getURI wpLens ids)
       , ("wpPage", wpPageFill wpLens)
       , ("wpNoPostDuplicates", wpNoPostDuplicatesFill wpLens ids)
       , ("wp", wpPrefetch wp extraFields getURI wpLens)
       , ("wpCustom", wpCustomFill wp)
       , ("wpCustomAggregate", wpCustomAggregateFill wp)
       , ("wpCustomDate", wpCustomDateFill)
       , ("stripHtml", stripHtmlFill)]
-- | Remove HTML tags from a Text value: everything from a '<' up to and
-- including the next '>' is dropped; all other characters are kept.
stripTags :: Text -> Text
stripTags txt =
  case T.uncons txt of
    Nothing -> T.empty
    -- Skip to just past the closing '>' (or to the end if there is none).
    Just ('<', rest) -> stripTags (T.drop 1 (T.dropWhile (/= '>') rest))
    Just (c, rest) -> T.cons c (stripTags rest)
-- | Render the child template and strip HTML tags from the result.
stripHtmlFill :: Fill s
stripHtmlFill = Fill $ \attrs (path, tpl) lib -> do
  text <- runTemplate tpl path mempty lib
  return $ stripTags text

-- | Parse a date given in the "date" attribute using the optional
-- "wp_format" attribute (default "%Y-%m-%d %H:%M:%S") and expose its
-- parts as substitutions; on failure an HTML comment is emitted instead.
wpCustomDateFill :: Fill s
wpCustomDateFill =
  useAttrs (a "wp_format" % a "date") customDateFill
  where customDateFill mWPFormat date =
          let wpFormat = fromMaybe "%Y-%m-%d %H:%M:%S" mWPFormat in
          case parseWPDate wpFormat date of
            Just d -> fillChildrenWith $ datePartSubs d
            Nothing -> rawTextFill $ "<!-- Unable to parse date: " <> date <> " -->"
-- | Tag fill for <wpCustom endpoint="...">: fetch an arbitrary endpoint
-- with no extra query parameters.
wpCustomFill :: Wordpress b -> Fill s
wpCustomFill wp =
  useAttrs (a "endpoint") (\e -> customFill wp (EndpointKey e []))

-- | Fetch the given cache key and splice the decoded JSON body into the
-- template.  HTTP errors and undecodable bodies are logged and rendered
-- as HTML comments instead of failing the page.
customFill :: Wordpress b -> WPKey -> Fill s
customFill Wordpress{..} key = Fill $ \attrs (path, tpl) lib ->
  do res <- liftIO $ (cachingGetRetry key :: IO (Either StatusCode WPResponse))
     case (fmap decodeWPResponseBody res :: Either StatusCode (Maybe Value)) of
       Left code -> do
         let notification = "Encountered status code " <> tshow code
                            <> " when querying \"" <> tshow key <> "\"."
         liftIO $ wpLogger notification
         return $ "<!-- " <> notification <> " -->"
       Right (Just json) ->
         unFill (jsonToFill json) attrs (path, tpl) lib
       Right Nothing -> do
         let notification = "Unable to decode JSON for endpoint \"" <> tshow key
         liftIO $ wpLogger $ notification <> ": " <> tshow res
         return $ "<!-- " <> notification <> "-->"
-- | Recursively turn a JSON 'Value' into a template fill: objects expose
-- one substitution per (transformed) key, arrays concatenate one body
-- rendering per element, scalars render as text, and false/null render
-- as explanatory HTML comments.
jsonToFill :: Value -> Fill s
jsonToFill (Object o) =
  Fill $ \_ (path, tpl) lib -> runTemplate tpl path objectSubstitutions lib
  where objectSubstitutions =
          subs $ map (\k -> (transformName k,
                             jsonToFill (fromJust (M.lookup k o))))
                     (M.keys o)
jsonToFill (Array v) =
  Fill $ \attrs (path, tpl) lib ->
    V.foldr mappend "" <$> V.mapM (\e -> unFill (jsonToFillArrayItem e) attrs (path, tpl) lib) v
jsonToFill (String s) = rawTextFill s
jsonToFill (Number n) = case floatingOrInteger n of
  Left r -> rawTextFill $ tshow (r :: Double)
  Right i -> rawTextFill $ tshow (i :: Integer)
jsonToFill (Bool True) = rawTextFill $ tshow True
jsonToFill (Bool False) = rawTextFill "<!-- JSON field found, but value is false. -->"
jsonToFill (Null) = rawTextFill "<!-- JSON field found, but value is null. -->"

-- | Like 'jsonToFill' but for array elements: scalar elements are made
-- available through a "wpArrayItem" substitution so the template body
-- can refer to them; compound elements recurse into 'jsonToFill'.
jsonToFillArrayItem :: Value -> Fill s
jsonToFillArrayItem o@(Object _) = jsonToFill o
jsonToFillArrayItem a@(Array _) = jsonToFill a
jsonToFillArrayItem (String s) = fillChildrenWith $ subs [("wpArrayItem", rawTextFill s)]
jsonToFillArrayItem (Number n) =
  case floatingOrInteger n of
    Left r -> fillChildrenWith $ subs [("wpArrayItem", textFill $ tshow (r :: Double))]
    Right i -> fillChildrenWith $ subs [("wpArrayItem", textFill $ tshow (i :: Integer))]
jsonToFillArrayItem b@(Bool True) = jsonToFill b
jsonToFillArrayItem b@(Bool False) = jsonToFill b
jsonToFillArrayItem n@(Null) = jsonToFill n
-- | Tag fill for <wpCustomAggregate endpoint="...">: like 'wpCustomFill'
-- but also exposes pagination metadata from the response headers.
wpCustomAggregateFill :: Wordpress b -> Fill s
wpCustomAggregateFill wp =
  useAttrs (a "endpoint") (customAggregateFill wp)

-- | Fetch an endpoint and splice in both the decoded JSON
-- ("wpCustomItem") and the pagination metadata ("wpCustomMeta", driven by
-- the "page" attribute).  Errors are logged and rendered as HTML comments.
customAggregateFill :: Wordpress b -> Text -> Fill s
customAggregateFill Wordpress{..} endpoint = Fill $ \attrs (path, tpl) lib ->
  do let key = EndpointKey endpoint []
     res <- liftIO $ (cachingGetRetry key :: IO (Either StatusCode WPResponse))
     case (fmap decodeWPResponseBody res :: Either StatusCode (Maybe Value)) of
       Left code -> do
         let notification = "Encountered status code " <> tshow code
                            <> " when querying \"" <> endpoint <> "\"."
         liftIO $ wpLogger notification
         return $ "<!-- " <> notification <> " -->"
       Right (Just json) ->
         unFill (fillChildrenWith $
                   subs [ ("wpCustomItem", jsonToFill json)
                        , ("wpCustomMeta", useAttrs (a "page") (wpAggregateMetaFill res)) ])
                attrs (path, tpl) lib
       Right Nothing -> do
         let notification = "Unable to decode JSON for endpoint \"" <> endpoint
         liftIO $ wpLogger $ notification <> ": " <> tshow res
         return $ "<!-- " <> notification <> "-->"
-- | Tag fill for <wpPosts>: query posts according to the tag's
-- attributes, drop posts already rendered (when deduplication is on),
-- record the rendered ids, and render the body once per remaining post.
wpPostsFill :: Wordpress b
            -> [Field s]
            -> WPLens b s
            -> Maybe (MVar (Maybe IntSet))
            -> Fill s
wpPostsFill wp extraFields wpLens postIdSet = Fill $ \attrs tpl lib ->
  do (postsQuery, wpKey) <- mkPostsQueryAndKey wp attrs
     res <- liftIO $ cachingGetRetry wp wpKey
     case fmap decodeWPResponseBody res of
       Right (Just posts) -> do
         postsND <- postsWithoutDuplicates wpLens postsQuery posts postIdSet
         addPostIds postIdSet (map fst postsND)
         unFill (wpPostsHelper wp extraFields (map snd postsND)) mempty tpl lib
       Right Nothing -> return ""
       Left code -> liftIO $ logStatusCode wp code
-- | Drop posts whose ids are already in the shared id set (if any) and
-- truncate the remainder to the query's limit (default 20).
postsWithoutDuplicates :: WPLens b s
                       -> WPQuery
                       -> [Object]
                       -> Maybe (MVar (Maybe IntSet))
                       -> StateT s IO [(Int, Object)]
postsWithoutDuplicates wpLens postsQuery posts postIdSet = do
  -- NOTE(review): 'wp' is bound here but never used.
  wp <- use wpLens
  let postsW = extractPostIds posts
  case postIdSet of
    Just mvar -> do
      ids <- liftIO $ readMVar mvar
      return $ take (fromMaybe 20 $ qlimit postsQuery) . removeDupes ids $ postsW
    Nothing -> return $ take (fromMaybe 20 $ qlimit postsQuery) postsW
  where removeDupes :: Maybe IntSet -> [(Int, Object)] -> [(Int, Object)]
        removeDupes Nothing = id
        removeDupes (Just wpPostIdSet) =
          filter (\(wpId,_) -> IntSet.notMember wpId wpPostIdSet)

-- | Parse the tag's attributes into a posts query and the corresponding
-- cache key, resolving taxonomy specs to server-side filters first.
mkPostsQueryAndKey :: Wordpress b
                   -> Attributes
                   -> StateT s IO (WPQuery, WPKey)
mkPostsQueryAndKey wp attrs = do
  let postsQuery = parseQueryNode (Map.toList attrs)
  filters <- liftIO $ mkFilters wp (qtaxes postsQuery)
  let wpKey = mkWPKey filters postsQuery
  return (postsQuery, wpKey)
-- | Log an HTTP status code encountered while querying wpPosts and
-- return the same message wrapped in an HTML comment.
logStatusCode :: Wordpress b -> Int -> IO Text
logStatusCode wp code = do
  let msg = "Encountered status code " <> tshow code
            <> " when querying wpPosts."
  wpLogger wp msg
  pure ("<!-- " <> msg <> " -->")
-- | Tag fill for <wpPostsAggregate>: like 'wpPostsFill' but exposes the
-- per-post body as "wpPostsItem" next to pagination metadata in
-- "wpPostsMeta".
wpPostsAggregateFill :: Wordpress b
                     -> [Field s]
                     -> WPLens b s
                     -> Maybe (MVar (Maybe IntSet))
                     -> Fill s
wpPostsAggregateFill wp extraFields wpLens postIdSet = Fill $ \attrs tpl lib ->
  do (postsQuery, wpKey) <- mkPostsQueryAndKey wp attrs
     res <- liftIO $ cachingGetRetry wp wpKey
     case fmap decodeWPResponseBody res of
       Right (Just posts) -> do
         postsND' <- postsWithoutDuplicates wpLens postsQuery posts postIdSet
         addPostIds postIdSet (map fst postsND')
         unFill (fillChildrenWith $
                   subs [ ("wpPostsItem", wpPostsHelper wp extraFields (map snd postsND'))
                        , ("wpPostsMeta", wpAggregateMetaFill res (qpage postsQuery)) ])
                mempty tpl lib
       Right Nothing -> return ""
       Left code -> liftIO $ logStatusCode wp code
-- | Build pagination-metadata substitutions from the WordPress response
-- headers ("x-wp-totalpages" / "x-wp-total").  The wpHas*/wpNo* tags
-- render their children (or nothing) depending on how the current page
-- relates to the total page count.  Missing or unparseable headers
-- default to 1 page; a missing page attribute defaults to page 1.
wpAggregateMetaFill :: Either StatusCode WPResponse -> Maybe Int -> Fill s
wpAggregateMetaFill (Right (WPResponse headers _)) mCurrentPage = do
  let totalPagesText = maybe "" T.decodeUtf8
                             (lookup "x-wp-totalpages" headers)
      totalItemsText = maybe "" T.decodeUtf8
                             (lookup "x-wp-total" headers)
      totalPages = fromMaybe 1 (readSafe totalPagesText) :: Int
      currentPage = fromMaybe 1 mCurrentPage
  fillChildrenWith $
    subs [ ("wpTotalPages", textFill totalPagesText)
         , ("wpTotalItems", textFill totalItemsText)
         , ("wpHasMorePages",
            if currentPage < totalPages
              then fillChildren
              else textFill "")
         , ("wpNoMorePages",
            if currentPage < totalPages
              then textFill ""
              else fillChildren)
         , ("wpHasMultiplePages",
            if totalPages > 1
              then fillChildren
              else textFill "")
         , ("wpHasSinglePage",
            if totalPages > 1
              then textFill ""
              else fillChildren)
         , ("wpHasPreviousPages",
            if currentPage > 1
              then fillChildren
              else textFill "")
         , ("wpHasNoPreviousPages",
            if currentPage > 1
              then textFill ""
              else fillChildren)]
-- Bug fix: without this clause the function was partial — a 'Left'
-- (failed response) crashed with a pattern-match failure.  A failed
-- response carries no pagination info, so render nothing.
wpAggregateMetaFill (Left _) _ = textFill ""

-- NOTE(review): this clause appears to have been intended as the fallback
-- for 'wpAggregateMetaFill' (mismatched name).  It is kept unchanged for
-- backward compatibility in case it is referenced elsewhere in the file.
wpPostsMetaFill _ _ = textFill ""
-- | Resolve taxonomy specs (e.g. tag/category slugs) to server-side
-- filters, silently dropping any spec whose id cannot be looked up.
mkFilters :: Wordpress b -> [TaxSpecList] -> IO [Filter]
mkFilters wp specLists =
  concat <$> mapM (\(TaxSpecList tName list) -> catMaybes <$> mapM (toFilter tName) list) specLists
  where toFilter :: TaxonomyName -> TaxSpec -> IO (Maybe Filter)
        toFilter tName tSpec = do
          mTSpecId <- lookupSpecId wp tName tSpec
          case mTSpecId of
            Just tSpecId -> return $ Just (TaxFilter tName tSpecId)
            Nothing -> return Nothing

-- | Render one copy of the template body per post object.
wpPostsHelper :: Wordpress b
              -> [Field s]
              -> [Object]
              -> Fill s
wpPostsHelper wp extraFields postsND =
  mapSubs (postSubs wp extraFields) postsND
-- | Fill for a single post addressed by its permalink (@/year/month/slug/@),
-- taken from the current request URI.  Renders nothing (Nothing) when the
-- URI does not parse as a permalink or the post cannot be fetched.
wpPostByPermalinkFill :: [Field s]
                      -> StateT s IO Text
                      -> WPLens b s
                      -> Maybe (MVar (Maybe IntSet))
                      -> Fill s
wpPostByPermalinkFill extraFields getURI wpLens postIdSet = maybeFillChildrenWith' $
  do uri <- getURI
     let mperma = parsePermalink uri
     case mperma of
       Nothing -> do
         -- NOTE(review): the @w\@@ binder is unused; wpLogger comes in
         -- via RecordWildCards.
         w@Wordpress{..} <- use wpLens
         liftIO $ wpLogger $ "unable to parse URI: " <> uri
         return Nothing
       Just (year, month, slug) ->
         do res <- wpGetPost wpLens (PostByPermalinkKey year month slug)
            case res of
              -- Record the post id for duplicate suppression, then
              -- build the per-post substitutions.
              Just post -> do addPostIds postIdSet [fst (extractPostId post)]
                              wp <- use wpLens
                              return $ Just (postSubs wp extraFields post)
              _ -> return Nothing
-- | Substitutions for a feed entry: a single @wpPost@ splice built from
-- an already-fetched post object.
feedSubs :: [Field s] -> WPLens b s -> Object -> Maybe (MVar (Maybe IntSet)) -> Substitutions s
feedSubs extraFields wpLens postObj mIds =
  subs [("wpPost", wpPostFromObjectFill extraFields wpLens postObj mIds)]
-- | Fill built directly from a post object (no fetching): records the
-- post id for duplicate suppression and exposes the post's fields.
wpPostFromObjectFill :: [Field s]
                     -> WPLens b s
                     -> Object
                     -> Maybe (MVar (Maybe IntSet))
                     -> Fill s
wpPostFromObjectFill extraFields wpLens postObj postIdSet =
  maybeFillChildrenWith' $ do
    addPostIds postIdSet [fst (extractPostId postObj)]
    wp <- use wpLens
    pure (Just (postSubs wp extraFields postObj))
-- | Splice that resets the duplicate-tracking set (turning duplicate
-- suppression on from this point); renders nothing itself.
wpNoPostDuplicatesFill :: WPLens b s -> (Maybe (MVar (Maybe IntSet))) -> Fill s
wpNoPostDuplicatesFill wpLens mPostIdSet = rawTextFill' $ do
  case mPostIdSet of
    Just mvar -> liftIO $ modifyMVar_ mvar (const (return (Just IntSet.empty)))
    Nothing   -> return ()
  return ""
-- | Splice for a single WordPress page selected by slug via the @name@
-- attribute; renders the page's @content.rendered@ HTML, or nothing if
-- the attribute, the page, or either JSON field is missing.
wpPageFill :: WPLens b s -> Fill s
wpPageFill wpLens =
  useAttrs (a "name") pageFill
  where -- Missing name attribute: render nothing.
        pageFill Nothing = rawTextFill ""
        pageFill (Just slug) = rawTextFill' $
          do res <- wpGetPost wpLens (PageKey slug)
             -- Drill into {"content": {"rendered": "<html>"}}.
             return $ case res of
                        Just page -> case M.lookup "content" page of
                          Just (Object o) -> case M.lookup "rendered" o of
                            Just (String r) -> r
                            _ -> ""
                          _ -> ""
                        _ -> ""
-- | Builds the substitutions for one post object: one splice per
-- configured 'Field' (the default 'postFields' merged with caller
-- extras).  Splice names are derived from field names via
-- 'transformName' (e.g. @post_title@ -> @wpPostTitle@).
postSubs :: Wordpress b -> [Field s] -> Object -> Substitutions s
postSubs wp extra object = subs (map (buildSplice object) (mergeFields postFields extra))
  where -- One clause per 'Field' constructor.
        -- F: plain text field.
        buildSplice o (F n) =
          (transformName n, rawTextFill $ getText n o)
        -- B: boolean field, rendered via show.
        buildSplice o (B n) =
          (transformName n, textFill $ getBool n o)
        -- Q / QM: fields whose value is an id (or ids) resolved through
        -- a further endpoint request.
        buildSplice o (Q n endpoint) =
          (transformName n, customFill wp (idToEndpoint endpoint $ getText n o))
        buildSplice o (QM n endpoint) =
          (transformName n, customFill wp (idsToEndpoint endpoint (unArray' . M.lookup n $ o)))
        -- P / PV / PN / PM: fields processed by a caller-supplied fill,
        -- fed text / raw value / nested object / object list respectively.
        buildSplice o (P n fill') =
          (transformName n, fill' $ getText n o)
        buildSplice o (PV n fill') =
          (transformName n, fill' (M.lookup n $ o))
        buildSplice o (PN n fill') =
          (transformName n, fill' (unObj . M.lookup n $ o))
        buildSplice o (PM n fill') =
          (transformName n, fill' (unArray . M.lookup n $ o))
        -- N: nested object with its own sub-fields.
        buildSplice o (N n fs) =
          (transformName n, fillChildrenWith $ subs
                              (map (buildSplice (unObj . M.lookup n $ o)) fs))
        -- C / CB / CN: fields addressed by a path of keys.
        buildSplice o (C n path) =
          (transformName n, rawTextFill (getText (last path) . traverseObject (init path) $ o))
        buildSplice o (CB n path) =
          (transformName n, rawTextFill (getBool (last path) . traverseObject (init path) $ o))
        buildSplice o (CN n path fs) =
          (transformName n, fillChildrenWith $ subs
                              (map (buildSplice (traverseObject path o)) fs))
        -- M: array of objects, each rendered with the sub-fields plus a
        -- 1-based <name>Index splice.
        buildSplice o (M n fs) =
          (transformName n,
           mapSubs (\(i, oinner) -> subs $ map (buildSplice oinner) fs
                                      <> [(transformName n <> "Index", textFill (tshow i))])
                   (zip [1..] (unArray . M.lookup n $ o)))
        -- JSON helpers; all fall back to empty on shape mismatch.
        unValue (String t) = t
        unValue (Number i) = either (tshow :: Double -> Text)
                                    (tshow :: Integer -> Text) (floatingOrInteger i)
        unValue v = ""
        unObj (Just (Object o)) = o
        unObj _ = M.empty
        unArray (Just (Array v)) = map (unObj . Just) $ V.toList v
        unArray _ = []
        unArray' (Just (Array v)) = map unValue $ V.toList v
        unArray' _ = []
        traverseObject pth o = foldl (\o' x -> unObj . M.lookup x $ o') o pth
        getText n o = maybe "" unValue (M.lookup n o)
        getBool n o = case M.lookup n o of
                        Just (Bool b) -> tshow b
                        _ -> ""
-- * -- Internal -- * --
-- | Parse the attributes of a @wpPosts@ node into a posts query.
-- Attributes in 'listOfFilters' map to query fields; every other
-- attribute is interpreted as a taxonomy restriction.
parseQueryNode :: [(Text, Text)] -> WPQuery
parseQueryNode attrs =
  WPPostsQuery { qlimit   = readLookup "limit" attrs
               , qnum     = perpage
               , qoffset  = readLookup "offset" attrs
               , qpage    = readLookup "page" attrs
               , qorder   = readLookup "order" attrs
               , qorderby = lookup "orderby" attrs
               , qsearch  = lookup "search" attrs
               , qbefore  = readLookup "before" attrs
               , qafter   = readLookup "after" attrs
               , qstatus  = readLookup "status" attrs
               , qsticky  = readLookup "sticky" attrs
               , quser    = lookup "user" attrs
               , qtaxes   = filterTaxonomies attrs }
  where -- `toTitle` allows us to use the standard Read instance to, e.g.,
        -- translate the text "asc" to the type constructor `Asc`
        readLookup n attrs = (readSafe . T.toTitle) =<< lookup n attrs
        -- "per-page" wins over the legacy "num" attribute.
        perpage =
          case readLookup "per-page" attrs of
            Just n  -> Just n
            Nothing -> readLookup "num" attrs
        -- Attribute names with dedicated query fields; anything else is
        -- treated as a taxonomy name below.
        listOfFilters = ["limit"
                        , "num"
                        , "offset"
                        , "page"
                        , "per-page"
                        , "user"
                        , "order"
                        , "orderby"
                        , "context"
                        , "search"
                        , "after"
                        , "before"
                        , "slug"
                        , "status"
                        , "sticky"]
        filterTaxonomies :: [(Text, Text)] -> [TaxSpecList]
        filterTaxonomies attrs =
          let taxAttrs = filter (\(k, _) -> (k `notElem` listOfFilters)) attrs in
          map attrToTaxSpecList taxAttrs
-- | Cache keys for the taxonomy dictionaries named by the spec lists.
taxDictKeys :: [TaxSpecList] -> [WPKey]
taxDictKeys specLists = [TaxDictKey tName | TaxSpecList tName _ <- specLists]
-- | Two-pass render: the first pass runs the template against prefetch
-- substitutions that only *collect* the cache keys each splice would
-- need; those keys are then fetched, and the second pass renders for
-- real against warm caches.
wpPrefetch :: Wordpress b
           -> [Field s]
           -> StateT s IO Text
           -> WPLens b s
           -> Fill s
wpPrefetch wp extra uri wpLens = Fill $ \ _m (p, tpl) l -> do
  Wordpress{..} <- use wpLens
  mKeys <- liftIO $ newMVar []
  -- Pass 1: discard output, accumulate keys into mKeys.
  void $ runTemplate tpl p (prefetchSubs wp mKeys) l
  newPostIdSet <- liftIO $ newMVar Nothing
  wpKeys <- liftIO $ readMVar mKeys
  -- Warm the cache for every collected key.
  -- NOTE(review): `concurrently $ map cachingGet wpKeys` applies
  -- `concurrently` to a *list* of actions; async's concurrently takes
  -- two actions, so this presumably is a project-local helper
  -- (mapConcurrently-like) — confirm.
  void $ liftIO $ concurrently $ map cachingGet wpKeys
  -- Pass 2: the real render.
  runTemplate tpl p (wordpressSubs wp extra uri wpLens (Just newPostIdSet)) l
-- | Substitutions used during the prefetch pass: each splice records
-- the cache key it would need instead of rendering anything.
prefetchSubs :: Wordpress b -> MVar [WPKey] -> Substitutions s
prefetchSubs wp mkeys =
  subs
    [ ("wpPosts", wpPostsPrefetch wp mkeys)
    , ("wpPage", useAttrs (a "name") (wpPagePrefetch mkeys))
    ]
-- | Prefetch variant of the posts splice: computes the cache key for
-- the query described by the node's attributes and records it.
wpPostsPrefetch :: Wordpress b
                -> MVar [WPKey]
                -> Fill s
wpPostsPrefetch wp mKeys = Fill $ \attrs _tpl _lib -> do
  let query = parseQueryNode (Map.toList attrs)
  filters <- liftIO (mkFilters wp (qtaxes query))
  liftIO (modifyMVar_ mKeys (return . (mkWPKey filters query :)))
  return ""
-- | Prefetch variant of the page splice: records the page's cache key.
wpPagePrefetch :: MVar [WPKey]
               -> Text
               -> Fill s
wpPagePrefetch mKeys name = rawTextFill' $ do
  liftIO (modifyMVar_ mKeys (\keys -> return (PageKey name : keys)))
  return ""
-- | Cache key for a posts query: the query's own filters, the resolved
-- taxonomy filters, and (if present) a user filter, as a set.
mkWPKey :: [Filter]
        -> WPQuery
        -> WPKey
mkWPKey taxFilters wppq@WPPostsQuery{..} =
  PostsKey (Set.fromList allFilters)
  where
    allFilters =
      toFilters wppq ++ taxFilters ++ maybe [] (\u -> [UserFilter u]) quser
-- | The filters implied by the optional fields of a query; absent
-- fields contribute nothing.
toFilters :: WPQuery -> [Filter]
toFilters WPPostsQuery{..} =
  catMaybes
    [ fmap NumFilter qnum
    , fmap PageFilter qpage
    , fmap OffsetFilter qoffset
    , fmap OrderFilter qorder
    , fmap OrderByFilter qorderby
    , fmap SearchFilter qsearch
    , fmap BeforeFilter qbefore
    , fmap AfterFilter qafter
    , fmap StatusFilter qstatus
    , fmap StickyFilter qsticky
    ]
-- | Resolve every spec in the list through the taxonomy's dictionary;
-- an unknown taxonomy yields no filters.
findDict :: [(TaxonomyName, TaxSpec -> TaxSpecId)] -> TaxSpecList -> [Filter]
findDict dicts (TaxSpecList tName tList) =
  maybe [] (\dict -> [TaxFilter tName (dict t) | t <- tList])
           (lookup tName dicts)
-- | Parse a permalink of the form @.../year/month/slug[/]@ into
-- (year, month, slug).  The URI is reversed first so the parser can
-- anchor on the final three path segments; each captured segment is
-- reversed back on the way out.
parsePermalink :: Text -> Maybe (Text, Text, Text)
parsePermalink = either (const Nothing) Just . A.parseOnly parser . T.reverse
  where slugChar = A.letter <|> A.char '-' <|> A.digit
        -- One path segment followed by its '/' separator.
        segment = do cs <- A.many1 slugChar
                     _ <- A.char '/'
                     return cs
        parser = do _ <- A.option ' ' (A.char '/')   -- optional trailing slash
                    guls <- segment                  -- slug (reversed)
                    segment2 <- segment              -- month (reversed)
                    segment1 <- segment              -- year (reversed)
                    return ( T.reverse (T.pack segment1)
                           , T.reverse (T.pack segment2)
                           , T.reverse (T.pack guls) )
-- | Fetch a single object for the given key using the Wordpress handle
-- held in state.
wpGetPost :: (MonadState s m, MonadIO m) => WPLens b s -> WPKey -> m (Maybe Object)
wpGetPost wpLens wpKey = use wpLens >>= liftIO . flip getPost wpKey
-- | Fetch (through the retrying cache) and decode a single object:
-- takes the first element of the decoded list, or Nothing on any
-- failure (HTTP error, decode failure, empty list).
getPost :: Wordpress b -> WPKey -> IO (Maybe Object)
getPost Wordpress{..} wpKey = decodePost <$> cachingGetRetry wpKey
  where decodePost :: Either StatusCode WPResponse -> Maybe Object
        decodePost (Left _)  = Nothing
        decodePost (Right t) =
          case decodeWPResponseBody t of
            Just (Just (post:_)) -> Just post
            _                    -> Nothing
-- | Turn a WordPress field name into a splice name: prefix @wp@,
-- capitalize the first letter and every letter following @_@ or @-@,
-- dropping the separators (e.g. @post_title@ -> @wpPostTitle@).
transformName :: Text -> Text
transformName = T.append (T.pack "wp") . snd . T.foldl step (True, T.pack "")
  where
    -- The Bool tracks whether the next character must be upper-cased.
    -- Clause order matters: a pending capitalization consumes the next
    -- character unconditionally, even if it is another separator.
    step (capitalize, acc) c
      | capitalize           = (False, T.snoc acc (toUpper c))
      | c == '_' || c == '-' = (True, acc)
      | otherwise            = (False, T.snoc acc c)
-- Move this into Init.hs (should retrieve from Wordpress data structure)
-- | Record post ids in the duplicate-tracking set, if tracking is
-- active (the MVar exists and currently holds a Just set); otherwise a
-- no-op.
addPostIds :: (MonadState s m, MonadIO m) => Maybe (MVar (Maybe IntSet)) -> [Int] -> m ()
addPostIds mIdSetMVar ids =
  case mIdSetMVar of
    Nothing        -> return ()
    Just idSetMVar -> liftIO $ modifyMVar_ idSetMVar (return . fmap insertAll)
  where
    insertAll existing = existing `IntSet.union` IntSet.fromList ids
-- | Build the endpoint cache key for a single related object, either by
-- appending the id to the endpoint path or by passing a slug parameter.
-- (Local name avoids shadowing Prelude's 'id'.)
idToEndpoint :: IdToEndpoint -> Text -> WPKey
idToEndpoint (UseId endpoint) objId  = EndpointKey (endpoint <> objId) []
idToEndpoint (UseSlug endpoint) slug = EndpointKey endpoint [("slug", slug)]
-- | Build the endpoint cache key for several related objects, passing
-- each id as an @include[]@ query parameter.
idsToEndpoint :: IdsToEndpoint -> [Text] -> WPKey
idsToEndpoint (UseInclude endpoint) ids =
  EndpointKey endpoint [("include[]", anId) | anId <- ids]
{-# ANN module ("HLint: ignore Eta reduce" :: String) #-}
| dbp/snaplet-wordpress | src/Web/Offset/Splices.hs | bsd-3-clause | 23,114 | 0 | 28 | 7,328 | 7,436 | 3,739 | 3,697 | 493 | 19 |
module Ch1Lists where
import Data.Char
import Test.QuickCheck
-- Lists are the primary data structure in LISP like languages.
-- Where as Arrays are the primary data collection in C derived languages like Java.
-- Each example pairs a list comprehension with its evaluated result.
examp1 = [x*x | x <- [1,2,3] ]
-- [1,4,9]
examp2 = [toLower c | c <- "Hello From Haskell"]
-- "hello from haskell"
examp3 = [ (x, even x) | x <- [1,2,3] ]
-- [(1,False),(2,True),(3,False)]
{------ notes -------------------------------
[x*x | x <- [1,2,3] ] is a list comprehension
x <- [1,2,3] is a generator
<- means "drawn from"
[(1,False).....] pairs in a list
| are guards and they evaluate true or false
toLower
Converts a letter to the corresponding lower-case letter, if any. Any other character is returned unchanged.
------------------------------------------}
-- The part after the comma is a guard: only x's passing it are kept.
examp4 = [x|x <-[1..5], odd x]
-- [1,3,5]
examp5 = [x*x | x <-[1..5], odd x]
-- [1,9,25]
examp6 = [x|x <- [42, -5, 24, 0, -3], x >= 0]
-- [42,24,0]
examp7 = [ toLower c | c <- "Hello, World!", isAlpha c ]
-- "helloworld"
-- "helloworld"
{------ notes -------------------------------
isAlpha Selects alphabetic Unicode characters (lower-case, upper-case and title-case letters, plus letters of caseless scripts and modifiers letters).
------------------------------------------}
f1 = sum [1,2,3]
-- 6
f2 = sum []
-- 0 (the identity element of +)
f3 = sum [x*x| x <- [1,2,3], odd x]
-- 10
f4 = product [1,2,3,4]
-- 24
f5 = product []
-- 1 (the identity element of *)
-- product over the empty range [1..0] is 1, so factorial 0 == 1
-- (and any negative n also yields 1).
factorial n = product [1..n]
f6 = factorial 4
-- 24
{------ notes -------------------------------
When you encounter a function you can politely ask
Are you Associative?
What is you identity element?
For + and - it will be 0
For * and / it will be 1
for [] it my be 1 or 0
[1..n] a list from 1 to whatever n is
eager oop languages would hate this notation
and choke on trying to call a potentially
infinite list
------------------------------------------}
-- | Square every element.
squares :: [Integer] -> [Integer]
squares = map (\n -> n * n)

-- | Keep only the odd elements.
odds :: [Integer] -> [Integer]
odds = filter odd

-- | Sum of the squares of the odd elements, done in a single pass.
sumSqOdd :: [Integer] -> Integer
sumSqOdd xs = sum [n * n | n <- xs, odd n]

------ QuickCheck required here ----------------
-- prop_sumSqOdd :: [Integer] -> Bool
-- The one-pass version agrees with squares-after-odds for every list.
prop_sumSqOdd xs = sum (squares (odds xs)) == sumSqOdd xs
{- --------------------------------------------
Running quickCheck from the prompt with prop_sumSqOdd as it's argument.
*Ch1Lists> quickCheck prop_sumSqOdd
Loading package array-0.4.0.1 ... linking ... done.
Loading package deepseq-1.3.0.1 ... linking ... done.
Loading package bytestring-0.10.0.2 ... linking ... done.
Loading package Win32-2.3.0.0 ... linking ... done.
Loading package old-locale-1.0.0.5 ... linking ... done.
Loading package time-1.4.0.1 ... linking ... done.
Loading package random-1.0.1.1 ... linking ... done.
Loading package containers-0.5.0.0 ... linking ... done.
Loading package pretty-1.1.1.0 ... linking ... done.
Loading package template-haskell ... linking ... done.
Loading package QuickCheck-2.6 ... linking ... done.
+++ OK, passed 100 tests.
-------------------------------------------------------} | HaskellForCats/HaskellForCats | MenaBeginning/002mena/00session/ch1Lists.hs | mit | 3,089 | 9 | 10 | 555 | 474 | 277 | 197 | 24 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE DeriveDataTypeable #-}
#ifndef MIN_VERSION_base
#define MIN_VERSION_base(x,y,z) 1
#endif
#ifndef MIN_VERSION_mtl
#define MIN_VERSION_mtl(x,y,z) 1
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.Trans.Iter
-- Copyright : (C) 2013 Edward Kmett
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : provisional
-- Portability : MPTCs, fundeps
--
-- Based on <http://www.ioc.ee/~tarmo/tday-veskisilla/uustalu-slides.pdf Capretta's Iterative Monad Transformer>
--
-- Unlike 'Free', this is a true monad transformer.
----------------------------------------------------------------------------
module Control.Monad.Trans.Iter
(
-- |
-- Functions in Haskell are meant to be pure. For example, if an expression
-- has type Int, there should exist a value of the type such that the expression
-- can be replaced by that value in any context without changing the meaning
-- of the program.
--
-- Some computations may perform side effects (@unsafePerformIO@), throw an
-- exception (using @error@); or not terminate
-- (@let infinity = 1 + infinity in infinity@).
--
-- While the 'IO' monad encapsulates side-effects, and the 'Either'
-- monad encapsulates errors, the 'Iter' monad encapsulates
-- non-termination. The 'IterT' transformer generalizes non-termination to any monadic
-- computation.
--
-- Computations in 'IterT' (or 'Iter') can be composed in two ways:
--
-- * /Sequential:/ Using the 'Monad' instance, the result of a computation
-- can be fed into the next.
--
-- * /Parallel:/ Using the 'MonadPlus' instance, several computations can be
-- executed concurrently, and the first to finish will prevail.
-- See also the <examples/Cabbage.lhs cabbage example>.
-- * The iterative monad transformer
IterT(..)
-- * Capretta's iterative monad
, Iter, iter, runIter
-- * Combinators
, delay
, hoistIterT
, liftIter
, cutoff
, never
, untilJust
, interleave, interleave_
-- * Consuming iterative monads
, retract
, fold
, foldM
-- * IterT ~ FreeT Identity
, MonadFree(..)
-- * Examples
-- $examples
) where
import Control.Applicative
import Control.Monad.Catch (MonadCatch(..), MonadThrow(..))
import Control.Monad (ap, liftM, MonadPlus(..), join)
import Control.Monad.Fix
import Control.Monad.Trans.Class
import Control.Monad.Free.Class
import Control.Monad.State.Class
import Control.Monad.Error.Class
import Control.Monad.Reader.Class
import Control.Monad.Writer.Class
import Control.Monad.Cont.Class
import Control.Monad.IO.Class
import Data.Bifunctor
import Data.Bitraversable
import Data.Either
import Data.Functor.Bind hiding (join)
import Data.Functor.Identity
import Data.Function (on)
import Data.Monoid
import Data.Semigroup.Foldable
import Data.Semigroup.Traversable
import Data.Typeable
import Data.Data
import Prelude.Extras
#if !(MIN_VERSION_base(4,8,0))
import Data.Foldable hiding (fold)
import Data.Traversable hiding (mapM)
#endif
-- | The monad supporting iteration based over a base monad @m@.
--
-- @
-- 'IterT' ~ 'FreeT' 'Identity'
-- @
--
-- Running one step ('runIterT') yields either a final result ('Left')
-- or the remainder of the computation ('Right').
newtype IterT m a = IterT { runIterT :: m (Either a (IterT m a)) }
#if __GLASGOW_HASKELL__ >= 707
  deriving (Typeable)
#endif
-- | Plain iterative computations (no underlying effects).
type Iter = IterT Identity

-- | Builds an iterative computation from one first step: 'Left' is an
-- immediate result, 'Right' the rest of the computation.
--
-- prop> runIter . iter == id
iter :: Either a (Iter a) -> Iter a
iter = IterT . Identity
{-# INLINE iter #-}

-- | Executes the first step of an iterative computation
--
-- prop> iter . runIter == id
runIter :: Iter a -> Either a (Iter a)
runIter = runIdentity . runIterT
{-# INLINE runIter #-}
-- Structural equality/ordering/printing/parsing delegate to the base
-- monad; the Lift1/lower1 wrappers (Prelude.Extras) lift the *1
-- classes through the inner Either layer.
instance (Functor m, Eq1 m) => Eq1 (IterT m) where
  (==#) = on (==#) (fmap (fmap Lift1) . runIterT)

instance Eq (m (Either a (IterT m a))) => Eq (IterT m a) where
  IterT m == IterT n = m == n

instance (Functor m, Ord1 m) => Ord1 (IterT m) where
  compare1 = on compare1 (fmap (fmap Lift1) . runIterT)

instance Ord (m (Either a (IterT m a))) => Ord (IterT m a) where
  compare (IterT m) (IterT n) = compare m n

instance (Functor m, Show1 m) => Show1 (IterT m) where
  showsPrec1 d (IterT m) = showParen (d > 10) $
    showString "IterT " . showsPrec1 11 (fmap (fmap Lift1) m)

instance Show (m (Either a (IterT m a))) => Show (IterT m a) where
  showsPrec d (IterT m) = showParen (d > 10) $
    showString "IterT " . showsPrec 11 m

instance (Functor m, Read1 m) => Read1 (IterT m) where
  readsPrec1 d = readParen (d > 10) $ \r ->
    [ (IterT (fmap (fmap lower1) m),t) | ("IterT",s) <- lex r, (m,t) <- readsPrec1 11 s]

instance Read (m (Either a (IterT m a))) => Read (IterT m a) where
  readsPrec d = readParen (d > 10) $ \r ->
    [ (IterT m,t) | ("IterT",s) <- lex r, (m,t) <- readsPrec 11 s]
-- fmap maps over both an immediate result (Left) and the continuation
-- (Right), recursively.
instance Monad m => Functor (IterT m) where
  fmap f = IterT . liftM (bimap f (fmap f)) . runIterT
  {-# INLINE fmap #-}

instance Monad m => Applicative (IterT m) where
  -- pure finishes in one step.
  pure = IterT . return . Left
  {-# INLINE pure #-}
  (<*>) = ap
  {-# INLINE (<*>) #-}

instance Monad m => Monad (IterT m) where
  return = IterT . return . Left
  {-# INLINE return #-}
  -- (>>=) grafts k at the result: run one step; if finished, continue
  -- with k, otherwise push the bind into the continuation.
  IterT m >>= k = IterT $ m >>= either (runIterT . k) (return . Right . (>>= k))
  {-# INLINE (>>=) #-}
  -- fail is the non-terminating computation.
  fail _ = never
  {-# INLINE fail #-}

instance Monad m => Apply (IterT m) where
  (<.>) = ap
  {-# INLINE (<.>) #-}

instance Monad m => Bind (IterT m) where
  (>>-) = (>>=)
  {-# INLINE (>>-) #-}

instance MonadFix m => MonadFix (IterT m) where
  mfix f = IterT $ mfix $ runIterT . f . either id (error "mfix (IterT m): Right")
  {-# INLINE mfix #-}

instance Monad m => Alternative (IterT m) where
  empty = mzero
  {-# INLINE empty #-}
  (<|>) = mplus
  {-# INLINE (<|>) #-}

-- | Capretta's 'race' combinator. Satisfies left catch.
instance Monad m => MonadPlus (IterT m) where
  mzero = never
  {-# INLINE mzero #-}
  -- Step x first: if it finished, its result wins; otherwise step y and
  -- keep racing the two continuations.
  (IterT x) `mplus` (IterT y) = IterT $ x >>= either
                                 (return . Left)
                                 (flip liftM y . second . mplus)
  {-# INLINE mplus #-}

instance MonadTrans IterT where
  -- A lifted action finishes in a single step.
  lift = IterT . liftM Left
  {-# INLINE lift #-}
-- Folding/traversing visits the final result and every intermediate
-- layer the base monad's own Foldable/Traversable exposes.
instance Foldable m => Foldable (IterT m) where
  foldMap f = foldMap (either f (foldMap f)) . runIterT
  {-# INLINE foldMap #-}

instance Foldable1 m => Foldable1 (IterT m) where
  foldMap1 f = foldMap1 (either f (foldMap1 f)) . runIterT
  {-# INLINE foldMap1 #-}

instance (Monad m, Traversable m) => Traversable (IterT m) where
  traverse f (IterT m) = IterT <$> traverse (bitraverse f (traverse f)) m
  {-# INLINE traverse #-}

instance (Monad m, Traversable1 m) => Traversable1 (IterT m) where
  traverse1 f (IterT m) = IterT <$> traverse1 go m where
    go (Left a) = Left <$> f a
    go (Right a) = Right <$> traverse1 f a
  {-# INLINE traverse1 #-}
-- mtl effects are lifted pointwise through the iteration.
instance MonadReader e m => MonadReader e (IterT m) where
  ask = lift ask
  {-# INLINE ask #-}
  local f = hoistIterT (local f)
  {-# INLINE local #-}

instance MonadWriter w m => MonadWriter w (IterT m) where
  tell = lift . tell
  {-# INLINE tell #-}
  -- listen collects the output of every step, re-attaching the
  -- accumulated prefix to each continuation.
  listen (IterT m) = IterT $ liftM concat' $ listen (fmap listen `liftM` m)
    where
      concat' (Left x, w) = Left (x, w)
      concat' (Right y, w) = Right $ second (w <>) <$> y
  -- pass suppresses per-step output (clean) and re-emits it, filtered,
  -- once the final (result, filter) pair is available.
  pass m = IterT . pass' . runIterT . hoistIterT clean $ listen m
    where
      clean = pass . liftM (\x -> (x, const mempty))
      pass' = join . liftM g
      g (Left ((x, f), w)) = tell (f w) >> return (Left x)
      g (Right f) = return . Right . IterT . pass' . runIterT $ f
#if MIN_VERSION_mtl(2,1,1)
  writer w = lift (writer w)
  {-# INLINE writer #-}
#endif

instance MonadState s m => MonadState s (IterT m) where
  get = lift get
  {-# INLINE get #-}
  put s = lift (put s)
  {-# INLINE put #-}
#if MIN_VERSION_mtl(2,1,1)
  state f = lift (state f)
  {-# INLINE state #-}
#endif

instance MonadError e m => MonadError e (IterT m) where
  throwError = lift . throwError
  {-# INLINE throwError #-}
  -- Install the handler on the current step and, recursively, on every
  -- later step.
  IterT m `catchError` f = IterT $ liftM (fmap (`catchError` f)) m `catchError` (runIterT . f)

instance MonadIO m => MonadIO (IterT m) where
  liftIO = lift . liftIO

instance MonadCont m => MonadCont (IterT m) where
  callCC f = IterT $ callCC (\k -> runIterT $ f (lift . k . Left))

-- IterT is the free monad over Identity: wrap adds one layer (a step).
instance Monad m => MonadFree Identity (IterT m) where
  wrap = IterT . return . Right . runIdentity
  {-# INLINE wrap #-}

instance MonadThrow m => MonadThrow (IterT m) where
  throwM = lift . throwM
  {-# INLINE throwM #-}

instance MonadCatch m => MonadCatch (IterT m) where
  catch (IterT m) f = IterT $ liftM (fmap (`Control.Monad.Catch.catch` f)) m `Control.Monad.Catch.catch` (runIterT . f)
  {-# INLINE catch #-}
-- | Adds an extra layer to a free monad value.
--
-- In particular, for the iterative monad 'Iter', this makes the
-- computation require one more step, without changing its final
-- result.
--
-- prop> runIter (delay ma) == Right ma
delay :: (Monad f, MonadFree f m) => m a -> m a
-- wrap . return defers the whole computation under one new layer.
delay = wrap . return
{-# INLINE delay #-}
-- |
-- 'retract' is the left inverse of 'lift':
--
-- @
-- 'retract' . 'lift' = 'id'
-- @
--
-- It runs every step in the base monad until a result appears.
retract :: Monad m => IterT m a -> m a
retract (IterT m) = m >>= either return retract
-- | Tear down a 'Free' 'Monad' using iteration: @phi@ collapses one
-- layer of the base monad at a time.
fold :: Monad m => (m a -> a) -> IterT m a -> a
fold phi (IterT m) = phi (liftM (either id (fold phi)) m)
-- | Like 'fold' with monadic result.
foldM :: (Monad m, Monad n) => (m (n a) -> n a) -> IterT m a -> n a
foldM phi (IterT m) = phi (liftM (either return (foldM phi)) m)
-- | Lift a monad homomorphism from @m@ to @n@ into a Monad homomorphism
-- from @'IterT' m@ to @'IterT' n@, applied at every step.
hoistIterT :: Monad n => (forall a. m a -> n a) -> IterT m b -> IterT n b
hoistIterT f (IterT as) = IterT (liftM (fmap (hoistIterT f)) (f as))
-- | Lifts a plain, non-terminating computation into a richer
-- environment by replacing each 'Identity' layer with a 'return'.
-- 'liftIter' is a 'Monad' homomorphism.
liftIter :: (Monad m) => Iter a -> IterT m a
liftIter = hoistIterT (\(Identity a) -> return a)
-- | A computation that never terminates: an infinite tower of delays.
never :: (Monad f, MonadFree f m) => m a
never = wrap (return never)
-- | Repeatedly run a computation until it produces a 'Just' value.
-- This can be useful when paired with a monad that has side effects.
--
-- For example, we may have @genId :: IO (Maybe Id)@ that uses a random
-- number generator to allocate ids, but fails if it finds a collision.
-- We can repeatedly run this with
--
-- @
-- 'retract' ('untilJust' genId) :: IO Id
-- @
untilJust :: (Monad m) => m (Maybe a) -> IterT m a
untilJust f = do
  mx <- lift f
  case mx of
    Just x  -> return x
    Nothing -> delay (untilJust f)   -- one step per failed attempt
{-# INLINE untilJust #-}
-- | Cuts off an iterative computation after a given number of
-- steps. If the number of steps is 0 or less, no computation nor
-- monadic effects will take place.
--
-- The step where the final value is produced also counts towards the limit.
--
-- Some examples (@n ≥ 0@):
--
-- @
-- 'cutoff' 0     _ ≡ 'return' 'Nothing'
-- 'cutoff' (n+1) '.' 'return' ≡ 'return' '.' 'Just'
-- 'cutoff' (n+1) '.' 'lift' ≡ 'lift' '.' 'liftM' 'Just'
-- 'cutoff' (n+1) '.' 'delay' ≡ 'delay' . 'cutoff' n
-- 'cutoff' n 'never' ≡ 'iterate' 'delay' ('return' 'Nothing') '!!' n
-- @
--
-- Calling @'retract' '.' 'cutoff' n@ is always terminating, provided each of the
-- steps in the iteration is terminating.
cutoff :: (Monad m) => Integer -> IterT m a -> IterT m (Maybe a)
cutoff n it
  | n <= 0    = return Nothing
  | otherwise = IterT (liftM step (runIterT it))
  where
    step (Left a)     = Left (Just a)
    step (Right rest) = Right (cutoff (n - 1) rest)
-- | Interleaves the steps of a finite list of iterative computations, and
-- collects their results.
--
-- The resulting computation has as many steps as the longest computation
-- in the list.
interleave :: Monad m => [IterT m a] -> IterT m [a]
interleave ms = IterT $ do
  -- Run one step of every computation.
  xs <- mapM runIterT ms
  if null (rights xs)
    -- All finished in this round: lefts preserves input order.
    then return . Left $ lefts xs
    -- Otherwise re-wrap finished ones as `return a` and go again.
    else return . Right . interleave $ map (either return id) xs
{-# INLINE interleave #-}
-- | Interleaves the steps of a finite list of computations, and discards their
-- results.
--
-- The resulting computation has as many steps as the longest computation
-- in the list.
--
-- Equivalent to @'void' '.' 'interleave'@.
interleave_ :: (Monad m) => [IterT m a] -> IterT m ()
interleave_ [] = return ()
-- Step every computation, keep only the unfinished ones, repeat.
-- NOTE(review): even when all computations finish in the same round
-- this still wraps one more Right (interleave_ []), i.e. one step more
-- than 'void . interleave' would take — confirm whether the documented
-- equivalence is intended only up to the number of steps.
interleave_ xs = IterT $ liftM (Right . interleave_ . rights) $ mapM runIterT xs
{-# INLINE interleave_ #-}
-- mappend steps both computations in lock-step each round and combines
-- the results once both have finished.
instance (Monad m, Monoid a) => Monoid (IterT m a) where
  mempty = return mempty
  x `mappend` y = IterT $ do
    x' <- runIterT x
    y' <- runIterT y
    case (x', y') of
      ( Left a, Left b) -> return . Left $ a `mappend` b
      -- One side finished: fold its value into the other side's result.
      ( Left a, Right b) -> return . Right $ liftM (a `mappend`) b
      (Right a, Left b) -> return . Right $ liftM (`mappend` b) a
      (Right a, Right b) -> return . Right $ a `mappend` b
  -- mconcat steps all computations per round, merging adjacent finished
  -- values (compact) so completed prefixes are combined eagerly.
  mconcat = mconcat' . map Right
    where
      mconcat' :: (Monad m, Monoid a) => [Either a (IterT m a)] -> IterT m a
      mconcat' ms = IterT $ do
        xs <- mapM (either (return . Left) runIterT) ms
        case compact xs of
          -- Everything collapsed to a single value: done.
          [l@(Left _)] -> return l
          xs' -> return . Right $ mconcat' xs'
      {-# INLINE mconcat' #-}
      -- Merge runs of adjacent finished (Left) values with <>.
      compact :: (Monoid a) => [Either a b] -> [Either a b]
      compact [] = []
      compact (r@(Right _):xs) = r:(compact xs)
      compact ( Left a :xs) = compact' a xs
      compact' a [] = [Left a]
      compact' a (r@(Right _):xs) = (Left a):(r:(compact xs))
      compact' a ( (Left a'):xs) = compact' (a <> a') xs
-- Manual Typeable1 instance for GHC < 7.7 (newer GHCs use the derived
-- Typeable on the newtype above); the #else branch aliases Typeable1 to
-- Typeable so the Data instance below compiles on both.
#if __GLASGOW_HASKELL__ < 707
instance Typeable1 m => Typeable1 (IterT m) where
  typeOf1 t = mkTyConApp freeTyCon [typeOf1 (f t)] where
    -- Phantom accessor, only used for its type.
    f :: IterT m a -> m a
    f = undefined

freeTyCon :: TyCon
#if __GLASGOW_HASKELL__ < 704
freeTyCon = mkTyCon "Control.Monad.Iter.IterT"
#else
freeTyCon = mkTyCon3 "free" "Control.Monad.Iter" "IterT"
#endif
{-# NOINLINE freeTyCon #-}
#else
#define Typeable1 Typeable
#endif
-- Hand-written Data instance: IterT has exactly one constructor, so
-- gfoldl/gunfold are straightforward.
instance
  ( Typeable1 m, Typeable a
  , Data (m (Either a (IterT m a)))
  , Data a
  ) => Data (IterT m a) where
    gfoldl f z (IterT as) = z IterT `f` as
    toConstr IterT{} = iterConstr
    gunfold k z c = case constrIndex c of
        1 -> k (z IterT)
        _ -> error "gunfold"
    dataTypeOf _ = iterDataType
    dataCast1 f = gcast1 f

-- Shared constructor/datatype descriptors (NOINLINE keeps one copy).
iterConstr :: Constr
iterConstr = mkConstr iterDataType "IterT" [] Prefix
{-# NOINLINE iterConstr #-}

iterDataType :: DataType
iterDataType = mkDataType "Control.Monad.Iter.IterT" [iterConstr]
{-# NOINLINE iterDataType #-}
{- $examples
* <examples/MandelbrotIter.lhs Rendering the Mandelbrot set>
* <examples/Cabbage.lhs The wolf, the sheep and the cabbage>
-}
| da-x/free | src/Control/Monad/Trans/Iter.hs | bsd-3-clause | 14,971 | 0 | 17 | 3,329 | 4,344 | 2,327 | 2,017 | 252 | 2 |
import Foreign
import Foreign.C
-- These newtypes over FunPtr, Ptr and IO...
newtype MyFunPtr a = MyFunPtr { getFunPtr :: FunPtr a }
newtype MyPtr a = MyPtr (Ptr a)
newtype MyIO a = MyIO { runIO :: IO a }
-- should be supported by...
-- foreign import dynamics (converting a C function pointer into a
-- callable Haskell function)
foreign import ccall "dynamic"
  mkFun1 :: MyFunPtr (CInt -> CInt) -> (CInt -> CInt)
foreign import ccall "dynamic"
  mkFun2 :: MyPtr (Int32 -> Int32) -> (CInt -> CInt)
-- and foreign import wrappers (exporting a Haskell function as a C
-- function pointer).
foreign import ccall "wrapper"
  mkWrap1 :: (CInt -> CInt) -> MyIO (MyFunPtr (CInt -> CInt))
foreign import ccall "wrapper"
  mkWrap2 :: (CInt -> CInt) -> MyIO (MyPtr (Int32 -> Int32))
-- We'll need a dynamic function pointer to export
foreign import ccall "getDbl" getDbl :: IO (MyFunPtr (CInt -> CInt))
-- and a Haskell function to export: integer halving (rounds toward
-- negative infinity, as 'div' does).
half :: CInt -> CInt
half n = n `div` 2
-- and a C function to pass it to.  Both bindings reference the same C
-- symbol "apply", imported at two different (newtype-wrapped) Haskell
-- types.
foreign import ccall "apply" apply1 :: MyFunPtr (CInt -> CInt) -> Int -> Int
foreign import ccall "apply" apply2 :: MyPtr (Int32 -> Int32) -> Int -> Int
-- Exercise both directions: call the C-supplied doubler through both
-- newtype-wrapped imports, then hand 'half' to C through both wrappers.
main :: IO ()
main = do
  dbl <- getDbl
  let viaFunPtr = mkFun1 dbl
      viaPtr    = mkFun2 (MyPtr (castFunPtrToPtr (getFunPtr dbl)))
  print (viaFunPtr 21, viaPtr 21)
  halfA <- runIO (mkWrap1 half)
  halfB <- runIO (mkWrap2 half)
  print (apply1 halfA 84, apply2 halfB 84)
| sdiehl/ghc | testsuite/tests/ffi/should_run/T493.hs | bsd-3-clause | 1,278 | 1 | 12 | 271 | 448 | 238 | 210 | 27 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="tr-TR">
<title>Tarayıcı Görünümü | ZAP Uzantısı</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>İçerikler</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Dizin</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Arama</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favoriler</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/browserView/src/main/javahelp/org/zaproxy/zap/extension/browserView/resources/help_tr_TR/helpset_tr_TR.hs | apache-2.0 | 987 | 87 | 29 | 159 | 415 | 221 | 194 | -1 | -1 |
{-# LANGUAGE CPP #-}
module Tests.PlusPlusUnicode where
import Data.Char (ord)
{-# NOINLINE str1 #-}
str1 = "Tomten klappar händerna"

{-# NOINLINE str2 #-}
str2 = " åt 5001 apor"

-- Concatenated at runtime; the NOINLINEs above keep GHC from folding
-- the append away at compile time.
{-# NOINLINE theString #-}
theString = concat [str1, str2]

-- Displaying unicode chars is kind of broken in the standalone SpiderMonkey
-- interpreter, so we'll have to look at the char codes. :(
runTest :: IO [Int]
runTest = return (map ord theString)
| joelburget/haste-compiler | Tests/PlusPlusUnicode.hs | bsd-3-clause | 431 | 0 | 6 | 77 | 66 | 40 | 26 | 11 | 1 |
-- The behavior of type-inference and OverlappingInstances has changed
-- between GHC 6.12 and GHC 7.0 such that the following code
-- type-checks under 6.12, but not 7.0rc2. I assume this change has
-- something to do with the new type checker in GHC 7, but it is not
-- clear to me if this change in behavior is intended. Nor am I clear
-- how to achieve something similar to the old behavior. This is
-- preventing HSP (and by extension, happstack) from migrating to GHC
-- 7. I reported this earlier on the mailing lists, but I have further
-- simplified the test case here.
{-# LANGUAGE TypeFamilies, MultiParamTypeClasses
, FlexibleContexts, FlexibleInstances, UndecidableInstances
, TypeSynonymInstances, GeneralizedNewtypeDeriving
#-}
module XMLGenerator where
-- Regression test for type inference with overlapping instances: the
-- code below must typecheck, with `asChild` in the FooBar instance
-- resolving through the XMLGenT instance.
newtype XMLGenT m a = XMLGenT (m a)
  deriving (Functor, Applicative, Monad)

class Monad m => XMLGen m where
  type XML m
  data Child m
  genElement :: String -> XMLGenT m (XML m)

class XMLGen m => EmbedAsChild m c where
  asChild :: c -> XMLGenT m [Child m]

-- The overlapping pair: a wrapped XMLGenT computation...
instance {-# OVERLAPPING #-} (EmbedAsChild m c, m1 ~ m) => EmbedAsChild m (XMLGenT m1 c)
-- ...versus a bare XML value.
instance {-# OVERLAPPABLE #-} (XMLGen m, XML m ~ x) => EmbedAsChild m x

data Xml = Xml

-- Minimal instances (bodies intentionally omitted; only the types
-- matter for this typechecker test).
data IdentityT m a = IdentityT (m a)
instance Functor (IdentityT m)
instance Applicative (IdentityT m)
instance Monad (IdentityT m)
instance XMLGen (IdentityT m) where
  type XML (IdentityT m) = Xml

data Identity a = Identity a
instance Functor Identity
instance Applicative Identity
instance Monad Identity

instance {-# OVERLAPPING #-} EmbedAsChild (IdentityT IO) (XMLGenT Identity ())

data FooBar = FooBar

instance {-# OVERLAPPING #-} EmbedAsChild (IdentityT IO) FooBar where
  asChild b = asChild $ (genElement "foo")
  -- asChild :: FooBar -> XMLGenT (XMLGenT (IdentityT IO) [Child (IdentityT IO)])
{- ---------- Deriving the constraints ----------
asChild :: EmbedAsChild m c => c -> XMLGenT m [Child m]
genElement :: XMLGen m => String -> XMLGenT m (XML m)
Wanted: EmbedAsChild m c, with m = IdentityT IO
c = XMLGenT meta (XML meta)
XMLGen meta
ie EmbedAsChild (IdentityT IO) (XMLGen meta (XML meta)
XMLGen meta
We have instances
EmbedAsChild (IdentityT IO) FooBar
EmbedAsChild (IdentityT IO) (XMLGenT Identity ())
EmbedAsChild m (XMLGenT m1 c)
EmbedAsChild m x
-}
| ezyang/ghc | testsuite/tests/indexed-types/should_fail/T4485.hs | bsd-3-clause | 2,405 | 0 | 10 | 516 | 387 | 205 | 182 | 29 | 0 |
{-# LANGUAGE OverloadedStrings #-}
-- | Parsers for primitive values and types. Mostly useful for
-- "Futhark.IR.Parse", but can perhaps come in handy elsewhere too.
module Futhark.IR.Primitive.Parse
( pPrimValue,
pPrimType,
pFloatType,
pIntType,
-- * Building blocks
constituent,
lexeme,
keyword,
whitespace,
)
where
import Data.Char (isAlphaNum)
import Data.Functor
import qualified Data.Text as T
import Data.Void
import Futhark.IR.Primitive
import Futhark.Util.Pretty hiding (empty)
import Text.Megaparsec
import Text.Megaparsec.Char
import qualified Text.Megaparsec.Char.Lexer as L
-- | Is this character a valid member of an identifier?
constituent :: Char -> Bool
constituent ch
  | isAlphaNum ch = True
  | otherwise     = ch `elem` ("_/'+-=!&^.<>*|" :: String)
-- | Consume whitespace (including skipping line comments).
-- Line comments start with "--"; no block-comment syntax is skipped.
whitespace :: Parsec Void T.Text ()
whitespace = L.space space1 (L.skipLineComment "--") empty
-- | Consume whitespace after the provided parser, if it succeeds.
-- The whole thing is wrapped in 'try' so a failing lexeme consumes nothing.
lexeme :: Parsec Void T.Text a -> Parsec Void T.Text a
lexeme p = try (L.lexeme whitespace p)
-- | @keyword k@ parses @k@, which must not be immediately followed by
-- a 'constituent' character.  This ensures that @iff@ is not seen as
-- the @if@ keyword followed by @f@ — the "maximum munch" rule.
keyword :: T.Text -> Parsec Void T.Text ()
keyword s = lexeme (chunk s *> notFollowedBy (satisfy constituent))
-- | Parse an integer value: an optionally signed decimal number followed
-- by a mandatory integer type suffix (parsed by 'pIntType').
-- The empty space parser passed to 'L.signed' means no whitespace is
-- allowed between the sign and the digits.
pIntValue :: Parsec Void T.Text IntValue
pIntValue = try $ do
  x <- L.signed (pure ()) L.decimal
  t <- pIntType
  pure $ intValue t (x :: Integer)
-- | Parse a floating-point value: either a signed decimal with a float
-- type suffix, or one of the keyword spellings of NaN / infinity for
-- f16, f32 and f64.
pFloatValue :: Parsec Void T.Text FloatValue
pFloatValue =
  choice
    [ pNum,
      keyword "f16.nan" $> Float16Value (0 / 0),
      keyword "f16.inf" $> Float16Value (1 / 0),
      keyword "-f16.inf" $> Float16Value (-1 / 0),
      keyword "f32.nan" $> Float32Value (0 / 0),
      keyword "f32.inf" $> Float32Value (1 / 0),
      keyword "-f32.inf" $> Float32Value (-1 / 0),
      keyword "f64.nan" $> Float64Value (0 / 0),
      keyword "f64.inf" $> Float64Value (1 / 0),
      keyword "-f64.inf" $> Float64Value (-1 / 0)
    ]
  where
    -- Parsed at Double precision, then converted to the suffixed type.
    pNum = try $ do
      x <- L.signed (pure ()) L.float
      t <- pFloatType
      pure $ floatValue t (x :: Double)
-- | Parse a boolean value: the keyword @true@ or @false@.
pBoolValue :: Parsec Void T.Text Bool
pBoolValue =
  choice
    [ True <$ keyword "true",
      False <$ keyword "false"
    ]
-- | Defined in this module for convenience.
-- Floats are tried before ints — presumably so a value such as @1.0f32@
-- is not partially claimed by the integer parser; confirm before reordering.
pPrimValue :: Parsec Void T.Text PrimValue
pPrimValue =
  choice
    [ FloatValue <$> pFloatValue,
      IntValue <$> pIntValue,
      BoolValue <$> pBoolValue,
      UnitValue <$ "()"
    ]
    <?> "primitive value"
-- | Parse a floating-point type (one of the known float widths).
pFloatType :: Parsec Void T.Text FloatType
pFloatType = choice [keyword (prettyText t) $> t | t <- allFloatTypes]

-- | Parse an integer type (one of the known int widths).
pIntType :: Parsec Void T.Text IntType
pIntType = choice [keyword (prettyText t) $> t | t <- allIntTypes]

-- | Parse a primitive type: bool, unit, or any float/int type.
pPrimType :: Parsec Void T.Text PrimType
pPrimType =
  choice [asKeyword Bool, asKeyword Unit, FloatType <$> pFloatType, IntType <$> pIntType]
  where
    -- Match the pretty-printed name of the type as a keyword.
    asKeyword t = keyword (prettyText t) $> t
| diku-dk/futhark | src/Futhark/IR/Primitive/Parse.hs | isc | 3,250 | 0 | 14 | 703 | 900 | 478 | 422 | 72 | 1 |
{-
pam-expiration – Expire inactive users
Copyright © 2011 Johan Kiviniemi <devel@johan.kiviniemi.name>
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-}
module Main where
import Expiration
import Shadow (shadowFile)
import Control.Applicative
import Control.Monad
import Data.Time.Clock.POSIX (getPOSIXTime)
import System.Environment
import System.Exit
import System.Log.Handler.Syslog
import System.Log.Logger
import System.Process
import Text.ParserCombinators.Parsec (parse)
-- | Accounts are expired after this many days of inactivity.
expireDays = 180
-- | Minimum severity forwarded to the logger.
logLevel = DEBUG
-- | Wire syslog-backed logging into the global logger, then run 'main'',
-- logging any exception and exiting non-zero.
main = do
  syslog <- openlog "pam-expiration" [PID] AUTH DEBUG
  updateGlobalLogger rootLoggerName
    (setLevel logLevel . addHandler syslog)
  -- NOTE(review): unqualified 'catch' here is the pre-base-4.6 Prelude
  -- catch; with a modern base this needs Control.Exception — confirm the
  -- intended compiler version before touching.
  main' `catch` (\e -> errorM rootLoggerName (show e) >> exitFailure)
-- | Read the PAM environment and decide whether to touch the user's
-- expiration date.  Expects exactly one argument: the PAM module type.
-- Only "auth" (any service) and "ses_open" under sshd trigger a change.
main' = do
  args    <- getArgs
  service <- getEnv "PAM_SERVICE"
  user    <- getEnv "PAM_USER"
  case args of
    [moduleType] ->
      case (moduleType, service) of
        ("auth", _)          -> maybeChangeExpiration user
        ("ses_open", "sshd") -> maybeChangeExpiration user
        _                    -> return ()
    _ -> error "Expected exactly one argument"
-- | Look up the user's shadow entry via getent, compute a new expiration
-- from the current time, and apply it with chage when one is due.
maybeChangeExpiration user = do
  shadow <- readProcess "/usr/bin/getent" ["shadow", "--", user] ""
  entries <- eitherError $ parse shadowFile "shadow" shadow
  now <- getPOSIXTime
  when (length entries /= 1)
    (error $ "Expected exactly one shadow entry (user " ++ show user ++ ")")
  -- Left: a reason not to change (logged at DEBUG); Right: new expire day.
  either (debugM rootLoggerName
            . showString "Not changing expire date of user "
            . shows user . showString ": ")
    (changeExpiration user)
    (newExpiration now expireDays $ head entries)
  where
    -- Lift a parse failure into an IO error.
    eitherError = either (ioError . userError . show) return
-- | Set the account expiration day for a user by shelling out to chage,
-- logging the change at NOTICE level first.
changeExpiration user n = do
  noticeM rootLoggerName $
    "Setting expire date of user " ++ show user ++ " as " ++ show n
  _ <- readProcess "/usr/bin/chage" ["--expiredate", show n, "--", user] ""
  return ()
| ion1/pam-expiration | src/Main.hs | isc | 2,612 | 0 | 13 | 557 | 530 | 267 | 263 | 46 | 3 |
module LwchTwo where
-- http://learnyouahaskell.com/chapters
-- : retab
-- : set expandtab ts=4 ruler number spell linebreak
-- :set +s
-- let fn003 = fn001 / fn000
{-
my #1 source of pain in learning Haskell was tabs. CONVERT ALL TABS TO SPACES!
Tabs can make perfectly correct code not compile.
Prelude> -- our base library
-- repl commands
:l
:r
:t
:i
:!
:m
myFile.hs
:set prompt "ghci> "
---------------------------------
simple math
ghci> 2 + 15
17
ghci> 49 * 100
4900
ghci> 1892 - 1472
420
ghci> 5 / 2
2.5
ghci> (50 * 100) - 4999
1
ghci> 50 * 100 - 4999
1
ghci> 50 * (100 - 4999)
-244950
5 * -3
ghci> 5 * -3
<interactive>:7:1:
Precedence parsing error
cannot mix `*' [infixl 7] and prefix `-' [infixl 6] in the same infix expression
ghci> 5 * (-3)
-15
-- True and False
ghci> True && False
False
ghci> True && True
True
ghci> False || True
True
ghci> not False
True
ghci> not (True && True)
False
ghci> 5 == 5
True
ghci> 1 == 0
False
ghci> 5 /= 5
False
ghci> 5 /= 4
True
ghci> "hello" == "hello"
True
---------functions------
-- functions are the workhorse of Haskell
ghci> succ 8
9
-- function with two arguments
ghci> min 9 10
9
ghci> min 3.4 3.2
3.2
ghci> max 100 101
101
---------------------------------
-- unlike lisp
-- white space can stand in for parentheses
-- you can use them but they are mostly optional
-- But when groupings are ambiguous you will have to
ghci> succ 9 + max 5 4 + 1
16
ghci> (succ 9) + (max 5 4) + 1
16
ghci> max (5 + 2) (sqrt 17)
7
div 92 10
-- not apostrophes but back-ticks
-- a way to do functions fix style
92 `div` 10
---------------------------------
-}
---------Making-your-own-Functions------------------------
-- note there are no parens, commas, braces, returns, blocks, etc.
-- | Double a number (works for any 'Num').
doubleMe x = 2 * x
-- to write this in the repl the "let" prefix is required.
----------------------------------------------------------
-- conditionals ----------------
-- note! no dangling elses
-- generally things should terminate
-- | Double x, but leave values above 100 untouched.
doubleSmallNumber x
  | x > 100   = x
  | otherwise = x * 2

-- | Like 'doubleSmallNumber', then add one to the result.
doubleSmallNumber' x = 1 + (if x > 100 then x else x * 2)
---------------------------------
{-
----------------------------------
-}
----------------------------------
----Functions-calling-Functions-----
---------------------------------
-- functions can call other functions from within functions
-- this is similar to recursion where we call the same function on itself.
-- | Multiply the larger of the first two arguments by the third.
-- multMax :: (Ord a, Num a) => a -> a -> a -> a
multMax a b x = x * max a b
-- | Ratio of the two demo expressions below.
fn003 = fn001 / fn000

-- | 6 * (7 + 5/2)^2 + 2, i.e. 543.5 at the default numeric type.
fn000 = 6 * (7 + 5 / 2) ^ 2 + 2

-- | Plain addition, demonstrating prefix application of (+).
myFun001 x y = x + y

-- | fn000's first term plus twice 5-factorial (541.5 + 240).
fn001 = 6 * (7 + 5 / 2) ^ 2 + 2 * product (take 5 [1 ..])

-- | Same value as 'fn001', written with explicit prefix (+).
fn001b = (+) (6 * (7 + 5 / 2) ^ 2) (2 * product (take 5 [1 ..]))

-- | Generalises 'fn001': scale 5-factorial by x instead of 2.
fn001c x = 6 * (7 + 5 / 2) ^ 2 + x * product (take 5 [1 ..])
-------------------
-- fn003 see above
------------------
-- | (x + y^z) divided by a locally named constant 2.
fn002 x y z = (x + y ^ z) / w
  where
    w = 2
-- | x plus y^z scaled by a fixed weight.
-- The weight is 101 consed onto [2..100] and summed: 101 + 5049 = 5150.
-- (The original also bound an unused local @a = []@, which forced the
-- list element type for no purpose; it has been removed.)
fn004 x y z = x + y ^ z * w
  where
    w = sum (head [101 .. 201] : tail [1 .. 100])
-- *Lwch002> fn004 4 5 6.0
-- *Lwch002> fn004 4 5.0 6
-- fn004 :: (Num a, Integral b, Enum a) => a -> a -> b -> a
------------------------------
-- :i fn004
----------------------------
-- :t fn004 0.3 5.0 (length "Hello")
-----------------------------------
-- Function Purity ---------------
-----------------------------------
-- TypeClassHierarchy pdf --------
-----------------------------------
-- purity means isolated from context.
-- purity means isolated from state.
-- running a function shouldn't change anything.
-- a.k.a. "launch the missiles!"
-- The hard part is they can't rely on context either.
-- but the benefit is reliability
-- Same args ---> same result!! -- always!
-- Anything else is impure.
-- is this "simple made hard?"
-- remember the first 7 years of Haskell's existence ...
-------------------------------
-- printing is impure
-- reading from a file is impure
-- generating a random number
-- getting the current time
--------------------------------
-- all these things must be approached differently
-- but what do I get for my trouble?
-- Certainty, reliability, speed, ease of parallelization;
-- When we get the logic of our functions right
-- and those functions compile
-- there are no side-effects
-- there is no state
-- therefore (up to 90%) less testing
-- note: SkedgeMe doesn't test it's back-end, the Haskell part it's tests are confined to the front-end JavaScript.
-------------------------------
-------------------------------
-- quickcheck these
-- because there are no side effects
-- their is no state
-- I don't have to test what the type system will catch
-- I don't have to worry about Strings passed in where numbers should be or asking only to be surprised with nothing a.k.a. Null pointers.
-- | Add two numbers.
someFun001 m n = m + n
-- | Identical to 'someFun001'; exists only to show naming is free.
someFun002 m n = m + n
| HaskellForCats/HaskellForCats | ladiesWhoCodeHaskell/LwchTwo.hs | mit | 5,108 | 1 | 12 | 1,189 | 530 | 316 | 214 | -1 | -1 |
{-# htermination minimum :: [Float] -> Float #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_minimum_6.hs | mit | 49 | 0 | 2 | 8 | 3 | 2 | 1 | 1 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Translator where
import Data.Either.Combinators (fromRight')
import Data.List.Zipper
import qualified Data.Text as T
import Pdf.Toolbox.Document
import System.IO
-- | A sentence with its 1-based position in the document.
data Sentence = Sentence {sentenceNum :: Int, sentenceText :: T.Text }
-- | A sentence paired with its (possibly still empty) translation.
data SentenceBlock = SentenceBlock {blockNum :: Int, originalText :: T.Text, translatedText :: T.Text}
-- | Render a sentence as "<number>:<text>".
writeSentenceWithNum :: Sentence -> String
writeSentenceWithNum s =
  show (sentenceNum s) ++ ":" ++ T.unpack (sentenceText s)
-- | Render just the sentence text.
writeSentence :: Sentence -> String
writeSentence = T.unpack . sentenceText
-- | Replace a block's translation with the given text, keeping its
-- number and original text.
updateSentenceBlock :: String -> SentenceBlock -> SentenceBlock
updateSentenceBlock text block = block { translatedText = T.pack text }
-- | Build a sentence from a (number, text) pair.
toSentence :: (Int,T.Text) -> Sentence
toSentence = uncurry Sentence
-- | Split a text on '.' into numbered sentences (1-based), re-appending
-- the '.' to each piece.
-- NOTE(review): 'init' drops the final split piece, so any trailing text
-- after the last '.' is silently discarded — confirm that is intended.
getSentences :: T.Text -> [Sentence]
getSentences str = map toSentence $ zip [1..len] list
  where list = map (flip T.snoc '.') $ init $ T.splitOn "." str
        len = length list
-- | Pair a sentence with an (initially empty) translation.
toSentenceBlock :: Sentence -> SentenceBlock
toSentenceBlock (Sentence n t) = SentenceBlock n t ""
-- | Build a zipper of empty translation blocks from a sentence list.
makeZipper :: [Sentence] -> Zipper SentenceBlock
makeZipper sentences = fromList (map toSentenceBlock sentences)
-- | Store the given translation on the block under the cursor.
updateZipper :: String -> Zipper SentenceBlock -> Zipper SentenceBlock
updateZipper text zipper =
  replace (updateSentenceBlock text (cursor zipper)) zipper
-- | Move the zipper with @f@ and return it together with the newly
-- focused element.
shiftZipper :: (Zipper a -> Zipper a) -> Zipper a -> (Zipper a, a)
shiftZipper f z = (moved, cursor moved)
  where moved = f z
-- | Extract the text of page @n@ from the current Pdf document by
-- walking catalog -> root page node -> page.
getPageText :: MonadIO m => Int -> Pdf m T.Text
getPageText n = do
  pdf <- document
  catalog <- documentCatalog pdf
  rootNode <- catalogPageNode catalog
  page <- pageNodePageByNum rootNode n
  pageExtractText page
-- | Open a PDF file and extract the text of one page.
-- NOTE(review): 'fromRight'' is partial — any pdf-toolbox failure makes
-- this crash rather than report a diagnostic; consider surfacing the Left.
openPdfFile :: FilePath -> Int -> IO T.Text
openPdfFile file page = fmap fromRight' $ withBinaryFile file ReadMode $ \handle ->
  runPdfWithHandle handle knownFilters $ getPageText page
| ericvm/translator | src/Translator.hs | mit | 2,028 | 0 | 12 | 366 | 661 | 340 | 321 | 44 | 1 |
{-# LANGUAGE NamedFieldPuns, FlexibleInstances #-}
module Latro.Semant.Display where
import Data.List (intercalate, intersperse)
import qualified Data.Map as Map
import Latro.Ast
import Latro.Output
import Latro.Semant
import Latro.Sexpable
import Text.Printf (printf)
-- | 'Nothing' serialises as an empty list, 'Just' as the payload itself.
instance Sexpable v => Sexpable (Maybe v) where
  sexp = maybe (List []) sexp
-- To satisfy (Sexpable RawId)
-- Raw identifiers serialise as bare symbols.
instance Sexpable RawId where
  sexp = Symbol

-- | Source positions serialise as (SourcePos <file> <line> <col>).
instance Sexpable SourcePos where
  sexp (SourcePos filePath lineNum colNum) =
    List [ Symbol "SourcePos"
         , Atom filePath
         , Symbol $ show lineNum
         , Symbol $ show colNum
         ]
instance Sexpable CheckedData where
sexp (OfTy srcPos ty) =
List [ Symbol "OfTy"
, sexp srcPos
, sexp ty
]
instance (Sexpable a, Sexpable id) => Sexpable (QualifiedId a id) where
sexp (Id _ raw) = sexp raw
sexp (Path _ qid raw) =
Symbol $ printf "%s.%s" (showSexp qid) (showSexp raw)
instance (Sexpable a, CompilerOutput id) => CompilerOutput (QualifiedId a id) where
render (Id _ raw) = render raw
render (Path _ qid raw) =
printf "%s.%s" (render qid) (render raw)
instance (Sexpable a, Sexpable id) => Sexpable (SynTy a id) where
sexp (SynTyInt d) = List [ Symbol "Int", sexp d ]
sexp (SynTyBool d) = List [ Symbol "Bool", sexp d ]
sexp (SynTyChar d) = List [ Symbol "Char", sexp d ]
sexp (SynTyUnit d) = List [ Symbol "Unit", sexp d ]
sexp (SynTyArrow d paramTys retTy) =
List $ sexp d : (intersperse (Symbol "->" ) . map sexp) (paramTys ++ [retTy])
sexp (SynTyStruct d fields) =
List [ Symbol "Struct"
, sexp d
, List $ map (\(id, ty) -> List [ Symbol "Field", sexp id, sexp ty ]) fields
]
sexp (SynTyAdt d id alts) =
List [ Symbol "ADT"
, sexp d
, sexp id
, toSexpList alts
]
sexp (SynTyTuple d synTys) =
List [ Symbol "Tuple" , sexp d, toSexpList synTys ]
sexp (SynTyList d synTy) =
List [ Symbol "List", sexp d, sexp synTy ]
sexp (SynTyRef d qid []) =
List [ Symbol "Ref", sexp d, sexp qid ]
sexp (SynTyRef d qid tyParamIds) =
List [ Symbol "Ref", sexp d, sexp qid, toSexpList tyParamIds ]
instance CompilerOutput RawId where
render id = id
instance Sexpable UniqId where
sexp (UserId raw) = Symbol raw
sexp (UniqId i raw) = List [ Symbol "Id", Symbol raw, Symbol $ show i ]
instance CompilerOutput UniqId where
render (UserId raw) = raw
render (UniqId i raw) = printf "%s@%i" raw i
instance (Sexpable a, Sexpable id) => Sexpable (CompUnit a id) where
sexp (CompUnit d es) =
List [ Symbol "CompUnit"
, sexp d
, toSexpList es
]
instance (Sexpable a, Sexpable id) => CompilerOutput (CompUnit a id) where
render = showSexp
instance (Sexpable a, Sexpable id) => Sexpable (AdtAlternative a id) where
sexp (AdtAlternative d id i sTys) =
List [ Symbol "AdtAlternative"
, sexp d
, sexp id
, Symbol $ show i
, toSexpList sTys
]
instance (Sexpable a, Sexpable id) => Sexpable (TypeDec a id) where
sexp (TypeDecTy d id tyParamIds sTy) =
List [ Symbol "TypeDecTy"
, sexp d
, sexp id
, toSexpList tyParamIds
, sexp sTy
]
sexp (TypeDecAdt d id tyParamIds alts) =
List [ Symbol "TypeDecAdt"
, sexp d
, sexp id
, toSexpList tyParamIds
, toSexpList alts
]
sexp (TypeDecImplicit d tyDec) =
List [ Symbol "TypeDecImplicit"
, sexp d
, sexp tyDec
]
sexp (TypeDecEmpty d id tyParamIds) =
List [ Symbol "TypeDecEmpty"
, sexp d
, sexp id
, toSexpList tyParamIds
]
instance (Sexpable a, Sexpable id) => Sexpable (PatExp a id) where
sexp (PatExpNumLiteral d str) =
List [ Symbol "PatExpNumLiteral", sexp d, Atom str ]
sexp (PatExpBoolLiteral d b) =
List [ Symbol "PatExpBoolLiteral", sexp d, Symbol $ show b ]
sexp (PatExpStringLiteral d s) =
List [ Symbol "PatExpStringLiteral", sexp d, Atom s ]
sexp (PatExpCharLiteral d s) =
List [ Symbol "PatExpCharLiteral", sexp d, Atom s ]
sexp (PatExpTuple d patEs) =
List [ Symbol "PatExpTuple", sexp d, toSexpList patEs ]
sexp (PatExpAdt d qid patEs) =
List [ Symbol "PatExpAdt", sexp d, sexp qid, toSexpList patEs ]
sexp (PatExpList d patEs) =
List [ Symbol "PatExpList", sexp d, toSexpList patEs ]
sexp (PatExpListCons d a b) =
List [ Symbol "PatExpListCons", sexp d, sexp a, sexp b ]
sexp (PatExpId d id) =
List [ Symbol "PatExpId", sexp d, sexp id ]
sexp (PatExpWildcard d) = List [ Symbol "PatExpWildcard", sexp d ]
instance (Sexpable a, Sexpable id) => Sexpable (CaseClause a id) where
sexp (CaseClause d patE e) =
List [ Symbol "CaseClause"
, sexp d
, sexp patE
, sexp e
]
instance (Sexpable a, Sexpable id) => Sexpable (FunDef a id) where
sexp (FunDefFun d id argPatEs e) =
List [ Symbol "FunDefFun"
, sexp d
, sexp id
, toSexpList argPatEs
, sexp e
]
instance (Sexpable a, Sexpable id) => Sexpable (Constraint a id) where
sexp (Constraint d tyId protoId) =
List [ Symbol "Constraint"
, sexp d
, sexp tyId
, sexp protoId
]
instance (Sexpable a, Sexpable id) => Sexpable (FieldInit a id) where
sexp (FieldInit id e) = List [ sexp id, sexp e ]
instance (Sexpable a, Sexpable id) => Sexpable (AnnDef a id) where
sexp (AnnDefModule d id e) = List [ Symbol "AnnDefModule", sexp d, sexp e ]
sexp (AnnDefFun d funDef) = List [ Symbol "AnnDefFun", sexp d, sexp funDef ]
instance (Sexpable a, Sexpable id) => Sexpable (CondCaseClause a id) where
sexp (CondCaseClause d pExp bodyE) =
List [ Symbol "CondCaseClause"
, sexp d
, sexp pExp
, sexp bodyE
]
sexp (CondCaseClauseWildcard d bodyE) =
List [ Symbol "CondCaseClauseWildcard", sexp d, sexp bodyE ]
instance (Sexpable a, Sexpable id) => Sexpable (Exp a id) where
sexp (ExpCons d a b) = List [ Symbol "ExpCons", sexp d, sexp a, sexp b ]
sexp (ExpInParens d e) = List [ Symbol "ExpInParens", sexp d, sexp e ]
sexp (ExpCustomInfix d lhe id rhe) =
List [ Symbol "ExpCustomInfix"
, sexp d
, sexp lhe
, sexp id
, sexp rhe
]
sexp (ExpMemberAccess d e id) =
List [ Symbol "ExpMemberAccess"
, sexp d
, sexp e
, sexp id
]
sexp (ExpApp d rator rands) =
List [ Symbol "ExpApp"
, sexp d
, sexp rator
, toSexpList rands
]
sexp (ExpPrim d rator) =
List [ Symbol "ExpPrim"
, sexp d
, sexp rator
]
sexp (ExpImport d qid) = List [ Symbol "ExpImport", sexp d, sexp qid ]
sexp (ExpAssign d patE e) =
List [ Symbol "ExpAssign"
, sexp d
, sexp patE
, sexp e
]
sexp (ExpTypeDec d tyDec) = List [ Symbol "ExpTypeDec", sexp d, sexp tyDec ]
sexp (ExpDataDec d tyDec) = List [ Symbol "ExpDataDec", sexp d, sexp tyDec ]
sexp (ExpProtoDec d protoId tyId constraints tyAnns) =
List [ Symbol "ExpProtoDec"
, sexp d
, sexp protoId
, sexp tyId
, toSexpList constraints
, toSexpList tyAnns
]
sexp (ExpProtoImp d sty protoId constraints es) =
List [ Symbol "ExpProtoImp"
, sexp d
, sexp sty
, sexp protoId
, toSexpList constraints
, toSexpList es
]
sexp (ExpTyAnn (TyAnn d id tyParamIds synTy constrs)) =
List [ Symbol "ExpTyAnn"
, sexp d
, sexp id
, toSexpList tyParamIds
, sexp synTy
, toSexpList constrs
]
sexp (ExpWithAnn tyAnn e) =
List [ Symbol "ExpWithAnn"
, sexp tyAnn
, sexp e
]
sexp (ExpFunDef (FunDefFun d id argPatEs bodyE)) =
List [ Symbol "ExpFunDef"
, sexp d
, sexp id
, toSexpList argPatEs
, sexp bodyE
]
sexp (ExpFunDefClauses d id funDefs) =
List [ Symbol "ExpFunDefClauses"
, sexp d
, sexp id
, toSexpList funDefs
]
sexp (ExpInterfaceDec d id tyParamIds tyAnns) =
List [ Symbol "ExpInterfaceDec"
, sexp d
, sexp id
, toSexpList tyParamIds
, toSexpList tyAnns
]
sexp (ExpModule d id es) = List [ Symbol "ExpModule", sexp d, sexp id, toSexpList es ]
sexp (ExpTypeModule d id tyDec es) =
List [ Symbol "ExpTypeModule"
, sexp d
, sexp id
, sexp tyDec
, toSexpList es
]
sexp (ExpStruct d tyE fieldInits) =
List [ Symbol "ExpStruct"
, sexp d
, List $ map sexp fieldInits
]
sexp (ExpIfElse d e thenE elseE) =
List [ Symbol "ExpIfElse"
, sexp d
, sexp e
, sexp thenE
, sexp elseE
]
sexp (ExpMakeAdt d sTy i es) =
List [ Symbol "ExpMakeAdt"
, sexp d
, sexp sTy
, Atom $ show i
, toSexpList es
]
sexp (ExpGetAdtField d e index) =
List [ Symbol "ExpGetAdtField"
, sexp d
, sexp e
, Atom $ show index
]
sexp (ExpTuple d es) = List [ Symbol "ExpTuple", sexp d, toSexpList es ]
sexp (ExpSwitch d e clauses) = List [ Symbol "ExpSwitch", sexp d, sexp e, toSexpList clauses ]
sexp (ExpCond d clauses) =
List [ Symbol "ExpCond"
, sexp d
, toSexpList clauses
]
sexp (ExpList d es) = List [ Symbol "ExpList", sexp d, toSexpList es ]
sexp (ExpFun d paramIds e) =
List [ Symbol "ExpFun"
, sexp d
, toSexpList paramIds
, sexp e
]
sexp (ExpNum d str) = List [ Symbol "ExpNum", sexp d, Symbol str ]
sexp (ExpBool d b) = List [ Symbol "ExpBool", sexp d, Symbol $ show b ]
sexp (ExpString d s) = List [ Symbol "ExpString", sexp d, Atom s ]
sexp (ExpChar d s) = List [ Symbol "ExpChar", sexp d, Atom s ]
sexp (ExpRef d id) = List [ Symbol "ExpRef", sexp d, sexp id ]
sexp (ExpQualifiedRef d qid) = List [ Symbol "ExpQualifiedRef", sexp d, sexp qid ]
sexp (ExpUnit d) = List [ Symbol "ExpUnit", sexp d ]
sexp (ExpBegin d es) =
List [ Symbol "ExpBegin"
, sexp d
, toSexpList es
]
sexp (ExpPrecAssign d id level) =
List [ Symbol "ExpPrecAssign"
, sexp d
, sexp id
, Atom (show level)
]
sexp (ExpFail d msg) = List [ Symbol "ExpFail", sexp d, Atom msg ]
instance (Sexpable a, Sexpable id) => Sexpable (TyAnn a id) where
sexp (TyAnn d id tyParamIds synTy constrs) =
List [ Symbol "TyAnn"
, sexp d
, sexp id
, toSexpList tyParamIds
, sexp synTy
, toSexpList constrs
]
instance (Sexpable a) => Sexpable (ILFieldInit a) where
sexp (ILFieldInit fieldId e) =
List [ Symbol "ILFieldInit", sexp fieldId, sexp e ]
instance (Sexpable a) => Sexpable (ILCase a) where
sexp (ILCase d patE bodyE) =
List [ Symbol "ILCase", sexp d, sexp patE, sexp bodyE ]
instance (Sexpable a) => Sexpable (ILPat a) where
sexp ilPat =
case ilPat of
ILPatInt d i -> List [ Symbol "ILPatInt", sexp d, Atom $ show i ]
ILPatBool d b -> List [ Symbol "ILPatBool", sexp d, Symbol $ show b ]
ILPatStr d s -> List [ Symbol "ILPatStr", sexp d, Atom s ]
ILPatChar d s -> List [ Symbol "ILPatChar", sexp d, Atom s ]
ILPatTuple d argPatEs ->
List [ Symbol "ILPatTuple", sexp d, toSexpList argPatEs ]
ILPatAdt d ctorId argPatEs ->
List [ Symbol "ILPatAdt", sexp d, sexp ctorId, toSexpList argPatEs ]
ILPatList d argPatEs ->
List [ Symbol "ILPatList", sexp d, toSexpList argPatEs ]
ILPatCons d patHd patTl ->
List [ Symbol "ILPatCons", sexp d, sexp patHd, sexp patTl ]
ILPatId d id ->
List [ Symbol "ILPatId", sexp d, sexp id ]
ILPatWildcard d -> List [ Symbol "ILPatWildcard", sexp d ]
instance Sexpable Prim where
sexp prim =
case prim of
PrimPrintln -> Symbol "PrimPrintln"
PrimReadln -> Symbol "PrimReadln"
PrimCharEq -> Symbol "PrimCharEq"
PrimCharToInt -> Symbol "PrimCharToInt"
PrimIntAdd -> Symbol "PrimIntAdd"
PrimIntSub -> Symbol "PrimIntSub"
PrimIntDiv -> Symbol "PrimIntDiv"
PrimIntMul -> Symbol "PrimIntMul"
PrimIntMod -> Symbol "PrimIntMod"
PrimIntEq -> Symbol "PrimIntEq"
PrimIntLt -> Symbol "PrimIntLt"
PrimIntLeq -> Symbol "PrimIntLeq"
PrimIntGt -> Symbol "PrimIntGt"
PrimIntGeq -> Symbol "PrimIntGeq"
PrimUnknown id -> List [ Symbol "PrimUnknown", sexp id ]
instance (Sexpable a) => Sexpable (IL a) where
sexp il =
case il of
ILCons d a b -> List [ Symbol "ILCons", sexp d, sexp a, sexp b ]
ILApp d rator rands ->
List [ Symbol "ILApp"
, sexp d
, sexp rator
, toSexpList rands
]
ILPrim d prim ->
List [ Symbol "ILPrim"
, sexp d
, sexp prim
]
ILAssign d patE e -> List [ Symbol "ILAssign", sexp d, sexp patE, sexp e ]
ILWithAnn d tyAnn e -> List [ Symbol "ILWithAnn", sexp d, sexp tyAnn, sexp e ]
ILFunDef d id paramIds bodyE ->
List [ Symbol "ILFunDef", sexp d, sexp id, toSexpList paramIds, sexp bodyE ]
ILStruct d typeId fieldInits -> List [ Symbol "ILStruct", sexp d, toSexpList fieldInits ]
ILMakeAdt d typeId ctorIndex argEs ->
List [ Symbol "ILMakeAdt"
, sexp d
, sexp typeId
, Atom $ show ctorIndex
, toSexpList argEs
]
ILGetAdtField d e fieldIndex ->
List [ Symbol "ILGetAdtField", sexp d, sexp e, Atom $ show fieldIndex ]
ILTuple d argEs -> List [ Symbol "ILTuple", sexp d, toSexpList argEs ]
ILSwitch d e clauses -> List [ Symbol "ILSwitch", sexp d, sexp e, toSexpList clauses ]
ILList d argEs -> List [ Symbol "ILList", sexp d, toSexpList argEs ]
ILFun d paramIds bodyE ->
List [ Symbol "ILFun"
, sexp d
, toSexpList paramIds
, sexp bodyE
]
ILInt d i -> List [ Symbol "ILInt", sexp d, Atom $ show i ]
ILBool d b -> List [ Symbol "ILBool", sexp d, Symbol $ show b ]
ILStr d s -> List [ Symbol "ILStr", sexp d, Atom $ printf "\"%s\"" s ]
ILChar d s -> List [ Symbol "ILChar", sexp d, Atom $ printf "\'%s\'" s ]
ILUnit d -> List [ Symbol "ILUnit", sexp d ]
ILRef d id -> List [ Symbol "ILRef", sexp d, sexp id ]
ILBegin d es -> List [ Symbol "ILBegin", sexp d, toSexpList es ]
ILFail d msg -> List [ Symbol "ILFail", sexp d, Atom $ printf "\"%s\"" msg ]
ILMain d paramIds bodyE ->
List [ Symbol "ILMain"
, sexp d
, toSexpList paramIds
, sexp bodyE
]
ILPlaceholder d ph ->
List [ Symbol "ILPlaceholder"
, sexp d
, sexp ph
]
instance Sexpable OverloadPlaceholder where
sexp (PlaceholderMethod methodId ty) =
List [ Symbol "PlaceholderMethod"
, sexp methodId
, sexp ty
]
sexp (PlaceholderDict protoId ty) =
List [ Symbol "PlaceholderDict"
, sexp protoId
, sexp ty
]
instance (Sexpable a) => Sexpable (ILCompUnit a) where
sexp (ILCompUnit d tyDecs es) =
List [ Symbol "ILCompUnit",
sexp d,
toSexpList tyDecs,
toSexpList es
]
instance (Sexpable a) => CompilerOutput (ILCompUnit a) where
render = showSexp
-- | Serialise a map for debugging output.
-- NOTE(review): only the keys are emitted; the values (and the key/value
-- pairing shown in the commented-out version below) are dropped.
sexpOfMap :: (Sexpable k, Sexpable v) => Map.Map k v -> Sexp
-- sexpOfMap m = List $ map (\\(k, v) -> List [ sexp k, sexp v]) $ Map.toList m
sexpOfMap m = toSexpList $ Map.keys m
instance Sexpable Exports where
sexp Exports { exportTypes, exportVars } =
List [ Symbol "Exports"
, sexpOfMap exportVars
]
instance Sexpable Module where
sexp (Module cloEnv paramIds exports) =
List [ Symbol "Module"
, List [ Symbol "CloEnv", sexpOfMap cloEnv ]
, List [ Symbol "Params", toSexpList paramIds ]
, sexp exports
]
instance Sexpable Closure where
sexp (Closure id _ cloEnv _ _) =
List [ Symbol "Fun"
, sexp id
, List [ Symbol "CloEnv", sexpOfMap cloEnv ]
]
instance Sexpable Struct where
sexp (Struct ty fields) =
List [ Symbol "Struct"
, sexp ty
, List $ map (\(id, v) -> List [ sexp id, sexp v ]) fields
]
instance Sexpable Adt where
sexp (Adt id i vs) =
List [ Symbol "Adt"
, sexp id
, Symbol (show i)
, toSexpList vs
]
instance Sexpable Value where
sexp (ValueInt i) = Symbol $ show i
sexp (ValueBool b) = Symbol $ show b
sexp (ValueChar c) = Symbol $ printf "#\\%c" c
sexp (ValueModule m) = sexp m
sexp (ValueFun clo) = sexp clo
sexp (ValueStruct struct) = sexp struct
sexp (ValueAdt adt) = sexp adt
sexp (ValueTuple vs) = List [ Symbol "Tuple", toSexpList vs ]
sexp (ValueList vs)
| null vs = List [ Symbol "List", toSexpList vs ]
| all (\v -> case v of
ValueChar _ -> True
_ -> False
)
vs = Atom $ map (\(ValueChar c) -> c) vs
| otherwise = List [ Symbol "List", toSexpList vs ]
sexp ValueUnit = Symbol "Unit"
sexp (Err str) = List [ Symbol "Error", Atom str ]
instance Sexpable Ty where
sexp (TyApp TyConInt []) = Symbol "Int"
sexp (TyApp TyConBool []) = Symbol "Bool"
sexp (TyApp TyConChar []) = Symbol "Char"
sexp (TyApp tyCon tys) =
List [ Symbol "App"
, sexp tyCon
, toSexpList tys
]
sexp (TyPoly tyVars [] ty) =
List [ Symbol "Poly"
, toSexpList tyVars
, sexp ty
]
sexp (TyPoly tyVars ctx ty) =
List [ Symbol "Poly"
, toSexpList tyVars
, ctxSexp
, sexp ty
]
where
ctxSexp = List $ map (\(ty, protoId) -> List [ sexp ty, sexp protoId ]) ctx
sexp (TyOverloaded ctx ty) =
List [ Symbol "Overloaded"
, ctxSexp
, sexp ty
]
where
ctxSexp = List $ map (\(ty, protoId) -> List [ sexp ty, sexp protoId ]) ctx
sexp (TyVar [] tyVar) = List [ Symbol "Var", sexp tyVar ]
sexp (TyVar straints tyVar) =
List [ Symbol "Var"
, toSexpList straints
, sexp tyVar
]
sexp (TyMeta [] id) = List [ Symbol "Meta", sexp id ]
sexp (TyMeta straints id) =
List [ Symbol "Meta"
, toSexpList straints
, sexp id
]
sexp (TyRef qid) = List [ Symbol "Ref", sexp qid ]
sexp (TyRigid ty) = List [ Symbol "Rigid", sexp ty ]
instance Sexpable TyConstraint where
sexp (TyConstraint protoId) = List [ Symbol "TyConstraint", sexp protoId ]
instance CompilerOutput TyConstraint where
render (TyConstraint protoId) = render protoId
instance CompilerOutput Ty where
render (TyApp TyConInt []) = "Int"
render (TyApp TyConBool []) = "Bool"
render (TyApp TyConChar []) = "Char"
render (TyApp TyConArrow [retTy]) =
printf "(-> %s)" $ render retTy
render (TyApp TyConArrow tys) =
let tyStrs = map render tys
in intercalate " -> " tyStrs
render (TyApp tyCon tys) =
printf "%s<%s>"
(render tyCon)
(renderCommaSep tys)
render (TyPoly _ [] ty) = render ty
render (TyPoly _ ctx ty) =
printf "(%s) => %s"
contextStr
(render ty)
where contextStr = intercalate "," $ map (\(ty, protoId) -> printf "%s : %s" (render ty) (render protoId)) ctx
render (TyVar [] tyVar) = render tyVar
render (TyVar straints tyVar) =
printf "%s(%s)"
(intercalate ", " $ map render straints)
(render tyVar)
render (TyMeta [] id) = render id
render (TyMeta straints id) =
printf "%s(%s)"
(intercalate ", " $ map render straints)
(render id)
render (TyRef qid) = render qid
render (TyOverloaded context ty) =
printf "(%s) => %s"
contextStr
(render ty)
where contextStr = intercalate "," $ map (\(ty, protoId) -> printf "%s(%s)" (render protoId) (render ty)) context
render (TyRigid ty) =
printf "rigid type %s" $ render ty
-- render ty = showSexp ty
instance Sexpable TyCon where
sexp TyConInt = Symbol "Int"
sexp TyConBool = Symbol "Bool"
sexp TyConChar = Symbol "Char"
sexp TyConUnit = Symbol "Unit"
sexp TyConList = Symbol "List"
sexp TyConTuple = Symbol "Tuple"
sexp TyConArrow = Symbol "Arrow"
sexp (TyConStruct fieldNames) =
List [ Symbol "Struct", toSexpList fieldNames ]
sexp (TyConAdt ctorNames) =
List [ Symbol "Adt", toSexpList ctorNames ]
sexp (TyConTyFun tyVarIds ty) =
List [ Symbol "TyFun", toSexpList tyVarIds, sexp ty ]
sexp (TyConUnique id tyCon) =
List [ Symbol "Unique", sexp id, sexp tyCon ]
sexp (TyConTyVar varId) =
List [ Symbol "TyVar", sexp varId ]
instance CompilerOutput TyCon where
render TyConInt = "Int"
render TyConBool = "Bool"
render TyConChar = "Char"
render TyConUnit = "Unit"
render TyConList = "List"
render TyConTuple = "Tuple"
render TyConArrow = "(->)"
render (TyConStruct _) = "<<struct>>"
render (TyConAdt _) = "<<adt>>"
render (TyConUnique id TyConTyFun{}) = render id
render (TyConTyFun tyVarIds ty) = printf "(tyfun<%s> -> %s)" (renderCommaSep tyVarIds) (render ty)
render (TyConTyVar varId) = render varId
instance CompilerOutput Value where
render v =
case v of
ValueInt i -> show i
ValueBool b -> show b
ValueChar c -> printf "'%c'" c
ValueFun (Closure fid fty _ paramIds _) -> printf "fun %s : %s" (show fid) (render fty)
ValueStruct struct -> "struct"
ValueAdt (Adt uid i []) ->
printf "%s" (show uid)
ValueAdt (Adt uid i vs) ->
printf "%s(%s)" (show uid) (concat . intersperse ", " $ map render vs)
ValueTuple vs -> printf "%%(%s)" $ concat . intersperse ", " $ map render vs
ValueList vs@((ValueChar c) : _) ->
let str = map (\(ValueChar c) -> c) vs
in show str
ValueList vs -> printf "[%s]" $ concat . intersperse ", " $ map render vs
ValueUnit -> "Unit"
ValuePrim prim -> "prim"
Err str -> "Error = " ++ str
| Zoetermeer/latro | src/Latro/Semant/Display.hs | mit | 22,780 | 0 | 17 | 7,345 | 8,473 | 4,136 | 4,337 | 574 | 1 |
module Golf where
-- | For n from 1 to the length of the input, the list of every n-th element.
skips :: [a] -> [[a]]
skips xs = [everyN n xs | n <- [1 .. length xs]]

-- | Every n-th element of a list, starting with the n-th one.
everyN :: Int -> [a] -> [a]
everyN n = go . drop (n - 1)
  where
    go [] = []
    go (y:ys) = y : go (drop (n - 1) ys)
-- | Elements strictly greater than both immediate neighbours.
-- Endpoints are never local maxima (they lack a neighbour on one side).
localMaxima :: [Int] -> [Int]
localMaxima xs =
  [b | (a, b, c) <- zip3 xs (drop 1 xs) (drop 2 xs), a < b && b > c]
-- Render a vertical histogram of the digits 0..9,
-- e.g. putStrLn $ histogram [1,2,1]
histogram :: [Int] -> String
histogram xs = bars ++ "==========\n0123456789\n"
  where bars = drawLines (reverse (makeH (group xs)))

-- Bucket the input into ten lists, one per digit 0..9.
group :: [Int] -> [[Int]]
group xs = [ [x | x <- xs, x == d] | d <- [0 .. 9] ]

-- Peel the buckets into horizontal layers, bottom layer first.
makeH :: [[Int]] -> [[[Int]]]
makeH [[], [], [], [], [], [], [], [], [], []] = []
makeH xss = heads xss : makeH (tails xss)

-- The first element (as a sublist) of every bucket.
heads :: [[Int]] -> [[Int]]
heads = map (take 1)

-- Every bucket with its first element removed.
tails :: [[Int]] -> [[Int]]
tails = map (drop 1)

drawLines :: [[[Int]]] -> String
drawLines = concatMap drawLine

drawLine :: [[Int]] -> String
drawLine xss = map draw xss ++ "\n"

-- A star where the bucket still has elements, a blank otherwise.
draw :: [Int] -> Char
draw xs
  | null xs   = ' '
  | otherwise = '*'
| t4sk/upenn-cis194 | hw-03/Golf.hs | mit | 1,056 | 0 | 12 | 231 | 669 | 360 | 309 | 31 | 2 |
-- Algorithms/Greedy/Max Min
-- Thin entry point: delegates to the packaged AngryChildren solution module.
module Main where
import qualified HackerRank.Algorithms.AngryChildren as M
main :: IO ()
main = M.main
| 4e6/sandbox | haskell/hackerrank/AngryChildren.hs | mit | 137 | 0 | 6 | 21 | 31 | 20 | 11 | 4 | 1 |
{-
Copyright 2012-2015 Vidar Holen
This file is part of ShellCheck.
http://www.vidarholen.net/contents/shellcheck
ShellCheck is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
ShellCheck is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
module ShellCheck.ASTLib where
import ShellCheck.AST
import Control.Monad
import Data.List
import Data.Maybe
-- | Is this token one of the shell's loop constructs?
isLoop (T_WhileExpression {}) = True
isLoop (T_UntilExpression {}) = True
isLoop (T_ForIn {}) = True
isLoop (T_ForArithmetic {}) = True
isLoop (T_SelectIn {}) = True
isLoop _ = False
-- Will this split into multiple words when used as an argument?
-- True for any construct the shell subjects to word splitting or globbing.
willSplit x =
  case x of
    T_DollarBraced {} -> True
    T_DollarExpansion {} -> True
    T_Backticked {} -> True
    T_BraceExpansion {} -> True
    T_Glob {} -> True
    T_Extglob {} -> True
    -- A word splits if any of its parts do.
    T_NormalWord _ l -> any willSplit l
    _ -> False
-- Is this token a glob, or a word containing one?
isGlob (T_Extglob {}) = True
isGlob (T_Glob {}) = True
isGlob (T_NormalWord _ l) = any isGlob l
isGlob _ = False
-- Is this shell word a constant?
-- Constant means built only from literals and single-quoted strings,
-- so no expansion can change its value at runtime.
isConstant token =
  case token of
    T_NormalWord _ l -> all isConstant l
    T_DoubleQuoted _ l -> all isConstant l
    T_SingleQuoted _ _ -> True
    T_Literal _ _ -> True
    _ -> False
-- Is this an empty literal?
isEmpty token =
  case token of
    T_NormalWord _ l -> all isEmpty l
    T_DoubleQuoted _ l -> all isEmpty l
    T_SingleQuoted _ "" -> True
    T_Literal _ "" -> True
    _ -> False
-- Quick&lazy oversimplification of commands, throwing away details
-- and returning a list like ["find", ".", "-name", "${VAR}*" ].
-- Any expansion collapses to the placeholder "${VAR}"; unknown tokens to [].
oversimplify token =
    case token of
        (T_NormalWord _ l) -> [concat (concatMap oversimplify l)]
        (T_DoubleQuoted _ l) -> [concat (concatMap oversimplify l)]
        (T_SingleQuoted _ s) -> [s]
        (T_DollarBraced _ _) -> ["${VAR}"]
        (T_DollarArithmetic _ _) -> ["${VAR}"]
        (T_DollarExpansion _ _) -> ["${VAR}"]
        (T_Backticked _ _) -> ["${VAR}"]
        (T_Glob _ s) -> [s]
        (T_Pipeline _ _ [x]) -> oversimplify x
        (T_Literal _ x) -> [x]
        -- Renamed binders: the old 'vars' was unused and 'words' shadowed
        -- Prelude.words.
        (T_SimpleCommand _ _ args) -> concatMap oversimplify args
        (T_Redirecting _ _ foo) -> oversimplify foo
        (T_DollarSingleQuoted _ s) -> [s]
        (T_Annotation _ _ s) -> oversimplify s
        -- Workaround for let "foo = bar" parsing
        (TA_Sequence _ [TA_Expansion _ v]) -> concatMap oversimplify v
        -- '_' instead of a binder named 'otherwise': in a case pattern,
        -- 'otherwise' is just a fresh variable and reads misleadingly.
        _ -> []
-- Turn a SimpleCommand foo -avz --bar=baz into args "a", "v", "z", "bar",
-- each in a tuple of (token, stringFlag). Non-flag arguments are added with
-- stringFlag == "".
-- 'stopCondition' decides (on the simplified string) where flag parsing ends;
-- everything after that point is kept as a plain (token, "") argument.
getFlagsUntil stopCondition (T_SimpleCommand _ _ (_:args)) =
    let tokenAndText = map (\x -> (x, concat $ oversimplify x)) args
        (flagArgs, rest) = break (stopCondition . snd) tokenAndText
    in
        concatMap flag flagArgs ++ map (\(t, _) -> (t, "")) rest
  where
    -- "--foo=bar" yields the single flag "foo"; "-abc" yields "a","b","c".
    flag (x, '-':'-':arg) = [ (x, takeWhile (/= '=') arg) ]
    flag (x, '-':args) = map (\v -> (x, [v])) args
    flag (x, _) = [ (x, "") ]
getFlagsUntil _ _ = error "Internal shellcheck error, please report! (getFlags on non-command)"
-- Get all flags in a GNU way, up until --
getAllFlags = getFlagsUntil (== "--")
-- Get all flags in a BSD way, up until first non-flag argument
getLeadingFlags = getFlagsUntil (not . ("-" `isPrefixOf`))
-- Given a T_DollarBraced, return a simplified version of the string contents.
-- Calling it on anything else is a programming error.
bracedString (T_DollarBraced _ l) = concat $ oversimplify l
bracedString _ = error "Internal shellcheck error, please report! (bracedString on non-variable)"
-- Is this an expansion of multiple items of an array?
-- Matches ${a[@]} and ${!x@}-style expansions, but not length forms like
-- ${#a[@]} (the "#" prefix is excluded).
isArrayExpansion t@(T_DollarBraced _ _) =
    let string = bracedString t in
        "@" `isPrefixOf` string ||
            not ("#" `isPrefixOf` string) && "[@]" `isInfixOf` string
isArrayExpansion _ = False
-- Is it possible that this arg becomes multiple args?
-- True when it is certain (see below) or the word contains a "${!x}"-style
-- indirect expansion, whose result cannot be predicted statically.
mayBecomeMultipleArgs t = willBecomeMultipleArgs t || f t
  where
    f t@(T_DollarBraced _ _) =
        let string = bracedString t in
            "!" `isPrefixOf` string
    f (T_DoubleQuoted _ parts) = any f parts
    f (T_NormalWord _ parts) = any f parts
    f _ = False
-- Is it certain that this word will become multiple words?
-- True for globs, extglobs and brace expansions anywhere in the word.
willBecomeMultipleArgs t = willConcatInAssignment t || f t
  where
    f (T_Extglob {}) = True
    f (T_Glob {}) = True
    f (T_BraceExpansion {}) = True
    f (T_DoubleQuoted _ parts) = any f parts
    f (T_NormalWord _ parts) = any f parts
    f _ = False
-- | Does this token cause implicit concatenation in assignments?
-- Array expansions do, including when they occur inside quotes or words.
willConcatInAssignment t@(T_DollarBraced {}) = isArrayExpansion t
willConcatInAssignment (T_DoubleQuoted _ parts) = any willConcatInAssignment parts
willConcatInAssignment (T_NormalWord _ parts) = any willConcatInAssignment parts
willConcatInAssignment _ = False
-- Maybe get the literal string corresponding to this token
getLiteralString :: Token -> Maybe String
getLiteralString = getLiteralStringExt (const Nothing)
-- Definitely get a literal string, skipping over all non-literals
-- (fromJust is safe here: the fallback always produces Just "").
onlyLiteralString :: Token -> String
onlyLiteralString = fromJust . getLiteralStringExt (const $ return "")
-- Maybe get a literal string, but only if it's an unquoted argument.
getUnquotedLiteral (T_NormalWord _ list) =
    liftM concat $ mapM str list
  where
    str (T_Literal _ s) = return s
    str _ = Nothing
getUnquotedLiteral _ = Nothing
-- Maybe get the literal string of this token and any globs in it.
getGlobOrLiteralString = getLiteralStringExt f
  where
    f (T_Glob _ str) = return str
    f _ = Nothing
-- Maybe get the literal value of a token, using a custom function
-- to map unrecognized Tokens into strings.
-- Word-like containers are concatenated recursively; Nothing from either
-- a child or the fallback makes the whole result Nothing.
getLiteralStringExt :: (Token -> Maybe String) -> Token -> Maybe String
getLiteralStringExt more = g
  where
    allInList = liftM concat . mapM g
    g (T_DoubleQuoted _ l) = allInList l
    g (T_DollarDoubleQuoted _ l) = allInList l
    g (T_NormalWord _ l) = allInList l
    g (TA_Expansion _ l) = allInList l
    g (T_SingleQuoted _ s) = return s
    g (T_Literal _ s) = return s
    g x = more x
-- Is this token a string literal?
isLiteral t = isJust $ getLiteralString t
-- Turn a NormalWord like foo="bar $baz" into a series of constituent elements like [foo=,bar ,$baz]
getWordParts (T_NormalWord _ l) = concatMap getWordParts l
getWordParts (T_DoubleQuoted _ l) = l
getWordParts other = [other]
-- Return a list of NormalWords that would result from brace expansion
-- (via the list monad, so nested expansions multiply out as a cartesian
-- product; 'take 1000' caps combinatorial blowup).
braceExpand (T_NormalWord id list) = take 1000 $ do
    items <- mapM part list
    return $ T_NormalWord id items
  where
    part (T_BraceExpansion id items) = do
        item <- items
        braceExpand item
    part x = return x
-- Maybe get the command name of a token representing a command.
-- Unwraps redirections and annotations; Nothing when there is no word in
-- command position or the name is not a literal string.
getCommandName t =
    case t of
        T_Redirecting _ _ w -> getCommandName w
        T_SimpleCommand _ _ (w:_) -> getLiteralString w
        -- 'inner' rather than re-binding 't': avoids shadowing the argument.
        T_Annotation _ _ inner -> getCommandName inner
        -- '_' instead of a binder named 'otherwise', which in a case pattern
        -- is just a fresh variable and reads misleadingly.
        _ -> Nothing
-- Get the basename of a token representing a command
-- (e.g. "/usr/bin/grep" -> "grep").
getCommandBasename = liftM basename . getCommandName
  where
    basename = reverse . takeWhile (/= '/') . reverse
-- Is this token an assignment (possibly wrapped in redirections/annotations)?
-- A simple command with assignments but no words, e.g. "foo=bar", counts.
isAssignment t =
    case t of
        T_Redirecting _ _ w -> isAssignment w
        -- '_:_' instead of the unused binder 'w'.
        T_SimpleCommand _ (_:_) [] -> True
        T_Assignment {} -> True
        T_Annotation _ _ w -> isAssignment w
        -- '_' instead of a binder named 'otherwise' (a fresh variable in a
        -- case pattern, which reads misleadingly).
        _ -> False
-- Is this command nothing but redirections, e.g. "> file"?
isOnlyRedirection t =
    case t of
        T_Pipeline _ _ [x] -> isOnlyRedirection x
        T_Annotation _ _ w -> isOnlyRedirection w
        T_Redirecting _ (_:_) c -> isOnlyRedirection c
        T_SimpleCommand _ [] [] -> True
        -- '_' instead of a binder named 'otherwise' (a fresh variable in a
        -- case pattern, which reads misleadingly).
        _ -> False
-- Is this token a function definition?
isFunction t = case t of T_Function {} -> True; _ -> False
-- Get the list of commands from tokens that contain them, such as
-- the body of while loops and if statements. Each inner list is one
-- sequence of commands; tokens without bodies yield [].
getCommandSequences t =
    case t of
        T_Script _ _ cmds -> [cmds]
        T_BraceGroup _ cmds -> [cmds]
        T_Subshell _ cmds -> [cmds]
        T_WhileExpression _ _ cmds -> [cmds]
        T_UntilExpression _ _ cmds -> [cmds]
        T_ForIn _ _ _ cmds -> [cmds]
        T_ForArithmetic _ _ _ _ cmds -> [cmds]
        -- Each then-branch body plus the else-branch body.
        T_IfExpression _ thens elses -> map snd thens ++ [elses]
        -- '_' instead of a binder named 'otherwise' (a fresh variable in a
        -- case pattern, which reads misleadingly).
        _ -> []
| coolhacks/scripts-hacks | examples/shellcheck-master/ShellCheck/ASTLib.hs | mit | 8,812 | 0 | 14 | 2,223 | 2,378 | 1,201 | 1,177 | 162 | 16 |
module Operators where
import Syntax
import Data.Generics.Uniplate.Data(rewriteBiM)
import Data.HashMap.Strict(fromList)
import Control.Monad((>=>))
{-
In the beginning every operator is parsed right associative.
This module changes the operators in the syntax tree
to their specified precedence and associativity
This blog post turned out to be really helpful
http://qfpl.io/posts/quick-and-easy-user-defined-operators/
-}
-- Re-associate all expression, pattern and type operators in one pass,
-- rewriting until each tree matches its declared fixity.
fixAssoc operatorTable =
    let
        f = fixAssocE operatorTable
        g = fixAssocP operatorTable
        h = fixAssocT operatorTable
    in rewriteBiM f >=> rewriteBiM g >=> rewriteBiM h
{- Read the fixity declarations into a map of operator -> (assoc, prec) -}
captureAssocs (ModuleDeclaration _ decls) =
    fromList [(op, (assoc, prec)) | FixityDeclaration assoc prec op _ <- decls]
-- Rewrite steps for expression operators: a left-nested tree was parsed
-- left-associatively, a right-nested one right-associatively; the helper
-- decides whether to rotate.
fixAssocE operatorTable (InfixOperator (InfixOperator e1 child e2) root e3) =
    fixAssocAux operatorTable LeftAssociative InfixOperator e1 child e2 root e3
fixAssocE operatorTable (InfixOperator e1 root (InfixOperator e2 child e3)) =
    fixAssocAux operatorTable RightAssociative InfixOperator e1 child e2 root e3
fixAssocE _ _ = Right Nothing
-- Same rewrite steps, for pattern operators.
fixAssocP operatorTable (PatternInfixOperator (PatternInfixOperator e1 child e2) root e3) =
    fixAssocAux operatorTable LeftAssociative PatternInfixOperator e1 child e2 root e3
fixAssocP operatorTable (PatternInfixOperator e1 root (PatternInfixOperator e2 child e3)) =
    fixAssocAux operatorTable RightAssociative PatternInfixOperator e1 child e2 root e3
fixAssocP _ _ = Right Nothing
-- Same rewrite steps, for type operators.
fixAssocT operatorTable (TypeInfixOperator (TypeInfixOperator e1 child e2) root e3) =
    fixAssocAux operatorTable LeftAssociative TypeInfixOperator e1 child e2 root e3
fixAssocT operatorTable (TypeInfixOperator e1 root (TypeInfixOperator e2 child e3)) =
    fixAssocAux operatorTable RightAssociative TypeInfixOperator e1 child e2 root e3
fixAssocT _ _ = Right Nothing
-- General helper function for patterns, types and expressions.
-- 'prevAssoc' records how the tree is currently nested; the case table
-- returns Just a rotated tree when the declared fixities demand it,
-- Nothing when the tree is already correct, and an error when operators
-- of equal precedence but opposite associativity are mixed.
fixAssocAux operatorTable prevAssoc constr e1 child e2 root e3 = do
    (assoc1, prec1) <- findEither root operatorTable
    (assoc2, prec2) <- findEither child operatorTable
    let left = constr (constr e1 root e2) child e3
    let right = constr e1 child (constr e2 root e3)
    case (compare prec1 prec2, prevAssoc, assoc1, assoc2) of
        (EQ, LeftAssociative, RightAssociative, RightAssociative) -> Right (Just right)
        (EQ, RightAssociative, LeftAssociative, LeftAssociative) -> Right (Just left)
        (EQ, _, RightAssociative, LeftAssociative) -> fixAssocError child root
        (EQ, _, LeftAssociative, RightAssociative) -> fixAssocError child root
        (GT, LeftAssociative, _, _) -> Right (Just right)
        (GT, RightAssociative, _, _) -> Right (Just left)
        _ -> Right Nothing
-- | Error for two adjacent operators whose fixities cannot be combined.
fixAssocError child root = Left msg
  where msg = concat ["Cannot mix operator ", pretty child, " and ", pretty root]
| kindl/Hypatia | src/Operators.hs | mit | 2,874 | 0 | 12 | 507 | 803 | 411 | 392 | 43 | 7 |
{-# LANGUAGE ScopedTypeVariables #-}
module Types(stampTarget, safeStampTarget, doesTargetExist, doesDoFileExist, findDoFile, Stamp(..), DoFile(..), Target(..)) where
import Control.Exception (catch, SomeException(..))
import Control.Monad (liftM, filterM)
import Data.Bool (bool)
import Data.Maybe (isNothing, listToMaybe)
import System.Directory (doesFileExist, doesDirectoryExist)
import System.FilePath (takeExtensions, dropExtension, dropExtensions, isDrive, splitFileName, (</>), takeDirectory, pathSeparator)
import System.Posix.Files (getFileStatus, modificationTimeHiRes)
import Data.Time.Clock.POSIX (POSIXTime)
import FilePathUtil
-- This module provides basic types for redo
---------------------------------------------------------------------
-- Basic Redo Type Definitions:
---------------------------------------------------------------------
newtype Stamp = Stamp { unStamp :: POSIXTime } deriving (Show, Eq, Ord) -- Timestamp stamp marking a file's current status
newtype DoFile = DoFile { unDoFile :: FilePath } deriving (Show, Eq) -- The absolute path to a do file
newtype Target = Target { unTarget :: FilePath } deriving (Show, Eq) -- The absolute path to a target file
-- Common functions that use these types:
---------------------------------------------------------------------
-- Generate stamps from targets
---------------------------------------------------------------------
-- Get the stamp of the target which marks its current status
stampTarget :: Target -> IO Stamp
stampTarget = getTimeStamp
-- Get the stamp of the target if it exists, otherwise return Nothing
-- (any exception while statting the file is treated as "no stamp").
safeStampTarget :: Target -> IO (Maybe Stamp)
safeStampTarget target = catch (Just <$> stampTarget target) (\(_ :: SomeException) -> return Nothing)
-- Get the target timestamp (old version, now using real POSIXTime instead of string, below)
-- newtype Stamp = Stamp { unStamp :: String } deriving (Show, Eq) -- Timestamp or hash stamp of a file
-- getTimeStamp :: Target -> IO Stamp
-- getTimeStamp target = do
--   st <- getFileStatus $ unTarget target
--   return $ Stamp $ show (modificationTimeHiRes st) ++ show (fileID st) ++ show (fileSize st)
-- Get the target's high-resolution modification time as its stamp
getTimeStamp :: Target -> IO Stamp
getTimeStamp target = do
  st <- getFileStatus $ unTarget target
  return $ Stamp $ modificationTimeHiRes st
-- Hash the file (no longer supported, using timestamps for speed)
-- getTargetHashStamp :: Target -> IO Stamp
-- getTargetHashStamp file = Stamp <$> hash `liftM` unStamp <$> readMetaFile (unTarget file)
---------------------------------------------------------------------
-- Existence functions:
---------------------------------------------------------------------
-- | True when the .do script exists as a regular file.
doesDoFileExist :: DoFile -> IO Bool
doesDoFileExist = doesFileExist . unDoFile
-- | True when the target exists as either a file or a directory.
doesTargetExist :: Target -> IO Bool
doesTargetExist target = do
  isFile <- doesFileExist path
  isDir <- doesDirectoryExist path
  return (isFile || isDir)
  where path = unTarget target
---------------------------------------------------------------------
-- Find do files.
---------------------------------------------------------------------
-- Returns the absolute path to the do file given the absolute path to the target:
-- first "<target>.do" next to the target, then default*.do files walking up
-- towards the filesystem root.
findDoFile :: Target -> IO (Maybe DoFile)
findDoFile absTarget = do
  let (targetDir, targetName) = splitFileName $ unTarget absTarget
  let targetDo = DoFile $ removeDotDirs $ unTarget absTarget ++ ".do"
  bool (defaultDoPath targetDir targetName) (return $ Just targetDo) =<< doesDoFileExist targetDo
  where
    -- Try to find matching .do file by checking directories upwards of "." until a suitable match is
    -- found or "/" is reached.
    -- NOTE(review): 'last absPath'' is partial -- an empty path string would
    -- crash here. TODO confirm callers always pass a non-empty absolute path.
    defaultDoPath :: FilePath -> FilePath -> IO (Maybe DoFile)
    defaultDoPath absPath' name = do
      let absPath = if last absPath' == pathSeparator then takeDirectory absPath' else absPath'
      doFile <- listToMaybe `liftM` filterM doesDoFileExist (candidates absPath name)
      if isNothing doFile && not (isDrive absPath) then defaultDoPath (takeDirectory absPath) name
      else return doFile
    -- List the possible default.do file candidates relative to the given path:
    candidates path name = map (DoFile . (path </>)) (defaults name)
    defaults name = map (++ ".do") (getDefaultDo $ "default" ++ takeExtensions name)
    -- Form all possible matching default.do files in order of preference:
    -- e.g. "a.b.c" -> ["default.b.c", "default.c", "default"] (each + ".do").
    getDefaultDo :: FilePath -> [FilePath]
    getDefaultDo filename = filename : if smallfilename == filename then [] else getDefaultDo $ dropFirstExtension filename
      where smallfilename = dropExtension filename
            basefilename = dropExtensions filename
            dropFirstExtension fname = basefilename ++ takeExtensions (drop 1 (takeExtensions fname))
| dinkelk/redo | src/Types.hs | mit | 4,810 | 0 | 14 | 740 | 869 | 484 | 385 | 45 | 4 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
module IHaskell.Display.Widgets.Common where
import Data.Aeson
import Data.Aeson.Types (emptyObject)
import Data.Text (pack, Text)
import IHaskell.Display (IHaskellWidget)
import IHaskell.Eval.Widgets (widgetSendClose)
import qualified IHaskell.Display.Widgets.Singletons as S
-- Short pattern synonyms for the widget-field singletons in
-- "IHaskell.Display.Widgets.Singletons", so field names can be written
-- without the 'S.S' prefix at use sites.
pattern ViewModule = S.SViewModule
pattern ViewName = S.SViewName
pattern MsgThrottle = S.SMsgThrottle
pattern Version = S.SVersion
pattern DisplayHandler = S.SDisplayHandler
pattern Visible = S.SVisible
pattern CSS = S.SCSS
pattern DOMClasses = S.SDOMClasses
pattern Width = S.SWidth
pattern Height = S.SHeight
pattern Padding = S.SPadding
pattern Margin = S.SMargin
pattern Color = S.SColor
pattern BackgroundColor = S.SBackgroundColor
pattern BorderColor = S.SBorderColor
pattern BorderWidth = S.SBorderWidth
pattern BorderRadius = S.SBorderRadius
pattern BorderStyle = S.SBorderStyle
pattern FontStyle = S.SFontStyle
pattern FontWeight = S.SFontWeight
pattern FontSize = S.SFontSize
pattern FontFamily = S.SFontFamily
pattern Description = S.SDescription
pattern ClickHandler = S.SClickHandler
pattern SubmitHandler = S.SSubmitHandler
pattern Disabled = S.SDisabled
pattern StringValue = S.SStringValue
pattern Placeholder = S.SPlaceholder
pattern Tooltip = S.STooltip
pattern Icon = S.SIcon
pattern ButtonStyle = S.SButtonStyle
pattern B64Value = S.SB64Value
pattern ImageFormat = S.SImageFormat
pattern BoolValue = S.SBoolValue
pattern Options = S.SOptions
pattern SelectedLabel = S.SSelectedLabel
pattern SelectedValue = S.SSelectedValue
pattern SelectionHandler = S.SSelectionHandler
pattern Tooltips = S.STooltips
pattern Icons = S.SIcons
pattern SelectedLabels = S.SSelectedLabels
pattern SelectedValues = S.SSelectedValues
pattern IntValue = S.SIntValue
pattern StepInt = S.SStepInt
pattern MaxInt = S.SMaxInt
pattern MinInt = S.SMinInt
pattern IntPairValue = S.SIntPairValue
pattern LowerInt = S.SLowerInt
pattern UpperInt = S.SUpperInt
pattern FloatValue = S.SFloatValue
pattern StepFloat = S.SStepFloat
pattern MaxFloat = S.SMaxFloat
pattern MinFloat = S.SMinFloat
pattern FloatPairValue = S.SFloatPairValue
pattern LowerFloat = S.SLowerFloat
pattern UpperFloat = S.SUpperFloat
pattern Orientation = S.SOrientation
pattern ShowRange = S.SShowRange
pattern ReadOut = S.SReadOut
pattern SliderColor = S.SSliderColor
pattern BarStyle = S.SBarStyle
pattern ChangeHandler = S.SChangeHandler
pattern Children = S.SChildren
pattern OverflowX = S.SOverflowX
pattern OverflowY = S.SOverflowY
pattern BoxStyle = S.SBoxStyle
pattern Flex = S.SFlex
pattern Pack = S.SPack
pattern Align = S.SAlign
pattern Titles = S.STitles
pattern SelectedIndex = S.SSelectedIndex
-- | Close a widget's comm channel (sends the close message with an empty body).
closeWidget :: IHaskellWidget w => w -> IO ()
closeWidget w = widgetSendClose w emptyObject
-- | An Integer that serialises to JSON as a decimal string, since some
-- ipywidgets fields expect numbers in string form.
newtype StrInt = StrInt Integer
  deriving (Num, Ord, Eq, Enum)
instance ToJSON StrInt where
  toJSON (StrInt x) = toJSON . pack $ show x
-- | Pre-defined border styles. Each serialises to its CSS border-style
-- keyword; the Default value serialises to "" (frontend default).
data BorderStyleValue = NoBorder
                      | HiddenBorder
                      | DottedBorder
                      | DashedBorder
                      | SolidBorder
                      | DoubleBorder
                      | GrooveBorder
                      | RidgeBorder
                      | InsetBorder
                      | OutsetBorder
                      | InitialBorder
                      | InheritBorder
                      | DefaultBorder
instance ToJSON BorderStyleValue where
  toJSON NoBorder = "none"
  toJSON HiddenBorder = "hidden"
  toJSON DottedBorder = "dotted"
  toJSON DashedBorder = "dashed"
  toJSON SolidBorder = "solid"
  toJSON DoubleBorder = "double"
  toJSON GrooveBorder = "groove"
  toJSON RidgeBorder = "ridge"
  toJSON InsetBorder = "inset"
  toJSON OutsetBorder = "outset"
  toJSON InitialBorder = "initial"
  toJSON InheritBorder = "inherit"
  toJSON DefaultBorder = ""
-- | Font style values (CSS font-style keywords; Default -> "").
data FontStyleValue = NormalFont
                    | ItalicFont
                    | ObliqueFont
                    | InitialFont
                    | InheritFont
                    | DefaultFont
instance ToJSON FontStyleValue where
  toJSON NormalFont = "normal"
  toJSON ItalicFont = "italic"
  toJSON ObliqueFont = "oblique"
  toJSON InitialFont = "initial"
  toJSON InheritFont = "inherit"
  toJSON DefaultFont = ""
-- | Font weight values (CSS font-weight keywords; Default -> "").
data FontWeightValue = NormalWeight
                     | BoldWeight
                     | BolderWeight
                     | LighterWeight
                     | InheritWeight
                     | InitialWeight
                     | DefaultWeight
instance ToJSON FontWeightValue where
  toJSON NormalWeight = "normal"
  toJSON BoldWeight = "bold"
  toJSON BolderWeight = "bolder"
  toJSON LighterWeight = "lighter"
  toJSON InheritWeight = "inherit"
  toJSON InitialWeight = "initial"
  toJSON DefaultWeight = ""
-- | Pre-defined button styles (Bootstrap-style contextual names; Default -> "").
data ButtonStyleValue = PrimaryButton
                      | SuccessButton
                      | InfoButton
                      | WarningButton
                      | DangerButton
                      | DefaultButton
instance ToJSON ButtonStyleValue where
  toJSON PrimaryButton = "primary"
  toJSON SuccessButton = "success"
  toJSON InfoButton = "info"
  toJSON WarningButton = "warning"
  toJSON DangerButton = "danger"
  toJSON DefaultButton = ""
-- | Pre-defined progress-bar styles (contextual names; Default -> "").
data BarStyleValue = SuccessBar
                   | InfoBar
                   | WarningBar
                   | DangerBar
                   | DefaultBar
instance ToJSON BarStyleValue where
  toJSON SuccessBar = "success"
  toJSON InfoBar = "info"
  toJSON WarningBar = "warning"
  toJSON DangerBar = "danger"
  toJSON DefaultBar = ""
-- | Image formats for ImageWidget; serialises via 'show' ("png"/"svg"/"jpg").
data ImageFormatValue = PNG
                      | SVG
                      | JPG
                      deriving Eq
instance Show ImageFormatValue where
  show PNG = "png"
  show SVG = "svg"
  show JPG = "jpg"
instance ToJSON ImageFormatValue where
  toJSON = toJSON . pack . show
-- | Options for selection widgets: either plain labels, or label/value pairs.
data SelectionOptions = OptionLabels [Text]
                      | OptionDict [(Text, Text)]
-- | Orientation values for sliders and similar widgets.
data OrientationValue = HorizontalOrientation
                      | VerticalOrientation
instance ToJSON OrientationValue where
  toJSON HorizontalOrientation = "horizontal"
  toJSON VerticalOrientation = "vertical"
-- | CSS overflow keywords; Default -> "".
data OverflowValue = VisibleOverflow
                   | HiddenOverflow
                   | ScrollOverflow
                   | AutoOverflow
                   | InitialOverflow
                   | InheritOverflow
                   | DefaultOverflow
instance ToJSON OverflowValue where
  toJSON VisibleOverflow = "visible"
  toJSON HiddenOverflow = "hidden"
  toJSON ScrollOverflow = "scroll"
  toJSON AutoOverflow = "auto"
  toJSON InitialOverflow = "initial"
  toJSON InheritOverflow = "inherit"
  toJSON DefaultOverflow = ""
-- | Contextual box styles; Default -> "".
data BoxStyleValue = SuccessBox
                   | InfoBox
                   | WarningBox
                   | DangerBox
                   | DefaultBox
instance ToJSON BoxStyleValue where
  toJSON SuccessBox = "success"
  toJSON InfoBox = "info"
  toJSON WarningBox = "warning"
  toJSON DangerBox = "danger"
  toJSON DefaultBox = ""
-- | Flexbox pack/align locations.
data LocationValue = StartLocation
                   | CenterLocation
                   | EndLocation
                   | BaselineLocation
                   | StretchLocation
instance ToJSON LocationValue where
  toJSON StartLocation = "start"
  toJSON CenterLocation = "center"
  toJSON EndLocation = "end"
  toJSON BaselineLocation = "baseline"
  toJSON StretchLocation = "stretch"
| wyager/IHaskell | ihaskell-display/ihaskell-widgets/src/IHaskell/Display/Widgets/Common.hs | mit | 7,917 | 0 | 8 | 2,089 | 1,706 | 905 | 801 | 223 | 1 |
module Data.Mole.Builder.Image where
import Control.Concurrent.STM
import qualified Data.Set as S
import qualified Data.ByteString as BS
import qualified Data.Text as T
import Data.Maybe
import Data.Time
import Text.Printf
import Data.Mole.Types
import Data.Mole.Builder.Internal.Fingerprint
import qualified Network.Kraken as K
import System.Environment
import System.FilePath
import System.Directory
import System.Posix.Files
imageBuilder :: String -> String -> Handle -> AssetId -> IO Builder
imageBuilder src contentType h aId = do
originalBody <- BS.readFile src
let fp = contentHash originalBody
cacheDir <- lookupEnv "XDG_CACHE_DIR"
let cacheName = (fromMaybe ".cache" cacheDir) </> "kraken" </> T.unpack fp
inCache <- fileExist cacheName
body <- if inCache
then do BS.readFile cacheName
else case krakenH h of
Nothing -> return originalBody
Just kh -> do
atomically $ takeTMVar (lock h)
logMessage' h aId $ "Compressing image with Kraken..."
res <- K.compressImage kh (K.Options Nothing Nothing Nothing) originalBody
case res of
Left _ -> do
atomically $ putTMVar (lock h) ()
return originalBody
Right body -> do
createDirectoryIfMissing True $ takeDirectory cacheName
BS.writeFile cacheName body
atomically $ putTMVar (lock h) ()
return body
let ol = BS.length originalBody
let nl = BS.length body
let ratio :: Double
ratio = (100.0 * ((fromIntegral nl :: Double) / (fromIntegral ol)))
logMessage' h aId $ concat
[ "Compressed image from "
, show ol
, " to "
, show nl
, " bytes ("
, printf "%.2f" ratio
, "%)"
]
return $ Builder
{ assetSources = S.singleton src
, assetDependencies = S.empty
, packageAsset = const $ Right $ Result (PublicIdentifier $ fingerprint body src) $ Just (body, contentType)
, sourceFingerprint = originalBody
}
logMessage' :: Handle -> AssetId -> String -> IO ()
logMessage' h aId msg = do
now <- getCurrentTime
atomically $ writeTQueue (messages h) (Message now aId msg)
| wereHamster/mole | src/Data/Mole/Builder/Image.hs | mit | 2,487 | 0 | 22 | 864 | 675 | 339 | 336 | 60 | 4 |
{-# LANGUAGE TypeFamilies #-}
module Glucose.Parser.Tokens where
import Data.List as List
import qualified Data.List.NonEmpty as NonEmpty
import Text.Megaparsec
import qualified Glucose.Error as Glucose
import Glucose.Lexer.Location
import Glucose.Lexer.SyntacticToken
-- Let megaparsec consume a list of lexer tokens as its input stream.
instance Stream [SyntacticToken] where
  type Token [SyntacticToken] = SyntacticToken
  uncons = List.uncons
  -- Positions come from the token's own lexer location, not from counting
  -- characters.
  updatePos _ _ (SourcePos f _ _) t = (fromLocation f start, fromLocation f start) where
    start = location t
instance ShowToken SyntacticToken where
  showTokens = concatMap showToken
-- Convert a lexer 'Location' to a megaparsec 'SourcePos' in the given file.
fromLocation :: String -> Location -> SourcePos
fromLocation file (Location _ line col) = SourcePos file (intPos line) (intPos col)
-- Convert back; the codepoint offset (first field) is unknown, so 0 is used.
toLocation :: SourcePos -> Location
toLocation (SourcePos _ line col) = Location 0 (fromIntegral $ unPos line) (fromIntegral $ unPos col)
-- NOTE(review): 'unsafePos' requires a positive argument -- assumes lexer
-- lines/columns are 1-based. TODO confirm.
intPos :: Int -> Pos
intPos = unsafePos . fromIntegral
-- Wrap a megaparsec parse error as a Glucose compile error.
-- NOTE(review): 'tail' drops the first line of the pretty-printed error
-- (presumably the position header) and is partial on empty output.
fromParseError :: (ShowErrorComponent e, ShowToken t, Ord t) => ParseError t e -> Glucose.CompileError
fromParseError e = Glucose.CompileError location $ Glucose.ParseError messages where
  location = toLocation . NonEmpty.head $ errorPos e
  messages = tail . lines $ parseErrorPretty e
| sardonicpresence/glucose | src/Glucose/Parser/Tokens.hs | mit | 1,217 | 0 | 9 | 212 | 364 | 195 | 169 | 25 | 1 |
{-# LANGUAGE RankNTypes #-}
module Data.BoehmBerarducci.Either where
import Prelude hiding (Either, Left, Right, either)
-- | Boehm-Berarducci (Church) encoding of Either: a value is its own case
-- analysis, taking one continuation per constructor.
newtype Either a b = Either {either :: forall r . (a -> r) -> (b -> r) -> r}

-- | Inject into the left alternative: always invokes the first continuation.
left :: a -> Either a b
left a = Either (\onLeft _ -> onLeft a)

-- | Inject into the right alternative: always invokes the second continuation.
right :: b -> Either a b
right b = Either (\_ onRight -> onRight b)
-- Functor maps over the Right side only, rebuilding via the injectors.
instance Functor (Either a) where
  fmap f xs = either xs left (right . f)
-- Applicative short-circuits on the first Left, as for data Either.
instance Applicative (Either a) where
  pure = right
  fs <*> xs = either fs left (\f -> fmap f xs)
-- Monad sequences Right values and propagates Left unchanged.
instance Monad (Either a) where
  xs >>= f = either xs left (\x -> f x)
-- Show mirrors the derived output for the ordinary Either data type.
instance (Show a, Show b) => Show (Either a b) where
  show xs = either xs (\u -> "Left " ++ show u) (\u -> "Right " ++ show u)
-- Eq: equal only when both are the same constructor with equal payloads,
-- checked by running one value's case analysis inside the other's.
instance (Eq a, Eq b) => Eq (Either a b) where
  xs == ys = either xs isLeftEqual isRightEqual where
    isLeftEqual x = either ys (\y -> x == y) (const False)
    isRightEqual x = either ys (const False) (\y -> x == y)
| rcalsaverini/boehm-berarducci | haskell/src/Data/BoehmBerarducci/Either.hs | mit | 956 | 0 | 11 | 237 | 468 | 245 | 223 | 21 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticloadbalancingv2-listenerrule-authenticateoidcconfig.html
module Stratosphere.ResourceProperties.ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig where
import Stratosphere.ResourceImports
-- | Full data type definition for
-- ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig. See
-- 'elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig' for a more
-- convenient constructor.
-- Optional CloudFormation properties are 'Maybe'; required ones are bare.
data ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig =
  ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig
  { _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigAuthenticationRequestExtraParams :: Maybe Object
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigAuthorizationEndpoint :: Val Text
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigClientId :: Val Text
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigClientSecret :: Val Text
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigIssuer :: Val Text
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigOnUnauthenticatedRequest :: Maybe (Val Text)
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigScope :: Maybe (Val Text)
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigSessionCookieName :: Maybe (Val Text)
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigSessionTimeout :: Maybe (Val Integer)
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigTokenEndpoint :: Val Text
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigUserInfoEndpoint :: Val Text
  } deriving (Show, Eq)
-- Serialise to the CloudFormation JSON shape; absent optional fields are
-- omitted from the object rather than emitted as null.
instance ToJSON ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig where
  toJSON ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig{..} =
    object $
    catMaybes
    [ fmap (\v -> ("AuthenticationRequestExtraParams", toJSON v)) _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigAuthenticationRequestExtraParams
    , Just ("AuthorizationEndpoint", toJSON _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigAuthorizationEndpoint)
    , Just ("ClientId", toJSON _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigClientId)
    , Just ("ClientSecret", toJSON _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigClientSecret)
    , Just ("Issuer", toJSON _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigIssuer)
    , fmap (\v -> ("OnUnauthenticatedRequest", toJSON v)) _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigOnUnauthenticatedRequest
    , fmap (\v -> ("Scope", toJSON v)) _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigScope
    , fmap (\v -> ("SessionCookieName", toJSON v)) _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigSessionCookieName
    , fmap (\v -> ("SessionTimeout", toJSON v)) _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigSessionTimeout
    , Just ("TokenEndpoint", toJSON _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigTokenEndpoint)
    , Just ("UserInfoEndpoint", toJSON _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigUserInfoEndpoint)
    ]
-- | Constructor for
-- 'ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig' containing
-- required fields as arguments; all optional fields start as 'Nothing'.
elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig
  :: Val Text -- ^ 'elbvlraocAuthorizationEndpoint'
  -> Val Text -- ^ 'elbvlraocClientId'
  -> Val Text -- ^ 'elbvlraocClientSecret'
  -> Val Text -- ^ 'elbvlraocIssuer'
  -> Val Text -- ^ 'elbvlraocTokenEndpoint'
  -> Val Text -- ^ 'elbvlraocUserInfoEndpoint'
  -> ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig
elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig authEndpoint clientId clientSecret issuer tokenEndpoint userInfoEndpoint =
  ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig
  { _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigAuthenticationRequestExtraParams = Nothing
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigAuthorizationEndpoint = authEndpoint
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigClientId = clientId
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigClientSecret = clientSecret
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigIssuer = issuer
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigOnUnauthenticatedRequest = Nothing
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigScope = Nothing
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigSessionCookieName = Nothing
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigSessionTimeout = Nothing
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigTokenEndpoint = tokenEndpoint
  , _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigUserInfoEndpoint = userInfoEndpoint
  }
-- Lenses: one per record field; each URL links the corresponding
-- CloudFormation property documentation.
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticloadbalancingv2-listenerrule-authenticateoidcconfig.html#cfn-elasticloadbalancingv2-listenerrule-authenticateoidcconfig-authenticationrequestextraparams
elbvlraocAuthenticationRequestExtraParams :: Lens' ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig (Maybe Object)
elbvlraocAuthenticationRequestExtraParams = lens _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigAuthenticationRequestExtraParams (\s a -> s { _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigAuthenticationRequestExtraParams = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticloadbalancingv2-listenerrule-authenticateoidcconfig.html#cfn-elasticloadbalancingv2-listenerrule-authenticateoidcconfig-authorizationendpoint
elbvlraocAuthorizationEndpoint :: Lens' ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig (Val Text)
elbvlraocAuthorizationEndpoint = lens _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigAuthorizationEndpoint (\s a -> s { _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigAuthorizationEndpoint = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticloadbalancingv2-listenerrule-authenticateoidcconfig.html#cfn-elasticloadbalancingv2-listenerrule-authenticateoidcconfig-clientid
elbvlraocClientId :: Lens' ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig (Val Text)
elbvlraocClientId = lens _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigClientId (\s a -> s { _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigClientId = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticloadbalancingv2-listenerrule-authenticateoidcconfig.html#cfn-elasticloadbalancingv2-listenerrule-authenticateoidcconfig-clientsecret
elbvlraocClientSecret :: Lens' ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig (Val Text)
elbvlraocClientSecret = lens _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigClientSecret (\s a -> s { _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigClientSecret = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticloadbalancingv2-listenerrule-authenticateoidcconfig.html#cfn-elasticloadbalancingv2-listenerrule-authenticateoidcconfig-issuer
elbvlraocIssuer :: Lens' ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig (Val Text)
elbvlraocIssuer = lens _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigIssuer (\s a -> s { _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigIssuer = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticloadbalancingv2-listenerrule-authenticateoidcconfig.html#cfn-elasticloadbalancingv2-listenerrule-authenticateoidcconfig-onunauthenticatedrequest
elbvlraocOnUnauthenticatedRequest :: Lens' ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig (Maybe (Val Text))
elbvlraocOnUnauthenticatedRequest = lens _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigOnUnauthenticatedRequest (\s a -> s { _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigOnUnauthenticatedRequest = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticloadbalancingv2-listenerrule-authenticateoidcconfig.html#cfn-elasticloadbalancingv2-listenerrule-authenticateoidcconfig-scope
elbvlraocScope :: Lens' ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig (Maybe (Val Text))
elbvlraocScope = lens _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigScope (\s a -> s { _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigScope = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticloadbalancingv2-listenerrule-authenticateoidcconfig.html#cfn-elasticloadbalancingv2-listenerrule-authenticateoidcconfig-sessioncookiename
elbvlraocSessionCookieName :: Lens' ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig (Maybe (Val Text))
elbvlraocSessionCookieName = lens _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigSessionCookieName (\s a -> s { _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigSessionCookieName = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticloadbalancingv2-listenerrule-authenticateoidcconfig.html#cfn-elasticloadbalancingv2-listenerrule-authenticateoidcconfig-sessiontimeout
elbvlraocSessionTimeout :: Lens' ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig (Maybe (Val Integer))
elbvlraocSessionTimeout = lens _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigSessionTimeout (\s a -> s { _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigSessionTimeout = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticloadbalancingv2-listenerrule-authenticateoidcconfig.html#cfn-elasticloadbalancingv2-listenerrule-authenticateoidcconfig-tokenendpoint
elbvlraocTokenEndpoint :: Lens' ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig (Val Text)
elbvlraocTokenEndpoint = lens _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigTokenEndpoint (\s a -> s { _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigTokenEndpoint = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticloadbalancingv2-listenerrule-authenticateoidcconfig.html#cfn-elasticloadbalancingv2-listenerrule-authenticateoidcconfig-userinfoendpoint
elbvlraocUserInfoEndpoint :: Lens' ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig (Val Text)
elbvlraocUserInfoEndpoint = lens _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigUserInfoEndpoint (\s a -> s { _elasticLoadBalancingV2ListenerRuleAuthenticateOidcConfigUserInfoEndpoint = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/ElasticLoadBalancingV2ListenerRuleAuthenticateOidcConfig.hs | mit | 10,846 | 0 | 13 | 685 | 1,075 | 607 | 468 | 78 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalyticsv2-applicationreferencedatasource-recordformat.html
module Stratosphere.ResourceProperties.KinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormat where
import Stratosphere.ResourceImports
import Stratosphere.ResourceProperties.KinesisAnalyticsV2ApplicationReferenceDataSourceMappingParameters
-- | Full data type definition for
-- KinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormat. See
-- 'kinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormat' for a more
-- convenient constructor.
-- MappingParameters is optional; RecordFormatType is required.
data KinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormat =
  KinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormat
  { _kinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormatMappingParameters :: Maybe KinesisAnalyticsV2ApplicationReferenceDataSourceMappingParameters
  , _kinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormatRecordFormatType :: Val Text
  } deriving (Show, Eq)
-- Serialise to the CloudFormation JSON shape; the optional mapping
-- parameters are omitted entirely when absent.
instance ToJSON KinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormat where
  toJSON KinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormat{..} =
    object $
    catMaybes
    [ fmap (\v -> ("MappingParameters", toJSON v)) _kinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormatMappingParameters
    , Just ("RecordFormatType", toJSON _kinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormatRecordFormatType)
    ]
-- | Constructor for
-- 'KinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormat' containing
-- required fields as arguments; the optional field starts as 'Nothing'.
kinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormat
  :: Val Text -- ^ 'kavardsrfRecordFormatType'
  -> KinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormat
kinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormat fmtType =
  KinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormat
  { _kinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormatMappingParameters = Nothing
  , _kinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormatRecordFormatType = fmtType
  }
-- Lenses: one per record field; each URL links the corresponding
-- CloudFormation property documentation.
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalyticsv2-applicationreferencedatasource-recordformat.html#cfn-kinesisanalyticsv2-applicationreferencedatasource-recordformat-mappingparameters
kavardsrfMappingParameters :: Lens' KinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormat (Maybe KinesisAnalyticsV2ApplicationReferenceDataSourceMappingParameters)
kavardsrfMappingParameters = lens _kinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormatMappingParameters (\s a -> s { _kinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormatMappingParameters = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalyticsv2-applicationreferencedatasource-recordformat.html#cfn-kinesisanalyticsv2-applicationreferencedatasource-recordformat-recordformattype
kavardsrfRecordFormatType :: Lens' KinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormat (Val Text)
kavardsrfRecordFormatType = lens _kinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormatRecordFormatType (\s a -> s { _kinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormatRecordFormatType = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/KinesisAnalyticsV2ApplicationReferenceDataSourceRecordFormat.hs | mit | 3,407 | 0 | 13 | 214 | 262 | 152 | 110 | 29 | 1 |
{- |
Module      : TextAreaContentUtils.hs
Description : Helpers for moving characters within a TextAreaContent.
Maintainer  : Kristin Knorr (c)
License     : MIT
Stability   : stable

'TextAreaContentUtils' provides functions for moving characters within a 'TextAreaContent'.
-}
module TextAreaContentUtils (
-- * Methods
moveChars,
findLastCharBefore,
moveLinesUp,
moveLinesDownXShift,
moveCharsRight,
mvLinesUp
) where
import Graphics.UI.Gtk
import Data.IORef
import Data.Maybe
import Data.Map as Map
import Control.Monad
import qualified TextAreaContent as TAC
-- | Translate a position by a direction vector (component-wise addition).
calculateDest :: TAC.Position
  -> TAC.Direction
  -> TAC.Position
calculateDest (startX, startY) (dX, dY) = (startX + dX, startY + dY)
-- | Move the content of a single cell by the given direction vector;
-- cells that hold no content are left untouched.
moveChar :: TAC.TextAreaContent
  -> TAC.Position
  -> TAC.Direction
  -> IO()
moveChar area from dir = do
  mCell <- TAC.getCell area from
  case mCell of
    Nothing -> return ()
    Just cell -> do
      TAC.putCell area (calculateDest from dir) cell
      TAC.deleteCell area from
      return ()
-- | moves all characters of one line, from column stX up to the last
-- character of the line, by the given direction vector
moveChars :: TAC.TextAreaContent
  -> TAC.Position
  -> TAC.Direction
  -> IO()
moveChars area (stX, line) dir = do
  endX <- TAC.findLastChar area line
  unless (stX > endX) $
    -- a purely horizontal move to the right has to start at the line
    -- end, otherwise cells would overwrite their right-hand neighbours
    if snd dir == 0 && fst dir > 0
    then
      moveCharsRight area stX endX line dir
    else do
      moveChar area (stX,line) dir
      moveChars area (stX+1,line) dir
      return ()
  where
    -- moves the cells one by one, starting at endX and walking left
    -- NOTE(review): this local binding shadows the top-level
    -- 'moveCharsRight' defined further down in this module
    moveCharsRight area stX endX line dir =
      unless (stX > endX) $ do
        moveChar area (endX,line) dir
        moveCharsRight area stX (endX-1) line dir
        return ()
{- |
searches for the last character in a line and returns its x position;
returns -1 if the line is empty
-}
-- | Search backwards from column @x@ in @line@ for the nearest occupied
-- cell; returns its column, or -1 if no cell at or before @x@ is occupied.
findLastCharBefore :: TAC.TextAreaContent
  -> TAC.Coord
  -> TAC.Coord
  -> IO TAC.Coord
findLastCharBefore area x line
  | x < 0 = return (-1)
  | otherwise = do
      cont <- TAC.getCell area (x,line)
      case cont of
        Just _ -> return x
        Nothing -> findLastCharBefore area (x-1) line
-- | Search for the last non-empty line; returns -1 if all lines are empty.
findLastWrittenLine :: TAC.TextAreaContent
  -> IO TAC.Coord
findLastWrittenLine area = do
  (_, maxY) <- TAC.size area
  go maxY
  where
    -- walk upwards from the bottom of the area
    go line
      | line < 0 = return (-1)
      | otherwise = do
          empty <- TAC.isEmptyLine area line
          if empty then go (line-1) else return line
-- | moves Lines up where param line is the upper line
moveLinesUp :: TAC.TextAreaContent
  -> TAC.Coord
  -> IO()
moveLinesUp area line = do
  finY <- findLastWrittenLine area
  moveLinesUpHelper area line line finY
  where
    -- processes lines stY, stY+1, ..., finY: the first moved line is
    -- appended behind the last character of the previous line, all
    -- following lines move straight up by one
    moveLinesUpHelper area line stY finY =
      unless (line<=0 || line>finY) $ do
        empty <- TAC.isEmptyLine area line
        if empty
        then moveLinesUpHelper area (line+1) stY finY
        else
          if line == stY
          then do
            lastPrev <- TAC.findLastChar area (line-1)
            moveChars area (0,line) (lastPrev+1, -1)
            moveLinesUpHelper area (line+1) stY finY
          else do
            moveChars area (0,line) (0,-1)
            moveLinesUpHelper area (line+1) stY finY
{- |
moves lines down, where param line is the upper line.
param xShift is a Boolean which defines whether
the upper line, starting at posX, is shifted down to column 0 (True)
or shifted straight down, keeping its column (False)
-}
moveLinesDownXShift :: TAC.TextAreaContent
  -> TAC.Position
  -> Bool
  -> IO()
moveLinesDownXShift area (posX,line) xShift = do
  lastLine <- findLastWrittenLine area
  unless (line > lastLine || line < 0) $ do
    -- lines below must make room first, except when we are already on
    -- the last written line
    unless (line == lastLine) $ moveLinesVertDown area (line+1)
    moveChars area (posX,line) shiftDir
  where
    -- True: pull the tail of the line to column 0; False: keep columns
    shiftDir = if xShift then (-posX,1) else (0,1)
-- | Shift every non-empty line at or below the given line one line
-- downwards, processing from the bottom up so lines do not collide.
moveLinesVertDown :: TAC.TextAreaContent
  -> TAC.Coord
  -> IO()
moveLinesVertDown area stY = do
  lastLine <- findLastWrittenLine area
  go lastLine
  where
    go line = unless (line < stY) $ do
      empty <- TAC.isEmptyLine area line
      unless empty $ moveChars area (0,line) (0,1)
      go (line-1)
-- | Move the characters at 'bottomRight' by a direction that depends on
-- whether the cursor sits at 'topLeft'; always returns 'topLeft'.
moveCharsRight :: TAC.TextAreaContent -> TAC.Position -> TAC.Position -> TAC.Position -> IO TAC.Position
moveCharsRight tac (x,y) topLeft@(xLeft,yTop) bottomRight@(xRight,yBottom) = do
  let dir | (x,y) == topLeft = (x - xRight - 1, y - yBottom)
          | otherwise = (xLeft - x, yTop - y)
  moveChars tac bottomRight dir
  return topLeft
-- | Repeatedly move lines up ('diff' times, walking upwards from 'y'),
-- accumulating a RemoveLine undo action for each step.
mvLinesUp :: TAC.TextAreaContent -> TAC.Coord -> Int -> (TAC.Action, TAC.Position) -> IO (TAC.Action, TAC.Position)
mvLinesUp _ _ 0 action = return action
mvLinesUp tac y diff action = do
  moveLinesUp tac y
  let action' = (TAC.Concat action (TAC.RemoveLine, (0,y-1)), (0, y))
  mvLinesUp tac (y-1) (diff-1) action'
| SWP-Ubau-SoSe2014-Haskell/SWPSoSe14 | src/RailEditor/TextAreaContentUtils.hs | mit | 5,145 | 0 | 17 | 1,285 | 1,629 | 835 | 794 | 132 | 4 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Compiler.Rum.Compiler.CodeGen where
import Data.Char (ord)
import Control.Monad.State (MonadState, State, execState, gets, modify, void)
import Data.Map (Map)
import qualified Data.Map as Map (empty, insert, lookup, toList)
import Data.Maybe (fromMaybe)
import Data.List (map, sortBy)
import Data.Function (on)
import Data.Text (Text)
import qualified Data.Text as T
import GHC.Word (Word32)
import qualified LLVM.AST.Global as G (Global(..), functionDefaults, globalVariableDefaults)
import qualified LLVM.AST as AST
import qualified LLVM.AST.Type as Ty (Type(..), i8, i32)
import LLVM.AST ( BasicBlock(..), Definition(..)
, Instruction(..)
, Module(..), Name(..)
, Named(..)
, Operand(..), Parameter(..), Terminator(..)
, defaultModule
)
import qualified LLVM.AST.Attribute as A
import qualified LLVM.AST.CallingConvention as CC
import qualified LLVM.AST.Constant as C
import qualified LLVM.AST.IntegerPredicate as I
import qualified LLVM.AST.Linkage as L
-----------------------
-------- Setup --------
-----------------------
-- | State monad over an LLVM 'Module'; used to accumulate top-level
-- definitions while a module is being built.
newtype LLVM a = LLVM {stateLLVM :: State Module a}
  deriving (Functor, Applicative, Monad, MonadState Module )
-- | Run an 'LLVM' action over an initial module and return the final module.
runLLVM :: Module -> LLVM a -> Module
runLLVM initialModule act = execState (stateLLVM act) initialModule

-- | A fresh module that only has its name set.
emptyModule :: String -> Module
emptyModule nm = defaultModule { moduleName = nm }
-- | Append one top-level definition to the module being built.
addDefn :: Definition -> LLVM ()
addDefn d = modify $ \s ->
  s { moduleDefinitions = moduleDefinitions s ++ [d] }
-- | Add a function definition with the given return type, name,
-- parameter types/names and body blocks to the module.
defineFun :: AST.Type -> Text -> [(AST.Type, Name)] -> [BasicBlock] -> LLVM ()
defineFun retType funName argTypes body = addDefn $
  GlobalDefinition $ G.functionDefaults {
    G.name = Name (T.unpack funName)
  , G.parameters = ([Parameter parType nm [] | (parType, nm) <- argTypes], False)
  , G.returnType = retType
  , G.basicBlocks = body
  }
-- | Add a constant global i8 array holding the given format string.
-- NOTE(review): characters are narrowed to 8 bits via 'ord', so
-- non-ASCII characters would be mangled — confirm inputs are ASCII.
defineIOStrVariable :: String -> String -> LLVM ()
defineIOStrVariable varName formatString = addDefn $
  GlobalDefinition $ G.globalVariableDefaults {
    G.name = Name varName
  , G.type' = Ty.ArrayType (fromIntegral $ length formatString) Ty.i8
  , G.isConstant = True
  , G.initializer = Just $ C.Array Ty.i8 $ map (C.Int 8 . fromIntegral . ord) formatString
  }
-- | Declare (not define) an externally linked function; 'isVararg'
-- marks variadic functions.
declareExtFun :: AST.Type -> Text -> [(AST.Type, Name)] -> Bool -> LLVM ()
declareExtFun retType funName argTypes isVararg = addDefn $
  GlobalDefinition $ G.functionDefaults {
    G.name = Name (T.unpack funName)
  , G.linkage = L.External
  , G.parameters = ([Parameter parType nm [] | (parType, nm) <- argTypes], isVararg)
  , G.returnType = retType
  , G.basicBlocks = []
  }
-----------------------------
------- Codegen State -------
-----------------------------
-- | Maps source-level names to the operands holding their values.
type SymbolTable = [(String, Operand)]
-- toplevel module code generation
data CodegenState
  = CodegenState { currentBlock :: Name -- Name of the active block to append to
                 , blocks :: Map Name BlockState -- Blocks for function
                 , symTable :: SymbolTable -- Function scope symbol table
                 , blockCount :: Int -- Count of basic blocks
                 , count :: Word -- Count of unnamed instructions
                 , names :: Names -- Name Supply
                 , varTypes :: Map String Ty.Type -- Types of assigned variables
                 } deriving Show
-- basic blocks inside of function definitions
data BlockState
  = BlockState { idx :: Int -- Block index
               , stack :: [Named Instruction] -- Stack of instructions (kept reversed)
               , term :: Maybe (Named Terminator) -- Block terminator
               } deriving Show
-- | State monad in which instructions are emitted.
newtype Codegen a = Codegen { runCodegen :: State CodegenState a }
  deriving (Functor, Applicative, Monad, MonadState CodegenState )
---------------------------
---------- Types ----------
---------------------------
-- | Default integer type used throughout the generated code (32 bit).
iType :: AST.Type
iType = Ty.i32
-- | Bit width matching 'iType'.
iBits :: Word32
iBits = 32
-------------------------
--------- Names ---------
-------------------------
-- | Counter per base name, used to disambiguate repeated names.
type Names = Map String Int

-- | Make a name unique within the supply: the first use keeps the bare
-- name, later uses get the current counter appended; the updated supply
-- is returned alongside.
uniqueName :: String -> Names -> (String, Names)
uniqueName nm ns = maybe firstUse numbered (Map.lookup nm ns)
  where
    firstUse = (nm, Map.insert nm 1 ns)
    numbered ix = (nm ++ show ix, Map.insert nm (ix+1) ns)
------------------------------
----- Codegen Operations -----
------------------------------
-- | Order blocks by their creation index.
sortBlocks :: [(Name, BlockState)] -> [(Name, BlockState)]
sortBlocks = sortBy byIdx
  where
    byIdx a b = compare (idx (snd a)) (idx (snd b))
-- | Turn the accumulated codegen blocks into 'BasicBlock's, ordered by
-- creation index.
createBlocks :: CodegenState -> [BasicBlock]
createBlocks m = map makeBlock $ sortBlocks $ Map.toList (blocks m)
-- | Convert one block; the instruction stack is stored reversed.
-- NOTE: partial — raises 'error' for a block without a terminator.
makeBlock :: (Name, BlockState) -> BasicBlock
makeBlock (l, BlockState _ s t) = BasicBlock l (reverse s) (makeTerm t)
  where
    makeTerm = fromMaybe (error $ "Block has no terminator: " ++ show l)
-- | Name of the block every function starts in.
entryBlockName :: String
entryBlockName = "entry"
-- | A block with the given creation index and no instructions yet.
emptyBlock :: Int -> BlockState
emptyBlock i = BlockState i [] Nothing
-- | Initial codegen state: positioned at the entry block, empty tables.
emptyCodegen :: CodegenState
emptyCodegen = CodegenState (Name entryBlockName) Map.empty [] 1 0 Map.empty Map.empty
-- | Run a 'Codegen' action and return its final state.
execCodegen :: Codegen a -> CodegenState
execCodegen m = execState (runCodegen m) emptyCodegen
-- | Bump the unnamed-instruction counter and return the new value.
fresh :: Codegen Word
fresh = do
  modify $ \s -> s { count = succ (count s) }
  gets count
-- | Append an instruction to the current block, naming its result with a
-- fresh unnamed identifier, and return that result as an operand of the
-- given type.
tyInstr :: Ty.Type -> Instruction -> Codegen Operand
tyInstr t ins = do
  nm <- fresh
  let ref = UnName nm
  blk <- current
  let i = stack blk
  -- the stack is kept reversed; 'makeBlock' reverses it on output
  modifyBlock (blk { stack = (ref := ins) : i } )
  return $ local t ref
-- | 'tyInstr' specialised to the default integer type.
instr :: Instruction -> Codegen Operand
instr = tyInstr iType
--namedInstr :: String -> Instruction -> Codegen Operand
--namedInstr name instruction = do
-- identfiersNames <- gets names
-- let (newName, newNameMap) = uniqueName name identfiersNames
-- modify $ \codegenState -> codegenState { names = newNameMap }
-- addInstr (Name newName) instruction
--
--addInstr :: Name -> Instruction -> Codegen Operand
--addInstr name instruction = do
-- curBlock <- current
-- let curStack = stack curBlock
-- modifyBlock (curBlock { stack = curStack ++ [name := instruction] })
-- return $ LocalReference iType name
-- | Set the current block's terminator; if one is already set, keep it
-- and return the existing one instead.
terminator :: Named Terminator -> Codegen (Named Terminator)
terminator trm = do
  blk <- current
  case term blk of
    Just existing -> return existing
    Nothing -> do
      modifyBlock (blk { term = Just trm })
      return trm
-------------------------------
--------- Block Stack ---------
-------------------------------
-- | Name of the block instructions are currently appended to.
entry :: Codegen Name
entry = gets currentBlock
-- | Create a new empty block under a uniquified version of the given
-- name and return that name (the current block is not switched).
addBlock :: String -> Codegen Name
addBlock bname = do
  blks <- gets blocks
  n <- gets blockCount
  nameSupply <- gets names
  let (qname, supply') = uniqueName bname nameSupply
      blockName = Name qname
  modify $ \s -> s { blocks = Map.insert blockName (emptyBlock n) blks
                   , blockCount = succ n
                   , names = supply'
                   }
  return blockName
-- | Make the named block the one instructions are appended to.
setBlock :: Name -> Codegen ()
setBlock bname = modify $ \s -> s { currentBlock = bname }

-- | Name of the currently active block.
getBlock :: Codegen Name
getBlock = gets currentBlock

-- | Replace the currently active block's state.
modifyBlock :: BlockState -> Codegen ()
modifyBlock new = do
  active <- getBlock
  modify $ \s -> s { blocks = Map.insert active new (blocks s) }
-- | State of the currently active block.
-- NOTE: partial — raises 'error' if the current block name is unknown.
current :: Codegen BlockState
current = do
  c <- getBlock
  blks <- gets blocks
  case Map.lookup c blks of
    Just blk -> return blk
    Nothing -> error $ "No such block: " ++ show c
----------------------------
------- Symbol Table -------
----------------------------
-- | Record a variable's operand (and its type) in the symbol tables.
assign :: String -> Operand -> Codegen ()
assign v x = do
  symbs <- gets symTable
  varTps <- gets varTypes
  modify $ \s -> s { symTable = (v, x) : symbs
                   , varTypes = Map.insert v (typeOfOperand x) varTps }

-- | Look a variable up in the symbol table.
-- NOTE: partial — raises 'error' for unknown variables.
getVar :: String -> Codegen Operand
getVar var = do
  syms <- gets symTable
  case lookup var syms of
    Just op -> return op
    Nothing -> error $ "Local variable not in scope: " ++ show var
----------------------------
-------- References --------
----------------------------
-- | Reference to a locally named value.
local :: Ty.Type -> Name -> Operand
local = LocalReference
-- | Constant reference to a module-level (global) name.
global :: Ty.Type -> Name -> C.Constant
global = C.GlobalReference
-- | Operand referring to an externally defined function or value.
externf :: Ty.Type -> Name -> Operand
externf ty = ConstantOperand . global ty
----------------------------------
---- Arithmetic and Constants ----
----------------------------------
-- integer arithmetic (no nsw/nuw flags)
iAdd :: Operand -> Operand -> Codegen Operand
iAdd a b = instr $ Add False False a b []
iSub :: Operand -> Operand -> Codegen Operand
iSub a b = instr $ Sub False False a b []
iMul :: Operand -> Operand -> Codegen Operand
iMul a b = instr $ Mul False False a b []
-- signed division / remainder
iDiv :: Operand -> Operand -> Codegen Operand
iDiv a b = instr $ SDiv False a b []
iMod :: Operand -> Operand -> Codegen Operand
iMod a b = instr $ SRem a b []
--- logic operations ---
-- bitwise And/Or (used as logical ops on 0/1-valued integers)
lAnd :: Operand -> Operand -> Codegen Operand
lAnd a b = instr $ And a b []
lOr :: Operand -> Operand -> Codegen Operand
lOr a b = instr $ Or a b []
--- compare operations ---
-- | Compare two operands and widen the i1 result back to 'iType'.
-- NOTE(review): the operands are also zero-extended to 'iType' first;
-- LLVM's zext requires a strictly larger target type, so this looks
-- invalid for operands that are already i32 — confirm against the
-- emitted IR.
iCmp :: I.IntegerPredicate -> Operand -> Operand -> Codegen Operand
iCmp cond a b = do
  a' <- instr $ AST.ZExt a iType []
  b' <- instr $ AST.ZExt b iType []
  temp <- instr $ ICmp cond a' b' []
  instr $ AST.ZExt temp iType []
-- | Raw not-equal comparison without the widening done by 'iCmp'.
bNeq :: Operand -> Operand -> Codegen Operand
bNeq a b = instr $ AST.ICmp I.NE a b []
iEq :: Operand -> Operand -> Codegen Operand
iEq = iCmp I.EQ
iNeq :: Operand -> Operand -> Codegen Operand
iNeq = iCmp I.NE
iNotGt :: Operand -> Operand -> Codegen Operand
iNotGt = iCmp I.SLE
iNotLt :: Operand -> Operand -> Codegen Operand
iNotLt = iCmp I.SGE
iLt :: Operand -> Operand -> Codegen Operand
iLt = iCmp I.SLT
iGt :: Operand -> Operand -> Codegen Operand
iGt = iCmp I.SGT
--------------------------------------
-- | Embed a constant as an operand.
cons :: C.Constant -> Operand
cons = ConstantOperand
-- | Integer constant 0 at the default width.
iZero :: Operand
iZero = cons $ C.Int iBits 0
-- | 1 if the operand is non-zero, else 0.
isTrue :: Operand -> Codegen Operand
isTrue = iCmp I.NE iZero
-- | 1 if the operand is zero, else 0.
isFalse :: Operand -> Codegen Operand
isFalse = iCmp I.EQ iZero
-- | Pair each call argument with an empty parameter-attribute list.
toArgs :: [Operand] -> [(Operand, [A.ParameterAttribute])]
toArgs xs = [ (x, []) | x <- xs ]
-- Effects
-- | Call a function with the given arguments (C calling convention).
-- NOTE(review): the result operand is typed with the callee operand's
-- type, not its return type — confirm callers rely on this.
call :: Operand -> [Operand] -> Codegen Operand
call fn args = tyInstr (typeOfOperand fn) $ Call Nothing CC.C [] (Right fn) (toArgs args) [] []
-- | Allocate a stack slot of the given type.
alloca :: AST.Type -> Codegen Operand
alloca ty = tyInstr ty $ Alloca ty Nothing 0 []
-- | Store 'val' through pointer 'ptr' (non-volatile, default alignment).
store :: Operand -> Operand -> Codegen Operand
store ptr val = tyInstr (typeOfOperand val) $ Store False ptr val Nothing 0 []
-- | Load through a pointer.
-- NOTE(review): the result is typed with the pointer operand's type,
-- not the pointee type — confirm.
load :: Operand -> Codegen Operand
load ptr = tyInstr (typeOfOperand ptr) $ Load False ptr Nothing 0 []
-- | GEP with indices [0, 0], e.g. to the first element of a global array.
getElementPtr :: Operand -> Codegen Operand
getElementPtr o = tyInstr (typeOfOperand o) $ GetElementPtr True o [iZero, iZero] []
------------------------
----- Control Flow -----
------------------------
-- Unconditional jump
br :: Name -> Codegen ()
br val = void $ terminator $ Do $ Br val []
-- Conditional jump: any non-zero condition value is treated as true
cbr :: Operand -> Name -> Name -> Codegen ()
cbr cond tr fl = do
  boolCond <- bNeq cond iZero
  void $ terminator $ Do $ CondBr boolCond tr fl []
-- return command
ret :: Operand -> Codegen (Named Terminator)
ret val = terminator $ Do $ Ret (Just val) []
-- | Best-effort recovery of an operand's type.
-- NOTE(review): integer constants and the catch-all case are mapped to
-- 'iType' (i32) regardless of their actual width — confirm this is the
-- intended approximation.
typeOfOperand :: Operand -> Ty.Type
typeOfOperand (AST.LocalReference t _) = t
typeOfOperand (AST.ConstantOperand C.Int{..}) = iType
typeOfOperand (AST.ConstantOperand (C.GlobalReference t _ )) = t
typeOfOperand (AST.ConstantOperand C.Array{..}) = AST.ArrayType (fromIntegral $ length memberValues) memberType
typeOfOperand _ = iType
| vrom911/Compiler | src/Compiler/Rum/Compiler/CodeGen.hs | mit | 11,850 | 0 | 16 | 2,895 | 3,698 | 1,978 | 1,720 | -1 | -1 |
module Handler.GameHome where
import Import
import Handler.Game
import Crypto.Random (SystemRandom)
import Control.Monad.CryptoRandom
import Data.ByteString.Base16 (encode)
import DbFunctions
-- generate a random id (8 bytes), register a player under its hex
-- encoding, and forward to the corresponding game page
-- see http://www.yesodweb.com/book/yesods-monads#yesods-monads_adding_a_new_monad_transformer
getGameHomeR :: Handler Html
getGameHomeR = do
  gen <- liftIO newGenIO
  eres <- evalCRandT (getBytes 8) (gen :: SystemRandom)
  pId <- either (\e -> error $ show (e :: GenError)) return eres
  let urlHash = decodeUtf8 $ encode pId
  _ <- insertPlayer urlHash
  getGameR urlHash
| total-git/missingno | yesodMissingNo/Handler/GameHome.hs | mit | 685 | 0 | 13 | 126 | 170 | 87 | 83 | 17 | 2 |
{-# LANGUAGE CPP, TypeFamilies, DeriveDataTypeable #-}
module PGIP.GraphQL.Result.Axiom where
import PGIP.GraphQL.Result.FileRange
import PGIP.GraphQL.Result.Symbol
import Data.Data
-- | One axiom as delivered in a GraphQL result: the GraphQL __typename
-- tag plus the axiom's optional source range, identifiers, symbols and
-- its textual form.
data Axiom = Axiom { __typename :: String
                   , fileRange :: Maybe FileRange
                   , locId :: String
                   , name :: String
                   , symbols :: [Symbol]
                   , text :: String
                   } deriving (Show, Typeable, Data)
| spechub/Hets | PGIP/GraphQL/Result/Axiom.hs | gpl-2.0 | 481 | 0 | 9 | 167 | 94 | 60 | 34 | 12 | 0 |
module BibDB where
import Text.ParserCombinators.Parsec (parseFromFile)
import TypedBibData
import Config
import Control.Monad
------------
-- DB
-- | Load the bibliography from the file named in the configuration.
loadBibliography = loadBibliographyFrom . bibfile
-- | Parse a bibliography file, report progress on stdout, and return
-- either a parse/conversion error message or the entry forest.
loadBibliographyFrom fileName = do
  putStrLn ("Loading " ++ fileName)
  parsed <- fmap bibToForest <$> parseFromFile parseBib fileName
  case join (either (Left . show) Right parsed) of
    Left err -> return (Left err)
    Right bib -> do
      putStrLn $ show (length bib) ++ " entries loaded."
      return (Right bib)
-- | Render entries back to their textual form.
formatBib :: [Entry] -> [Char]
formatBib = concatMap (formatEntry . treeToEntry)
-- | Write the bibliography to the configured file and report the count.
saveBibliography :: InitFile -> [Entry] -> IO ()
saveBibliography cfg bib = do
  writeFile (bibfile cfg) (formatBib bib)
  putStrLn (show (length bib) ++ " entries saved to " ++ bibfile cfg)
| jyp/imbib | lib/BibDB.hs | gpl-2.0 | 847 | 0 | 16 | 169 | 291 | 143 | 148 | 20 | 2 |
module Yi.Mode.Interactive where
import Control.Concurrent (threadDelay)
import Data.List (elemIndex)
import Prelude ()
import Yi.Modes
import Yi.Core
import Yi.History
import Yi.Lexer.Alex (Tok)
import Yi.Lexer.Compilation (Token)
import Yi.Region
import qualified Yi.Mode.Compilation as Compilation
import qualified Yi.Syntax.OnlineTree as OnlineTree
-- | True when point is on the buffer's last line: after moving to the end
-- of the current line, the point coincides with the buffer size.  The
-- point is restored afterwards by 'savingPointB'.
atLastLine :: BufferM Bool
atLastLine = savingPointB $ do
    moveToEol
    (==) <$> sizeB <*> pointB
-- | Interactive-session mode, derived from the compilation mode.  Extra
-- bindings: Home goes to the prompt ('ghciHome'); Enter feeds the typed
-- command when on the last line, otherwise follows the item under point;
-- M-p / M-n move through the command history.
mode :: Mode (OnlineTree.Tree (Tok Token))
mode = Compilation.mode
  { modeApplies = modeNeverApplies,
    modeName = "interactive",
    modeKeymap = topKeymapA ^: ((<||)
     (choice
      [spec KHome ?>>! ghciHome,
       spec KEnter ?>>! do
          eof <- withBuffer $ atLastLine
          if eof
            then feedCommand
            else withSyntax modeFollow,
       meta (char 'p') ?>>! interactHistoryMove 1,
       meta (char 'n') ?>>! interactHistoryMove (-1)
      ])) }
-- | The GHCi prompt always begins with ">"; this goes to just before it, or if one is already at the start
-- of the prompt, goes to the beginning of the line. (If at the beginning of the line, this pushes you forward to it.)
ghciHome :: BufferM ()
ghciHome = do
    line <- readLnB
    case elemIndex '>' line of
      Nothing -> moveToSol
      Just promptPos -> do
          (_, col) <- getLineAndCol
          -- promptPos + 2 is the column just after "> "
          if col == promptPos + 2
            then moveToSol
            else moveToSol >> moveXorEol (promptPos + 2)
-- | History namespace shared by all interactive buffers.
interactId :: String
interactId = "Interact"
-- | Step @delta@ entries through the command history, replacing the
-- current input with the recalled entry.
interactHistoryMove :: Int -> EditorM ()
interactHistoryMove delta = historyMoveGen interactId delta (withBuffer0 getInput) >>= (withBuffer0 . setInput)
-- | Commit the current input to the history.
interactHistoryFinish :: EditorM ()
interactHistoryFinish = historyFinishGen interactId (withBuffer0 getInput)
-- | Begin a new history browsing session.
interactHistoryStart :: EditorM ()
interactHistoryStart = historyStartGen interactId
-- | The region between the "StdOUT" mark and the end of the buffer,
-- i.e. the text typed after the last process output.
getInputRegion :: BufferM Region
getInputRegion = do
    outMark <- getMarkB (Just "StdOUT")
    bufEnd <- pointAt botB
    outPoint <- getMarkPointB outMark
    return (mkRegion bufEnd outPoint)

-- | Read the current user input.
getInput :: BufferM String
getInput = do
    region <- getInputRegion
    readRegionB region

-- | Replace the current user input with the given string.
setInput :: String -> BufferM ()
setInput val = do
    region <- getInputRegion
    replaceRegionB region val
-- | Open a new buffer for interaction with a process.
-- | Start the given command as a subprocess in a new buffer, set up the
-- "StdERR"/"StdOUT" marks with backward gravity (so they stay put as
-- output is appended after them), and install the interactive mode.
interactive :: String -> [String] -> YiM BufferRef
interactive cmd args = do
    b <- startSubprocess cmd args (const $ return ())
    withEditor $ interactHistoryStart
    mode' <- lookupMode $ AnyMode mode
    withBuffer $ do m1 <- getMarkB (Just "StdERR")
                    m2 <- getMarkB (Just "StdOUT")
                    modifyMarkB m1 (\v -> v {markGravity = Backward})
                    modifyMarkB m2 (\v -> v {markGravity = Backward})
                    setAnyMode mode'
    return b
-- | Send the type command to the process
-- | Send the typed command to the process: append a newline, read the text
-- between the "StdOUT" mark and point as the command, then advance both
-- the "StdERR" and "StdOUT" marks past it so subsequent output/input are
-- delimited correctly.  The command is also committed to the history.
feedCommand :: YiM ()
feedCommand = do
    b <- gets currentBuffer
    withEditor interactHistoryFinish
    cmd <- withBuffer $ do
        botB
        insertN "\n"
        me <- getMarkB (Just "StdERR")
        mo <- getMarkB (Just "StdOUT")
        p <- pointB
        q <- getMarkPointB mo
        cmd <- readRegionB $ mkRegion p q
        setMarkPointB me p
        setMarkPointB mo p
        return $ cmd
    withEditor interactHistoryStart
    sendToProcess b cmd
-- | Send command, recieve reply
-- | Send command, recieve reply.  Sends @cmd@ to the buffer's process,
-- waits briefly for output to arrive, then reads everything between the
-- pre-send end of buffer and the start of the (new) final prompt line.
queryReply :: BufferRef -> String -> YiM String
queryReply buf cmd = do
    start <- withGivenBuffer buf (botB >> pointB)
    sendToProcess buf (cmd ++ "\n")
    io $ threadDelay 50000 -- Hack to let ghci finish writing its output.
    withGivenBuffer buf $ do
        botB
        moveToSol
        leftB -- There is probably a much better way to do this moving around, but it works
        end <- pointB
        result <- readRegionB (mkRegion start end)
        botB
        return result
| codemac/yi-editor | src/Yi/Mode/Interactive.hs | gpl-2.0 | 3,894 | 0 | 17 | 1,086 | 1,055 | 522 | 533 | 96 | 3 |
{- |
Module : $Header$
Description : Logic independent retrieval functionality
Copyright : (c) Immanuel Normann, Uni Bremen 2007
License : GPLv2 or higher, see LICENSE.txt
Maintainer : inormann@jacobs-university.de
Stability : provisional
Portability : portable
-}
module Search.Common.Select where
import Search.Common.Data hiding (parameter,role)
import Data.List as L
import Data.Set as S
import Data.Map as M hiding ((!))
import Data.Maybe (catMaybes)
import Search.Utils.SetMap as SM --(fromListSetValues)
import Search.Utils.List
import Database.HaskellDB
import Database.HaskellDB.DriverAPI
import Database.HaskellDB.HDBRec
import Search.DB.Connection
import Search.DB.FormulaDB.Profile
-- -----------------------------------------------------------
-- * Principle Functions
-- -----------------------------------------------------------
{- |
search is the main function of this module. It takes a handler function
to read in the source theory and a path to the file of the source theory
and returns all possible profile morphisms from all matching target theories
in the database.
-}
search :: (Ord p, Read p, Show p) =>
          (FilePath -> TheoryName -> IO ([ShortProfile p], [ShortProfile p]))
              -> FilePath
              -> TheoryName
              -> IO [[LongInclusionTuple p]]
search getSourceProfiles dir file =
    let skelOf (skel,_,_,_) = skel
    -- Duplicate axioms (same skeleton and parameters) are removed before
    -- candidate search; each DB candidate theory is then checked in turn.
    in do (axioms,theorems) <- getSourceProfiles dir file
          axioms' <- return $ removeDuplicateProfiles axioms
          targetCandidates <- allTargetCandidates (L.map skelOf axioms')
          mapM (showResults file axioms' theorems) (M.toList targetCandidates)
{- |
showResults shows for a given target theory candidate all possible profile morphisms
and the actual reused theorems.
-}
showResults :: (Ord p, Read p, Show p) =>
               TheoryName
               -> [ShortProfile p]
               -> [ShortProfile p]
               -> (String, Map Skel (Set ([p], LineNr)))
               -> IO [LongInclusionTuple p]
showResults sourceTheory sAxioms sTheorems (targetTheory,tMap) =
    let sAxioms' = (L.map toProfile2 sAxioms)
        morphs = profileMorphism sAxioms' tMap
        toProfile2 (skel, ps, lineNr, _) = (skel, (ps, lineNr))
        -- For DB storage only the number of reused theorem lines is kept.
        toIncTuple (st,tt,ps,lm,lines) = (st,tt,ps,lm,length lines)
    in do putStrLn targetTheory
          targetProfiles <- getProfilesFromDB targetTheory
          longIncTuples <- return (L.map (inclusionTuple sourceTheory targetTheory
                                          targetProfiles sTheorems)
                                   morphs)
          -- Persist the found inclusions, then hand them back to the caller.
          multiInsertInclusion (L.map toIncTuple longIncTuples)
          return longIncTuples
inclusionTuple :: (Eq p,Ord p) =>
                  TheoryName -- ^ source theory
                  -> TheoryName -- ^ target theory
                  -> [ShortProfile p] -- ^ profiles of target sentences
                  -> [ShortProfile p] -- ^ profiles of source theorems
                  -> (Renaming p, LineMap) -- ^ profile mapping
                  -> LongInclusionTuple p -- ^ profiles of reused source theorems
inclusionTuple st tt ts ss (ren,lmap) = (st,tt,ren',lmap',newTheorems)
    where newTheorems = L.map lineOf $ reusedTheorems ts ss ren
          lineOf (_,_,lNr,_) = lNr
          -- filterWithKey with (/=) drops identity entries (key == value)
          -- from both the renaming and the line map.
          neq a b = a /=b
          ren' = M.filterWithKey neq ren
          lmap' = M.filterWithKey neq lmap
-- | Source theorems that, once translated by the renaming, do not already
-- occur among the target sentences (compared on skeleton and parameters;
-- line numbers and roles are ignored).
reusedTheorems :: (Eq p,Ord p) => [ShortProfile p] -> [ShortProfile p] -> Renaming p -> [ShortProfile p]
reusedTheorems tSens sTheorems renaming = L.filter isNew translated
    where translated = L.map (translate renaming) sTheorems
          differs (s1,p1,_,_) (s2,p2,_,_) = s1 /= s2 || p1 /= p2
          isNew thm = all (differs thm) tSens
-- | Apply a parameter renaming to a profile; parameters outside the
-- renaming's domain are left unchanged.
translate :: (Ord p) => Renaming p -> ShortProfile p -> ShortProfile p
translate renaming (skel, params, lnr, role) = (skel, renamed, lnr, role)
    where renamed = L.map rename params
          rename p = findWithDefault p p renaming
-- | Fetch all profiles (skeleton, parameters, line, role) stored for the
-- given theory.  The parameter column is stored as a string and parsed
-- back with 'read'.
getProfilesFromDB :: (Read p) => TheoryName -> IO [ShortProfile p]
getProfilesFromDB targetTheory =
    let q = do t <- table profile -- the query
               restrict ((t!file) .==. (constant targetTheory))
               project (skeleton_md5 << t!skeleton_md5 #
                        parameter << t!parameter #
                        line << t!line # role << t!role)
        -- Unpack the HaskellDB record into a plain tuple.
        recToTuple rec = (skel, (read parameter) , line, role)
            where (RecCons skel (RecCons parameter (RecCons line (RecCons role _)))) =
                      rec RecNil
    in do recs <- myQuery q
          return $ L.map recToTuple recs
{- |
two profiles are said to be duplicates if they have the same skeletons and parameters.
-}
-- | Drop profiles that agree on both skeleton and parameters; line numbers
-- and roles play no part in the comparison.  (Quadratic, like 'nubBy'.)
removeDuplicateProfiles :: (Eq p) => [ShortProfile p] -> [ShortProfile p]
removeDuplicateProfiles = nubBy sameProfile
  where sameProfile (skel1, par1, _, _) (skel2, par2, _, _) =
            skel1 == skel2 && par1 == par2
-- -----------------------------------------------------------
-- * Database Access
-- -----------------------------------------------------------
{- |
allTargetCandidates takes a list of skeletons and retrieves from the database
candidates of target theories. A theory is a candidate if it contains all the
skeletons from the input list.
-}
allTargetCandidates :: (Ord p, Read p) =>
                       [Skel] -> IO (Map TheoryName (Map Skel (Set ([p], LineNr))))
allTargetCandidates skels =
    let len = length $ nub skels
        -- A theory qualifies only if its matched rows cover every distinct
        -- input skeleton.
        comprisesAllSkels skelLineSet =
            ((S.size $ S.map fst skelLineSet) == len)
    in do triples <- matchSkeleton skels
          return $ (M.map fromSetSetValues
                    (M.filter comprisesAllSkels (fromListSetValues triples)))
{- |
matchSkeleton takes a list of skeletons and retrieves from the database all
datasets whose skeleton matches one of the input list.
(this function is only used in 'allTargetCandidates')
-}
matchSkeleton :: (Ord p, Read p) => [Skel] -> IO [(TheoryName, (Skel, ([p], LineNr)))]
matchSkeleton skels =
    let q = do t <- table profile -- the query
               restrict (_in (t!skeleton_md5) (L.map constant skels))
               project (file << t!file # skeleton_md5 << t!skeleton_md5 #
                        parameter << t!parameter # line << t!line)
        -- Unpack the HaskellDB record; parameters are 'read' back from text.
        recToTuple rec = (theory,(skel, (read parameter, line)))
            where (RecCons theory (RecCons skel (RecCons parameter (RecCons line _)))) =
                      rec RecNil
    in do recs <- myQuery q
          return $ L.map recToTuple recs
-- -----------------------------------------------------------
-- * Matching
-- -----------------------------------------------------------
{- |
profileMorphism finds all theory morphisms from profiles of a source theory
to profiles in a target theory.
-}
profileMorphism :: (Ord p, Read p) =>
                   [(Skel,([p], LineNr))] -- ^ source profiles
                   -> M.Map Skel (S.Set ([p], LineNr)) -- ^ map from skeletons to target profile
                   -> [(Renaming p, LineMap)]
profileMorphism sourceProfiles targetProfileMap =
    let matchList' (s,sourceProfile) =
            case M.lookup s targetProfileMap
            of (Just targetProfiles) -> matchList (S.toList targetProfiles) sourceProfile
               Nothing -> []
    -- Each source profile yields its own list of candidate mappings; 'merge'
    -- folds these into consistent combinations across all source profiles.
    in case (L.map matchList' sourceProfiles)
       of (h:t) -> foldr merge h t
          [] -> []
{- |
merge takes two lists of profile mappings and returns the list of
all profile mappings resulting from a admissible union out of the
Cartesian product of the input lists. A union is of two profile mappings
is admissible iff their renamings are equal on their common domain.
-}
merge :: (Ord p, Read p) => [(Renaming p, LineMap)]
         -> [(Renaming p, LineMap)]
         -> [(Renaming p, LineMap)]
merge ps1 ps2 = catMaybes [merge' p1 p2 | p1 <- ps1, p2 <- ps2]
    -- Two mappings combine only if their renamings agree on the common
    -- domain ('maybeUnion'); the line maps are unioned unconditionally.
    where merge' (r1,l1) (r2,l2) =
              case maybeUnion r1 r2
              of (Just r) -> Just (r, M.union l1 l2)
                 Nothing -> Nothing
{- |
matchList takes a list of target pairs and a single source pair
and returns a list of renamings together with a line number mapping.
-}
-- | Match one source profile against every target profile, keeping the
-- renaming and line mapping of each successful match.  The original
-- hand-rolled @foldr justInsert []@ was exactly 'catMaybes'.
matchList :: (Ord p, Read p) =>
             [([p], LineNr)] -> ([p], LineNr) -> [(Renaming p, LineMap)]
matchList targetProfiles sourceProfile =
    catMaybes (L.map (match sourceProfile) targetProfiles)
{- |
match takes two pairs and returns (Just)
a renaming of parameters and a line number association
if the pairs match and Nothing otherwise.
Each pair has a list of parameter as first component
and a line number as second. Each pair represents a formula
whose skeleton is supposed to be identical.
The pairs match iff their parameter do (s. 'constructRenaming').
-}
-- | Match a source (parameters, line) pair against a target pair: succeed
-- with the pointwise renaming plus the singleton line association, or fail.
match :: (Ord p, Read p) => ([p],LineNr) -> ([p],LineNr)
         -> Maybe (Renaming p, LineMap)
match (srcParams, srcLine) (tgtParams, tgtLine) =
    fmap withLine (constructRenaming srcParams tgtParams)
    where withLine renaming = (renaming, M.singleton srcLine tgtLine)
{- |
constructRenaming takes two lists of parameters and returns (Just) a pointwise
mapping between these lists if this is consistently possible.
Otherwise it returns Nothing.
-}
-- | Zip the two parameter lists into a pointwise renaming; 'SM.fromList'
-- yields Nothing when the pairs are inconsistent.
constructRenaming :: (Ord p, Read p) => [p] -> [p] -> Maybe (Renaming p)
constructRenaming sources targets = SM.fromList (zip sources targets)
| nevrenato/Hets_Fork | Search/Common/Select.hs | gpl-2.0 | 9,538 | 50 | 21 | 2,471 | 2,378 | 1,302 | 1,076 | 134 | 3 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Utils.ToString where
{-
Haskell has already a built-in show; but we need more.
We often want a data representation (often handily available through `deriving Show`), but we often want fancy representations, in multiple flavours.
A flavour could be that we need some more data to accuratly show the data, a version for 'native' (e.g. in the target language) or as expression (in the meta-stuff, e.g. proofs)
These are presented here.
-}
import Data.List (intercalate)
-- | Fancy rendering, complementing the derived 'Show' data representation.
class ToString a where
	-- | Render as it would be written in the original data file:
	-- e.g. a parsetree as if it was target language,
	-- a meta-expression as if it came from a typesystem file.
	toParsable :: a -> String
	-- | Render as if this were in the opposite file:
	-- e.g. a parsetree as if it was a meta-expression in a typesystem file,
	-- a meta-expression as if it was target language.  This might not
	-- always be possible; defaults to 'toParsable'.
	toCoParsable :: a -> String
	toCoParsable = toParsable
	-- | Can contain more info, e.g. type info of the expression.
	-- Defaults to 'toParsable'.
	debug :: a -> String
	debug = toParsable
-- | Like 'ToString', but each rendering takes an extra options value
-- (e.g. contextual data needed to show the value accurately).
class ToString' opts a where
	show' :: opts -> a -> String
	toParsable' :: opts -> a -> String
	toCoParsable' :: opts -> a -> String
	debug' :: opts -> a -> String
-- | Any 'ToString' instance is a 'ToString'' instance with unit options.
-- Note that 'show'' here delegates to 'toParsable', not to 'Prelude.show'.
instance ToString a => ToString' () a where
	show' = const toParsable
	toParsable' = const toParsable
	toCoParsable' = const toCoParsable
	debug' = const debug
-- | Lists render element-wise, joined with the options string as separator.
-- 'show'' uses the derived 'Show'; the others use the 'ToString' renderings.
instance (Show a, ToString a) => ToString' String [a] where
	show' sep = intercalate sep . map show
	toParsable' sep = intercalate sep . map toParsable
	toCoParsable' sep = intercalate sep . map toCoParsable
	debug' sep = intercalate sep . map debug
-- | Print a value in its parsable rendering.  Written point-free for
-- consistency with the sibling 'printCoPars' and 'printDebug'.
printPars :: (ToString a) => a -> IO ()
printPars = putStrLn . toParsable

-- | Print a value in its parsable rendering, given rendering options.
printPars' :: (ToString' x a) => x -> a -> IO ()
printPars' x = putStrLn . toParsable' x
-- | Print a value in its co-parsable rendering.
printCoPars :: (ToString a) => a -> IO ()
printCoPars = putStrLn . toCoParsable
-- | Print a value in its debug rendering.
printDebug :: (ToString a) => a -> IO ()
printDebug = putStrLn . debug
| pietervdvn/ALGT2 | src/Utils/ToString.hs | gpl-3.0 | 2,117 | 52 | 9 | 430 | 505 | 262 | 243 | 33 | 1 |
module Main where
import Mudblood
import Mudblood.Screen.Gtk
import System.Environment
import MGStable
-- | Entry point: an optional single command-line argument selects the MG
-- profile directory (defaults to "mg"), then the GTK screen is started
-- with the stable MG configuration.
main :: IO ()
main = do
  args <- getArgs
  userpath <- initUserPath []
  profpath <- case args of
       [profile] -> initUserPath ["mg", profile]
       _ -> initUserPath ["mg"]
  execScreen "res/gui.glade" mkMBConfig (mkMBState NoTrigger mkMGState) (boot userpath profpath >> screen (return ()))
| talanis85/mudblood | main/Main-MG-Gtk.hs | gpl-3.0 | 424 | 0 | 13 | 96 | 139 | 71 | 68 | 13 | 2 |
-- | Simple graphviz output.
module Data.Graph.Inductive.Graphviz(
Orient(..),
graphviz, graphviz'
) where
import Data.Graph.Inductive.Graph
-- | Page orientation of the generated output.
data Orient = Portrait | Landscape deriving (Eq, Show)

-- | Graphviz @rotate@ attribute line for an orientation.
o2s :: Orient -> String
o2s orient = case orient of
    Portrait  -> "\trotate = \"0\"\n"
    Landscape -> "\trotate = \"90\"\n"
-- | Formats a graph for use in graphviz.
graphviz :: (Graph g, Show a, Show b) => g a b   -- ^ The graph to format
            -> String            -- ^ The title of the graph
            -> (Double, Double)  -- ^ The size
                                 -- of the page
            -> (Int, Int)        -- ^ The width and
                                 -- height of the page
                                 -- grid
            -> Orient            -- ^ The orientation of
                                 -- the graph.
            -> String
-- | Widen an Int to a Double (used for page-grid arithmetic below).
i2d :: Int -> Double
i2d = fromInteger . toInteger
-- The emitted DOT sets margin, page and size attributes; node/edge labels
-- come from 'sl' applied to the label values, and nodes with empty labels
-- are omitted entirely.
graphviz g t (w, h) p@(pw', ph') o =
    let n = labNodes g
        e = labEdges g
        ns = concatMap sn n
        es = concatMap se e
        -- In landscape the width/height roles are swapped.
        sz w' h' = if o == Portrait then show w'++","++show h' else show h'++","++show w'
        ps = show w++","++show h
        (pw, ph) = if o == Portrait then p else (ph', pw')
        --gs = show ((w*(i2d pw))-m)++","++show ((h*(i2d ph))-m)
        gs = sz (w*(i2d pw)) (h*(i2d ph))
    in "digraph "++t++" {\n"
            ++"\tmargin = \"0\"\n"
            ++"\tpage = \""++ps++"\"\n"
            ++"\tsize = \""++gs++"\"\n"
            ++o2s o
            ++"\tratio = \"fill\"\n"
            ++ns
            ++es
        ++"}"
    where sn (n, a) | sa == "" = ""
                    | otherwise = '\t':(show n ++ sa ++ "\n")
                    where sa = sl a
          se (n1, n2, b) = '\t':(show n1 ++ " -> " ++ show n2 ++ sl b ++ "\n")
-- | Format a graph for graphviz with reasonable defaults: title of \"fgl\",
-- 8.5x11 pages, one page, landscape orientation
graphviz' :: (Graph g, Show a, Show b) => g a b -> String
graphviz' g = graphviz g "fgl" (8.5,11.0) (1,1) Landscape
-- | Strip a matching pair of surrounding double or single quotes.
-- Fix: the original evaluated @last s@ on the tail without checking for
-- emptiness, so a lone quote character (@"\""@ or @"'"@) crashed.
-- As before, an unmatched leading quote is still dropped.
sq :: String -> String
sq ('"':s)
  | not (null s) && last s == '"' = init s
  | otherwise                     = s
sq ('\'':s)
  | not (null s) && last s == '\'' = init s
  | otherwise                      = s
sq s = s

-- | Render a value as a DOT label attribute; unit labels (shown as "()")
-- produce no attribute at all.
sl :: (Show a) => a -> String
sl a
  | label == "()" = ""
  | otherwise     = " [label = \"" ++ label ++ "\"]"
  where label = sq (show a)
| ckaestne/CIDE | CIDE_Language_Haskell/test/FGL-layout/Graph/Inductive/Graphviz.hs | gpl-3.0 | 2,082 | 105 | 13 | 620 | 817 | 438 | 379 | 50 | 3 |
-----------------------------------------------------------------------------
--
-- Module : GenImplicit
-- Copyright : (c) hokum
-- License : GPL3
--
-- Maintainer :
-- Stability : experimental
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
import Data.Aeson
import Control.Applicative
import Control.Monad
import qualified Control.Monad.RWS.Lazy as RWS_
import qualified Data.ByteString.Lazy as B
import qualified Data.ByteString.Char8 as B8
--import Network.HTTP.Conduit (simpleHttp)
import qualified Graphics.Implicit as I
import qualified Graphics.Implicit.Primitives as IP
import qualified Cmd_arguments as CmdA
import qualified System.Environment as SE
import Generation_branch
import Genimplicit_types
{----}
-- Read the local copy of the JSON file.
getJSON :: FilePath -> IO B.ByteString
getJSON = B.readFile
--}
{-
-- Read the remote copy of the JSON file.
getJSON :: IO B.ByteString
getJSON = simpleHttp jsonURL
--}--}
-- | Entry point: hand the raw command-line arguments to 'routine'.
main :: IO ()
main = do
   a<- SE.getArgs
   routine a
-- | Command-line driver: decode the Blender JSON export named by the
-- parsed arguments, render it to an implicit solid, echo the writer's
-- diagnostic messages, and write the result as an STL file.
-- Fixes: the redundant @|otherwise@ guard is removed, and the discarded
-- @mapM@ result is replaced by @mapM_@.
routine :: [String] -> IO ()
routine args = do
      d <- (eitherDecode <$> getJSON jif) :: IO (Either String BlenderData)
      case d of
         Left err -> putStrLn err   -- JSON decoding failed: report and stop
         Right bo -> do
            let (a, w) = RWS_.evalRWS (render bo) () gs
            -- mapM_ instead of mapM: the [()] result was never used.
            mapM_ B8.putStrLn w
            I.writeSTL mq stl_ef a
      where
      -- NOTE(review): these record-pattern lambdas are partial and crash if
      -- an option is Nothing; presumably CmdA.inputArgs supplies defaults
      -- for every field -- TODO confirm.
      mq = (\(CmdA.InputArguments {CmdA.mesh_quality = (Just d)}) -> d) inputArgs'
      jif = (\(CmdA.InputArguments {CmdA.json_import_file = (Just d)}) -> d) inputArgs'
      stl_ef = (\(CmdA.InputArguments {CmdA.stl_export_file = (Just d)}) -> d) inputArgs'
      gs = (Generation_settings
           {
            overall_union_rounding = (\(CmdA.InputArguments
                                         {CmdA.overall_union_rounding = (Just d)}
                                       ) -> d) inputArgs'
           })
      inputArgs' = CmdA.inputArgs $ CmdA.tag_DMap args
| Collocalini/GenImplicit | genimplicit/src/GenImplicit.hs | gpl-3.0 | 2,008 | 1 | 17 | 457 | 502 | 278 | 224 | 37 | 2 |
module Handler.Info.Contact where
import Import
------------------------------------------------------------------------
-- | GET handler for the contact page: sets the title and renders the
-- "info.contact" widget template.
getContactR :: Handler Html
getContactR = defaultLayout $ do
    setTitle "Betty : Contact"
    $(widgetFile "info.contact")
------------------------------------------------------------------------
| sajith/betty-web | Handler/Info/Contact.hs | agpl-3.0 | 323 | 0 | 10 | 34 | 45 | 24 | 21 | 6 | 1 |
-- Copied from: https://wiki.haskell.org/Existential_type
{-# LANGUAGE ExistentialQuantification #-}
module Main where
-- | Print the area of a unit circle, exercising the existential wrapper.
main :: IO ()
main = print (area (circle 1))
-- | Operations every shape supports.
class Shape_ a where
 perimeter :: a -> Double
 area :: a -> Double
-- | Existential wrapper: any 'Shape_' instance, with its concrete type hidden.
data Shape = forall a. Shape_ a => Shape a
type Radius = Double
type Side = Double
data Circle = Circle Radius
data Rectangle = Rectangle Side Side
data Square = Square Side
instance Shape_ Circle where
 perimeter (Circle r) = 2 * pi * r
 area (Circle r) = pi * r * r
instance Shape_ Rectangle where
 perimeter (Rectangle x y) = 2*(x + y)
 area (Rectangle x y) = x * y
instance Shape_ Square where
 perimeter (Square s) = 4*s
 area (Square s) = s*s
-- The wrapper itself is a shape: simply delegate to the wrapped value.
instance Shape_ Shape where
 perimeter (Shape shape) = perimeter shape
 area (Shape shape) = area shape
--
-- Smart constructor
--
-- | Wrap a circle of the given radius.
circle :: Radius -> Shape
circle = Shape . Circle

-- | Wrap a rectangle with the given side lengths.
rectangle :: Side -> Side -> Shape
rectangle x y = Shape (Rectangle x y)

-- | Wrap a square with the given side length.
square :: Side -> Shape
square = Shape . Square
-- | A heterogeneous list of shapes, possible thanks to the existential wrapper.
shapes :: [Shape]
shapes = [circle 2.4, rectangle 3.1 4.4, square 2.1]
| azadbolour/boardgame | haskell-server/trial/ExistentialTrial.hs | agpl-3.0 | 1,158 | 0 | 10 | 283 | 447 | 231 | 216 | 35 | 1 |
module Bob (responseFor) where
import Data.Char (isSpace, isUpper, isAlpha)
-- | The kinds of remark Bob distinguishes.
data Prompt = Silence | Yell | Question | Other

-- | Categorise a remark by its form: whitespace-only, all-caps (with at
-- least one letter), ending in a question mark, or anything else.
classify :: String -> Prompt
classify s
  | all isSpace s = Silence
  | shouting      = Yell
  | last s == '?' = Question   -- safe: a non-Silence remark is non-empty
  | otherwise     = Other
  where
    letters  = filter isAlpha s
    shouting = not (null letters) && all isUpper letters

-- | Bob's stock reply for each kind of prompt.
response :: Prompt -> String
response p = case p of
  Silence  -> "Fine. Be that way!"
  Yell     -> "Woah, chill out!"
  Question -> "Sure."
  Other    -> "Whatever."

-- | Reply to a remark the way Bob would.
responseFor :: String -> String
responseFor = response . classify
| mscoutermarsh/exercism_coveralls | assignments/haskell/bob/example.hs | agpl-3.0 | 560 | 0 | 11 | 129 | 186 | 96 | 90 | 15 | 1 |
{-# LANGUAGE DerivingVia, StandaloneDeriving #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module ProjectM36.Serialise.Error where
import ProjectM36.Error
import Codec.Winery
import ProjectM36.Serialise.Base ()
import ProjectM36.Serialise.AtomFunctionError ()
import ProjectM36.Serialise.DatabaseContextFunctionError ()
-- Standalone winery 'Serialise' instances for the error types, each derived
-- via the 'WineryVariant' generic sum-type encoding.
deriving via WineryVariant RelationalError instance Serialise RelationalError
deriving via WineryVariant MergeError instance Serialise MergeError
deriving via WineryVariant ScriptCompilationError instance Serialise ScriptCompilationError
deriving via WineryVariant PersistenceError instance Serialise PersistenceError
deriving via WineryVariant SchemaError instance Serialise SchemaError
deriving via WineryVariant ImportError' instance Serialise ImportError'
| agentm/project-m36 | src/lib/ProjectM36/Serialise/Error.hs | unlicense | 777 | 0 | 6 | 68 | 136 | 73 | 63 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TypeFamilies #-}
module Language.K3.Metaprogram.Evaluation where
import Control.Applicative
import Control.Arrow
import Control.Monad
import Control.Monad.State
import Data.Either
import Data.Functor.Identity
import Data.List
import qualified Data.Map as Map
import Data.Maybe
import Data.Tree
import Debug.Trace
import Language.K3.Core.Annotation
import Language.K3.Core.Common
import Language.K3.Core.Declaration
import Language.K3.Core.Expression
import Language.K3.Core.Literal
import Language.K3.Core.Metaprogram
import Language.K3.Core.Type
import Language.K3.Core.Utils
import qualified Language.K3.Core.Constructor.Type as TC
import qualified Language.K3.Core.Constructor.Expression as EC
import qualified Language.K3.Core.Constructor.Declaration as DC
import qualified Language.K3.Core.Constructor.Literal as LC
import Language.K3.Metaprogram.DataTypes
import Language.K3.Metaprogram.MetaHK3 hiding ( localLog, localLogAction )
import Language.K3.Parser.ProgramBuilder ( defaultRoleName )
import Language.K3.Parser.DataTypes
import Language.K3.Analysis.Core
import Language.K3.Analysis.HMTypes.Inference hiding ( localLog, localLogAction )
import Language.K3.Utils.Logger
import Language.K3.Utils.Pretty
-- | Gating flag passed to the logging helpers below.
traceLogging :: Bool
traceLogging = False
-- | Log a message via 'logVoid', gated on 'traceLogging'.
localLog :: (Functor m, Monad m) => String -> m ()
localLog = logVoid traceLogging
-- | Run an action under 'logAction', gated on 'traceLogging'.
localLogAction :: (Functor m, Monad m) => (Maybe a -> Maybe String) -> m a -> m a
localLogAction = logAction traceLogging
{- Top-level AST transformations -}
-- | Run the metaprogram evaluator to a fixpoint: splice data annotations,
-- analyze (type-check by default), splice control annotations, repair, and
-- repeat until the program no longer changes.  The analysis and repair
-- passes are overridable via the two optional function arguments.
evalMetaprogram :: Maybe MPEvalOptions
                -> Maybe (K3 Declaration -> GeneratorM (K3 Declaration))
                -> Maybe (K3 Declaration -> GeneratorM (K3 Declaration))
                -> K3 Declaration -> IO (Either String (K3 Declaration))
evalMetaprogram evalOpts analyzeFOpt repairFOpt prog =
    runGeneratorM initGState $! synthesizedProg prog
  where
    synthesizedProg mp = do
      localLog $! generatorInput mp
      void $! modifyGEnvF_ $! \_ -> return emptyGeneratorEnv
      pWithDataAnns <- runMpGenerators mp
      pWithMDataAnns <- applyDAnnGens pWithDataAnns
      pWithDADecls <- modifyGDeclsF $! \gd -> addDecls gd pWithMDataAnns
      analyzedP <- analyzeF pWithDADecls
      localLog $! debugAnalysis analyzedP
      pWithMCtrlAnns <- generatorWithEvalOptions (flip applyCAnnGens analyzedP)
      pWithCADecls <- modifyGDeclsF $! \gd -> addDecls gd pWithMCtrlAnns
      pRepaired <- repairF pWithCADecls
      if pRepaired == mp then return pRepaired
      else rcr pRepaired -- Tail recursive fixpoint
    initGState = maybe emptyGeneratorState mkGeneratorState evalOpts
    analyzeF = maybe defaultMetaAnalysis id analyzeFOpt
    repairF = maybe defaultMetaRepair id repairFOpt
    rcr p = synthesizedProg p
    -- Generated declarations are appended to the default top-level role.
    addDecls genDecls p@(tag -> DRole n)
      | n == defaultRoleName =
          let (dd, cd) = generatorDeclsToList genDecls
          in return $! (emptyGeneratorDecls, Node (DRole n :@: annotations p) $! children p ++ dd ++ cd)
    addDecls _ p = Left . boxToString $ [addErrMsg] %$ prettyLines p
    generatorInput = metalog "Evaluating metaprogram "
    debugAnalysis = metalog "Analyzed metaprogram "
    metalog msg p = boxToString $ [msg] %$ (indent 2 $ prettyLines p)
    addErrMsg = "Invalid top-level role resulting from metaprogram evaluation"
-- | Default analysis pass: strip existing type annotations from all
-- expressions, re-run type inference, and translate the inferred types
-- back onto the program.  Inference failures are raised via 'throwG'.
defaultMetaAnalysis :: K3 Declaration -> GeneratorM (K3 Declaration)
defaultMetaAnalysis p = do
  strippedP <- mapExpression removeTypes p
  liftError (liftError return . translateProgramTypes . fst) $! inferProgramTypes strippedP
  where
    -- | Match any type annotation except pattern types which are user-defined in patterns.
    removeTypes e = return $! stripExprAnnotations (\a -> isETypeOrBound a || isEQType a) (const False) e
    liftError = either throwG
-- | Default repair pass: structural repair of the program, discarding the
-- report component of 'repairProgram' (called here without an optional
-- second argument).
defaultMetaRepair :: K3 Declaration -> GeneratorM (K3 Declaration)
defaultMetaRepair prog = return $! snd $! repairProgram "metaprogram" Nothing prog

-- | Analysis pass that leaves the program untouched.
nullMetaAnalysis :: K3 Declaration -> GeneratorM (K3 Declaration)
nullMetaAnalysis = return
-- | Adds parametric annotations as generator functions in the generator state.
-- | Register generator functions for parametric data and control
-- annotations in the generator environment; a data annotation with no
-- splice parameters is rewritten directly to a plain annotation.
-- Duplicate registrations for the same name are rejected.
runMpGenerators :: K3 Declaration -> GeneratorM (K3 Declaration)
runMpGenerators mp = mapTree evalMPDecl mp
  where
    evalMPDecl :: [K3 Declaration] -> K3 Declaration -> GeneratorM (K3 Declaration)
    evalMPDecl ch d@(tag -> DGenerator (MPDataAnnotation n [] tvars (partitionEithers -> ([], annMems)))) =
      rebuildNode (DC.dataAnnotation n tvars annMems) (annotations d) ch
    evalMPDecl ch d@(tag -> DGenerator mpd@(MPDataAnnotation n svars tvars mems)) =
      let extendGen genEnv =
            case lookupDGenE n genEnv of
              Nothing -> Right $! addDGenE n (annotationSplicer n svars tvars mems) genEnv
              Just _ -> Left $! unwords ["Duplicate metaprogrammed data annotation for", n]
      in modifyGEnvF_ extendGen >> rebuildNode (DC.generator mpd) (annotations d) ch
    evalMPDecl ch d@(tag -> DGenerator mpd@(MPCtrlAnnotation n svars rewriteRules extensions)) =
      let extendGen genEnv =
            case lookupCGenE n genEnv of
              Nothing -> Right $! addCGenE n (exprPatternMatcher svars rewriteRules extensions) genEnv
              Just _ -> Left $! unwords ["Duplicate metaprogrammed control annotation for", n]
      in modifyGEnvF_ extendGen >> rebuildNode (DC.generator mpd) (annotations d) ch
    evalMPDecl ch (tag &&& annotations -> (t,anns)) = return $! Node (t :@: anns) ch
    rebuildNode (Node (t :@: anns) _) nanns ch = return $! Node (t :@: (nub $! anns ++ nanns)) ch
-- | Apply metaprogrammed data-annotation generators everywhere annotations
-- may occur: declarations, annotation member declarations, expressions
-- (empty-collection constants), types (collections) and literals.
applyDAnnGens :: K3 Declaration -> GeneratorM (K3 Declaration)
applyDAnnGens mp = mapProgram applyDAnnDecl applyDAnnMemDecl applyDAnnExprTree (Just applyDAnnTypeTree) mp
  where
    applyDAnnExprTree e = mapTree applyDAnnExpr e
    applyDAnnTypeTree t = mapTree applyDAnnType t
    applyDAnnLitTree l = mapTree applyDAnnLiteral l
    applyDAnnDecl d = mapM dApplyAnn (annotations d) >>= rebuildNodeWithAnns d
    -- Annotation members: recurse into their types and optional initializers.
    applyDAnnMemDecl (Lifted p n t eOpt anns) = do
      nanns <- mapM dApplyAnn anns
      nt <- applyDAnnTypeTree t
      neOpt <- maybe (return Nothing) (\e -> applyDAnnExprTree e >>= return . Just) eOpt
      return $! Lifted p n nt neOpt nanns
    applyDAnnMemDecl (Attribute p n t eOpt anns) = do
      nanns <- mapM dApplyAnn anns
      nt <- applyDAnnTypeTree t
      neOpt <- maybe (return Nothing) (\e -> applyDAnnExprTree e >>= return . Just) eOpt
      return $! Attribute p n nt neOpt nanns
    applyDAnnMemDecl (MAnnotation p n anns) = mapM dApplyAnn anns >>= return . MAnnotation p n
    applyDAnnExpr ch n@(tag -> EConstant (CEmpty t)) = do
      nt <- applyDAnnTypeTree t
      nanns <- mapM (eApplyAnn t) $! annotations n
      rebuildNode (EC.constant $! CEmpty nt) (Just nanns) ch
    applyDAnnExpr ch n = rebuildNode n Nothing ch
    applyDAnnType ch n@(tag -> TCollection) = do
      nanns <- mapM (tApplyAnn $! head $! children n) $! annotations n
      rebuildNode (TC.collection $! head $! children n) (Just nanns) ch
    applyDAnnType ch n = rebuildNode n Nothing ch
    applyDAnnLiteral ch n@(tag -> LEmpty t) = do
      nt <- applyDAnnTypeTree t
      nanns <- mapM (lApplyAnn t) $! annotations n
      rebuildNode (LC.empty nt) (Just nanns) ch
    applyDAnnLiteral ch n@(tag -> LCollection t) = do
      nt <- applyDAnnTypeTree t
      nanns <- mapM (lApplyAnn t) $! annotations n
      rebuildNode (LC.collection nt $! children n) (Just nanns) ch
    applyDAnnLiteral ch n = rebuildNode n Nothing ch
    -- Property literals are themselves spliced recursively.
    dApplyAnn (DProperty (Left (n, Just l))) = applyDAnnLitTree l >>= return . DProperty . Left . (n,) . Just
    dApplyAnn (DProperty (Right (n, Just l))) = applyDAnnLitTree l >>= return . DProperty . Right . (n,) . Just
    dApplyAnn x = return x
    eApplyAnn t (EApplyGen False n senv) = applyDAnnotation EAnnotation n senv t
    eApplyAnn _ (EProperty (Left (n, Just l))) = applyDAnnLitTree l >>= return . EProperty . Left . (n,) . Just
    eApplyAnn _ (EProperty (Right (n, Just l))) = applyDAnnLitTree l >>= return . EProperty . Right . (n,) . Just
    eApplyAnn _ x = return x
    tApplyAnn t (TApplyGen n senv) = applyDAnnotation TAnnotation n senv t
    tApplyAnn _ x = return x
    lApplyAnn t (LApplyGen n senv) = applyDAnnotation LAnnotation n senv t
    lApplyAnn _ x = return x
    rebuildNode (Node (t :@: anns) _) Nothing ch = return $! Node (t :@: anns) ch
    rebuildNode (Node (t :@: anns) _) (Just nanns) ch = return $! Node (t :@: (nub $! anns ++ nanns)) ch
    rebuildNodeWithAnns (Node (t :@: _) ch) anns = return $! Node (t :@: anns) ch
-- | Evaluate one data-annotation application: evaluate its splice bindings
-- in the current context, look up the named generator, and either
-- reference an existing annotation (SGNamed) or splice a fresh annotation
-- declaration and register it (SGDecl).
applyDAnnotation :: AnnotationCtor a -> Identifier -> SpliceEnv -> K3 Type -> GeneratorM (Annotation a)
applyDAnnotation aCtor annId sEnv t = do
    (gEnv, sCtxt) <- get >>= return . (getGeneratorEnv &&& getSpliceContext)
    nsEnv <- evalBindings sCtxt sEnv
    let postSCtxt = pushSCtxt nsEnv sCtxt
    maybe (spliceLookupErr annId)
          (expectSpliceAnnotation postSCtxt . ($ nsEnv))
          $! lookupDSPGenE annId gEnv
  where
    -- Content-dependent generators are first specialized on the host type.
    expectSpliceAnnotation sctxt (SRGenDecl p) = do
      declGen <- p
      case declGen of
        SGContentDependent contentF -> contentF t >>= processSpliceDGen sctxt
        _ -> processSpliceDGen sctxt declGen
    expectSpliceAnnotation _ _ = throwG "Invalid data annotation splice"
    processSpliceDGen sctxt declGen = case declGen of
        SGNamed n -> return $! aCtor n
        SGDecl decl ->
          case tag decl of
            DDataAnnotation n tvs mems -> do
              nmems <- generateInSpliceCtxt sctxt $! mapM applyDAnnMemDecl mems
              ndecl <- bindDAnnVars sctxt $! (DC.dataAnnotation n tvs nmems) @<- annotations decl
              modifyGDeclsF_ (Right . addDGenDecl annId ndecl) >> return (aCtor n)
            _ -> throwG $ boxToString $ ["Invalid data annotation splice"] %+ prettyLines decl
        _ -> throwG $ boxToString $ ["Invalid splice data generator"]
    spliceLookupErr n = throwG $ unwords ["Could not find data macro", n]
-- | Apply control-annotation generators to every expression in the
-- program.  In serial mode ('mpSerial' true) at most one application is
-- rewritten per traversal (the first on a node whose children did not
-- change); otherwise all applications on a node are folded through.
applyCAnnGens :: MPEvalOptions -> K3 Declaration -> GeneratorM (K3 Declaration)
applyCAnnGens opts mp = foldExpression applyCAnnExprTree False mp >>= return . snd
  where
    applyCAnnExprTree changed e = foldMapRebuildTree (applyCAnnExpr $! mpSerial opts) changed e
    applyCAnnExpr True chChanged ch (Node (t :@: anns) _) =
      if or chChanged
        then return (True, Node (t :@: anns) ch)
        else let (appAnns, rest) = partition isEApplyGen anns in
             case appAnns of
               h:tl -> do
                 (Node (nt :@: nanns) nch) <- eApplyAnn (Node (t :@: rest) ch) h
                 return (True, Node (nt :@: (nanns ++ tl)) nch)
               [] -> return (False, Node (t :@: rest) ch)
    applyCAnnExpr False _ ch (Node (t :@: anns) _) =
      let (appAnns, rest) = partition isEApplyGen anns
      in foldM eApplyAnn (Node (t :@: rest) ch) appAnns >>= return . (False,)
    eApplyAnn e (EApplyGen True n senv) = applyCAnnotation e n senv
    eApplyAnn e _ = return e
-- | Apply a single control annotation to a target expression: evaluate its
-- splice bindings, look up the rewrite generator, and either pass the
-- expression through (SRExpr) or substitute the rewritten expression and
-- register any generated declarations (SRRewrite).
applyCAnnotation :: K3 Expression -> Identifier -> SpliceEnv -> ExprGenerator
applyCAnnotation targetE cAnnId sEnv = do
    (gEnv, sCtxt) <- get >>= return . (getGeneratorEnv &&& getSpliceContext)
    nsEnv <- evalBindings sCtxt sEnv
    let postSCtxt = pushSCtxt nsEnv sCtxt
    debugApply sCtxt nsEnv
    maybe (spliceLookupErr cAnnId)
          (\g -> injectRewrite postSCtxt $! g targetE nsEnv)
          $! lookupERWGenE cAnnId gEnv
  where
    injectRewrite sctxt (SRExpr p) = localLog debugPassThru >> p >>= bindEAnnVars sctxt
    injectRewrite sctxt (SRRewrite (p, rwsEnv)) = do
      let nsctxt = pushSCtxt rwsEnv sctxt
      (rewriteE, decls) <- p
      rewriteESub <- bindEAnnVars nsctxt rewriteE
      declsSub <- mapM (bindDAnnVars nsctxt) decls
      splicedDeclsSub <- generateInSpliceCtxt nsctxt $! mapM spliceDecl declsSub
      localLog (debugRewrite rewriteESub)
      modifyGDeclsF_ (Right . addCGenDecls cAnnId splicedDeclsSub) >> return rewriteESub
    injectRewrite _ _ = throwG "Invalid control annotation rewrite"
    debugApply sCtxt nsEnv =
      localLog $ boxToString $ ["Applying control annotation " ++ cAnnId ++ " in context "]
                                 %$ prettyLines sCtxt %$ ["with splice env"] %$ prettyLines nsEnv
    debugPassThru = unwords ["Passed on generator", cAnnId]
    debugRewrite e = boxToString $ [unwords ["Generator", cAnnId, "rewrote as "]] %+ prettyLines e
    spliceLookupErr n = throwG $ unwords ["Could not find control macro", n]
-- | Evaluate every binding of a splice environment in the given splice
--   context. Splice variables are chased through the context until a
--   non-variable value is reached; all other splice values are spliced
--   recursively through their respective splicing functions.
evalBindings :: SpliceContext -> SpliceEnv -> GeneratorM SpliceEnv
evalBindings sctxt senv = evalMap (generateInSpliceCtxt sctxt) senv
  where
    -- Resolve a splice variable. A label equal to the variable's own name
    -- is treated as a variable and chased; further variables are chased;
    -- any other value is final.
    eval (SVar i) = do
      sv <- expectEmbeddingSplicer i
      case sv of
        SLabel j | i == j -> eval_var $! SVar j
        SVar _ -> eval_var sv
        _ -> return sv
      -- Chase indirections; stop when chasing makes no further progress.
      where eval_var sv@(chase -> csv) = if csv == sv then return sv else eval csv

    eval (SLabel i) = spliceIdentifier i >>= return . SLabel
    eval (SBinder b) = spliceBinder b >>= return . SBinder
    eval (SType t) = spliceType t >>= return . SType
    eval (SExpr e) = spliceExpression e >>= return . SExpr
    eval (SDecl d) = spliceDeclaration d >>= return . SDecl
    eval (SLiteral l) = spliceLiteral l >>= return . SLiteral
    eval (SRecord nvs) = evalMap id nvs >>= return . SRecord
    eval (SList svs) = mapM eval svs >>= return . SList

    -- Evaluate every entry of a splice-value map, wrapping each per-entry
    -- evaluation with f (e.g. to run it under the splice context).
    evalMap f m = mapM (\(k,v) -> f (eval v) >>= return . (k,)) (Map.toList m) >>= return . Map.fromList

    -- Follow variable-to-variable links through the splice context.
    chase (SVar i) = maybe (SVar i) chase $! lookupSCtxt i sctxt
    chase x = x
-- TODO: handle LApplyGen in DProperty
-- | Re-evaluate the splice environments stored inside a declaration's
--   expression- and type-level generator annotations, using the given
--   splice context.
bindDAnnVars :: SpliceContext -> K3 Declaration -> DeclGenerator
bindDAnnVars ctx decl =
  mapAnnotation return (evalEApply ctx) (evalTApply ctx) decl

-- | As 'bindDAnnVars', but over the annotations of an expression tree.
bindEAnnVars :: SpliceContext -> K3 Expression -> ExprGenerator
bindEAnnVars ctx expr =
  mapExprAnnotation (evalEApply ctx) (evalTApply ctx) expr

-- | Rebind the splice environment carried by an expression generator
--   annotation; other annotations are untouched.
evalEApply :: SpliceContext -> Annotation Expression -> GeneratorM (Annotation Expression)
evalEApply ctx (EApplyGen c n csenv) = do
  nsenv <- evalBindings ctx csenv
  return (EApplyGen c n nsenv)
evalEApply _ ann = return ann

-- | Rebind the splice environment carried by a type generator annotation;
--   other annotations are untouched.
evalTApply :: SpliceContext -> Annotation Type -> GeneratorM (Annotation Type)
evalTApply ctx (TApplyGen n csenv) = do
  nsenv <- evalBindings ctx csenv
  return (TApplyGen n nsenv)
evalTApply _ ann = return ann
{- Splice-checking -}
-- TODO: match splice parameter types (e.g., types vs label-types vs exprs.)
-- | Restrict a splice environment to the entries named by the declared
--   splice parameters, dropping any binding not mentioned by a parameter.
validateSplice :: [TypedSpliceVar] -> SpliceEnv -> SpliceEnv
validateSplice params env = Map.filterWithKey keep env
  where
    names    = map snd params
    keep k _ = k `elem` names
{- Splicer construction -}
-- | Build a splicer producing a global declaration: the declared type and
--   the optional initializer expression are each spliced under the splice
--   environment supplied at invocation time.
globalSplicer :: Identifier -> K3 Type -> Maybe (K3 Expression) -> K3Generator
globalSplicer name ty initOpt = Splicer $! \senv -> SRDecl $! do
  sty <- generateInSpliceEnv senv $! spliceType ty
  sInit <- case initOpt of
             Nothing -> return Nothing
             Just e  -> do ne <- generateInSpliceEnv senv (spliceExpression e)
                           return (Just ne)
  return $! DC.global name sty sInit
-- | Build a splicer for a data annotation. The invocation's splice
--   environment is first restricted to the declared splice parameters, and
--   the members are spliced under it. Some annotations must be specialized
--   per content type (see isContentDependent below), so their final
--   declaration is deferred until the content type is supplied.
annotationSplicer :: Identifier -> [TypedSpliceVar] -> [TypeVarDecl] -> [Either MPAnnMemDecl AnnMemDecl] -> K3Generator
annotationSplicer n spliceParams typeParams mems = Splicer $! \spliceEnv -> SRGenDecl $! do
    let vspliceEnv = validateSplice spliceParams spliceEnv
    nmems <- generateInSpliceEnv vspliceEnv $! mapM (either spliceMPAnnMem (\m -> spliceAnnMem m >>= return . (:[]))) mems
    if isContentDependent
      then return $! SGContentDependent $! \t -> withGUID n vspliceEnv (Just t) $! onGenerated nmems
      else withGUID n vspliceEnv Nothing $! onGenerated nmems
  where
    -- A previously generated instance (Left) is referenced by its mangled
    -- name only; a fresh id (Right) yields the full declaration.
    onGenerated _ (Left i) = SGNamed $! concat [n, "_", show i]
    onGenerated nmems (Right i) = SGDecl $! DC.dataAnnotation (concat [n, "_", show i]) typeParams $! concat nmems

    -- NOTE(review): content dependence is detected by substring matching on
    -- the annotation name; presumably these are the map-variant collection
    -- annotations that require per-content specialization — confirm.
    isContentDependent = any (`isInfixOf` n) ["VMapIndex", "MapE", "SortedMapE", "MapCE"]
-- | Wrap an expression template as a splicer: each invocation splices the
--   template under that invocation's splice environment.
exprSplicer :: K3 Expression -> K3Generator
exprSplicer template =
  Splicer $! \senv -> SRExpr $! generateInSpliceEnv senv $! spliceExpression template

-- | As 'exprSplicer', but for type templates.
typeSplicer :: K3 Type -> K3Generator
typeSplicer template =
  Splicer $! \senv -> SRType $! generateInSpliceEnv senv $! spliceType template
{- Splice evaluation -}
-- | Splice an entire program: declarations, annotation members, expressions
--   and types are processed by their respective splicing functions.
spliceDeclaration :: K3 Declaration -> DeclGenerator
spliceDeclaration prog =
  mapProgram spliceDecl spliceAnnMem spliceExpression (Just spliceType) prog
-- | Splice a single declaration node. Globals and triggers have their
--   name, type and body spliced; data annotations splice their members;
--   typedefs splice the aliased type. Declaration annotations are spliced
--   in every case; children are left to the surrounding traversal.
spliceDecl :: K3 Declaration -> DeclGenerator
spliceDecl d = case d of
    (tag -> DGlobal n t eOpt) -> do
      ((nn, nt, neOpt), nanns) <- spliceDeclParts n t eOpt >>= newAnns d
      return $! Node (DGlobal nn nt neOpt :@: nanns) $! children d
    (tag -> DTrigger n t e) -> do
      ((nn, nt, Just ne), nanns) <- spliceDeclParts n t (Just e) >>= newAnns d
      return $! Node (DTrigger nn nt ne :@: nanns) $! children d
    (tag -> DDataAnnotation n tvars mems) ->
      mapM spliceAnnMem mems >>= newAnns d >>= \(nmems, nanns) ->
        return $! Node (DDataAnnotation n tvars nmems :@: nanns) $! children d
    (tag -> DTypeDef n t) ->
      spliceType t >>= newAnns d >>= \(nt, nanns) -> return $! Node (DTypeDef n nt :@: nanns) $! children d
    -- Any other declaration: splice only its annotations.
    (Node (tg :@: _) ch) ->
      newAnns d () >>= \(_,nanns) -> return $! Node (tg :@: nanns) ch
  where
    -- Pair a spliced payload with the spliced annotations of the original.
    newAnns d' v = mapM spliceDAnnotation (annotations d') >>= return . (v,)

-- | Expand a metaprogrammed annotation member declaration. The loop target
--   c is resolved first (chasing splice variables through the context);
--   for each element of the resulting list, the member templates are
--   spliced with the loop variable i bound to that element.
spliceMPAnnMem :: MPAnnMemDecl -> GeneratorM [AnnMemDecl]
spliceMPAnnMem (MPAnnMemDecl i c mems) = spliceWithValue c
  where
    spliceWithValue = \case
      SVar v -> generatorWithSCtxt $! \sctxt -> do
        sv <- expectEmbeddingSplicer v
        case sv of
          SLabel v' -> maybe (lookupErr v') spliceWithValue $! lookupSCtxt v' sctxt
          _ -> spliceWithValue sv
      SList svs -> mapM (\sv -> generateInExtendedSpliceEnv i sv $! mapM spliceAnnMem mems) svs >>= return . concat
      v -> throwG $ boxToString $ ["Invalid splice value in member generator "] %+ prettyLines v

    lookupErr v = throwG $ "Invalid loop target in member generator: " ++ show v

-- | Splice a data annotation member: lifted/attribute members splice their
--   name, type, optional initializer and annotations; member annotations
--   splice their annotations only.
spliceAnnMem :: AnnMemDecl -> AnnMemGenerator
spliceAnnMem = \case
    Lifted p n t eOpt anns -> spliceDeclParts n t eOpt >>= newAnns anns >>= \((sn, st, seOpt), nanns) -> return $! Lifted p sn st seOpt nanns
    Attribute p n t eOpt anns -> spliceDeclParts n t eOpt >>= newAnns anns >>= \((sn, st, seOpt), nanns) -> return $! Attribute p sn st seOpt nanns
    MAnnotation p n anns -> newAnns anns () >>= \(_,nanns) -> return $! MAnnotation p n nanns
  where newAnns anns v = mapM spliceDAnnotation anns >>= return . (v,)
-- | Splice the name, type, and optional body shared by global and trigger
--   declarations, returning the three spliced components.
spliceDeclParts :: Identifier -> K3 Type -> Maybe (K3 Expression) -> GeneratorM (Identifier, K3 Type, Maybe (K3 Expression))
spliceDeclParts name ty bodyOpt = do
  sName <- spliceIdentifier name
  sTy   <- spliceType ty
  sBody <- case bodyOpt of
             Nothing -> return Nothing
             Just e  -> spliceExpression e >>= \ne -> return (Just ne)
  return (sName, sTy, sBody)
-- | Splice an expression tree bottom-up. Identifier-bearing nodes splice
--   their identifiers; variables may be replaced wholesale by an embedded
--   expression splice; empty-collection constants splice their element
--   type. Expression annotations are spliced at every node.
spliceExpression :: K3 Expression -> ExprGenerator
spliceExpression = mapTree doSplice
  where
    -- A variable may expand to an arbitrary expression; re-attach the
    -- original node's (spliced) annotations to the replacement.
    doSplice [] e@(tag -> EVariable i) = expectExprSplicer i >>= newAnns e >>= \(ne, nanns) -> return $! foldl (@+) ne nanns
    doSplice ch e@(tag -> ERecord ids) = mapM expectIdSplicer ids >>= newAnns e >>= \(nids, nanns) -> return $! Node (ERecord nids :@: nanns) ch
    doSplice ch e@(tag -> EProject i) = expectIdSplicer i >>= newAnns e >>= \(nid, nanns) -> return $! Node (EProject nid :@: nanns) ch
    doSplice ch e@(tag -> EAssign i) = expectIdSplicer i >>= newAnns e >>= \(nid, nanns) -> return $! Node (EAssign nid :@: nanns) ch
    doSplice ch e@(tag -> ELambda i) = expectIdSplicer i >>= newAnns e >>= \(nid, nanns) -> return $! Node (ELambda nid :@: nanns) ch
    doSplice ch e@(tag -> ELetIn i) = expectIdSplicer i >>= newAnns e >>= \(nid, nanns) -> return $! Node (ELetIn nid :@: nanns) ch
    doSplice ch e@(tag -> ECaseOf i) = expectIdSplicer i >>= newAnns e >>= \(nid, nanns) -> return $! Node (ECaseOf nid :@: nanns) ch
    doSplice ch e@(tag -> EBindAs b) = expectBindSplicer b >>= newAnns e >>= \(nb, nanns) -> return $! Node (EBindAs nb :@: nanns) ch
    doSplice ch e@(tag -> EConstant (CEmpty ct)) = spliceType ct >>= newAnns e >>= \(nct, nanns) -> return $! Node (EConstant (CEmpty nct) :@: nanns) ch
    -- Default: splice annotations only.
    doSplice ch e@(Node (tg :@: _) _) = newAnns e () >>= \(_,nanns) -> return $! Node (tg :@: nanns) ch

    newAnns e v = mapM spliceEAnnotation (annotations e) >>= return . (v,)

-- | Splice a type tree bottom-up. Declared type variables may expand to a
--   full type; record field labels are spliced. Type annotations are kept
--   as-is (not spliced).
spliceType :: K3 Type -> TypeGenerator
spliceType = mapTree doSplice
  where
    doSplice [] t@(tag -> TDeclaredVar i) = expectTypeSplicer i >>= \nt -> return $! foldl (@+) nt $! annotations t
    doSplice ch t@(tag -> TRecord ids) = mapM spliceIdentifier ids >>= \nids -> return $! Node (TRecord nids :@: annotations t) ch
    doSplice ch (Node tg _) = return $! Node tg ch

-- | Splice a literal tree bottom-up. String literals may expand via a
--   literal splice; record labels and collection element types are spliced.
spliceLiteral :: K3 Literal -> LiteralGenerator
spliceLiteral = mapTree doSplice
  where doSplice [] l@(tag -> LString s) = expectLiteralSplicer s >>= \ns -> return $! foldl (@+) ns $! annotations l
        doSplice ch l@(tag -> LRecord ids) = mapM expectIdSplicer ids >>= \nids -> return $! Node (LRecord nids :@: annotations l) ch
        doSplice ch l@(tag -> LEmpty ct) = spliceType ct >>= \nct -> return $! Node (LEmpty nct :@: annotations l) ch
        doSplice ch l@(tag -> LCollection ct) = spliceType ct >>= \nct -> return $! Node (LCollection nct :@: annotations l) ch
        doSplice ch (Node tg _) = return $! Node tg ch
-- | Splice a single identifier (an alias for 'expectIdSplicer').
spliceIdentifier :: Identifier -> GeneratorM Identifier
spliceIdentifier = expectIdSplicer

-- | Splice every identifier occurring inside a binder, preserving shape.
spliceBinder :: Binder -> GeneratorM Binder
spliceBinder b = case b of
    BIndirection i -> do ni <- spliceIdentifier i
                         return (BIndirection ni)
    BTuple is      -> do nis <- mapM spliceIdentifier is
                         return (BTuple nis)
    BRecord ijs    -> do nijs <- mapM spliceBoth ijs
                         return (BRecord nijs)
    BSplice i      -> do ni <- spliceIdentifier i
                         return (BSplice ni)
  where spliceBoth (i, j) = (,) <$> spliceIdentifier i <*> spliceIdentifier j
-- | Splice the literal payload of a declaration property annotation; all
--   other declaration annotations pass through unchanged.
spliceDAnnotation :: Annotation Declaration -> DeclAnnGenerator
spliceDAnnotation (DProperty (Left (n, Just l))) = do
  nl <- spliceLiteral l
  return $ DProperty $ Left (n, Just nl)
spliceDAnnotation (DProperty (Right (n, Just l))) = do
  nl <- spliceLiteral l
  return $ DProperty $ Right (n, Just nl)
spliceDAnnotation da = return da

-- | As 'spliceDAnnotation', but for expression property annotations.
spliceEAnnotation :: Annotation Expression -> ExprAnnGenerator
spliceEAnnotation (EProperty (Left (n, Just l))) = do
  nl <- spliceLiteral l
  return $ EProperty $ Left (n, Just nl)
spliceEAnnotation (EProperty (Right (n, Just l))) = do
  nl <- spliceLiteral l
  return $ EProperty $ Right (n, Just nl)
spliceEAnnotation ea = return ea
-- | Parse an identifier as splice-embedding syntax and evaluate any
--   embedded parts in the current splice context, yielding an identifier.
expectIdSplicer :: Identifier -> GeneratorM Identifier
expectIdSplicer ident = generatorWithSCtxt $! \ctx ->
  liftParser ident idFromParts >>= evalIdPartsSplice ctx

-- | Parse and evaluate a type-valued splice embedding.
expectTypeSplicer :: Identifier -> TypeGenerator
expectTypeSplicer ident = generatorWithSCtxt $! \ctx ->
  liftParser ident typeEmbedding >>= evalTypeSplice ctx

-- | Parse and evaluate an expression-valued splice embedding.
expectExprSplicer :: Identifier -> ExprGenerator
expectExprSplicer ident = generatorWithSCtxt $! \ctx ->
  liftParser ident exprEmbedding >>= evalExprSplice ctx

-- | Parse and evaluate a literal-valued splice embedding.
expectLiteralSplicer :: String -> LiteralGenerator
expectLiteralSplicer str = generatorWithSCtxt $! \ctx ->
  liftParser str literalEmbedding >>= evalLiteralSplice ctx

-- | Parse an identifier and evaluate it as a general splice value; an
--   identifier with no embedded parts becomes a plain label.
expectEmbeddingSplicer :: Identifier -> GeneratorM SpliceValue
expectEmbeddingSplicer ident = generatorWithSCtxt $! \ctx -> do
  parsed <- liftParser ident idFromParts
  case parsed of
    Left embeddings -> evalSumEmbedding ident ctx embeddings
    Right lbl       -> return (SLabel lbl)

-- | Evaluate a splice binder; non-splice binders pass through unchanged.
expectBindSplicer :: Binder -> GeneratorM Binder
expectBindSplicer (BSplice ident) = generatorWithSCtxt $! \ctx ->
  liftParser ident bindEmbedding >>= evalBindSplice ctx
expectBindSplicer b = return b
-- | Evaluate a parsed identifier embedding; only a label result is a valid
--   identifier splice.
evalIdPartsSplice :: SpliceContext -> Either [MPEmbedding] Identifier -> GeneratorM Identifier
evalIdPartsSplice sctxt (Left ml) = evalSumEmbedding "identifier" sctxt ml >>= \case
  SLabel i -> return i
  _ -> spliceFail $ "Invalid splice identifier embedding " ++ show ml
evalIdPartsSplice _ (Right i) = return i

-- | Evaluate a parsed type embedding; labels are promoted to declared
--   type variables.
evalTypeSplice :: SpliceContext -> Either [MPEmbedding] (K3 Type) -> TypeGenerator
evalTypeSplice sctxt (Left ml) = evalSumEmbedding "type" sctxt ml >>= \case
  SType t  -> return t
  SLabel i -> return $! TC.declaredVar i
  _ -> spliceFail $ "Invalid splice type value " ++ show ml
evalTypeSplice _ (Right t) = return t

-- | Evaluate a parsed expression embedding; literals are converted to
--   expressions, and labels are promoted to variables.
evalExprSplice :: SpliceContext -> Either [MPEmbedding] (K3 Expression) -> ExprGenerator
evalExprSplice sctxt (Left ml) = evalSumEmbedding "expr" sctxt ml >>= \case
  SExpr e -> return e
  SLiteral l -> either (const $ throwG "Invalid literal splice") return $! literalExpression l
  SLabel i -> return $! EC.variable i
  sv -> spliceFail $ boxToString $ ["Invalid splice expression value " ++ show ml] %$ prettyLines sv
evalExprSplice _ (Right e) = return e

-- | Evaluate a parsed literal embedding; only a literal result is valid.
evalLiteralSplice :: SpliceContext -> Either [MPEmbedding] (K3 Literal) -> LiteralGenerator
evalLiteralSplice sctxt (Left ml) = evalSumEmbedding "literal" sctxt ml >>= \case
  SLiteral l -> return l
  _ -> spliceFail $ "Invalid splice literal value " ++ show ml
evalLiteralSplice _ (Right l) = return l

-- | Evaluate a parsed binder embedding; only a binder result is valid.
evalBindSplice :: SpliceContext -> Either [MPEmbedding] Binder -> GeneratorM Binder
evalBindSplice sctxt (Left ml) = evalSumEmbedding "binder" sctxt ml >>= \case
  SBinder b -> return b
  _ -> spliceFail $ "Invalid splice binder value " ++ show ml
evalBindSplice _ (Right b) = return b

-- | Evaluate a sequence of embedding parts and concatenate the results.
--   Only label parts may be concatenated (string append); any other
--   combination is inconsistent and fails. The tg argument names the kind
--   of splice for error reporting only.
evalSumEmbedding :: String -> SpliceContext -> [MPEmbedding] -> GeneratorM SpliceValue
evalSumEmbedding tg sctxt l = maybe sumError return =<< foldM concatSpliceVal Nothing l
  where
    sumError :: GeneratorM a
    sumError = spliceFail $ "Inconsistent " ++ tg ++ " splice parts " ++ show l ++ " " ++ show sctxt

    concatSpliceVal Nothing se = evalEmbedding sctxt se >>= return . Just
    concatSpliceVal (Just (SLabel i)) se = evalEmbedding sctxt se >>= doConcat (SLabel i)
    concatSpliceVal (Just _) _ = sumError

    doConcat (SLabel i) (SLabel j) = return . Just . SLabel $! i ++ j
    doConcat _ _ = sumError

-- | Evaluate one embedding part: a null embedding is a literal label, a
--   path embedding looks up a variable in the splice context and projects
--   record fields along the path, and a Haskell-program embedding is
--   evaluated separately.
evalEmbedding :: SpliceContext -> MPEmbedding -> GeneratorM SpliceValue
evalEmbedding _ (MPENull i) = return $! SLabel i
evalEmbedding sctxt em@(MPEPath var path) = maybe evalErr (flip matchPath path) $! lookupSCtxt var sctxt
  where matchPath v [] = return v
        matchPath v (h:t) = maybe evalErr (flip matchPath t) $! spliceRecordField v h
        evalErr = spliceIdPathFail var path sctxt $ unwords ["lookup failed", "(", show em, ")"]
evalEmbedding sctxt (MPEHProg expr) = evalHaskellProg sctxt expr
-- | Fail with a diagnostic naming the dotted splice path that could not be
--   resolved, plus a pretty-printed dump of the splice context.
spliceIdPathFail :: Identifier -> [Identifier] -> SpliceContext -> String -> GeneratorM a
spliceIdPathFail root path ctx reason =
  throwG $ boxToString $
       [unwords ["Failed to splice", dotted, ":", reason]]
    %$ ["in context ["] %$ prettyLines ctx %$ ["]"]
  where dotted = intercalate "." (root : path)

-- | Fail with a generic splice error message.
spliceFail :: String -> GeneratorM a
spliceFail reason = throwG (unwords ["Splice failed:", reason])
{- Pattern matching -}
-- | An identifier denotes a pattern variable when it begins with '?'.
isPatternVariable :: Identifier -> Bool
isPatternVariable ('?':_) = True
isPatternVariable _       = False

-- | Strip the '?' marker from a pattern variable, or Nothing if unmarked.
patternVariable :: Identifier -> Maybe Identifier
patternVariable ('?':rest) = Just rest
patternVariable _          = Nothing
-- | Fold a matcher over two trees in lockstep. The matcher returns a stop
--   flag alongside the accumulator: when True, the subtrees below are not
--   visited. Child lists must have equal length when recursion proceeds.
matchTree :: (Monad m) => (b -> K3 a -> K3 a -> m (Bool, b)) -> K3 a -> K3 a -> b -> m b
matchTree matchF t1 t2 z = matchF z t1 t2 >>= \(stop, acc) ->
  if stop then return acc
  else let (ch1, ch2) = (children t1, children t2) in
    if length ch1 == length ch2
      then foldM rcr acc $ zip ch1 ch2
      -- NOTE: uses the monad's fail; in Maybe this yields Nothing.
      else fail "Mismatched children during matchTree"
  where rcr z' (t1',t2') = matchTree matchF t1' t2' z'

-- | Matches the first expression to the second, returning a splice environment
-- of pattern variables present in the second expression.
matchExpr :: K3 Expression -> K3 Expression -> Maybe SpliceEnv
matchExpr e patE = matchTree matchTag e patE emptySpliceEnv
  where
    -- A pattern variable matches any expression; bind the matched subtree
    -- (and its type, when present) under the variable's name and stop
    -- descending into this subtree.
    matchTag sEnv e1 e2@(tag -> EVariable i)
      | isPatternVariable i =
        let nrEnv = spliceRecord $! (maybe [] typeRepr $! e1 @~ isEType) ++ [(spliceVESym, SExpr $! stripEUIDSpan e1)]
            nsEnv = maybe sEnv (\n -> if null n then sEnv else addSpliceE n nrEnv sEnv) $! patternVariable i
        in do
          localLog $ debugMatchPVar i
          matchTypesAndAnnotations (annotations e1) (annotations e2) nsEnv >>= return . (True,)

    -- A splice binder pattern captures the subject's binder.
    matchTag sEnv e1@(tag -> EBindAs b) e2@(tag -> EBindAs (BSplice i))
      | isPatternVariable i =
        let nrEnv = spliceRecord [(spliceVBSym, SBinder b)]
            nsEnv = maybe sEnv (\n -> if null n then sEnv else addSpliceE n nrEnv sEnv) $! patternVariable i
        in do
          localLog $ debugMatchPVar i
          matchTypesAndAnnotations (annotations e1) (annotations e2) nsEnv >>= return . (False,)

    -- Empty collections additionally match their element types.
    matchTag sEnv e1@(tag -> EConstant (CEmpty t1)) e2@(tag -> EConstant (CEmpty t2)) =
      let (anns1, anns2) = (annotations e1, annotations e2) in
        if matchAnnotationsE (\x -> ignoreUIDSpan x && ignoreTypes x) anns1 anns2
          then matchType t1 t2 >>= return . (True,) . mergeSpliceEnv sEnv
          else debugMismatchAnns anns1 anns2 Nothing

    -- General case: identifier-bearing tags match their identifiers (which
    -- may themselves be pattern variables); otherwise tags must be equal.
    matchTag sEnv e1@(tag -> x) e2@(tag -> y)
      | hasIdentifiers y = matchITAPair e1 e2 sEnv
      | x == y = matchTAPair e1 e2 sEnv
      | otherwise = debugMismatch e1 e2 Nothing

    matchITAPair e1 e2 sEnv = matchIdentifiers (extractIdentifiers $! tag e1) (extractIdentifiers $! tag e2) sEnv >>= matchTAPair e1 e2
    matchTAPair e1 e2 sEnv = matchTypesAndAnnotations (annotations e1) (annotations e2) sEnv >>= return . (False,)

    matchIdentifiers :: [Identifier] -> [Identifier] -> SpliceEnv -> Maybe SpliceEnv
    matchIdentifiers ids patIds sEnv =
      if length ids /= length patIds then Nothing
      else foldM bindIdentifier sEnv $! zip ids patIds

    -- Match annotation sets, and if both sides carry type information
    -- (EType on the subject, EPType on the pattern), match the types too.
    matchTypesAndAnnotations :: [Annotation Expression] -> [Annotation Expression] -> SpliceEnv
                             -> Maybe SpliceEnv
    matchTypesAndAnnotations anns1 anns2 sEnv = case (find isEType anns1, find isEPType anns2) of
      (Just (EType ty), Just (EPType pty)) ->
        if matchAnnotationsE (\x -> ignoreUIDSpan x && ignoreTypes x) anns1 anns2
          then matchType ty pty >>= return . mergeSpliceEnv sEnv
          else debugMismatchAnns anns1 anns2 Nothing
      (_, _) -> if matchAnnotationsE (\x -> ignoreUIDSpan x && ignoreTypes x) anns1 anns2
                  then Just sEnv else debugMismatchAnns anns1 anns2 Nothing

    -- Bind a pattern-variable identifier to the subject identifier, or
    -- require equality for plain identifiers.
    bindIdentifier :: SpliceEnv -> (Identifier, Identifier) -> Maybe SpliceEnv
    bindIdentifier sEnv (a, b@(isPatternVariable -> True)) =
      let nrEnv = spliceRecord [(spliceVIdSym, SLabel a)]
      in Just $! maybe sEnv (\n -> if null n then sEnv else addSpliceE n nrEnv sEnv) $! patternVariable b
    bindIdentifier sEnv (a,b) = if a == b then Just sEnv else Nothing

    -- Expression tags that carry identifiers eligible for binding.
    hasIdentifiers :: Expression -> Bool
    hasIdentifiers (ELambda _) = True
    hasIdentifiers (ERecord _) = True
    hasIdentifiers (EProject _) = True
    hasIdentifiers (ELetIn _) = True
    hasIdentifiers (EAssign _) = True
    hasIdentifiers (ECaseOf _) = True
    hasIdentifiers (EBindAs _) = True
    hasIdentifiers _ = False

    extractIdentifiers :: Expression -> [Identifier]
    extractIdentifiers (ELambda i) = [i]
    extractIdentifiers (ERecord i) = i
    extractIdentifiers (EProject i) = [i]
    extractIdentifiers (ELetIn i) = [i]
    extractIdentifiers (EAssign i) = [i]
    extractIdentifiers (ECaseOf i) = [i]
    extractIdentifiers (EBindAs b) = bindingVariables b
    extractIdentifiers _ = []

    typeRepr (EType ty) = [(spliceVTSym, SType $! stripTUIDSpan ty)]
    typeRepr _ = []

    -- Annotation filters: ignore UIDs/spans/syntax and type annotations
    -- when comparing annotation sets.
    ignoreUIDSpan a = not (isEUID a || isESpan a || isESyntax a)
    ignoreTypes a = not $! isEAnyType a

    debugMismatch p1 p2 r =
      localLog (boxToString $ ["No match on "] %$ prettyLines p1 %$ ["and"] %$ prettyLines p2) >> r
    debugMismatchAnns a1 a2 r =
      localLog (boxToString $ ["No match on "] %$ [show a1] %$ ["and"] %$ [show a2]) >> r
    debugMatchPVar i =
      unwords ["isPatternVariable", show i, ":", show $ isPatternVariable i]

-- | Match two types, returning any pattern variables bound in the second argument.
matchType :: K3 Type -> K3 Type -> Maybe SpliceEnv
matchType t patT = matchTree matchTag t patT emptySpliceEnv
  where -- A declared-variable pattern binds the matched type (UIDs/spans
        -- stripped) and stops descending.
        matchTag sEnv t1 t2@(tag -> TDeclaredVar i)
          | isPatternVariable i =
            let extend n = if null n then Nothing
                           else Just . (True,) $! addSpliceE n (spliceRecord [(spliceVTSym, SType $! stripTUIDSpan t1)]) sEnv
            in do
              localLog $ debugMatchPVar i
              if matchTypeAnnotations t1 t2 then maybe Nothing extend $! patternVariable i else debugMismatch t1 t2 Nothing
        matchTag sEnv t1@(tag -> x) t2@(tag -> y)
          | x == y && matchTypeMetadata t1 t2 = Just (False, sEnv)
          | otherwise = debugMismatch t1 t2 Nothing

        matchTypeMetadata t1 t2 = matchTypeAnnotations t1 t2 && matchMutability t1 t2
        matchTypeAnnotations t1 t2 = matchAnnotationsT isTAnnotation (annotations t1) (annotations t2)
        -- Qualifiers must agree, unless the pattern leaves them open.
        matchMutability t1 t2 = (t1 @~ isTQualified) == (t2 @~ isTQualified) || isNothing (t2 @~ isTQualified)

        debugMismatch p1 p2 r =
          localLog (boxToString $ ["No match on "] %$ prettyLines p1 %$ ["and"] %$ prettyLines p2) >> r
        debugMatchPVar i =
          unwords ["isPatternVariable", show i, ":", show $ isPatternVariable i]
-- | Match two annotation sets. For now this does not introduce any bindings,
-- rather it ensures that the second set of annotations are a subset of the first.
-- Thus matching acts as a constraint on the presence of annotation and properties
-- in any rewrite rules fired.
-- | Check that every pattern expression annotation selected by the filter
--   occurs among the subject's annotations. Properties are compared by
--   name only; all other annotations must match exactly.
matchAnnotationsE :: (Annotation Expression -> Bool) -> [Annotation Expression] -> [Annotation Expression] -> Bool
matchAnnotationsE keepF subjectAnns patAnns =
  all present (filter keepF patAnns)
  where
    present (EProperty (ePropertyName -> n)) = any (sameName n) subjectAnns
    present a = a `elem` subjectAnns
    sameName n (EProperty (ePropertyName -> n2)) = n == n2
    sameName _ _ = False

-- | As 'matchAnnotationsE', but for type annotations.
matchAnnotationsT :: (Annotation Type -> Bool) -> [Annotation Type] -> [Annotation Type] -> Bool
matchAnnotationsT keepF subjectAnns patAnns =
  all present (filter keepF patAnns)
  where
    present (TProperty (tPropertyName -> n)) = any (sameName n) subjectAnns
    present a = a `elem` subjectAnns
    sameName n (TProperty (tPropertyName -> n2)) = n == n2
    sameName _ _ = False
-- | Build an expression rewriter from a list of pattern-rewrite rules. The
--   first rule whose pattern matches the input expression fires; its
--   rewrite expression and extension declarations are spliced under the
--   merged splice environment. If no rule matches, the input expression is
--   returned unchanged.
exprPatternMatcher :: [TypedSpliceVar] -> [PatternRewriteRule] -> [Either MPRewriteDecl (K3 Declaration)] -> K3Generator
exprPatternMatcher spliceParams rules extensions = ExprRewriter $! \expr spliceEnv ->
    let vspliceEnv = validateSplice spliceParams spliceEnv
        matchResult = foldl (tryMatch expr) Nothing rules
    in logValue (debugMatchResult matchResult)
       $! maybe (inputSR expr) (exprDeclSR vspliceEnv) matchResult
  where
    logValue msg v = runIdentity (localLog msg >> return v)
    inputSR expr = SRExpr $! return expr

    -- A rule fired: splice the rewrite expression and all extension
    -- declarations under the invocation env merged with the match env.
    exprDeclSR spliceEnv (sEnv, rewriteE, ruleExts) =
      let msenv = mergeSpliceEnv spliceEnv sEnv in
      SRRewrite . (, msenv) $! generateInSpliceEnv msenv $
        (\ a b -> (a, concat b)) <$> spliceExpression rewriteE <*> mapM spliceNonAnnotationTree (extensions ++ ruleExts)

    -- First-match-wins fold over rules.
    tryMatch _ acc@(Just _) _ = acc
    tryMatch expr Nothing (pat, rewrite, ruleExts) = do
      nsEnv <- (localLogAction (tryMatchLogger expr pat) $! matchExpr expr pat)
      return (nsEnv, rewrite, ruleExts)

    tryMatchLogger expr pat = maybe (Just $! debugMatchStep expr pat) (Just . debugMatchStepResult expr pat)
    debugMatchStep expr pat = boxToString $
      ["Trying match step "] %+ prettyLines pat %+ [" on "] %+ prettyLines expr
    debugMatchStepResult expr pat r = boxToString $
      ["Match step result "] %+ prettyLines pat %+ [" on "] %+ prettyLines expr
      %$ ["Result "] %$ prettyLines r
    debugMatchResult opt = unwords ["Match result", show opt]

    -- A metaprogrammed rewrite declaration: resolve the loop target, then
    -- splice the declaration templates once per loop-target element.
    spliceNonAnnotationTree (Left (MPRewriteDecl i c decls)) = spliceWithValue c
      where
        spliceWithValue c' = case c' of
          SVar v -> generatorWithSCtxt $! \sctxt -> do
            sv <- expectEmbeddingSplicer v
            case sv of
              SLabel v' -> maybe (lookupErr v') spliceWithValue $! lookupSCtxt v' sctxt
              _ -> spliceWithValue sv
          SList svs -> do
            dll <- forM svs $! \sv -> generateInExtendedSpliceEnv i sv $
                     forM decls $! \d -> do
                       nd <- mapTree (spliceNonAnnotationDecl) d
                       generatorWithSCtxt $! \sctxt -> bindDAnnVars sctxt nd
            return $! concat dll
          v -> throwG $ boxToString $ ["Invalid splice value in member generator "] %+ prettyLines v

        lookupErr v = throwG $ "Invalid loop target in rewrite declaration generator: " ++ show v

    spliceNonAnnotationTree (Right d) = mapTree spliceNonAnnotationDecl d >>= return . (:[])

    -- Only globals and triggers are valid extension declarations.
    spliceNonAnnotationDecl ch d@(tag -> DGlobal n t eOpt) =
      spliceDeclParts n t eOpt >>= \(nn, nt, neOpt) ->
        return (overrideChildren ch $! foldl (@+) (DC.global nn nt neOpt) $! annotations d)
    spliceNonAnnotationDecl ch d@(tag -> DTrigger n t e) =
      spliceDeclParts n t (Just e) >>= \(nn, nt, neOpt) ->
        case neOpt of
          Nothing -> throwG "Invalid trigger body resulting from pattern splicing"
          Just ne -> return (overrideChildren ch $! foldl (@+) (DC.trigger nn nt ne) $! annotations d)
    spliceNonAnnotationDecl _ _ = throwG "Invalid declaration in control annotation extensions"

    overrideChildren ch (Node n _) = Node n ch
| DaMSL/K3 | src/Language/K3/Metaprogram/Evaluation.hs | apache-2.0 | 38,267 | 0 | 23 | 8,871 | 13,120 | 6,534 | 6,586 | 585 | 29 |
module Problem013 where
-- | Print the binomial coefficient C(40, 20).
main :: IO ()
main = print (40 `choose` 20)
-- | Binomial coefficient C(n, k), via the multiplicative identity
--   C(n, k) = C(n-1, k-1) * n / k. The integer division is exact at every
--   step because k * C(n, k) == n * C(n-1, k-1).
--
--   Clause order matters: C(n, 0) is matched first, so C(0, 0) == 1,
--   while C(0, k) == 0 for k > 0.
choose :: Integral a => a -> a -> a
_ `choose` 0 = 1
0 `choose` _ = 0
n `choose` k = (n - 1) `choose` (k - 1) * n `div` k
| vasily-kartashov/playground | euler/problem-015.hs | apache-2.0 | 145 | 0 | 9 | 40 | 88 | 51 | 37 | 6 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QStyleOptionTitleBar.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:27
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QStyleOptionTitleBar (
QqStyleOptionTitleBar(..)
,QqStyleOptionTitleBar_nf(..)
,setTitleBarFlags
,setTitleBarState
,titleBarFlags
,titleBarState
,qStyleOptionTitleBar_delete
)
where
import Foreign.C.Types
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Enums.Core.Qt
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
-- | Overloaded constructors for QStyleOptionTitleBar, dispatched on the
--   argument tuple. (Machine-generated FFI glue; code left untouched.)
class QqStyleOptionTitleBar x1 where
  qStyleOptionTitleBar :: x1 -> IO (QStyleOptionTitleBar ())

-- No-argument construction via the C binding qtc_QStyleOptionTitleBar.
instance QqStyleOptionTitleBar (()) where
  qStyleOptionTitleBar ()
    = withQStyleOptionTitleBarResult $
      qtc_QStyleOptionTitleBar

foreign import ccall "qtc_QStyleOptionTitleBar" qtc_QStyleOptionTitleBar :: IO (Ptr (TQStyleOptionTitleBar ()))

-- Construction from an existing QStyleOptionTitleBar (copy form).
instance QqStyleOptionTitleBar ((QStyleOptionTitleBar t1)) where
  qStyleOptionTitleBar (x1)
    = withQStyleOptionTitleBarResult $
      withObjectPtr x1 $ \cobj_x1 ->
        qtc_QStyleOptionTitleBar1 cobj_x1

foreign import ccall "qtc_QStyleOptionTitleBar1" qtc_QStyleOptionTitleBar1 :: Ptr (TQStyleOptionTitleBar t1) -> IO (Ptr (TQStyleOptionTitleBar ()))

-- | As 'QqStyleOptionTitleBar', but returning an unmanaged ("no finalizer")
--   object reference.
class QqStyleOptionTitleBar_nf x1 where
  qStyleOptionTitleBar_nf :: x1 -> IO (QStyleOptionTitleBar ())

instance QqStyleOptionTitleBar_nf (()) where
  qStyleOptionTitleBar_nf ()
    = withObjectRefResult $
      qtc_QStyleOptionTitleBar

instance QqStyleOptionTitleBar_nf ((QStyleOptionTitleBar t1)) where
  qStyleOptionTitleBar_nf (x1)
    = withObjectRefResult $
      withObjectPtr x1 $ \cobj_x1 ->
        qtc_QStyleOptionTitleBar1 cobj_x1
-- Read the option's icon via the C binding qtc_QStyleOptionTitleBar_icon.
instance Qicon (QStyleOptionTitleBar a) (()) (IO (QIcon ())) where
  icon x0 ()
    = withQIconResult $
      withObjectPtr x0 $ \cobj_x0 ->
        qtc_QStyleOptionTitleBar_icon cobj_x0

foreign import ccall "qtc_QStyleOptionTitleBar_icon" qtc_QStyleOptionTitleBar_icon :: Ptr (TQStyleOptionTitleBar a) -> IO (Ptr (TQIcon ()))

-- Set the option's icon from a QIcon object.
instance QsetIcon (QStyleOptionTitleBar a) ((QIcon t1)) where
  setIcon x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withObjectPtr x1 $ \cobj_x1 ->
        qtc_QStyleOptionTitleBar_setIcon cobj_x0 cobj_x1

foreign import ccall "qtc_QStyleOptionTitleBar_setIcon" qtc_QStyleOptionTitleBar_setIcon :: Ptr (TQStyleOptionTitleBar a) -> Ptr (TQIcon t1) -> IO ()

-- Set the option's text; the String is marshalled as a wide C string.
instance QsetText (QStyleOptionTitleBar a) ((String)) where
  setText x0 (x1)
    = withObjectPtr x0 $ \cobj_x0 ->
      withCWString x1 $ \cstr_x1 ->
        qtc_QStyleOptionTitleBar_setText cobj_x0 cstr_x1

foreign import ccall "qtc_QStyleOptionTitleBar_setText" qtc_QStyleOptionTitleBar_setText :: Ptr (TQStyleOptionTitleBar a) -> CWString -> IO ()
-- | Set the title bar's window flags; the flag set is marshalled to a CLong.
setTitleBarFlags :: QStyleOptionTitleBar a -> ((WindowFlags)) -> IO ()
setTitleBarFlags x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
      qtc_QStyleOptionTitleBar_setTitleBarFlags cobj_x0 (toCLong $ qFlags_toInt x1)

foreign import ccall "qtc_QStyleOptionTitleBar_setTitleBarFlags" qtc_QStyleOptionTitleBar_setTitleBarFlags :: Ptr (TQStyleOptionTitleBar a) -> CLong -> IO ()

-- | Set the title bar's state value (marshalled as a CInt).
setTitleBarState :: QStyleOptionTitleBar a -> ((Int)) -> IO ()
setTitleBarState x0 (x1)
  = withObjectPtr x0 $ \cobj_x0 ->
      qtc_QStyleOptionTitleBar_setTitleBarState cobj_x0 (toCInt x1)

foreign import ccall "qtc_QStyleOptionTitleBar_setTitleBarState" qtc_QStyleOptionTitleBar_setTitleBarState :: Ptr (TQStyleOptionTitleBar a) -> CInt -> IO ()
-- Read the option's text via the C binding qtc_QStyleOptionTitleBar_text.
instance Qtext (QStyleOptionTitleBar a) (()) (IO (String)) where
  text x0 ()
    = withStringResult $
      withObjectPtr x0 $ \cobj_x0 ->
        qtc_QStyleOptionTitleBar_text cobj_x0

foreign import ccall "qtc_QStyleOptionTitleBar_text" qtc_QStyleOptionTitleBar_text :: Ptr (TQStyleOptionTitleBar a) -> IO (Ptr (TQString ()))

-- | Read the title bar's window flags back from the C side.
titleBarFlags :: QStyleOptionTitleBar a -> (()) -> IO (WindowFlags)
titleBarFlags x0 ()
  = withQFlagsResult $
    withObjectPtr x0 $ \cobj_x0 ->
      qtc_QStyleOptionTitleBar_titleBarFlags cobj_x0

foreign import ccall "qtc_QStyleOptionTitleBar_titleBarFlags" qtc_QStyleOptionTitleBar_titleBarFlags :: Ptr (TQStyleOptionTitleBar a) -> IO CLong

-- | Read the title bar's state value back from the C side.
titleBarState :: QStyleOptionTitleBar a -> (()) -> IO (Int)
titleBarState x0 ()
  = withIntResult $
    withObjectPtr x0 $ \cobj_x0 ->
      qtc_QStyleOptionTitleBar_titleBarState cobj_x0

foreign import ccall "qtc_QStyleOptionTitleBar_titleBarState" qtc_QStyleOptionTitleBar_titleBarState :: Ptr (TQStyleOptionTitleBar a) -> IO CInt
-- | Destroy the underlying C++ object. The Haskell wrapper must not be
--   used after this call.
qStyleOptionTitleBar_delete :: QStyleOptionTitleBar a -> IO ()
qStyleOptionTitleBar_delete x0
  = withObjectPtr x0 $ \cobj_x0 ->
      qtc_QStyleOptionTitleBar_delete cobj_x0

foreign import ccall "qtc_QStyleOptionTitleBar_delete" qtc_QStyleOptionTitleBar_delete :: Ptr (TQStyleOptionTitleBar a) -> IO ()
| keera-studios/hsQt | Qtc/Gui/QStyleOptionTitleBar.hs | bsd-2-clause | 5,085 | 0 | 12 | 696 | 1,249 | 648 | 601 | -1 | -1 |
{-# LANGUAGE FlexibleInstances, RankNTypes #-}
{- |
Module : SystemFI
Description : Abstract syntax and pretty printer for SystemFI.
Copyright : (c) 2014—2015 The F2J Project Developers (given in AUTHORS.txt)
License : BSD3
Maintainer : Zhiyuan Shi <zhiyuan.shi@gmail.com>, Haoyuan Zhang <zhanghaoyuan00@gmail.com>
Stability : experimental
Portability : portable
-}
module SystemFI
( Type(..)
, Expr(..)
, FExp(..)
, Constructor(..)
, Alt(..)
, DataBind(..)
, Definition(..)
--, TypeContext
--, ValueContext
--, Index
--, alphaEq
, mapTVar
--, mapVar
, fsubstTT
, fsubstTE
, fsubstEE
, joinType
, prettyType
, prettyExpr
) where
import JavaUtils
import PrettyUtils
import qualified Src
import Control.Arrow (second)
import Data.List (intersperse)
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Language.Java.Pretty (prettyPrint)
import Prelude hiding ((<$>))
import Text.PrettyPrint.ANSI.Leijen
-- | Types of SystemFI, parameterized (PHOAS-style) by the type-variable
--   representation t. 'Forall' binds its variable as a Haskell function.
data Type t
  = TVar Src.ReadId t                       -- a
  | JClass ClassName                        -- C
  | Fun (Type t) (Type t)                   -- t1 -> t2
  | Forall Src.ReadId (t -> Type t)         -- forall a. t
  | Product [Type t]                        -- (t1, ..., tn)
  | Unit

  -- Extensions
  | And (Type t) (Type t)                   -- t1 & t2
  | RecordType (Src.Label, Type t)
  | Datatype Src.ReadId [Type t] [Src.ReadId]

  -- Warning: If you ever add a case to this, you *must* also define the
  -- binary relations on your new case. Namely, add cases for your data
  -- constructor in `alphaEq' (below) and `coerce' (in Simplify.hs). Consult
  -- George if you're not sure.
-- | Module-level definitions, in continuation style: each constructor
--   carries a function producing the rest of the definitions given the
--   bound value(s). 'Null' terminates the sequence.
data Definition t e = Def Src.Name (Src.Type, Type t) (Expr t e) (e -> Definition t e)
                    | DefRec [Src.Name] [(Src.Type, Type t)] ([e] -> [Expr t e]) ([e] -> Definition t e)
                    | Null
-- | Expressions of SystemFI, parameterized by the type-variable (t) and
--   term-variable (e) representations; binders are Haskell functions.
data Expr t e
  = Var Src.ReadId e
  | Lit Src.Lit

  -- Binders we have: λ, fix, letrec, and Λ
  | Lam Src.ReadId (Type t) (e -> Expr t e)
  | Fix Src.ReadId Src.ReadId
        (e -> e -> Expr t e)
        (Type t)  -- t1
        (Type t)  -- t
  -- fix x (x1 : t1) : t. e     Syntax in the tal-toplas paper
  -- fix (x : t1 -> t). \x1. e  Alternative syntax, which is arguably clear
  -- <name>: Fix funcName paraName func paraType returnType
  | Let Src.ReadId (Expr t e) (e -> Expr t e)
  | LetRec [Src.ReadId]         -- Names
           [Type t]             -- Signatures
           ([e] -> [Expr t e])  -- Bindings
           ([e] -> Expr t e)    -- Body
  | BLam Src.ReadId (t -> Expr t e)

  | App (Expr t e) (Expr t e)
  | TApp (Expr t e) (Type t)

  | If (Expr t e) (Expr t e) (Expr t e)
  | PrimOp (Expr t e) Src.Operator (Expr t e)
      -- SystemF extension from:
      -- https://www.cs.princeton.edu/~dpw/papers/tal-toplas.pdf
      -- (no int restriction)

  | Tuple [Expr t e]     -- Tuple introduction
  | Proj Int (Expr t e)  -- Tuple elimination

  -- Module
  | Module (Maybe Src.PackageName) (Definition t e)

  -- Java
  | JNew ClassName [Expr t e]
  | JMethod (Src.JReceiver (Expr t e)) MethodName [Expr t e] ClassName
  | JField (Src.JReceiver (Expr t e)) FieldName (Type t)

  | Seq [Expr t e]

  | Merge (Expr t e) (Expr t e)  -- e1 ,, e2
  | RecordCon (Src.Label, Expr t e)
  | RecordProj (Expr t e) Src.Label
  | RecordUpdate (Expr t e) (Src.Label, Expr t e)

  | Data Src.RecFlag [DataBind t] (Expr t e)
  | ConstrOut (Constructor t) [Expr t e]
  | Case (Expr t e) [Alt t e]

  | Error (Type t) (Expr t e)
-- | A closed expression: universally quantified over both representations.
newtype FExp = HideF { revealF :: forall t e. Expr t e }

-- | Case alternatives: a constructor pattern or a default branch.
data Alt t e = ConstrAlt (Constructor t) (Expr t e)
             | Default (Expr t e)

-- | A datatype binding: name, type parameters, and the constructors
--   produced once the parameters are instantiated.
data DataBind t = DataBind Src.ReadId [Src.ReadId] ([t] -> [Constructor t])

data Constructor t = Constructor {constrName :: Src.ReadId, constrParams :: [Type t]}

-- newtype Typ = HideTyp { revealTyp :: forall t. Type t } -- type of closed types

-- newtype Exp = HideExp { revealExp :: forall t e. Expr t e }

type TypeContext t    = Set.Set t
type ValueContext t e = Map.Map e (Type t)

-- | De Bruijn-style index used to instantiate PHOAS binders.
type Index = Int
-- | Alpha-equivalence of two types.  The 'Int' is the next fresh index:
-- both sides of a 'Forall' are instantiated with the same fresh variable
-- before their bodies are compared.
--
-- NOTE(review): 'RecordType' and 'Datatype' fall through to the final
-- catch-all and thus never compare equal — confirm this is intended.
alphaEq :: Int -> Type Index -> Type Index -> Bool
alphaEq lvl = eq
  where
    eq (TVar _ a)   (TVar _ b)   = a == b
    eq (JClass c)   (JClass d)   = c == d
    eq (Fun a1 r1)  (Fun a2 r2)  = alphaEq lvl a1 a2 && alphaEq lvl r1 r2
    eq (Forall _ f) (Forall _ g) = alphaEq (lvl + 1) (f lvl) (g lvl)
    eq (Product xs) (Product ys) = length xs == length ys
                                   && and (zipWith (alphaEq lvl) xs ys)
    eq Unit         Unit         = True
    eq (And a1 r1)  (And a2 r2)  = alphaEq lvl a1 a2 && alphaEq lvl r1 r2
    eq _            _            = False
-- | Apply a transformation to every 'TVar' leaf of a type, rebuilding
-- every other constructor structurally.
mapTVar :: (Src.ReadId -> t -> Type t) -> Type t -> Type t
mapTVar f = go
  where
    go (TVar n a)          = f n a
    go (JClass c)          = JClass c
    go (Fun t1 t2)         = Fun (go t1) (go t2)
    go (Forall n k)        = Forall n (go . k)
    go (Product ts)        = Product (map go ts)
    go Unit                = Unit
    go (And t1 t2)         = And (go t1) (go t2)
    go (RecordType (l, t)) = RecordType (l, go t)
    go (Datatype n ts ns)  = Datatype n (map go ts) ns
-- | Apply @g@ to every 'Var' leaf and @h@ to every embedded 'Type' of
-- an expression, rebuilding all other constructors structurally.  This
-- is the workhorse behind the substitution functions below.
mapVar :: (Src.ReadId -> e -> Expr t e) -> (Type t -> Type t) -> Expr t e -> Expr t e
mapVar g _ (Var n a) = g n a
mapVar _ _ (Lit n) = Lit n
mapVar g h (Lam n t f) = Lam n (h t) (mapVar g h . f)
mapVar g h (BLam n f) = BLam n (mapVar g h . f)
mapVar g h (Fix n1 n2 f t1 t) = Fix n1 n2 (\x x1 -> mapVar g h (f x x1)) (h t1) (h t)
mapVar g h (Let n b e) = Let n (mapVar g h b) (mapVar g h . e)
mapVar g h (LetRec ns ts bs e) = LetRec ns (map h ts) (map (mapVar g h) . bs) (mapVar g h . e)
mapVar g h (Data rec databinds e) = Data rec (map mapDatabind databinds) (mapVar g h e)
  where mapDatabind (DataBind name params ctrs) = DataBind name params (map mapCtr . ctrs)
        mapCtr (Constructor n ts) = Constructor n (map h ts)
mapVar g h (ConstrOut (Constructor n ts) es) = ConstrOut c' (map (mapVar g h) es)
  where c' = Constructor n (map h ts)
mapVar g h (Case e alts) = Case (mapVar g h e) (map mapAlt alts)
  where mapAlt (ConstrAlt (Constructor n ts) e1) = ConstrAlt (Constructor n (map h ts)) (mapVar g h e1)
        mapAlt (Default e1) = Default (mapVar g h e1)
mapVar g h (App f e) = App (mapVar g h f) (mapVar g h e)
mapVar g h (TApp f t) = TApp (mapVar g h f) (h t)
mapVar g h (If p b1 b2) = If (mapVar g h p) (mapVar g h b1) (mapVar g h b2)
mapVar g h (PrimOp e1 op e2) = PrimOp (mapVar g h e1) op (mapVar g h e2)
mapVar g h (Tuple es) = Tuple (map (mapVar g h) es)
mapVar g h (Proj i e) = Proj i (mapVar g h e)
mapVar g h (JNew c args) = JNew c (map (mapVar g h) args)
mapVar g h (JMethod callee m args c) = JMethod (fmap (mapVar g h) callee) m (map (mapVar g h) args) c
mapVar g h (JField callee f c) = JField (fmap (mapVar g h) callee) f (h c)
mapVar g h (Error ty str) = Error (h ty) (mapVar g h str)
mapVar g h (Seq es) = Seq (map (mapVar g h) es)
mapVar g h (Merge e1 e2) = Merge (mapVar g h e1) (mapVar g h e2)
mapVar g h (RecordCon (l, e)) = RecordCon (l, mapVar g h e)
mapVar g h (RecordProj e l) = RecordProj (mapVar g h e) l
mapVar g h (RecordUpdate e (l1,e1)) = RecordUpdate (mapVar g h e) (l1, mapVar g h e1)
mapVar g h (Module pname defs) = Module pname (mapVarDefs defs)
  where
    -- necessary?
    mapVarDefs Null = Null
    -- Fix: apply 'h' to the 'Def' type annotation as well; previously
    -- it was left untransformed, inconsistently with 'DefRec' below.
    mapVarDefs (Def n t expr def) = Def n (second h t) (mapVar g h expr) (mapVarDefs . def)
    mapVarDefs (DefRec names types exprs def) =
      DefRec names (map (second h) types) (map (mapVar g h) . exprs) (mapVarDefs . def)
-- | Substitute the type @r@ for every type variable equal to @x@.
fsubstTT :: Eq a => a -> Type a -> Type a -> Type a
fsubstTT x r = mapTVar subst
  where
    subst n a
      | a == x    = r
      | otherwise = TVar n a
-- | Substitute the type @r@ for the type variable @x@ throughout an
-- expression; term variables are left untouched.
fsubstTE :: Eq t => t -> Type t -> Expr t e -> Expr t e
fsubstTE x r expr = mapVar Var (fsubstTT x r) expr
-- | Substitute the expression @r@ for every term variable equal to @x@;
-- types are left untouched.
fsubstEE :: Eq a => a -> Expr t a -> Expr t a -> Expr t a
fsubstEE x r = mapVar subst id
  where
    subst n a
      | a == x    = r
      | otherwise = Var n a
-- | Collapse one level of type nesting: a 'TVar' whose payload is
-- itself a type is replaced by that type (monadic join for 'Type').
joinType :: Type (Type t) -> Type t
joinType (TVar _ inner)      = inner
joinType (JClass c)          = JClass c
joinType (Fun a b)           = Fun (joinType a) (joinType b)
joinType (Forall n g)        = Forall n (joinType . g . TVar "_") -- Right?
joinType (Product ts)        = Product (map joinType ts)
joinType Unit                = Unit
joinType (And a b)           = And (joinType a) (joinType b)
joinType (RecordType (l,t))  = RecordType (l, joinType t)
joinType (Datatype n ts ns)  = Datatype n (map joinType ts) ns
-- instance Show (Type Index) where
-- show = show . pretty
-- instance Pretty (Type Index) where
-- pretty = prettyType
-- | Pretty-print a type at base precedence, starting from index 0.
prettyType :: Type Index -> Doc
prettyType ty = prettyType' basePrec 0 ty
-- | Pretty-print a type.  @p@ is the precedence context (used by
-- 'parensIf' to decide on parentheses) and @i@ the next fresh index
-- with which 'Forall' bodies are instantiated.
prettyType' :: Prec -> Index -> Type Index -> Doc
prettyType' _ _ (TVar n a) = text n
prettyType' p i (Datatype n ts _) = hsep $ text n : map (prettyType' p i) ts
prettyType' p i (Fun t1 t2) =
  parensIf p 2
  (prettyType' (2,PrecPlus) i t1 <+> arrow <+> prettyType' (2,PrecMinus) i t2)
prettyType' p i (Forall n f) =
  parensIf p 1
  (forall <+> text n <> dot <+>
   prettyType' (1,PrecMinus) (succ i) (f i))
prettyType' _ i (Product ts) = parens $ hcat (intersperse comma (map (prettyType' basePrec i) ts))
prettyType' _ _ Unit = text "Unit"
-- Common Java box types are shown under their surface-language names.
prettyType' _ _ (JClass "java.lang.Integer") = text "Int"
prettyType' _ _ (JClass "java.lang.String") = text "String"
prettyType' _ _ (JClass "java.lang.Boolean") = text "Bool"
prettyType' _ _ (JClass "java.lang.Character") = text "Char"
prettyType' _ _ (JClass c) = text c
prettyType' p i (And t1 t2) =
  parensIf p 2
  (prettyType' (2,PrecMinus) i t1 <+>
   ampersand  <+>
   prettyType' (2,PrecPlus) i t2)
prettyType' _ i (RecordType (l,t)) = lbrace <+> text l <+> colon <+> prettyType' basePrec i t <+> rbrace
-- instance Show (Expr Index Index) where
-- show = show . pretty
-- instance Pretty (Expr Index Index) where
-- pretty = prettyExpr
-- | Pretty-print a chain of module definitions.  @(i, j)@ are the next
-- fresh type- and term-variable indices, respectively.
prettyDef :: Prec -> (Index, Index) -> Definition Index Index -> Doc
prettyDef _ (i, j) (Def fname typ e def) =
  text fname <+> colon <+> pretty (fst typ) <+> equals <+> prettyExpr' basePrec (i, j + 1) e <> semi <$>
  prettyDef basePrec (i, j+1) (def j) -- crappy pretty printer
prettyDef p (i, j) (DefRec names sigs binds def) = vcat (intersperse (text "and") pretty_binds) <> semi <$> pretty_body
  where
    n = length sigs
    -- NOTE(review): the fresh ids are drawn from the type index @i@,
    -- not the term index @j@ — confirm this is intended.
    ids = [i .. (i + n) - 1]
    pretty_ids   = map text names
    pretty_sigs  = map (pretty . fst) sigs
    pretty_defs  = map (prettyExpr' p (i, j + n)) (binds ids)
    -- Render each binding as "name : sig = body", joined with "and".
    pretty_binds = zipWith3
                     (\pretty_id pretty_sig pretty_def ->
                        pretty_id <+> colon <+> pretty_sig <$> indent 2 (equals <+> pretty_def))
                     pretty_ids
                     pretty_sigs
                     pretty_defs
    pretty_body  = prettyDef p (i, j + n) (def ids)
prettyDef _ _ Null = text ""
-- | Pretty-print an expression at base precedence with both index
-- supplies starting at 0.
prettyExpr :: Expr Index Index -> Doc
prettyExpr e = prettyExpr' basePrec (0, 0) e
-- | Pretty-print an expression.  @p@ is the precedence context and
-- @(i, j)@ are the next fresh type- and term-variable indices used to
-- instantiate binders ('BLam' consumes @i@; 'Lam', 'Fix', 'Let' and
-- 'LetRec' consume from @j@).
prettyExpr' :: Prec -> (Index, Index) -> Expr Index Index -> Doc
prettyExpr' _ _ (Var n _) = text n
prettyExpr' p (i,j) (Lam n t f)
  = parensIf p 2 $ group $ hang 2 $
    lambda <+> parens (text n <+> colon <+> prettyType' basePrec i t) <> dot <$>
    prettyExpr' (2,PrecMinus) (i, j + 1) (f j)
prettyExpr' p (i,j) (App e1 e2)
  = parensIf p 4 $
    group $ hang 2 $ prettyExpr' (4,PrecMinus) (i,j) e1 <$> prettyExpr' (4,PrecPlus) (i,j) e2
prettyExpr' p (i,j) (BLam n f) =
  parensIf p 2
  (biglambda <+> text n <> dot <+>
   prettyExpr' (2,PrecMinus) (succ i, j) (f i))
prettyExpr' p (i,j) (TApp e t) =
  parensIf p 4
  (group $ hang 2 $ prettyExpr' (4,PrecMinus) (i,j) e <$> prettyType' (4,PrecPlus) i t)
prettyExpr' _ _ (Lit (Src.Int n)) = integer n
prettyExpr' _ _ (Lit (Src.String s)) = dquotes (string s)
prettyExpr' _ _ (Lit (Src.Bool b)) = bool b
prettyExpr' _ _ (Lit (Src.Char c)) = char c
prettyExpr' _ _ (Lit Src.UnitLit) = unit
prettyExpr' p (i,j) (If e1 e2 e3)
  = parensIf p prec
    (hang 3 (text "if"   <+> prettyExpr' (prec,PrecMinus) (i,j) e1 <+>
             text "then" <+> prettyExpr' (prec,PrecMinus) (i,j) e2 <+>
             text "else" <+> prettyExpr' (prec,PrecMinus) (i,j) e3))
  where prec = 3
prettyExpr' p (i,j) (PrimOp e1 op e2)
  = parens (prettyExpr' p (i,j) e1 <+> pretty_op <+> prettyExpr' p (i,j) e2)
  where
    -- Operators are rendered through the Java pretty printer.
    pretty_op = text (Language.Java.Pretty.prettyPrint java_op)
    java_op   = case op of
                  Src.Arith   op' -> op'
                  Src.Compare op' -> op'
                  Src.Logic   op' -> op'
prettyExpr' _ (i,j) (Tuple es) = tupled (map (prettyExpr' basePrec (i,j)) es)
-- Tuple projection is printed 1-based as "e._n".
prettyExpr' p i (Proj n e) =
  parensIf p 5
  (prettyExpr' (5,PrecMinus) i e <> dot <> char '_' <> int n)
prettyExpr' p i (Module pname defs) =
  maybe empty ((text "package" <+>) . pretty) pname <$> text "module" <> semi <$> prettyDef p i defs
prettyExpr' _ (i,j) (JNew c args) =
  parens (text "new" <+> text c <> tupled (map (prettyExpr' basePrec (i,j)) args))
prettyExpr' _ i (JMethod name m args _) = methodStr name <> dot <> text m <> tupled (map (prettyExpr' basePrec i) args)
  where
    -- Static receivers are class names; non-static ones are expressions.
    methodStr (Src.Static x)    = text x
    methodStr (Src.NonStatic x) = prettyExpr' (6,PrecMinus) i x
prettyExpr' _ i (JField name f _) = fieldStr name <> dot <> text f
  where
    fieldStr (Src.Static x)    = text x
    fieldStr (Src.NonStatic x) = prettyExpr' (6,PrecMinus) i x
prettyExpr' p (i,j) (Seq es) = semiBraces (map (prettyExpr' p (i,j)) es)
prettyExpr' p (i,j) (Data recflag databinds e) =
  text "data" <+> (pretty recflag) <+> (align .vsep) (map prettyDatabind databinds) <$> prettyExpr' p (i,j) e
  where prettyCtr i' (Constructor ctrName ctrParams) = (text ctrName) <+> (hsep. map (prettyType' p i') $ ctrParams)
        -- "name tvars = C1 ... | C2 ... ;" — type variables are
        -- instantiated with fresh indices starting at i.
        prettyDatabind (DataBind n tvars cons) = hsep (map text $ n:tvars) <+> align
          (equals <+> intersperseBar (map (prettyCtr (length tvars + i)) $ cons [i..length tvars +i-1]) <$$> semi)
prettyExpr' p i (Error _ str) = text "error:" <+> prettyExpr' p i str
prettyExpr' p (i,j) (Fix n1 n2 f t1 t)
  = parens $ group $ hang 2 $
    text "fix" <+> text n1 <+>
    parens (text n2 <+> colon <+> prettyType' p i t1) <+>
    colon <+> prettyType' p i t <> dot <$>
    prettyExpr' p (i, j + 2) (f j (j + 1))
prettyExpr' p (i,j) (Let n b e) =
  parensIf p 2 (text "let" <+> text n <+> equals <+> prettyExpr' basePrec (i, j + 1) b <$> text "in" <$>
                prettyExpr' basePrec (i, j + 1) (e j))
prettyExpr' p (i,j) (LetRec names sigs binds body)
  = text "let" <+> text "rec" <$>
    vcat (intersperse (text "and") (map (indent 2) pretty_binds)) <$>
    text "in" <$>
    pretty_body
  where
    n = length sigs
    ids = [i..(i+n-1)]
    pretty_ids   = map text names
    pretty_sigs  = map (prettyType' p i) sigs
    pretty_defs  = map (prettyExpr' p (i, j + n)) (binds ids)
    pretty_binds = zipWith3 (\pretty_id pretty_sig pretty_def ->
                               pretty_id <+> colon <+> pretty_sig <$> indent 2 (equals <+> pretty_def))
                            pretty_ids pretty_sigs pretty_defs
    pretty_body  = prettyExpr' p (i, j + n) (body ids)
prettyExpr' p (i,j) (Merge e1 e2) =
  parens $ prettyExpr' p (i,j) e1 <+> dcomma <+> prettyExpr' p (i,j) e2
prettyExpr' _ (i,j) (RecordCon (l, e)) = lbrace <+> text l <+> equals <+> prettyExpr' basePrec (i,j) e <+> rbrace
prettyExpr' p (i,j) (RecordProj e l) = prettyExpr' p (i,j) e <> dot <> text l
-- Record update is shown as "e with { l = e1 }".
prettyExpr' p (i,j) (RecordUpdate e (l, e1)) = prettyExpr' p (i,j) e <+> text "with" <+> prettyExpr' p (i,j) (RecordCon (l, e1))
prettyExpr' p (i,j) (ConstrOut c es) = parens $ hsep $ text (constrName c) : map (prettyExpr' p (i,j)) es
prettyExpr' p (i,j) (Case e alts) =
  hang 2 $ text "case" <+> prettyExpr' p (i,j) e <+> text "of" <$> align (intersperseBar (map pretty_alt alts))
  where pretty_alt (ConstrAlt c e1) =
          (text (constrName c) <+> arrow <+> (align $ prettyExpr' p (i, j) e1 ))
        pretty_alt (Default e1) =
          (text "_" <+> arrow <+> (align $ prettyExpr' p (i, j) e1 ))
| bixuanzju/fcore | lib/SystemFI.hs | bsd-2-clause | 16,328 | 1 | 20 | 4,516 | 7,381 | 3,792 | 3,589 | 286 | 6 |
-- Standard code to make a table of units
-- First true example of a (small!) recipe.
module Drasil.Sections.TableOfUnits (tOfUnitDesc, tOfUnitSIName, unitTableRef) where
import Control.Lens ((^.))
import Language.Drasil
import Data.Drasil.Concepts.Documentation (symbol_, description)
-- | Creates the table of units with an "SI Name" column
tOfUnitSIName :: IsUnit s => [s] -> LabelledContent
tOfUnitSIName units = tOfUnitHelper headers cells units
  where
    headers = [atStart symbol_, atStart description, S "SI Name"]
    cells   = [Sy . usymb, (^. defn), phrase]
-- | Creates the table of units with SI name in the "Description" column
tOfUnitDesc :: IsUnit s => [s] -> LabelledContent
tOfUnitDesc units = tOfUnitHelper headers cells units
  where
    headers = [atStart symbol_, atStart description]
    cells   = [Sy . usymb, \u -> (u ^. defn) +:+ sParen (phrase u)]
-- | Helper for making Table of Units: builds a labelled table from the
-- given column headers and one cell-rendering function per column.
tOfUnitHelper :: [Sentence] -> [s -> Sentence] -> [s] -> LabelledContent
tOfUnitHelper headers cells units =
  llcc unitTableRef $
    Table headers (mkTable cells units) (S "Table of Units") True
-- | Reference label for the Table of Units.
unitTableRef :: Reference
unitTableRef = makeTabRef "ToU"
| JacquesCarette/literate-scientific-software | code/drasil-docLang/Drasil/Sections/TableOfUnits.hs | bsd-2-clause | 1,068 | 0 | 11 | 195 | 280 | 157 | 123 | 15 | 1 |
-- Spec.hs
import Pear.Operator
import Test.Hspec
main :: IO ()
main = hspec $ do
describe "Integer parser" $ do
it "should parse '2 + (3 * 4)' properly" do
| Charlesetc/haskell-parsing | test/Spec.hs | bsd-3-clause | 179 | 1 | 6 | 52 | 42 | 24 | 18 | -1 | -1 |
-- | This module converts between HTML/XML entities (i.e. @&@) and
-- the characters they represent.
module Text.HTML.TagSoup.Entity(
lookupEntity, lookupNamedEntity, lookupNumericEntity,
escapeXML,
xmlEntities, htmlEntities
) where
import Data.Char (chr, ord)
import qualified Data.IntMap as IntMap
import Data.Ix
import qualified Data.Map as Map
import Numeric (readHex)
-- | Lookup an entity, using 'lookupNumericEntity' if it starts with
--   @#@ and 'lookupNamedEntity' otherwise
lookupEntity :: String -> Maybe String
lookupEntity ent =
    case ent of
        '#':digits -> lookupNumericEntity digits
        _          -> lookupNamedEntity ent
-- | Lookup a numeric entity, the leading @\'#\'@ must have already been removed.
--
-- > lookupNumericEntity "65" == Just "A"
-- > lookupNumericEntity "x41" == Just "A"
-- > lookupNumericEntity "x4E" === Just "N"
-- > lookupNumericEntity "x4e" === Just "N"
-- > lookupNumericEntity "X4e" === Just "N"
-- > lookupNumericEntity "Haskell" == Nothing
-- > lookupNumericEntity "" == Nothing
-- > lookupNumericEntity "89439085908539082" == Nothing
lookupNumericEntity :: String -> Maybe String
lookupNumericEntity str =
    -- entity = '&#' [0-9]+ ';' | '&#x' [0-9a-fA-F]+ ';'
    case str of
        c:rest | c == 'x' || c == 'X' -> convert hexDigits readHex rest
        _                             -> convert decDigits reads str
  where
    decDigits = [('0','9')]
    hexDigits = [('0','9'),('a','f'),('A','F')]

    -- Every character must be a digit of the right base (this also
    -- rejects signs/whitespace that 'reads' would accept), the input
    -- must be non-empty and fully consumed, and the code point must
    -- fit in 'Char'.
    convert :: [(Char,Char)] -> ReadS Integer -> String -> Maybe String
    convert digits reader ds
        | null ds                           = Nothing
        | not (all (digitIn digits) ds)     = Nothing
        | [(n, "")] <- reader ds
        , inRange (toInteger (ord minBound), toInteger (ord maxBound)) n
                                            = Just [chr (fromInteger n)]
        | otherwise                         = Nothing

    digitIn ranges c = any (`inRange` c) ranges
-- | Lookup a named entity, using 'htmlEntities'
--
-- > lookupNamedEntity "amp" == Just "&"
-- > lookupNamedEntity "haskell" == Nothing
lookupNamedEntity :: String -> Maybe String
lookupNamedEntity = flip Map.lookup table
    -- Point-free definition keeps the table a top-level thunk, so it
    -- is built at most once and shared between calls.
    where table = Map.fromList htmlEntities
-- | Escape an XML string.
--
-- > escapeXML "hello world" == "hello world"
-- > escapeXML "hello & world" == "hello &amp; world"
escapeXML :: String -> String
escapeXML = concatMap escapeChar
    where
        -- Characters without an entity pass through unchanged.
        escapeChar c = IntMap.findWithDefault [c] (ord c) table
        -- The XML entities plus the apostrophe (&#39;), keyed by code point.
        table = IntMap.fromList
            [ (ord c, "&" ++ name ++ ";")
            | (name, [c]) <- ("#39","\'") : xmlEntities ]
-- | A table mapping XML entity names to resolved strings. All strings are a single character long.
xmlEntities :: [(String, String)]
xmlEntities =
    [ ("quot", "\"")
    , ("amp" , "&")
    , ("lt"  , "<")
    , ("gt"  , ">")
    ]
-- | A table mapping HTML entity names to resolved strings. Most resolved strings are a single character long,
-- but some (e.g. @"ngeqq"@) are two characters long. The list is taken from
-- <http://www.w3.org/TR/html5/syntax.html#named-character-references>.
htmlEntities :: [(String, String)]
htmlEntities = let (*) = (,) in
["Aacute" * "\x00C1"
,"aacute" * "\x00E1"
,"Aacute;" * "\x00C1"
,"aacute;" * "\x00E1"
,"Abreve;" * "\x0102"
,"abreve;" * "\x0103"
,"ac;" * "\x223E"
,"acd;" * "\x223F"
,"acE;" * "\x223E\x0333"
,"Acirc" * "\x00C2"
,"acirc" * "\x00E2"
,"Acirc;" * "\x00C2"
,"acirc;" * "\x00E2"
,"acute" * "\x00B4"
,"acute;" * "\x00B4"
,"Acy;" * "\x0410"
,"acy;" * "\x0430"
,"AElig" * "\x00C6"
,"aelig" * "\x00E6"
,"AElig;" * "\x00C6"
,"aelig;" * "\x00E6"
,"af;" * "\x2061"
,"Afr;" * "\x1D504"
,"afr;" * "\x1D51E"
,"Agrave" * "\x00C0"
,"agrave" * "\x00E0"
,"Agrave;" * "\x00C0"
,"agrave;" * "\x00E0"
,"alefsym;" * "\x2135"
,"aleph;" * "\x2135"
,"Alpha;" * "\x0391"
,"alpha;" * "\x03B1"
,"Amacr;" * "\x0100"
,"amacr;" * "\x0101"
,"amalg;" * "\x2A3F"
,"AMP" * "\x0026"
,"amp" * "\x0026"
,"AMP;" * "\x0026"
,"amp;" * "\x0026"
,"and;" * "\x2227"
,"And;" * "\x2A53"
,"andand;" * "\x2A55"
,"andd;" * "\x2A5C"
,"andslope;" * "\x2A58"
,"andv;" * "\x2A5A"
,"ang;" * "\x2220"
,"ange;" * "\x29A4"
,"angle;" * "\x2220"
,"angmsd;" * "\x2221"
,"angmsdaa;" * "\x29A8"
,"angmsdab;" * "\x29A9"
,"angmsdac;" * "\x29AA"
,"angmsdad;" * "\x29AB"
,"angmsdae;" * "\x29AC"
,"angmsdaf;" * "\x29AD"
,"angmsdag;" * "\x29AE"
,"angmsdah;" * "\x29AF"
,"angrt;" * "\x221F"
,"angrtvb;" * "\x22BE"
,"angrtvbd;" * "\x299D"
,"angsph;" * "\x2222"
,"angst;" * "\x00C5"
,"angzarr;" * "\x237C"
,"Aogon;" * "\x0104"
,"aogon;" * "\x0105"
,"Aopf;" * "\x1D538"
,"aopf;" * "\x1D552"
,"ap;" * "\x2248"
,"apacir;" * "\x2A6F"
,"ape;" * "\x224A"
,"apE;" * "\x2A70"
,"apid;" * "\x224B"
,"apos;" * "\x0027"
,"ApplyFunction;" * "\x2061"
,"approx;" * "\x2248"
,"approxeq;" * "\x224A"
,"Aring" * "\x00C5"
,"aring" * "\x00E5"
,"Aring;" * "\x00C5"
,"aring;" * "\x00E5"
,"Ascr;" * "\x1D49C"
,"ascr;" * "\x1D4B6"
,"Assign;" * "\x2254"
,"ast;" * "\x002A"
,"asymp;" * "\x2248"
,"asympeq;" * "\x224D"
,"Atilde" * "\x00C3"
,"atilde" * "\x00E3"
,"Atilde;" * "\x00C3"
,"atilde;" * "\x00E3"
,"Auml" * "\x00C4"
,"auml" * "\x00E4"
,"Auml;" * "\x00C4"
,"auml;" * "\x00E4"
,"awconint;" * "\x2233"
,"awint;" * "\x2A11"
,"backcong;" * "\x224C"
,"backepsilon;" * "\x03F6"
,"backprime;" * "\x2035"
,"backsim;" * "\x223D"
,"backsimeq;" * "\x22CD"
,"Backslash;" * "\x2216"
,"Barv;" * "\x2AE7"
,"barvee;" * "\x22BD"
,"barwed;" * "\x2305"
,"Barwed;" * "\x2306"
,"barwedge;" * "\x2305"
,"bbrk;" * "\x23B5"
,"bbrktbrk;" * "\x23B6"
,"bcong;" * "\x224C"
,"Bcy;" * "\x0411"
,"bcy;" * "\x0431"
,"bdquo;" * "\x201E"
,"becaus;" * "\x2235"
,"Because;" * "\x2235"
,"because;" * "\x2235"
,"bemptyv;" * "\x29B0"
,"bepsi;" * "\x03F6"
,"bernou;" * "\x212C"
,"Bernoullis;" * "\x212C"
,"Beta;" * "\x0392"
,"beta;" * "\x03B2"
,"beth;" * "\x2136"
,"between;" * "\x226C"
,"Bfr;" * "\x1D505"
,"bfr;" * "\x1D51F"
,"bigcap;" * "\x22C2"
,"bigcirc;" * "\x25EF"
,"bigcup;" * "\x22C3"
,"bigodot;" * "\x2A00"
,"bigoplus;" * "\x2A01"
,"bigotimes;" * "\x2A02"
,"bigsqcup;" * "\x2A06"
,"bigstar;" * "\x2605"
,"bigtriangledown;" * "\x25BD"
,"bigtriangleup;" * "\x25B3"
,"biguplus;" * "\x2A04"
,"bigvee;" * "\x22C1"
,"bigwedge;" * "\x22C0"
,"bkarow;" * "\x290D"
,"blacklozenge;" * "\x29EB"
,"blacksquare;" * "\x25AA"
,"blacktriangle;" * "\x25B4"
,"blacktriangledown;" * "\x25BE"
,"blacktriangleleft;" * "\x25C2"
,"blacktriangleright;" * "\x25B8"
,"blank;" * "\x2423"
,"blk12;" * "\x2592"
,"blk14;" * "\x2591"
,"blk34;" * "\x2593"
,"block;" * "\x2588"
,"bne;" * "\x003D\x20E5"
,"bnequiv;" * "\x2261\x20E5"
,"bnot;" * "\x2310"
,"bNot;" * "\x2AED"
,"Bopf;" * "\x1D539"
,"bopf;" * "\x1D553"
,"bot;" * "\x22A5"
,"bottom;" * "\x22A5"
,"bowtie;" * "\x22C8"
,"boxbox;" * "\x29C9"
,"boxdl;" * "\x2510"
,"boxdL;" * "\x2555"
,"boxDl;" * "\x2556"
,"boxDL;" * "\x2557"
,"boxdr;" * "\x250C"
,"boxdR;" * "\x2552"
,"boxDr;" * "\x2553"
,"boxDR;" * "\x2554"
,"boxh;" * "\x2500"
,"boxH;" * "\x2550"
,"boxhd;" * "\x252C"
,"boxHd;" * "\x2564"
,"boxhD;" * "\x2565"
,"boxHD;" * "\x2566"
,"boxhu;" * "\x2534"
,"boxHu;" * "\x2567"
,"boxhU;" * "\x2568"
,"boxHU;" * "\x2569"
,"boxminus;" * "\x229F"
,"boxplus;" * "\x229E"
,"boxtimes;" * "\x22A0"
,"boxul;" * "\x2518"
,"boxuL;" * "\x255B"
,"boxUl;" * "\x255C"
,"boxUL;" * "\x255D"
,"boxur;" * "\x2514"
,"boxuR;" * "\x2558"
,"boxUr;" * "\x2559"
,"boxUR;" * "\x255A"
,"boxv;" * "\x2502"
,"boxV;" * "\x2551"
,"boxvh;" * "\x253C"
,"boxvH;" * "\x256A"
,"boxVh;" * "\x256B"
,"boxVH;" * "\x256C"
,"boxvl;" * "\x2524"
,"boxvL;" * "\x2561"
,"boxVl;" * "\x2562"
,"boxVL;" * "\x2563"
,"boxvr;" * "\x251C"
,"boxvR;" * "\x255E"
,"boxVr;" * "\x255F"
,"boxVR;" * "\x2560"
,"bprime;" * "\x2035"
,"Breve;" * "\x02D8"
,"breve;" * "\x02D8"
,"brvbar" * "\x00A6"
,"brvbar;" * "\x00A6"
,"Bscr;" * "\x212C"
,"bscr;" * "\x1D4B7"
,"bsemi;" * "\x204F"
,"bsim;" * "\x223D"
,"bsime;" * "\x22CD"
,"bsol;" * "\x005C"
,"bsolb;" * "\x29C5"
,"bsolhsub;" * "\x27C8"
,"bull;" * "\x2022"
,"bullet;" * "\x2022"
,"bump;" * "\x224E"
,"bumpe;" * "\x224F"
,"bumpE;" * "\x2AAE"
,"Bumpeq;" * "\x224E"
,"bumpeq;" * "\x224F"
,"Cacute;" * "\x0106"
,"cacute;" * "\x0107"
,"cap;" * "\x2229"
,"Cap;" * "\x22D2"
,"capand;" * "\x2A44"
,"capbrcup;" * "\x2A49"
,"capcap;" * "\x2A4B"
,"capcup;" * "\x2A47"
,"capdot;" * "\x2A40"
,"CapitalDifferentialD;" * "\x2145"
,"caps;" * "\x2229\xFE00"
,"caret;" * "\x2041"
,"caron;" * "\x02C7"
,"Cayleys;" * "\x212D"
,"ccaps;" * "\x2A4D"
,"Ccaron;" * "\x010C"
,"ccaron;" * "\x010D"
,"Ccedil" * "\x00C7"
,"ccedil" * "\x00E7"
,"Ccedil;" * "\x00C7"
,"ccedil;" * "\x00E7"
,"Ccirc;" * "\x0108"
,"ccirc;" * "\x0109"
,"Cconint;" * "\x2230"
,"ccups;" * "\x2A4C"
,"ccupssm;" * "\x2A50"
,"Cdot;" * "\x010A"
,"cdot;" * "\x010B"
,"cedil" * "\x00B8"
,"cedil;" * "\x00B8"
,"Cedilla;" * "\x00B8"
,"cemptyv;" * "\x29B2"
,"cent" * "\x00A2"
,"cent;" * "\x00A2"
,"CenterDot;" * "\x00B7"
,"centerdot;" * "\x00B7"
,"Cfr;" * "\x212D"
,"cfr;" * "\x1D520"
,"CHcy;" * "\x0427"
,"chcy;" * "\x0447"
,"check;" * "\x2713"
,"checkmark;" * "\x2713"
,"Chi;" * "\x03A7"
,"chi;" * "\x03C7"
,"cir;" * "\x25CB"
,"circ;" * "\x02C6"
,"circeq;" * "\x2257"
,"circlearrowleft;" * "\x21BA"
,"circlearrowright;" * "\x21BB"
,"circledast;" * "\x229B"
,"circledcirc;" * "\x229A"
,"circleddash;" * "\x229D"
,"CircleDot;" * "\x2299"
,"circledR;" * "\x00AE"
,"circledS;" * "\x24C8"
,"CircleMinus;" * "\x2296"
,"CirclePlus;" * "\x2295"
,"CircleTimes;" * "\x2297"
,"cire;" * "\x2257"
,"cirE;" * "\x29C3"
,"cirfnint;" * "\x2A10"
,"cirmid;" * "\x2AEF"
,"cirscir;" * "\x29C2"
,"ClockwiseContourIntegral;" * "\x2232"
,"CloseCurlyDoubleQuote;" * "\x201D"
,"CloseCurlyQuote;" * "\x2019"
,"clubs;" * "\x2663"
,"clubsuit;" * "\x2663"
,"colon;" * "\x003A"
,"Colon;" * "\x2237"
,"colone;" * "\x2254"
,"Colone;" * "\x2A74"
,"coloneq;" * "\x2254"
,"comma;" * "\x002C"
,"commat;" * "\x0040"
,"comp;" * "\x2201"
,"compfn;" * "\x2218"
,"complement;" * "\x2201"
,"complexes;" * "\x2102"
,"cong;" * "\x2245"
,"congdot;" * "\x2A6D"
,"Congruent;" * "\x2261"
,"conint;" * "\x222E"
,"Conint;" * "\x222F"
,"ContourIntegral;" * "\x222E"
,"Copf;" * "\x2102"
,"copf;" * "\x1D554"
,"coprod;" * "\x2210"
,"Coproduct;" * "\x2210"
,"COPY" * "\x00A9"
,"copy" * "\x00A9"
,"COPY;" * "\x00A9"
,"copy;" * "\x00A9"
,"copysr;" * "\x2117"
,"CounterClockwiseContourIntegral;" * "\x2233"
,"crarr;" * "\x21B5"
,"cross;" * "\x2717"
,"Cross;" * "\x2A2F"
,"Cscr;" * "\x1D49E"
,"cscr;" * "\x1D4B8"
,"csub;" * "\x2ACF"
,"csube;" * "\x2AD1"
,"csup;" * "\x2AD0"
,"csupe;" * "\x2AD2"
,"ctdot;" * "\x22EF"
,"cudarrl;" * "\x2938"
,"cudarrr;" * "\x2935"
,"cuepr;" * "\x22DE"
,"cuesc;" * "\x22DF"
,"cularr;" * "\x21B6"
,"cularrp;" * "\x293D"
,"cup;" * "\x222A"
,"Cup;" * "\x22D3"
,"cupbrcap;" * "\x2A48"
,"CupCap;" * "\x224D"
,"cupcap;" * "\x2A46"
,"cupcup;" * "\x2A4A"
,"cupdot;" * "\x228D"
,"cupor;" * "\x2A45"
,"cups;" * "\x222A\xFE00"
,"curarr;" * "\x21B7"
,"curarrm;" * "\x293C"
,"curlyeqprec;" * "\x22DE"
,"curlyeqsucc;" * "\x22DF"
,"curlyvee;" * "\x22CE"
,"curlywedge;" * "\x22CF"
,"curren" * "\x00A4"
,"curren;" * "\x00A4"
,"curvearrowleft;" * "\x21B6"
,"curvearrowright;" * "\x21B7"
,"cuvee;" * "\x22CE"
,"cuwed;" * "\x22CF"
,"cwconint;" * "\x2232"
,"cwint;" * "\x2231"
,"cylcty;" * "\x232D"
,"dagger;" * "\x2020"
,"Dagger;" * "\x2021"
,"daleth;" * "\x2138"
,"darr;" * "\x2193"
,"Darr;" * "\x21A1"
,"dArr;" * "\x21D3"
,"dash;" * "\x2010"
,"dashv;" * "\x22A3"
,"Dashv;" * "\x2AE4"
,"dbkarow;" * "\x290F"
,"dblac;" * "\x02DD"
,"Dcaron;" * "\x010E"
,"dcaron;" * "\x010F"
,"Dcy;" * "\x0414"
,"dcy;" * "\x0434"
,"DD;" * "\x2145"
,"dd;" * "\x2146"
,"ddagger;" * "\x2021"
,"ddarr;" * "\x21CA"
,"DDotrahd;" * "\x2911"
,"ddotseq;" * "\x2A77"
,"deg" * "\x00B0"
,"deg;" * "\x00B0"
,"Del;" * "\x2207"
,"Delta;" * "\x0394"
,"delta;" * "\x03B4"
,"demptyv;" * "\x29B1"
,"dfisht;" * "\x297F"
,"Dfr;" * "\x1D507"
,"dfr;" * "\x1D521"
,"dHar;" * "\x2965"
,"dharl;" * "\x21C3"
,"dharr;" * "\x21C2"
,"DiacriticalAcute;" * "\x00B4"
,"DiacriticalDot;" * "\x02D9"
,"DiacriticalDoubleAcute;" * "\x02DD"
,"DiacriticalGrave;" * "\x0060"
,"DiacriticalTilde;" * "\x02DC"
,"diam;" * "\x22C4"
,"Diamond;" * "\x22C4"
,"diamond;" * "\x22C4"
,"diamondsuit;" * "\x2666"
,"diams;" * "\x2666"
,"die;" * "\x00A8"
,"DifferentialD;" * "\x2146"
,"digamma;" * "\x03DD"
,"disin;" * "\x22F2"
,"div;" * "\x00F7"
,"divide" * "\x00F7"
,"divide;" * "\x00F7"
,"divideontimes;" * "\x22C7"
,"divonx;" * "\x22C7"
,"DJcy;" * "\x0402"
,"djcy;" * "\x0452"
,"dlcorn;" * "\x231E"
,"dlcrop;" * "\x230D"
,"dollar;" * "\x0024"
,"Dopf;" * "\x1D53B"
,"dopf;" * "\x1D555"
,"Dot;" * "\x00A8"
,"dot;" * "\x02D9"
,"DotDot;" * "\x20DC"
,"doteq;" * "\x2250"
,"doteqdot;" * "\x2251"
,"DotEqual;" * "\x2250"
,"dotminus;" * "\x2238"
,"dotplus;" * "\x2214"
,"dotsquare;" * "\x22A1"
,"doublebarwedge;" * "\x2306"
,"DoubleContourIntegral;" * "\x222F"
,"DoubleDot;" * "\x00A8"
,"DoubleDownArrow;" * "\x21D3"
,"DoubleLeftArrow;" * "\x21D0"
,"DoubleLeftRightArrow;" * "\x21D4"
,"DoubleLeftTee;" * "\x2AE4"
,"DoubleLongLeftArrow;" * "\x27F8"
,"DoubleLongLeftRightArrow;" * "\x27FA"
,"DoubleLongRightArrow;" * "\x27F9"
,"DoubleRightArrow;" * "\x21D2"
,"DoubleRightTee;" * "\x22A8"
,"DoubleUpArrow;" * "\x21D1"
,"DoubleUpDownArrow;" * "\x21D5"
,"DoubleVerticalBar;" * "\x2225"
,"DownArrow;" * "\x2193"
,"downarrow;" * "\x2193"
,"Downarrow;" * "\x21D3"
,"DownArrowBar;" * "\x2913"
,"DownArrowUpArrow;" * "\x21F5"
,"DownBreve;" * "\x0311"
,"downdownarrows;" * "\x21CA"
,"downharpoonleft;" * "\x21C3"
,"downharpoonright;" * "\x21C2"
,"DownLeftRightVector;" * "\x2950"
,"DownLeftTeeVector;" * "\x295E"
,"DownLeftVector;" * "\x21BD"
,"DownLeftVectorBar;" * "\x2956"
,"DownRightTeeVector;" * "\x295F"
,"DownRightVector;" * "\x21C1"
,"DownRightVectorBar;" * "\x2957"
,"DownTee;" * "\x22A4"
,"DownTeeArrow;" * "\x21A7"
,"drbkarow;" * "\x2910"
,"drcorn;" * "\x231F"
,"drcrop;" * "\x230C"
,"Dscr;" * "\x1D49F"
,"dscr;" * "\x1D4B9"
,"DScy;" * "\x0405"
,"dscy;" * "\x0455"
,"dsol;" * "\x29F6"
,"Dstrok;" * "\x0110"
,"dstrok;" * "\x0111"
,"dtdot;" * "\x22F1"
,"dtri;" * "\x25BF"
,"dtrif;" * "\x25BE"
,"duarr;" * "\x21F5"
,"duhar;" * "\x296F"
,"dwangle;" * "\x29A6"
,"DZcy;" * "\x040F"
,"dzcy;" * "\x045F"
,"dzigrarr;" * "\x27FF"
,"Eacute" * "\x00C9"
,"eacute" * "\x00E9"
,"Eacute;" * "\x00C9"
,"eacute;" * "\x00E9"
,"easter;" * "\x2A6E"
,"Ecaron;" * "\x011A"
,"ecaron;" * "\x011B"
,"ecir;" * "\x2256"
,"Ecirc" * "\x00CA"
,"ecirc" * "\x00EA"
,"Ecirc;" * "\x00CA"
,"ecirc;" * "\x00EA"
,"ecolon;" * "\x2255"
,"Ecy;" * "\x042D"
,"ecy;" * "\x044D"
,"eDDot;" * "\x2A77"
,"Edot;" * "\x0116"
,"edot;" * "\x0117"
,"eDot;" * "\x2251"
,"ee;" * "\x2147"
,"efDot;" * "\x2252"
,"Efr;" * "\x1D508"
,"efr;" * "\x1D522"
,"eg;" * "\x2A9A"
,"Egrave" * "\x00C8"
,"egrave" * "\x00E8"
,"Egrave;" * "\x00C8"
,"egrave;" * "\x00E8"
,"egs;" * "\x2A96"
,"egsdot;" * "\x2A98"
,"el;" * "\x2A99"
,"Element;" * "\x2208"
,"elinters;" * "\x23E7"
,"ell;" * "\x2113"
,"els;" * "\x2A95"
,"elsdot;" * "\x2A97"
,"Emacr;" * "\x0112"
,"emacr;" * "\x0113"
,"empty;" * "\x2205"
,"emptyset;" * "\x2205"
,"EmptySmallSquare;" * "\x25FB"
,"emptyv;" * "\x2205"
,"EmptyVerySmallSquare;" * "\x25AB"
,"emsp13;" * "\x2004"
,"emsp14;" * "\x2005"
,"emsp;" * "\x2003"
,"ENG;" * "\x014A"
,"eng;" * "\x014B"
,"ensp;" * "\x2002"
,"Eogon;" * "\x0118"
,"eogon;" * "\x0119"
,"Eopf;" * "\x1D53C"
,"eopf;" * "\x1D556"
,"epar;" * "\x22D5"
,"eparsl;" * "\x29E3"
,"eplus;" * "\x2A71"
,"epsi;" * "\x03B5"
,"Epsilon;" * "\x0395"
,"epsilon;" * "\x03B5"
,"epsiv;" * "\x03F5"
,"eqcirc;" * "\x2256"
,"eqcolon;" * "\x2255"
,"eqsim;" * "\x2242"
,"eqslantgtr;" * "\x2A96"
,"eqslantless;" * "\x2A95"
,"Equal;" * "\x2A75"
,"equals;" * "\x003D"
,"EqualTilde;" * "\x2242"
,"equest;" * "\x225F"
,"Equilibrium;" * "\x21CC"
,"equiv;" * "\x2261"
,"equivDD;" * "\x2A78"
,"eqvparsl;" * "\x29E5"
,"erarr;" * "\x2971"
,"erDot;" * "\x2253"
,"escr;" * "\x212F"
,"Escr;" * "\x2130"
,"esdot;" * "\x2250"
,"esim;" * "\x2242"
,"Esim;" * "\x2A73"
,"Eta;" * "\x0397"
,"eta;" * "\x03B7"
,"ETH" * "\x00D0"
,"eth" * "\x00F0"
,"ETH;" * "\x00D0"
,"eth;" * "\x00F0"
,"Euml" * "\x00CB"
,"euml" * "\x00EB"
,"Euml;" * "\x00CB"
,"euml;" * "\x00EB"
,"euro;" * "\x20AC"
,"excl;" * "\x0021"
,"exist;" * "\x2203"
,"Exists;" * "\x2203"
,"expectation;" * "\x2130"
,"ExponentialE;" * "\x2147"
,"exponentiale;" * "\x2147"
,"fallingdotseq;" * "\x2252"
,"Fcy;" * "\x0424"
,"fcy;" * "\x0444"
,"female;" * "\x2640"
,"ffilig;" * "\xFB03"
,"fflig;" * "\xFB00"
,"ffllig;" * "\xFB04"
,"Ffr;" * "\x1D509"
,"ffr;" * "\x1D523"
,"filig;" * "\xFB01"
,"FilledSmallSquare;" * "\x25FC"
,"FilledVerySmallSquare;" * "\x25AA"
,"fjlig;" * "\x0066\x006A"
,"flat;" * "\x266D"
,"fllig;" * "\xFB02"
,"fltns;" * "\x25B1"
,"fnof;" * "\x0192"
,"Fopf;" * "\x1D53D"
,"fopf;" * "\x1D557"
,"ForAll;" * "\x2200"
,"forall;" * "\x2200"
,"fork;" * "\x22D4"
,"forkv;" * "\x2AD9"
,"Fouriertrf;" * "\x2131"
,"fpartint;" * "\x2A0D"
,"frac12" * "\x00BD"
,"frac12;" * "\x00BD"
,"frac13;" * "\x2153"
,"frac14" * "\x00BC"
,"frac14;" * "\x00BC"
,"frac15;" * "\x2155"
,"frac16;" * "\x2159"
,"frac18;" * "\x215B"
,"frac23;" * "\x2154"
,"frac25;" * "\x2156"
,"frac34" * "\x00BE"
,"frac34;" * "\x00BE"
,"frac35;" * "\x2157"
,"frac38;" * "\x215C"
,"frac45;" * "\x2158"
,"frac56;" * "\x215A"
,"frac58;" * "\x215D"
,"frac78;" * "\x215E"
,"frasl;" * "\x2044"
,"frown;" * "\x2322"
,"Fscr;" * "\x2131"
,"fscr;" * "\x1D4BB"
,"gacute;" * "\x01F5"
,"Gamma;" * "\x0393"
,"gamma;" * "\x03B3"
,"Gammad;" * "\x03DC"
,"gammad;" * "\x03DD"
,"gap;" * "\x2A86"
,"Gbreve;" * "\x011E"
,"gbreve;" * "\x011F"
,"Gcedil;" * "\x0122"
,"Gcirc;" * "\x011C"
,"gcirc;" * "\x011D"
,"Gcy;" * "\x0413"
,"gcy;" * "\x0433"
,"Gdot;" * "\x0120"
,"gdot;" * "\x0121"
,"ge;" * "\x2265"
,"gE;" * "\x2267"
,"gel;" * "\x22DB"
,"gEl;" * "\x2A8C"
,"geq;" * "\x2265"
,"geqq;" * "\x2267"
,"geqslant;" * "\x2A7E"
,"ges;" * "\x2A7E"
,"gescc;" * "\x2AA9"
,"gesdot;" * "\x2A80"
,"gesdoto;" * "\x2A82"
,"gesdotol;" * "\x2A84"
,"gesl;" * "\x22DB\xFE00"
,"gesles;" * "\x2A94"
,"Gfr;" * "\x1D50A"
,"gfr;" * "\x1D524"
,"gg;" * "\x226B"
,"Gg;" * "\x22D9"
,"ggg;" * "\x22D9"
,"gimel;" * "\x2137"
,"GJcy;" * "\x0403"
,"gjcy;" * "\x0453"
,"gl;" * "\x2277"
,"gla;" * "\x2AA5"
,"glE;" * "\x2A92"
,"glj;" * "\x2AA4"
,"gnap;" * "\x2A8A"
,"gnapprox;" * "\x2A8A"
,"gnE;" * "\x2269"
,"gne;" * "\x2A88"
,"gneq;" * "\x2A88"
,"gneqq;" * "\x2269"
,"gnsim;" * "\x22E7"
,"Gopf;" * "\x1D53E"
,"gopf;" * "\x1D558"
,"grave;" * "\x0060"
,"GreaterEqual;" * "\x2265"
,"GreaterEqualLess;" * "\x22DB"
,"GreaterFullEqual;" * "\x2267"
,"GreaterGreater;" * "\x2AA2"
,"GreaterLess;" * "\x2277"
,"GreaterSlantEqual;" * "\x2A7E"
,"GreaterTilde;" * "\x2273"
,"gscr;" * "\x210A"
,"Gscr;" * "\x1D4A2"
,"gsim;" * "\x2273"
,"gsime;" * "\x2A8E"
,"gsiml;" * "\x2A90"
,"GT" * "\x003E"
,"gt" * "\x003E"
,"GT;" * "\x003E"
,"gt;" * "\x003E"
,"Gt;" * "\x226B"
,"gtcc;" * "\x2AA7"
,"gtcir;" * "\x2A7A"
,"gtdot;" * "\x22D7"
,"gtlPar;" * "\x2995"
,"gtquest;" * "\x2A7C"
,"gtrapprox;" * "\x2A86"
,"gtrarr;" * "\x2978"
,"gtrdot;" * "\x22D7"
,"gtreqless;" * "\x22DB"
,"gtreqqless;" * "\x2A8C"
,"gtrless;" * "\x2277"
,"gtrsim;" * "\x2273"
,"gvertneqq;" * "\x2269\xFE00"
,"gvnE;" * "\x2269\xFE00"
,"Hacek;" * "\x02C7"
,"hairsp;" * "\x200A"
,"half;" * "\x00BD"
,"hamilt;" * "\x210B"
,"HARDcy;" * "\x042A"
,"hardcy;" * "\x044A"
,"harr;" * "\x2194"
,"hArr;" * "\x21D4"
,"harrcir;" * "\x2948"
,"harrw;" * "\x21AD"
,"Hat;" * "\x005E"
,"hbar;" * "\x210F"
,"Hcirc;" * "\x0124"
,"hcirc;" * "\x0125"
,"hearts;" * "\x2665"
,"heartsuit;" * "\x2665"
,"hellip;" * "\x2026"
,"hercon;" * "\x22B9"
,"Hfr;" * "\x210C"
,"hfr;" * "\x1D525"
,"HilbertSpace;" * "\x210B"
,"hksearow;" * "\x2925"
,"hkswarow;" * "\x2926"
,"hoarr;" * "\x21FF"
,"homtht;" * "\x223B"
,"hookleftarrow;" * "\x21A9"
,"hookrightarrow;" * "\x21AA"
,"Hopf;" * "\x210D"
,"hopf;" * "\x1D559"
,"horbar;" * "\x2015"
,"HorizontalLine;" * "\x2500"
,"Hscr;" * "\x210B"
,"hscr;" * "\x1D4BD"
,"hslash;" * "\x210F"
,"Hstrok;" * "\x0126"
,"hstrok;" * "\x0127"
,"HumpDownHump;" * "\x224E"
,"HumpEqual;" * "\x224F"
,"hybull;" * "\x2043"
,"hyphen;" * "\x2010"
,"Iacute" * "\x00CD"
,"iacute" * "\x00ED"
,"Iacute;" * "\x00CD"
,"iacute;" * "\x00ED"
,"ic;" * "\x2063"
,"Icirc" * "\x00CE"
,"icirc" * "\x00EE"
,"Icirc;" * "\x00CE"
,"icirc;" * "\x00EE"
,"Icy;" * "\x0418"
,"icy;" * "\x0438"
,"Idot;" * "\x0130"
,"IEcy;" * "\x0415"
,"iecy;" * "\x0435"
,"iexcl" * "\x00A1"
,"iexcl;" * "\x00A1"
,"iff;" * "\x21D4"
,"Ifr;" * "\x2111"
,"ifr;" * "\x1D526"
,"Igrave" * "\x00CC"
,"igrave" * "\x00EC"
,"Igrave;" * "\x00CC"
,"igrave;" * "\x00EC"
,"ii;" * "\x2148"
,"iiiint;" * "\x2A0C"
,"iiint;" * "\x222D"
,"iinfin;" * "\x29DC"
,"iiota;" * "\x2129"
,"IJlig;" * "\x0132"
,"ijlig;" * "\x0133"
,"Im;" * "\x2111"
,"Imacr;" * "\x012A"
,"imacr;" * "\x012B"
,"image;" * "\x2111"
,"ImaginaryI;" * "\x2148"
,"imagline;" * "\x2110"
,"imagpart;" * "\x2111"
,"imath;" * "\x0131"
,"imof;" * "\x22B7"
,"imped;" * "\x01B5"
,"Implies;" * "\x21D2"
,"in;" * "\x2208"
,"incare;" * "\x2105"
,"infin;" * "\x221E"
,"infintie;" * "\x29DD"
,"inodot;" * "\x0131"
,"int;" * "\x222B"
,"Int;" * "\x222C"
,"intcal;" * "\x22BA"
,"integers;" * "\x2124"
,"Integral;" * "\x222B"
,"intercal;" * "\x22BA"
,"Intersection;" * "\x22C2"
,"intlarhk;" * "\x2A17"
,"intprod;" * "\x2A3C"
,"InvisibleComma;" * "\x2063"
,"InvisibleTimes;" * "\x2062"
,"IOcy;" * "\x0401"
,"iocy;" * "\x0451"
,"Iogon;" * "\x012E"
,"iogon;" * "\x012F"
,"Iopf;" * "\x1D540"
,"iopf;" * "\x1D55A"
,"Iota;" * "\x0399"
,"iota;" * "\x03B9"
,"iprod;" * "\x2A3C"
,"iquest" * "\x00BF"
,"iquest;" * "\x00BF"
,"Iscr;" * "\x2110"
,"iscr;" * "\x1D4BE"
,"isin;" * "\x2208"
,"isindot;" * "\x22F5"
,"isinE;" * "\x22F9"
,"isins;" * "\x22F4"
,"isinsv;" * "\x22F3"
,"isinv;" * "\x2208"
,"it;" * "\x2062"
,"Itilde;" * "\x0128"
,"itilde;" * "\x0129"
,"Iukcy;" * "\x0406"
,"iukcy;" * "\x0456"
,"Iuml" * "\x00CF"
,"iuml" * "\x00EF"
,"Iuml;" * "\x00CF"
,"iuml;" * "\x00EF"
,"Jcirc;" * "\x0134"
,"jcirc;" * "\x0135"
,"Jcy;" * "\x0419"
,"jcy;" * "\x0439"
,"Jfr;" * "\x1D50D"
,"jfr;" * "\x1D527"
,"jmath;" * "\x0237"
,"Jopf;" * "\x1D541"
,"jopf;" * "\x1D55B"
,"Jscr;" * "\x1D4A5"
,"jscr;" * "\x1D4BF"
,"Jsercy;" * "\x0408"
,"jsercy;" * "\x0458"
,"Jukcy;" * "\x0404"
,"jukcy;" * "\x0454"
,"Kappa;" * "\x039A"
,"kappa;" * "\x03BA"
,"kappav;" * "\x03F0"
,"Kcedil;" * "\x0136"
,"kcedil;" * "\x0137"
,"Kcy;" * "\x041A"
,"kcy;" * "\x043A"
,"Kfr;" * "\x1D50E"
,"kfr;" * "\x1D528"
,"kgreen;" * "\x0138"
,"KHcy;" * "\x0425"
,"khcy;" * "\x0445"
,"KJcy;" * "\x040C"
,"kjcy;" * "\x045C"
,"Kopf;" * "\x1D542"
,"kopf;" * "\x1D55C"
,"Kscr;" * "\x1D4A6"
,"kscr;" * "\x1D4C0"
,"lAarr;" * "\x21DA"
,"Lacute;" * "\x0139"
,"lacute;" * "\x013A"
,"laemptyv;" * "\x29B4"
,"lagran;" * "\x2112"
,"Lambda;" * "\x039B"
,"lambda;" * "\x03BB"
,"lang;" * "\x27E8"
,"Lang;" * "\x27EA"
,"langd;" * "\x2991"
,"langle;" * "\x27E8"
,"lap;" * "\x2A85"
,"Laplacetrf;" * "\x2112"
,"laquo" * "\x00AB"
,"laquo;" * "\x00AB"
,"larr;" * "\x2190"
,"Larr;" * "\x219E"
,"lArr;" * "\x21D0"
,"larrb;" * "\x21E4"
,"larrbfs;" * "\x291F"
,"larrfs;" * "\x291D"
,"larrhk;" * "\x21A9"
,"larrlp;" * "\x21AB"
,"larrpl;" * "\x2939"
,"larrsim;" * "\x2973"
,"larrtl;" * "\x21A2"
,"lat;" * "\x2AAB"
,"latail;" * "\x2919"
,"lAtail;" * "\x291B"
,"late;" * "\x2AAD"
,"lates;" * "\x2AAD\xFE00"
,"lbarr;" * "\x290C"
,"lBarr;" * "\x290E"
,"lbbrk;" * "\x2772"
,"lbrace;" * "\x007B"
,"lbrack;" * "\x005B"
,"lbrke;" * "\x298B"
,"lbrksld;" * "\x298F"
,"lbrkslu;" * "\x298D"
,"Lcaron;" * "\x013D"
,"lcaron;" * "\x013E"
,"Lcedil;" * "\x013B"
,"lcedil;" * "\x013C"
,"lceil;" * "\x2308"
,"lcub;" * "\x007B"
,"Lcy;" * "\x041B"
,"lcy;" * "\x043B"
,"ldca;" * "\x2936"
,"ldquo;" * "\x201C"
,"ldquor;" * "\x201E"
,"ldrdhar;" * "\x2967"
,"ldrushar;" * "\x294B"
,"ldsh;" * "\x21B2"
,"le;" * "\x2264"
,"lE;" * "\x2266"
,"LeftAngleBracket;" * "\x27E8"
,"LeftArrow;" * "\x2190"
,"leftarrow;" * "\x2190"
,"Leftarrow;" * "\x21D0"
,"LeftArrowBar;" * "\x21E4"
,"LeftArrowRightArrow;" * "\x21C6"
,"leftarrowtail;" * "\x21A2"
,"LeftCeiling;" * "\x2308"
,"LeftDoubleBracket;" * "\x27E6"
,"LeftDownTeeVector;" * "\x2961"
,"LeftDownVector;" * "\x21C3"
,"LeftDownVectorBar;" * "\x2959"
,"LeftFloor;" * "\x230A"
,"leftharpoondown;" * "\x21BD"
,"leftharpoonup;" * "\x21BC"
,"leftleftarrows;" * "\x21C7"
,"LeftRightArrow;" * "\x2194"
,"leftrightarrow;" * "\x2194"
,"Leftrightarrow;" * "\x21D4"
,"leftrightarrows;" * "\x21C6"
,"leftrightharpoons;" * "\x21CB"
,"leftrightsquigarrow;" * "\x21AD"
,"LeftRightVector;" * "\x294E"
,"LeftTee;" * "\x22A3"
,"LeftTeeArrow;" * "\x21A4"
,"LeftTeeVector;" * "\x295A"
,"leftthreetimes;" * "\x22CB"
,"LeftTriangle;" * "\x22B2"
,"LeftTriangleBar;" * "\x29CF"
,"LeftTriangleEqual;" * "\x22B4"
,"LeftUpDownVector;" * "\x2951"
,"LeftUpTeeVector;" * "\x2960"
,"LeftUpVector;" * "\x21BF"
,"LeftUpVectorBar;" * "\x2958"
,"LeftVector;" * "\x21BC"
,"LeftVectorBar;" * "\x2952"
,"leg;" * "\x22DA"
,"lEg;" * "\x2A8B"
,"leq;" * "\x2264"
,"leqq;" * "\x2266"
,"leqslant;" * "\x2A7D"
,"les;" * "\x2A7D"
,"lescc;" * "\x2AA8"
,"lesdot;" * "\x2A7F"
,"lesdoto;" * "\x2A81"
,"lesdotor;" * "\x2A83"
,"lesg;" * "\x22DA\xFE00"
,"lesges;" * "\x2A93"
,"lessapprox;" * "\x2A85"
,"lessdot;" * "\x22D6"
,"lesseqgtr;" * "\x22DA"
,"lesseqqgtr;" * "\x2A8B"
,"LessEqualGreater;" * "\x22DA"
,"LessFullEqual;" * "\x2266"
,"LessGreater;" * "\x2276"
,"lessgtr;" * "\x2276"
,"LessLess;" * "\x2AA1"
,"lesssim;" * "\x2272"
,"LessSlantEqual;" * "\x2A7D"
,"LessTilde;" * "\x2272"
,"lfisht;" * "\x297C"
,"lfloor;" * "\x230A"
,"Lfr;" * "\x1D50F"
,"lfr;" * "\x1D529"
,"lg;" * "\x2276"
,"lgE;" * "\x2A91"
,"lHar;" * "\x2962"
,"lhard;" * "\x21BD"
,"lharu;" * "\x21BC"
,"lharul;" * "\x296A"
,"lhblk;" * "\x2584"
,"LJcy;" * "\x0409"
,"ljcy;" * "\x0459"
,"ll;" * "\x226A"
,"Ll;" * "\x22D8"
,"llarr;" * "\x21C7"
,"llcorner;" * "\x231E"
,"Lleftarrow;" * "\x21DA"
,"llhard;" * "\x296B"
,"lltri;" * "\x25FA"
,"Lmidot;" * "\x013F"
,"lmidot;" * "\x0140"
,"lmoust;" * "\x23B0"
,"lmoustache;" * "\x23B0"
,"lnap;" * "\x2A89"
,"lnapprox;" * "\x2A89"
,"lnE;" * "\x2268"
,"lne;" * "\x2A87"
,"lneq;" * "\x2A87"
,"lneqq;" * "\x2268"
,"lnsim;" * "\x22E6"
,"loang;" * "\x27EC"
,"loarr;" * "\x21FD"
,"lobrk;" * "\x27E6"
,"LongLeftArrow;" * "\x27F5"
,"longleftarrow;" * "\x27F5"
,"Longleftarrow;" * "\x27F8"
,"LongLeftRightArrow;" * "\x27F7"
,"longleftrightarrow;" * "\x27F7"
,"Longleftrightarrow;" * "\x27FA"
,"longmapsto;" * "\x27FC"
,"LongRightArrow;" * "\x27F6"
,"longrightarrow;" * "\x27F6"
,"Longrightarrow;" * "\x27F9"
,"looparrowleft;" * "\x21AB"
,"looparrowright;" * "\x21AC"
,"lopar;" * "\x2985"
,"Lopf;" * "\x1D543"
,"lopf;" * "\x1D55D"
,"loplus;" * "\x2A2D"
,"lotimes;" * "\x2A34"
,"lowast;" * "\x2217"
,"lowbar;" * "\x005F"
,"LowerLeftArrow;" * "\x2199"
,"LowerRightArrow;" * "\x2198"
,"loz;" * "\x25CA"
,"lozenge;" * "\x25CA"
,"lozf;" * "\x29EB"
,"lpar;" * "\x0028"
,"lparlt;" * "\x2993"
,"lrarr;" * "\x21C6"
,"lrcorner;" * "\x231F"
,"lrhar;" * "\x21CB"
,"lrhard;" * "\x296D"
,"lrm;" * "\x200E"
,"lrtri;" * "\x22BF"
,"lsaquo;" * "\x2039"
,"Lscr;" * "\x2112"
,"lscr;" * "\x1D4C1"
,"Lsh;" * "\x21B0"
,"lsh;" * "\x21B0"
,"lsim;" * "\x2272"
,"lsime;" * "\x2A8D"
,"lsimg;" * "\x2A8F"
,"lsqb;" * "\x005B"
,"lsquo;" * "\x2018"
,"lsquor;" * "\x201A"
,"Lstrok;" * "\x0141"
,"lstrok;" * "\x0142"
,"LT" * "\x003C"
,"lt" * "\x003C"
,"LT;" * "\x003C"
,"lt;" * "\x003C"
,"Lt;" * "\x226A"
,"ltcc;" * "\x2AA6"
,"ltcir;" * "\x2A79"
,"ltdot;" * "\x22D6"
,"lthree;" * "\x22CB"
,"ltimes;" * "\x22C9"
,"ltlarr;" * "\x2976"
,"ltquest;" * "\x2A7B"
,"ltri;" * "\x25C3"
,"ltrie;" * "\x22B4"
,"ltrif;" * "\x25C2"
,"ltrPar;" * "\x2996"
,"lurdshar;" * "\x294A"
,"luruhar;" * "\x2966"
,"lvertneqq;" * "\x2268\xFE00"
,"lvnE;" * "\x2268\xFE00"
,"macr" * "\x00AF"
,"macr;" * "\x00AF"
,"male;" * "\x2642"
,"malt;" * "\x2720"
,"maltese;" * "\x2720"
,"map;" * "\x21A6"
,"Map;" * "\x2905"
,"mapsto;" * "\x21A6"
,"mapstodown;" * "\x21A7"
,"mapstoleft;" * "\x21A4"
,"mapstoup;" * "\x21A5"
,"marker;" * "\x25AE"
,"mcomma;" * "\x2A29"
,"Mcy;" * "\x041C"
,"mcy;" * "\x043C"
,"mdash;" * "\x2014"
,"mDDot;" * "\x223A"
,"measuredangle;" * "\x2221"
,"MediumSpace;" * "\x205F"
,"Mellintrf;" * "\x2133"
,"Mfr;" * "\x1D510"
,"mfr;" * "\x1D52A"
,"mho;" * "\x2127"
,"micro" * "\x00B5"
,"micro;" * "\x00B5"
,"mid;" * "\x2223"
,"midast;" * "\x002A"
,"midcir;" * "\x2AF0"
,"middot" * "\x00B7"
,"middot;" * "\x00B7"
,"minus;" * "\x2212"
,"minusb;" * "\x229F"
,"minusd;" * "\x2238"
,"minusdu;" * "\x2A2A"
,"MinusPlus;" * "\x2213"
,"mlcp;" * "\x2ADB"
,"mldr;" * "\x2026"
,"mnplus;" * "\x2213"
,"models;" * "\x22A7"
,"Mopf;" * "\x1D544"
,"mopf;" * "\x1D55E"
,"mp;" * "\x2213"
,"Mscr;" * "\x2133"
,"mscr;" * "\x1D4C2"
,"mstpos;" * "\x223E"
,"Mu;" * "\x039C"
,"mu;" * "\x03BC"
,"multimap;" * "\x22B8"
,"mumap;" * "\x22B8"
,"nabla;" * "\x2207"
,"Nacute;" * "\x0143"
,"nacute;" * "\x0144"
,"nang;" * "\x2220\x20D2"
,"nap;" * "\x2249"
,"napE;" * "\x2A70\x0338"
,"napid;" * "\x224B\x0338"
,"napos;" * "\x0149"
,"napprox;" * "\x2249"
,"natur;" * "\x266E"
,"natural;" * "\x266E"
,"naturals;" * "\x2115"
,"nbsp" * "\x00A0"
,"nbsp;" * "\x00A0"
,"nbump;" * "\x224E\x0338"
,"nbumpe;" * "\x224F\x0338"
,"ncap;" * "\x2A43"
,"Ncaron;" * "\x0147"
,"ncaron;" * "\x0148"
,"Ncedil;" * "\x0145"
,"ncedil;" * "\x0146"
,"ncong;" * "\x2247"
,"ncongdot;" * "\x2A6D\x0338"
,"ncup;" * "\x2A42"
,"Ncy;" * "\x041D"
,"ncy;" * "\x043D"
,"ndash;" * "\x2013"
,"ne;" * "\x2260"
,"nearhk;" * "\x2924"
,"nearr;" * "\x2197"
,"neArr;" * "\x21D7"
,"nearrow;" * "\x2197"
,"nedot;" * "\x2250\x0338"
,"NegativeMediumSpace;" * "\x200B"
,"NegativeThickSpace;" * "\x200B"
,"NegativeThinSpace;" * "\x200B"
,"NegativeVeryThinSpace;" * "\x200B"
,"nequiv;" * "\x2262"
,"nesear;" * "\x2928"
,"nesim;" * "\x2242\x0338"
,"NestedGreaterGreater;" * "\x226B"
,"NestedLessLess;" * "\x226A"
,"NewLine;" * "\x000A"
,"nexist;" * "\x2204"
,"nexists;" * "\x2204"
,"Nfr;" * "\x1D511"
,"nfr;" * "\x1D52B"
,"ngE;" * "\x2267\x0338"
,"nge;" * "\x2271"
,"ngeq;" * "\x2271"
,"ngeqq;" * "\x2267\x0338"
,"ngeqslant;" * "\x2A7E\x0338"
,"nges;" * "\x2A7E\x0338"
,"nGg;" * "\x22D9\x0338"
,"ngsim;" * "\x2275"
,"nGt;" * "\x226B\x20D2"
,"ngt;" * "\x226F"
,"ngtr;" * "\x226F"
,"nGtv;" * "\x226B\x0338"
,"nharr;" * "\x21AE"
,"nhArr;" * "\x21CE"
,"nhpar;" * "\x2AF2"
,"ni;" * "\x220B"
,"nis;" * "\x22FC"
,"nisd;" * "\x22FA"
,"niv;" * "\x220B"
,"NJcy;" * "\x040A"
,"njcy;" * "\x045A"
,"nlarr;" * "\x219A"
,"nlArr;" * "\x21CD"
,"nldr;" * "\x2025"
,"nlE;" * "\x2266\x0338"
,"nle;" * "\x2270"
,"nleftarrow;" * "\x219A"
,"nLeftarrow;" * "\x21CD"
,"nleftrightarrow;" * "\x21AE"
,"nLeftrightarrow;" * "\x21CE"
,"nleq;" * "\x2270"
,"nleqq;" * "\x2266\x0338"
,"nleqslant;" * "\x2A7D\x0338"
,"nles;" * "\x2A7D\x0338"
,"nless;" * "\x226E"
,"nLl;" * "\x22D8\x0338"
,"nlsim;" * "\x2274"
,"nLt;" * "\x226A\x20D2"
,"nlt;" * "\x226E"
,"nltri;" * "\x22EA"
,"nltrie;" * "\x22EC"
,"nLtv;" * "\x226A\x0338"
,"nmid;" * "\x2224"
,"NoBreak;" * "\x2060"
,"NonBreakingSpace;" * "\x00A0"
,"Nopf;" * "\x2115"
,"nopf;" * "\x1D55F"
,"not" * "\x00AC"
,"not;" * "\x00AC"
,"Not;" * "\x2AEC"
,"NotCongruent;" * "\x2262"
,"NotCupCap;" * "\x226D"
,"NotDoubleVerticalBar;" * "\x2226"
,"NotElement;" * "\x2209"
,"NotEqual;" * "\x2260"
,"NotEqualTilde;" * "\x2242\x0338"
,"NotExists;" * "\x2204"
,"NotGreater;" * "\x226F"
,"NotGreaterEqual;" * "\x2271"
,"NotGreaterFullEqual;" * "\x2267\x0338"
,"NotGreaterGreater;" * "\x226B\x0338"
,"NotGreaterLess;" * "\x2279"
,"NotGreaterSlantEqual;" * "\x2A7E\x0338"
,"NotGreaterTilde;" * "\x2275"
,"NotHumpDownHump;" * "\x224E\x0338"
,"NotHumpEqual;" * "\x224F\x0338"
,"notin;" * "\x2209"
,"notindot;" * "\x22F5\x0338"
,"notinE;" * "\x22F9\x0338"
,"notinva;" * "\x2209"
,"notinvb;" * "\x22F7"
,"notinvc;" * "\x22F6"
,"NotLeftTriangle;" * "\x22EA"
,"NotLeftTriangleBar;" * "\x29CF\x0338"
,"NotLeftTriangleEqual;" * "\x22EC"
,"NotLess;" * "\x226E"
,"NotLessEqual;" * "\x2270"
,"NotLessGreater;" * "\x2278"
,"NotLessLess;" * "\x226A\x0338"
,"NotLessSlantEqual;" * "\x2A7D\x0338"
,"NotLessTilde;" * "\x2274"
,"NotNestedGreaterGreater;" * "\x2AA2\x0338"
,"NotNestedLessLess;" * "\x2AA1\x0338"
,"notni;" * "\x220C"
,"notniva;" * "\x220C"
,"notnivb;" * "\x22FE"
,"notnivc;" * "\x22FD"
,"NotPrecedes;" * "\x2280"
,"NotPrecedesEqual;" * "\x2AAF\x0338"
,"NotPrecedesSlantEqual;" * "\x22E0"
,"NotReverseElement;" * "\x220C"
,"NotRightTriangle;" * "\x22EB"
,"NotRightTriangleBar;" * "\x29D0\x0338"
,"NotRightTriangleEqual;" * "\x22ED"
,"NotSquareSubset;" * "\x228F\x0338"
,"NotSquareSubsetEqual;" * "\x22E2"
,"NotSquareSuperset;" * "\x2290\x0338"
,"NotSquareSupersetEqual;" * "\x22E3"
,"NotSubset;" * "\x2282\x20D2"
,"NotSubsetEqual;" * "\x2288"
,"NotSucceeds;" * "\x2281"
,"NotSucceedsEqual;" * "\x2AB0\x0338"
,"NotSucceedsSlantEqual;" * "\x22E1"
,"NotSucceedsTilde;" * "\x227F\x0338"
,"NotSuperset;" * "\x2283\x20D2"
,"NotSupersetEqual;" * "\x2289"
,"NotTilde;" * "\x2241"
,"NotTildeEqual;" * "\x2244"
,"NotTildeFullEqual;" * "\x2247"
,"NotTildeTilde;" * "\x2249"
,"NotVerticalBar;" * "\x2224"
,"npar;" * "\x2226"
,"nparallel;" * "\x2226"
,"nparsl;" * "\x2AFD\x20E5"
,"npart;" * "\x2202\x0338"
,"npolint;" * "\x2A14"
,"npr;" * "\x2280"
,"nprcue;" * "\x22E0"
,"npre;" * "\x2AAF\x0338"
,"nprec;" * "\x2280"
,"npreceq;" * "\x2AAF\x0338"
,"nrarr;" * "\x219B"
,"nrArr;" * "\x21CF"
,"nrarrc;" * "\x2933\x0338"
,"nrarrw;" * "\x219D\x0338"
,"nrightarrow;" * "\x219B"
,"nRightarrow;" * "\x21CF"
,"nrtri;" * "\x22EB"
,"nrtrie;" * "\x22ED"
,"nsc;" * "\x2281"
,"nsccue;" * "\x22E1"
,"nsce;" * "\x2AB0\x0338"
,"Nscr;" * "\x1D4A9"
,"nscr;" * "\x1D4C3"
,"nshortmid;" * "\x2224"
,"nshortparallel;" * "\x2226"
,"nsim;" * "\x2241"
,"nsime;" * "\x2244"
,"nsimeq;" * "\x2244"
,"nsmid;" * "\x2224"
,"nspar;" * "\x2226"
,"nsqsube;" * "\x22E2"
,"nsqsupe;" * "\x22E3"
,"nsub;" * "\x2284"
,"nsube;" * "\x2288"
,"nsubE;" * "\x2AC5\x0338"
,"nsubset;" * "\x2282\x20D2"
,"nsubseteq;" * "\x2288"
,"nsubseteqq;" * "\x2AC5\x0338"
,"nsucc;" * "\x2281"
,"nsucceq;" * "\x2AB0\x0338"
,"nsup;" * "\x2285"
,"nsupe;" * "\x2289"
,"nsupE;" * "\x2AC6\x0338"
,"nsupset;" * "\x2283\x20D2"
,"nsupseteq;" * "\x2289"
,"nsupseteqq;" * "\x2AC6\x0338"
,"ntgl;" * "\x2279"
,"Ntilde" * "\x00D1"
,"ntilde" * "\x00F1"
,"Ntilde;" * "\x00D1"
,"ntilde;" * "\x00F1"
,"ntlg;" * "\x2278"
,"ntriangleleft;" * "\x22EA"
,"ntrianglelefteq;" * "\x22EC"
,"ntriangleright;" * "\x22EB"
,"ntrianglerighteq;" * "\x22ED"
,"Nu;" * "\x039D"
,"nu;" * "\x03BD"
,"num;" * "\x0023"
,"numero;" * "\x2116"
,"numsp;" * "\x2007"
,"nvap;" * "\x224D\x20D2"
,"nvdash;" * "\x22AC"
,"nvDash;" * "\x22AD"
,"nVdash;" * "\x22AE"
,"nVDash;" * "\x22AF"
,"nvge;" * "\x2265\x20D2"
,"nvgt;" * "\x003E\x20D2"
,"nvHarr;" * "\x2904"
,"nvinfin;" * "\x29DE"
,"nvlArr;" * "\x2902"
,"nvle;" * "\x2264\x20D2"
,"nvlt;" * "\x003C\x20D2"
,"nvltrie;" * "\x22B4\x20D2"
,"nvrArr;" * "\x2903"
,"nvrtrie;" * "\x22B5\x20D2"
,"nvsim;" * "\x223C\x20D2"
,"nwarhk;" * "\x2923"
,"nwarr;" * "\x2196"
,"nwArr;" * "\x21D6"
,"nwarrow;" * "\x2196"
,"nwnear;" * "\x2927"
,"Oacute" * "\x00D3"
,"oacute" * "\x00F3"
,"Oacute;" * "\x00D3"
,"oacute;" * "\x00F3"
,"oast;" * "\x229B"
,"ocir;" * "\x229A"
,"Ocirc" * "\x00D4"
,"ocirc" * "\x00F4"
,"Ocirc;" * "\x00D4"
,"ocirc;" * "\x00F4"
,"Ocy;" * "\x041E"
,"ocy;" * "\x043E"
,"odash;" * "\x229D"
,"Odblac;" * "\x0150"
,"odblac;" * "\x0151"
,"odiv;" * "\x2A38"
,"odot;" * "\x2299"
,"odsold;" * "\x29BC"
,"OElig;" * "\x0152"
,"oelig;" * "\x0153"
,"ofcir;" * "\x29BF"
,"Ofr;" * "\x1D512"
,"ofr;" * "\x1D52C"
,"ogon;" * "\x02DB"
,"Ograve" * "\x00D2"
,"ograve" * "\x00F2"
,"Ograve;" * "\x00D2"
,"ograve;" * "\x00F2"
,"ogt;" * "\x29C1"
,"ohbar;" * "\x29B5"
,"ohm;" * "\x03A9"
,"oint;" * "\x222E"
,"olarr;" * "\x21BA"
,"olcir;" * "\x29BE"
,"olcross;" * "\x29BB"
,"oline;" * "\x203E"
,"olt;" * "\x29C0"
,"Omacr;" * "\x014C"
,"omacr;" * "\x014D"
,"Omega;" * "\x03A9"
,"omega;" * "\x03C9"
,"Omicron;" * "\x039F"
,"omicron;" * "\x03BF"
,"omid;" * "\x29B6"
,"ominus;" * "\x2296"
,"Oopf;" * "\x1D546"
,"oopf;" * "\x1D560"
,"opar;" * "\x29B7"
,"OpenCurlyDoubleQuote;" * "\x201C"
,"OpenCurlyQuote;" * "\x2018"
,"operp;" * "\x29B9"
,"oplus;" * "\x2295"
,"or;" * "\x2228"
,"Or;" * "\x2A54"
,"orarr;" * "\x21BB"
,"ord;" * "\x2A5D"
,"order;" * "\x2134"
,"orderof;" * "\x2134"
,"ordf" * "\x00AA"
,"ordf;" * "\x00AA"
,"ordm" * "\x00BA"
,"ordm;" * "\x00BA"
,"origof;" * "\x22B6"
,"oror;" * "\x2A56"
,"orslope;" * "\x2A57"
,"orv;" * "\x2A5B"
,"oS;" * "\x24C8"
,"oscr;" * "\x2134"
,"Oscr;" * "\x1D4AA"
,"Oslash" * "\x00D8"
,"oslash" * "\x00F8"
,"Oslash;" * "\x00D8"
,"oslash;" * "\x00F8"
,"osol;" * "\x2298"
,"Otilde" * "\x00D5"
,"otilde" * "\x00F5"
,"Otilde;" * "\x00D5"
,"otilde;" * "\x00F5"
,"otimes;" * "\x2297"
,"Otimes;" * "\x2A37"
,"otimesas;" * "\x2A36"
,"Ouml" * "\x00D6"
,"ouml" * "\x00F6"
,"Ouml;" * "\x00D6"
,"ouml;" * "\x00F6"
,"ovbar;" * "\x233D"
,"OverBar;" * "\x203E"
,"OverBrace;" * "\x23DE"
,"OverBracket;" * "\x23B4"
,"OverParenthesis;" * "\x23DC"
,"par;" * "\x2225"
,"para" * "\x00B6"
,"para;" * "\x00B6"
,"parallel;" * "\x2225"
,"parsim;" * "\x2AF3"
,"parsl;" * "\x2AFD"
,"part;" * "\x2202"
,"PartialD;" * "\x2202"
,"Pcy;" * "\x041F"
,"pcy;" * "\x043F"
,"percnt;" * "\x0025"
,"period;" * "\x002E"
,"permil;" * "\x2030"
,"perp;" * "\x22A5"
,"pertenk;" * "\x2031"
,"Pfr;" * "\x1D513"
,"pfr;" * "\x1D52D"
,"Phi;" * "\x03A6"
,"phi;" * "\x03C6"
,"phiv;" * "\x03D5"
,"phmmat;" * "\x2133"
,"phone;" * "\x260E"
,"Pi;" * "\x03A0"
,"pi;" * "\x03C0"
,"pitchfork;" * "\x22D4"
,"piv;" * "\x03D6"
,"planck;" * "\x210F"
,"planckh;" * "\x210E"
,"plankv;" * "\x210F"
,"plus;" * "\x002B"
,"plusacir;" * "\x2A23"
,"plusb;" * "\x229E"
,"pluscir;" * "\x2A22"
,"plusdo;" * "\x2214"
,"plusdu;" * "\x2A25"
,"pluse;" * "\x2A72"
,"PlusMinus;" * "\x00B1"
,"plusmn" * "\x00B1"
,"plusmn;" * "\x00B1"
,"plussim;" * "\x2A26"
,"plustwo;" * "\x2A27"
,"pm;" * "\x00B1"
,"Poincareplane;" * "\x210C"
,"pointint;" * "\x2A15"
,"Popf;" * "\x2119"
,"popf;" * "\x1D561"
,"pound" * "\x00A3"
,"pound;" * "\x00A3"
,"pr;" * "\x227A"
,"Pr;" * "\x2ABB"
,"prap;" * "\x2AB7"
,"prcue;" * "\x227C"
,"pre;" * "\x2AAF"
,"prE;" * "\x2AB3"
,"prec;" * "\x227A"
,"precapprox;" * "\x2AB7"
,"preccurlyeq;" * "\x227C"
,"Precedes;" * "\x227A"
,"PrecedesEqual;" * "\x2AAF"
,"PrecedesSlantEqual;" * "\x227C"
,"PrecedesTilde;" * "\x227E"
,"preceq;" * "\x2AAF"
,"precnapprox;" * "\x2AB9"
,"precneqq;" * "\x2AB5"
,"precnsim;" * "\x22E8"
,"precsim;" * "\x227E"
,"prime;" * "\x2032"
,"Prime;" * "\x2033"
,"primes;" * "\x2119"
,"prnap;" * "\x2AB9"
,"prnE;" * "\x2AB5"
,"prnsim;" * "\x22E8"
,"prod;" * "\x220F"
,"Product;" * "\x220F"
,"profalar;" * "\x232E"
,"profline;" * "\x2312"
,"profsurf;" * "\x2313"
,"prop;" * "\x221D"
,"Proportion;" * "\x2237"
,"Proportional;" * "\x221D"
,"propto;" * "\x221D"
,"prsim;" * "\x227E"
,"prurel;" * "\x22B0"
,"Pscr;" * "\x1D4AB"
,"pscr;" * "\x1D4C5"
,"Psi;" * "\x03A8"
,"psi;" * "\x03C8"
,"puncsp;" * "\x2008"
,"Qfr;" * "\x1D514"
,"qfr;" * "\x1D52E"
,"qint;" * "\x2A0C"
,"Qopf;" * "\x211A"
,"qopf;" * "\x1D562"
,"qprime;" * "\x2057"
,"Qscr;" * "\x1D4AC"
,"qscr;" * "\x1D4C6"
,"quaternions;" * "\x210D"
,"quatint;" * "\x2A16"
,"quest;" * "\x003F"
,"questeq;" * "\x225F"
,"QUOT" * "\x0022"
,"quot" * "\x0022"
,"QUOT;" * "\x0022"
,"quot;" * "\x0022"
,"rAarr;" * "\x21DB"
,"race;" * "\x223D\x0331"
,"Racute;" * "\x0154"
,"racute;" * "\x0155"
,"radic;" * "\x221A"
,"raemptyv;" * "\x29B3"
,"rang;" * "\x27E9"
,"Rang;" * "\x27EB"
,"rangd;" * "\x2992"
,"range;" * "\x29A5"
,"rangle;" * "\x27E9"
,"raquo" * "\x00BB"
,"raquo;" * "\x00BB"
,"rarr;" * "\x2192"
,"Rarr;" * "\x21A0"
,"rArr;" * "\x21D2"
,"rarrap;" * "\x2975"
,"rarrb;" * "\x21E5"
,"rarrbfs;" * "\x2920"
,"rarrc;" * "\x2933"
,"rarrfs;" * "\x291E"
,"rarrhk;" * "\x21AA"
,"rarrlp;" * "\x21AC"
,"rarrpl;" * "\x2945"
,"rarrsim;" * "\x2974"
,"rarrtl;" * "\x21A3"
,"Rarrtl;" * "\x2916"
,"rarrw;" * "\x219D"
,"ratail;" * "\x291A"
,"rAtail;" * "\x291C"
,"ratio;" * "\x2236"
,"rationals;" * "\x211A"
,"rbarr;" * "\x290D"
,"rBarr;" * "\x290F"
,"RBarr;" * "\x2910"
,"rbbrk;" * "\x2773"
,"rbrace;" * "\x007D"
,"rbrack;" * "\x005D"
,"rbrke;" * "\x298C"
,"rbrksld;" * "\x298E"
,"rbrkslu;" * "\x2990"
,"Rcaron;" * "\x0158"
,"rcaron;" * "\x0159"
,"Rcedil;" * "\x0156"
,"rcedil;" * "\x0157"
,"rceil;" * "\x2309"
,"rcub;" * "\x007D"
,"Rcy;" * "\x0420"
,"rcy;" * "\x0440"
,"rdca;" * "\x2937"
,"rdldhar;" * "\x2969"
,"rdquo;" * "\x201D"
,"rdquor;" * "\x201D"
,"rdsh;" * "\x21B3"
,"Re;" * "\x211C"
,"real;" * "\x211C"
,"realine;" * "\x211B"
,"realpart;" * "\x211C"
,"reals;" * "\x211D"
,"rect;" * "\x25AD"
,"REG" * "\x00AE"
,"reg" * "\x00AE"
,"REG;" * "\x00AE"
,"reg;" * "\x00AE"
,"ReverseElement;" * "\x220B"
,"ReverseEquilibrium;" * "\x21CB"
,"ReverseUpEquilibrium;" * "\x296F"
,"rfisht;" * "\x297D"
,"rfloor;" * "\x230B"
,"Rfr;" * "\x211C"
,"rfr;" * "\x1D52F"
,"rHar;" * "\x2964"
,"rhard;" * "\x21C1"
,"rharu;" * "\x21C0"
,"rharul;" * "\x296C"
,"Rho;" * "\x03A1"
,"rho;" * "\x03C1"
,"rhov;" * "\x03F1"
,"RightAngleBracket;" * "\x27E9"
,"RightArrow;" * "\x2192"
,"rightarrow;" * "\x2192"
,"Rightarrow;" * "\x21D2"
,"RightArrowBar;" * "\x21E5"
,"RightArrowLeftArrow;" * "\x21C4"
,"rightarrowtail;" * "\x21A3"
,"RightCeiling;" * "\x2309"
,"RightDoubleBracket;" * "\x27E7"
,"RightDownTeeVector;" * "\x295D"
,"RightDownVector;" * "\x21C2"
,"RightDownVectorBar;" * "\x2955"
,"RightFloor;" * "\x230B"
,"rightharpoondown;" * "\x21C1"
,"rightharpoonup;" * "\x21C0"
,"rightleftarrows;" * "\x21C4"
,"rightleftharpoons;" * "\x21CC"
,"rightrightarrows;" * "\x21C9"
,"rightsquigarrow;" * "\x219D"
,"RightTee;" * "\x22A2"
,"RightTeeArrow;" * "\x21A6"
,"RightTeeVector;" * "\x295B"
,"rightthreetimes;" * "\x22CC"
,"RightTriangle;" * "\x22B3"
,"RightTriangleBar;" * "\x29D0"
,"RightTriangleEqual;" * "\x22B5"
,"RightUpDownVector;" * "\x294F"
,"RightUpTeeVector;" * "\x295C"
,"RightUpVector;" * "\x21BE"
,"RightUpVectorBar;" * "\x2954"
,"RightVector;" * "\x21C0"
,"RightVectorBar;" * "\x2953"
,"ring;" * "\x02DA"
,"risingdotseq;" * "\x2253"
,"rlarr;" * "\x21C4"
,"rlhar;" * "\x21CC"
,"rlm;" * "\x200F"
,"rmoust;" * "\x23B1"
,"rmoustache;" * "\x23B1"
,"rnmid;" * "\x2AEE"
,"roang;" * "\x27ED"
,"roarr;" * "\x21FE"
,"robrk;" * "\x27E7"
,"ropar;" * "\x2986"
,"Ropf;" * "\x211D"
,"ropf;" * "\x1D563"
,"roplus;" * "\x2A2E"
,"rotimes;" * "\x2A35"
,"RoundImplies;" * "\x2970"
,"rpar;" * "\x0029"
,"rpargt;" * "\x2994"
,"rppolint;" * "\x2A12"
,"rrarr;" * "\x21C9"
,"Rrightarrow;" * "\x21DB"
,"rsaquo;" * "\x203A"
,"Rscr;" * "\x211B"
,"rscr;" * "\x1D4C7"
,"Rsh;" * "\x21B1"
,"rsh;" * "\x21B1"
,"rsqb;" * "\x005D"
,"rsquo;" * "\x2019"
,"rsquor;" * "\x2019"
,"rthree;" * "\x22CC"
,"rtimes;" * "\x22CA"
,"rtri;" * "\x25B9"
,"rtrie;" * "\x22B5"
,"rtrif;" * "\x25B8"
,"rtriltri;" * "\x29CE"
,"RuleDelayed;" * "\x29F4"
,"ruluhar;" * "\x2968"
,"rx;" * "\x211E"
,"Sacute;" * "\x015A"
,"sacute;" * "\x015B"
,"sbquo;" * "\x201A"
,"sc;" * "\x227B"
,"Sc;" * "\x2ABC"
,"scap;" * "\x2AB8"
,"Scaron;" * "\x0160"
,"scaron;" * "\x0161"
,"sccue;" * "\x227D"
,"sce;" * "\x2AB0"
,"scE;" * "\x2AB4"
,"Scedil;" * "\x015E"
,"scedil;" * "\x015F"
,"Scirc;" * "\x015C"
,"scirc;" * "\x015D"
,"scnap;" * "\x2ABA"
,"scnE;" * "\x2AB6"
,"scnsim;" * "\x22E9"
,"scpolint;" * "\x2A13"
,"scsim;" * "\x227F"
,"Scy;" * "\x0421"
,"scy;" * "\x0441"
,"sdot;" * "\x22C5"
,"sdotb;" * "\x22A1"
,"sdote;" * "\x2A66"
,"searhk;" * "\x2925"
,"searr;" * "\x2198"
,"seArr;" * "\x21D8"
,"searrow;" * "\x2198"
,"sect" * "\x00A7"
,"sect;" * "\x00A7"
,"semi;" * "\x003B"
,"seswar;" * "\x2929"
,"setminus;" * "\x2216"
,"setmn;" * "\x2216"
,"sext;" * "\x2736"
,"Sfr;" * "\x1D516"
,"sfr;" * "\x1D530"
,"sfrown;" * "\x2322"
,"sharp;" * "\x266F"
,"SHCHcy;" * "\x0429"
,"shchcy;" * "\x0449"
,"SHcy;" * "\x0428"
,"shcy;" * "\x0448"
,"ShortDownArrow;" * "\x2193"
,"ShortLeftArrow;" * "\x2190"
,"shortmid;" * "\x2223"
,"shortparallel;" * "\x2225"
,"ShortRightArrow;" * "\x2192"
,"ShortUpArrow;" * "\x2191"
,"shy" * "\x00AD"
,"shy;" * "\x00AD"
,"Sigma;" * "\x03A3"
,"sigma;" * "\x03C3"
,"sigmaf;" * "\x03C2"
,"sigmav;" * "\x03C2"
,"sim;" * "\x223C"
,"simdot;" * "\x2A6A"
,"sime;" * "\x2243"
,"simeq;" * "\x2243"
,"simg;" * "\x2A9E"
,"simgE;" * "\x2AA0"
,"siml;" * "\x2A9D"
,"simlE;" * "\x2A9F"
,"simne;" * "\x2246"
,"simplus;" * "\x2A24"
,"simrarr;" * "\x2972"
,"slarr;" * "\x2190"
,"SmallCircle;" * "\x2218"
,"smallsetminus;" * "\x2216"
,"smashp;" * "\x2A33"
,"smeparsl;" * "\x29E4"
,"smid;" * "\x2223"
,"smile;" * "\x2323"
,"smt;" * "\x2AAA"
,"smte;" * "\x2AAC"
,"smtes;" * "\x2AAC\xFE00"
,"SOFTcy;" * "\x042C"
,"softcy;" * "\x044C"
,"sol;" * "\x002F"
,"solb;" * "\x29C4"
,"solbar;" * "\x233F"
,"Sopf;" * "\x1D54A"
,"sopf;" * "\x1D564"
,"spades;" * "\x2660"
,"spadesuit;" * "\x2660"
,"spar;" * "\x2225"
,"sqcap;" * "\x2293"
,"sqcaps;" * "\x2293\xFE00"
,"sqcup;" * "\x2294"
,"sqcups;" * "\x2294\xFE00"
,"Sqrt;" * "\x221A"
,"sqsub;" * "\x228F"
,"sqsube;" * "\x2291"
,"sqsubset;" * "\x228F"
,"sqsubseteq;" * "\x2291"
,"sqsup;" * "\x2290"
,"sqsupe;" * "\x2292"
,"sqsupset;" * "\x2290"
,"sqsupseteq;" * "\x2292"
,"squ;" * "\x25A1"
,"Square;" * "\x25A1"
,"square;" * "\x25A1"
,"SquareIntersection;" * "\x2293"
,"SquareSubset;" * "\x228F"
,"SquareSubsetEqual;" * "\x2291"
,"SquareSuperset;" * "\x2290"
,"SquareSupersetEqual;" * "\x2292"
,"SquareUnion;" * "\x2294"
,"squarf;" * "\x25AA"
,"squf;" * "\x25AA"
,"srarr;" * "\x2192"
,"Sscr;" * "\x1D4AE"
,"sscr;" * "\x1D4C8"
,"ssetmn;" * "\x2216"
,"ssmile;" * "\x2323"
,"sstarf;" * "\x22C6"
,"Star;" * "\x22C6"
,"star;" * "\x2606"
,"starf;" * "\x2605"
,"straightepsilon;" * "\x03F5"
,"straightphi;" * "\x03D5"
,"strns;" * "\x00AF"
,"sub;" * "\x2282"
,"Sub;" * "\x22D0"
,"subdot;" * "\x2ABD"
,"sube;" * "\x2286"
,"subE;" * "\x2AC5"
,"subedot;" * "\x2AC3"
,"submult;" * "\x2AC1"
,"subne;" * "\x228A"
,"subnE;" * "\x2ACB"
,"subplus;" * "\x2ABF"
,"subrarr;" * "\x2979"
,"subset;" * "\x2282"
,"Subset;" * "\x22D0"
,"subseteq;" * "\x2286"
,"subseteqq;" * "\x2AC5"
,"SubsetEqual;" * "\x2286"
,"subsetneq;" * "\x228A"
,"subsetneqq;" * "\x2ACB"
,"subsim;" * "\x2AC7"
,"subsub;" * "\x2AD5"
,"subsup;" * "\x2AD3"
,"succ;" * "\x227B"
,"succapprox;" * "\x2AB8"
,"succcurlyeq;" * "\x227D"
,"Succeeds;" * "\x227B"
,"SucceedsEqual;" * "\x2AB0"
,"SucceedsSlantEqual;" * "\x227D"
,"SucceedsTilde;" * "\x227F"
,"succeq;" * "\x2AB0"
,"succnapprox;" * "\x2ABA"
,"succneqq;" * "\x2AB6"
,"succnsim;" * "\x22E9"
,"succsim;" * "\x227F"
,"SuchThat;" * "\x220B"
,"Sum;" * "\x2211"
,"sum;" * "\x2211"
,"sung;" * "\x266A"
,"sup1" * "\x00B9"
,"sup1;" * "\x00B9"
,"sup2" * "\x00B2"
,"sup2;" * "\x00B2"
,"sup3" * "\x00B3"
,"sup3;" * "\x00B3"
,"sup;" * "\x2283"
,"Sup;" * "\x22D1"
,"supdot;" * "\x2ABE"
,"supdsub;" * "\x2AD8"
,"supe;" * "\x2287"
,"supE;" * "\x2AC6"
,"supedot;" * "\x2AC4"
,"Superset;" * "\x2283"
,"SupersetEqual;" * "\x2287"
,"suphsol;" * "\x27C9"
,"suphsub;" * "\x2AD7"
,"suplarr;" * "\x297B"
,"supmult;" * "\x2AC2"
,"supne;" * "\x228B"
,"supnE;" * "\x2ACC"
,"supplus;" * "\x2AC0"
,"supset;" * "\x2283"
,"Supset;" * "\x22D1"
,"supseteq;" * "\x2287"
,"supseteqq;" * "\x2AC6"
,"supsetneq;" * "\x228B"
,"supsetneqq;" * "\x2ACC"
,"supsim;" * "\x2AC8"
,"supsub;" * "\x2AD4"
,"supsup;" * "\x2AD6"
,"swarhk;" * "\x2926"
,"swarr;" * "\x2199"
,"swArr;" * "\x21D9"
,"swarrow;" * "\x2199"
,"swnwar;" * "\x292A"
,"szlig" * "\x00DF"
,"szlig;" * "\x00DF"
,"Tab;" * "\x0009"
,"target;" * "\x2316"
,"Tau;" * "\x03A4"
,"tau;" * "\x03C4"
,"tbrk;" * "\x23B4"
,"Tcaron;" * "\x0164"
,"tcaron;" * "\x0165"
,"Tcedil;" * "\x0162"
,"tcedil;" * "\x0163"
,"Tcy;" * "\x0422"
,"tcy;" * "\x0442"
,"tdot;" * "\x20DB"
,"telrec;" * "\x2315"
,"Tfr;" * "\x1D517"
,"tfr;" * "\x1D531"
,"there4;" * "\x2234"
,"Therefore;" * "\x2234"
,"therefore;" * "\x2234"
,"Theta;" * "\x0398"
,"theta;" * "\x03B8"
,"thetasym;" * "\x03D1"
,"thetav;" * "\x03D1"
,"thickapprox;" * "\x2248"
,"thicksim;" * "\x223C"
,"ThickSpace;" * "\x205F\x200A"
,"thinsp;" * "\x2009"
,"ThinSpace;" * "\x2009"
,"thkap;" * "\x2248"
,"thksim;" * "\x223C"
,"THORN" * "\x00DE"
,"thorn" * "\x00FE"
,"THORN;" * "\x00DE"
,"thorn;" * "\x00FE"
,"tilde;" * "\x02DC"
,"Tilde;" * "\x223C"
,"TildeEqual;" * "\x2243"
,"TildeFullEqual;" * "\x2245"
,"TildeTilde;" * "\x2248"
,"times" * "\x00D7"
,"times;" * "\x00D7"
,"timesb;" * "\x22A0"
,"timesbar;" * "\x2A31"
,"timesd;" * "\x2A30"
,"tint;" * "\x222D"
,"toea;" * "\x2928"
,"top;" * "\x22A4"
,"topbot;" * "\x2336"
,"topcir;" * "\x2AF1"
,"Topf;" * "\x1D54B"
,"topf;" * "\x1D565"
,"topfork;" * "\x2ADA"
,"tosa;" * "\x2929"
,"tprime;" * "\x2034"
,"TRADE;" * "\x2122"
,"trade;" * "\x2122"
,"triangle;" * "\x25B5"
,"triangledown;" * "\x25BF"
,"triangleleft;" * "\x25C3"
,"trianglelefteq;" * "\x22B4"
,"triangleq;" * "\x225C"
,"triangleright;" * "\x25B9"
,"trianglerighteq;" * "\x22B5"
,"tridot;" * "\x25EC"
,"trie;" * "\x225C"
,"triminus;" * "\x2A3A"
,"TripleDot;" * "\x20DB"
,"triplus;" * "\x2A39"
,"trisb;" * "\x29CD"
,"tritime;" * "\x2A3B"
,"trpezium;" * "\x23E2"
,"Tscr;" * "\x1D4AF"
,"tscr;" * "\x1D4C9"
,"TScy;" * "\x0426"
,"tscy;" * "\x0446"
,"TSHcy;" * "\x040B"
,"tshcy;" * "\x045B"
,"Tstrok;" * "\x0166"
,"tstrok;" * "\x0167"
,"twixt;" * "\x226C"
,"twoheadleftarrow;" * "\x219E"
,"twoheadrightarrow;" * "\x21A0"
,"Uacute" * "\x00DA"
,"uacute" * "\x00FA"
,"Uacute;" * "\x00DA"
,"uacute;" * "\x00FA"
,"uarr;" * "\x2191"
,"Uarr;" * "\x219F"
,"uArr;" * "\x21D1"
,"Uarrocir;" * "\x2949"
,"Ubrcy;" * "\x040E"
,"ubrcy;" * "\x045E"
,"Ubreve;" * "\x016C"
,"ubreve;" * "\x016D"
,"Ucirc" * "\x00DB"
,"ucirc" * "\x00FB"
,"Ucirc;" * "\x00DB"
,"ucirc;" * "\x00FB"
,"Ucy;" * "\x0423"
,"ucy;" * "\x0443"
,"udarr;" * "\x21C5"
,"Udblac;" * "\x0170"
,"udblac;" * "\x0171"
,"udhar;" * "\x296E"
,"ufisht;" * "\x297E"
,"Ufr;" * "\x1D518"
,"ufr;" * "\x1D532"
,"Ugrave" * "\x00D9"
,"ugrave" * "\x00F9"
,"Ugrave;" * "\x00D9"
,"ugrave;" * "\x00F9"
,"uHar;" * "\x2963"
,"uharl;" * "\x21BF"
,"uharr;" * "\x21BE"
,"uhblk;" * "\x2580"
,"ulcorn;" * "\x231C"
,"ulcorner;" * "\x231C"
,"ulcrop;" * "\x230F"
,"ultri;" * "\x25F8"
,"Umacr;" * "\x016A"
,"umacr;" * "\x016B"
,"uml" * "\x00A8"
,"uml;" * "\x00A8"
,"UnderBar;" * "\x005F"
,"UnderBrace;" * "\x23DF"
,"UnderBracket;" * "\x23B5"
,"UnderParenthesis;" * "\x23DD"
,"Union;" * "\x22C3"
,"UnionPlus;" * "\x228E"
,"Uogon;" * "\x0172"
,"uogon;" * "\x0173"
,"Uopf;" * "\x1D54C"
,"uopf;" * "\x1D566"
,"UpArrow;" * "\x2191"
,"uparrow;" * "\x2191"
,"Uparrow;" * "\x21D1"
,"UpArrowBar;" * "\x2912"
,"UpArrowDownArrow;" * "\x21C5"
,"UpDownArrow;" * "\x2195"
,"updownarrow;" * "\x2195"
,"Updownarrow;" * "\x21D5"
,"UpEquilibrium;" * "\x296E"
,"upharpoonleft;" * "\x21BF"
,"upharpoonright;" * "\x21BE"
,"uplus;" * "\x228E"
,"UpperLeftArrow;" * "\x2196"
,"UpperRightArrow;" * "\x2197"
,"upsi;" * "\x03C5"
,"Upsi;" * "\x03D2"
,"upsih;" * "\x03D2"
,"Upsilon;" * "\x03A5"
,"upsilon;" * "\x03C5"
,"UpTee;" * "\x22A5"
,"UpTeeArrow;" * "\x21A5"
,"upuparrows;" * "\x21C8"
,"urcorn;" * "\x231D"
,"urcorner;" * "\x231D"
,"urcrop;" * "\x230E"
,"Uring;" * "\x016E"
,"uring;" * "\x016F"
,"urtri;" * "\x25F9"
,"Uscr;" * "\x1D4B0"
,"uscr;" * "\x1D4CA"
,"utdot;" * "\x22F0"
,"Utilde;" * "\x0168"
,"utilde;" * "\x0169"
,"utri;" * "\x25B5"
,"utrif;" * "\x25B4"
,"uuarr;" * "\x21C8"
,"Uuml" * "\x00DC"
,"uuml" * "\x00FC"
,"Uuml;" * "\x00DC"
,"uuml;" * "\x00FC"
,"uwangle;" * "\x29A7"
,"vangrt;" * "\x299C"
,"varepsilon;" * "\x03F5"
,"varkappa;" * "\x03F0"
,"varnothing;" * "\x2205"
,"varphi;" * "\x03D5"
,"varpi;" * "\x03D6"
,"varpropto;" * "\x221D"
,"varr;" * "\x2195"
,"vArr;" * "\x21D5"
,"varrho;" * "\x03F1"
,"varsigma;" * "\x03C2"
,"varsubsetneq;" * "\x228A\xFE00"
,"varsubsetneqq;" * "\x2ACB\xFE00"
,"varsupsetneq;" * "\x228B\xFE00"
,"varsupsetneqq;" * "\x2ACC\xFE00"
,"vartheta;" * "\x03D1"
,"vartriangleleft;" * "\x22B2"
,"vartriangleright;" * "\x22B3"
,"vBar;" * "\x2AE8"
,"Vbar;" * "\x2AEB"
,"vBarv;" * "\x2AE9"
,"Vcy;" * "\x0412"
,"vcy;" * "\x0432"
,"vdash;" * "\x22A2"
,"vDash;" * "\x22A8"
,"Vdash;" * "\x22A9"
,"VDash;" * "\x22AB"
,"Vdashl;" * "\x2AE6"
,"vee;" * "\x2228"
,"Vee;" * "\x22C1"
,"veebar;" * "\x22BB"
,"veeeq;" * "\x225A"
,"vellip;" * "\x22EE"
,"verbar;" * "\x007C"
,"Verbar;" * "\x2016"
,"vert;" * "\x007C"
,"Vert;" * "\x2016"
,"VerticalBar;" * "\x2223"
,"VerticalLine;" * "\x007C"
,"VerticalSeparator;" * "\x2758"
,"VerticalTilde;" * "\x2240"
,"VeryThinSpace;" * "\x200A"
,"Vfr;" * "\x1D519"
,"vfr;" * "\x1D533"
,"vltri;" * "\x22B2"
,"vnsub;" * "\x2282\x20D2"
,"vnsup;" * "\x2283\x20D2"
,"Vopf;" * "\x1D54D"
,"vopf;" * "\x1D567"
,"vprop;" * "\x221D"
,"vrtri;" * "\x22B3"
,"Vscr;" * "\x1D4B1"
,"vscr;" * "\x1D4CB"
,"vsubne;" * "\x228A\xFE00"
,"vsubnE;" * "\x2ACB\xFE00"
,"vsupne;" * "\x228B\xFE00"
,"vsupnE;" * "\x2ACC\xFE00"
,"Vvdash;" * "\x22AA"
,"vzigzag;" * "\x299A"
,"Wcirc;" * "\x0174"
,"wcirc;" * "\x0175"
,"wedbar;" * "\x2A5F"
,"wedge;" * "\x2227"
,"Wedge;" * "\x22C0"
,"wedgeq;" * "\x2259"
,"weierp;" * "\x2118"
,"Wfr;" * "\x1D51A"
,"wfr;" * "\x1D534"
,"Wopf;" * "\x1D54E"
,"wopf;" * "\x1D568"
,"wp;" * "\x2118"
,"wr;" * "\x2240"
,"wreath;" * "\x2240"
,"Wscr;" * "\x1D4B2"
,"wscr;" * "\x1D4CC"
,"xcap;" * "\x22C2"
,"xcirc;" * "\x25EF"
,"xcup;" * "\x22C3"
,"xdtri;" * "\x25BD"
,"Xfr;" * "\x1D51B"
,"xfr;" * "\x1D535"
,"xharr;" * "\x27F7"
,"xhArr;" * "\x27FA"
,"Xi;" * "\x039E"
,"xi;" * "\x03BE"
,"xlarr;" * "\x27F5"
,"xlArr;" * "\x27F8"
,"xmap;" * "\x27FC"
,"xnis;" * "\x22FB"
,"xodot;" * "\x2A00"
,"Xopf;" * "\x1D54F"
,"xopf;" * "\x1D569"
,"xoplus;" * "\x2A01"
,"xotime;" * "\x2A02"
,"xrarr;" * "\x27F6"
,"xrArr;" * "\x27F9"
,"Xscr;" * "\x1D4B3"
,"xscr;" * "\x1D4CD"
,"xsqcup;" * "\x2A06"
,"xuplus;" * "\x2A04"
,"xutri;" * "\x25B3"
,"xvee;" * "\x22C1"
,"xwedge;" * "\x22C0"
,"Yacute" * "\x00DD"
,"yacute" * "\x00FD"
,"Yacute;" * "\x00DD"
,"yacute;" * "\x00FD"
,"YAcy;" * "\x042F"
,"yacy;" * "\x044F"
,"Ycirc;" * "\x0176"
,"ycirc;" * "\x0177"
,"Ycy;" * "\x042B"
,"ycy;" * "\x044B"
,"yen" * "\x00A5"
,"yen;" * "\x00A5"
,"Yfr;" * "\x1D51C"
,"yfr;" * "\x1D536"
,"YIcy;" * "\x0407"
,"yicy;" * "\x0457"
,"Yopf;" * "\x1D550"
,"yopf;" * "\x1D56A"
,"Yscr;" * "\x1D4B4"
,"yscr;" * "\x1D4CE"
,"YUcy;" * "\x042E"
,"yucy;" * "\x044E"
,"yuml" * "\x00FF"
,"yuml;" * "\x00FF"
,"Yuml;" * "\x0178"
,"Zacute;" * "\x0179"
,"zacute;" * "\x017A"
,"Zcaron;" * "\x017D"
,"zcaron;" * "\x017E"
,"Zcy;" * "\x0417"
,"zcy;" * "\x0437"
,"Zdot;" * "\x017B"
,"zdot;" * "\x017C"
,"zeetrf;" * "\x2128"
,"ZeroWidthSpace;" * "\x200B"
,"Zeta;" * "\x0396"
,"zeta;" * "\x03B6"
,"Zfr;" * "\x2128"
,"zfr;" * "\x1D537"
,"ZHcy;" * "\x0416"
,"zhcy;" * "\x0436"
,"zigrarr;" * "\x21DD"
,"Zopf;" * "\x2124"
,"zopf;" * "\x1D56B"
,"Zscr;" * "\x1D4B5"
,"zscr;" * "\x1D4CF"
,"zwj;" * "\x200D"
,"zwnj;" * "\x200C"
]
| ndmitchell/tagsoup | src/Text/HTML/TagSoup/Entity.hs | bsd-3-clause | 62,809 | 0 | 18 | 16,321 | 16,338 | 9,330 | 7,008 | 2,273 | 4 |
module Problem13Tests
( problem13Tests
) where
import Test.HUnit
import Problem13
-- | 'columnarAddition' agrees with ordinary '+' on a few concrete sums.
testColumnarAddition :: Test
testColumnarAddition = TestCase $ mapM_ check cases
  where
    -- Each case: (label, summands); the expected value is the plain sum.
    check (label, xs) = assertEqual label (sum xs) (columnarAddition xs)
    cases =
      [ ("123 + 456", [123, 456])
      , ("3455 + 3555", [3455, 3555])
      , ("123 + 456 + 789", [123, 456, 789])
      ]
problem13Tests = TestList [testColumnarAddition]
| candidtim/euler | test/Problem13Tests.hs | bsd-3-clause | 435 | 0 | 11 | 74 | 136 | 74 | 62 | 10 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Graphviz.Render (render) where
import qualified Data.ByteString.Lazy.Char8 as BL
import System.IO
import System.Directory (getTemporaryDirectory, removeFile)
import System.Process
import Control.Exception as CE
-- | Render DOT source to PNG by shelling out to Graphviz' @dot@.
-- Writes the source to a temp file, runs @dot -Tpng -O@ (which writes
-- @<path>.png@ next to the input), and returns the PNG bytes.
-- NOTE(review): requires @dot@ on $PATH; the generated @.png@ file is
-- not deleted here (only the @.dot@ temp file is, by 'withTempFile').
render :: BL.ByteString -> IO BL.ByteString
render dat = do
    withTempFile "graphviz.dot" $
      \path hndl -> do
        writeData dat hndl
        -- Close before invoking dot so the data is flushed to disk.
        hClose hndl
        (_,_,_,p) <- createProcess (proc "dot" [path, "-Tpng", "-O"])
        waitForProcess p
        BL.readFile (path ++ ".png")
-- | Dump the DOT source onto the given handle.
writeData :: BL.ByteString -> Handle -> IO ()
writeData = flip BL.hPut
-- | Run an action with a freshly created temp file, removing the file
-- afterwards even if the action throws.
withTempFile :: String -> (FilePath -> Handle -> IO a) -> IO a
withTempFile pattern func = do
    -- Fall back to the current directory when the temp dir is unavailable.
    tempdir <- CE.catch (getTemporaryDirectory) (\(_ :: IOException) -> return ".")
    (tempfile, temph) <- openTempFile tempdir pattern
    -- hClose on an already-closed handle has no effect, so callers may
    -- close the handle themselves (as 'render' does) before cleanup runs.
    finally (func tempfile temph)
            (do hClose temph
                removeFile tempfile)
| co-dan/graphviz-render | Graphviz/Render.hs | bsd-3-clause | 969 | 0 | 15 | 211 | 328 | 169 | 159 | 25 | 1 |
-- | A Map mapping multiple values to a key (cross between Map and Set).
-- This is not a complete module.
module Data.Parser.Grempa.Auxiliary.MultiMap
( MultiMap
, lookup
, insert
, inserts
, delete
, union
, unions
, fromList
, M.empty
, toList
) where
import qualified Data.Map as M
import Data.Map(Map)
import Prelude hiding (lookup)
import Data.Maybe
import qualified Data.Set as S
import Data.Set(Set)
-- | A map from keys to sets of values.
type MultiMap k a = Map k (Set a)

-- | All values stored under a key; the empty set when the key is absent.
lookup :: Ord k => k -> MultiMap k a -> Set a
lookup = M.findWithDefault S.empty

-- | Add a single value under a key.
insert :: (Ord a, Ord k) => k -> a -> MultiMap k a -> MultiMap k a
insert k v = M.insertWith S.union k (S.singleton v)

-- | Add a whole set of values under a key.
inserts :: (Ord a, Ord k) => k -> Set a -> MultiMap k a -> MultiMap k a
inserts = M.insertWith S.union

-- | Remove one value from a key's set, dropping the key entirely when
-- its set becomes empty.
delete :: (Ord a, Ord k) => k -> a -> MultiMap k a -> MultiMap k a
delete k v = M.update shrink k
  where
    shrink vs
      | S.null vs' = Nothing
      | otherwise  = Just vs'
      where vs' = S.delete v vs

-- | Merge two multimaps, uniting the value sets of shared keys.
union :: (Ord a, Ord k) => MultiMap k a -> MultiMap k a -> MultiMap k a
union = M.unionWith S.union

-- | Merge any number of multimaps.
unions :: (Ord a, Ord k) => [MultiMap k a] -> MultiMap k a
unions = foldr union M.empty

-- | Build a multimap from key/value pairs.
fromList :: (Ord a, Ord k) => [(k, a)] -> MultiMap k a
fromList = foldr (uncurry insert) M.empty

-- | Flatten to (key, value-set) pairs in ascending key order.
toList :: (Ord a, Ord k) => MultiMap k a -> [(k, Set a)]
toList = M.assocs
| ollef/Grempa | Data/Parser/Grempa/Auxiliary/MultiMap.hs | bsd-3-clause | 1,483 | 0 | 12 | 423 | 659 | 345 | 314 | 37 | 2 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.AMD.QueryBufferObject
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.AMD.QueryBufferObject (
-- * Extension Support
glGetAMDQueryBufferObject,
gl_AMD_query_buffer_object,
-- * Enums
pattern GL_QUERY_BUFFER_AMD,
pattern GL_QUERY_BUFFER_BINDING_AMD,
pattern GL_QUERY_RESULT_NO_WAIT_AMD
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
| haskell-opengl/OpenGLRaw | src/Graphics/GL/AMD/QueryBufferObject.hs | bsd-3-clause | 745 | 0 | 5 | 99 | 57 | 42 | 15 | 9 | 0 |
{-#LANGUAGE RecordWildCards #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE QuasiQuotes, TemplateHaskell, TypeFamilies #-}
{-# LANGUAGE OverloadedStrings, GADTs, FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving, MultiParamTypeClasses #-}
module DirectoryServer where
import Network hiding (accept, sClose)
import Network.Socket hiding (send, recv, sendTo, recvFrom, Broadcast)
import Network.Socket.ByteString
import Data.ByteString.Char8 (pack, unpack)
import System.Environment
import System.IO
import Control.Concurrent
import Control.Concurrent.STM
import Control.Exception
import Control.Monad (forever, when, join)
import Data.List.Split
import Data.Word
import Text.Printf (printf)
import System.Directory
import Data.Map (Map) -- from the `containers` library
import Data.Time
import System.Random
import qualified Data.Map as M
-- Readability aliases used throughout the directory server.
type Uuid = Int          -- fileserver identifier handed out by JOIN
type Address = String
type Port = String
type Filename = String
-- | An action producing a formatted time string when run (see the
-- @fmap show getZonedTime@ uses in 'uploadCommand').
type Timestamp = IO String
--Server data type allows me to pass address and port details easily
-- | Shared state for the directory service: its own identity plus
-- mutable tables guarded by STM so concurrent client threads can
-- update them safely.
data DirectoryServer = DirectoryServer
  { address :: String                                -- ^ host the directory listens on
  , port :: String                                   -- ^ port the directory listens on
  , filemappings :: TVar (M.Map Filename Filemapping) -- ^ filename -> hosting fileserver record
  , fileservers :: TVar (M.Map Uuid Fileserver)       -- ^ registered fileservers by id
  , fileservercount :: TVar Int                       -- ^ next fileserver id to hand out (see 'joinCommand')
  }
--Constructor
-- | Build a fresh server handle: empty mapping tables and the
-- fileserver id counter at zero.
newDirectoryServer :: String -> String -> IO DirectoryServer
newDirectoryServer host prt = atomically $
    DirectoryServer host prt <$> newTVar M.empty <*> newTVar M.empty <*> newTVar 0
-- | Create a 'Filemapping' and record it under @filename@; an existing
-- entry for the same name is overwritten.
addFilemapping :: DirectoryServer -> Filename -> Uuid -> Address -> Port -> Timestamp -> STM ()
addFilemapping DirectoryServer{..} filename uuid fmaddress fmport timestamp = do
    fm <- newFilemapping filename uuid fmaddress fmport timestamp
    modifyTVar filemappings . M.insert filename $ fm
-- | Register a fileserver under its id; an existing entry for the same
-- id is overwritten.
addFileserver :: DirectoryServer -> Uuid -> Address -> Port -> STM ()
addFileserver DirectoryServer{..} uuid fsaddress fsport = do
    fs <- newFileserver uuid fsaddress fsport
    modifyTVar fileservers . M.insert uuid $ fs
-- | Find the record for a filename, if any.
lookupFilemapping :: DirectoryServer -> Filename -> STM (Maybe Filemapping)
lookupFilemapping DirectoryServer{..} name = fmap (M.lookup name) (readTVar filemappings)

-- | Find a registered fileserver by its id, if any.
lookupFileserver :: DirectoryServer -> Uuid -> STM (Maybe Fileserver)
lookupFileserver DirectoryServer{..} ident = fmap (M.lookup ident) (readTVar fileservers)
-- | Where a stored file lives: the hosting fileserver's id, address and
-- port, plus an action producing the record's timestamp.
data Filemapping = Filemapping
  { fmfilename :: Filename
  , fmuuid :: Uuid           -- ^ id of the hosting fileserver
  , fmaddress :: Address
  , fmport :: Port
  , fmtimestamp :: Timestamp -- ^ IO action yielding a formatted time string
  }
-- | Pure construction of a 'Filemapping' inside STM.
newFilemapping :: Filename -> Uuid -> Address -> Port -> Timestamp -> STM Filemapping
newFilemapping name uuid addr prt stamp = return (Filemapping name uuid addr prt stamp)
-- Field accessors re-exposed as helper functions; each is exactly the
-- corresponding record selector.
getFilemappinguuid :: Filemapping -> Uuid
getFilemappinguuid = fmuuid

getFilemappingaddress :: Filemapping -> Address
getFilemappingaddress = fmaddress

getFilemappingport :: Filemapping -> Port
getFilemappingport = fmport

getFilemappingtimestamp :: Filemapping -> Timestamp
getFilemappingtimestamp = fmtimestamp
-- | A registered fileserver node: its id and where to reach it.
data Fileserver = Fileserver
  { fsuuid :: Uuid
  , fsaddress :: HostName
  , fsport :: Port
  }
-- | Pure construction of a 'Fileserver' inside STM.
newFileserver :: Uuid -> Address -> Port -> STM Fileserver
newFileserver fsuuid fsaddress fsport = return (Fileserver fsuuid fsaddress fsport)

-- Accessors; each is exactly the corresponding record selector.
getFileserveraddress :: Fileserver -> HostName
getFileserveraddress = fsaddress

getFileserverport :: Fileserver -> Port
getFileserverport = fsport
--4 is easy for testing the pooling
-- | Maximum number of simultaneous client-handler threads
-- (see 'clientconnectHandler'). Explicit signature avoids defaulting
-- the literal to Integer.
maxnumThreads :: Int
maxnumThreads = 4

-- | Port the directory server listens on.
serverport :: String
serverport = "7008"

-- | Host name used for the server's own identity.
serverhost :: String
serverhost = "localhost"
-- | Entry point: bind a listening socket on 'serverport', fork the
-- accept loop, then block in 'mainHandler' until a client sends
-- KILL_SERVICE.
dirrun:: IO ()
dirrun = withSocketsDo $ do
    --Command line arguments for port and address
    --args <- getArgs
    server <- newDirectoryServer serverhost serverport
    --sock <- listenOn (PortNumber (fromIntegral serverport))
    -- Resolve a passive (wildcard) address for the configured port.
    addrinfos <- getAddrInfo
                   (Just (defaultHints {addrFlags = [AI_PASSIVE]}))
                   Nothing (Just serverport)
    let serveraddr = head addrinfos
    sock <- socket (addrFamily serveraddr) Stream defaultProtocol
    bindSocket sock (addrAddress serveraddr)
    listen sock 5
    _ <- printf "Listening on port %s\n" serverport
    --Listen on port from command line argument
    --New Abstract FIFO Channel
    chan <- newChan
    --Tvars are variables Stored in memory, this way we can access the numThreads from any method
    numThreads <- atomically $ newTVar 0
    --Spawns a new thread to handle the clientconnectHandler method, passes socket, channel, numThreads and server
    forkIO $ clientconnectHandler sock chan numThreads server
    --Calls the mainHandler which will monitor the FIFO channel
    mainHandler sock chan
-- | Block on the control channel until the shutdown token arrives; any
-- other message is ignored and we keep waiting.
mainHandler :: Socket -> Chan String -> IO ()
mainHandler sock chan = do
    msg <- readChan chan
    if msg == "KILL_SERVICE"
        then putStrLn "Terminating the Service!"
        else mainHandler sock chan
-- | Accept loop: admit a client if a handler-thread slot is free,
-- otherwise refuse and close the connection; then loop for the next one.
clientconnectHandler :: Socket -> Chan String -> TVar Int -> DirectoryServer -> IO ()
clientconnectHandler sock chan numThreads server = do
    --Accept the socket which returns a handle, host and port
    --(handle, host, port) <- accept sock
    (s,a) <- accept sock
    --handle <- socketToHandle s ReadWriteMode
    --Read numThreads from memory and print it on server console
    count <- atomically $ readTVar numThreads
    putStrLn $ "numThreads = " ++ show count
    --If there are still threads remaining create new thread and increment (exception if thread is lost -> decrement), else tell user capacity has been reached
    -- forkFinally guarantees the counter is decremented however the
    -- handler thread exits (normal return or exception).
    if (count < maxnumThreads) then do
      forkFinally (clientHandler s chan server) (\_ -> atomically $ decrementTVar numThreads)
      atomically $ incrementTVar numThreads
    else do
      send s (pack ("Maximum number of threads in use. try again soon"++"\n\n"))
      sClose s
    clientconnectHandler sock chan numThreads server
-- | Per-client loop: read one request (up to 1024 bytes), dispatch on
-- its first word, and repeat until the thread is torn down.
clientHandler :: Socket -> Chan String -> DirectoryServer -> IO ()
clientHandler sock chan server@DirectoryServer{..} =
    forever $ do
      message <- recv sock 1024
      let msg = unpack message
      print $ msg ++ "!ENDLINE!"
      -- First word of the first ':'-separated field selects the command.
      -- NOTE(review): 'head' crashes on an empty message — confirm
      -- clients never send one.
      let cmd = head $ words $ head $ splitOn ":" msg
      print cmd
      case cmd of
        ("HELO") -> heloCommand sock server $ (words msg) !! 1
        ("KILL_SERVICE") -> killCommand chan sock
        ("DOWNLOAD") -> downloadCommand sock server msg
        ("UPLOAD") -> uploadCommand sock server msg
        ("JOIN") -> joinCommand sock server msg
        _ -> do send sock (pack ("Unknown Command - " ++ msg ++ "\n\n")) ; return ()
--Function called when HELO text command recieved
-- | Echo the HELO payload together with the server's identity.
-- NOTE(review): the IP and student id are hard-coded.
heloCommand :: Socket -> DirectoryServer -> String -> IO ()
heloCommand sock DirectoryServer{..} msg = do
    send sock $ pack $ "HELO " ++ msg ++ "\n" ++
                       "IP:" ++ "192.168.6.129" ++ "\n" ++
                       "Port:" ++ port ++ "\n" ++
                       "StudentID:12306421\n\n"
    return ()
-- | Acknowledge the shutdown request on the client socket, then post the
-- kill token to the control channel so 'mainHandler' terminates.
killCommand :: Chan String -> Socket -> IO ()
killCommand chan sock = do
    _ <- send sock (pack "Service is now terminating!")
    writeChan chan "KILL_SERVICE"
-- | DOWNLOAD handler: look up which fileserver hosts the requested file,
-- fetch the data from it on a separate thread, and report status.
downloadCommand :: Socket -> DirectoryServer ->String -> IO ()
downloadCommand sock server@DirectoryServer{..} command = do
    -- Protocol fields are separated by the literal two characters "\n".
    let clines = splitOn "\\n" command
        filename = (splitOn ":" $ clines !! 1) !! 1
    fm <- atomically $ lookupFilemapping server filename
    case fm of
      (Nothing) -> send sock $ pack $ "DOWNLOAD: " ++ filename ++ "\n" ++
                                      "STATUS: " ++ "File not found" ++ "\n\n"
      (Just fm) -> do
        print (getFilemappingaddress fm)
        print (getFilemappingport fm)
        -- Fetch asynchronously; the data itself is sent to the client by
        -- 'downloadmsg' / 'returndata'.
        forkIO $ downloadmsg filename (getFilemappingaddress fm) (getFilemappingport fm) sock
        send sock $ pack $ "DOWNLOAD: " ++ filename ++ "\\n" ++
                           "STATUS: " ++ "SUCCESSFUL" ++ "\n\n"
    return ()
-- | Contact a fileserver, request the file, and forward its data back to
-- the waiting client via 'returndata'.
-- NOTE(review): the @host@ and @port@ parameters are ignored — the
-- connection is hard-wired to port "7007"; confirm this is intended.
downloadmsg :: String -> String -> String -> Socket -> IO()
downloadmsg filename host port sock = do
    addrInfo <- getAddrInfo (Just (defaultHints {addrFlags = [AI_PASSIVE]})) Nothing (Just "7007")
    let serverAddr = head addrInfo
    clsock <- socket (addrFamily serverAddr) Stream defaultProtocol
    connect clsock (addrAddress serverAddr)
    send clsock $ pack $ "DOWNLOAD:FILE" ++ "\\n" ++
                         "FILENAME:" ++ filename ++ "\\n"
    resp <- recv clsock 1024
    let msg = unpack resp
    -- Second "\n"-separated field carries "DATA:<contents>".
    let clines = splitOn "\\n" msg
        fdata = (splitOn ":" $ clines !! 1) !! 1
    sClose clsock
    forkIO $ returndata filename sock fdata
    return ()
-- | Reply to the client with the downloaded file's contents.
returndata :: String -> Socket -> String -> IO ()
returndata filename sock fdata = do
    let reply = "DOWNLOAD: " ++ filename ++ "\\n" ++
                "DATA: " ++ fdata ++ "\n\n"
    _ <- send sock (pack reply)
    return ()
-- | UPLOAD handler: pick a random registered fileserver to host the file,
-- push the data to it, record the mapping, and report status.
uploadCommand :: Socket -> DirectoryServer ->String -> IO ()
uploadCommand sock server@DirectoryServer{..} command = do
    -- Protocol fields are separated by the literal two characters "\n".
    let clines = splitOn "\\n" command
        filename = (splitOn ":" $ clines !! 1) !! 1
        fdata = (splitOn ":" $ clines !! 2) !! 1
    fm <- atomically $ lookupFilemapping server filename
    case fm of
      (Just fm) -> send sock $ pack $ "UPLOAD: " ++ filename ++ "\n" ++
                                      "STATUS: " ++ "File Already Exists" ++ "\n\n"
      (Nothing) -> do
        -- Choose a host uniformly at random among registered fileservers.
        numfs <- atomically $ M.size <$> readTVar fileservers
        rand <- randomRIO (0, (numfs-1))
        fs <- atomically $ lookupFileserver server rand
        case fs of
          (Nothing) -> send sock $ pack $ "UPLOAD: " ++ filename ++ "\n"++
                                          "FAILED: " ++ "No valid Fileserver found to host" ++ "\n\n"
          (Just fs) -> do
            forkIO $ uploadmsg sock filename fdata fs rand server
            -- NOTE(review): this 'fm' binding is unused; 'addFilemapping'
            -- below builds its own record.
            fm <- atomically $ newFilemapping filename rand (getFileserveraddress fs) (getFileserverport fs) (fmap show getZonedTime)
            atomically $ addFilemapping server filename rand (getFileserveraddress fs) (getFileserverport fs) (fmap show getZonedTime)
            send sock $ pack $ "UPLOAD: " ++ filename ++ "\\n" ++
                               "STATUS: " ++ "Successfull" ++ "\n\n"
    return ()
-- | Push an uploaded file's data to the chosen fileserver.
-- NOTE(review): the connection is hard-wired to port "7007"; the @fs@,
-- @rand@ and @server@ arguments are not used to pick the endpoint, and
-- the parsed @status@ is never acted upon — confirm this is intended.
uploadmsg :: Socket -> String -> String -> Fileserver -> Int -> DirectoryServer -> IO ()
uploadmsg sock filename fdata fs rand server@DirectoryServer{..} = withSocketsDo $ do
    addrInfo <- getAddrInfo (Just (defaultHints {addrFlags = [AI_PASSIVE]})) Nothing (Just "7007")
    let serverAddr = head addrInfo
    clsock <- socket (addrFamily serverAddr) Stream defaultProtocol
    connect clsock (addrAddress serverAddr)
    send clsock $ pack $ "UPLOAD:FILE" ++ "\\n" ++
                         "FILENAME:" ++ filename ++ "\\n" ++
                         "DATA:" ++ fdata ++ "\\n"
    resp <- recv clsock 1024
    sClose clsock
    let msg = unpack resp
    print $ msg ++ "!ENDLINE!"
    let clines = splitOn "\\n" msg
        status = (splitOn ":" $ clines !! 1) !! 1
    return ()
-- | JOIN handler: register a new fileserver under the next id and bump
-- the id counter.
-- NOTE(review): the read of 'fileservercount' and its increment happen
-- in separate transactions, so two concurrent JOINs could get the same
-- id; the 'fs' binding is unused ('addFileserver' builds its own record).
joinCommand :: Socket -> DirectoryServer ->String -> IO ()
joinCommand sock server@DirectoryServer{..} command = do
    let clines = splitOn "\\n" command
        newaddress = (splitOn ":" $ clines !! 1) !! 1
        newport = (splitOn ":" $ clines !! 2) !! 1
    nodeID <- atomically $ readTVar fileservercount
    fs <- atomically $ newFileserver nodeID newaddress newport
    atomically $ addFileserver server nodeID newaddress newport
    atomically $ incrementFileserverCount fileservercount
    send sock $ pack $ "JOINED DISTRIBUTED FILE SERVICE as fileserver: " ++ (show nodeID) ++ "\n\n"
    return ()
--Increment Tvar stored in memory i.e. numThreads
incrementTVar :: TVar Int -> STM ()
incrementTVar tv = modifyTVar tv (\n -> n + 1)

--Decrement Tvar stored in memory i.e. numThreads
decrementTVar :: TVar Int -> STM ()
decrementTVar tv = modifyTVar tv (\n -> n - 1)

-- Bump the fileserver id counter (used by 'joinCommand').
incrementFileserverCount :: TVar Int -> STM ()
incrementFileserverCount tv = modifyTVar tv (\n -> n + 1)
| Garygunn94/DFS | .stack-work/intero/intero36933CDm.hs | bsd-3-clause | 12,487 | 303 | 15 | 3,060 | 3,290 | 1,699 | 1,591 | 227 | 6 |
{- |
Module : XMonad.Prompt.Shell
Copyright : (C) 2007 Andrea Rossato
License : BSD3
Maintainer : andrea.rossato@unibz.it
Stability : unstable
Portability : unportable
A shell prompt for XMonad
-}
-- My features:
-- - remove completion
module XMonad.Prompt.MyShell
( -- * Usage
-- $usage
Shell (..)
, shellPrompt
-- ** Variations on shellPrompt
-- $spawns
, prompt
, safePrompt
, unsafePrompt
-- * Utility functions
, getCommands
, getBrowser
, getEditor
, getShellCompl
, split
) where
import Codec.Binary.UTF8.String (encodeString)
import Control.Exception as E
import Control.Monad (forM)
import Data.List (isPrefixOf)
import System.Directory (getDirectoryContents)
import System.Environment (getEnv)
import System.Posix.Files (getFileStatus, isDirectory)
import XMonad hiding (config)
import XMonad.Prompt
import XMonad.Util.Run
-- | Exception handler that ignores the 'IOException' and yields a fixed value.
econst :: Monad m => a -> IOException -> m a
econst x _ = return x
{- $usage
1. In your @~\/.xmonad\/xmonad.hs@:
> import XMonad.Prompt
> import XMonad.Prompt.Shell
2. In your keybindings add something like:
> , ((modm .|. controlMask, xK_x), shellPrompt def)
For detailed instruction on editing the key binding see
"XMonad.Doc.Extending#Editing_key_bindings". -}
-- | Prompt tag for the shell prompt.
data Shell = Shell
instance XPrompt Shell where
    showXPrompt Shell = "Run: "
    -- Selected completions are shell-escaped before being inserted.
    completionToCommand _ = escape
    commandToComplete _ c = c
-- | Open the shell prompt; the entered text is executed with 'spawn'.
shellPrompt :: XPConfig -> X ()
shellPrompt c = do
    cmds <- io getCommands
    mkXPrompt Shell c (getShellCompl cmds) spawn
{- $spawns
See safe and unsafeSpawn in "XMonad.Util.Run".
prompt is an alias for safePrompt;
safePrompt and unsafePrompt work on the same principles, but will use
XPrompt to interactively query the user for input; the appearance is
set by passing an XPConfig as the second argument. The first argument
is the program to be run with the interactive input.
You would use these like this:
> , ((modm, xK_b), safePrompt "firefox" greenXPConfig)
> , ((modm .|. shiftMask, xK_c), prompt ("xterm" ++ " -e") greenXPConfig)
Note that you want to use safePrompt for Firefox input, as Firefox
wants URLs, and unsafePrompt for the XTerm example because this allows
you to easily start a terminal executing an arbitrary command, like
'top'. -}
prompt, unsafePrompt, safePrompt :: FilePath -> XPConfig -> X ()
prompt = unsafePrompt
-- safeSpawn runs the program directly with the input as its argument list.
safePrompt c config = mkXPrompt Shell config getShellCompl run
    where run = safeSpawn c . return
-- unsafeSpawn hands "program input" to the shell, so shell syntax works.
unsafePrompt c config = mkXPrompt Shell config getShellCompl run
    where run a = unsafeSpawn $ c ++ " " ++ a
-- | Completion is intentionally disabled in this variant ("remove
-- completion" per the module header): always offer no completions.
-- The original @getShellCompl s = return []@ dropped the String
-- argument, which cannot satisfy the declared two-argument type.
getShellCompl :: [String] -> String -> IO [String]
getShellCompl _ _ = return []
-- | Candidate commands whose names start with the typed prefix; paths
-- (anything containing '/') never complete.
commandCompletionFunction :: [String] -> String -> [String]
commandCompletionFunction cmds str
    | '/' `elem` str = []
    | otherwise      = [c | c <- cmds, str `isPrefixOf` c]
-- | Every entry found in the directories on $PATH. Unreadable
-- directories and a missing $PATH are treated as empty; names starting
-- with '.' are skipped.
getCommands :: IO [String]
getCommands = do
    p <- getEnv "PATH" `E.catch` econst []
    let ds = filter (/= "") $ split ':' p
    es <- forM ds $ \d -> getDirectoryContents d `E.catch` econst []
    -- uniqSort (presumably from the XMonad prompt utilities) sorts and
    -- removes duplicates — TODO confirm its origin.
    return . uniqSort . filter ((/= '.') . head) . concat $ es
-- | Split a list on a separator element. The separator is dropped; a
-- trailing separator produces no trailing empty chunk
-- (e.g. @split ':' "a:" == ["a"]@).
split :: Eq a => a -> [a] -> [[a]]
split _ [] = []
split sep xs = chunk : case remainder of
                         []        -> []
                         (_:more)  -> split sep more
  where
    (chunk, remainder) = break (== sep) xs
-- | Backslash-escape every shell metacharacter in the string.
escape :: String -> String
escape = concatMap esc
  where
    esc c
      | isSpecialChar c = ['\\', c]
      | otherwise       = [c]

-- | Characters that need escaping before being handed to a shell.
isSpecialChar :: Char -> Bool
isSpecialChar c = c `elem` " &\\@\"'#?$*()[]{};"
-- | Ask the shell environment for the value of a variable in XMonad's environment, with a default value.
-- In order to /set/ an environment variable (eg. combine with a prompt so you can modify @$HTTP_PROXY@ dynamically),
-- you need to use 'System.Posix.putEnv'.
-- | Read an environment variable, substituting @fallthrough@ when the
-- lookup fails (variable unset or any other IOException).
env :: String -> String -> IO String
env variable fallthrough = E.catch (getEnv variable) (econst fallthrough)
{- | Ask the shell what browser the user likes. If the user hasn't defined any
$BROWSER, defaults to returning \"firefox\", since that seems to be the most
common X web browser.
Note that if you don't specify a GUI browser but a textual one, that'll be a problem
as 'getBrowser' will be called by functions expecting to be able to just execute the string
or pass it to a shell; so in that case, define $BROWSER as something like \"xterm -e elinks\"
or as the name of a shell script doing much the same thing. -}
getBrowser :: IO String
-- Falls back to "firefox" when $BROWSER is unset.
getBrowser = env "BROWSER" "firefox"
-- | Like 'getBrowser', but should be of a text editor. This gets the $EDITOR variable, defaulting to \"emacs\".
getEditor :: IO String
-- Falls back to "emacs" when $EDITOR is unset.
getEditor = env "EDITOR" "emacs"
| eb-gh-cr/XMonadContrib1 | XMonad/Prompt/MyShell.hs | bsd-3-clause | 4,979 | 0 | 14 | 1,283 | 898 | 475 | 423 | 70 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.