Dataset schema (column: type, observed range):

  code                string, length 5 to 1.03M
  repo_name           string, length 5 to 90
  path                string, length 4 to 158
  license             string class, 15 values
  size                int64, 5 to 1.03M
  n_ast_errors        int64, 0 to 53.9k
  ast_max_depth       int64, 2 to 4.17k
  n_whitespaces       int64, 0 to 365k
  n_ast_nodes         int64, 3 to 317k
  n_ast_terminals     int64, 1 to 171k
  n_ast_nonterminals  int64, 1 to 146k
  loc                 int64, -1 to 37.3k
  cycloplexity        int64, -1 to 1.31k
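Each record below pairs one source file's raw text (code) with its provenance (repo_name, path, license) and size/AST statistics. As a rough illustration only, here is a minimal sketch of reading a few records with the Hugging Face datasets library; the repository id "user/code-ast-metrics" is a placeholder, since the dataset's actual name is not given here.

# Minimal sketch, assuming the dataset is hosted on the Hugging Face Hub.
# "user/code-ast-metrics" is a hypothetical identifier, not the real one.
from datasets import load_dataset

ds = load_dataset("user/code-ast-metrics", split="train")

for row in ds.select(range(3)):
    # Each row carries the source text plus provenance and AST-derived metrics.
    print(row["repo_name"], row["path"], row["license"])
    print("  size:", row["size"], "loc:", row["loc"],
          "ast nodes:", row["n_ast_nodes"], "max depth:", row["ast_max_depth"])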
{-# OPTIONS_GHC -O2 -Wall #-} {-# LANGUAGE DeriveGeneric, DeriveAnyClass #-} ------------------------------------------------------------------------------- -- Maze.hs: Functions and types for processing the maze. -- -- Author: Jeremy Nuttall ------------------------------------------------------------------------------- module Proc.Maze ( Loc , getSF , getMoves , render , insert , chain) where import GHC.Generics (Generic) import Control.Lens (ix) import Control.Lens.Setter ((.~)) import Control.Monad (liftM2) import Control.DeepSeq (NFData,($!!)) import qualified Data.Map.Lazy as M (Map,insert,lookup) import qualified Data.ByteString.Char8 as C (ByteString,elemIndices,length ,index) import Data.List (findIndices) ------------------------------------------------------------------------------- -- Location type ------------------------------------------------------------------------------- data Loc = Loc !Int !Int deriving (Generic,Eq,Ord,NFData) -- Coonstructor loc :: Int -> Int -> Loc loc = Loc {-# INLINE loc #-} ------------------------------------------------------------------------------- -- Maze operations ------------------------------------------------------------------------------- isOpen :: Char -> Bool isOpen = (/= '#') {-# INLINE isOpen #-} lNE1 :: [a] -> Bool lNE1 (_:_:_) = True lNE1 [] = True lNE1 _ = False {-# INLINE lNE1 #-} -- Get the start and finish locations getSF :: [C.ByteString] -> Maybe (Loc, Loc) getSF contents = case (start,finish) of (Just s, Just f) -> Just (s,f) (Nothing, Just _) -> Nothing (Just _, Nothing) -> Nothing (Nothing, Nothing) -> Nothing where start = findRC 'S' finish = findRC 'F' findRC ch = let rc = map (C.elemIndices ch) contents row = findIndices (not . null) rc col = concat rc in if lNE1 col || lNE1 row then Nothing else Just $ loc (head col) (head row) {-# INLINE getSF #-} -- Returns a location if and only if it is within the bounds of the maze canAccess :: [C.ByteString] -> Loc -> Bool canAccess xs (Loc x y) = x >= 0 && x < C.length (head xs) && y >= 0 && y < length xs {-# INLINE canAccess #-} -- Get all possible moves for a given location getMoves :: [C.ByteString] -> Loc -> [Loc] getMoves maze (Loc x y) | not . isOpen $ maze !! y `C.index` x = [] | otherwise = scrub nwse where nwse = [loc x (y+1), loc (x+1) y, loc x (y-1), loc (x-1) y] scrub = filter (canAccess maze) {-# INLINE getMoves #-} -- The render function produces a maze with asterisks on the shortest route -- using a lens (asterisks have ASCII value 42) render :: [C.ByteString] -> [Loc] -> [C.ByteString] render maze [] = maze render maze ( Loc x y :locs) = render (ix y . ix x .~ 42 $ maze) locs {-# INLINABLE render #-} ------------------------------------------------------------------------------- -- Operation on predecessor map -- Predecessor map has form: children -> predecessors ------------------------------------------------------------------------------- -- Insert a list of children into the map insert :: [Loc] -> Loc -> M.Map Loc Loc -> M.Map Loc Loc insert [] _ mp = mp insert (k:ks) v mp = insert ks v (M.insert k v mp) -- Trace the map's chain from the finish to the start chain :: Loc -> Loc -> M.Map Loc Loc -> Maybe [Loc] chain begin start = chain' begin where chain' c locs | c == start = Just [] | otherwise = case M.lookup c locs of Just location -> liftM2 (:) (Just c) $!! chain' location locs Nothing -> Nothing
repo_name: jtnuttall/bfs-haskell | path: Proc/Maze.hs | license: gpl-2.0 | size: 3,882
n_ast_errors: 0 | ast_max_depth: 14 | n_whitespaces: 1,010 | n_ast_nodes: 1,047 | n_ast_terminals: 570 | n_ast_nonterminals: 477 | loc: -1 | cycloplexity: -1
module T where

import Prelude hiding ( id )
import Tests.ModelCheckerBasis

-- One initial state.
c = proc () -> do
  rec x <- (| unsafeNonDetAC (\x -> notA -< x) (idB -< x) |)
  returnA -< x

Just (m, out) = isConstructive c

ctlM = mkCTLModel m

test_model = ctlM `seq` True
test_nondet_init = isOK (mc ctlM (neg (prop out)))
repo_name: peteg/ADHOC | path: Tests/07_Nondeterminism/025_init_nondet_one_state.hs | license: gpl-2.0 | size: 331
n_ast_errors: 3 | ast_max_depth: 15 | n_whitespaces: 72 | n_ast_nodes: 146 | n_ast_terminals: 78 | n_ast_nonterminals: 68 | loc: -1 | cycloplexity: -1
module CSP.STS.Bisi.Report where import CSP.STS.Type import CSP.STS.Bisi.Refine ( domain, codomain, images ) import Autolib.Reporter import Autolib.ToDoc import Autolib.Reader import qualified Autolib.Relation as R import qualified Data.Set as S import Autolib.Set ( cross ) import Control.Monad ( guard, void, forM ) check_bisi (s,ms) (t, mt) r = do let miss = S.difference ( states s ) ( domain r ) when ( not $ S.null miss ) $ reject $ text "Diese Zustände aus" <+> ms <+> text "sind nicht im Vorbereich von R enthalten:" </> toDoc miss let mist = S.difference ( states t ) ( codomain r ) when ( not $ S.null mist ) $ reject $ text "Diese Zustände aus" <+> mt <+> text "sind nicht im Nachbereich von R enthalten:" </> toDoc mist when ( not $ R.holds r (start s) (start t) ) $ reject $ text "Es gilt nicht R" <> parens ( text "start" <+> ms <+> text ", start" <+> mt ) is_simulated_by (s, ms) (t, mt) ( r, text "R" ) is_simulated_by (t, mt) (s, ms) ( R.inverse r, text "R^-" ) is_simulated_by (s, ms) (t, mt) (r, mr ) = forM ( visible s ) $ \ (p,a,p') -> forM ( S.toList $ R.images r p ) $ \ q -> do let imgs = S.fromList $ images t q a sims = R.images r p' both = S.intersection imgs sims inform $ text "Transition (p1,a,q1) in" <+> ms <+> text ":" <+> toDoc (p,a,p') </> vcat [ text "Simulation (p1,p2) in" <+> mr <+> text ":" <+> toDoc (p,q) , nest 4 $ vcat [ braces ( text "p2 | (p2,a,q2) in" <+> mt ) <+> equals <+> toDoc imgs , braces ( text "p2 | (p1,p2) in" <+> mr ) <+> equals <+> toDoc sims , nest 4 $ text "gemeinsam erreichbar:" <+> toDoc both ] ] when ( S.null both ) $ reject $ text "nicht zusammenführbar."
repo_name: marcellussiegburg/autotool | path: collection/src/CSP/STS/Bisi/Report.hs | license: gpl-2.0 | size: 1,937
n_ast_errors: 0 | ast_max_depth: 22 | n_whitespaces: 630 | n_ast_nodes: 720 | n_ast_terminals: 368 | n_ast_nonterminals: 352 | loc: 35 | cycloplexity: 1
{-# OPTIONS -Wall #-}
module GenFunctions (genModule) where

-- import System.IO
import Data.List

genModule :: Int -> IO ()
genModule arity = writeFile "Generated.hs" (genModule' arity)

genModule' :: Int -> String
genModule' n = unlines . map pwc $ [1..n]

pwc :: Int -> String
pwc n = unlines [pwcdoc n, sigTot n, defTot n]

pwcdoc :: Int -> String
pwcdoc n = "-- | " ++ "Pointwise composition of a binary and a " ++ arities n ++ " function."

-- Documentation strings

arities :: Int -> String
arities n
  | n < 0                       = error "Arities are not defined for negative numbers"
  | n == 0                      = "nullary"
  | (n-1) < length aritystrings = aritystrings !! (n-1) ++ "nary"
  | otherwise                   = show n ++ "-ary"

aritystrings :: [String]
aritystrings = ["u","bi","ter","quater","qui","se","septe","octo","nove","de"]

-- Type signatures and symbol shapes

symbol :: Int -> String
symbol n = "(" ++ (n % 2) × '-' ++ (n ÷ 2) × '¦' ++ "|" ++ ")"

tSuff :: Int -> Char -> Char -> String
tSuff n a v = n ~× (a:" -> ") ++ [v]

tFunc :: Int -> Char -> Char -> String
tFunc n a v = "(" ++ tSuff n a v ++ ")"

tTot :: Int -> String
tTot n = tFunc n 'b' 'c' ++ " -> (a -> b) -> " ++ tSuff n 'a' 'c'

sigTot :: Int -> String
sigTot n = symbol n ++ "::" ++ tTot n

-- Function implementations

defTot :: Int -> String
defTot n = symbol n ++ defLhs n ++ " = " ++ defImpl n

defLhs :: Int -> String
defLhs n = " g f " ++ (intercalate " " . map vName $ [1..n])

vName :: Int -> String
vName n = 'a':show n

defImpl :: Int -> String
defImpl n = "g " ++ concatMap defApp [1..n]
  where defApp m = "(f " ++ vName m ++ ")"

-- helpers

(×) :: Int -> a -> [a]
(×) = replicate

(~×) :: Int -> [a] -> [a]
(~×) = ((.).(.)) concat replicate

(÷) :: Int -> Int -> Int
(÷) = div

(%) :: Int -> Int -> Int
(%) = mod
repo_name: jplLloyd/pointwise-composition | path: src/Data/GenFunctions.hs | license: gpl-3.0 | size: 1,858
n_ast_errors: 1 | ast_max_depth: 12 | n_whitespaces: 480 | n_ast_nodes: 782 | n_ast_terminals: 417 | n_ast_nonterminals: 365 | loc: 45 | cycloplexity: 1
{-
This file is part of The Simple Nice Manual Generator.

The Simple Nice Manual Generator is free software: you can redistribute it
and/or modify it under the terms of the GNU General Public License as
published by the Free Software Foundation, either version 3 of the License,
or any later version.

The Simple Nice Manual Generator is distributed in the hope that it will be
useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
Public License for more details.

You should have received a copy of the GNU General Public License along with
The Simple Nice Manual Generator. If not, see <http://www.gnu.org/licenses/>
-}

-- | A little module, exposed to test the syntax highlighting plugin system.
module Text.Syntax.Test where

import Text.Syntax.Simple
import Manual.Structure
import Data.Dynamic
import Data.Either
import Text.Parsec
import Text.Parsec.String
import Text.Parsec.Prim
import Error.Report
import Control.Exception hiding (try)
import Control.Monad

-- | The plugin, highlights red, yellow, green, blue
colour :: Dynamic
colour = highlight "colour" $
  map (\c -> (c, c ++ "_elem", c)) ["red", "yellow", "green", "blue"]
repo_name: elginer/snm | path: Text/Syntax/Test.hs | license: gpl-3.0 | size: 1,282
n_ast_errors: 0 | ast_max_depth: 10 | n_whitespaces: 242 | n_ast_nodes: 124 | n_ast_terminals: 77 | n_ast_nonterminals: 47 | loc: 13 | cycloplexity: 1
-- Copyright (c) 2015 Taylor Fausak <taylor@fausak.me> -- Copyright (c) 2015 Stian Ellingsen <stian@plaimi.net> -- -- See COPYING.Haskeleton.md for the licence covering this file. module Main (main) where import Control.Monad (guard) import Data.Functor ((<$>)) import Data.Maybe (fromMaybe) import Data.Monoid ((<>)) import System.Directory (doesFileExist) import System.Exit (exitFailure, exitSuccess) import System.Process (readProcess) import Text.XML.Light ilu :: String -> [Attr] -> Maybe Integer ilu k a = read <$> lookupAttr (blank_name {qName = k}) a checkBoxes :: [Attr] -> Bool checkBoxes a = ilu "boxes" a == ilu "count" a checkBoolean :: [Attr] -> Bool checkBoolean a = checkBoxes a && ilu "false" a == Just 0 checkGuards :: [Attr] -> Bool checkGuards a = (checkBoolean a &&) . fromMaybe False $ do t <- ilu "true" a c <- ilu "count" a return $ t <= c `div` 2 check :: String -> [Attr] -> Bool check "guards" = checkGuards check t | t `elem` ["booleans", "conditionals", "qualifiers"] = checkBoolean check _ = checkBoxes main :: IO () main = do file <- tix let arguments n = ["report", "--per-module"] <> n <> [file] output <- readProcess "hpc" (arguments ["--xml-output"]) "" let res = fromMaybe False $ do e <- parseXMLDoc output s <- findChild (blank_name {qName = "summary"}) e let m = findChildren (blank_name {qName = "module"}) e c = concat $ elChildren <$> (s : m) guard . not $ null c return $ and [check (qName $ elName x) (elAttribs x) | x <- c] if res then exitSuccess else readProcess "hpc" (arguments []) "" >>= putStr >> exitFailure -- The location of the TIX file changed between versions of cabal-install. -- See <https://github.com/tfausak/haskeleton/issues/31> for details. tix :: IO FilePath tix = do let newFile = "tests.tix" oldFile = "dist/hpc/tix/tests/tests.tix" newFileExists <- doesFileExist newFile let file = if newFileExists then newFile else oldFile return file
repo_name: stiell/phec | path: test-suite/HPC.hs | license: gpl-3.0 | size: 2,045
n_ast_errors: 0 | ast_max_depth: 19 | n_whitespaces: 445 | n_ast_nodes: 678 | n_ast_terminals: 354 | n_ast_nonterminals: 324 | loc: 46 | cycloplexity: 2
{-# LANGUAGE ScopedTypeVariables #-} module Main where import Data.Typeable import Transient.Base import Transient.Backtrack import Transient.Indeterminism import Transient.Logged import Transient.Move import Transient.EVars import Control.Applicative import Control.Concurrent import Control.Exception import Control.Monad.State import Data.Monoid import System.IO.Unsafe import System.Directory import System.FilePath import Network.HTTP import Network import System.IO import Data.IORef import Data.List hiding (find,map, group) main= keep $ do oneThread $ option "main" "to kill previous spawned processes and return to the main menu" <|> return "" liftIO $ putStrLn "MAIN MENU" nonDeterminsm <|> trans <|> colors <|> app <|> futures <|> server <|> distributed <|> pubSub solveConstraint= do x <- choose [1,2,3] y <- choose [4,5,6] guard $ x * y == 8 return (x,y) pythags = freeThreads $ do x <- choose [1..50] y <- choose ([1..x] :: [Int]) z <- choose [1..round $ sqrt(fromIntegral $ 2*x*x)] guard (x*x+y*y==z*z) th <- liftIO myThreadId return (x, y, z, th) example1= do option "ex1" "example 1" r <- threads 4 solveConstraint liftIO $ print r example2= do option "pyt" "pythagoras" r<- threads 2 pythags liftIO $ print r collectSample= threads 4 $ do option "coll" "group sample: return results in a list" r <- collect 0 $ do x <- choose [1,2,3] y <- choose [4,5,6] th <- liftIO $ threadDelay 1000 >> myThreadId return (x,y,th) liftIO $ print r threadSample= do option "th" "threads sample" liftIO $ print "number of threads? (< 10)" n <- input ( < 10) threads n $ do x <- choose [1,2,3] y <- choose [4,5,6] th <- liftIO myThreadId liftIO $ print (x,y,th) nonDeterminsm= do option "nondet" "Non determinism examples" example1 <|> example2 <|> collectSample <|> threadSample <|> fileSearch find' :: String -> FilePath -> TransientIO FilePath find' s d = do fs <- liftIO $ getDirectoryContents d `catch` \(e:: SomeException) -> return [] -- 1 let fs' = sort $ filter (`notElem` [".",".."]) fs -- 2 if any (== s) fs' -- 3 then do liftIO $ print $ d </> s return $ d</> s else do f <- choose fs' -- 4 let d' = d </> f -- 6 isdir <- liftIO $ doesDirectoryExist d' -- 7 if isdir then find' s d' -- 8 else stop ------------------ fileSearch= do option "file" "example of file search" r<- threads 3 $ collect 10 $ find' "Main.hs" "." liftIO $ putStrLn $ "SOLUTION= "++ show r -- exit trans= do option "trans" "transaction examples with backtracking for undoing actions" transaction <|> transaction2 transaction= do option "back" "backtracking test" productNavigation reserve payment transaction2= do option "back2" "backtracking test 2" productNavigation reserveAndSendMsg payment liftIO $ print "done!" productNavigation = liftIO $ putStrLn "product navigation" reserve= liftIO (putStrLn "product reserved,added to cart") `onUndo` liftIO (putStrLn "product un-reserved") payment = do liftIO $ putStrLn "Payment failed" undo reserveAndSendMsg= do reserve liftIO (putStrLn "update other database necesary for the reservation") `onUndo` liftIO (putStrLn "database update undone") colors :: TransientIO () colors= do option "colors" "choose between three colors" r <- color 1 "red" <|> color 2 "green" <|> color 3 "blue" liftIO $ print r where color :: Int -> String -> TransientIO String color n str= do option (show n) str liftIO . 
print $ str ++ " color" return str app :: TransientIO () app= do option "app" "applicative expression that return a counter in 2-tuples every second" liftIO $ putStrLn "to stop the sequence, write main(enter)" counter <- liftIO $ newMVar 0 r <- (,) <$> number counter 1 <*> number counter 1 liftIO $ putStrLn $ "result=" ++ show r where number counter n= waitEvents $ do threadDelay $ n * 1000000 n <- takeMVar counter putMVar counter (n+1) return n futures= do option "async" "for parallelization of IO actions with applicative and monoidal combinators" sum1 <|> sum2 sum1 :: TransientIO () sum1= do option "sum1" "access to two web pages concurrently and sum the number of words using Applicative" liftIO $ print " downloading data..." (r,r') <- (,) <$> async (worker "http://www.haskell.org/") <*> async (worker "http://www.google.com/") liftIO $ putStrLn $ "result=" ++ show (r + r') getURL= simpleHTTP . getRequest worker :: String -> IO Int worker url=do r <- getURL url body <- getResponseBody r putStrLn $ "number of words in " ++ url ++" is: " ++ show(length (words body)) return . length . words $ body sum2 :: TransientIO () sum2= do option "sum2" "access to N web pages concurrenty and sum the number of words using map-fold" liftIO $ print " downloading data..." rs <- foldl (<>) (return 0) $ map (async . worker) [ "http://www.haskell.org/" , "http://www.google.com/"] liftIO $ putStrLn $ "result=" ++ show rs instance Monoid Int where mappend= (+) mempty= 0 server :: TransientIO () server= do option "server" "A web server in the port 8080" liftIO $ print "Server Stated" sock <- liftIO $ listenOn $ PortNumber 8080 (h,_,_) <- spawn $ accept sock `catch` (\(e::SomeException) -> sClose sock >> throw e) liftIO $ do hPutStr h msg putStrLn "new request" hFlush h hClose h `catch` (\(e::SomeException) -> sClose sock) msg = "HTTP/1.0 200 OK\r\nContent-Length: 5\r\n\r\nPong!\r\n" -- distributed computing distributed= do option "distr" "examples of distributed computing" let port1 = PortNumber 2000 let node =createNode host port1 addNodes [node ] listen port1 <|> return ()-- conn port1 port1 <|> conn port2 port1 examples' node where host= "localhost" examples' node= do logged $ option "maind" "to see this menu" <|> return "" r <-logged $ option "move" "move to another node" <|> option "call" "call a function in another node" <|> option "chat" "chat" <|> option "netev" "events propagating trough the network" case r of "call" -> callExample node "move" -> moveExample node "chat" -> chat "netev" -> networkEvents node callExample node= do logged $ putStrLnhp node "asking for the remote data" s <- callTo node $ liftIO $ do putStrLnhp node "remote callTo request" readIORef environ liftIO $ putStrLn $ "resp=" ++ show s {-# NOINLINE environ #-} environ= unsafePerformIO $ newIORef "Not Changed" moveExample node= do putStrLnhp node "enter a string. It will be inserted in the other node by a migrating program" name <- logged $ input (const True) beamTo node putStrLnhp node "moved!" putStrLnhp node $ "inserting "++ name ++" as new data in this node" liftIO $ writeIORef environ name return() chat :: TransIO () chat = do name <- logged $ do liftIO $ putStrLn "Name?" ; input (const True) text <- logged $ waitEvents $ putStr ">" >> hFlush stdout >> getLine' (const True) let line= name ++": "++ text clustered $ liftIO $ putStrLn line networkEvents node= do logged $ do putStrLnhp node "callTo is not a simole remote call. 
it stablish a connection" putStrLnhp node "between transient processes in different nodes" putStrLnhp node "in this example, events are piped back from a remote node to the local node" r <- callTo node $ do option "fire" "fire event" return "event fired" putStrLnhp node $ r ++ " in remote node" putStrLnhp p msg= liftIO $ putStr (show p) >> putStr " ->" >> putStrLn msg pubSub= do option "pubs" "an example of publish-subscribe using Event Vars (EVars)" v <- newEVar :: TransIO (EVar String) v' <- newEVar subscribe v v' <|> publish v v' where publish v v'= do liftIO $ putStrLn "Enter a message to publish" msg <- input(const True) writeEVar v msg liftIO $ putStrLn "after writing first EVar\n" writeEVar v' $ "second " ++ msg liftIO $ putStrLn "after writing second EVar\n" publish v v' subscribe :: EVar String -> EVar String -> TransIO () subscribe v v'= do r <- (,) <$> proc1 v <*> proc2 v' liftIO $ do putStr "applicative result= " print r susbcribe2 :: EVar String -> EVar String -> TransIO () susbcribe2 v v'= do x <- readEVar v y <- readEVar v' liftIO $ do putStr "monadic result" print (x,y) proc1 v= do msg <- readEVar v liftIO $ putStrLn $ "proc1 readed var: " ++ show msg return msg proc2 v= do msg <- readEVar v liftIO $ putStrLn $ "proc2 readed var: " ++ show msg return msg --main=do -- r <- getURL "https://www.w3.org/services/html2txt?url=http%3A%2F%2Fwww.searchquotes.com%2Fsearch%2Ftransient%2F" -- body <- getResponseBody r -- print $ parse quote' "" body -- where -- quote'= do -- q <- between(brackets natural) (brackets natural) string -- -- if "http" `isPrefixOf` q -- then quote' -- else return q -- --main = case (parse numbers "" "11, 2, 43") of -- Left err -> print err -- Right xs -> print (sum xs) -- --numbers = commaSep integer
repo_name: haskellGardener/transient | path: Main.hs | license: gpl-3.0 | size: 10,528
n_ast_errors: 0 | ast_max_depth: 16 | n_whitespaces: 3,404 | n_ast_nodes: 2,926 | n_ast_terminals: 1,386 | n_ast_nonterminals: 1,540 | loc: 263 | cycloplexity: 4
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.DFAReporting.Placements.Get -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <brendan.g.hay@gmail.com> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Gets one placement by ID. -- -- /See:/ <https://developers.google.com/doubleclick-advertisers/ DCM/DFA Reporting And Trafficking API Reference> for @dfareporting.placements.get@. module Network.Google.Resource.DFAReporting.Placements.Get ( -- * REST Resource PlacementsGetResource -- * Creating a Request , placementsGet , PlacementsGet -- * Request Lenses , pgProFileId , pgId ) where import Network.Google.DFAReporting.Types import Network.Google.Prelude -- | A resource alias for @dfareporting.placements.get@ method which the -- 'PlacementsGet' request conforms to. type PlacementsGetResource = "dfareporting" :> "v2.7" :> "userprofiles" :> Capture "profileId" (Textual Int64) :> "placements" :> Capture "id" (Textual Int64) :> QueryParam "alt" AltJSON :> Get '[JSON] Placement -- | Gets one placement by ID. -- -- /See:/ 'placementsGet' smart constructor. data PlacementsGet = PlacementsGet' { _pgProFileId :: !(Textual Int64) , _pgId :: !(Textual Int64) } deriving (Eq,Show,Data,Typeable,Generic) -- | Creates a value of 'PlacementsGet' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'pgProFileId' -- -- * 'pgId' placementsGet :: Int64 -- ^ 'pgProFileId' -> Int64 -- ^ 'pgId' -> PlacementsGet placementsGet pPgProFileId_ pPgId_ = PlacementsGet' { _pgProFileId = _Coerce # pPgProFileId_ , _pgId = _Coerce # pPgId_ } -- | User profile ID associated with this request. pgProFileId :: Lens' PlacementsGet Int64 pgProFileId = lens _pgProFileId (\ s a -> s{_pgProFileId = a}) . _Coerce -- | Placement ID. pgId :: Lens' PlacementsGet Int64 pgId = lens _pgId (\ s a -> s{_pgId = a}) . _Coerce instance GoogleRequest PlacementsGet where type Rs PlacementsGet = Placement type Scopes PlacementsGet = '["https://www.googleapis.com/auth/dfatrafficking"] requestClient PlacementsGet'{..} = go _pgProFileId _pgId (Just AltJSON) dFAReportingService where go = buildClient (Proxy :: Proxy PlacementsGetResource) mempty
repo_name: rueshyna/gogol | path: gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/Placements/Get.hs | license: mpl-2.0 | size: 3,085
n_ast_errors: 0 | ast_max_depth: 14 | n_whitespaces: 727 | n_ast_nodes: 421 | n_ast_terminals: 249 | n_ast_nonterminals: 172 | loc: 61 | cycloplexity: 1
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- Module : Network.AWS.ElasticTranscoder.ListPresets -- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com> -- License : This Source Code Form is subject to the terms of -- the Mozilla Public License, v. 2.0. -- A copy of the MPL can be found in the LICENSE file or -- you can obtain it at http://mozilla.org/MPL/2.0/. -- Maintainer : Brendan Hay <brendan.g.hay@gmail.com> -- Stability : experimental -- Portability : non-portable (GHC extensions) -- -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | The ListPresets operation gets a list of the default presets included with -- Elastic Transcoder and the presets that you've added in an AWS region. -- -- <http://docs.aws.amazon.com/elastictranscoder/latest/developerguide/ListPresets.html> module Network.AWS.ElasticTranscoder.ListPresets ( -- * Request ListPresets -- ** Request constructor , listPresets -- ** Request lenses , lp1Ascending , lp1PageToken -- * Response , ListPresetsResponse -- ** Response constructor , listPresetsResponse -- ** Response lenses , lpr1NextPageToken , lpr1Presets ) where import Network.AWS.Prelude import Network.AWS.Request.RestJSON import Network.AWS.ElasticTranscoder.Types import qualified GHC.Exts data ListPresets = ListPresets { _lp1Ascending :: Maybe Text , _lp1PageToken :: Maybe Text } deriving (Eq, Ord, Read, Show) -- | 'ListPresets' constructor. -- -- The fields accessible through corresponding lenses are: -- -- * 'lp1Ascending' @::@ 'Maybe' 'Text' -- -- * 'lp1PageToken' @::@ 'Maybe' 'Text' -- listPresets :: ListPresets listPresets = ListPresets { _lp1Ascending = Nothing , _lp1PageToken = Nothing } -- | To list presets in chronological order by the date and time that they were -- created, enter 'true'. To list presets in reverse chronological order, enter 'false'. lp1Ascending :: Lens' ListPresets (Maybe Text) lp1Ascending = lens _lp1Ascending (\s a -> s { _lp1Ascending = a }) -- | When Elastic Transcoder returns more than one page of results, use 'pageToken' -- in subsequent 'GET' requests to get each successive page of results. lp1PageToken :: Lens' ListPresets (Maybe Text) lp1PageToken = lens _lp1PageToken (\s a -> s { _lp1PageToken = a }) data ListPresetsResponse = ListPresetsResponse { _lpr1NextPageToken :: Maybe Text , _lpr1Presets :: List "Presets" Preset } deriving (Eq, Read, Show) -- | 'ListPresetsResponse' constructor. -- -- The fields accessible through corresponding lenses are: -- -- * 'lpr1NextPageToken' @::@ 'Maybe' 'Text' -- -- * 'lpr1Presets' @::@ ['Preset'] -- listPresetsResponse :: ListPresetsResponse listPresetsResponse = ListPresetsResponse { _lpr1Presets = mempty , _lpr1NextPageToken = Nothing } -- | A value that you use to access the second and subsequent pages of results, if -- any. When the presets fit on one page or when you've reached the last page of -- results, the value of 'NextPageToken' is 'null'. lpr1NextPageToken :: Lens' ListPresetsResponse (Maybe Text) lpr1NextPageToken = lens _lpr1NextPageToken (\s a -> s { _lpr1NextPageToken = a }) -- | An array of 'Preset' objects. lpr1Presets :: Lens' ListPresetsResponse [Preset] lpr1Presets = lens _lpr1Presets (\s a -> s { _lpr1Presets = a }) . 
_List instance ToPath ListPresets where toPath = const "/2012-09-25/presets" instance ToQuery ListPresets where toQuery ListPresets{..} = mconcat [ "Ascending" =? _lp1Ascending , "PageToken" =? _lp1PageToken ] instance ToHeaders ListPresets instance ToJSON ListPresets where toJSON = const (toJSON Empty) instance AWSRequest ListPresets where type Sv ListPresets = ElasticTranscoder type Rs ListPresets = ListPresetsResponse request = get response = jsonResponse instance FromJSON ListPresetsResponse where parseJSON = withObject "ListPresetsResponse" $ \o -> ListPresetsResponse <$> o .:? "NextPageToken" <*> o .:? "Presets" .!= mempty instance AWSPager ListPresets where page rq rs | stop (rs ^. lpr1NextPageToken) = Nothing | otherwise = (\x -> rq & lp1PageToken ?~ x) <$> (rs ^. lpr1NextPageToken)
repo_name: dysinger/amazonka | path: amazonka-elastictranscoder/gen/Network/AWS/ElasticTranscoder/ListPresets.hs | license: mpl-2.0 | size: 4,754
n_ast_errors: 0 | ast_max_depth: 12 | n_whitespaces: 1,033 | n_ast_nodes: 665 | n_ast_terminals: 392 | n_ast_nonterminals: 273 | loc: 72 | cycloplexity: 1
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-} -------------------------------------------------------------------------------- {-| Module : Dialogs Copyright : (c) Daan Leijen 2003 License : wxWindows Maintainer : wxhaskell-devel@lists.sourceforge.net Stability : provisional Portability : portable Defines common dialogs. * Instances: 'Form', 'Framed' -- 'Textual', 'Literate', 'Dimensions', 'Colored', 'Visible', 'Child', 'Able', 'Tipped', 'Identity', 'Styled', 'Reactive', 'Paint'. -} -------------------------------------------------------------------------------- module Graphics.UI.WX.Dialogs ( -- * Generic dialogs Dialog, dialog, showModal -- * Messages , errorDialog, warningDialog, infoDialog , confirmDialog, proceedDialog -- * Files , fileOpenDialog, filesOpenDialog , fileSaveDialog , dirOpenDialog -- * Misc. , fontDialog , colorDialog , passwordDialog , textDialog , numberDialog -- * Primitive , dialogEx ) where import Graphics.UI.WXCore import Graphics.UI.WX.Types import Graphics.UI.WX.Attributes import Graphics.UI.WX.Layout import Graphics.UI.WX.Classes import Graphics.UI.WX.Window import Graphics.UI.WX.Events -- just for haddock import Graphics.UI.WX.Frame instance Form (Dialog a) where layout = writeAttr "layout" windowSetLayout -- | Create a dialog window. Use 'showModal' for a modal dialog. Use -- the 'visible' property to show\/hide a modeless dialog. dialog :: Window a -> [Prop (Dialog ())] -> IO (Dialog ()) dialog parent props = dialogEx parent (dialogDefaultStyle {- .+. wxNO_FULL_REPAINT_ON_RESIZE -}) props -- | Create a dialog window with a certain style. dialogEx :: Window a -> Style -> [Prop (Dialog ())] -> IO (Dialog ()) dialogEx parent style props = feed2 props style $ initialFrame $ \id rect txt -> \props flags -> do d <- dialogCreate parent id txt rect flags set d props return d -- | Show a modal dialog. Take a function as argument that takes a function itself -- as argument that can be used to end the modal dialog. The argument of this function -- is returned as the result of the dialog. The result is 'Nothing' when the dialog -- is dismissed via the system menu. -- -- > d <- dialog w [text := "Demo"] -- > ok <- button d [text := "Ok"] -- > ... -- > result <- showModal d (\stop -> set ok [on command := stop (Just 42)]) -- showModal :: Dialog b -> ((Maybe a -> IO ()) -> IO ()) -> IO (Maybe a) showModal dialog f = do ret <- varCreate Nothing f (\x -> do{ varSet ret x; dialogEndModal dialog 0} ) dialogShowModal dialog varGet ret
repo_name: thielema/wxhaskell | path: wx/src/Graphics/UI/WX/Dialogs.hs | license: lgpl-2.1 | size: 2,707
n_ast_errors: 0 | ast_max_depth: 12 | n_whitespaces: 573 | n_ast_nodes: 463 | n_ast_terminals: 258 | n_ast_nonterminals: 205 | loc: 42 | cycloplexity: 1
{-# LANGUAGE OverloadedStrings, UnicodeSyntax #-}

module DevelMain where

import Sweetroll.Prelude
import System.Environment
import Text.Read (readMaybe)
import Rapid
import Network.Wai.Handler.Warp
import Network.Wai.Middleware.RequestLogger
import Sweetroll.App
import Sweetroll.Conf

app ∷ IO Application
app = logStdoutDev <$> initSweetrollApp def { testMode = True } def { secretKey = "TESTKEY" }

update ∷ IO ()
update = do
  dport ← lookupEnv "SWEETROLL_DEV_PORT"
  let portNumber = fromMaybe 3000 $ readMaybe =<< dport
  rapid 0 $ \x → restart x (asString "web") (runSettings (setPort portNumber defaultSettings) =<< app)
repo_name: myfreeweb/sweetroll | path: sweetroll-be/executable/DevelMain.hs | license: unlicense | size: 723
n_ast_errors: 0 | ast_max_depth: 14 | n_whitespaces: 175 | n_ast_nodes: 181 | n_ast_terminals: 98 | n_ast_nonterminals: 83 | loc: 18 | cycloplexity: 1
{-# LANGUAGE QuasiQuotes, FlexibleInstances #-} module View where import Str(str) import Acl import Util import Console import Diff tblList = [str| SELECT n.nspname AS "Schema", c.relname AS "Name", d.description AS "Comment", relacl AS "ACLs" FROM pg_catalog.pg_namespace n JOIN pg_catalog.pg_class c ON c.relnamespace = n.oid LEFT JOIN pg_catalog.pg_description d ON (c.oid = d.objoid AND d.objsubid = 0) -- LEFT JOIN pg_catalog.pg_class dc ON (d.classoid=dc.oid AND dc.relname='pg_class') -- LEFT JOIN pg_catalog.pg_namespace dn ON (dn.oid=dc.relnamespace AND dn.nspname='pg_catalog') WHERE n.nspname IN ('account','document') AND c.relkind = 'r' AND n.nspname !~ '^pg_' AND n.nspname <> 'information_schema' ORDER BY 1, 2 |] tblColumns = [str| SELECT * FROM (SELECT n.nspname,c.relname,a.attname,a.atttypid,a.attnotnull OR (t.typtype = 'd' AND t.typnotnull) AS attnotnull,a.atttypmod,a.attlen,row_number() OVER (PARTITION BY a.attrelid ORDER BY a.attnum) AS attnum, pg_catalog.pg_get_expr(def.adbin, def.adrelid) AS adsrc,dsc.description,t.typbasetype,t.typtype FROM pg_catalog.pg_namespace n JOIN pg_catalog.pg_class c ON (c.relnamespace = n.oid) JOIN pg_catalog.pg_attribute a ON (a.attrelid=c.oid) JOIN pg_catalog.pg_type t ON (a.atttypid = t.oid) LEFT JOIN pg_catalog.pg_attrdef def ON (a.attrelid=def.adrelid AND a.attnum = def.adnum) LEFT JOIN pg_catalog.pg_description dsc ON (c.oid=dsc.objoid AND a.attnum = dsc.objsubid) LEFT JOIN pg_catalog.pg_class dc ON (dc.oid=dsc.classoid AND dc.relname='pg_class') LEFT JOIN pg_catalog.pg_namespace dn ON (dc.relnamespace=dn.oid AND dn.nspname='pg_catalog') WHERE a.attnum > 0 AND NOT a.attisdropped AND n.nspname LIKE 'account' AND c.relname LIKE 'user_table') c WHERE true ORDER BY nspname,c.relname,attnum |] tblIndices2 = [str| SELECT NULL AS TABLE_CAT, n.nspname AS TABLE_SCHEM, ct.relname AS TABLE_NAME, NOT i.indisunique AS NON_UNIQUE, NULL AS INDEX_QUALIFIER, ci.relname AS INDEX_NAME, CASE i.indisclustered WHEN true THEN 1 ELSE CASE am.amname WHEN 'hash' THEN 2 ELSE 3 END END AS TYPE, (i.keys).n AS ORDINAL_POSITION, pg_catalog.pg_get_indexdef(ci.oid, (i.keys).n, false) AS COLUMN_NAME, CASE am.amcanorder WHEN true THEN CASE i.indoption[(i.keys).n - 1] & 1 WHEN 1 THEN 'D' ELSE 'A' END ELSE NULL END AS ASC_OR_DESC, ci.reltuples AS CARDINALITY, ci.relpages AS PAGES, pg_catalog.pg_get_expr(i.indpred, i.indrelid) AS FILTER_CONDITION FROM pg_catalog.pg_class ct JOIN pg_catalog.pg_namespace n ON (ct.relnamespace = n.oid) JOIN (SELECT i.indexrelid, i.indrelid, i.indoption, i.indisunique, i.indisclustered, i.indpred, i.indexprs, information_schema._pg_expandarray(i.indkey) AS keys FROM pg_catalog.pg_index i) i ON (ct.oid = i.indrelid) JOIN pg_catalog.pg_class ci ON (ci.oid = i.indexrelid) JOIN pg_catalog.pg_am am ON (ci.relam = am.oid) WHERE true AND n.nspname = 'account' AND ct.relname = 'user_table' ORDER BY NON_UNIQUE, TYPE, INDEX_NAME, ORDINAL_POSITION |] tblIndices = [str| select ind.indisclustered, ind.indexrelid, ind.indisprimary, cls.relname from pg_catalog.pg_index ind, pg_catalog.pg_class tab, pg_catalog.pg_namespace sch, pg_catalog.pg_class cls where ind.indrelid = tab.oid and cls.oid = ind.indexrelid and tab.relnamespace = sch.oid and tab.relname = $1 and sch.nspname = $2 |] tblConstraints = [str| SELECT cons.conname, cons.conkey FROM pg_catalog.pg_constraint cons, pg_catalog.pg_class tab, pg_catalog.pg_namespace sch WHERE cons.contype = 'u' and cons.conrelid = tab.oid and tab.relnamespace = sch.oid AND tab.relname = $1 and sch.nspname = $2 |] tblKeysx = 
[str| SELECT NULL AS TABLE_CAT, n.nspname AS TABLE_SCHEM, ct.relname AS TABLE_NAME, a.attname AS COLUMN_NAME, (i.keys).n AS KEY_SEQ, ci.relname AS PK_NAME FROM pg_catalog.pg_class ct JOIN pg_catalog.pg_attribute a ON (ct.oid = a.attrelid) JOIN pg_catalog.pg_namespace n ON (ct.relnamespace = n.oid) JOIN (SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray(i.indkey) AS keys FROM pg_catalog.pg_index i) i ON (a.attnum = (i.keys).x AND a.attrelid = i.indrelid) JOIN pg_catalog.pg_class ci ON (ci.oid = i.indexrelid) WHERE true AND n.nspname = 'account' AND ct.relname = 'user_table' AND i.indisprimary ORDER BY table_name, pk_name, key_seq |] tblKeys = [str| SELECT NULL::text AS PKTABLE_CAT, pkn.nspname AS PKTABLE_SCHEM, pkc.relname AS PKTABLE_NAME, pka.attname AS PKCOLUMN_NAME, NULL::text AS FKTABLE_CAT, fkn.nspname AS FKTABLE_SCHEM, fkc.relname AS FKTABLE_NAME, fka.attname AS FKCOLUMN_NAME, pos.n AS KEY_SEQ, CASE con.confupdtype WHEN 'c' THEN 0 WHEN 'n' THEN 2 WHEN 'd' THEN 4 WHEN 'r' THEN 1 WHEN 'a' THEN 3 ELSE NULL END AS UPDATE_RULE, CASE con.confdeltype WHEN 'c' THEN 0 WHEN 'n' THEN 2 WHEN 'd' THEN 4 WHEN 'r' THEN 1 WHEN 'a' THEN 3 ELSE NULL END AS DELETE_RULE, con.conname AS FK_NAME, pkic.relname AS PK_NAME, CASE WHEN con.condeferrable AND con.condeferred THEN 5 WHEN con.condeferrable THEN 6 ELSE 7 END AS DEFERRABILITY FROM pg_catalog.pg_namespace pkn, pg_catalog.pg_class pkc, pg_catalog.pg_attribute pka, pg_catalog.pg_namespace fkn, pg_catalog.pg_class fkc, pg_catalog.pg_attribute fka, pg_catalog.pg_constraint con, pg_catalog.generate_series(1, 32) pos(n), pg_catalog.pg_depend dep, pg_catalog.pg_class pkic WHERE pkn.oid = pkc.relnamespace AND pkc.oid = pka.attrelid AND pka.attnum = con.confkey[pos.n] AND con.confrelid = pkc.oid AND fkn.oid = fkc.relnamespace AND fkc.oid = fka.attrelid AND fka.attnum = con.conkey[pos.n] AND con.conrelid = fkc.oid AND con.contype = 'f' AND con.oid = dep.objid AND pkic.oid = dep.refobjid AND pkic.relkind = 'i' AND dep.classid = 'pg_constraint'::regclass::oid AND dep.refclassid = 'pg_class'::regclass::oid AND fkn.nspname = 'account' AND fkc.relname = 'user_table' ORDER BY pkn.nspname,pkc.relname,pos.n |] {- viewList = [str| SELECT n.nspname AS "Schema", c.relname AS "Name", -- d.description AS "Comment", pg_get_viewdef(c.oid) AS definition, relacl AS "ACLs" FROM pg_catalog.pg_namespace n JOIN pg_catalog.pg_class c ON c.relnamespace = n.oid LEFT JOIN pg_catalog.pg_description d ON (c.oid = d.objoid AND d.objsubid = 0) WHERE n.nspname IN (select * from unnest(current_schemas(false))) AND c.relkind = 'v' AND n.nspname !~ '^pg_' AND n.nspname <> 'information_schema' ORDER BY 1, 2 |] viewColumns = [str| SELECT n.nspname as "Schema",c.relname AS "View",a.attname AS "Column",a.atttypid AS "Type", a.attnotnull OR (t.typtype = 'd' AND t.typnotnull) AS attnotnull, a.atttypmod,a.attlen,row_number() OVER (PARTITION BY a.attrelid ORDER BY a.attnum) AS attnum, pg_catalog.pg_get_expr(def.adbin, def.adrelid) AS adsrc, dsc.description,t.typbasetype,t.typtype FROM pg_catalog.pg_namespace n JOIN pg_catalog.pg_class c ON (c.relnamespace = n.oid) JOIN pg_catalog.pg_attribute a ON (a.attrelid=c.oid) JOIN pg_catalog.pg_type t ON (a.atttypid = t.oid) LEFT JOIN pg_catalog.pg_attrdef def ON (a.attrelid=def.adrelid AND a.attnum = def.adnum) LEFT JOIN pg_catalog.pg_description dsc ON (c.oid=dsc.objoid AND a.attnum = dsc.objsubid) LEFT JOIN pg_catalog.pg_class dc ON (dc.oid=dsc.classoid AND dc.relname='pg_class') LEFT JOIN pg_catalog.pg_namespace dn ON 
(dc.relnamespace=dn.oid AND dn.nspname='pg_catalog') WHERE a.attnum > 0 AND NOT a.attisdropped AND n.nspname IN (select * from unnest(current_schemas(false))) ORDER BY 1,2,3 |] viewTriggers = [str| SELECT n.nspname as "Schema", c.relname AS "View", t.tgname AS "Name", t.tgenabled = 'O' AS enabled, -- pg_get_triggerdef(trig.oid) as source concat (np.nspname, '.', p.proname) AS procedure FROM pg_catalog.pg_trigger t JOIN pg_catalog.pg_class c ON t.tgrelid = c.oid JOIN pg_catalog.pg_namespace n ON c.relnamespace = n.oid JOIN pg_catalog.pg_proc p ON t.tgfoid = p.oid JOIN pg_catalog.pg_namespace np ON p.pronamespace = np.oid WHERE t.tgconstraint = 0 AND n.nspname IN (select * from unnest(current_schemas(false))) ORDER BY 1,2,3 |] viewRules = [str| SELECT n.nspname as "Schema", c.relname AS "View", r.rulename AS "Name", pg_get_ruledef(r.oid) AS definition FROM pg_rewrite r JOIN pg_class c ON c.oid = r.ev_class JOIN pg_namespace n ON c.relnamespace = n.oid WHERE n.nspname IN (select * from unnest(current_schemas(false))) AND c.relkind = 'v' ORDER BY 1,2,3 |] -} data DbView = DbView { schema :: String, name :: String, definition :: String, acl :: [Acl] } deriving(Show) mkdbv (a:b:c:d:_) = DbView a b c (cvtacl d) instance Show (Comparison DbView) where show (Equal x) = concat [sok, showView x, treset] show (LeftOnly a) = concat [azure, [charLeftArrow]," ", showView a, treset] show (RightOnly a) = concat [peach, [charRightArrow], " ", showView a, treset] show (Unequal a b) = concat [nok, showView a, treset, -- if (acl a /= acl b) then concat[ setAttr bold, "\n acls: " , treset, map show $ dbCompare a b] else "", showAclDiffs (acl a) (acl b), if (compareIgnoringWhiteSpace (definition a) (definition b)) then "" else concat [setAttr bold,"\n definition differences: \n", treset, concatMap show $ diff (definition a) (definition b)] ] instance Comparable DbView where objCmp a b = if (acl a == acl b && compareIgnoringWhiteSpace (definition a) (definition b)) then Equal a else Unequal a b compareViews (get1, get2) = do aa <- get1 viewList -- aac <- get1 viewColumns -- aat <- get1 viewTriggers -- aar <- get1 viewRules bb <- get2 viewList -- bbc <- get2 viewColumns -- bbt <- get2 viewTriggers -- bbr <- get2 viewRules let a = map (mkdbv . (map gs)) aa let b = map (mkdbv . (map gs)) bb let cc = dbCompare a b let cnt = dcount iseq cc putStr $ if (fst cnt > 0) then sok ++ (show $ fst cnt) ++ " matches, " else "" putStrLn $ if (snd cnt > 0) then concat [setColor dullRed,show $ snd cnt," differences"] else concat [sok,"no differences"] putStr $ treset return $ filter (not . iseq) cc showView x = (schema x) ++ "." ++ (name x) instance Ord DbView where compare a b = let hd p = map ($ p) [schema, name] in compare (hd a) (hd b) instance Eq DbView where (==) a b = EQ == compare a b
repo_name: sourcewave/pg-schema-diff | path: Table.hs | license: unlicense | size: 10,334
n_ast_errors: 0 | ast_max_depth: 14 | n_whitespaces: 1,761 | n_ast_nodes: 854 | n_ast_terminals: 461 | n_ast_nonterminals: 393 | loc: 45 | cycloplexity: 3
{-# LANGUAGE DeriveGeneric     #-}
{-# LANGUAGE OverloadedStrings #-}

module Options where

import Options.Generic
import Data.Maybe (fromMaybe)
import Type (Config(..), Role(..), defaultConfig)

data TunnelOptions = TunnelOptions
  { role              :: Maybe Text
  , localHost         :: Maybe Text
  , localPort         :: Maybe Integer
  , remoteHost        :: Text
  , remotePort        :: Integer
  , forwardHost       :: Maybe Text
  , forwardPort       :: Maybe Integer
  , randomnessInBytes :: Maybe Integer
  , timeoutInSeconds  :: Maybe Integer
  , mtu               :: Maybe Integer
  , debug             :: Bool
  } deriving (Generic, Show, Eq)

instance ParseRecord TunnelOptions

toConfig :: TunnelOptions -> Config
toConfig o =
  let c = defaultConfig
      _role = case role o of
        Just "remote" -> Remote
        Just "local"  -> Local
        Just "both"   -> Both
        _             -> Local
  in Config
    { _localHost         = fromMaybe (_localHost c) (localHost o)
    , _localPort         = fromMaybe (_localPort c) (localPort o)
    , _remoteHost        = remoteHost o
    , _remotePort        = remotePort o
    , _forwardHost       = fromMaybe (_forwardHost c) (forwardHost o)
    , _forwardPort       = fromMaybe (_forwardPort c) (forwardPort o)
    , _role              = _role
    , _randomnessInBytes = fromMaybe (_randomnessInBytes c) (randomnessInBytes o)
    , _timeoutInSeconds  = fromMaybe (_timeoutInSeconds c) (timeoutInSeconds o)
    , _mtu               = fromMaybe (_mtu c) (mtu o)
    , _debug             = debug o
    }
repo_name: nfjinjing/neko-obfs | path: src/Options.hs | license: apache-2.0 | size: 1,427
n_ast_errors: 0 | ast_max_depth: 12 | n_whitespaces: 347 | n_ast_nodes: 433 | n_ast_terminals: 236 | n_ast_nonterminals: 197 | loc: 41 | cycloplexity: 4
module Apply where

apply :: (Eq a, Num a) => a -> (b -> b) -> b -> b
apply 0 f = id
apply n f = f . apply (n - 1) f
repo_name: OCExercise/haskellbook-solutions | path: chapters/chapter08/scratch/apply.hs | license: bsd-2-clause | size: 118
n_ast_errors: 0 | ast_max_depth: 9 | n_whitespaces: 37 | n_ast_nodes: 79 | n_ast_terminals: 42 | n_ast_nonterminals: 37 | loc: 4 | cycloplexity: 1
{- |
   Module Mewa.Protocol
   Copyright : Copyright (C) 2014 Krzysztof Langner
   License : BSD3

   Maintainer : Krzysztof Langner <klangner@gmail.com>
   Stability : alpha
   Portability : portable

   Mewa protocol implementation
-}
module Mewa.Protocol where

type Device = String
type Channel = String
type Password = String

data Packet = Connect Channel Device Password [String]
            | Disconnect
            | Connected
repo_name: AnthillTech/mewa-sim | path: src/Mewa/Protocol.hs | license: bsd-2-clause | size: 415
n_ast_errors: 0 | ast_max_depth: 7 | n_whitespaces: 83 | n_ast_nodes: 49 | n_ast_terminals: 31 | n_ast_nonterminals: 18 | loc: 7 | cycloplexity: 0
-- | Wrap a function's return value with another function. module Data.Function.Wrap where -- | Wrap the result of a function applied to 1 argument. wrap1 :: (r -> s) -- ^ The final result wrapper. -> (a1 -> r) -- ^ The function to wrap. -> (a1 -> s) -- ^ The wrapped up function. wrap1 = (.) -- | Wrap the result of a function applied to 2 arguments. wrap2 :: (r -> s) -- ^ The final result wrapper. -> (a1 -> a2 -> r) -- ^ The function to wrap. -> (a1 -> a2 -> s) -- ^ The wrapped up function. wrap2 s r = \a1 a2 -> s (r a1 a2) -- | Wrap the result of a function applied to 3 arguments. wrap3 :: (r -> s) -- ^ The final result wrapper. -> (a1 -> a2 -> a3 -> r) -- ^ The function to wrap. -> (a1 -> a2 -> a3 -> s) -- ^ The wrapped up function. wrap3 s r = \a1 a2 a3 -> s (r a1 a2 a3) -- | Wrap the result of a function applied to 4 arguments. wrap4 :: (r -> s) -- ^ The final result wrapper. -> (a1 -> a2 -> a3 -> a4 -> r) -- ^ The function to wrap. -> (a1 -> a2 -> a3 -> a4 -> s) -- ^ The wrapped up function. wrap4 s r = \a1 a2 a3 a4 -> s (r a1 a2 a3 a4) -- | Wrap the result of a function applied to 5 arguments. wrap5 :: (r -> s) -- ^ The final result wrapper. -> (a1 -> a2 -> a3 -> a4 -> a5 -> r) -- ^ The function to wrap. -> (a1 -> a2 -> a3 -> a4 -> a5 -> s) -- ^ The wrapped up function. wrap5 s r = \a1 a2 a3 a4 a5 -> s (r a1 a2 a3 a4 a5) -- | Wrap the result of a function applied to 6 arguments. wrap6 :: (r -> s) -- ^ The final result wrapper. -> (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> r) -- ^ The function to wrap. -> (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> s) -- ^ The wrapped up function. wrap6 s r = \a1 a2 a3 a4 a5 a6 -> s (r a1 a2 a3 a4 a5 a6) -- | Wrap the result of a function applied to 7 arguments. wrap7 :: (r -> s) -- ^ The final result wrapper. -> (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> r) -- ^ The function to wrap. -> (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> s) -- ^ The wrapped up function. wrap7 s r = \a1 a2 a3 a4 a5 a6 a7 -> s (r a1 a2 a3 a4 a5 a6 a7) -- | Wrap the result of a function applied to 8 arguments. wrap8 :: (r -> s) -- ^ The final result wrapper. -> (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 -> r) -- ^ The function to wrap. -> (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 -> s) -- ^ The wrapped up function. wrap8 s r = \a1 a2 a3 a4 a5 a6 a7 a8 -> s (r a1 a2 a3 a4 a5 a6 a7 a8) -- | Wrap the result of a function applied to 9 arguments. wrap9 :: (r -> s) -- ^ The final result wrapper. -> (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 -> a9 -> r) -- ^ The function to wrap. -> (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 -> a9 -> s) -- ^ The wrapped up function. wrap9 s r = \a1 a2 a3 a4 a5 a6 a7 a8 a9 -> s (r a1 a2 a3 a4 a5 a6 a7 a8 a9) -- | Wrap the result of a function applied to 10 arguments. wrap10 :: (r -> s) -- ^ The final result wrapper. -> (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 -> a9 -> a10 -> r) -- ^ The function to wrap. -> (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 -> a9 -> a10 -> s) -- ^ The wrapped up function. wrap10 s r = \a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 -> s (r a1 a2 a3 a4 a5 a6 a7 a8 a9 a10) -- | Wrap the result of a function applied to 11 arguments. wrap11 :: (r -> s) -- ^ The final result wrapper. -> (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 -> a9 -> a10 -> a11 -> r) -- ^ The function to wrap. -> (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 -> a9 -> a10 -> a11 -> s) -- ^ The wrapped up function. wrap11 s r = \a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 -> s (r a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11) -- | Wrap the result of a function applied to 12 arguments. 
wrap12 :: (r -> s) -- ^ The final result wrapper. -> (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 -> a9 -> a10 -> a11 -> a12 -> r) -- ^ The function to wrap. -> (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 -> a9 -> a10 -> a11 -> a12 -> s) -- ^ The wrapped up function. wrap12 s r = \a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 -> s (r a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12)
repo_name: chrisdone/wrap | path: src/Data/Function/Wrap.hs | license: bsd-3-clause | size: 4,488
n_ast_errors: 0 | ast_max_depth: 19 | n_whitespaces: 1,592 | n_ast_nodes: 1,527 | n_ast_terminals: 815 | n_ast_nonterminals: 712 | loc: 83 | cycloplexity: 1
{-# LANGUAGE OverloadedStrings #-} module WildBind.ExecSpec (main, spec) where import Control.Applicative ((<$>)) import Control.Concurrent (forkIOWithUnmask, killThread, threadDelay) import Control.Concurrent.STM (atomically, TChan, readTChan, tryReadTChan, writeTChan, newTChanIO) import Control.Exception (bracket, throw, fromException) import Control.Monad.IO.Class (MonadIO, liftIO) import qualified Control.Monad.Trans.State as State import Data.Monoid ((<>)) import System.IO.Error (userError) import Test.Hspec import qualified WildBind.Binding as WBB import qualified WildBind.Exec as WBE import qualified WildBind.FrontEnd as WBF import WildBind.ForTest (SampleInput(..), SampleState(..), SampleBackState(..)) newtype EventChan s i = EventChan { unEventChan :: TChan (WBF.FrontEvent s i) } data GrabHistory i = GSet i | GUnset i deriving (Show, Eq, Ord) newtype GrabChan i = GrabChan { unGrabChan :: TChan (GrabHistory i) } frontEnd :: EventChan s i -> GrabChan i -> WBF.FrontEnd s i frontEnd echan gchan = WBF.FrontEnd { WBF.frontDefaultDescription = const "", WBF.frontSetGrab = \i -> atomically $ writeTChan (unGrabChan gchan) (GSet i), WBF.frontUnsetGrab = \i -> atomically $ writeTChan (unGrabChan gchan) (GUnset i), WBF.frontNextEvent = atomically $ readTChan $ unEventChan echan } _write :: MonadIO m => TChan a -> a -> m () _write tc = liftIO . atomically . writeTChan tc outChanOn :: MonadIO m => TChan a -> i -> a -> (i, WBB.Action m ()) outChanOn out_chan input out_elem = (input, WBB.Action "" (out_chan `_write` out_elem)) outChanOnS :: TChan a -> i -> a -> bs -> (i, WBB.Action (State.StateT bs IO) ()) outChanOnS out_chan input out_elem next_state = (,) input $ WBB.Action "" $ do liftIO $ atomically $ writeTChan out_chan out_elem State.put next_state withWildBind' :: (WBF.FrontEnd s i -> IO ()) -> (EventChan s i -> GrabChan i -> IO ()) -> IO () withWildBind' exec action = do echan <- EventChan <$> newTChanIO gchan <- GrabChan <$> newTChanIO let spawnWildBind = forkIOWithUnmask $ \umask -> umask $ exec (frontEnd echan gchan) bracket spawnWildBind killThread (\_ -> action echan gchan) withWildBind :: Ord i => WBB.Binding s i -> (EventChan s i -> GrabChan i -> IO ()) -> IO () withWildBind binding action = withWildBind' (WBE.wildBind binding) action emitEvent :: TChan (WBF.FrontEvent s i) -> WBF.FrontEvent s i -> IO () emitEvent chan event = atomically $ writeTChan chan event shouldProduce :: (Show a, Eq a) => TChan a -> a -> IO () shouldProduce chan expectation = (atomically $ readTChan chan) `shouldReturn` expectation readAll :: TChan a -> IO [a] readAll chan = atomically $ readAll' [] where readAll' acc = do mret <- tryReadTChan chan case mret of Nothing -> return (reverse acc) Just ret -> readAll' (ret : acc) shouldNowMatch :: (Show a, Eq a) => TChan a -> [a] -> IO () shouldNowMatch chan expectation = readAll chan >>= (`shouldMatchList` expectation) changeAndInput :: s -> i -> [WBF.FrontEvent s i] changeAndInput s i = [WBF.FEChange s, WBF.FEInput i] main :: IO () main = hspec spec spec :: Spec spec = do wildBindSpec optionSpec wildBindSpec :: Spec wildBindSpec = do describe "wildBind" $ do it "should enable input grabs" $ do ochan <- newTChanIO let b = WBB.binding [outChanOn ochan SIa 'A', outChanOn ochan SIb 'B'] withWildBind b $ \(EventChan echan) (GrabChan gchan) -> do emitEvent echan $ WBF.FEChange $ SS "" emitEvent echan $ WBF.FEInput SIa ochan `shouldProduce` 'A' ghist <- readAll gchan ghist `shouldMatchList` [GSet SIa, GSet SIb] it "should enable/disable grabs when the front-end state 
changes" $ do ochan <- newTChanIO let b = (WBB.whenFront (\(SS s) -> s == "A") $ WBB.binding [outChanOn ochan SIa 'A']) <> (WBB.whenFront (\(SS s) -> s == "B") $ WBB.binding [outChanOn ochan SIb 'B']) <> (WBB.whenFront (\(SS s) -> s == "C") $ WBB.binding [outChanOn ochan SIc 'C']) withWildBind b $ \(EventChan echan) (GrabChan gchan) -> do mapM_ (emitEvent echan) $ changeAndInput (SS "A") SIa ochan `shouldProduce` 'A' gchan `shouldNowMatch` [GSet SIa] mapM_ (emitEvent echan) $ changeAndInput (SS "B") SIb ochan `shouldProduce` 'B' gchan `shouldNowMatch` [GUnset SIa, GSet SIb] mapM_ (emitEvent echan) $ changeAndInput (SS "C") SIc ochan `shouldProduce` 'C' gchan `shouldNowMatch` [GUnset SIb, GSet SIc] emitEvent echan $ WBF.FEChange (SS "") threadDelay 10000 gchan `shouldNowMatch` [GUnset SIc] it "should enable/disable grabs when the back-end state changes" $ do ochan <- newTChanIO let b' = WBB.ifBack (== (SB 0)) (WBB.binding' [outChanOnS ochan SIa 'A' (SB 1)]) $ WBB.whenBack (== (SB 1)) (WBB.binding' [outChanOnS ochan SIb 'B' (SB 0)]) b = WBB.startFrom (SB 0) b' withWildBind b $ \(EventChan echan) (GrabChan gchan) -> do emitEvent echan $ WBF.FEChange (SS "") threadDelay 10000 gchan `shouldNowMatch` [GSet SIa] emitEvent echan $ WBF.FEInput SIa ochan `shouldProduce` 'A' threadDelay 10000 gchan `shouldNowMatch` [GUnset SIa, GSet SIb] emitEvent echan $ WBF.FEInput SIb ochan `shouldProduce` 'B' threadDelay 10000 gchan `shouldNowMatch` [GUnset SIb, GSet SIa] it "should crush exceptions from bound actions" $ do ochan <- newTChanIO let b = WBB.binds $ do WBB.on SIa `WBB.run` (fail "ERROR!!") WBB.on SIb `WBB.run` (atomically $ writeTChan ochan 'b') withWildBind b $ \(EventChan echan) _ -> do emitEvent echan $ WBF.FEChange (SS "") emitEvent echan $ WBF.FEInput SIa emitEvent echan $ WBF.FEInput SIb ochan `shouldProduce` 'b' it "should keep the current back-state when exception is thrown from bound actions" $ do ochan <- newTChanIO let killing_b = WBB.binds' $ WBB.on SIa `WBB.run` do State.put (SB 0) liftIO $ fail "ERROR!" 
b = WBB.startFrom (SB 0) $ (killing_b <>) $ WBB.ifBack (== SB 0) ( WBB.binds' $ WBB.on SIb `WBB.run` do liftIO $ atomically $ writeTChan ochan 'b' State.put (SB 1) ) ( WBB.binds' $ WBB.on SIc `WBB.run` do liftIO $ atomically $ writeTChan ochan 'c' State.put (SB 0) ) withWildBind b $ \(EventChan echan) (GrabChan gchan) -> do emitEvent echan $ WBF.FEChange (SS "") emitEvent echan $ WBF.FEInput SIa emitEvent echan $ WBF.FEInput SIb ochan `shouldProduce` 'b' gchan `shouldNowMatch` [GSet SIa, GSet SIb, GUnset SIb, GSet SIc] emitEvent echan $ WBF.FEInput SIa emitEvent echan $ WBF.FEInput SIc ochan `shouldProduce` 'c' gchan `shouldNowMatch` [GUnset SIc, GSet SIb] shouldNextMatch :: (Show a, Eq a) => TChan [a] -> [a] -> IO () shouldNextMatch tc expected = do got <- atomically $ readTChan tc got `shouldMatchList` expected optionSpec :: Spec optionSpec = do describe "optBindingHook" $ do it "hooks change of binding because front-end state changes" $ do hook_chan <- newTChanIO out_chan <- newTChanIO let opt = WBE.defOption { WBE.optBindingHook = _write hook_chan } b = WBB.whenFront (== SS "hoge") $ WBB.binding [ (SIa, WBB.Action "a button" (out_chan `_write` 'a')), (SIb, WBB.Action "b button" (out_chan `_write` 'b')) ] withWildBind' (WBE.wildBind' opt b) $ \(EventChan echan) (GrabChan gchan) -> do emitEvent echan $ WBF.FEChange (SS "hoge") hook_chan `shouldNextMatch` [(SIa, "a button"), (SIb, "b button")] emitEvent echan $ WBF.FEInput SIa hook_chan `shouldNextMatch` [(SIa, "a button"), (SIb, "b button")] out_chan `shouldProduce` 'a' gchan `shouldNowMatch` [GSet SIa, GSet SIb] emitEvent echan $ WBF.FEChange (SS "") hook_chan `shouldNextMatch` [] gchan `shouldNowMatch` [GUnset SIa, GUnset SIb] it "hooks change of binding because back-end state changes" $ do hook_chan <- newTChanIO out_chan <- newTChanIO let opt = WBE.defOption { WBE.optBindingHook = _write hook_chan } b = WBB.startFrom (SB 0) $ WBB.ifBack (== (SB 0)) (WBB.binding' [(SIa, WBB.Action "a button" (out_chan `_write` 'a' >> State.put (SB 1)))]) $ WBB.whenBack (== (SB 1)) ( WBB.binding' [(SIa, WBB.Action "A BUTTON" (out_chan `_write` 'A' >> State.put (SB 0))), (SIc, WBB.Action "c button" (out_chan `_write` 'c'))] ) withWildBind' (WBE.wildBind' opt b) $ \(EventChan echan) (GrabChan gchan) -> do emitEvent echan $ WBF.FEChange (SS "") hook_chan `shouldNextMatch` [(SIa, "a button")] gchan `shouldNowMatch` [GSet SIa] emitEvent echan $ WBF.FEInput SIa out_chan `shouldProduce` 'a' hook_chan `shouldNextMatch` [(SIa, "A BUTTON"), (SIc, "c button")] gchan `shouldNowMatch` [GSet SIc] emitEvent echan $ WBF.FEInput SIc out_chan `shouldProduce` 'c' hook_chan `shouldNextMatch` [(SIa, "A BUTTON"), (SIc, "c button")] gchan `shouldNowMatch` [] emitEvent echan $ WBF.FEInput SIa out_chan `shouldProduce` 'A' hook_chan `shouldNextMatch` [(SIa, "a button")] gchan `shouldNowMatch` [GUnset SIc] describe "optCatch" $ do it "receives front-state, input and exception" $ do hook_chan <- newTChanIO let catcher fs input err = atomically $ writeTChan hook_chan (fs, input, err) opt = WBE.defOption { WBE.optCatch = catcher } b = WBB.binds $ WBB.on SIa `WBB.run` (throw $ userError "BOOM!") withWildBind' (WBE.wildBind' opt b) $ \(EventChan echan) _ -> do emitEvent echan $ WBF.FEChange (SS "front state") emitEvent echan $ WBF.FEInput SIa (got_state, got_input, got_exception) <- atomically $ readTChan hook_chan got_state `shouldBe` SS "front state" got_input `shouldBe` SIa fromException got_exception `shouldBe` Just (userError "BOOM!")
repo_name: debug-ito/wild-bind | path: wild-bind/test/WildBind/ExecSpec.hs | license: bsd-3-clause | size: 10,527
n_ast_errors: 0 | ast_max_depth: 28 | n_whitespaces: 2,782 | n_ast_nodes: 3,860 | n_ast_terminals: 1,948 | n_ast_nonterminals: 1,912 | loc: 206 | cycloplexity: 2
{-# LANGUAGE QuasiQuotes #-}

import LiquidHaskell
import Language.Haskell.Liquid.Prelude

[lq| data List [llen] a <p :: x0:a -> x1:a -> Prop>
      = Nil | Cons (h :: a) (t :: List <p> (a <p h>)) |]

[lq| measure llen :: (List a) -> Int
     llen(Nil) = 0
     llen(Cons x xs) = 1 + (llen xs) |]

[lq| invariant {v:(List a) | ((llen v) >= 0)} |]

data List a = Nil | Cons a (List a)

make2d :: a -> Int -> Int -> List ([a])
make2d x n m = cloneL (clone x n) m

[lq| invariant {v:Int | v >= 0} |]

clone :: a -> Int -> [a]
clone x n
  | n == 0    = []
  | otherwise = x : (clone x (n-1))

cloneL :: a -> Int -> List a
cloneL x n
  | n == 0    = Nil
  | otherwise = Cons x (cloneL x (n-1))

-- check []       = [liquidAssertB True]
-- check (xs:xss) = let n = length xs in map (\xs' -> liquidAssertB (length xs' == n)) xss

chk :: List [a] -> Bool
chk Nil           = liquidAssertB True
chk (Cons xs xss) = case xss of
                      (Cons xs1 xss1) -> let n = length xs
                                         in liquidAssertB (length xs1 == n) && chk xss
                      Nil             -> liquidAssertB True

fooL  = Cons [1, 1, 3] (Cons [2, 2, 5] Nil)

fooL1 = make2d 0 n m
  where n = choose 0
        m = choose 1

propL = chk fooL1
prop  = chk fooL
repo_name: spinda/liquidhaskell | path: tests/gsoc15/unknown/pos/ListLen-LType.hs | license: bsd-3-clause | size: 1,182
n_ast_errors: 0 | ast_max_depth: 14 | n_whitespaces: 334 | n_ast_nodes: 430 | n_ast_terminals: 225 | n_ast_nonterminals: 205 | loc: 34 | cycloplexity: 2
module TestReadDirectoryInfo (tests) where

import DirectoryInfo
import Fixtures
import System.Directory
import System.FilePath
import Test.Hspec.HUnit()
import Test.Hspec.Monadic
import Test.HUnit

tests = describe "read directory info" $ do

  it "can read directory info from file system" $ withTemporaryDirectory $ \tmpDir -> do
    createEmptyFile $ tmpDir </> "a-file.png"
    createDirectory $ tmpDir </> "b-dir"
    createEmptyFile $ tmpDir </> "b-dir" </> "b-file.png"
    createDirectory $ tmpDir </> "b-dir" </> "c-dir"
    info <- getDirectoryInfoRecursive tmpDir
    info @?= [ DirectoryInfo tmpDir ["a-file.png"] Nothing
             , DirectoryInfo (tmpDir </> "b-dir") ["b-file.png"] Nothing
             , DirectoryInfo (tmpDir </> "b-dir" </> "c-dir") [] Nothing
             ]

  it "pupulates meta" $ withTemporaryDirectory $ \tmpDir -> do
    writeFile (tmpDir </> "meta.txt") "meta content"
    info <- getDirectoryInfoRecursive tmpDir
    info @?= [ DirectoryInfo tmpDir ["meta.txt"] (Just "meta content") ]
rickardlindberg/orgapp
tests/TestReadDirectoryInfo.hs
bsd-3-clause
1,150
0
17
323
271
138
133
22
1
{-# LANGUAGE EmptyDataDecls, TypeSynonymInstances #-}
{-# OPTIONS_GHC -fcontext-stack=49 #-}

module Games.Chaos2010.Database.Monster_spells where

import Games.Chaos2010.Database.Fields
import Database.HaskellDB.DBLayout

type Monster_spells =
    Record (HCons (LVPair Spell_name (Expr (Maybe String)))
            (HCons (LVPair Base_chance (Expr (Maybe Int)))
             (HCons (LVPair Alignment (Expr (Maybe Int)))
              (HCons (LVPair Spell_category (Expr (Maybe String)))
               (HCons (LVPair Description (Expr (Maybe String)))
                (HCons (LVPair Range (Expr (Maybe Int)))
                 (HCons (LVPair Numb (Expr (Maybe Int)))
                  (HCons (LVPair Valid_square_category (Expr (Maybe String)))
                   (HCons (LVPair Ptype (Expr (Maybe String))) HNil)))))))))

monster_spells :: Table Monster_spells
monster_spells = baseTable "monster_spells"
JakeWheat/Chaos-2010
Games/Chaos2010/Database/Monster_spells.hs
bsd-3-clause
954
0
29
263
300
156
144
18
1
{-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE GADTs #-} module Language.GroundExpr(GroundExpr(..),Floor(..),floor,Two(..)) where import Prelude hiding (floor) import Data.Sequence (Seq,(<|),(|>)) import qualified Data.Sequence as S import Data.Text (Text) import qualified Data.Text as T import Language.Constant import Language.Expression (Expr) import qualified Language.Expression as Full import Language.Haskell.TH.Syntax data Two = One | Two deriving (Eq,Ord) instance Show Two where show One = "1" show Two = "2" data GroundExpr a where Var :: Seq Two -> GroundExpr a Const :: Constant a => a -> GroundExpr a If :: GroundExpr Bool -> GroundExpr a -> GroundExpr a -> GroundExpr a App :: GroundExpr (a -> b) -> GroundExpr a -> GroundExpr b Fun :: Text -> Q (TExp (a -> b)) -> GroundExpr (a -> b) -- deriving instance Show (GroundExpr a) instance Show (GroundExpr a) where showsPrec d e = case e of Var v -> showsPrec d v Const a -> showsPrec d a If e1 e2 e3 -> showParen (d > appPrec) $ showString "if " . showsPrec (appPrec+1) e1 . showString " " . showsPrec (appPrec+1) e2 . showString " " . showsPrec (appPrec+1) e3 App e1 e2 -> showParen (d > appPrec) $ showsPrec (appPrec+1) e1 . showString " " . showsPrec (appPrec+1) e2 Fun t _ -> showString (T.unpack t) where appPrec = 10 data Floor a where Inj :: Floor a -> Floor b -> Floor (a,b) Floor :: Seq Two -> GroundExpr a -> Floor a instance Show (Floor a) where showsPrec d e = case e of Inj e1 e2 -> showString "(" . showsPrec appPrec e1 . showString "," . showsPrec appPrec e2 . showString ")" Floor x e1 -> showParen (d > appPrec) $ showsPrec (appPrec+1) x . showString " <- " . showsPrec (appPrec+1) e1 where appPrec = 10 :: Int floor :: Expr a -> Floor a floor = go S.empty . Full.optimizeExpr where go :: Seq Two -> Expr a -> Floor a go to expr = case expr of Full.Inj e1 e2 -> Inj (go (to |> One) e1) (go (to |> Two) e2) e -> Floor to (lowerExpr e) lowerExpr :: Expr a -> GroundExpr a lowerExpr e0 = go S.empty e0 where go :: Seq Two -> Expr a -> GroundExpr a go addr expr = case expr of Full.Proj1 e1 -> coerceLeft $ go (One <| addr) e1 Full.Proj2 e1 -> coerceRight $ go (Two <| addr) e1 Full.Var -> Var addr Full.Const c -> Const c Full.App e1 e2 -> App (go addr e1) (go addr e2) Full.Fun n f -> Fun n f Full.If e1 e2 e3 -> If (go addr e1) (go addr e2) (go addr e3) Full.Inj {} -> error $ "did not expect injection at this point: " ++ show expr ++ " in " ++ show e0 coerceLeft :: GroundExpr (a,b) -> GroundExpr a coerceLeft (Var v) = Var v coerceLeft e = error $ "cannot coerce function that produces a tuple" ++ show e coerceRight :: GroundExpr (a,b) -> GroundExpr b coerceRight (Var v) = Var v coerceRight e = error $ "cannot coerce function that produces a tuple" ++ show e
svenkeidel/hsynth
src/Language/GroundExpr.hs
bsd-3-clause
3,095
0
17
875
1,264
636
628
81
10
module Main where

import Prelude hiding (catch)
import Control.Exception (catch)
import Control.Monad
    ( forM_
    , when
    )
import Data.Maybe
    ( listToMaybe
    )
import System.Environment
    ( getArgs
    )
import System.Exit
    ( exitFailure
    )

import Ak.Commands
    ( Command(..)
    , CommandError(..)
    )
import Ak.Db
    ( discoverDbSpec
    )
import qualified Ak.Commands as Commands

usage :: [Command] -> IO ()
usage commands = do
  putStrLn "Usage: ak <command> [args]"
  forM_ commands $ \cmd ->
      putStrLn $ concat [ "  "
                        , cmdUsage cmd
                        , " - "
                        , cmdDescription cmd
                        ]

lookupCommand :: String -> [Command] -> Maybe Command
lookupCommand name commands =
    listToMaybe $ filter ((name ==) . cmdName) commands

main :: IO ()
main = do
  args <- getArgs
  spec <- discoverDbSpec

  let commands = Commands.allCommands
      abort = usage commands >> exitFailure

      onCommandError :: Command -> CommandError -> IO ()
      onCommandError cmd (CommandError msg) = do
        putStrLn $ "Error running command '" ++ cmdName cmd ++ "': " ++ msg

  when (null args) abort

  let (commandName:commandArgs) = args

  case lookupCommand commandName commands of
    Nothing  -> abort
    Just cmd -> cmdHandler cmd spec commandArgs
                `catch` (onCommandError cmd)
elliottt/ak
src/Main.hs
bsd-3-clause
1,397
0
15
418
410
216
194
44
2
module LogMsg (initLog, FacilityString, LevelString, LogSystem(..), errorMsg, warnMsg, noticeMsg, infoMsg, debugMsg) where

import System.Log.Logger
import System.Log.Handler.Syslog

errorMsg :: String -> IO ()
errorMsg = errorM rootLoggerName

warnMsg :: String -> IO ()
warnMsg = warningM rootLoggerName

noticeMsg :: String -> IO ()
noticeMsg = noticeM rootLoggerName

infoMsg :: String -> IO ()
infoMsg = infoM rootLoggerName

debugMsg :: String -> IO ()
debugMsg = debugM rootLoggerName

type FacilityString = String
type LevelString    = String

data LogSystem = StdErr | SysLog

initLog :: String -> FacilityString -> LevelString -> LogSystem -> IO ()
initLog name fcl lvl SysLog = do
    let level    = toLevel lvl
        facility = toFacility fcl
    s <- openlog name [PID] facility level
    updateGlobalLogger rootLoggerName (setLevel level . setHandlers [s])
initLog _ _ lvl StdErr = do
    let level = toLevel lvl
    updateGlobalLogger rootLoggerName (setLevel level)

toLevel :: String -> Priority
toLevel str = maybe (error ("Unknown level " ++ show str)) id (lookup str levelDB)

toFacility :: String -> Facility
toFacility str = maybe (error ("Unknown facility " ++ show str)) id (lookup str facilityDB)

levelDB :: [(String, Priority)]
levelDB = [ ("debug",DEBUG)
          , ("info",INFO)
          , ("notice",NOTICE)
          , ("warning",WARNING)
          , ("error",ERROR)
          , ("critical",CRITICAL)
          , ("alert",ALERT)
          , ("emergency",EMERGENCY)
          ]

facilityDB :: [(String, Facility)]
facilityDB = [ ("kern",KERN)
             , ("use",USER)
             , ("mail",MAIL)
             , ("daemon",DAEMON)
             , ("auth",AUTH)
             , ("syslog",SYSLOG)
             , ("lpr",LPR)
             , ("news",NEWS)
             , ("uucp",UUCP)
             , ("cron",CRON)
             , ("authpriv",AUTHPRIV)
             , ("ftp",FTP)
             , ("local0",LOCAL0)
             , ("local1",LOCAL1)
             , ("local2",LOCAL2)
             , ("local3",LOCAL3)
             , ("local4",LOCAL4)
             , ("local5",LOCAL5)
             , ("local6",LOCAL6)
             , ("local7",LOCAL7)
             ]
mwotton/sofadb
LogMsg.hs
bsd-3-clause
2,026
0
11
476
711
410
301
70
1
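A minimal usage sketch for the LogMsg module above; it is not part of the original file. It assumes only the definitions shown: the level and facility strings come from levelDB and facilityDB, and the program name is an illustrative value.

module Main where

import LogMsg

main :: IO ()
main = do
  -- Log to stderr at "debug" level and above; the facility argument is
  -- only consulted for the SysLog backend, so "local0" is a placeholder here.
  initLog "example-app" "local0" "debug" StdErr
  infoMsg  "application started"
  warnMsg  "this is only a demonstration"
  debugMsg "verbose detail"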
module System.Build.Access.Nodeprecatedlist where

class Nodeprecatedlist r where
  setNodeprecatedlist :: r -> r
  unsetNodeprecatedlist :: r -> r
tonymorris/lastik
System/Build/Access/Nodeprecatedlist.hs
bsd-3-clause
169
0
7
43
35
20
15
8
0
--------------------------------------------------------------------------------
-- |
-- Module      :  Graphics.Rendering.OpenGL.Raw.ARB.QueryBufferObject
-- Copyright   :  (c) Sven Panne 2015
-- License     :  BSD3
--
-- Maintainer  :  Sven Panne <svenpanne@gmail.com>
-- Stability   :  stable
-- Portability :  portable
--
-- The <https://www.opengl.org/registry/specs/ARB/query_buffer_object.txt ARB_query_buffer_object> extension.
--
--------------------------------------------------------------------------------

module Graphics.Rendering.OpenGL.Raw.ARB.QueryBufferObject (
  -- * Enums
  gl_QUERY_BUFFER,
  gl_QUERY_BUFFER_BARRIER_BIT,
  gl_QUERY_BUFFER_BINDING,
  gl_QUERY_RESULT_NO_WAIT
) where

import Graphics.Rendering.OpenGL.Raw.Tokens
phaazon/OpenGLRaw
src/Graphics/Rendering/OpenGL/Raw/ARB/QueryBufferObject.hs
bsd-3-clause
751
0
4
87
46
37
9
6
0
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE TypeSynonymInstances #-} module DirectoryServer where import Control.Monad.Trans.Except import Control.Monad.Trans.Resource import Control.Monad.IO.Class import Data.Aeson import Data.Aeson.TH import Data.Bson.Generic import GHC.Generics import Network.Wai hiding(Response) import Network.Wai.Handler.Warp import Network.Wai.Logger import Servant import Servant.API import Servant.Client import System.IO import System.Directory import System.Environment (getArgs, getProgName, lookupEnv) import System.Log.Formatter import System.Log.Handler (setFormatter) import System.Log.Handler.Simple import System.Log.Handler.Syslog import System.Log.Logger import Data.Bson.Generic import qualified Data.List as DL import Data.Maybe (catMaybes) import Data.Text (pack, unpack) import Data.Time.Clock (UTCTime, getCurrentTime) import Data.Time.Format (defaultTimeLocale, formatTime) import Database.MongoDB import Control.Monad (when) import Network.HTTP.Client (newManager, defaultManagerSettings) --manager = newManager defaultManagerSettings data File = File { fileName :: FilePath, fileContent :: String } deriving (Eq, Show, Generic) instance ToJSON File instance FromJSON File data Response = Response{ response :: String } deriving (Eq, Show, Generic) instance ToJSON Response instance FromJSON Response data FileServer = FileServer{ id :: String, fsaddress :: String, fsport :: String } deriving (Eq, Show, Generic) instance ToJSON FileServer instance FromJSON FileServer instance ToBSON FileServer instance FromBSON FileServer data FileMapping = FileMapping{ fmfileName :: String, fmaddress :: String, fmport :: String } deriving (Eq, Show, Generic) instance ToJSON FileMapping instance FromJSON FileMapping instance ToBSON FileMapping instance FromBSON FileMapping type ApiHandler = ExceptT ServantErr IO serverport :: String serverport = "7008" serverhost :: String serverhost = "localhost" type DirectoryApi = "join" :> ReqBody '[JSON] FileServer :> Post '[JSON] Response :<|> "open" :> Capture "fileName" String :> Get '[JSON] File :<|> "close" :> ReqBody '[JSON] File :> Post '[JSON] Response type FileApi = "files" :> Get '[JSON] [FilePath] :<|> "download" :> Capture "fileName" String :> Get '[JSON] File :<|> "upload" :> ReqBody '[JSON] File :> Post '[JSON] Response -- :<|> fileApi :: Proxy FileApi fileApi = Proxy files:: ClientM [FilePath] download :: String -> ClientM File upload :: File -> ClientM Response files :<|> download :<|> upload = client fileApi getFilesQuery :: ClientM[FilePath] getFilesQuery = do get_files <- files return(get_files) downloadQuery :: String -> ClientM File downloadQuery fname = do get_download <- download (fname) return(get_download) directoryApi :: Proxy DirectoryApi directoryApi = Proxy server :: Server DirectoryApi server = fsJoin :<|> DirectoryServer.openFile :<|> closeFile directoryApp :: Application directoryApp = serve directoryApi server mkApp :: IO() mkApp = do run (read (serverport) ::Int) directoryApp storefs:: FileServer -> IO() storefs fs@(FileServer key _ _) = liftIO $ do --warnLog $ "Storing file under key " ++ key ++ "." 
withMongoDbConnection $ upsert (select ["id" =: key] "FILESERVER_RECORD") $ toBSON fs -- return True storefm :: String -> [FileMapping] -> String -> IO[FileMapping] storefm port a filename = do warnLog $ "Storing file under key " ++ filename ++ "." let serverNumber = port `mod` 8080 let serverName = "Server" ++ show serverNumber let fileMapping = (FileMapping filename serverName (show port)) withMongoDbConnection $ upsert (select ["id" =: filename] "FILEMAPPING_RECORD") $ toBSON fileMapping return $ (FileMapping filename serverName (show port)):a -- return True getStoreFm :: FileServer -> IO() getStoreFm fs = liftIO $ do manager <- newManager defaultManagerSettings res <- runClientM getFilesQuery (ClientEnv manager (BaseUrl Http (fsaddress fs) (read(fsport fs)) "")) case res of Left err -> putStrLn $ "Error: " ++ show err Right response' -> do blah' <- mapM (storefm (fsport fs) []) response' return () -- return True fsJoin :: FileServer -> ApiHandler Response fsJoin fs = liftIO $ do storefs fs getStoreFm fs return (Response "Success") searchFileMappings :: String -> IO(FileMapping) searchFileMappings key = do warnLog $ "Searching for value for key: " ++ key filemappings <- withMongoDbConnection $ do docs <- find (select ["fmfileName" =: key] "FILEMAPPING_RECORD") >>= drainCursor return $ catMaybes $ DL.map (\ b -> fromBSON b :: Maybe FileMapping) docs return $ head $ filemappings openFileQuery :: String -> FileMapping -> IO(File) openFileQuery key fm = do manager <- newManager defaultManagerSettings res <- runClientM (downloadQuery key) (ClientEnv manager (BaseUrl Http (fmaddress fm) (read(fmport fm)) "")) case res of Left err -> return (File "" "") Right response -> return (response) openFile :: String -> ApiHandler File openFile key = liftIO $ do fm <- searchFileMappings key file <- openFileQuery key fm return file -- | Logging stuff iso8601 :: UTCTime -> String iso8601 = formatTime defaultTimeLocale "%FT%T%q%z" -- global loggin functions debugLog, warnLog, errorLog :: String -> IO () debugLog = doLog debugM warnLog = doLog warningM errorLog = doLog errorM noticeLog = doLog noticeM doLog f s = getProgName >>= \ p -> do t <- getCurrentTime f p $ (iso8601 t) ++ " " ++ s withLogging act = withStdoutLogger $ \aplogger -> do lname <- getProgName llevel <- logLevel updateGlobalLogger lname (setLevel $ case llevel of "WARNING" -> WARNING "ERROR" -> ERROR _ -> DEBUG) act aplogger -- | Mongodb helpers... -- | helper to open connection to mongo database and run action -- generally run as follows: -- withMongoDbConnection $ do ... -- withMongoDbConnection :: Action IO a -> IO a withMongoDbConnection act = do ip <- mongoDbIp port <- mongoDbPort database <- mongoDbDatabase pipe <- connect (host ip) ret <- runResourceT $ liftIO $ access pipe master (pack database) act Database.MongoDB.close pipe return ret -- | helper method to ensure we force extraction of all results -- note how it is defined recursively - meaning that draincursor' calls itself. -- the purpose is to iterate through all documents returned if the connection is -- returning the documents in batch mode, meaning in batches of retruned results with more -- to come on each call. 
The function recurses until there are no results left, building an -- array of returned [Document] drainCursor :: Cursor -> Action IO [Document] drainCursor cur = drainCursor' cur [] where drainCursor' cur res = do batch <- nextBatch cur if null batch then return res else drainCursor' cur (res ++ batch) -- | Environment variable functions, that return the environment variable if set, or -- default values if not set. -- | The IP address of the mongoDB database that devnostics-rest uses to store and access data mongoDbIp :: IO String mongoDbIp = defEnv "MONGODB_IP" Prelude.id "database" True -- | The port number of the mongoDB database that devnostics-rest uses to store and access data mongoDbPort :: IO Integer mongoDbPort = defEnv "MONGODB_PORT" read 27017 False -- 27017 is the default mongodb port -- | The name of the mongoDB database that devnostics-rest uses to store and access data mongoDbDatabase :: IO String mongoDbDatabase = defEnv "MONGODB_DATABASE" Prelude.id "USEHASKELLDB" True -- | Determines log reporting level. Set to "DEBUG", "WARNING" or "ERROR" as preferred. Loggin is -- provided by the hslogger library. logLevel :: IO String logLevel = defEnv "LOG_LEVEL" Prelude.id "DEBUG" True -- | Helper function to simplify the setting of environment variables -- function that looks up environment variable and returns the result of running funtion fn over it -- or if the environment variable does not exist, returns the value def. The function will optionally log a -- warning based on Boolean tag defEnv :: Show a => String -- Environment Variable name -> (String -> a) -- function to process variable string (set as 'id' if not needed) -> a -- default value to use if environment variable is not set -> Bool -- True if we should warn if environment variable is not set -> IO a defEnv env fn def doWarn = lookupEnv env >>= \ e -> case e of Just s -> return $ fn s Nothing -> do when doWarn (doLog warningM $ "Environment variable: " ++ env ++ " is not set. Defaulting to " ++ (show def)) return def
Garygunn94/DFS
DirectoryServer/.stack-work/intero/intero27005gYj.hs
bsd-3-clause
9,850
176
15
2,517
2,263
1,225
1,038
208
3
{------------------------------------------------------------------------------- MorphGrammar.Hofm.Language.German.Derivation HOFM German Derivation Grammar (c) 2012 Britta Zeller <zeller@cl.uni-heidelberg.de> Jan Snajder <jan.snajder@fer.hr> -------------------------------------------------------------------------------} module MorphGrammar.Hofm.Language.German.Derivation where import MorphGrammar.Hofm.Transf import MorphGrammar.Hofm.IPattern import MorphGrammar.Hofm.DPattern import MorphGrammar.Hofm.Language.German.Transf import MorphGrammar.Hofm.Language.German.Inflection ------------------------------------------------------------------------------- -- Derivational patterns ------------------------------------------------------------------------------- dPattern = DPatternSL -- default derivation is from a stem to lemma dPatterns = [ dVN01,dVN02,dVN03, dVA01] ------------------------------------------------------------------------------- -- 1. NOMENABLEITUNG ------------------------------------------------------------------------------- -- 1.1 NOMEN ZU NOMEN -- 1.2 ADJEKTIV ZU NOMEN -- 1.3 VERB ZU NOMEN -- singen -> gesang dVN01 = dPattern "dVN01" (pfx "ge" & rifx "i" "a") verbs mNouns -- reden -> gerede dVN02 = dPattern "dVN02" (pfx "ge" & sfx "e") verbs nNouns -- tanzen -> tänzer dVN03 = dPattern "dVN03" (sfx "er" & uml) verbs nNouns ------------------------------------------------------------------------------- -- 2. ADJEKTIVABLEITUNG ------------------------------------------------------------------------------- -- 2.1 NOMEN ZU ADJEKTIV -- 2.2 ADJEKTIV ZU ADJEKTIV -- 2.3 VERB ZU ADJEKTIV -- sagen -> unsangbar dVA01 = dPattern "dVA01" (pfx "un" & sfx "bar") verbs adjectives ------------------------------------------------------------------------------- -- 3. VERBABLEITUNG ------------------------------------------------------------------------------- -- 3.1 NOMEN ZU VERB -- 3.2 ADJEKTIV ZU VERB -- 3.3 VERB ZU VERB
jsnajder/hofm
src/MorphGrammar/Hofm/Language/German/Derivation.hs
bsd-3-clause
1,995
0
8
210
199
124
75
18
1
-- {-# LANGUAGE #-}
{-# OPTIONS_GHC -Wall #-}

{-# OPTIONS_GHC -fno-warn-unused-imports #-} -- TEMP
{-# OPTIONS_GHC -fno-warn-unused-binds   #-} -- TEMP

----------------------------------------------------------------------
-- |
-- Module      :  Test
-- Copyright   :  (c) 2014 Tabula, Inc.
--
-- Maintainer  :  conal@tabula.com
-- Stability   :  experimental
--
-- Test the plugin. To run:
--
--   hermit Test.hs -v0 -opt=TypeEncode.Plugin Auto.hss
--
----------------------------------------------------------------------

module Test (case1) where

-- Needed for resolving names. Is there an alternative?
import GHC.Tuple ()
import Data.Either ()

import qualified TypeEncode.Encode

q :: Int
q = 3

t0 :: ()
t0 = ()

t1 :: Bool
t1 = True

t2 :: [Int]
t2 = [1,2,3]

t3 :: [Bool]
t3 = [True,False]

t4 :: (Int,Int)
t4 = (3,4)

t5 :: (Int,Int,Int,Int,Int)
t5 = (3,4,5,6,7)

data A = B Int | C () Bool () Int | Y Int | Z

t6 :: A
t6 = C () True () 3

data D = D

t7 :: D
t7 = D

data E a = E a a

t8 :: E Bool
t8 = E False True

t9 :: E ()
t9 = E () ()

fizzle :: String
fizzle = "fizzle"

newtype F a = F (a,a)

-- The next two fail. Apparently callDataConT doesn't work for newtype
-- constructors. Investigate.

case0 :: () -> Bool
case0 () = False

data G a = G a

case1 :: G Bool -> Bool
case1 (G x) = not x

case2 :: E Bool -> Bool
case2 (E zink zonk) = zink || zonk

case4 :: A -> Int
case4 (B n) = n
case4 (C () b _ n) = if b then n else 7
case4 (Y m) = m
case4 Z = 85

qq :: (G Bool,G Bool) -> Bool
qq (G a, G b) = a && b
conal/type-encode
test/Test.hs
bsd-3-clause
1,564
0
8
375
553
321
232
49
2
-----------------------------------------------------------------------------
-- |
-- Module      :  Text.Hyphenation.Hyphenator
-- Copyright   :  (C) 2012-2015 Edward Kmett
-- License     :  BSD-style (see the file LICENSE)
--
-- Maintainer  :  Edward Kmett <ekmett@gmail.com>
-- Stability   :  provisional
-- Portability :  portable
--
-- Hyphenation based on the Knuth-Liang algorithm as used by TeX.
----------------------------------------------------------------------------
module Text.Hyphenation.Hyphenator
  ( Hyphenator(..)
  -- * Hyphenate with a given set of patterns
  , hyphenate
  , defaultLeftMin
  , defaultRightMin
  ) where

import Text.Hyphenation.Pattern
import Text.Hyphenation.Exception

-- | By default, do not insert hyphens in the first two characters
--
-- >>> defaultLeftMin
-- 2
defaultLeftMin :: Int
defaultLeftMin = 2

-- | By default, do not insert hyphens in the last three characters.
--
-- >>> defaultRightMin
-- 3
defaultRightMin :: Int
defaultRightMin = 3

-- | A @Hyphenator@ is a combination of an alphabet normalization scheme, a set of 'Patterns', a set of 'Exceptions' to those patterns
-- and a number of characters at each end to skip hyphenating.
data Hyphenator = Hyphenator
  { hyphenatorChars      :: Char -> Char        -- ^ a normalization function applied to input characters before applying patterns or exceptions
  , hyphenatorPatterns   :: Patterns            -- ^ hyphenation patterns stored in a trie
  , hyphenatorExceptions :: Exceptions          -- ^ exceptions to the general hyphenation rules, hyphenated manually
  , hyphenatorLeftMin    :: {-# UNPACK #-} !Int -- ^ the number of characters at the start of a word to skip hyphenating, by default: 2
  , hyphenatorRightMin   :: {-# UNPACK #-} !Int -- ^ the number of characters at the end of the word to skip hyphenating, by default: 3
  }

-- | Using a 'Hyphenator', compute the score of a string.
hyphenationScore :: Hyphenator -> String -> [Int]
hyphenationScore (Hyphenator nf ps es l r) s
  | l + r >= n = replicate (n + 1) 0
  | otherwise = case lookupException ls es of
    Just pts -> trim pts
    Nothing  -> trim (lookupPattern ls ps)
  where
    trim result = replicate l 0 ++ take (n - l - r) (drop l result)
    n = length s
    ls = map nf s

-- | hyphenate a single word using the specified Hyphenator. Returns a set of candidate breakpoints by decomposing the input
-- into substrings.
--
-- >>> import Text.Hyphenation
--
-- >>> hyphenate english_US "supercalifragilisticexpialadocious"
-- ["su","per","cal","ifrag","ilis","tic","ex","pi","al","ado","cious"]
--
-- >>> hyphenate english_US "hyphenation"
-- ["hy","phen","ation"]
hyphenate :: Hyphenator -> String -> [String]
hyphenate h s0 = go [] s0 $ tail $ hyphenationScore h s0
  where
    go acc (w:ws) (p:ps)
      | odd p     = reverse (w:acc) : go [] ws ps
      | otherwise = go (w:acc) ws ps
    go acc ws _ = [reverse acc ++ ws]
DNNX/hyphenation
src/Text/Hyphenation/Hyphenator.hs
bsd-3-clause
2,907
0
11
571
468
263
205
32
2
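A small sketch (not part of the original module) showing how the record fields above can be used to widen the no-hyphenation margins of an existing Hyphenator. It assumes english_US is exported by the package's top-level Text.Hyphenation module, as in the doctests above.

import Text.Hyphenation.Hyphenator
import Text.Hyphenation (english_US)  -- assumed re-export, as used in the doctests

-- Raise the margins relative to defaultLeftMin/defaultRightMin.
conservative :: Hyphenator
conservative = english_US
  { hyphenatorLeftMin  = 3  -- keep at least 3 characters before the first hyphen
  , hyphenatorRightMin = 4  -- and at least 4 after the last one
  }

demo :: [String]
demo = hyphenate conservative "hyphenation"  -- fewer break points than with english_US itself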
module Main where

import Control.Monad (void, unless, when, forever)
import Data.Maybe (isNothing)
import Control.Concurrent.Async (async)
import qualified Data.ByteString.Char8 as B

import Pipes
import Pipes.Network.TCP
import System.Console.CmdArgs.Explicit

arguments :: Mode [(String,String)]
arguments = mode "inspection-proxy" [] ""
    (flagArg (upd "proxy-setup") "proxy-port server port")
    [ flagNone ["only-server"] (("only-server",""):) "print only server response activity"
    , flagNone ["only-client"] (("only-client",""):) "print only client request activity"
    , flagHelpSimple (("help",""):)
    , flagVersion (("version",""):)
    ]
  where upd msg x v = Right $ (msg,x):v

proxyDetails :: [(String, t)] -> Maybe (t, t, t)
proxyDetails as = readProxyDetails $ filter (\(a,_) -> a == "proxy-setup") as
  where
    readProxyDetails as'
      | length as' /= 3 = Nothing
      | otherwise = Just (snd $ as' !! 2, snd $ as' !! 1, snd $ head as')
      -- arguments are parsed in reverse order

printPass :: Bool -> Pipe B.ByteString B.ByteString IO r
printPass skip = forever $ do
    readValue <- await
    unless skip $ lift $ B.putStrLn readValue
    yield readValue

main :: IO ()
main = do
    args <- processArgs arguments

    let connectionDetails = proxyDetails args
    let hasFlag flag = (flag, "") `elem` args

    when (hasFlag "version") $ putStrLn "inspection-proxy version 0.1.0.3"

    if hasFlag "help" || isNothing connectionDetails
        then print $ helpText [] HelpFormatDefault arguments
        else do
            let Just (bindport, host, port) = connectionDetails
            void . serve HostAny bindport $ \(bindSocket, _) ->
                connect host port $ \(serviceSocket, _) -> do
                    void $ async $ runEffect $
                        fromSocket bindSocket 4096 >-> printPass (hasFlag "only-server") >-> toSocket serviceSocket
                    runEffect $
                        fromSocket serviceSocket 4096 >-> printPass (hasFlag "only-client") >-> toSocket bindSocket
                    return ()
mhitza/inspection-proxy
Main.hs
bsd-3-clause
2,201
0
20
634
686
360
326
39
2
module Syntax( Expr, FDef, RecordDef, ProgramDefs, ilRecordDef, fdef, ap, var, num, bool, ite, programDefs, toRPN, builtinMap, numFields, accessors, constructor) where import Control.Monad.State.Lazy hiding (ap) import Data.Map as M import RPN type RecordField = String data RecordDef = RecordDef String [RecordField] deriving (Eq, Ord, Show) ilRecordDef = RecordDef numFields (RecordDef _ fields) = length fields accessors (RecordDef _ fields) = zip fields [0..(length fields - 1)] constructor (RecordDef name _) = name data FDef = FDef String Int deriving (Eq, Ord, Show) fdef = FDef arity (FDef _ a) = a name (FDef n _) = n data ProgramDefs = ProgramDefs { functionDefs :: Map String FDef, argumentNums :: Map String Int, accessorIndexes :: Map String Int, constructorArities :: Map String Int } programDefs = ProgramDefs data Expr = Ap Expr Expr | Var String | Num Int | Boolean Bool | IfThenElse Expr Expr Expr deriving (Eq, Ord, Show) ap = Ap var = Var num = Num bool = Boolean ite = IfThenElse toRPN :: Map String FDef -> Map String Int -> Map String Int -> Map String Int -> Expr -> [RPN] toRPN funcDefs argNums accessorInds constructorArts expr = fst $ toRPNWithLabelNums 0 programDefs expr where programDefs = ProgramDefs funcDefs argNums accessorInds constructorArts toRPNWithLabelNums :: Int -> ProgramDefs -> Expr -> ([RPN], Int) toRPNWithLabelNums n _ (Num v) = ([intVal v], n) toRPNWithLabelNums n _ (Boolean b) = ([boolVal b], n) toRPNWithLabelNums n _ (Var "nil") = ([intVal 0], n) toRPNWithLabelNums n pDefs (Ap l r) = (rightRPN ++ leftRPN ++ [appl], nr) where leftRPNLab = toRPNWithLabelNums n pDefs l leftRPN = fst leftRPNLab nl = snd leftRPNLab rightRPNLab = toRPNWithLabelNums nl pDefs r rightRPN = fst rightRPNLab nr = snd rightRPNLab toRPNWithLabelNums n pDefs (Var v) = case M.lookup v (argumentNums pDefs) of Just argNum -> ([arg argNum], n) Nothing -> case M.lookup v (functionDefs pDefs) of Just (FDef name arity) -> ([funcall name arity], n) Nothing -> case M.lookup v (constructorArities pDefs) of Just arity -> ([intVal arity, funcall "create_record" (arity + 1), appl], n) Nothing -> case M.lookup v (accessorIndexes pDefs) of Just index -> ([intVal index, funcall "get_field" 2, appl], n) Nothing -> error $ v ++ " is not defined\nDefined functions are " ++ show (functionDefs pDefs) toRPNWithLabelNums n pDefs (IfThenElse e1 e2 e3) = (finalRPN, snd e3RPNLab) where e1RPNLab = toRPNWithLabelNums (n+2) pDefs e1 e1RPN = fst e1RPNLab e2RPNLab = toRPNWithLabelNums (snd e1RPNLab) pDefs e2 e2RPN = fst e2RPNLab e3RPNLab = toRPNWithLabelNums (snd e2RPNLab) pDefs e3 e3RPN = fst e3RPNLab finalRPN = e1RPN ++ [jumpFalse n] ++ e2RPN ++ [jump (n+1)] ++ [label n] ++ e3RPN ++ [label (n+1)] builtinMap = M.fromList [("+", fdef "int_add" 2), ("-", fdef "int_sub" 2), ("*", fdef "int_mul" 2), ("/", fdef "int_div" 2), ("||", fdef "bool_or" 2), ("&&", fdef "bool_and" 2), ("~", fdef "bool_not" 1), (">", fdef "greater" 2), ("<", fdef "less" 2), (">=", fdef "greater_or_equal" 2), ("<=", fdef "less_or_equal" 2), ("==", fdef "equal" 2), ("isNil", fdef "is_nil" 1)]
dillonhuff/IntLang
src/Syntax.hs
bsd-3-clause
3,607
0
20
1,017
1,314
705
609
82
5
{-# LANGUAGE BangPatterns #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeSynonymInstances #-} {-# LANGUAGE ViewPatterns #-} {-# LANGUAGE TemplateHaskell #-} import Control.Applicative import Control.Exception as E import Control.Monad import Control.Monad.Trans import Data.Function (fix) import Data.Time.Clock.POSIX import System.Environment import System.IO import qualified Data.Text as T import System.Random.MWC (Variate(..)) import qualified Network.HTTP.Client as HC import qualified System.Random.MWC as MWC import Database.InfluxDB import Database.InfluxDB.TH import qualified Database.InfluxDB.Stream as S oneWeekInSeconds :: Int oneWeekInSeconds = 7*24*60*60 main :: IO () main = do [read -> (numPoints :: Int), read -> (batches :: Int)] <- getArgs hSetBuffering stdout NoBuffering HC.withManager managerSettings $ \manager -> do config <- newConfig manager let db = "ctx" dropDatabase config db `E.catch` -- Ignore exceptions here \(_ :: HC.HttpException) -> return () createDatabase config "ctx" gen <- MWC.create flip fix batches $ \outerLoop !m -> when (m > 0) $ do postWithPrecision config db SecondsPrecision $ withSeries "ct1" $ flip fix numPoints $ \innerLoop !n -> when (n > 0) $ do !timestamp <- liftIO $ (-) <$> getPOSIXTime <*> (fromIntegral <$> uniformR (0, oneWeekInSeconds) gen) !value <- liftIO $ uniform gen writePoints $ Point value (Time timestamp) innerLoop $ n - 1 outerLoop $ m - 1 result <- query config db "select count(value) from ct1;" case result of [] -> putStrLn "Empty series" series:_ -> do print $ seriesColumns series print $ seriesPoints series -- Streaming output queryChunked config db "select * from ct1;" $ S.fold step () where step _ series = do case fromSeriesData series of Left reason -> hPutStrLn stderr reason Right points -> mapM_ print (points :: [Point]) putStrLn "--" newConfig :: HC.Manager -> IO Config newConfig manager = do pool <- newServerPool localServer [] return Config { configCreds = rootCreds , configServerPool = pool , configHttpManager = manager } managerSettings :: HC.ManagerSettings managerSettings = HC.defaultManagerSettings { HC.managerResponseTimeout = Just $ 60*(10 :: Int)^(6 :: Int) } data Point = Point { pointValue :: !Name , pointTime :: !Time } deriving Show newtype Time = Time POSIXTime deriving Show instance ToValue Time where toValue (Time epoch) = toValue $ epochInSeconds epoch where epochInSeconds :: POSIXTime -> Value epochInSeconds = Int . floor instance FromValue Time where parseValue (Int n) = return $ Time $ fromIntegral n parseValue (Float d) = return $ Time $ realToFrac d parseValue v = typeMismatch "Int or Float" v data Name = Foo | Bar | Baz | Quu | Qux deriving (Enum, Bounded, Show) instance ToValue Name where toValue Foo = String "foo" toValue Bar = String "bar" toValue Baz = String "baz" toValue Quu = String "quu" toValue Qux = String "qux" instance FromValue Name where parseValue (String name) = case name of "foo" -> return Foo "bar" -> return Bar "baz" -> return Baz "quu" -> return Quu "qux" -> return Qux _ -> fail $ "Incorrect string: " ++ T.unpack name parseValue v = typeMismatch "String" v instance Variate Name where uniform = uniformR (minBound, maxBound) uniformR (lower, upper) g = do name <- uniformR (fromEnum lower, fromEnum upper) g return $! toEnum name -- Instance deriving deriveSeriesData defaultOptions { fieldLabelModifier = stripPrefixLower "point" } ''Point
alphaHeavy/influxdb-haskell
examples/random-points.hs
bsd-3-clause
3,802
15
26
879
1,218
611
607
115
3
module Engine.Monad where

import Control.Monad
import Control.Monad.Random
import Control.Monad.State
import Control.Monad.Writer
import qualified Data.DList as D

import System.IO.Unsafe

----------------------------------------

data MessageType = All | Game | Turn | None
    deriving (Read,Show,Eq,Ord)

newtype Sim a = Sim
    ( RandT StdGen
        ( Writer
            ( D.DList (MessageType,String) )
        )
        a
    )
    deriving (Functor,Applicative,Monad,MonadRandom)

writeMsg :: MessageType -> String -> Sim ()
writeMsg t str = Sim $ do
    tell $ D.fromList [(t,str)]

runSim :: MessageType -> Sim a -> IO a
runSim t (Sim m) = do
    sg <- newStdGen
    let (a,w) = runWriter $ evalRandT m sg
    go (D.toList w)
    return a
    where
        go [] = return ()
        go ((t',str):xs) = do
            if t' >= t
                then putStrLn str
                else return ()
            go xs

-- evalSim :: s -> Sim s a -> s
-- evalSim s m = unsafePerformIO (runSim None s m)
mikeizbicki/dominion
src/Engine/Monad.hs
bsd-3-clause
1,036
0
12
328
346
184
162
34
3
module GeneratedTest where

import Haskore.Basic.Duration((%+))
import Haskore.Music
import Haskore.Melody.Standard
import Haskore.Music.GeneralMIDI as MidiMusic
import Haskore.Interface.MIDI.Render as Render

main = Render.fileFromGeneralMIDIMusic "hello.midi" song

song = MidiMusic.fromStdMelody MidiMusic.AcousticGrandPiano $
  chord [ changeTempo (2 %+ 3) (line [c 1 (1 %+ 23) na, rest (1 %+ 23)])
        , transpose 3 (line [c 1 qn na, qnr])
        , changeTempo (2 %+ 3) (c 1 (1 %+ 23) na)
        , transpose (- 3) (c 1 qn na)
        , changeTempo (7 %+ 1) (rest (1 %+ 23))
        , transpose 4 qnr
        ]
nfjinjing/haskore-guide
src/hello.hs
bsd-3-clause
577
0
14
95
245
136
109
13
1
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ViewPatterns #-} module Ivory.ModelCheck where import qualified Ivory.Language.Proc as I import qualified Ivory.Language.Syntax as I import Ivory.Language.Syntax.Concrete.Location import Text.Printf import Ivory.ModelCheck.Ivory2CVC4 import Ivory.ModelCheck.Monad import Ivory.ModelCheck.CVC4 import System.FilePath.Posix import System.Directory import System.Process import System.IO import Control.Applicative import Control.Monad import qualified Data.ByteString.Char8 as B import Data.List import qualified Data.Map as M -------------------------------------------------------------------------------- data Args = Args { printQuery :: Bool , printEnv :: Bool , printLocs :: Bool , inlineCall :: Bool -- ^ Should we inline `call`s or just assume the `ensures`? , callCVC4 :: Bool , cvc4Path :: FilePath , cvc4Args :: [String] } deriving (Show, Eq) initArgs :: Args initArgs = Args { printQuery = True , printEnv = True , printLocs = True , inlineCall = False , callCVC4 = True , cvc4Path = "" , cvc4Args = ["--incremental", "--rewrite-divk"] } -------------------------------------------------------------------------------- data Result = Safe | Unsafe [String] FilePath | Inconsistent FilePath | Error String FilePath deriving (Show, Eq) isSafe :: Result -> Bool isSafe Safe = True isSafe _ = False isUnsafe :: Result -> Bool isUnsafe (Unsafe{}) = True isUnsafe _ = False isError :: Result -> Bool isError (Error{}) = True isError _ = False showResult :: Result -> String showResult Safe = "Safe" showResult (Inconsistent f) = printf "Inconsistent (generated script at %s)" f showResult (Unsafe qs f) = printf "Unsafe: %s (generated script at %s)" (intercalate ", " qs) f showResult (Error e f) = printf "Error: %s (generated script at %s)" e f modelCheck' :: [I.Module] -> I.Def p -> IO () modelCheck' mods p = do res <- modelCheck initArgs mods p print res modelCheck :: Args -> [I.Module] -> I.Def p -> IO Result modelCheck args mods (I.DefProc p) = do let (_, st) = runMC (SymOpts (inlineCall args)) (modelCheckProc mods p) let bs = B.unlines (mkScript st) debugging args st bs file <- writeInput bs out <- reverse <$> runCVC4 args file case out of ("valid":_) -> return (Inconsistent file) ("invalid":results) | all (=="valid") results -> return Safe | otherwise -> return (Unsafe bad file) where bad = [ B.unpack $ concrete q | (q, "invalid") <- zip (tail $ allQueries st) results ] _ -> return (Error (show out) file) modelCheck _ _ _ = error "I can only check procedures defined in Ivory!" 
mkModuleEnv :: [I.Module] -> M.Map I.ModuleName I.Module mkModuleEnv deps = M.fromList [ (I.modName m, m) | m <- deps ] -------------------------------------------------------------------------------- debugging :: Args -> SymExecSt -> B.ByteString -> IO () debugging args st bs = do when (printQuery args) $ do putStrLn "**** QUERY ************************************" B.putStrLn bs putStrLn "***********************************************" putStrLn "" when (printEnv args) $ do putStrLn "**** ENV **************************************" print (symEnv st) putStrLn "***********************************************" putStrLn "" -------------------------------------------------------------------------------- mkScript :: SymExecSt -> [B.ByteString] mkScript st = [ "% Script auto-generated for model-checking Ivory function " , B.pack (funcSym st) , "" , "% CVC4 Lib -----------------------------------" , "" ] ++ map concrete cvc4Lib ++ [ "" , "% user-defined types -------------------------" , "" ] ++ writeStmts (map (uncurry typeDecl) . types . symSt) ++ [ "" , "% declarations -------------------------------" , "" ] ++ writeStmts (decls . symSt) ++ [ "" , "% program encoding ---------------------------" , "" ] ++ writeStmts (map assert . invars . symSt) ++ [ "" , "% queries ------------------------------------" , "" ] ++ writeStmts allQueries where writeStmts :: Concrete a => (SymExecSt -> [a]) -> [B.ByteString] writeStmts f = map concrete (reverse $ f st) -- | Are the assertions consistent? If not, there's a bug in the -- model-checking. consistencyQuery :: Statement consistencyQuery = query $ noLoc false allQueries :: SymExecSt -> [Statement] allQueries st = consistencyQuery : (map query . assertQueries . symQuery) st -- | Write model inputs to a temp file. writeInput :: B.ByteString -> IO FilePath writeInput bs = do dir <- getTemporaryDirectory let tempDir = dir </> "cvc4-inputs" createDirectoryIfMissing False tempDir (file, hd) <- openTempFile tempDir "cvc4input.cvc" -- putStrLn $ "Created temp file " ++ file ++ "\n" B.hPut hd bs hClose hd return file -- | Run cvc4 on the input file returning the results. runCVC4 :: Args -> FilePath -> IO [String] runCVC4 args file = do (_, Just hout, _, _) <- createProcess $ (proc exec execArgs) { std_out = CreatePipe } out <- hGetContents hout return (lines out) where exec = cvc4Path args </> "cvc4" execArgs = cvc4Args args ++ [file] printResults :: SymExecSt -> [String] -> IO () printResults st results = do let queries = map concrete $ reverse $ allQueries st let match = reverse (zip queries results) B.putStrLn "*** If \'Query FALSE\' is valid, the assertions are inconsistent. ***\n" mapM_ printRes match where printRes (q,res) = printf "%-30s : %s\n" (B.unpack q) res
Hodapp87/ivory
ivory-model-check/src/Ivory/ModelCheck.hs
bsd-3-clause
5,907
0
18
1,397
1,644
854
790
153
3
------------------------------------------------------------------------------- -- | -- Copyright : (C) 2015 Michael Carpenter -- License : GPL3 -- Maintainer : Michael Carpenter <oldmanmike.dev@gmail.com> -- Stability : experimental -- Portability : GHC -- ------------------------------------------------------------------------------- module Catan.Internal.Settlement ( upgradeToCity --, testSettlements ) where import Catan.Internal.Resource import Catan.Types upgradeToCity :: [Settlement] -> Settlement -> Either String [Settlement] upgradeToCity slst s = do if elem s slst then Right ((s {city = True}):(filter (\i -> (getVert i) /= (getVert s)) slst)) else Left "Error: Settlement does not exist!" {- testSettlements :: Settlements testSettlements = V.fromList [ (Settlement ((0,-2),(1,-2),(0,-1)) {- [ (Hex ( 0,-2) 6 Forest False) , (Hex ( 1,-2) 5 Fields False) , (Hex ( 0,-1) 3 Pasture False)] -} Blue False) , (Settlement ((1,-2),(0,-1),(1,-1)) {- [ (Hex (1,-2) 5 Fields False) , (Hex (0,-1) 3 Pasture False) , (Hex (1,-1) 8 Hills False)] -} Blue False) , (Settlement ((1,-2),(1,-1),(2,-2)) {- [ (Hex (1,-2) 5 Fields False) , (Hex (1,-1) 8 Hills False) , (Hex (2,-2) 9 Mountains False)] -} Blue False) , (Settlement ((-1,2),(0,2),(0,1)) {- [ (Hex (-1,2) 4 Forest False) , (Hex (0,2) 8 Pasture False) , (Hex (0,1) 10 Fields False)] -} Red False) ] -}
oldmanmike/catan
src/Catan/Internal/Settlement.hs
bsd-3-clause
2,164
0
17
996
140
82
58
9
2
{-# LANGUAGE BangPatterns #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE TupleSections #-} -- | Names for packages. module Stack.Types.PackageName (PackageName ,PackageNameParseFail(..) ,packageNameParser ,parsePackageName ,parsePackageNameFromString ,packageNameString ,packageNameText ,fromCabalPackageName ,toCabalPackageName ,parsePackageNameFromFilePath ,mkPackageName ,packageNameArgument) where import Control.Applicative import Control.DeepSeq import Control.Monad import Control.Monad.IO.Unlift import Data.Aeson.Extended import Data.Attoparsec.Combinators import Data.Attoparsec.Text import Data.Data import Data.Hashable import Data.List (intercalate) import Data.Store (Store) import Data.Text (Text) import qualified Data.Text as T import Data.Text.Binary () import qualified Distribution.Package as Cabal import GHC.Generics import Language.Haskell.TH import Language.Haskell.TH.Syntax import qualified Options.Applicative as O import Path import Stack.Types.StringError -- | A parse fail. data PackageNameParseFail = PackageNameParseFail Text | CabalFileNameParseFail FilePath | CabalFileNameInvalidPackageName FilePath deriving (Typeable) instance Exception PackageNameParseFail instance Show PackageNameParseFail where show (PackageNameParseFail bs) = "Invalid package name: " ++ show bs show (CabalFileNameParseFail fp) = "Invalid file path for cabal file, must have a .cabal extension: " ++ fp show (CabalFileNameInvalidPackageName fp) = "cabal file names must use valid package names followed by a .cabal extension, the following is invalid: " ++ fp -- | A package name. newtype PackageName = PackageName Text deriving (Eq,Ord,Typeable,Data,Generic,Hashable,NFData,Store,ToJSON,ToJSONKey) instance Lift PackageName where lift (PackageName n) = appE (conE 'PackageName) (stringE (T.unpack n)) instance Show PackageName where show (PackageName n) = T.unpack n instance FromJSON PackageName where parseJSON j = do s <- parseJSON j case parsePackageNameFromString s of Nothing -> fail ("Couldn't parse package name: " ++ s) Just ver -> return ver instance FromJSONKey PackageName where fromJSONKey = FromJSONKeyTextParser $ \k -> either (fail . show) return $ parsePackageName k -- | Attoparsec parser for a package name packageNameParser :: Parser PackageName packageNameParser = fmap (PackageName . T.pack . intercalate "-") (sepBy1 word (char '-')) where word = concat <$> sequence [many digit, pured letter, many (alternating letter digit)] -- | Make a package name. mkPackageName :: String -> Q Exp mkPackageName s = case parsePackageNameFromString s of Nothing -> errorString ("Invalid package name: " ++ show s) Just pn -> [|pn|] -- | Parse a package name from a 'Text'. parsePackageName :: MonadThrow m => Text -> m PackageName parsePackageName x = go x where go = either (const (throwM (PackageNameParseFail x))) return . parseOnly (packageNameParser <* endOfInput) -- | Parse a package name from a 'String'. parsePackageNameFromString :: MonadThrow m => String -> m PackageName parsePackageNameFromString = parsePackageName . T.pack -- | Produce a string representation of a package name. packageNameString :: PackageName -> String packageNameString (PackageName n) = T.unpack n -- | Produce a string representation of a package name. packageNameText :: PackageName -> Text packageNameText (PackageName n) = n -- | Convert from a Cabal package name. 
fromCabalPackageName :: Cabal.PackageName -> PackageName fromCabalPackageName (Cabal.PackageName name) = let !x = T.pack name in PackageName x -- | Convert to a Cabal package name. toCabalPackageName :: PackageName -> Cabal.PackageName toCabalPackageName (PackageName name) = let !x = T.unpack name in Cabal.PackageName x -- | Parse a package name from a file path. parsePackageNameFromFilePath :: MonadThrow m => Path a File -> m PackageName parsePackageNameFromFilePath fp = do base <- clean $ toFilePath $ filename fp case parsePackageNameFromString base of Nothing -> throwM $ CabalFileNameInvalidPackageName $ toFilePath fp Just x -> return x where clean = liftM reverse . strip . reverse strip ('l':'a':'b':'a':'c':'.':xs) = return xs strip _ = throwM (CabalFileNameParseFail (toFilePath fp)) -- | An argument which accepts a template name of the format -- @foo.hsfiles@. packageNameArgument :: O.Mod O.ArgumentFields PackageName -> O.Parser PackageName packageNameArgument = O.argument (do s <- O.str either O.readerError return (p s)) where p s = case parsePackageNameFromString s of Just x -> Right x Nothing -> Left $ unlines [ "Expected valid package name, but got: " ++ s , "Package names consist of one or more alphanumeric words separated by hyphens." , "To avoid ambiguity with version numbers, each of these words must contain at least one letter." ]
martin-kolinek/stack
src/Stack/Types/PackageName.hs
bsd-3-clause
5,515
0
14
1,276
1,222
640
582
124
3
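A brief sketch (not part of the original module) exercising the parsers above. Maybe is used as the MonadThrow carrier, which assumes the MonadThrow Maybe instance from the exceptions package, so a failed parse yields Nothing rather than a thrown exception.

validName :: Maybe PackageName
validName = parsePackageNameFromString "conduit-extra"  -- hyphen-separated words, each with a letter

invalidName :: Maybe PackageName
invalidName = parsePackageNameFromString "2048"         -- rejected: a word must contain at least one letter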
{-# LINE 1 "Control.Monad.ST.Lazy.Unsafe.hs" #-}
{-# LANGUAGE Unsafe #-}

-----------------------------------------------------------------------------
-- |
-- Module      :  Control.Monad.ST.Lazy.Unsafe
-- Copyright   :  (c) The University of Glasgow 2001
-- License     :  BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer  :  libraries@haskell.org
-- Stability   :  provisional
-- Portability :  non-portable (requires universal quantification for runST)
--
-- This module presents an identical interface to "Control.Monad.ST",
-- except that the monad delays evaluation of state operations until
-- a value depending on them is required.
--
-- Unsafe API.
--
-----------------------------------------------------------------------------

module Control.Monad.ST.Lazy.Unsafe (
        -- * Unsafe operations
        unsafeInterleaveST,
        unsafeIOToST
    ) where

import Control.Monad.ST.Lazy.Imp
phischu/fragnix
builtins/base/Control.Monad.ST.Lazy.Unsafe.hs
bsd-3-clause
922
0
4
151
45
38
7
5
0
-- | <http://strava.github.io/api/v3/athlete/>
module Strive.Actions.Athletes
  ( getCurrentAthlete
  , getAthlete
  , updateCurrentAthlete
  , getAthleteStats
  , getAthleteCrs
  ) where

import Network.HTTP.Types (Query, toQuery)
import Strive.Aliases (AthleteId, Result)
import Strive.Client (Client)
import Strive.Internal.HTTP (get, put)
import Strive.Options (GetAthleteCrsOptions, UpdateCurrentAthleteOptions)
import Strive.Types (AthleteDetailed, AthleteStats, AthleteSummary, EffortDetailed)

-- | <http://strava.github.io/api/v3/athlete/#get-details>
getCurrentAthlete :: Client -> IO (Result AthleteDetailed)
getCurrentAthlete client = get client resource query
 where
  resource = "api/v3/athlete"
  query = [] :: Query

-- | <http://strava.github.io/api/v3/athlete/#get-another-details>
getAthlete :: Client -> AthleteId -> IO (Result AthleteSummary)
getAthlete client athleteId = get client resource query
 where
  resource = "api/v3/athletes/" ++ show athleteId
  query = [] :: Query

-- | <http://strava.github.io/api/v3/athlete/#update>
updateCurrentAthlete :: Client -> UpdateCurrentAthleteOptions -> IO (Result AthleteDetailed)
updateCurrentAthlete client options = put client resource query
 where
  resource = "api/v3/athlete"
  query = toQuery options

-- | <http://strava.github.io/api/v3/athlete/#stats>
getAthleteStats :: Client -> Integer -> IO (Result AthleteStats)
getAthleteStats client athleteId = get client resource query
 where
  resource = "api/v3/athletes/" ++ show athleteId ++ "/stats"
  query = [] :: Query

-- | <http://strava.github.io/api/v3/athlete/#koms>
getAthleteCrs :: Client -> AthleteId -> GetAthleteCrsOptions -> IO (Result [EffortDetailed])
getAthleteCrs client athleteId options = get client resource query
 where
  resource = "api/v3/athletes/" ++ show athleteId ++ "/koms"
  query = toQuery options
liskin/strive
library/Strive/Actions/Athletes.hs
mit
1,873
0
11
262
421
230
191
33
1
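A hedged sketch (not part of the original module) of calling the athlete actions above. It assumes a Client is obtained via a buildClient helper exported from the library's top-level Strive module with a Strava access token; that constructor and its exact signature are not shown in this file, and the token string is a placeholder.

{-# LANGUAGE OverloadedStrings #-}
module Main (main) where

import Strive (buildClient)  -- assumed client constructor
import Strive.Actions.Athletes (getCurrentAthlete)

main :: IO ()
main = do
  client <- buildClient (Just "your-access-token")
  result <- getCurrentAthlete client
  case result of
    Left _        -> putStrLn "request failed"
    Right athlete -> print athlete  -- assumes a Show instance on AthleteDetailed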
{-# LANGUAGE ScopedTypeVariables, CPP #-} {- Copyright (C) 2009 John MacFarlane <jgm@berkeley.edu> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -} module Yst.Build (buildSite) where import Yst.Types import Yst.Util import Yst.Render import qualified Data.Map as M import Data.Maybe (fromMaybe, mapMaybe) import Data.List import System.FilePath import System.Directory import System.Exit #if MIN_VERSION_directory(1,2,0) import Data.Time.Calendar (Day(..)) import Data.Time.Clock (UTCTime(..), secondsToDiffTime) #else import System.Time (ClockTime(..)) #endif -- Note: ghc >= 6.12 (base >=4.2) supports unicode through iconv -- So we use System.IO.UTF8 only if we have an earlier version #if MIN_VERSION_base(4,2,0) import System.IO (hPutStrLn) import Prelude hiding (catch) #else import Prelude hiding (readFile, putStrLn, print, writeFile, catch) import System.IO.UTF8 #endif import System.IO (stderr) import Control.Monad import Control.Exception (catch, SomeException) #if MIN_VERSION_directory(1,2,0) minTime :: UTCTime minTime = UTCTime (ModifiedJulianDay 0) (secondsToDiffTime 0) #else minTime :: ClockTime minTime = TOD 0 0 #endif findSource :: Site -> FilePath -> IO FilePath findSource = searchPath . sourceDir dependencies :: Site -> String -> IO [FilePath] dependencies site url = do let page = case M.lookup url (pageIndex site) of Nothing -> error $ "Tried to get dependencies for nonexistent page: " ++ url Just pg -> pg layout <- findSource site $ stripStExt (fromMaybe (defaultLayout site) $ layoutFile page) <.> "st" requires <- mapM (findSource site) $ requiresFiles page srcdir <- findSource site $ case sourceFile page of TemplateFile f -> stripStExt f <.> "st" SourceFile f -> f let fileFromSpec (DataFromFile f _) = Just f fileFromSpec (DataFromSqlite3 f _ _) = Just f fileFromSpec _ = Nothing dataFiles <- mapM (searchPath $ dataDir site) $ mapMaybe (\(_,s) -> fileFromSpec s) $ pageData page return $ indexFile site : layout : srcdir : (requires ++ dataFiles) buildSite :: Site -> IO () buildSite site = do let filesIn dir = liftM (filter (/=".") . 
map (makeRelative dir)) $ getDirectoryContentsRecursive dir files <- liftM concat $ mapM filesIn $ filesDir site let pages = M.keys $ pageIndex site let overlap = files `intersect` pages unless (null overlap) $ forM_ overlap (\f -> hPutStrLn stderr $ "Warning: the page '" ++ f ++ "' will overwrite the file by the same name.") forM_ files $ \file -> updateFile site file forM_ pages $ \page -> case M.lookup page (pageIndex site) of Just pg -> updatePage site pg Nothing -> error $ "Couldn't find page " ++ page updateFile :: Site -> FilePath -> IO () updateFile site file = do let destpath = deployDir site </> file srcpath <- searchPath (filesDir site) file srcmod <- getModificationTime srcpath destmod <- catch (getModificationTime destpath) (\(_::SomeException) -> return minTime) if srcmod > destmod then do createDirectoryIfMissing True $ takeDirectory destpath hPutStrLn stderr $ "Updating " ++ destpath copyFile srcpath destpath else return () updatePage :: Site -> Page -> IO () updatePage site page = do let destpath = deployDir site </> pageUrl page deps <- dependencies site $ pageUrl page forM_ deps $ \dep -> do exists <- doesFileExist dep unless exists $ do hPutStrLn stderr $ "Missing dependency: " ++ dep hPutStrLn stderr $ "Aborting! Cannot build " ++ destpath exitWith $ ExitFailure 3 depsmod <- mapM getModificationTime deps destmod <- catch (getModificationTime destpath) (\(_::SomeException) -> return minTime) if maximum depsmod > destmod then do createDirectoryIfMissing True $ takeDirectory destpath hPutStrLn stderr $ "Updating " ++ destpath renderPage site page >>= writeFile destpath else return ()
2ion/yst
Yst/Build.hs
gpl-2.0
4,673
0
16
1,006
1,192
584
608
83
5
-- This file is part of HamSql -- -- Copyright 2014-2016 by it's authors. -- Some rights reserved. See COPYING, AUTHORS. module Database.HamSql.Internal.Utils ( module Data.Maybe , module Database.HamSql.Internal.Utils , module Database.YamSql.Internal.Utils , traverseOf , _Just , each ) where import Control.Lens (_Just, each, traverseOf) import Data.List (group, intercalate, sort) import Data.Maybe import qualified Data.Text as T import qualified Data.Text.IO as TIO import Debug.Trace import System.Exit import System.IO (stderr) import System.IO.Unsafe import Database.HamSql.Internal.Option import Database.YamSql.Internal.Utils join :: [a] -> [[a]] -> [a] join = intercalate preset :: Eq a => a -> a -> Maybe a preset d x | d == x = Nothing | otherwise = Just x presetEmpty :: [a] -> Maybe [a] presetEmpty [] = Nothing presetEmpty xs = Just xs err :: Text -> a err xs = unsafePerformIO $ do TIO.hPutStrLn stderr ("error: " <> xs) exitWith $ ExitFailure 1 warn :: Text -> a -> a warn = msg "warning" warn' :: Text -> IO () warn' = msg' "warning" msg :: Text -> Text -> a -> a msg typ xs ys = unsafePerformIO $ do msg' typ xs return ys msg' :: Text -> Text -> IO () msg' typ xs = TIO.hPutStrLn stderr (typ <> ": " <> xs) info :: OptCommon -> Text -> a -> a info opts xs | optVerbose opts = msg "info" xs | otherwise = id debug :: OptCommon -> Text -> a -> a debug opts xs | optDebug opts = msg "debug" xs | otherwise = id removeDuplicates :: (Ord a) => [a] -> [a] removeDuplicates = map head . group . sort --- Maybe Utils maybeMap :: (a -> b) -> Maybe [a] -> [b] maybeMap f = maybe [] (map f) maybePrefix :: Text -> Maybe Text -> Text maybePrefix _ Nothing = "" maybePrefix p (Just x) = p <> x fromJustReason :: Text -> Maybe a -> a fromJustReason _ (Just x) = x fromJustReason reason Nothing = err $ "fromJust failed: " <> reason selectUniqueReason :: Text -> [a] -> a selectUniqueReason _ [x] = x selectUniqueReason msgt [] = err $ "No element found while trying to find exactly one: " <> msgt selectUniqueReason msgt xs = err $ "More then one element (" <> tshow (length xs) <> ") found while trying to extrac one: " <> msgt showCode :: Text -> Text showCode = T.replace "\n" "\n " . T.cons '\n' maybeHead :: [a] -> Maybe a maybeHead [] = Nothing maybeHead (x:_) = Just x tr :: Show a => a -> a tr x = trace (show x <> "\n") x (<->) :: Text -> Text -> Text (<->) a b = a <> " " <> b (<\>) :: Text -> Text -> Text (<\>) a b = a <> "\n" <> b
qua-bla/hamsql
src/Database/HamSql/Internal/Utils.hs
gpl-3.0
2,528
0
10
549
1,000
530
470
80
1
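An illustration-only sketch (not part of the original module) of the pure helpers above; OverloadedStrings is enabled because maybePrefix works on Text.

{-# LANGUAGE OverloadedStrings #-}

import Data.Text (Text)
import Database.HamSql.Internal.Utils

joined :: String
joined = join ", " ["a", "b", "c"]          -- "a, b, c"

deduped :: [Int]
deduped = removeDuplicates [3, 1, 3, 2, 1]  -- [1, 2, 3]

unchangedDefault :: Maybe Int
unchangedDefault = preset 0 0               -- Nothing: the value equals the preset

prefixed :: Text
prefixed = maybePrefix "pre-" (Just "fix")  -- "pre-fix"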
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-matches #-} -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | -- Module : Network.AWS.RDS.ModifyDBClusterParameterGroup -- Copyright : (c) 2013-2015 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <brendan.g.hay@gmail.com> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Modifies the parameters of a DB cluster parameter group. To modify more -- than one parameter, submit a list of the following: 'ParameterName', -- 'ParameterValue', and 'ApplyMethod'. A maximum of 20 parameters can be -- modified in a single request. -- -- For more information on Amazon Aurora, see -- <http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Aurora.html Aurora on Amazon RDS> -- in the /Amazon RDS User Guide./ -- -- Changes to dynamic parameters are applied immediately. Changes to static -- parameters require a reboot without failover to the DB cluster -- associated with the parameter group before the change can take effect. -- -- After you create a DB cluster parameter group, you should wait at least -- 5 minutes before creating your first DB cluster that uses that DB -- cluster parameter group as the default parameter group. This allows -- Amazon RDS to fully complete the create action before the parameter -- group is used as the default for a new DB cluster. This is especially -- important for parameters that are critical when creating the default -- database for a DB cluster, such as the character set for the default -- database defined by the 'character_set_database' parameter. You can use -- the /Parameter Groups/ option of the -- <https://console.aws.amazon.com/rds/ Amazon RDS console> or the -- DescribeDBClusterParameters command to verify that your DB cluster -- parameter group has been created or modified. -- -- /See:/ <http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_ModifyDBClusterParameterGroup.html AWS API Reference> for ModifyDBClusterParameterGroup. module Network.AWS.RDS.ModifyDBClusterParameterGroup ( -- * Creating a Request modifyDBClusterParameterGroup , ModifyDBClusterParameterGroup -- * Request Lenses , mdcpgDBClusterParameterGroupName , mdcpgParameters -- * Destructuring the Response , dbClusterParameterGroupNameMessage , DBClusterParameterGroupNameMessage -- * Response Lenses , dcpgnmDBClusterParameterGroupName ) where import Network.AWS.Prelude import Network.AWS.RDS.Types import Network.AWS.RDS.Types.Product import Network.AWS.Request import Network.AWS.Response -- | -- -- /See:/ 'modifyDBClusterParameterGroup' smart constructor. data ModifyDBClusterParameterGroup = ModifyDBClusterParameterGroup' { _mdcpgDBClusterParameterGroupName :: !Text , _mdcpgParameters :: ![Parameter] } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'ModifyDBClusterParameterGroup' with the minimum fields required to make a request. 
-- -- Use one of the following lenses to modify other fields as desired: -- -- * 'mdcpgDBClusterParameterGroupName' -- -- * 'mdcpgParameters' modifyDBClusterParameterGroup :: Text -- ^ 'mdcpgDBClusterParameterGroupName' -> ModifyDBClusterParameterGroup modifyDBClusterParameterGroup pDBClusterParameterGroupName_ = ModifyDBClusterParameterGroup' { _mdcpgDBClusterParameterGroupName = pDBClusterParameterGroupName_ , _mdcpgParameters = mempty } -- | The name of the DB cluster parameter group to modify. mdcpgDBClusterParameterGroupName :: Lens' ModifyDBClusterParameterGroup Text mdcpgDBClusterParameterGroupName = lens _mdcpgDBClusterParameterGroupName (\ s a -> s{_mdcpgDBClusterParameterGroupName = a}); -- | A list of parameters in the DB cluster parameter group to modify. mdcpgParameters :: Lens' ModifyDBClusterParameterGroup [Parameter] mdcpgParameters = lens _mdcpgParameters (\ s a -> s{_mdcpgParameters = a}) . _Coerce; instance AWSRequest ModifyDBClusterParameterGroup where type Rs ModifyDBClusterParameterGroup = DBClusterParameterGroupNameMessage request = postQuery rDS response = receiveXMLWrapper "ModifyDBClusterParameterGroupResult" (\ s h x -> parseXML x) instance ToHeaders ModifyDBClusterParameterGroup where toHeaders = const mempty instance ToPath ModifyDBClusterParameterGroup where toPath = const "/" instance ToQuery ModifyDBClusterParameterGroup where toQuery ModifyDBClusterParameterGroup'{..} = mconcat ["Action" =: ("ModifyDBClusterParameterGroup" :: ByteString), "Version" =: ("2014-10-31" :: ByteString), "DBClusterParameterGroupName" =: _mdcpgDBClusterParameterGroupName, "Parameters" =: toQueryList "Parameter" _mdcpgParameters]
fmapfmapfmap/amazonka
amazonka-rds/gen/Network/AWS/RDS/ModifyDBClusterParameterGroup.hs
mpl-2.0
5,284
0
10
1,011
446
280
166
63
1
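A hedged construction sketch for the request type above: it only builds the value (nothing is sent), and it assumes the 'parameter' smart constructor and the 'pParameterName' / 'pParameterValue' lenses generated in Network.AWS.RDS.Types for this amazonka release; the parameter group name is a placeholder.

{-# LANGUAGE OverloadedStrings #-}
module ModifyParamsDemo where

import Control.Lens ((&), (.~), (?~))
import Network.AWS.RDS

-- Request that sets one static parameter on a (placeholder) parameter group.
exampleRequest :: ModifyDBClusterParameterGroup
exampleRequest =
  modifyDBClusterParameterGroup "example-aurora-params"
    & mdcpgParameters .~
        [ parameter & pParameterName  ?~ "character_set_database"
                    & pParameterValue ?~ "utf8"
        ]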
{-# LANGUAGE CPP #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE UndecidableInstances #-} ----------------------------------------------------------------------------- -- | -- Copyright : (C) 2013-15 Edward Kmett -- License : BSD-style (see the file LICENSE) -- Maintainer : Edward Kmett <ekmett@gmail.com> -- Stability : experimental -- Portability : non-portable -- -- Delpratt, Rahman and Raman's double numbering scheme for LOUDS ----------------------------------------------------------------------------- module Succinct.Tree.LOUDS ( -- * Basic rose tree Rose(..) -- * Conversion , rose , toRose , fromRose , louds -- * LOUDS zipper , Zipper(..) -- * Operations , root , index , top , parent , children , next ) where #if __GLASGOW_HASKELL__ < 710 import Control.Applicative #endif import Control.Comonad import Data.Proxy import Data.Word #if __GLASGOW_HASKELL__ < 710 import Data.Foldable import Data.Traversable #endif import Succinct.Dictionary.Class import Succinct.Dictionary.Rank9 import Succinct.Tree.Types (Rose(..)) import Succinct.Internal.Bit (PackedBits(Packed)) import qualified Data.Vector.Unboxed as U import qualified Data.Vector.Generic as G -- | Jacobson's 1-based LOUDS -- -- Visit every node in level order. Represent each node with @n@ children by @(1^n)0@. -- -- We add an extra @10@ \"superroot\" to the top of the tree to avoid corner cases. louds :: Rose -> [Bool] louds xs = True: False: go 0 where go n = case level n xs [] of [] -> [] ys -> ys ++ go (n+1) {-# INLINE louds #-} -- | Convert a finite 'Rose' to 'Rank9' fromRose :: (Bitwise [Bool] U.Vector) => Rose -> Rank9 (Packed U.Vector) fromRose = fromRose' (Proxy :: Proxy U.Vector) {-# INLINE fromRose #-} -- | Convert a finite 'Rose' to 'Rank9' fromRose' :: ( Bitwise [Bool] v, PackedBits v , G.Vector (Packed v) Word64, G.Vector v Word64) => Proxy v -> Rose -> Rank9 (Packed v) fromRose' p = rank9 p . louds {-# INLINE fromRose' #-} level :: Int -> Rose -> [Bool] -> [Bool] level 0 (Rose cs) xs = replicate (length cs) True ++ (False:xs) level n (Rose cs) xs = Prelude.foldr (level (n - 1)) xs cs -- | -- @Zipper i j t@ stores @i@ in @1..2n@, the position of a 1 in the LOUDS structure @t@ along with -- @j = rank0 t i@. -- -- All combinators in this module preserve these invariants. data Zipper t = Zipper {-# UNPACK #-} !Int {-# UNPACK #-} !Int t deriving (Eq,Show) instance Access a t => Access a (Zipper t) where size (Zipper _ _ t) = size t {-# INLINE size #-} (!) (Zipper _ _ t) i = t ! i {-# INLINE (!) 
#-} instance Dictionary a t => Dictionary a (Zipper t) where rank a (Zipper _ _ t) i = rank a t i {-# INLINE rank #-} select a (Zipper _ _ t) i = select a t i {-# INLINE select #-} instance Select0 t => Select0 (Zipper t) where select0 (Zipper _ _ t) i = select0 t i {-# INLINE select0 #-} instance Select1 t => Select1 (Zipper t) where select1 (Zipper _ _ t) i = select1 t i {-# INLINE select1 #-} instance Ranked t => Ranked (Zipper t) where rank0 (Zipper _ _ t) i = rank0 t i {-# INLINE rank0 #-} rank1 (Zipper _ _ t) i = rank1 t i {-# INLINE rank1 #-} instance Functor Zipper where fmap f (Zipper i j a) = Zipper i j (f a) {-# INLINE fmap #-} instance Foldable Zipper where foldMap f (Zipper _ _ a) = f a {-# INLINE foldMap #-} instance Traversable Zipper where traverse f (Zipper i j a) = Zipper i j <$> f a {-# INLINE traverse #-} instance Comonad Zipper where extract (Zipper _ _ a) = a {-# INLINE extract #-} duplicate w@(Zipper i j _) = Zipper i j w {-# INLINE duplicate #-} -- | The 'root' of our succinct tree. root :: t -> Zipper t root = Zipper 1 0 {-# INLINE root #-} -- | The index of this node in level order, starting at 1 for the root. index :: Zipper t -> Int index (Zipper i j _) = i - j {-# INLINE index #-} -- | Is this node the 'root'? top :: Zipper t -> Bool top (Zipper i _ _) = i == 1 {-# INLINE top #-} -- | The parent of any node @i /= root@, obtained by a legal sequence of operations. parent :: Select1 t => Zipper t -> Zipper t parent (Zipper _ j t) = Zipper i' (i' - j) t where i' = select1 t j {-# INLINE parent #-} -- | positions of all of the children of a node children :: Select0 t => Zipper t -> [Zipper t] children (Zipper i j t) = do let j' = i - j i' <- [select0 t j' + 1..select0 t (j' + 1) - 1] return $ Zipper i' j' t {-# INLINE children #-} -- | next sibling, if any next :: Access Bool t => Zipper t -> Maybe (Zipper t) next (Zipper i j t) | i' <- i + 1, t ! i' = Just $ Zipper i' j t | otherwise = Nothing {-# INLINE next #-} -- | Extract a given sub-tree rose :: Select0 t => Zipper t -> Rose rose i = Rose (rose <$> children i) {-# INLINE rose #-} -- | -- @ -- toRose . fromRose = id -- @ toRose :: Select0 t => t -> Rose toRose = rose . root {-# INLINE toRose #-}
ekmett/succinct
src/Succinct/Tree/LOUDS.hs
bsd-2-clause
5,018
0
13
1,121
1,473
787
686
117
2
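A small sketch of the bit stream 'louds' produces for a concrete tree, using only names exported by the module above; the expected pattern follows the superroot plus level-order encoding described in its comments.

module LoudsDemo where

import Succinct.Tree.LOUDS (Rose (..), louds)

-- Root with two children; the second child has a single child of its own.
tree :: Rose
tree = Rose [Rose [], Rose [Rose []]]

-- Superroot "10", root "110", the two children "0" and "10", grandchild "0":
-- [True,False, True,True,False, False, True,False, False]
bits :: [Bool]
bits = louds tree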
----------------------------------------------------------------------------- -- | -- Module : System.Info -- Copyright : (c) The University of Glasgow 2001 -- License : BSD-style (see the file libraries/base/LICENSE) -- -- Maintainer : libraries@haskell.org -- Stability : experimental -- Portability : portable -- -- Information about the characteristics of the host -- system lucky enough to run your program. -- ----------------------------------------------------------------------------- module System.Info ( os, -- :: String arch, -- :: String compilerName, -- :: String compilerVersion -- :: Version ) where import Prelude import Data.Version -- | The version of 'compilerName' with which the program was compiled -- or is being interpreted. compilerVersion :: Version compilerVersion = Version {versionBranch=[maj,min], versionTags=[]} where (maj,min) = compilerVersionRaw `divMod` 100 -- | The operating system on which the program is running. os :: String -- | The machine architecture on which the program is running. arch :: String -- | The Haskell implementation with which the program was compiled -- or is being interpreted. compilerName :: String compilerVersionRaw :: Int #if defined(__NHC__) #include "OSInfo.hs" compilerName = "nhc98" compilerVersionRaw = __NHC__ #elif defined(__GLASGOW_HASKELL__) #include "ghcplatform.h" os = HOST_OS arch = HOST_ARCH compilerName = "ghc" compilerVersionRaw = __GLASGOW_HASKELL__ #elif defined(__HUGS__) #include "platform.h" os = HOST_OS arch = HOST_ARCH compilerName = "hugs" compilerVersionRaw = 0 -- ToDo #else #error Unknown compiler name #endif
FranklinChen/hugs98-plus-Sep2006
packages/base/System/Info.hs
bsd-3-clause
1,695
0
7
300
138
96
42
-1
-1
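A short example of consuming the four exports above; 'showVersion' from Data.Version renders the reconstructed two-component compiler version, and the printed values depend on the compiler and host.

module InfoDemo where

import Data.Version (showVersion)
import System.Info (arch, compilerName, compilerVersion, os)

main :: IO ()
main =
  putStrLn $ compilerName ++ "-" ++ showVersion compilerVersion
               ++ " on " ++ arch ++ "/" ++ os
  -- e.g. "ghc-6.6 on i386/mingw32"; exact output varies by build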
{-# LANGUAGE FlexibleInstances, FunctionalDependencies, MultiParamTypeClasses, TypeFamilies, UndecidableInstances #-} {-# OPTIONS -Wall #-} -- successful import Prelude hiding (Eq(..), not, (&&), (||)) import qualified Prelude (Eq(..), not, (&&), (||)) infix 4 ==, /= infixr 3 && infixr 2 || class Boolean b where true, false :: b not :: b -> b (&&), (||) :: b -> b -> b instance Boolean Bool where true = True false = False not = Prelude.not (&&) = (Prelude.&&) (||) = (Prelude.||) type family BoolFor a :: * class Eq a where (==) :: a -> a -> BoolFor a (/=) :: a -> a -> BoolFor a type instance BoolFor Double = Bool instance Eq Double where (==) = (Prelude.==) (/=) = (Prelude./=) newtype Hako a = Hako { unhako :: a } type instance BoolFor (Hako a) = Hako Bool instance (Eq a, BoolFor a ~ Bool) => Eq (Hako a) where (Hako a) == (Hako b) = Hako (a==b) (Hako a) /= (Hako b) = Hako (a/=b) main :: IO () main = do putStrLn "hi" let ans :: Double ans = 42 print $ 6*7==ans print $ 5*8==ans
nushio3/Paraiso
attic/Ord/2.hs
bsd-3-clause
1,095
5
10
294
478
267
211
39
1
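A sketch of how the family-indexed (==) above resolves at both result types, assuming the declarations from the file are in scope.

-- The same (==) yields a plain Bool for Double and a Hako Bool for Hako Double.
plainResult :: Bool
plainResult = (6 * 7 :: Double) == 42                     -- True; BoolFor Double ~ Bool

wrappedResult :: Bool
wrappedResult = unhako (Hako (1.0 :: Double) == Hako 2.0) -- False; result is Hako Bool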
{- (c) The University of Glasgow 2006 (c) The GRASP/AQUA Project, Glasgow University, 1992-1998 The @TyCon@ datatype -} {-# LANGUAGE CPP, DeriveDataTypeable #-} module TyCon( -- * Main TyCon data types TyCon, FieldLabel, AlgTyConRhs(..), visibleDataCons, TyConParent(..), isNoParent, FamTyConFlav(..), Role(..), -- ** Constructing TyCons mkAlgTyCon, mkClassTyCon, mkFunTyCon, mkPrimTyCon, mkKindTyCon, mkLiftedPrimTyCon, mkTupleTyCon, mkSynonymTyCon, mkFamilyTyCon, mkPromotedDataCon, mkPromotedTyCon, -- ** Predicates on TyCons isAlgTyCon, isClassTyCon, isFamInstTyCon, isFunTyCon, isPrimTyCon, isTupleTyCon, isUnboxedTupleTyCon, isBoxedTupleTyCon, isTypeSynonymTyCon, isDecomposableTyCon, isPromotedDataCon, isPromotedTyCon, isPromotedDataCon_maybe, isPromotedTyCon_maybe, promotableTyCon_maybe, promoteTyCon, isDataTyCon, isProductTyCon, isDataProductTyCon_maybe, isEnumerationTyCon, isNewTyCon, isAbstractTyCon, isFamilyTyCon, isOpenFamilyTyCon, isTypeFamilyTyCon, isDataFamilyTyCon, isOpenTypeFamilyTyCon, isClosedSynFamilyTyConWithAxiom_maybe, isBuiltInSynFamTyCon_maybe, isUnLiftedTyCon, isGadtSyntaxTyCon, isDistinctTyCon, isDistinctAlgRhs, isTyConAssoc, tyConAssoc_maybe, isRecursiveTyCon, isImplicitTyCon, -- ** Extracting information out of TyCons tyConName, tyConKind, tyConUnique, tyConTyVars, tyConCType, tyConCType_maybe, tyConDataCons, tyConDataCons_maybe, tyConSingleDataCon_maybe, tyConSingleDataCon, tyConSingleAlgDataCon_maybe, tyConFamilySize, tyConStupidTheta, tyConArity, tyConRoles, tyConParent, tyConTuple_maybe, tyConClass_maybe, tyConFamInst_maybe, tyConFamInstSig_maybe, tyConFamilyCoercion_maybe, synTyConDefn_maybe, synTyConRhs_maybe, famTyConFlav_maybe, algTyConRhs, newTyConRhs, newTyConEtadArity, newTyConEtadRhs, unwrapNewTyCon_maybe, unwrapNewTyConEtad_maybe, -- ** Manipulating TyCons expandSynTyCon_maybe, makeTyConAbstract, newTyConCo, newTyConCo_maybe, pprPromotionQuote, -- * Primitive representations of Types PrimRep(..), PrimElemRep(..), tyConPrimRep, isVoidRep, isGcPtrRep, primRepSizeW, primElemRepSizeB, primRepIsFloat, -- * Recursion breaking RecTcChecker, initRecTc, checkRecTc ) where #include "HsVersions.h" import {-# SOURCE #-} TypeRep ( Kind, Type, PredType ) import {-# SOURCE #-} DataCon ( DataCon, dataConExTyVars ) import Var import Class import BasicTypes import DynFlags import ForeignCall import Name import NameSet import CoAxiom import PrelNames import Maybes import Outputable import Constants import Util import qualified Data.Data as Data import Data.Typeable (Typeable) {- ----------------------------------------------- Notes about type families ----------------------------------------------- Note [Type synonym families] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * Type synonym families, also known as "type functions", map directly onto the type functions in FC: type family F a :: * type instance F Int = Bool ..etc... * Reply "yes" to isTypeFamilyTyCon, and isFamilyTyCon * From the user's point of view (F Int) and Bool are simply equivalent types. * A Haskell 98 type synonym is a degenerate form of a type synonym family. * Type functions can't appear in the LHS of a type function: type instance F (F Int) = ... -- BAD! 
* Translation of type family decl: type family F a :: * translates to a FamilyTyCon 'F', whose FamTyConFlav is OpenSynFamilyTyCon type family G a :: * where G Int = Bool G Bool = Char G a = () translates to a FamilyTyCon 'G', whose FamTyConFlav is ClosedSynFamilyTyCon, with the appropriate CoAxiom representing the equations * In the future we might want to support * injective type families (allow decomposition) but we don't at the moment [2013] Note [Data type families] ~~~~~~~~~~~~~~~~~~~~~~~~~ See also Note [Wrappers for data instance tycons] in MkId.hs * Data type families are declared thus data family T a :: * data instance T Int = T1 | T2 Bool Here T is the "family TyCon". * Reply "yes" to isDataFamilyTyCon, and isFamilyTyCon * The user does not see any "equivalent types" as he did with type synonym families. He just sees constructors with types T1 :: T Int T2 :: Bool -> T Int * Here's the FC version of the above declarations: data T a data R:TInt = T1 | T2 Bool axiom ax_ti : T Int ~ R:TInt The R:TInt is the "representation TyCons". It has an AlgTyConParent of FamInstTyCon T [Int] ax_ti * The axiom ax_ti may be eta-reduced; see Note [Eta reduction for data family axioms] in TcInstDcls * The data contructor T2 has a wrapper (which is what the source-level "T2" invokes): $WT2 :: Bool -> T Int $WT2 b = T2 b `cast` sym ax_ti * A data instance can declare a fully-fledged GADT: data instance T (a,b) where X1 :: T (Int,Bool) X2 :: a -> b -> T (a,b) Here's the FC version of the above declaration: data R:TPair a where X1 :: R:TPair Int Bool X2 :: a -> b -> R:TPair a b axiom ax_pr :: T (a,b) ~ R:TPair a b $WX1 :: forall a b. a -> b -> T (a,b) $WX1 a b (x::a) (y::b) = X2 a b x y `cast` sym (ax_pr a b) The R:TPair are the "representation TyCons". We have a bit of work to do, to unpick the result types of the data instance declaration for T (a,b), to get the result type in the representation; e.g. T (a,b) --> R:TPair a b The representation TyCon R:TList, has an AlgTyConParent of FamInstTyCon T [(a,b)] ax_pr * Notice that T is NOT translated to a FC type function; it just becomes a "data type" with no constructors, which can be coerced inot into R:TInt, R:TPair by the axioms. These axioms axioms come into play when (and *only* when) you - use a data constructor - do pattern matching Rather like newtype, in fact As a result - T behaves just like a data type so far as decomposition is concerned - (T Int) is not implicitly converted to R:TInt during type inference. Indeed the latter type is unknown to the programmer. - There *is* an instance for (T Int) in the type-family instance environment, but it is only used for overlap checking - It's fine to have T in the LHS of a type function: type instance F (T a) = [a] It was this last point that confused me! The big thing is that you should not think of a data family T as a *type function* at all, not even an injective one! We can't allow even injective type functions on the LHS of a type function: type family injective G a :: * type instance F (G Int) = Bool is no good, even if G is injective, because consider type instance G Int = Bool type instance F Bool = Char So a data type family is not an injective type function. It's just a data type with some axioms that connect it to other data types. Note [Associated families and their parent class] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ *Associated* families are just like *non-associated* families, except that they have a TyConParent of AssocFamilyTyCon, which identifies the parent class. 
However there is an important sharing relationship between * the tyConTyVars of the parent Class * the tyConTyvars of the associated TyCon class C a b where data T p a type F a q b Here the 'a' and 'b' are shared with the 'Class'; that is, they have the same Unique. This is important. In an instance declaration we expect * all the shared variables to be instantiated the same way * the non-shared variables of the associated type should not be instantiated at all instance C [x] (Tree y) where data T p [x] = T1 x | T2 p type F [x] q (Tree y) = (x,y,q) Note [TyCon Role signatures] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Every tycon has a role signature, assigning a role to each of the tyConTyVars (or of equal length to the tyConArity, if there are no tyConTyVars). An example demonstrates these best: say we have a tycon T, with parameters a at nominal, b at representational, and c at phantom. Then, to prove representational equality between T a1 b1 c1 and T a2 b2 c2, we need to have nominal equality between a1 and a2, representational equality between b1 and b2, and nothing in particular (i.e., phantom equality) between c1 and c2. This might happen, say, with the following declaration: data T a b c where MkT :: b -> T Int b c Data and class tycons have their roles inferred (see inferRoles in TcTyDecls), as do vanilla synonym tycons. Family tycons have all parameters at role N, though it is conceivable that we could relax this restriction. (->)'s and tuples' parameters are at role R. Each primitive tycon declares its roles; it's worth noting that (~#)'s parameters are at role N. Promoted data constructors' type arguments are at role R. All kind arguments are at role N. ************************************************************************ * * \subsection{The data type} * * ************************************************************************ -} -- | TyCons represent type constructors. Type constructors are introduced by -- things such as: -- -- 1) Data declarations: @data Foo = ...@ creates the @Foo@ type constructor of -- kind @*@ -- -- 2) Type synonyms: @type Foo = ...@ creates the @Foo@ type constructor -- -- 3) Newtypes: @newtype Foo a = MkFoo ...@ creates the @Foo@ type constructor -- of kind @* -> *@ -- -- 4) Class declarations: @class Foo where@ creates the @Foo@ type constructor -- of kind @*@ -- -- This data type also encodes a number of primitive, built in type constructors -- such as those for function and tuple types. -- If you edit this type, you may need to update the GHC formalism -- See Note [GHC Formalism] in coreSyn/CoreLint.hs data TyCon = -- | The function type constructor, @(->)@ FunTyCon { tyConUnique :: Unique, -- ^ A Unique of this TyCon. Invariant: -- identical to Unique of Name stored in -- tyConName field. tyConName :: Name, -- ^ Name of the constructor tyConKind :: Kind, -- ^ Kind of this TyCon (full kind, not just -- the return kind) tyConArity :: Arity -- ^ Number of arguments this TyCon must -- receive to be considered saturated -- (including implicit kind variables) } -- | Algebraic type constructors, which are defined to be those -- arising @data@ type and @newtype@ declarations. All these -- constructors are lifted and boxed. See 'AlgTyConRhs' for more -- information. | AlgTyCon { tyConUnique :: Unique, -- ^ A Unique of this TyCon. Invariant: -- identical to Unique of Name stored in -- tyConName field. 
tyConName :: Name, -- ^ Name of the constructor tyConKind :: Kind, -- ^ Kind of this TyCon (full kind, not just -- the return kind) tyConArity :: Arity, -- ^ Number of arguments this TyCon must -- receive to be considered saturated -- (including implicit kind variables) tyConTyVars :: [TyVar], -- ^ The kind and type variables used in the -- type constructor. -- Invariant: length tyvars = arity -- Precisely, this list scopes over: -- -- 1. The 'algTcStupidTheta' -- 2. The cached types in algTyConRhs.NewTyCon -- 3. The family instance types if present -- -- Note that it does /not/ scope over the data -- constructors. tcRoles :: [Role], -- ^ The role for each type variable -- This list has the same length as tyConTyVars -- See also Note [TyCon Role signatures] tyConCType :: Maybe CType,-- ^ The C type that should be used -- for this type when using the FFI -- and CAPI algTcGadtSyntax :: Bool, -- ^ Was the data type declared with GADT -- syntax? If so, that doesn't mean it's a -- true GADT; only that the "where" form -- was used. This field is used only to -- guide pretty-printing algTcStupidTheta :: [PredType], -- ^ The \"stupid theta\" for the data -- type (always empty for GADTs). A -- \"stupid theta\" is the context to -- the left of an algebraic type -- declaration, e.g. @Eq a@ in the -- declaration @data Eq a => T a ...@. algTcRhs :: AlgTyConRhs, -- ^ Contains information about the -- data constructors of the algebraic type algTcRec :: RecFlag, -- ^ Tells us whether the data type is part -- of a mutually-recursive group or not algTcParent :: TyConParent, -- ^ Gives the class or family declaration -- 'TyCon' for derived 'TyCon's representing -- class or family instances, respectively. -- See also 'synTcParent' tcPromoted :: Maybe TyCon -- ^ Promoted TyCon, if any } -- | Represents type synonyms | SynonymTyCon { tyConUnique :: Unique, -- ^ A Unique of this TyCon. Invariant: -- identical to Unique of Name stored in -- tyConName field. tyConName :: Name, -- ^ Name of the constructor tyConKind :: Kind, -- ^ Kind of this TyCon (full kind, not just -- the return kind) tyConArity :: Arity, -- ^ Number of arguments this TyCon must -- receive to be considered saturated -- (including implicit kind variables) tyConTyVars :: [TyVar], -- ^ List of type and kind variables in this -- TyCon. Includes implicit kind variables. -- Invariant: length tyConTyVars = tyConArity tcRoles :: [Role], -- ^ The role for each type variable -- This list has the same length as tyConTyVars -- See also Note [TyCon Role signatures] synTcRhs :: Type -- ^ Contains information about the expansion -- of the synonym } -- | Represents type families | FamilyTyCon { tyConUnique :: Unique, -- ^ A Unique of this TyCon. Invariant: -- identical to Unique of Name stored in -- tyConName field. tyConName :: Name, -- ^ Name of the constructor tyConKind :: Kind, -- ^ Kind of this TyCon (full kind, not just -- the return kind) tyConArity :: Arity, -- ^ Number of arguments this TyCon must -- receive to be considered saturated -- (including implicit kind variables) tyConTyVars :: [TyVar], -- ^ The kind and type variables used in the -- type constructor. -- Invariant: length tyvars = arity -- Precisely, this list scopes over: -- -- 1. The 'algTcStupidTheta' -- 2. The cached types in 'algTyConRhs.NewTyCon' -- 3. The family instance types if present -- -- Note that it does /not/ scope over the data -- constructors. famTcFlav :: FamTyConFlav, -- ^ Type family flavour: open, closed, -- abstract, built-in. 
See comments for -- FamTyConFlav famTcParent :: TyConParent -- ^ TyCon of enclosing class for -- associated type families } -- | Primitive types; cannot be defined in Haskell. This includes -- the usual suspects (such as @Int#@) as well as foreign-imported -- types and kinds | PrimTyCon { tyConUnique :: Unique, -- ^ A Unique of this TyCon. Invariant: -- identical to Unique of Name stored in -- tyConName field. tyConName :: Name, -- ^ Name of the constructor tyConKind :: Kind, -- ^ Kind of this TyCon (full kind, not just -- the return kind) tyConArity :: Arity, -- ^ Number of arguments this TyCon must -- receive to be considered saturated -- (including implicit kind variables) tcRoles :: [Role], -- ^ The role for each type variable -- This list has the same length as tyConTyVars -- See also Note [TyCon Role signatures] primTyConRep :: PrimRep,-- ^ Many primitive tycons are unboxed, but -- some are boxed (represented by -- pointers). This 'PrimRep' holds that -- information. Only relevant if tyConKind = * isUnLifted :: Bool -- ^ Most primitive tycons are unlifted (may -- not contain bottom) but other are lifted, -- e.g. @RealWorld@ } -- | Represents promoted data constructor. | PromotedDataCon { -- See Note [Promoted data constructors] tyConUnique :: Unique, -- ^ Same Unique as the data constructor tyConName :: Name, -- ^ Same Name as the data constructor tyConArity :: Arity, tyConKind :: Kind, -- ^ Translated type of the data constructor tcRoles :: [Role], -- ^ Roles: N for kind vars, R for type vars dataCon :: DataCon -- ^ Corresponding data constructor } -- | Represents promoted type constructor. | PromotedTyCon { tyConUnique :: Unique, -- ^ Same Unique as the type constructor tyConName :: Name, -- ^ Same Name as the type constructor tyConArity :: Arity, -- ^ n if ty_con :: * -> ... -> * n times tyConKind :: Kind, -- ^ Always TysPrim.superKind ty_con :: TyCon -- ^ Corresponding type constructor } deriving Typeable -- | Names of the fields in an algebraic record type type FieldLabel = Name -- | Represents right-hand-sides of 'TyCon's for algebraic types data AlgTyConRhs -- | Says that we know nothing about this data type, except that -- it's represented by a pointer. Used when we export a data type -- abstractly into an .hi file. = AbstractTyCon Bool -- True <=> It's definitely a distinct data type, -- equal only to itself; ie not a newtype -- False <=> Not sure -- See Note [AbstractTyCon and type equality] -- | Represents an open type family without a fixed right hand -- side. Additional instances can appear at any time. -- -- These are introduced by either a top level declaration: -- -- > data T a :: * -- -- Or an associated data type declaration, within a class declaration: -- -- > class C a b where -- > data T b :: * | DataFamilyTyCon -- | Information about those 'TyCon's derived from a @data@ -- declaration. This includes data types with no constructors at -- all. | DataTyCon { data_cons :: [DataCon], -- ^ The data type constructors; can be empty if the -- user declares the type to have no constructors -- -- INVARIANT: Kept in order of increasing 'DataCon' -- tag (see the tag assignment in DataCon.mkDataCon) is_enum :: Bool -- ^ Cached value: is this an enumeration type? -- See Note [Enumeration types] } | TupleTyCon { -- A boxed, unboxed, or constraint tuple data_con :: DataCon, -- NB: it can be an *unboxed* tuple tup_sort :: TupleSort -- ^ Is this a boxed, unboxed or constraint -- tuple? 
} -- | Information about those 'TyCon's derived from a @newtype@ declaration | NewTyCon { data_con :: DataCon, -- ^ The unique constructor for the @newtype@. -- It has no existentials nt_rhs :: Type, -- ^ Cached value: the argument type of the -- constructor, which is just the representation -- type of the 'TyCon' (remember that @newtype@s -- do not exist at runtime so need a different -- representation type). -- -- The free 'TyVar's of this type are the -- 'tyConTyVars' from the corresponding 'TyCon' nt_etad_rhs :: ([TyVar], Type), -- ^ Same as the 'nt_rhs', but this time eta-reduced. -- Hence the list of 'TyVar's in this field may be -- shorter than the declared arity of the 'TyCon'. -- See Note [Newtype eta] nt_co :: CoAxiom Unbranched -- The axiom coercion that creates the @newtype@ -- from the representation 'Type'. -- See Note [Newtype coercions] -- Invariant: arity = #tvs in nt_etad_rhs; -- See Note [Newtype eta] -- Watch out! If any newtypes become transparent -- again check Trac #1072. } {- Note [AbstractTyCon and type equality] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ TODO -} -- | Extract those 'DataCon's that we are able to learn about. Note -- that visibility in this sense does not correspond to visibility in -- the context of any particular user program! visibleDataCons :: AlgTyConRhs -> [DataCon] visibleDataCons (AbstractTyCon {}) = [] visibleDataCons DataFamilyTyCon {} = [] visibleDataCons (DataTyCon{ data_cons = cs }) = cs visibleDataCons (NewTyCon{ data_con = c }) = [c] visibleDataCons (TupleTyCon{ data_con = c }) = [c] -- ^ Both type classes as well as family instances imply implicit -- type constructors. These implicit type constructors refer to their parent -- structure (ie, the class or family from which they derive) using a type of -- the following form. We use 'TyConParent' for both algebraic and synonym -- types, but the variant 'ClassTyCon' will only be used by algebraic 'TyCon's. data TyConParent = -- | An ordinary type constructor has no parent. NoParentTyCon -- | Type constructors representing a class dictionary. -- See Note [ATyCon for classes] in TypeRep | ClassTyCon Class -- INVARIANT: the classTyCon of this Class is the -- current tycon -- | An *associated* type of a class. | AssocFamilyTyCon Class -- The class in whose declaration the family is declared -- See Note [Associated families and their parent class] -- | Type constructors representing an instance of a *data* family. -- Parameters: -- -- 1) The type family in question -- -- 2) Instance types; free variables are the 'tyConTyVars' -- of the current 'TyCon' (not the family one). INVARIANT: -- the number of types matches the arity of the family 'TyCon' -- -- 3) A 'CoTyCon' identifying the representation -- type with the type instance family | FamInstTyCon -- See Note [Data type families] (CoAxiom Unbranched) -- The coercion axiom. -- Generally of kind T ty1 ty2 ~ R:T a b c -- where T is the family TyCon, -- and R:T is the representation TyCon (ie this one) -- and a,b,c are the tyConTyVars of this TyCon -- -- BUT may be eta-reduced; see TcInstDcls -- Note [Eta reduction for data family axioms] -- Cached fields of the CoAxiom, but adjusted to -- use the tyConTyVars of this TyCon TyCon -- The family TyCon [Type] -- Argument types (mentions the tyConTyVars of this TyCon) -- Match in length the tyConTyVars of the family TyCon -- E.g. data intance T [a] = ... -- gives a representation tycon: -- data R:TList a = ... 
-- axiom co a :: T [a] ~ R:TList a -- with R:TList's algTcParent = FamInstTyCon T [a] co instance Outputable TyConParent where ppr NoParentTyCon = text "No parent" ppr (ClassTyCon cls) = text "Class parent" <+> ppr cls ppr (AssocFamilyTyCon cls) = text "Class parent (assoc. family)" <+> ppr cls ppr (FamInstTyCon _ tc tys) = text "Family parent (family instance)" <+> ppr tc <+> sep (map ppr tys) -- | Checks the invariants of a 'TyConParent' given the appropriate type class -- name, if any okParent :: Name -> TyConParent -> Bool okParent _ NoParentTyCon = True okParent tc_name (AssocFamilyTyCon cls) = tc_name `elem` map tyConName (classATs cls) okParent tc_name (ClassTyCon cls) = tc_name == tyConName (classTyCon cls) okParent _ (FamInstTyCon _ fam_tc tys) = tyConArity fam_tc == length tys isNoParent :: TyConParent -> Bool isNoParent NoParentTyCon = True isNoParent _ = False -------------------- -- | Information pertaining to the expansion of a type synonym (@type@) data FamTyConFlav = -- | An open type synonym family e.g. @type family F x y :: * -> *@ OpenSynFamilyTyCon -- | A closed type synonym family e.g. -- @type family F x where { F Int = Bool }@ | ClosedSynFamilyTyCon (Maybe (CoAxiom Branched)) -- See Note [Closed type families] -- | A closed type synonym family declared in an hs-boot file with -- type family F a where .. | AbstractClosedSynFamilyTyCon -- | Built-in type family used by the TypeNats solver | BuiltInSynFamTyCon BuiltInSynFamily {- Note [Closed type families] ~~~~~~~~~~~~~~~~~~~~~~~~~ * In an open type family you can add new instances later. This is the usual case. * In a closed type family you can only put equations where the family is defined. A non-empty closed type family has a single axiom with multiple branches, stored in the 'ClosedSynFamilyTyCon' constructor. A closed type family with no equations does not have an axiom, because there is nothing for the axiom to prove! Note [Promoted data constructors] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ A data constructor can be promoted to become a type constructor, via the PromotedTyCon alternative in TyCon. * Only data constructors with (a) no kind polymorphism (b) no constraints in its type (eg GADTs) are promoted. Existentials are ok; see Trac #7347. * The TyCon promoted from a DataCon has the *same* Name and Unique as the DataCon. Eg. If the data constructor Data.Maybe.Just(unique 78, say) is promoted to a TyCon whose name is Data.Maybe.Just(unique 78) * The *kind* of a promoted DataCon may be polymorphic. Example: type of DataCon Just :: forall (a:*). a -> Maybe a kind of (promoted) tycon Just :: forall (a:box). a -> Maybe a The kind is not identical to the type, because of the */box kind signature on the forall'd variable; so the tyConKind field of PromotedTyCon is not identical to the dataConUserType of the DataCon. But it's the same modulo changing the variable kinds, done by DataCon.promoteType. * Small note: We promote the *user* type of the DataCon. Eg data T = MkT {-# UNPACK #-} !(Bool, Bool) The promoted kind is MkT :: (Bool,Bool) -> T *not* MkT :: Bool -> Bool -> T Note [Enumeration types] ~~~~~~~~~~~~~~~~~~~~~~~~ We define datatypes with no constructors to *not* be enumerations; this fixes trac #2578, Otherwise we end up generating an empty table for <mod>_<type>_closure_tbl which is used by tagToEnum# to map Int# to constructors in an enumeration. The empty table apparently upset the linker. Moreover, all the data constructor must be enumerations, meaning they have type (forall abc. T a b c). 
GADTs are not enumerations. For example consider data T a where T1 :: T Int T2 :: T Bool T3 :: T a What would [T1 ..] be? [T1,T3] :: T Int? Easiest thing is to exclude them. See Trac #4528. Note [Newtype coercions] ~~~~~~~~~~~~~~~~~~~~~~~~ The NewTyCon field nt_co is a CoAxiom which is used for coercing from the representation type of the newtype, to the newtype itself. For example, newtype T a = MkT (a -> a) the NewTyCon for T will contain nt_co = CoT where CoT t : T t ~ t -> t. In the case that the right hand side is a type application ending with the same type variables as the left hand side, we "eta-contract" the coercion. So if we had newtype S a = MkT [a] then we would generate the arity 0 axiom CoS : S ~ []. The primary reason we do this is to make newtype deriving cleaner. In the paper we'd write axiom CoT : (forall t. T t) ~ (forall t. [t]) and then when we used CoT at a particular type, s, we'd say CoT @ s which encodes as (TyConApp instCoercionTyCon [TyConApp CoT [], s]) Note [Newtype eta] ~~~~~~~~~~~~~~~~~~ Consider newtype Parser a = MkParser (IO a) deriving Monad Are these two types equal (to Core)? Monad Parser Monad IO which we need to make the derived instance for Monad Parser. Well, yes. But to see that easily we eta-reduce the RHS type of Parser, in this case to ([], Froogle), so that even unsaturated applications of Parser will work right. This eta reduction is done when the type constructor is built, and cached in NewTyCon. Here's an example that I think showed up in practice Source code: newtype T a = MkT [a] newtype Foo m = MkFoo (forall a. m a -> Int) w1 :: Foo [] w1 = ... w2 :: Foo T w2 = MkFoo (\(MkT x) -> case w1 of MkFoo f -> f x) After desugaring, and discarding the data constructors for the newtypes, we get: w2 = w1 `cast` Foo CoT so the coercion tycon CoT must have kind: T ~ [] and arity: 0 ************************************************************************ * * \subsection{PrimRep} * * ************************************************************************ Note [rep swamp] GHC has a rich selection of types that represent "primitive types" of one kind or another. Each of them makes a different set of distinctions, and mostly the differences are for good reasons, although it's probably true that we could merge some of these. Roughly in order of "includes more information": - A Width (cmm/CmmType) is simply a binary value with the specified number of bits. It may represent a signed or unsigned integer, a floating-point value, or an address. data Width = W8 | W16 | W32 | W64 | W80 | W128 - Size, which is used in the native code generator, is Width + floating point information. data Size = II8 | II16 | II32 | II64 | FF32 | FF64 | FF80 it is necessary because e.g. the instruction to move a 64-bit float on x86 (movsd) is different from the instruction to move a 64-bit integer (movq), so the mov instruction is parameterised by Size. - CmmType wraps Width with more information: GC ptr, float, or other value. data CmmType = CmmType CmmCat Width data CmmCat -- "Category" (not exported) = GcPtrCat -- GC pointer | BitsCat -- Non-pointer | FloatCat -- Float It is important to have GcPtr information in Cmm, since we generate info tables containing pointerhood for the GC from this. As for why we have float (and not signed/unsigned) here, see Note [Signed vs unsigned]. - ArgRep makes only the distinctions necessary for the call and return conventions of the STG machine. It is essentially CmmType + void. 
- PrimRep makes a few more distinctions than ArgRep: it divides non-GC-pointers into signed/unsigned and addresses, information that is necessary for passing these values to foreign functions. There's another tension here: whether the type encodes its size in bytes, or whether its size depends on the machine word size. Width and CmmType have the size built-in, whereas ArgRep and PrimRep do not. This means to turn an ArgRep/PrimRep into a CmmType requires DynFlags. On the other hand, CmmType includes some "nonsense" values, such as CmmType GcPtrCat W32 on a 64-bit machine. -} -- | A 'PrimRep' is an abstraction of a type. It contains information that -- the code generator needs in order to pass arguments, return results, -- and store values of this type. data PrimRep = VoidRep | PtrRep | IntRep -- ^ Signed, word-sized value | WordRep -- ^ Unsigned, word-sized value | Int64Rep -- ^ Signed, 64 bit value (with 32-bit words only) | Word64Rep -- ^ Unsigned, 64 bit value (with 32-bit words only) | AddrRep -- ^ A pointer, but /not/ to a Haskell value (use 'PtrRep') | FloatRep | DoubleRep | VecRep Int PrimElemRep -- ^ A vector deriving( Eq, Show ) data PrimElemRep = Int8ElemRep | Int16ElemRep | Int32ElemRep | Int64ElemRep | Word8ElemRep | Word16ElemRep | Word32ElemRep | Word64ElemRep | FloatElemRep | DoubleElemRep deriving( Eq, Show ) instance Outputable PrimRep where ppr r = text (show r) instance Outputable PrimElemRep where ppr r = text (show r) isVoidRep :: PrimRep -> Bool isVoidRep VoidRep = True isVoidRep _other = False isGcPtrRep :: PrimRep -> Bool isGcPtrRep PtrRep = True isGcPtrRep _ = False -- | Find the size of a 'PrimRep', in words primRepSizeW :: DynFlags -> PrimRep -> Int primRepSizeW _ IntRep = 1 primRepSizeW _ WordRep = 1 primRepSizeW dflags Int64Rep = wORD64_SIZE `quot` wORD_SIZE dflags primRepSizeW dflags Word64Rep = wORD64_SIZE `quot` wORD_SIZE dflags primRepSizeW _ FloatRep = 1 -- NB. might not take a full word primRepSizeW dflags DoubleRep = dOUBLE_SIZE dflags `quot` wORD_SIZE dflags primRepSizeW _ AddrRep = 1 primRepSizeW _ PtrRep = 1 primRepSizeW _ VoidRep = 0 primRepSizeW dflags (VecRep len rep) = len * primElemRepSizeB rep `quot` wORD_SIZE dflags primElemRepSizeB :: PrimElemRep -> Int primElemRepSizeB Int8ElemRep = 1 primElemRepSizeB Int16ElemRep = 2 primElemRepSizeB Int32ElemRep = 4 primElemRepSizeB Int64ElemRep = 8 primElemRepSizeB Word8ElemRep = 1 primElemRepSizeB Word16ElemRep = 2 primElemRepSizeB Word32ElemRep = 4 primElemRepSizeB Word64ElemRep = 8 primElemRepSizeB FloatElemRep = 4 primElemRepSizeB DoubleElemRep = 8 -- | Return if Rep stands for floating type, -- returns Nothing for vector types. primRepIsFloat :: PrimRep -> Maybe Bool primRepIsFloat FloatRep = Just True primRepIsFloat DoubleRep = Just True primRepIsFloat (VecRep _ _) = Nothing primRepIsFloat _ = Just False {- ************************************************************************ * * \subsection{TyCon Construction} * * ************************************************************************ Note: the TyCon constructors all take a Kind as one argument, even though they could, in principle, work out their Kind from their other arguments. But to do so they need functions from Types, and that makes a nasty module mutual-recursion. And they aren't called from many places. So we compromise, and move their Kind calculation to the call site. -} -- | Given the name of the function type constructor and it's kind, create the -- corresponding 'TyCon'. 
It is reccomended to use 'TypeRep.funTyCon' if you want -- this functionality mkFunTyCon :: Name -> Kind -> TyCon mkFunTyCon name kind = FunTyCon { tyConUnique = nameUnique name, tyConName = name, tyConKind = kind, tyConArity = 2 } -- | This is the making of an algebraic 'TyCon'. Notably, you have to -- pass in the generic (in the -XGenerics sense) information about the -- type constructor - you can get hold of it easily (see Generics -- module) mkAlgTyCon :: Name -> Kind -- ^ Kind of the resulting 'TyCon' -> [TyVar] -- ^ 'TyVar's scoped over: see 'tyConTyVars'. -- Arity is inferred from the length of this -- list -> [Role] -- ^ The roles for each TyVar -> Maybe CType -- ^ The C type this type corresponds to -- when using the CAPI FFI -> [PredType] -- ^ Stupid theta: see 'algTcStupidTheta' -> AlgTyConRhs -- ^ Information about dat aconstructors -> TyConParent -> RecFlag -- ^ Is the 'TyCon' recursive? -> Bool -- ^ Was the 'TyCon' declared with GADT syntax? -> Maybe TyCon -- ^ Promoted version -> TyCon mkAlgTyCon name kind tyvars roles cType stupid rhs parent is_rec gadt_syn prom_tc = AlgTyCon { tyConName = name, tyConUnique = nameUnique name, tyConKind = kind, tyConArity = length tyvars, tyConTyVars = tyvars, tcRoles = roles, tyConCType = cType, algTcStupidTheta = stupid, algTcRhs = rhs, algTcParent = ASSERT2( okParent name parent, ppr name $$ ppr parent ) parent, algTcRec = is_rec, algTcGadtSyntax = gadt_syn, tcPromoted = prom_tc } -- | Simpler specialization of 'mkAlgTyCon' for classes mkClassTyCon :: Name -> Kind -> [TyVar] -> [Role] -> AlgTyConRhs -> Class -> RecFlag -> TyCon mkClassTyCon name kind tyvars roles rhs clas is_rec = mkAlgTyCon name kind tyvars roles Nothing [] rhs (ClassTyCon clas) is_rec False Nothing -- Class TyCons are not promoted mkTupleTyCon :: Name -> Kind -- ^ Kind of the resulting 'TyCon' -> Arity -- ^ Arity of the tuple -> [TyVar] -- ^ 'TyVar's scoped over: see 'tyConTyVars' -> DataCon -> TupleSort -- ^ Whether the tuple is boxed or unboxed -> Maybe TyCon -- ^ Promoted version -> TyConParent -> TyCon mkTupleTyCon name kind arity tyvars con sort prom_tc parent = AlgTyCon { tyConName = name, tyConUnique = nameUnique name, tyConKind = kind, tyConArity = arity, tyConTyVars = tyvars, tcRoles = replicate arity Representational, tyConCType = Nothing, algTcStupidTheta = [], algTcRhs = TupleTyCon { data_con = con, tup_sort = sort }, algTcParent = parent, algTcRec = NonRecursive, algTcGadtSyntax = False, tcPromoted = prom_tc } -- | Create an unlifted primitive 'TyCon', such as @Int#@ mkPrimTyCon :: Name -> Kind -> [Role] -> PrimRep -> TyCon mkPrimTyCon name kind roles rep = mkPrimTyCon' name kind roles rep True -- | Kind constructors mkKindTyCon :: Name -> Kind -> TyCon mkKindTyCon name kind = mkPrimTyCon' name kind [] VoidRep True -- | Create a lifted primitive 'TyCon' such as @RealWorld@ mkLiftedPrimTyCon :: Name -> Kind -> [Role] -> PrimRep -> TyCon mkLiftedPrimTyCon name kind roles rep = mkPrimTyCon' name kind roles rep False mkPrimTyCon' :: Name -> Kind -> [Role] -> PrimRep -> Bool -> TyCon mkPrimTyCon' name kind roles rep is_unlifted = PrimTyCon { tyConName = name, tyConUnique = nameUnique name, tyConKind = kind, tyConArity = length roles, tcRoles = roles, primTyConRep = rep, isUnLifted = is_unlifted } -- | Create a type synonym 'TyCon' mkSynonymTyCon :: Name -> Kind -> [TyVar] -> [Role] -> Type -> TyCon mkSynonymTyCon name kind tyvars roles rhs = SynonymTyCon { tyConName = name, tyConUnique = nameUnique name, tyConKind = kind, tyConArity = length tyvars, tyConTyVars 
= tyvars, tcRoles = roles, synTcRhs = rhs } -- | Create a type family 'TyCon' mkFamilyTyCon:: Name -> Kind -> [TyVar] -> FamTyConFlav -> TyConParent -> TyCon mkFamilyTyCon name kind tyvars flav parent = FamilyTyCon { tyConUnique = nameUnique name , tyConName = name , tyConKind = kind , tyConArity = length tyvars , tyConTyVars = tyvars , famTcFlav = flav , famTcParent = parent } -- | Create a promoted data constructor 'TyCon' -- Somewhat dodgily, we give it the same Name -- as the data constructor itself; when we pretty-print -- the TyCon we add a quote; see the Outputable TyCon instance mkPromotedDataCon :: DataCon -> Name -> Unique -> Kind -> [Role] -> TyCon mkPromotedDataCon con name unique kind roles = PromotedDataCon { tyConName = name, tyConUnique = unique, tyConArity = arity, tcRoles = roles, tyConKind = kind, dataCon = con } where arity = length roles -- | Create a promoted type constructor 'TyCon' -- Somewhat dodgily, we give it the same Name -- as the type constructor itself mkPromotedTyCon :: TyCon -> Kind -> TyCon mkPromotedTyCon tc kind = PromotedTyCon { tyConName = getName tc, tyConUnique = getUnique tc, tyConArity = tyConArity tc, tyConKind = kind, ty_con = tc } isFunTyCon :: TyCon -> Bool isFunTyCon (FunTyCon {}) = True isFunTyCon _ = False -- | Test if the 'TyCon' is algebraic but abstract (invisible data constructors) isAbstractTyCon :: TyCon -> Bool isAbstractTyCon (AlgTyCon { algTcRhs = AbstractTyCon {} }) = True isAbstractTyCon _ = False -- | Make an algebraic 'TyCon' abstract. Panics if the supplied 'TyCon' is not -- algebraic makeTyConAbstract :: TyCon -> TyCon makeTyConAbstract tc@(AlgTyCon { algTcRhs = rhs }) = tc { algTcRhs = AbstractTyCon (isDistinctAlgRhs rhs) } makeTyConAbstract tc = pprPanic "makeTyConAbstract" (ppr tc) -- | Does this 'TyCon' represent something that cannot be defined in Haskell? isPrimTyCon :: TyCon -> Bool isPrimTyCon (PrimTyCon {}) = True isPrimTyCon _ = False -- | Is this 'TyCon' unlifted (i.e. cannot contain bottom)? Note that this can -- only be true for primitive and unboxed-tuple 'TyCon's isUnLiftedTyCon :: TyCon -> Bool isUnLiftedTyCon (PrimTyCon {isUnLifted = is_unlifted}) = is_unlifted isUnLiftedTyCon (AlgTyCon { algTcRhs = rhs } ) | TupleTyCon { tup_sort = sort } <- rhs = not (isBoxed (tupleSortBoxity sort)) isUnLiftedTyCon _ = False -- | Returns @True@ if the supplied 'TyCon' resulted from either a -- @data@ or @newtype@ declaration isAlgTyCon :: TyCon -> Bool isAlgTyCon (AlgTyCon {}) = True isAlgTyCon _ = False isDataTyCon :: TyCon -> Bool -- ^ Returns @True@ for data types that are /definitely/ represented by -- heap-allocated constructors. These are scrutinised by Core-level -- @case@ expressions, and they get info tables allocated for them. -- -- Generally, the function will be true for all @data@ types and false -- for @newtype@s, unboxed tuples and type family 'TyCon's. But it is -- not guaranteed to return @True@ in all cases that it could. -- -- NB: for a data type family, only the /instance/ 'TyCon's -- get an info table. The family declaration 'TyCon' does not isDataTyCon (AlgTyCon {algTcRhs = rhs}) = case rhs of TupleTyCon { tup_sort = sort } -> isBoxed (tupleSortBoxity sort) DataTyCon {} -> True NewTyCon {} -> False DataFamilyTyCon {} -> False AbstractTyCon {} -> False -- We don't know, so return False isDataTyCon _ = False -- | 'isDistinctTyCon' is true of 'TyCon's that are equal only to -- themselves, even via coercions (except for unsafeCoerce). -- This excludes newtypes, type functions, type synonyms. 
-- It relates directly to the FC consistency story: -- If the axioms are consistent, -- and co : S tys ~ T tys, and S,T are "distinct" TyCons, -- then S=T. -- Cf Note [Pruning dead case alternatives] in Unify isDistinctTyCon :: TyCon -> Bool isDistinctTyCon (AlgTyCon {algTcRhs = rhs}) = isDistinctAlgRhs rhs isDistinctTyCon (FunTyCon {}) = True isDistinctTyCon (PrimTyCon {}) = True isDistinctTyCon (PromotedDataCon {}) = True isDistinctTyCon _ = False isDistinctAlgRhs :: AlgTyConRhs -> Bool isDistinctAlgRhs (TupleTyCon {}) = True isDistinctAlgRhs (DataTyCon {}) = True isDistinctAlgRhs (DataFamilyTyCon {}) = True isDistinctAlgRhs (AbstractTyCon distinct) = distinct isDistinctAlgRhs (NewTyCon {}) = False -- | Is this 'TyCon' that for a @newtype@ isNewTyCon :: TyCon -> Bool isNewTyCon (AlgTyCon {algTcRhs = NewTyCon {}}) = True isNewTyCon _ = False -- | Take a 'TyCon' apart into the 'TyVar's it scopes over, the 'Type' it expands -- into, and (possibly) a coercion from the representation type to the @newtype@. -- Returns @Nothing@ if this is not possible. unwrapNewTyCon_maybe :: TyCon -> Maybe ([TyVar], Type, CoAxiom Unbranched) unwrapNewTyCon_maybe (AlgTyCon { tyConTyVars = tvs, algTcRhs = NewTyCon { nt_co = co, nt_rhs = rhs }}) = Just (tvs, rhs, co) unwrapNewTyCon_maybe _ = Nothing unwrapNewTyConEtad_maybe :: TyCon -> Maybe ([TyVar], Type, CoAxiom Unbranched) unwrapNewTyConEtad_maybe (AlgTyCon { algTcRhs = NewTyCon { nt_co = co, nt_etad_rhs = (tvs,rhs) }}) = Just (tvs, rhs, co) unwrapNewTyConEtad_maybe _ = Nothing isProductTyCon :: TyCon -> Bool -- True of datatypes or newtypes that have -- one, non-existential, data constructor -- See Note [Product types] isProductTyCon tc@(AlgTyCon {}) = case algTcRhs tc of TupleTyCon {} -> True DataTyCon{ data_cons = [data_con] } -> null (dataConExTyVars data_con) NewTyCon {} -> True _ -> False isProductTyCon _ = False isDataProductTyCon_maybe :: TyCon -> Maybe DataCon -- True of datatypes (not newtypes) with -- one, vanilla, data constructor -- See Note [Product types] isDataProductTyCon_maybe (AlgTyCon { algTcRhs = rhs }) = case rhs of DataTyCon { data_cons = [con] } | null (dataConExTyVars con) -- non-existential -> Just con TupleTyCon { data_con = con } -> Just con _ -> Nothing isDataProductTyCon_maybe _ = Nothing {- Note [Product types] ~~~~~~~~~~~~~~~~~~~~~~~ A product type is * A data type (not a newtype) * With one, boxed data constructor * That binds no existential type variables The main point is that product types are amenable to unboxing for * Strict function calls; we can transform f (D a b) = e to fw a b = e via the worker/wrapper transformation. (Question: couldn't this work for existentials too?) * CPR for function results; we can transform f x y = let ... in D a b to fw x y = let ... in (# a, b #) Note that the data constructor /can/ have evidence arguments: equality constraints, type classes etc. So it can be GADT. These evidence arguments are simply value arguments, and should not get in the way. -} -- | Is this a 'TyCon' representing a regular H98 type synonym (@type@)? isTypeSynonymTyCon :: TyCon -> Bool isTypeSynonymTyCon (SynonymTyCon {}) = True isTypeSynonymTyCon _ = False -- As for newtypes, it is in some contexts important to distinguish between -- closed synonyms and synonym families, as synonym families have no unique -- right hand side to which a synonym family application can expand. -- isDecomposableTyCon :: TyCon -> Bool -- True iff we can decompose (T a b c) into ((T a b) c) -- I.e. is it injective? 
-- Specifically NOT true of synonyms (open and otherwise) -- Ultimately we may have injective associated types -- in which case this test will become more interesting -- -- It'd be unusual to call isDecomposableTyCon on a regular H98 -- type synonym, because you should probably have expanded it first -- But regardless, it's not decomposable isDecomposableTyCon (SynonymTyCon {}) = False isDecomposableTyCon (FamilyTyCon {}) = False isDecomposableTyCon _other = True -- | Is this an algebraic 'TyCon' declared with the GADT syntax? isGadtSyntaxTyCon :: TyCon -> Bool isGadtSyntaxTyCon (AlgTyCon { algTcGadtSyntax = res }) = res isGadtSyntaxTyCon _ = False -- | Is this an algebraic 'TyCon' which is just an enumeration of values? isEnumerationTyCon :: TyCon -> Bool -- See Note [Enumeration types] in TyCon isEnumerationTyCon (AlgTyCon { tyConArity = arity, algTcRhs = rhs }) = case rhs of DataTyCon { is_enum = res } -> res TupleTyCon {} -> arity == 0 _ -> False isEnumerationTyCon _ = False -- | Is this a 'TyCon', synonym or otherwise, that defines a family? isFamilyTyCon :: TyCon -> Bool isFamilyTyCon (FamilyTyCon {}) = True isFamilyTyCon (AlgTyCon {algTcRhs = DataFamilyTyCon {}}) = True isFamilyTyCon _ = False -- | Is this a 'TyCon', synonym or otherwise, that defines a family with -- instances? isOpenFamilyTyCon :: TyCon -> Bool isOpenFamilyTyCon (FamilyTyCon {famTcFlav = OpenSynFamilyTyCon }) = True isOpenFamilyTyCon (AlgTyCon {algTcRhs = DataFamilyTyCon }) = True isOpenFamilyTyCon _ = False -- | Is this a synonym 'TyCon' that may have further instances appear? isTypeFamilyTyCon :: TyCon -> Bool isTypeFamilyTyCon (FamilyTyCon {}) = True isTypeFamilyTyCon _ = False isOpenTypeFamilyTyCon :: TyCon -> Bool isOpenTypeFamilyTyCon (FamilyTyCon {famTcFlav = OpenSynFamilyTyCon }) = True isOpenTypeFamilyTyCon _ = False -- | Is this a non-empty closed type family? Returns 'Nothing' for -- abstract or empty closed families. isClosedSynFamilyTyConWithAxiom_maybe :: TyCon -> Maybe (CoAxiom Branched) isClosedSynFamilyTyConWithAxiom_maybe (FamilyTyCon {famTcFlav = ClosedSynFamilyTyCon mb}) = mb isClosedSynFamilyTyConWithAxiom_maybe _ = Nothing isBuiltInSynFamTyCon_maybe :: TyCon -> Maybe BuiltInSynFamily isBuiltInSynFamTyCon_maybe (FamilyTyCon {famTcFlav = BuiltInSynFamTyCon ops }) = Just ops isBuiltInSynFamTyCon_maybe _ = Nothing -- | Is this a data family 'TyCon' that may have further instances appear? isDataFamilyTyCon :: TyCon -> Bool isDataFamilyTyCon (AlgTyCon {algTcRhs = DataFamilyTyCon {}}) = True isDataFamilyTyCon _ = False -- | Are we able to extract the 'TyVar' to class argument list -- mapping from a given 'TyCon'? isTyConAssoc :: TyCon -> Bool isTyConAssoc tc = isJust (tyConAssoc_maybe tc) tyConAssoc_maybe :: TyCon -> Maybe Class tyConAssoc_maybe tc = case tyConParent tc of AssocFamilyTyCon cls -> Just cls _ -> Nothing -- The unit tycon didn't used to be classed as a tuple tycon -- but I thought that was silly so I've undone it -- If it can't be for some reason, it should be an AlgTyCon isTupleTyCon :: TyCon -> Bool -- ^ Does this 'TyCon' represent a tuple? -- -- NB: when compiling @Data.Tuple@, the tycons won't reply @True@ to -- 'isTupleTyCon', because they are built as 'AlgTyCons'. However they -- get spat into the interface file as tuple tycons, so I don't think -- it matters. 
isTupleTyCon (AlgTyCon { algTcRhs = TupleTyCon {} }) = True isTupleTyCon _ = False tyConTuple_maybe :: TyCon -> Maybe TupleSort tyConTuple_maybe (AlgTyCon { algTcRhs = rhs }) | TupleTyCon { tup_sort = sort} <- rhs = Just sort tyConTuple_maybe _ = Nothing -- | Is this the 'TyCon' for an unboxed tuple? isUnboxedTupleTyCon :: TyCon -> Bool isUnboxedTupleTyCon (AlgTyCon { algTcRhs = rhs }) | TupleTyCon { tup_sort = sort } <- rhs = not (isBoxed (tupleSortBoxity sort)) isUnboxedTupleTyCon _ = False -- | Is this the 'TyCon' for a boxed tuple? isBoxedTupleTyCon :: TyCon -> Bool isBoxedTupleTyCon (AlgTyCon { algTcRhs = rhs }) | TupleTyCon { tup_sort = sort } <- rhs = isBoxed (tupleSortBoxity sort) isBoxedTupleTyCon _ = False -- | Is this a recursive 'TyCon'? isRecursiveTyCon :: TyCon -> Bool isRecursiveTyCon (AlgTyCon {algTcRec = Recursive}) = True isRecursiveTyCon _ = False promotableTyCon_maybe :: TyCon -> Maybe TyCon promotableTyCon_maybe (AlgTyCon { tcPromoted = prom }) = prom promotableTyCon_maybe _ = Nothing promoteTyCon :: TyCon -> TyCon promoteTyCon tc = case promotableTyCon_maybe tc of Just prom_tc -> prom_tc Nothing -> pprPanic "promoteTyCon" (ppr tc) -- | Is this a PromotedTyCon? isPromotedTyCon :: TyCon -> Bool isPromotedTyCon (PromotedTyCon {}) = True isPromotedTyCon _ = False -- | Retrieves the promoted TyCon if this is a PromotedTyCon; isPromotedTyCon_maybe :: TyCon -> Maybe TyCon isPromotedTyCon_maybe (PromotedTyCon { ty_con = tc }) = Just tc isPromotedTyCon_maybe _ = Nothing -- | Is this a PromotedDataCon? isPromotedDataCon :: TyCon -> Bool isPromotedDataCon (PromotedDataCon {}) = True isPromotedDataCon _ = False -- | Retrieves the promoted DataCon if this is a PromotedDataCon; isPromotedDataCon_maybe :: TyCon -> Maybe DataCon isPromotedDataCon_maybe (PromotedDataCon { dataCon = dc }) = Just dc isPromotedDataCon_maybe _ = Nothing -- | Identifies implicit tycons that, in particular, do not go into interface -- files (because they are implicitly reconstructed when the interface is -- read). -- -- Note that: -- -- * Associated families are implicit, as they are re-constructed from -- the class declaration in which they reside, and -- -- * Family instances are /not/ implicit as they represent the instance body -- (similar to a @dfun@ does that for a class instance). 
-- -- * Tuples are implicit iff they have a wired-in name -- (namely: boxed and unboxed tupeles are wired-in and implicit, -- but constraint tuples are not) isImplicitTyCon :: TyCon -> Bool isImplicitTyCon (FunTyCon {}) = True isImplicitTyCon (PrimTyCon {}) = True isImplicitTyCon (PromotedDataCon {}) = True isImplicitTyCon (PromotedTyCon {}) = True isImplicitTyCon (AlgTyCon { algTcRhs = rhs, algTcParent = parent, tyConName = name }) | TupleTyCon {} <- rhs = isWiredInName name | AssocFamilyTyCon {} <- parent = True | otherwise = False isImplicitTyCon (FamilyTyCon { famTcParent = parent }) | AssocFamilyTyCon {} <- parent = True | otherwise = False isImplicitTyCon (SynonymTyCon {}) = False tyConCType_maybe :: TyCon -> Maybe CType tyConCType_maybe tc@(AlgTyCon {}) = tyConCType tc tyConCType_maybe _ = Nothing {- ----------------------------------------------- -- Expand type-constructor applications ----------------------------------------------- -} expandSynTyCon_maybe :: TyCon -> [tyco] -- ^ Arguments to 'TyCon' -> Maybe ([(TyVar,tyco)], Type, [tyco]) -- ^ Returns a 'TyVar' substitution, the body -- type of the synonym (not yet substituted) -- and any arguments remaining from the -- application -- ^ Expand a type synonym application, if any expandSynTyCon_maybe tc tys | SynonymTyCon { tyConTyVars = tvs, synTcRhs = rhs } <- tc , let n_tvs = length tvs = case n_tvs `compare` length tys of LT -> Just (tvs `zip` tys, rhs, drop n_tvs tys) EQ -> Just (tvs `zip` tys, rhs, []) GT -> Nothing | otherwise = Nothing ---------------- -- | As 'tyConDataCons_maybe', but returns the empty list of constructors if no -- constructors could be found tyConDataCons :: TyCon -> [DataCon] -- It's convenient for tyConDataCons to return the -- empty list for type synonyms etc tyConDataCons tycon = tyConDataCons_maybe tycon `orElse` [] -- | Determine the 'DataCon's originating from the given 'TyCon', if the 'TyCon' -- is the sort that can have any constructors (note: this does not include -- abstract algebraic types) tyConDataCons_maybe :: TyCon -> Maybe [DataCon] tyConDataCons_maybe (AlgTyCon {algTcRhs = rhs}) = case rhs of DataTyCon { data_cons = cons } -> Just cons NewTyCon { data_con = con } -> Just [con] TupleTyCon { data_con = con } -> Just [con] _ -> Nothing tyConDataCons_maybe _ = Nothing -- | If the given 'TyCon' has a /single/ data constructor, i.e. it is a @data@ -- type with one alternative, a tuple type or a @newtype@ then that constructor -- is returned. If the 'TyCon' has more than one constructor, or represents a -- primitive or function type constructor then @Nothing@ is returned. In any -- other case, the function panics tyConSingleDataCon_maybe :: TyCon -> Maybe DataCon tyConSingleDataCon_maybe (AlgTyCon { algTcRhs = rhs }) = case rhs of DataTyCon { data_cons = [c] } -> Just c TupleTyCon { data_con = c } -> Just c NewTyCon { data_con = c } -> Just c _ -> Nothing tyConSingleDataCon_maybe _ = Nothing tyConSingleDataCon :: TyCon -> DataCon tyConSingleDataCon tc = case tyConSingleDataCon_maybe tc of Just c -> c Nothing -> pprPanic "tyConDataCon" (ppr tc) tyConSingleAlgDataCon_maybe :: TyCon -> Maybe DataCon -- Returns (Just con) for single-constructor -- *algebraic* data types *not* newtypes tyConSingleAlgDataCon_maybe (AlgTyCon { algTcRhs = rhs }) = case rhs of DataTyCon { data_cons = [c] } -> Just c TupleTyCon { data_con = c } -> Just c _ -> Nothing tyConSingleAlgDataCon_maybe _ = Nothing -- | Determine the number of value constructors a 'TyCon' has. 
Panics if the -- 'TyCon' is not algebraic or a tuple tyConFamilySize :: TyCon -> Int tyConFamilySize tc@(AlgTyCon { algTcRhs = rhs }) = case rhs of DataTyCon { data_cons = cons } -> length cons NewTyCon {} -> 1 TupleTyCon {} -> 1 DataFamilyTyCon {} -> 0 _ -> pprPanic "tyConFamilySize 1" (ppr tc) tyConFamilySize tc = pprPanic "tyConFamilySize 2" (ppr tc) -- | Extract an 'AlgTyConRhs' with information about data constructors from an -- algebraic or tuple 'TyCon'. Panics for any other sort of 'TyCon' algTyConRhs :: TyCon -> AlgTyConRhs algTyConRhs (AlgTyCon {algTcRhs = rhs}) = rhs algTyConRhs other = pprPanic "algTyConRhs" (ppr other) -- | Get the list of roles for the type parameters of a TyCon tyConRoles :: TyCon -> [Role] -- See also Note [TyCon Role signatures] tyConRoles tc = case tc of { FunTyCon {} -> const_role Representational ; AlgTyCon { tcRoles = roles } -> roles ; SynonymTyCon { tcRoles = roles } -> roles ; FamilyTyCon {} -> const_role Nominal ; PrimTyCon { tcRoles = roles } -> roles ; PromotedDataCon { tcRoles = roles } -> roles ; PromotedTyCon {} -> const_role Nominal } where const_role r = replicate (tyConArity tc) r -- | Extract the bound type variables and type expansion of a type synonym -- 'TyCon'. Panics if the 'TyCon' is not a synonym newTyConRhs :: TyCon -> ([TyVar], Type) newTyConRhs (AlgTyCon {tyConTyVars = tvs, algTcRhs = NewTyCon { nt_rhs = rhs }}) = (tvs, rhs) newTyConRhs tycon = pprPanic "newTyConRhs" (ppr tycon) -- | The number of type parameters that need to be passed to a newtype to -- resolve it. May be less than in the definition if it can be eta-contracted. newTyConEtadArity :: TyCon -> Int newTyConEtadArity (AlgTyCon {algTcRhs = NewTyCon { nt_etad_rhs = tvs_rhs }}) = length (fst tvs_rhs) newTyConEtadArity tycon = pprPanic "newTyConEtadArity" (ppr tycon) -- | Extract the bound type variables and type expansion of an eta-contracted -- type synonym 'TyCon'. Panics if the 'TyCon' is not a synonym newTyConEtadRhs :: TyCon -> ([TyVar], Type) newTyConEtadRhs (AlgTyCon {algTcRhs = NewTyCon { nt_etad_rhs = tvs_rhs }}) = tvs_rhs newTyConEtadRhs tycon = pprPanic "newTyConEtadRhs" (ppr tycon) -- | Extracts the @newtype@ coercion from such a 'TyCon', which can be used to -- construct something with the @newtype@s type from its representation type -- (right hand side). If the supplied 'TyCon' is not a @newtype@, returns -- @Nothing@ newTyConCo_maybe :: TyCon -> Maybe (CoAxiom Unbranched) newTyConCo_maybe (AlgTyCon {algTcRhs = NewTyCon { nt_co = co }}) = Just co newTyConCo_maybe _ = Nothing newTyConCo :: TyCon -> CoAxiom Unbranched newTyConCo tc = case newTyConCo_maybe tc of Just co -> co Nothing -> pprPanic "newTyConCo" (ppr tc) -- | Find the primitive representation of a 'TyCon' tyConPrimRep :: TyCon -> PrimRep tyConPrimRep (PrimTyCon {primTyConRep = rep}) = rep tyConPrimRep tc = ASSERT(not (isUnboxedTupleTyCon tc)) PtrRep -- | Find the \"stupid theta\" of the 'TyCon'. A \"stupid theta\" is the context -- to the left of an algebraic type declaration, e.g. @Eq a@ in the declaration -- @data Eq a => T a ...@ tyConStupidTheta :: TyCon -> [PredType] tyConStupidTheta (AlgTyCon {algTcStupidTheta = stupid}) = stupid tyConStupidTheta tycon = pprPanic "tyConStupidTheta" (ppr tycon) -- | Extract the 'TyVar's bound by a vanilla type synonym -- and the corresponding (unsubstituted) right hand side. 
synTyConDefn_maybe :: TyCon -> Maybe ([TyVar], Type) synTyConDefn_maybe (SynonymTyCon {tyConTyVars = tyvars, synTcRhs = ty}) = Just (tyvars, ty) synTyConDefn_maybe _ = Nothing -- | Extract the information pertaining to the right hand side of a type synonym -- (@type@) declaration. synTyConRhs_maybe :: TyCon -> Maybe Type synTyConRhs_maybe (SynonymTyCon {synTcRhs = rhs}) = Just rhs synTyConRhs_maybe _ = Nothing -- | Extract the flavour of a type family (with all the extra information that -- it carries) famTyConFlav_maybe :: TyCon -> Maybe FamTyConFlav famTyConFlav_maybe (FamilyTyCon {famTcFlav = flav}) = Just flav famTyConFlav_maybe _ = Nothing -- | Is this 'TyCon' that for a class instance? isClassTyCon :: TyCon -> Bool isClassTyCon (AlgTyCon {algTcParent = ClassTyCon _}) = True isClassTyCon _ = False -- | If this 'TyCon' is that for a class instance, return the class it is for. -- Otherwise returns @Nothing@ tyConClass_maybe :: TyCon -> Maybe Class tyConClass_maybe (AlgTyCon {algTcParent = ClassTyCon clas}) = Just clas tyConClass_maybe _ = Nothing ---------------------------------------------------------------------------- tyConParent :: TyCon -> TyConParent tyConParent (AlgTyCon {algTcParent = parent}) = parent tyConParent (FamilyTyCon {famTcParent = parent}) = parent tyConParent _ = NoParentTyCon ---------------------------------------------------------------------------- -- | Is this 'TyCon' that for a data family instance? isFamInstTyCon :: TyCon -> Bool isFamInstTyCon tc = case tyConParent tc of FamInstTyCon {} -> True _ -> False tyConFamInstSig_maybe :: TyCon -> Maybe (TyCon, [Type], CoAxiom Unbranched) tyConFamInstSig_maybe tc = case tyConParent tc of FamInstTyCon ax f ts -> Just (f, ts, ax) _ -> Nothing -- | If this 'TyCon' is that of a family instance, return the family in question -- and the instance types. Otherwise, return @Nothing@ tyConFamInst_maybe :: TyCon -> Maybe (TyCon, [Type]) tyConFamInst_maybe tc = case tyConParent tc of FamInstTyCon _ f ts -> Just (f, ts) _ -> Nothing -- | If this 'TyCon' is that of a family instance, return a 'TyCon' which -- represents a coercion identifying the representation type with the type -- instance family. Otherwise, return @Nothing@ tyConFamilyCoercion_maybe :: TyCon -> Maybe (CoAxiom Unbranched) tyConFamilyCoercion_maybe tc = case tyConParent tc of FamInstTyCon co _ _ -> Just co _ -> Nothing {- ************************************************************************ * * \subsection[TyCon-instances]{Instance declarations for @TyCon@} * * ************************************************************************ @TyCon@s are compared by comparing their @Unique@s. The strictness analyser needs @Ord@. It is a lexicographic order with the property @(a<=b) || (b<=a)@. 
-} instance Eq TyCon where a == b = case (a `compare` b) of { EQ -> True; _ -> False } a /= b = case (a `compare` b) of { EQ -> False; _ -> True } instance Ord TyCon where a <= b = case (a `compare` b) of { LT -> True; EQ -> True; GT -> False } a < b = case (a `compare` b) of { LT -> True; EQ -> False; GT -> False } a >= b = case (a `compare` b) of { LT -> False; EQ -> True; GT -> True } a > b = case (a `compare` b) of { LT -> False; EQ -> False; GT -> True } compare a b = getUnique a `compare` getUnique b instance Uniquable TyCon where getUnique tc = tyConUnique tc instance Outputable TyCon where -- At the moment a promoted TyCon has the same Name as its -- corresponding TyCon, so we add the quote to distinguish it here ppr tc = pprPromotionQuote tc <> ppr (tyConName tc) pprPromotionQuote :: TyCon -> SDoc pprPromotionQuote (PromotedDataCon {}) = char '\'' -- Quote promoted DataCons -- in types pprPromotionQuote (PromotedTyCon {}) = ifPprDebug (char '\'') pprPromotionQuote _ = empty -- However, we don't quote TyCons -- in kinds e.g. -- type family T a :: Bool -> * -- cf Trac #5952. -- Except with -dppr-debug instance NamedThing TyCon where getName = tyConName instance Data.Data TyCon where -- don't traverse? toConstr _ = abstractConstr "TyCon" gunfold _ _ = error "gunfold" dataTypeOf _ = mkNoRepType "TyCon" {- ************************************************************************ * * Walking over recursive TyCons * * ************************************************************************ Note [Expanding newtypes and products] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When expanding a type to expose a data-type constructor, we need to be careful about newtypes, lest we fall into an infinite loop. Here are the key examples: newtype Id x = MkId x newtype Fix f = MkFix (f (Fix f)) newtype T = MkT (T -> T) Type Expansion -------------------------- T T -> T Fix Maybe Maybe (Fix Maybe) Id (Id Int) Int Fix Id NO NO NO Notice that we can expand T, even though it's recursive. And we can expand Id (Id Int), even though the Id shows up twice at the outer level. So, when expanding, we keep track of when we've seen a recursive newtype at outermost level; and bale out if we see it again. We sometimes want to do the same for product types, so that the strictness analyser doesn't unbox infinitely deeply. The function that manages this is checkRecTc. -} newtype RecTcChecker = RC NameSet initRecTc :: RecTcChecker initRecTc = RC emptyNameSet checkRecTc :: RecTcChecker -> TyCon -> Maybe RecTcChecker -- Nothing => Recursion detected -- Just rec_tcs => Keep going checkRecTc (RC rec_nts) tc | not (isRecursiveTyCon tc) = Just (RC rec_nts) | tc_name `elemNameSet` rec_nts = Nothing | otherwise = Just (RC (extendNameSet rec_nts tc_name)) where tc_name = tyConName tc
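-- The Note [Expanding newtypes and products] above is the heart of 'checkRecTc':
-- expansion may pass through a recursive TyCon once, but must stop when the same
-- TyCon shows up a second time at the outermost level. Below is a minimal,
-- self-contained sketch of that idea; the names (TyName, RecCheck, checkRec) are
-- hypothetical and the Bool argument stands in for isRecursiveTyCon, so this is
-- an illustration of the technique, not GHC's actual implementation.

import qualified Data.Set as Set

type TyName = String

newtype RecCheck = RecCheck (Set.Set TyName)

initRecCheck :: RecCheck
initRecCheck = RecCheck Set.empty

checkRec :: Bool -> TyName -> RecCheck -> Maybe RecCheck
checkRec isRecursive name rc@(RecCheck seen)
  | not isRecursive        = Just rc                                -- non-recursive: always safe to expand
  | name `Set.member` seen = Nothing                                -- seen before: stop, we are looping
  | otherwise              = Just (RecCheck (Set.insert name seen)) -- remember it and keep going

-- Expanding @newtype T = MkT (T -> T)@ would call @checkRec True "T"@ twice:
-- the first call succeeds and records "T", the second returns Nothing.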
fmthoma/ghc
compiler/types/TyCon.hs
bsd-3-clause
71,358
0
16
21,373
8,172
4,671
3,501
721
7
module Turbinado.Layout.Helpers.Tags
  ( styleSheetTag
  ) where

import Turbinado.View

styleSheetTag :: String -> String -> VHtml
styleSheetTag s m =
  itag "link" ! [ strAttr "media" m
                , strAttr "type" "text/css"
                , strAttr "rel" "stylesheet"
                , strAttr "href" ("/css/" ++ s ++ ".css")
                ]
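-- A usage sketch (assuming the result is rendered inside a Turbinado view):
--
-- >  styleSheetTag "site" "screen"
--
-- should emit a link tag along the lines of
--
-- >  <link media="screen" type="text/css" rel="stylesheet" href="/css/site.css" />
--
-- i.e. the first argument names a stylesheet under /css/ and the second fills
-- the media attribute.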
alsonkemp/turbinado-website
Turbinado/Layout/Helpers/Tags.hs
bsd-3-clause
288
0
10
50
91
49
42
6
1
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd"> <helpset version="2.0" xml:lang="hi-IN"> <title>Port Scan | ZAP Extension</title> <maps> <homeID>top</homeID> <mapref location="map.jhm"/> </maps> <view> <name>TOC</name> <label>Contents</label> <type>org.zaproxy.zap.extension.help.ZapTocView</type> <data>toc.xml</data> </view> <view> <name>Index</name> <label>Index</label> <type>javax.help.IndexView</type> <data>index.xml</data> </view> <view> <name>Search</name> <label>Search</label> <type>javax.help.SearchView</type> <data engine="com.sun.java.help.search.DefaultSearchEngine"> JavaHelpSearch </data> </view> <view> <name>Favorites</name> <label>Favorites</label> <type>javax.help.FavoritesView</type> </view> </helpset>
thc202/zap-extensions
addOns/zest/src/main/javahelp/org/zaproxy/zap/extension/zest/resources/help_hi_IN/helpset_hi_IN.hs
apache-2.0
970
90
29
159
399
212
187
-1
-1
{-# LANGUAGE Trustworthy #-} {-# LANGUAGE ScopedTypeVariables #-} ----------------------------------------------------------------------------- -- | -- Module : Data.Bitraversable -- Copyright : (C) 2011-2016 Edward Kmett -- License : BSD-style (see the file LICENSE) -- -- Maintainer : libraries@haskell.org -- Stability : provisional -- Portability : portable -- -- @since 4.10.0.0 ---------------------------------------------------------------------------- module Data.Bitraversable ( Bitraversable(..) , bisequenceA , bisequence , bimapM , bifor , biforM , bimapAccumL , bimapAccumR , bimapDefault , bifoldMapDefault ) where import Control.Applicative import Data.Bifunctor import Data.Bifoldable import Data.Coerce import Data.Functor.Identity (Identity(..)) import Data.Functor.Utils (StateL(..), StateR(..)) import GHC.Generics (K1(..)) -- | 'Bitraversable' identifies bifunctorial data structures whose elements can -- be traversed in order, performing 'Applicative' or 'Monad' actions at each -- element, and collecting a result structure with the same shape. -- -- As opposed to 'Traversable' data structures, which have one variety of -- element on which an action can be performed, 'Bitraversable' data structures -- have two such varieties of elements. -- -- A definition of 'bitraverse' must satisfy the following laws: -- -- [/naturality/] -- @'bitraverse' (t . f) (t . g) ≡ t . 'bitraverse' f g@ -- for every applicative transformation @t@ -- -- [/identity/] -- @'bitraverse' 'Identity' 'Identity' ≡ 'Identity'@ -- -- [/composition/] -- @'Compose' . 'fmap' ('bitraverse' g1 g2) . 'bitraverse' f1 f2 -- ≡ 'traverse' ('Compose' . 'fmap' g1 . f1) ('Compose' . 'fmap' g2 . f2)@ -- -- where an /applicative transformation/ is a function -- -- @t :: ('Applicative' f, 'Applicative' g) => f a -> g a@ -- -- preserving the 'Applicative' operations: -- -- @ -- t ('pure' x) = 'pure' x -- t (f '<*>' x) = t f '<*>' t x -- @ -- -- and the identity functor 'Identity' and composition functors 'Compose' are -- defined as -- -- > newtype Identity a = Identity { runIdentity :: a } -- > -- > instance Functor Identity where -- > fmap f (Identity x) = Identity (f x) -- > -- > instance Applicative Identity where -- > pure = Identity -- > Identity f <*> Identity x = Identity (f x) -- > -- > newtype Compose f g a = Compose (f (g a)) -- > -- > instance (Functor f, Functor g) => Functor (Compose f g) where -- > fmap f (Compose x) = Compose (fmap (fmap f) x) -- > -- > instance (Applicative f, Applicative g) => Applicative (Compose f g) where -- > pure = Compose . pure . pure -- > Compose f <*> Compose x = Compose ((<*>) <$> f <*> x) -- -- Some simple examples are 'Either' and '(,)': -- -- > instance Bitraversable Either where -- > bitraverse f _ (Left x) = Left <$> f x -- > bitraverse _ g (Right y) = Right <$> g y -- > -- > instance Bitraversable (,) where -- > bitraverse f g (x, y) = (,) <$> f x <*> g y -- -- 'Bitraversable' relates to its superclasses in the following ways: -- -- @ -- 'bimap' f g ≡ 'runIdentity' . 'bitraverse' ('Identity' . f) ('Identity' . g) -- 'bifoldMap' f g = 'getConst' . 'bitraverse' ('Const' . f) ('Const' . g) -- @ -- -- These are available as 'bimapDefault' and 'bifoldMapDefault' respectively. -- -- @since 4.10.0.0 class (Bifunctor t, Bifoldable t) => Bitraversable t where -- | Evaluates the relevant functions at each element in the structure, -- running the action, and builds a new structure with the same shape, using -- the results produced from sequencing the actions. 
-- -- @'bitraverse' f g ≡ 'bisequenceA' . 'bimap' f g@ -- -- For a version that ignores the results, see 'bitraverse_'. -- -- @since 4.10.0.0 bitraverse :: Applicative f => (a -> f c) -> (b -> f d) -> t a b -> f (t c d) bitraverse f g = bisequenceA . bimap f g -- | Alias for 'bisequence'. -- -- @since 4.10.0.0 bisequenceA :: (Bitraversable t, Applicative f) => t (f a) (f b) -> f (t a b) bisequenceA = bisequence -- | Alias for 'bitraverse'. -- -- @since 4.10.0.0 bimapM :: (Bitraversable t, Applicative f) => (a -> f c) -> (b -> f d) -> t a b -> f (t c d) bimapM = bitraverse -- | Sequences all the actions in a structure, building a new structure with -- the same shape using the results of the actions. For a version that ignores -- the results, see 'bisequence_'. -- -- @'bisequence' ≡ 'bitraverse' 'id' 'id'@ -- -- @since 4.10.0.0 bisequence :: (Bitraversable t, Applicative f) => t (f a) (f b) -> f (t a b) bisequence = bitraverse id id -- | @since 4.10.0.0 instance Bitraversable (,) where bitraverse f g ~(a, b) = liftA2 (,) (f a) (g b) -- | @since 4.10.0.0 instance Bitraversable ((,,) x) where bitraverse f g ~(x, a, b) = liftA2 ((,,) x) (f a) (g b) -- | @since 4.10.0.0 instance Bitraversable ((,,,) x y) where bitraverse f g ~(x, y, a, b) = liftA2 ((,,,) x y) (f a) (g b) -- | @since 4.10.0.0 instance Bitraversable ((,,,,) x y z) where bitraverse f g ~(x, y, z, a, b) = liftA2 ((,,,,) x y z) (f a) (g b) -- | @since 4.10.0.0 instance Bitraversable ((,,,,,) x y z w) where bitraverse f g ~(x, y, z, w, a, b) = liftA2 ((,,,,,) x y z w) (f a) (g b) -- | @since 4.10.0.0 instance Bitraversable ((,,,,,,) x y z w v) where bitraverse f g ~(x, y, z, w, v, a, b) = liftA2 ((,,,,,,) x y z w v) (f a) (g b) -- | @since 4.10.0.0 instance Bitraversable Either where bitraverse f _ (Left a) = Left <$> f a bitraverse _ g (Right b) = Right <$> g b -- | @since 4.10.0.0 instance Bitraversable Const where bitraverse f _ (Const a) = Const <$> f a -- | @since 4.10.0.0 instance Bitraversable (K1 i) where bitraverse f _ (K1 c) = K1 <$> f c -- | 'bifor' is 'bitraverse' with the structure as the first argument. For a -- version that ignores the results, see 'bifor_'. -- -- @since 4.10.0.0 bifor :: (Bitraversable t, Applicative f) => t a b -> (a -> f c) -> (b -> f d) -> f (t c d) bifor t f g = bitraverse f g t -- | Alias for 'bifor'. -- -- @since 4.10.0.0 biforM :: (Bitraversable t, Applicative f) => t a b -> (a -> f c) -> (b -> f d) -> f (t c d) biforM = bifor -- | The 'bimapAccumL' function behaves like a combination of 'bimap' and -- 'bifoldl'; it traverses a structure from left to right, threading a state -- of type @a@ and using the given actions to compute new elements for the -- structure. -- -- @since 4.10.0.0 bimapAccumL :: Bitraversable t => (a -> b -> (a, c)) -> (a -> d -> (a, e)) -> a -> t b d -> (a, t c e) bimapAccumL f g s t = runStateL (bitraverse (StateL . flip f) (StateL . flip g) t) s -- | The 'bimapAccumR' function behaves like a combination of 'bimap' and -- 'bifoldl'; it traverses a structure from right to left, threading a state -- of type @a@ and using the given actions to compute new elements for the -- structure. -- -- @since 4.10.0.0 bimapAccumR :: Bitraversable t => (a -> b -> (a, c)) -> (a -> d -> (a, e)) -> a -> t b d -> (a, t c e) bimapAccumR f g s t = runStateR (bitraverse (StateR . flip f) (StateR . flip g) t) s -- | A default definition of 'bimap' in terms of the 'Bitraversable' -- operations. -- -- @'bimapDefault' f g ≡ -- 'runIdentity' . 'bitraverse' ('Identity' . f) ('Identity' . 
g)@ -- -- @since 4.10.0.0 bimapDefault :: forall t a b c d . Bitraversable t => (a -> b) -> (c -> d) -> t a c -> t b d -- See Note [Function coercion] in Data.Functor.Utils. bimapDefault = coerce (bitraverse :: (a -> Identity b) -> (c -> Identity d) -> t a c -> Identity (t b d)) {-# INLINE bimapDefault #-} -- | A default definition of 'bifoldMap' in terms of the 'Bitraversable' -- operations. -- -- @'bifoldMapDefault' f g ≡ -- 'getConst' . 'bitraverse' ('Const' . f) ('Const' . g)@ -- -- @since 4.10.0.0 bifoldMapDefault :: forall t m a b . (Bitraversable t, Monoid m) => (a -> m) -> (b -> m) -> t a b -> m -- See Note [Function coercion] in Data.Functor.Utils. bifoldMapDefault = coerce (bitraverse :: (a -> Const m ()) -> (b -> Const m ()) -> t a b -> Const m (t () ())) {-# INLINE bifoldMapDefault #-}
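-- A small, runnable illustration of the API documented above (a standalone
-- sketch, not part of Data.Bitraversable itself); it only assumes base >= 4.10.

import Data.Bitraversable (bitraverse, bimapAccumL)

checkPositive :: Int -> Maybe Int
checkPositive x = if x > 0 then Just x else Nothing

main :: IO ()
main = do
  -- Both slots of a pair are validated; any Nothing makes the whole result Nothing.
  print (bitraverse checkPositive checkPositive (3, 4))                       -- Just (3,4)
  print (bitraverse checkPositive checkPositive (3, -1))                      -- Nothing
  -- Either only ever traverses the side that is present.
  print (bitraverse checkPositive checkPositive (Left 7 :: Either Int Int))   -- Just (Left 7)
  -- bimapAccumL threads an accumulator left to right while rebuilding the pair.
  print (bimapAccumL (\acc x -> (acc + x, show x))
                     (\acc y -> (acc * y, y))
                     0 (2, 5 :: Int))                                         -- (10,("2",5))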
ezyang/ghc
libraries/base/Data/Bitraversable.hs
bsd-3-clause
8,142
0
13
1,744
1,847
1,055
792
76
1
{-# LANGUAGE OverloadedStrings #-} module Lib ( main , Db.Config(..) ) where import qualified TFB.Types as Types import qualified TFB.Db as Db import qualified Data.Either as Either import Data.List (sortOn) import Control.Monad (replicateM, join) import qualified Data.ByteString.Lazy as LBS import qualified Data.ByteString.Lazy.Char8 as LBSC import qualified Network.HTTP.Types.Status as Status import qualified Network.HTTP.Types.Header as Header import qualified Network.Wai as Wai import qualified Network.Wai.Handler.Warp as Warp import qualified Data.BufferBuilder.Json as Json import Data.BufferBuilder.Json ((.=)) import qualified System.Random.MWC as MWC import qualified Html import Html ((#)) -- entry point main :: Db.Config -> IO () main dbConfig = do putStrLn "Config is:" print dbConfig putStrLn "Initializing database connection pool..." dbPool <- Db.mkPool dbConfig putStrLn "Initializing PRNG seed..." gen <- MWC.create putStrLn "Warp core online" Warp.run 7041 $ app gen dbPool -- router app :: MWC.GenIO -> Db.Pool -> Wai.Application app gen dbPool req respond = do let qParams = Wai.queryString req let mCount = Types.parseCount =<< join (lookup "queries" qParams) case (Wai.requestMethod req, Wai.pathInfo req) of ("GET", ["plaintext"]) -> respond getPlaintext ("GET", ["json"]) -> respond getJson ("GET", ["db"]) -> respond =<< getWorld gen dbPool ("GET", ["fortunes"]) -> respond =<< getFortunes dbPool ("GET", ["queries"]) -> respond =<< getWorlds gen dbPool mCount ("GET", ["updates"]) -> respond =<< updateWorlds gen dbPool mCount _ -> respond routeNotFound -- * response helpers contentText :: Header.ResponseHeaders contentText = [(Header.hContentType, "text/plain")] respondText :: Status.Status -> LBS.ByteString -> Wai.Response respondText code = Wai.responseLBS code contentText contentJson :: Header.ResponseHeaders contentJson = [(Header.hContentType, "application/json")] {-# SPECIALIZE respondJson :: Json.ObjectBuilder -> Wai.Response #-} {-# SPECIALIZE respondJson :: Types.World -> Wai.Response #-} respondJson :: Json.ToJson a => a -> Wai.Response respondJson = Wai.responseLBS Status.status200 contentJson . mkBs where mkBs = LBS.fromStrict . Json.encodeJson contentHtml :: Header.ResponseHeaders contentHtml = [(Header.hContentType, "text/html; charset=UTF-8")] respondHtml :: Types.FortunesHtml -> Wai.Response respondHtml = Wai.responseLBS Status.status200 contentHtml . Html.renderByteString -- * error responses routeNotFound :: Wai.Response routeNotFound = respondText Status.status400 "Bad route" respondInternalError :: LBS.ByteString -> Wai.Response respondInternalError = respondText Status.status500 respondDbError :: Db.Error -> Wai.Response respondDbError = respondInternalError . LBSC.pack . show -- * route implementations getPlaintext :: Wai.Response getPlaintext = respondText Status.status200 "Hello, World!" {-# INLINE getPlaintext #-} getJson :: Wai.Response getJson = respondJson $ "message" .= Types.unsafeJsonString "Hello, World!" {-# INLINE getJson #-} getWorld :: MWC.GenIO -> Db.Pool -> IO Wai.Response getWorld gen dbPool = do wId <- randomId gen res <- Db.queryWorldById dbPool wId pure . mkResponse $ res where mkResponse = Either.either respondDbError respondJson {-# INLINE getWorld #-} getWorlds :: MWC.GenIO -> Db.Pool -> Maybe Types.Count -> IO Wai.Response getWorlds gen dbPool mCount = do wIds <- replicateM count $ randomId gen res <- Db.queryWorldByIds dbPool wIds pure . 
mkResponse $ res where count = Types.getCount mCount mkResponse = Either.either respondDbError respondJson {-# INLINE getWorlds #-} updateWorlds :: MWC.GenIO -> Db.Pool -> Maybe Types.Count -> IO Wai.Response updateWorlds gen dbPool mCount = do wIds <- replicateM count $ randomId gen res <- Db.queryWorldByIds dbPool wIds Either.either (pure . respondDbError) (go dbPool) res where count = Types.getCount mCount mkResponse = Either.either respondDbError respondJson go conn ws = do wNumbers <- replicateM count $ randomId gen wsUp <- Db.updateWorlds conn . zip ws $ fmap fromIntegral wNumbers return $ mkResponse wsUp {-# INLINE updateWorlds #-} getFortunes :: Db.Pool -> IO Wai.Response getFortunes dbPool = do res <- Db.queryFortunes dbPool return $ case res of Left e -> respondDbError e Right fs -> respondHtml $ do let new = Types.Fortune 0 "Additional fortune added at request time." let header = Html.tr_ $ Html.th_ (Html.Raw "id") # Html.th_ (Html.Raw "message") let mkRow f = Html.tr_ $ Html.td_ (fromIntegral $ Types.fId f) # Html.td_ (Types.fMessage $ f) let rows = fmap mkRow $ sortOn Types.fMessage (new : fs) Html.doctype_ # Html.html_ ( Html.head_ ( Html.title_ (Html.Raw "Fortunes") ) # Html.body_ ( Html.table_ $ header # rows ) ) {-# INLINE getFortunes #-} randomId :: MWC.GenIO -> IO Types.QId randomId = MWC.uniformR (1, 10000)
sumeetchhetri/FrameworkBenchmarks
frameworks/Haskell/warp/warp-shared/src/Lib.hs
bsd-3-clause
5,158
0
23
996
1,501
781
720
122
7
-- This file contains my first attempt to represent hypergraphs in
-- Haskell and use the Haskell compiler to type check them

---------------------------------------------
-- Attempt using 'data' to represent atoms --
---------------------------------------------

data TV = NullTV
        | SimpleTV Float Float
        | TVAnd TV TV
        | TVOr TV TV
        | TVEvaluation Evaluation
        | TVMember Member
        | TVGetTV Atom

data Atom = AtomPredicate Predicate
          | AtomConcept Concept
          | AtomNumber Float
          | AtomMember Member
          | AtomList List
          | AtomSchema Schema

data Concept = Concept String
             | ConceptAnd Concept Concept
             | ConceptOr Concept Concept

data Member = Member Atom Concept

data Predicate = Predicate (Atom -> TV)
               | PredicateAnd Predicate Predicate
               | PredicateOr Predicate Predicate

data Evaluation = Evaluation Predicate Atom

data Schema = Schema (Atom -> Atom)

data ExecutionOutput = ExecutionOutput Schema Atom

data List = List [Atom]

-------------
-- Example --
-------------

-- Functions from Atom to TV to build predicates
is_bottom :: Atom -> TV
is_bottom = undefined

is_top :: Atom -> TV
is_top = undefined

is_car :: Atom -> TV
is_car = undefined

-- And/Or hypergraph of concepts
h1 = ConceptOr (ConceptAnd (Concept "A") (Concept "B")) (Concept "C")

-- And/Or hypergraph of predicates
h2 = PredicateOr (PredicateAnd (Predicate is_top) (Predicate is_car)) (Predicate is_bottom)

-- Apply EvaluationLink to predicate h2
tv3 = TVEvaluation (Evaluation h2 (AtomConcept (Concept "BMW")))

-- Build a SchemaLink
add :: Atom -> Atom
add (AtomList (List [AtomNumber x, AtomNumber y])) = AtomNumber (x + y)
add _ = undefined

h4 = Schema add

-- Test GetTVLink
tv1 = TVGetTV (AtomConcept h1)
tv2 = TVGetTV (AtomPredicate h2)

-- Test AndLink with TV
tv4 = TVAnd tv1 tv3
kinoc/opencog
opencog/haskell/hypergraph_type_checking/experiment_with_data.hs
agpl-3.0
1,907
21
13
440
513
253
260
40
1
main = do
  let sumOfSq = sum [x^2 | x <- [1..100]]
  let sqOfSum = (sum [1..100]) ^ 2
  print sumOfSq
  print sqOfSum
  print (sqOfSum - sumOfSq)
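-- The same difference can be computed in closed form, which makes a handy
-- cross-check (a sketch; sumTo and sumSqTo are names introduced here):
--   sum [1..n]        = n*(n+1)/2         ->   5050 for n = 100
--   sum [x^2 | x<-..] = n*(n+1)*(2n+1)/6  -> 338350 for n = 100
-- so the answer is 5050^2 - 338350 = 25164150.

sumTo, sumSqTo :: Integer -> Integer
sumTo n   = n * (n + 1) `div` 2
sumSqTo n = n * (n + 1) * (2 * n + 1) `div` 6

answer :: Integer
answer = sumTo 100 ^ 2 - sumSqTo 100   -- 25164150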
ddeeff/sandbox
haskell/eular/6.hs
mit
148
1
14
38
90
41
49
6
1
module Remotion.Server.Connection where import Remotion.Util.Prelude hiding (State, listen, interact) import qualified Remotion.Protocol as P import qualified Remotion.Session as S runConnection :: (MonadIO m, Applicative m, Serializable IO i, Serializable IO o) => S.Socket -> ServerIsAvailable -> Authenticate -> P.Timeout -> P.UserProtocolSignature -> ProcessUserRequest i o s -> m (Either ConnectionFailure ()) runConnection socket available authenticate timeout userVersion processRequest = runEitherT $ do do r <- lift $ S.run (handshake available authenticate timeout userVersion) (socket, 10^6*3) hoistEither $ join . liftM (fmapL HandshakeFailure) $ fmapL SessionFailure r do r <- lift $ S.run (interact processRequest) (socket, timeout) hoistEither $ fmapL SessionFailure r data ConnectionFailure = HandshakeFailure P.HandshakeFailure | SessionFailure S.Failure deriving (Show) -- Handshake ----------------------------- -- | -- A function, which checks the authentication data. -- If you want to provide access to anybody, use @(const $ return True)@. type Authenticate = P.Credentials -> IO Bool -- | -- type ServerIsAvailable = Bool handshake :: (MonadIO m, Applicative m) => ServerIsAvailable -> Authenticate -> P.Timeout -> P.UserProtocolSignature -> S.Session m (Either P.HandshakeFailure ()) handshake available authenticate timeout userVersion = runEitherT $ do do check (not available) $ P.ServerIsBusy do cv <- receive check (cv /= P.version) $ P.ProtocolVersionMismatch cv P.version do cv <- receive check (cv /= userVersion) $ P.UserProtocolSignatureMismatch cv userVersion do credentials <- receive ok <- liftIO $ authenticate $ credentials check (not ok) $ P.Unauthenticated do 0::Int <- receive -- A workaround for otherwise unpredictable behaviour, -- happening in case of multiple sends. send $ timeout where receive = lift $ S.receive send = lift . S.send check condition failure = do let failureM = if condition then Just $ failure else Nothing send failureM maybe (return ()) left failureM -- Interaction ----------------------------- -- | -- A function which processes requests of type @i@ from client and -- produces a response of type @o@, -- while maintaining a user-defined session state of type @s@ per each client. -- -- This function essentially is what defines what the server actually does. type ProcessUserRequest i o s = State s -> i -> IO o -- | -- A mutable state associated with particular client's connection. -- Since we're in `IO` anyway, we use a mutable state with `IORef` wrapper. -- You're free to extend it with whatever the data structure you want. type State s = IORef (Maybe s) interact :: forall i o s m. (MonadIO m, Serializable IO i, Serializable IO o, Applicative m) => ProcessUserRequest i o s -> S.Session m () interact processRequest = do state <- liftIO $ newIORef Nothing let loop = do i <- catchError receive $ \e -> do case e of S.ReceiveTimeoutReached t -> send $ Left $ P.TimeoutReached t S.SendTimeoutReached t -> send $ Left $ P.TimeoutReached t S.CorruptData t -> send $ Left $ P.CorruptRequest t _ -> return () throwError e case i of P.CloseSession -> do send $ Right $ Nothing P.Keepalive -> do send $ Right $ Nothing loop P.UserRequest a -> do o <- liftIO $ processRequest state a send $ Right $ Just o loop loop where receive = S.receive :: S.Session m (P.Request i) send = S.send :: P.Response o -> S.Session m ()
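-- A sketch of what a 'ProcessUserRequest' handler can look like on the user's
-- side (the request/response types here are hypothetical; only the shape
-- matters). The per-connection state is the IORef that this module hands to
-- the handler, initially holding Nothing.

import Data.IORef (IORef, readIORef, writeIORef)

data Req  = Ping | Store String | Fetch
data Resp = Pong | Ok | Stored (Maybe String)

-- Matches  ProcessUserRequest Req Resp String  from this module,
-- since  State s = IORef (Maybe s)  and here  s = String.
processReq :: IORef (Maybe String) -> Req -> IO Resp
processReq _     Ping      = pure Pong
processReq state (Store s) = writeIORef state (Just s) >> pure Ok
processReq state Fetch     = Stored <$> readIORef state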
nikita-volkov/remotion
library/Remotion/Server/Connection.hs
mit
3,755
0
22
896
1,049
526
523
-1
-1
{-# LANGUAGE CPP #-} module Stackage.Config where import Control.Monad (when, unless) import Control.Monad.Trans.Writer (Writer, execWriter, tell) import Data.Char (toLower) import qualified Data.Map as Map import Data.Maybe (fromMaybe) import Data.Set (fromList, singleton) import Distribution.Text (simpleParse) import Stackage.Types -- | Packages which are shipped with GHC but are not included in the -- Haskell Platform list of core packages. defaultExtraCore :: GhcMajorVersion -> Set PackageName defaultExtraCore _ = fromList $ map PackageName $ words "binary Win32 ghc-prim integer-gmp" -- | Test suites which are expected to fail for some reason. The test suite -- will still be run and logs kept, but a failure will not indicate an -- error in our package combination. defaultExpectedFailures :: GhcMajorVersion -> Bool -- ^ haskell platform -> Set PackageName defaultExpectedFailures ghcVer requireHP = execWriter $ do -- Requires an old version of WAI and Warp for tests add "HTTP" -- text and setenv have recursive dependencies in their tests, which -- cabal can't (yet) handle add "text" add "setenv" -- The version of GLUT included with the HP does not generate -- documentation correctly. add "GLUT" -- https://github.com/bos/statistics/issues/42 add "statistics" -- https://github.com/kazu-yamamoto/simple-sendfile/pull/10 add "simple-sendfile" -- http://hackage.haskell.org/trac/hackage/ticket/954 add "diagrams" -- https://github.com/fpco/stackage/issues/24 add "unix-time" -- With transformers 0.3, it doesn't provide any modules add "transformers-compat" -- Tests require shell script and are incompatible with sandboxed package -- databases add "HTF" -- https://github.com/simonmar/monad-par/issues/28 add "monad-par" -- Unfortunately network failures seem to happen haphazardly add "network" -- https://github.com/ekmett/hyphenation/issues/1 add "hyphenation" -- Test suite takes too long to run on some systems add "punycode" -- http://hub.darcs.net/stepcut/happstack/issue/1 add "happstack-server" -- Requires a Facebook app. add "fb" -- https://github.com/tibbe/hashable/issues/64 add "hashable" -- https://github.com/vincenthz/language-java/issues/10 add "language-java" add "threads" add "crypto-conduit" add "pandoc" add "language-ecmascript" add "hspec" add "alex" -- https://github.com/basvandijk/concurrent-extra/issues/ add "concurrent-extra" -- https://github.com/skogsbaer/xmlgen/issues/2 add "xmlgen" -- Something very strange going on with the test suite, I can't figure -- out how to fix it add "bson" -- Requires a locally running PostgreSQL server with appropriate users add "postgresql-simple" -- Missing files add "websockets" -- Some kind of Cabal bug when trying to run tests add "thyme" add "shake" -- https://github.com/jgm/pandoc-citeproc/issues/5 add "pandoc-citeproc" -- Problems with doctest and sandboxing add "warp" add "wai-logger" -- https://github.com/fpco/stackage/issues/163 add "hTalos" add "seqloc" -- https://github.com/bos/math-functions/issues/25 add "math-functions" -- FIXME the test suite fails fairly regularly in builds, though I haven't -- discovered why yet add "crypto-numbers" -- Test suite is currently failing regularly, needs to be worked out still. 
add "lens" -- Requires too old a version of test-framework add "time" -- No code included any more, therefore Haddock fails mapM_ add $ words =<< [ "comonad-transformers comonads-fd groupoids" , "profunctor-extras semigroupoid-extras" , "hamlet shakespeare-css shakespeare-i18n" , "shakespeare-js shakespeare-text" , "attoparsec-conduit blaze-builder-conduit http-client-conduit" , "network-conduit zlib-conduit http-client-multipart" , "wai-eventsource wai-test" ] -- Cloud Haskell tests seem to be unreliable mapM_ add $ words =<< [ "distributed-process lockfree-queue network-transport-tcp" ] -- Pulls in monad-peel which does not compile when (ghcVer >= GhcMajorVersion 7 8) $ add "monad-control" -- issues with pthread mapM_ add $ words "hlibgit2 gitlib-s3 gitlib-libgit2" -- https://github.com/fpco/stackage/issues/226 add "options" -- https://github.com/gtk2hs/gtk2hs/issues/36 add "glib" add "pango" -- https://github.com/acw/bytestring-progress/issues/3 add "bytestring-progress" -- Seems to require 32-bit functions add "nettle" -- Depends on a missing graphviz executable add "graphviz" -- https://github.com/silkapp/json-schema/issues/8 when (ghcVer <= GhcMajorVersion 7 6) $ add "json-schema" -- No AWS creds available add "aws" -- Not sure why... add "singletons" add "hspec2" add "hspec-wai" -- Requires too new a version of time when (ghcVer < GhcMajorVersion 7 8) $ add "cookie" -- https://github.com/fpco/stackage/issues/285 add "diagrams-haddock" add "scientific" add "json-schema" -- https://github.com/BioHaskell/octree/issues/4 add "Octree" -- No code until we upgrade to network 2.6 add "network-uri" -- https://github.com/goldfirere/th-desugar/issues/12 add "th-desugar" -- https://github.com/haskell/c2hs/issues/108 add "c2hs" -- https://github.com/jmillikin/haskell-filesystem/issues/3 add "system-filepath" -- For some unknown reason, doctest has trouble on GHC 7.6. This only -- happens during a Stackage test. -- -- See: http://www.reddit.com/r/haskell/comments/2go92u/beginner_error_messages_in_c_vs_haskell/cklaspk when (ghcVer == GhcMajorVersion 7 6) $ add "http-types" -- Requires a running webdriver server add "webdriver" add "webdriver-snoy" -- Weird conflicts with sandboxing add "ghc-mod" add "ghcid" -- Requires locally running server add "bloodhound" when (ghcVer == GhcMajorVersion 7 8 && requireHP) $ do -- https://github.com/vincenthz/hs-asn1/issues/11 add "asn1-encoding" -- https://github.com/vincenthz/hs-tls/issues/84 add "tls" add "x509" where add = tell . singleton . PackageName -- | List of packages for our stable Hackage. All dependencies will be -- included as well. Please indicate who will be maintaining the package -- via comments. defaultStablePackages :: GhcMajorVersion -> Bool -- ^ using haskell platform? 
-> Map PackageName (VersionRange, Maintainer) defaultStablePackages ghcVer requireHP = unPackageMap $ execWriter $ do when (ghcVer == GhcMajorVersion 7 8 && requireHP) haskellPlatform78 mapM_ (add "michael@snoyman.com") $ words =<< [ "yesod yesod-newsfeed yesod-sitemap yesod-static yesod-test yesod-bin" , "markdown mime-mail-ses" , "persistent persistent-template persistent-sqlite persistent-postgresql persistent-mysql" , "network-conduit-tls yackage warp-tls keter" , "process-conduit stm-conduit" , "classy-prelude-yesod yesod-fay yesod-eventsource wai-websockets" , "random-shuffle hebrew-time" , "bzlib-conduit case-insensitive" , "conduit-extra conduit-combinators yesod-websockets" , "cabal-src" , "yesod-auth-deskcom monadcryptorandom sphinx" ] -- https://github.com/fpco/stackage/issues/261 addRange "Michael Snoyman" "cabal-install" $ case () of () | ghcVer <= GhcMajorVersion 7 6 -> "< 1.17" | ghcVer <= GhcMajorVersion 7 8 -> "< 1.19" | otherwise -> "-any" mapM_ (add "FP Complete <michael@fpcomplete.com>") $ words =<< [ "web-fpco th-expand-syns configurator smtLib" , "fixed-list indents language-c pretty-class" , "csv-conduit cassava" , "async shelly thyme" , "hxt hxt-relaxng dimensional" , "cairo diagrams-cairo gtk2hs-buildtools" , "base16-bytestring convertible" , "compdata hybrid-vectors" , "executable-path formatting quandl-api" , "fgl hmatrix hmatrix-gsl" , "alex happy c2hs" , "fpco-api aws persistent-mongoDB" , "random-fu lhs2tex" , "Chart Chart-diagrams histogram-fill random-source" , "webdriver-snoy" -- Replace with webdriver after: https://github.com/kallisti-dev/hs-webdriver/issues/53 -- https://github.com/Soostone/retry/pull/15 -- , "retry" ] when (ghcVer < GhcMajorVersion 7 8) $ do -- No GHC 7.8 support mapM_ (add "FP Complete <michael@fpcomplete.com>") $ words =<< [ "" -- too unreliable for the moment "distributed-process distributed-process-simplelocalnet" -- https://github.com/fpco/stackage/issues/295 --, "threepenny-gui unification-fd" ] addRange "FP Complete <michael@fpcomplete.com>" "compdata" "< 0.8" when (ghcVer >= GhcMajorVersion 7 8 && not requireHP) $ mapM_ (add "FP Complete <michael@fpcomplete.com>") $ words =<< [ "criterion" , "th-lift singletons th-desugar quickcheck-assertions" ] addRange "FP Complete <michael@fpcomplete.com>" "kure" "<= 2.4.10" mapM_ (add "Neil Mitchell") $ words "hlint hoogle shake derive tagsoup cmdargs safe uniplate nsis js-jquery extra bake ghcid" mapM_ (add "Alan Zimmerman") $ words "hjsmin language-javascript" mapM_ (add "Jasper Van der Jeugt") $ words "blaze-html blaze-markup stylish-haskell" mapM_ (add "Antoine Latter") $ words "uuid byteorder" mapM_ (add "Stefan Wehr <wehr@factisresearch.com>") $ words "HTF xmlgen stm-stats" when (ghcVer < GhcMajorVersion 7 8) $ add "Stefan Wehr <wehr@factisresearch.com>" "hscurses" mapM_ (add "Bart Massey <bart.massey+stackage@gmail.com>") $ words "parseargs" mapM_ (add "Vincent Hanquez") $ words =<< [ "bytedump certificate cipher-aes cipher-rc4 connection" , "cprng-aes cpu crypto-pubkey-types crypto-random-api cryptocipher" , "cryptohash hit language-java libgit pem siphash socks tls" , "tls-debug vhd language-java" ] mapM_ (add "Chris Done") $ words "statistics-linreg" -- https://github.com/isomorphism/these/issues/11 -- when (ghcVer >= GhcMajorVersion 7 8) $ add "Chris Done" "shell-conduit" #if !defined(mingw32_HOST_OS) && !defined(__MINGW32__) -- Does not compile on Windows mapM_ (add "Vincent Hanquez") $ words "udbus xenstore" #endif when (ghcVer < GhcMajorVersion 7 8) $ mapM_ (add "Alberto G. 
Corona <agocorona@gmail.com>") $ words "RefSerialize TCache Workflow MFlow" mapM_ (add "Edward Kmett <ekmett@gmail.com>") $ words =<< [ "ad adjunctions bifunctors bound charset comonad comonad-transformers" , "comonads-fd compressed concurrent-supply constraints contravariant" , "distributive either eq free groupoids heaps hyphenation" , "integration intervals kan-extensions lca lens linear monadic-arrays machines" , "mtl profunctors profunctor-extras reducers reflection" , "semigroups semigroupoids semigroupoid-extras speculation tagged void" , "graphs monad-products monad-st wl-pprint-extras wl-pprint-terminfo" , "numeric-extras parsers pointed prelude-extras reducers" , "streams vector-instances" ] when (ghcVer < GhcMajorVersion 7 8) $ mapM_ (add "Edward Kmett <ekmett@gmail.com>") $ words =<< [ "categories comonad-extras recursion-schemes syb-extras" ] mapM_ (add "Andrew Farmer <afarmer@ittc.ku.edu>") $ words "scotty wai-middleware-static" mapM_ (add "Simon Hengel <sol@typeful.net>") $ words "hspec doctest base-compat" mapM_ (add "Mario Blazevic <blamario@yahoo.com>") $ words "monad-parallel monad-coroutine incremental-parser monoid-subclasses" mapM_ (add "Brent Yorgey <byorgey@gmail.com>") $ words =<< [ "monoid-extras dual-tree vector-space-points active force-layout" , "diagrams diagrams-contrib diagrams-core diagrams-lib diagrams-svg" , "diagrams-postscript haxr" , "BlogLiterately" , "MonadRandom" -- #289: diagrams-builder diagrams-haddock BlogLiterately-diagrams ] mapM_ (add "Vincent Berthoux <vincent.berthoux@gmail.com>") $ words "JuicyPixels" mapM_ (add "Patrick Brisbin") $ words "gravatar" -- https://github.com/fpco/stackage/issues/299 -- mapM_ (add "Paul Harper <benekastah@gmail.com>") $ words "yesod-auth-oauth2" mapM_ (add "Felipe Lessa <felipe.lessa@gmail.com>") $ words "esqueleto fb fb-persistent yesod-fb yesod-auth-fb" mapM_ (add "Alexander Altman <alexanderaltman@me.com>") $ words "base-unicode-symbols containers-unicode-symbols" if ghcVer >= GhcMajorVersion 7 8 then add "Ryan Newton <ryan.newton@alum.mit.edu>" "accelerate" else addRange "Ryan Newton <ryan.newton@alum.mit.edu>" "accelerate" "< 0.15" mapM_ (add "Dan Burton <danburton.email@gmail.com>") $ words =<< [ "basic-prelude composition io-memoize numbers rev-state runmemo" , "tardis lens-family-th" ] mapM_ (add "Daniel Díaz <dhelta.diaz@gmail.com>") $ words "HaTeX matrix" when (ghcVer >= GhcMajorVersion 7 8) $ mapM_ (add "Daniel Díaz <dhelta.diaz@gmail.com>") $ words "binary-list" mapM_ (add "Gabriel Gonzalez <Gabriel439@gmail.com>") ["pipes", "pipes-parse", "pipes-concurrency"] when (ghcVer >= GhcMajorVersion 7 8) $ mapM_ (add "Chris Allen <cma@bitemyapp.com>") ["bloodhound"] mapM_ (add "Adam Bergmark <adam@bergmark.nl>") $ words "fay fay-base fay-dom fay-jquery fay-text fay-uri snaplet-fay" mapM_ (add "Boris Lykah <lykahb@gmail.com>") $ words "groundhog groundhog-th groundhog-sqlite groundhog-postgresql groundhog-mysql" mapM_ (add "Janne Hellsten <jjhellst@gmail.com>") $ words "sqlite-simple" mapM_ (add "Michal J. Gajda") $ words "iterable Octree FenwickTree" -- https://github.com/BioHaskell/hPDB/issues/2 when (ghcVer >= GhcMajorVersion 7 8) $ do mapM_ (add "Michal J. 
Gajda") $ words "hPDB hPDB-examples" mapM_ (add "Roman Cheplyaka <roma@ro-che.info>") $ words =<< [ "smallcheck tasty tasty-smallcheck tasty-quickcheck tasty-hunit tasty-golden" , "traverse-with-class regex-applicative time-lens" , "haskell-names haskell-packages hse-cpp" ] mapM_ (add "George Giorgidze <giorgidze@gmail.com>") $ words "HCodecs YampaSynth" mapM_ (add "Phil Hargett <phil@haphazardhouse.net>") $ words "courier" #if !defined(mingw32_HOST_OS) && !defined(__MINGW32__) mapM_ (add "Aycan iRiCAN <iricanaycan@gmail.com>") $ words "hdaemonize hsyslog hweblib" #else mapM_ (add "Aycan iRiCAN <iricanaycan@gmail.com>") $ words "hweblib" #endif mapM_ (add "Joachim Breitner <mail@joachim-breitner.de>") $ words "circle-packing arbtt" when (ghcVer >= GhcMajorVersion 7 8) $ mapM_ (add "Joachim Breitner <mail@joachim-breitner.de>") $ words "ghc-heap-view" when (ghcVer < GhcMajorVersion 7 8) $ mapM_ (add "John Wiegley") $ words =<< -- Removed these: bad semigroups upper bound [ "bindings-DSL github monad-extras numbers hlibgit2" , "gitlib gitlib-cmdline gitlib-test" , "gitlib-libgit2" -- https://github.com/jwiegley/gitlib/issues/31 -- "gitlib-s3" ] mapM_ (add "Aditya Bhargava <adit@adit.io") $ words "HandsomeSoup" mapM_ (add "Clint Adams <clint@debian.org>") $ words "hOpenPGP openpgp-asciiarmor MusicBrainz DAV hopenpgp-tools" -- https://github.com/fpco/stackage/issues/160 mapM_ (add "Ketil Malde") $ words =<< [ "biocore biofasta biofastq biosff" , "blastxml bioace biophd" , "biopsl" -- https://github.com/ingolia/SamTools/issues/3 samtools , "seqloc bioalign BlastHTTP" -- The following have out-of-date dependencies currently -- biostockholm memexml RNAwolf -- , "Biobase BiobaseDotP BiobaseFR3D BiobaseInfernal BiobaseMAF" -- , "BiobaseTrainingData BiobaseTurner BiobaseXNA BiobaseVienna" -- , "BiobaseTypes BiobaseFasta" -- MC-Fold-DP ] -- https://github.com/fpco/stackage/issues/163 addRange "Michael Snoyman" "biophd" "< 0.0.6 || > 0.0.6" mapM_ (add "Silk <code@silk.co>") $ words =<< [ "arrow-list attoparsec-expr bumper code-builder fay-builder" , "hxt-pickle-utils multipart regular-xmlpickler" , "tostring uri-encode imagesize-conduit" ] when (ghcVer >= GhcMajorVersion 7 8 && not requireHP) $ do mapM_ (add "Silk <code@silk.co>") $ words =<< [ "aeson-utils generic-aeson json-schema" , "rest-client rest-core rest-gen rest-happstack rest-snap rest-stringmap" , "rest-types rest-wai tostring uri-encode imagesize-conduit" ] mapM_ (add "Simon Michael <simon@joyful.com>") $ words "hledger" mapM_ (add "Mihai Maruseac <mihai.maruseac@gmail.com>") $ words "io-manager" mapM_ (add "Dimitri Sabadie <dimitri.sabadie@gmail.com") $ words "monad-journal" mapM_ (add "Thomas Schilling <nominolo@googlemail.com>") $ words "ghc-syb-utils" mapM_ (add "Boris Buliga <d12frosted@icloud.com>") $ words "ghc-mod io-choice" when (ghcVer >= GhcMajorVersion 7 8) $ mapM_ (add "Boris Buliga <d12frosted@icloud.com>") $ words "system-canonicalpath" when (ghcVer >= GhcMajorVersion 7 8) $ mapM_ (add "Yann Esposito <yann.esposito@gmail.com>") $ words "holy-project" when requireHP $ addRange "Yann Esposito <yann.esposito@gmail.com>" "holy-project" "< 0.1.1.1" mapM_ (add "Paul Rouse <pgr@doynton.org>") $ words "yesod-auth-hashdb" add "Toralf Wittner <tw@dtex.org>" "zeromq4-haskell" mapM_ (add "trupill@gmail.com") $ words "djinn-lib djinn-ghc" -- https://github.com/fpco/stackage/issues/216 -- QuickCheck constraint -- when (ghcVer == GhcMajorVersion 7 6) $ -- addRange "Michael Snoyman" "repa" "< 3.2.5.1" -- 
https://github.com/fpco/stackage/issues/217 addRange "Michael Snoyman" "transformers" "< 0.4" addRange "Michael Snoyman" "mtl" "< 2.2" addRange "Michael Snoyman" "lifted-base" "< 0.2.2.2" -- https://github.com/liyang/thyme/issues/29 when (ghcVer <= GhcMajorVersion 7 6) $ addRange "Michael Snoyman" "thyme" "< 0.3.5.3" -- https://github.com/fpco/stackage/issues/224 when (ghcVer <= GhcMajorVersion 7 6) $ do addRange "Michael Snoyman" "zip-archive" "== 0.2.2.1" addRange "Michael Snoyman" "pandoc" "== 1.12.4.2" addRange "Michael Snoyman" "texmath" "<= 0.6.6.3" addRange "Michael Snoyman" "attoparsec" "== 0.11.3.1" addRange "Michael Snoyman" "parsers" "< 0.11" addRange "Michael Snoyman" "scientific" "< 0.3" addRange "Michael Snoyman" "aeson" "< 0.7.0.5" addRange "Michael Snoyman" "aeson-utils" "< 0.2.2" addRange "Michael Snoyman" "formatting" "< 5" addRange "Michael Snoyman" "aws" "< 0.10" -- 0.16.2 fixes dependency issues with different version of GHC -- and Haskell Platform. Now builds on GHC 7.4-7.8. Version 1.0 is -- guaranteed to break the API. See -- https://travis-ci.org/jswebtools/language-ecmascript for -- current build status. addRange "Andrey Chudnov <oss@chudnov.com>" "language-ecmascript" ">= 0.16.2 && < 1.0" -- https://github.com/fpco/stackage/issues/271 when (ghcVer < GhcMajorVersion 7 8) $ addRange "Michael Snoyman" "aeson" "< 0.8" -- https://github.com/fpco/stackage/issues/274 addRange "Michael Snoyman" "pandoc-citeproc" "< 0.4" -- https://github.com/fpco/stackage/issues/276 addRange "Michael Snoyman" "network" "< 2.6" addRange "Michael Snoyman" "network-uri" "< 2.6" -- https://github.com/fpco/stackage/issues/279 addRange "Michael Snoyman" "MonadRandom" "< 0.2" -- https://github.com/fpco/stackage/issues/288 addRange "Michael Snoyman" "text" "< 1.2" -- Force a specific version that's compatible with transformers 0.3 addRange "Michael Snoyman" "transformers-compat" "== 0.3.3.3" -- https://github.com/fpco/stackage/issues/291 addRange "Michael Snoyman" "random" "< 1.0.1.3" -- https://github.com/fpco/stackage/issues/314 addRange "Michael Snoyman" "hxt" "< 9.3.1.9" when (ghcVer == GhcMajorVersion 7 8 && requireHP) $ do -- Yay workarounds for unnecessarily old versions let peg x y = addRange "Haskell Platform" x y peg "aeson" "== 0.7.0.4" peg "scientific" "== 0.2.0.2" peg "criterion" "<= 0.8.1.0" peg "tasty-quickcheck" "< 0.8.0.3" peg "formatting" "< 5.0" peg "parsers" "< 0.11" peg "lens" "< 4.2" peg "contravariant" "< 1" peg "adjunctions" "< 4.2" peg "kan-extensions" "< 4.1" peg "semigroupoids" "< 4.1" peg "aws" "< 0.10" peg "pandoc" "< 1.13" peg "texmath" "<= 0.6.6.3" peg "checkers" "== 0.3.2" peg "HandsomeSoup" "< 0.3.3" add :: String -> String -> Writer PackageMap () add maintainer package = addRange maintainer package "-any" addRange :: String -> String -> String -> Writer PackageMap () addRange maintainer package range = case simpleParse range of Nothing -> error $ "Invalid range " ++ show range ++ " for " ++ package Just range' -> tell $ PackageMap $ Map.singleton (PackageName package) (range', Maintainer maintainer) -- | Hard coded Haskell Platform versions haskellPlatform78 :: Writer PackageMap () haskellPlatform78 = do addRange "Haskell Platform" "ghc" "== 7.8.3" addRange "Haskell Platform" "haddock" "== 2.14.3" addRange "Haskell Platform" "array" "== 0.5.0.0" addRange "Haskell Platform" "base" "== 4.7.0.1" addRange "Haskell Platform" "bytestring" "== 0.10.4.0" addRange "Haskell Platform" "Cabal" "== 1.18.1.3" addRange "Haskell Platform" "containers" "== 0.5.5.1" addRange 
"Haskell Platform" "deepseq" "== 1.3.0.2" addRange "Haskell Platform" "directory" "== 1.2.1.0" addRange "Haskell Platform" "filepath" "== 1.3.0.2" addRange "Haskell Platform" "haskell2010" "== 1.1.2.0" addRange "Haskell Platform" "haskell98" "== 2.0.0.3" addRange "Haskell Platform" "hpc" "== 0.6.0.1" addRange "Haskell Platform" "old-locale" "== 1.0.0.6" addRange "Haskell Platform" "old-time" "== 1.1.0.2" addRange "Haskell Platform" "pretty" "== 1.1.1.1" addRange "Haskell Platform" "process" "== 1.2.0.0" addRange "Haskell Platform" "template-haskell" "== 2.9.0.0" addRange "Haskell Platform" "time" "== 1.4.2" addRange "Haskell Platform" "transformers" "== 0.3.0.0" addRange "Haskell Platform" "unix" "== 2.7.0.1" addRange "Haskell Platform" "xhtml" "== 3000.2.1" addRange "Haskell Platform" "async" "== 2.0.1.5" addRange "Haskell Platform" "attoparsec" "== 0.10.4.0" addRange "Haskell Platform" "case-insensitive" "== 1.1.0.3" addRange "Haskell Platform" "fgl" "== 5.5.0.1" addRange "Haskell Platform" "GLURaw" "== 1.4.0.1" addRange "Haskell Platform" "GLUT" "== 2.5.1.1" addRange "Haskell Platform" "hashable" "== 1.2.2.0" addRange "Haskell Platform" "haskell-src" "== 1.0.1.6" addRange "Haskell Platform" "html" "== 1.0.1.2" addRange "Haskell Platform" "HTTP" "== 4000.2.10" addRange "Haskell Platform" "HUnit" "== 1.2.5.2" addRange "Haskell Platform" "mtl" "== 2.1.3.1" addRange "Haskell Platform" "network" "== 2.4.2.3" addRange "Haskell Platform" "OpenGL" "== 2.9.2.0" addRange "Haskell Platform" "OpenGLRaw" "== 1.5.0.0" addRange "Haskell Platform" "parallel" "== 3.2.0.4" addRange "Haskell Platform" "parsec" "== 3.1.5" addRange "Haskell Platform" "primitive" "== 0.5.2.1" addRange "Haskell Platform" "QuickCheck" "== 2.6" addRange "Haskell Platform" "random" "== 1.0.1.1" addRange "Haskell Platform" "regex-base" "== 0.93.2" addRange "Haskell Platform" "regex-compat" "== 0.95.1" addRange "Haskell Platform" "regex-posix" "== 0.95.2" addRange "Haskell Platform" "split" "== 0.2.2" addRange "Haskell Platform" "stm" "== 2.4.2" addRange "Haskell Platform" "syb" "== 0.4.1" addRange "Haskell Platform" "text" "== 1.1.0.0" addRange "Haskell Platform" "transformers" "== 0.3.0.0" addRange "Haskell Platform" "unordered-containers" "== 0.2.4.0" addRange "Haskell Platform" "vector" "== 0.10.9.1" addRange "Haskell Platform" "xhtml" "== 3000.2.1" addRange "Haskell Platform" "zlib" "== 0.5.4.1" addRange "Haskell Platform" "alex" "== 3.1.3" addRange "Haskell Platform" "cabal-install" "== 1.18.0.5" addRange "Haskell Platform" "happy" "== 1.19.4" addRange "Haskell Platform" "hscolour" "== 1.20.3" -- | Replacement Github users. This is useful when a project is owned by an -- organization. It also lets you ping multiple users. -- -- Note that cross organization team mentions aren't allowed by Github. convertGithubUser :: String -> [String] convertGithubUser x = fromMaybe [x] $ Map.lookup (map toLower x) pairs where pairs = Map.fromList [ ("diagrams", ["byorgey", "fryguybob", "jeffreyrosenbluth", "bergey"]) , ("yesodweb", ["snoyberg"]) , ("fpco", ["snoyberg"]) , ("faylang", ["bergmark"]) , ("silkapp", ["bergmark", "hesselink"]) , ("snapframework",["mightybyte"]) , ("haskell-ro", ["mihaimaruseac"]) ]
Tarrasch/stackage
Stackage/Config.hs
mit
26,537
0
16
6,248
4,018
1,867
2,151
416
2
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE RecordWildCards #-}
module Language.F2JS.Saturate where

import Data.List
import qualified Data.Map as M
import Language.F2JS.AST
import Language.F2JS.Util

type JSMap = M.Map Name Int

-- | Check to see whether the expr is a chain of applications
-- terminating in a global name. If this is the case it returns the
-- global name and the length of the chain.
appChain :: Expr -> Maybe (Name, Int)
appChain (Global n) = Just (n, 0)
appChain (App l _)
  | Just (n, i) <- appChain l = Just (n, i + 1)
  | otherwise = Nothing
appChain e = Nothing

-- | Given an expression that needs to be applied to N more arguments,
-- eta expand it so it is fully saturated.
saturate :: Int -> Expr -> Expr
saturate i e =
  let e' = succExpr i e
      vars = map Var [i - 1, i - 2 .. 0]
  in abstract i (foldl' App e' vars)
  where abstract 0 !e = e
        abstract n !e = abstract (n - 1) (Lam Nothing e)

-- | In an expression, saturate all foreign calls and primop
-- applications so they're fully applied. This needs the arity of all
-- foreign calls, these are stored in a 'JSMap'.
saturateExpr :: JSMap -> Expr -> Expr
saturateExpr jsm = \case
  -- Expand JS calls and primops
  e | Just (n, i) <- appChain e
    , Just j <- M.lookup n jsm
    , i < j -> goChain (saturate (j - i) e)
  App (App p@PrimOp{} l) r -> App (App p $ go l) (go r)
  App p@PrimOp{} r -> Lam Nothing $ App (App p (succExpr 1 $ go r)) (Var 0)
  p@PrimOp{} -> Lam Nothing . Lam Nothing $ App (App p $ Var 1) (Var 0)

  -- And recurse everywhere else
  Proj e n -> Proj (go e) n
  Record ns -> Record $ map (fmap go) ns
  LetRec binds e -> LetRec (map goBind binds) (go e)
  Lam c e -> Lam c (go e)
  App l r -> App (go l) (go r)
  Case e alts -> Case (go e) (map (fmap go) alts)
  Con t es -> Con t (map go es)
  e -> e
 where go = saturateExpr jsm

       -- | Expand all the arguments to a function call chain
       goChain (App l r) = App (goChain l) (go r)
       goChain e = e

       goBind (Bind clos expr) = Bind clos (go expr)

-- | Build up a map of JS arities from a list of declarations.
buildJSMap :: [Decl] -> JSMap
buildJSMap = foldl' go M.empty
  where go jsm Foreign {..} = M.insert jsName jsArity jsm
        go jsm _ = jsm

-- | Saturate all primitive and foreign applications
saturateDecls :: [Decl] -> [Decl]
saturateDecls decs = map go decs
  where go f@Foreign{} = f
        go (TopLevel n bound e) = TopLevel n bound (saturateExpr jsm e)
        jsm = buildJSMap decs
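-- A worked example of 'saturate' (assuming, as elsewhere in this compiler,
-- that 'Var' carries a de Bruijn index and that 'succExpr' leaves globals
-- untouched): a global @f@ of arity 2 that appears with no arguments, i.e.
-- @Global f@ with a shortfall of 2, becomes
--
-- >  saturate 2 (Global f)
-- >    = Lam Nothing (Lam Nothing (App (App (Global f) (Var 1)) (Var 0)))
--
-- which is the eta-expansion \x y -> f x y, so the foreign call always ends
-- up applied to its full argument count.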
jozefg/f2js
src/Language/F2JS/Saturate.hs
mit
2,634
0
14
702
966
486
480
52
13
{- |
Description : Compiler Correctness from Graham Hutton’s Programming in Haskell
              first edition, Section 13.7.
              Revised & more efficient compiler, called comp'
-}
module CompilerRev where

data Expr = Val Int | Add Expr Expr

eval :: Expr -> Int
eval (Val n)   = n
eval (Add x y) = eval x + eval y

type Stack = [Int]
type Code  = [Op]

data Op = PUSH Int | ADD deriving Show

exec :: Code -> Stack -> Maybe Stack
exec []           s           = Just s
exec (PUSH n : c) s           = exec c (n : s)
exec (ADD : c)    (m : n : s) = exec c (n+m : s)
exec _            _           = Nothing

comp :: Expr -> Code
comp e = comp' e []

comp' :: Expr -> Code -> Code
comp' (Val n)   c = PUSH n : c
comp' (Add x y) c = comp' x (comp' y (ADD : c))
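-- The correctness statement this module is built around is (roughly)
--     exec (comp e) s == Just (eval e : s)
-- A quick sanity check one might append here ('example' and 'checkExample'
-- are new names, not part of Hutton's text):

example :: Expr
example = Add (Val 1) (Add (Val 2) (Val 3))

checkExample :: Bool
checkExample = exec (comp example) [] == Just [eval example]   -- True: both give 6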
antalsz/hs-to-coq
examples/compiler/CompilerRev.hs
mit
729
0
9
210
318
166
152
19
1
{-# LANGUAGE DeriveDataTypeable, GeneralizedNewtypeDeriving, RecordWildCards, TemplateHaskell
           , TypeFamilies, OverloadedStrings, ScopedTypeVariables #-}
module Main where

import Control.Exception (bracket)
import Data.Acid (AcidState, openLocalState)
import Data.Acid.Local (createCheckpointAndClose)
import Happstack.Lite

import Blog.Models
import Blog.Controllers

fileServing :: ServerPart Response
fileServing = serveDirectory EnableBrowsing ["index.html"] "static"

config :: ServerConfig
config = ServerConfig { port      = 8001
                      , ramQuota  = 1 * 10^6
                      , diskQuota = 20 * 10^6
                      , tmpDir    = "/tmp/"
                      }

main :: IO ()
main = do
  bracket (openLocalState initialBlogState)
          (createCheckpointAndClose)
          (\acid -> serve (Just config) (blogRoutes acid))

-- Routing

blogRoutes :: AcidState Blog -> ServerPart Response
blogRoutes acid = msum
  [ dir "static" $ fileServing
  , dir "post"   $ postDetail acid
  , dir "post2"  $ postDetail2 acid
  , dir "post3"  $ postCrud acid
  , dir "edit"   $ editPost acid
  , homePage acid
  ]
TheFrameworkGame/blog-haskell-happstack-attempt
Main.hs
mit
1,177
0
12
315
283
152
131
31
1
import Prelude hiding (replicate)

replicate :: Int -> a -> [a]
replicate 0 _ = []
replicate n x = x : replicate (n - 1) x

test = replicate 3 'x'
calebgregory/fp101x
wk3/replicate_recur.hs
mit
147
0
8
34
74
39
35
5
1
{-# OPTIONS #-} -- ------------------------------------------------------------ module Holumbus.Crawler.Html where import Data.Function.Selector import Data.List import Data.Maybe import Holumbus.Crawler.Types import Holumbus.Crawler.URIs import System.FilePath import Text.XML.HXT.Core hiding ( when , getState ) {- just for debugging import qualified Debug.Trace as D -- -} -- ------------------------------------------------------------ defaultHtmlCrawlerConfig :: AccumulateDocResult a r -> MergeDocResults r -> CrawlerConfig a r defaultHtmlCrawlerConfig op op2 = ( setS theSysConfig ( withValidate no >>> withParseHTML yes >>> withInputEncoding isoLatin1 >>> withWarnings no >>> withIgnoreNoneXmlContents yes ) >>> setS thePreRefsFilter this >>> setS theProcessRefs getHtmlReferences $ defaultCrawlerConfig op op2 ) -- ------------------------------------------------------------ -- | Collect all HTML references to other documents within a, frame and iframe elements getHtmlReferences :: ArrowXml a => a XmlTree URI getHtmlReferences = fromLA (getRefs $< computeDocBase) where getRefs base = deep (hasNameWith ( (`elem` ["a","frame","iframe"]) . localPart ) ) >>> ( getAttrValue0 "href" <+> getAttrValue0 "src" ) >>^ toAbsRef base getDocReferences :: ArrowXml a => a XmlTree URI getDocReferences = fromLA (getRefs $< computeDocBase) where getRefs base = multi selRefs >>^ toAbsRef base where hasLocName n = hasNameWith ((== n) . localPart) selRef en an = hasLocName en :-> getAttrValue0 an selRefs = choiceA $ map (uncurry selRef) names ++ [ appletRefs , objectRefs , this :-> none ] names = [ ("img", "src") , ("input", "src") -- input type="image" scr="..." , ("link", "href") , ("script", "src") ] appletRefs = hasLocName "applet" :-> (getAppRef $< getAppBase) where getAppBase = (getAttrValue0 "codebase" `withDefault` ".") >>^ toAbsRef base getAppRef ab = getAttrValue0 "code" >>^ toAbsRef ab objectRefs = hasLocName "object" :-> none -- TODO -- | construct an absolute URI by a base URI and a possibly relative URI toAbsRef :: URI -> URI -> URI toAbsRef base ref = ( expandURIString ref -- here >>> is normal function composition >>> fromMaybe ref >>> removeFragment ) base where removeFragment r | "#" `isPrefixOf` path = reverse . tail $ path | otherwise = r where path = dropWhile (/='#') . reverse $ r -- ------------------------------------------------------------ -- | Compute the base URI of a HTML page with respect to a possibly -- given base element in the head element of a html page. -- -- Stolen from Uwe Schmidt, http:\/\/www.haskell.org\/haskellwiki\/HXT -- and then stolen back again by Uwe from Holumbus.Utility computeDocBase :: ArrowXml a => a XmlTree String computeDocBase = ( ( ( getByPath ["html", "head", "base"] >>> getAttrValue "href" -- and compute document base with transfer uri and base ) &&& getAttrValue transferURI ) >>> expandURI ) `orElse` getAttrValue transferURI -- the default: take the transfer uri -- ------------------------------------------------------------ getByPath :: ArrowXml a => [String] -> a XmlTree XmlTree getByPath = seqA . map (\ n -> getChildren >>> hasName n) getHtmlTitle :: ArrowXml a => a XmlTree String getHtmlTitle = getAllText $ getByPath ["html", "head", "title"] getHtmlPlainText :: ArrowXml a => a XmlTree String getHtmlPlainText = getAllText $ getByPath ["html", "body"] getAllText :: ArrowXml a => a XmlTree XmlTree -> a XmlTree String getAllText getText' = ( getText' >>> ( fromLA $ deep getText ) >>^ (" " ++) -- text parts are separated by a space ) >. 
(concat >>> normalizeWS) -- normalize Space isHtmlContents :: ArrowXml a => a XmlTree XmlTree isHtmlContents = ( getAttrValue transferMimeType >>> isA ( `elem` [text_html, application_xhtml] ) ) `guards` this isPdfContents :: ArrowXml a => a XmlTree XmlTree isPdfContents = ( getAttrValue transferMimeType >>> isA ( == application_pdf ) ) `guards` this getTitleOrDocName :: ArrowXml a => a XmlTree String getTitleOrDocName = ( getHtmlTitle >>> isA (not . null) ) `orElse` ( getAttrValue transferURI >>^ takeFileName ) isElemWithAttr :: ArrowXml a => String -> String -> (String -> Bool) -> a XmlTree XmlTree isElemWithAttr en an av = isElem >>> hasName en >>> hasAttrValue an av -- ------------------------------------------------------------ application_pdf :: String application_pdf = "application/pdf" -- ------------------------------------------------------------ -- | normalize whitespace by splitting a text into words and joining this together with unwords normalizeWS :: String -> String normalizeWS = words >>> unwords -- | take the first n chars of a string, if the input -- is too long the cut off is indicated by \"...\" at the end limitLength :: Int -> String -> String limitLength n s | length s' <= n = s | otherwise = take (n - 3) s' ++ "..." where s' = take (n + 1) s -- ------------------------------------------------------------
ichistmeinname/holumbus
src/Holumbus/Crawler/Html.hs
mit
8,764
0
15
4,596
1,227
653
574
120
1
{- | Translation from Concrete to Abstract and back -} {-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, TypeSynonymInstances, UndecidableInstances #-} {-# LANGUAGE NondecreasingIndentation #-} module Scoping (Scope(..),Parse(..),ParseError,Print(..)) where import Prelude hiding (mapM,print) import Control.Applicative import Control.Monad.State hiding (mapM) import Data.Map (Map) import qualified Data.Map as Map import Data.Set (Set) import qualified Data.Set as Set import Data.Traversable import qualified Concrete as C import qualified Abstract as A import qualified OperatorPrecedenceParser as O import Util import Text.PrettyPrint -- * abstract scoping monad type ParseError = O.ParseError {- class (Applicative m, Monad m) => ScopeReader m where askName :: A.Name -> m C.Name askFixity :: C.Name -> m (Maybe C.Fixity) -} class (Applicative m, Monad m) => Scope m where addGlobal :: (A.Name -> A.Ident) -> C.Name -> m A.Name addCon :: C.Name -> m A.Name addCon = addGlobal A.Con addDef :: C.Name -> m A.Name addDef = addGlobal A.Def addFixity :: C.Name -> C.Fixity -> m () addLocal :: (A.Name -> A.Ident) -> C.Name -> (A.Name -> m a) -> m a addVar :: C.Name -> (A.Name -> m a) -> m a addVar = addLocal A.Var addLet :: C.Name -> (A.Name -> m a) -> m a addLet = addLocal A.Let getName :: A.Name -> m C.Name getFixity :: C.Name -> m (Maybe C.Fixity) getIdent :: C.Name -> m A.Ident -- getIdent n = fst <$> getIdentAndFixity n -- getIdentAndFixity :: C.Name -> m (A.Ident, C.Fixity) parseError :: ParseError -> m a genericError :: String -> m a genericError = parseError . O.GenericError -- * parsing class Parse c a where parse :: Scope m => c -> m a instance Parse c a => Parse (Maybe c) (Maybe a) where parse = mapM parse instance Parse C.Declarations A.Declarations where parse (C.Declarations cdecls) = A.Declarations . 
concat <$> mapM parse cdecls instance Parse C.Declaration [A.Declaration] where parse cdecl = case cdecl of C.TypeSig n t -> return <$> do t <- parse t n <- addCon n return $ A.TypeSig n t C.Defn n mt e -> return <$> do mt <- parse mt e <- parse e n <- addDef n return $ A.Defn n mt e {- C.GLet n e -> return <$> do e <- parse e n <- addDef n return $ A.GLet n e -} C.Fixity n fx -> const [] <$> addFixity n fx {- instance Parse C.Atom A.Atom where parse a = case a of C.Typ -> return $ A.Typ C.Ident n -> getAtom n -} instance Parse C.Expr A.Expr where parse cexpr = case cexpr of C.Typ -> return $ A.Typ C.Ident n -> A.Ident <$> getIdent n -- C.Atom a -> A.Atom <$> parse a C.Fun t1 t2 -> A.Pi Nothing <$> parse t1 <*> parse t2 C.Pi x t1 t2 -> do t1 <- parse t1 addVar x $ \ x -> A.Pi (Just x) t1 <$> parse t2 C.LLet x e e' -> do e <- parse e addLet x $ \ x -> A.LLet x e <$> parse e' C.Lam x mt e -> do mt <- parse mt addVar x $ \ x -> A.Lam x mt <$> parse e C.Apps es -> parseApplication =<< mapM parse es -- applications [C.Expr] are parsed into list of Stack items -- which is then resolved into an A.Expr type Item = O.Item Int A.Expr {- instance Parse C.Atom Item where parse catom = case catom of C.Typ -> return $ O.Atom (A.Atom A.Typ) C.Ident n -> do (a, fx) <- getAtomAndFixity n return $ case fx of O.Nofix -> O.Atom $ A.Atom a O.Infix{} -> O.Op fx (\ [x,y] -> A.Atom a `A.App` x `A.App` y) _ -> O.Op fx (\ [x] -> A.Atom a `A.App` x) -} {- instance Parse C.Ident Item where parse n = do (a, fx) <- getIdentAndFixity n return $ case fx of O.Nofix -> O.Ident $ A.Ident a O.Infix{} -> O.Op fx (\ [x,y] -> A.Ident a `A.App` x `A.App` y) _ -> O.Op fx (\ [x] -> A.Ident a `A.App` x) -} instance Parse C.Expr Item where parse cexpr = case cexpr of C.Ident n -> do a <- getIdent n mfx <- getFixity n return $ case mfx of Nothing -> O.Atom $ A.Ident a Just fx@O.Infix{} -> O.Op fx (\ [x,y] -> A.Ident a `A.App` x `A.App` y) Just fx -> O.Op fx (\ [x] -> A.Ident a `A.App` x) _ -> O.Atom <$> parse cexpr instance O.Juxtaposition A.Expr where juxtaposition = A.App parseApplication :: Scope m => [Item] -> m A.Expr parseApplication is = case O.parseApplication is of Left err -> parseError err Right e -> return e -- * unparsing {- How to print an expression We distinguish 3 kinds of abstract names - global names (Con,Def) - user-generated local names - system-generated local names (e.g., from quote) We can print an expression from left-to-right, bottom-up as follows: a) never shadow a global name by a local name - state: * used concrete names, initially the set of all global names * map from abstract names to concrete names - when encountering a global id, just look up its name - when encountering a local id, check whether it is already in the map * if yes, print its name * if no, assign it a name and add it - when abstracting a name, check whether it is in the map * if yes, print its name, delete it * if no, we have a void abstraction, then choose an unused version of it b) shadowing of unused global names by local names allowed - first compute the used global names - proceed as above, but start with the set of computed global names In case of b, we do not need to maintain a set of global names after scope checking. We can store name suggestions locally with it each abstract name as in Agda. Note that there will be sharing, so it is not more memory intensive. 
-} class Print a c where print :: a -> c instance Print A.Declarations C.Declarations where print (A.Declarations adecls) = C.Declarations $ map print adecls instance Print A.Declaration C.Declaration where print adecl = case adecl of A.TypeSig n t -> C.TypeSig (A.suggestion n) $ print t A.Defn n mt e -> C.Defn (A.suggestion n) (fmap print mt) $ print e -- A.GLet n e -> C.GLet (A.suggestion n) $ print e instance Print A.Expr C.Expr where print e = evalState (printExpr e) $ nameSet $ A.globalCNames e -- print e = fst $ printExpr e $ nameSet $ A.globalCNames e printIdent :: A.Ident -> NameM C.Name printIdent id = case id of A.Var n -> askName n -- locals are potentially renamed _ -> return $ A.suggestion $ A.name id -- globals have unique concrete name printExpr :: A.Expr -> NameM C.Expr printExpr e = case e of A.Ident a -> C.Ident <$> printIdent a A.Typ -> return $ C.Typ A.Pi Nothing t1 t2 -> C.Fun <$> printExpr t1 <*> printExpr t2 A.Pi (Just x) t1 t2 -> do t1 <- printExpr t1 t2 <- printExpr t2 x <- bindName x return $ C.Pi x t1 t2 -- C.Pi <$> bindName x <*> printExpr t1 <*> printExpr t2 A.LLet x e e' -> do e <- printExpr e e' <- printExpr e' x <- bindName x return $ C.LLet x e e' A.Lam x mt e -> do mt <- mapM printExpr mt e <- printExpr e x <- bindName x return $ C.Lam x mt e -- C.Lam <$> bindName x <*> mapM printExpr mt <*> printExpr e A.App f e -> C.Apps <$> printApp f [e] printApp :: A.Expr -> [A.Expr] -> NameM [C.Expr] printApp f es = case f of A.App f e -> printApp f (e : es) -- put extra parentheses around lambda if it is in the head: A.Lam{} -> printExpr f >>= \ f -> (C.Apps [f] :) <$> mapM printExpr es _ -> mapM printExpr (f : es) data NameSet = NameSet { usedNames :: Set C.Name , naming :: Map A.UID C.Name } -- | Create a name set from an initial set of used global names. nameSet :: Set C.Name -> NameSet nameSet ns = NameSet ns Map.empty type NameM = State NameSet -- type NameM a = NameSet -> (a, NameSet) -- | Retrieve and delete name. bindName :: A.Name -> NameM C.Name bindName (A.Name x n) = do ns <- get let nam = naming ns case Map.lookup x nam of Just n' -> do put $ NameSet (Set.delete n' (usedNames ns)) (Map.delete x nam) return n' Nothing -> nextVariant n -- | Retrieve or insert name. askName :: A.Name -> NameM C.Name askName n = do nam <- gets naming -- maybe (internalError ["Scoping.askName: not in map", show n]) return $ maybe (nextName n) return $ Map.lookup (A.uid n) nam nextName :: A.Name -> NameM C.Name nextName (A.Name x n) = do n' <- nextVariant n modify $ \ (NameSet ns nam) -> NameSet (Set.insert n' ns) (Map.insert x n' nam) return n' -- | Returns an unused variant of a concrete name. nextVariant :: C.Name -> NameM C.Name nextVariant n = do ns <- gets usedNames let loop i = do let n' = variant n i if Set.member n' ns then loop (i+1) else return n' loop 0 variant :: C.Name -> Int -> C.Name variant n i = case i of 0 -> n 1 -> n ++ "'" 2 -> n ++ "''" 3 -> n ++ "'''" _ -> n ++ "'" ++ show i
andreasabel/helf
src/Scoping.hs
mit
9,200
0
21
2,606
2,630
1,296
1,334
174
7
-----------------------------------------------------------------------------
-- |
-- Module      :  ComponentModel.Types
-- Copyright   :  (c) Rodrigo Bonifacio 2008, 2009
-- License     :  LGPL
--
-- Maintainer  :  rba2@cin.ufpe.br
-- Stability   :  provisional
-- Portability :  portable
--
-- A generic (and simple) component model in Haskell for product
-- line development. Although simple, it can be used with the purpose
-- of representing source code assets as file paths.
--
-----------------------------------------------------------------------------

{-# OPTIONS -fglasgow-exts #-}

module ComponentModel.Types where

import Data.Generics

import BasicTypes

type Component = String

data GeneratedBuildData = GeneratedBuildData {
    components :: [ComponentMapping],
    buildEntries :: [Id],
    preProcessFiles :: [Component]
 } deriving(Data, Typeable)

type ComponentModel = [ComponentMapping]

type ComponentMapping = (Id, Component)
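-- An illustrative note (not part of the original module): a 'ComponentMapping'
-- pairs an identifier from 'BasicTypes' with the path of a source-code asset.
-- The example below is hypothetical and assumes 'Id' is a String-like type.
--
-- > exampleMapping :: ComponentMapping
-- > exampleMapping = ("logging", "src/assets/Logging.hs")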
hephaestus-pl/hephaestus
willian/hephaestus-integrated/asset-base/component-model/src/ComponentModel/Types.hs
mit
954
0
9
149
104
72
32
12
0
{-# LANGUAGE PatternSynonyms #-} -- For HasCallStack compatibility {-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} module JSDOM.Generated.WebGPUCommandQueue (createCommandBuffer, createCommandBuffer_, createCommandBufferUnsafe, createCommandBufferUnchecked, setLabel, getLabel, WebGPUCommandQueue(..), gTypeWebGPUCommandQueue) where import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..)) import qualified Prelude (error) import Data.Typeable (Typeable) import Data.Traversable (mapM) import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!)) import Data.Int (Int64) import Data.Word (Word, Word64) import JSDOM.Types import Control.Applicative ((<$>)) import Control.Monad (void) import Control.Lens.Operators ((^.)) import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync) import JSDOM.Enums -- | <https://developer.mozilla.org/en-US/docs/Web/API/WebGPUCommandQueue.createCommandBuffer Mozilla WebGPUCommandQueue.createCommandBuffer documentation> createCommandBuffer :: (MonadDOM m) => WebGPUCommandQueue -> m (Maybe WebGPUCommandBuffer) createCommandBuffer self = liftDOM ((self ^. jsf "createCommandBuffer" ()) >>= fromJSVal) -- | <https://developer.mozilla.org/en-US/docs/Web/API/WebGPUCommandQueue.createCommandBuffer Mozilla WebGPUCommandQueue.createCommandBuffer documentation> createCommandBuffer_ :: (MonadDOM m) => WebGPUCommandQueue -> m () createCommandBuffer_ self = liftDOM (void (self ^. jsf "createCommandBuffer" ())) -- | <https://developer.mozilla.org/en-US/docs/Web/API/WebGPUCommandQueue.createCommandBuffer Mozilla WebGPUCommandQueue.createCommandBuffer documentation> createCommandBufferUnsafe :: (MonadDOM m, HasCallStack) => WebGPUCommandQueue -> m WebGPUCommandBuffer createCommandBufferUnsafe self = liftDOM (((self ^. jsf "createCommandBuffer" ()) >>= fromJSVal) >>= maybe (Prelude.error "Nothing to return") return) -- | <https://developer.mozilla.org/en-US/docs/Web/API/WebGPUCommandQueue.createCommandBuffer Mozilla WebGPUCommandQueue.createCommandBuffer documentation> createCommandBufferUnchecked :: (MonadDOM m) => WebGPUCommandQueue -> m WebGPUCommandBuffer createCommandBufferUnchecked self = liftDOM ((self ^. jsf "createCommandBuffer" ()) >>= fromJSValUnchecked) -- | <https://developer.mozilla.org/en-US/docs/Web/API/WebGPUCommandQueue.label Mozilla WebGPUCommandQueue.label documentation> setLabel :: (MonadDOM m, ToJSString val) => WebGPUCommandQueue -> val -> m () setLabel self val = liftDOM (self ^. jss "label" (toJSVal val)) -- | <https://developer.mozilla.org/en-US/docs/Web/API/WebGPUCommandQueue.label Mozilla WebGPUCommandQueue.label documentation> getLabel :: (MonadDOM m, FromJSString result) => WebGPUCommandQueue -> m result getLabel self = liftDOM ((self ^. js "label") >>= fromJSValUnchecked)
ghcjs/jsaddle-dom
src/JSDOM/Generated/WebGPUCommandQueue.hs
mit
3,282
0
13
470
678
397
281
-1
-1
module Models.Record where

import Import
import Yesod.Form.Bootstrap3
import Helpers.Common

recordPagesTotal :: Record -> Int
recordPagesTotal r = _recordPageEnd r - _recordPageStart r

recordForm :: ReadableId -> UserId -> Maybe Record -> Form Record
recordForm readableId userId mrecord = renderBootstrap3 BootstrapBasicForm $ Record
    <$> pure readableId
    <*> areq intField (bs "Start page") (_recordPageStart <$> mrecord)
    <*> areq intField (bs "End page") (_recordPageEnd <$> mrecord)
    <*> pure userId
    <*> lift createdAt
    <*  submitButton "Submit"
  where
    createdAt = case mrecord of
        Nothing -> (liftIO getCurrentTime)
        Just record -> return $ _recordCreatedAt record
darthdeus/reedink
Models/Record.hs
mit
744
0
12
171
204
101
103
18
2
{-# htermination (map :: (a -> b) -> (List a) -> (List b)) #-}

import qualified Prelude

data MyBool = MyTrue | MyFalse

data List a = Cons a (List a) | Nil

map :: (b -> a) -> (List b) -> (List a);
map f Nil = Nil;
map f (Cons x xs) = Cons (f x) (map f xs);
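-- An illustrative note (not part of the original termination benchmark): on
-- the locally defined 'List' type,
--
-- > map f (Cons x (Cons y Nil))
--
-- unfolds to @Cons (f x) (Cons (f y) Nil)@, which is exactly the recursion
-- scheme the termination analysis is exercised on.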
ComputationWithBoundedResources/ara-inference
doc/tpdb_trs/Haskell/basic_haskell/map_1.hs
mit
273
0
8
78
119
65
54
6
1
module Solidran.ListSpec (spec) where

import Test.Hspec
import Solidran.List
import Data.Map (Map)
import qualified Data.Map as Map

spec :: Spec
spec = do
    describe "Solidran.Dna.List" $ do
        describe "splitBy" $ do
            it "should do nothing on an empty list" $ do
                splitBy (==',') "" `shouldBe` []
            it "should split correctly" $ do
                splitBy (==',') "a,bc,erg,s,wer,ss"
                    `shouldBe` ["a", "bc", "erg", "s", "wer", "ss"]
            it "should split also list of numbers" $ do
                splitBy (==0) [1, 0, 1, 1, 3, 0, 4, 2]
                    `shouldBe` [[1], [1, 1, 3], [4, 2]]
        describe "countDistinct" $ do
            it "should return an empty map on empty string" $ do
                countDistinct "" `shouldBe` Map.empty
            it "should count all letters" $ do
                countDistinct "ajuu92333"
                    `shouldBe` Map.fromList
                        [ ('a', 1)
                        , ('j', 1)
                        , ('u', 2)
                        , ('9', 1)
                        , ('2', 1)
                        , ('3', 3) ]
        describe "countIf" $ do
            it "should return 0 on empty string" $ do
                countIf (=='A') "" `shouldBe` 0
            it "should count the number of elements that satisfy the predicate" $ do
                countIf (=='C') "AYCGRIONCXCC" `shouldBe` 4
        describe "groupEvery" $ do
            it "should return an empty list on 0" $ do
                groupEvery 0 "test" `shouldBe` []
                groupEvery 0 [1, 2] `shouldBe` []
            it "should return an empty list on an empty list" $ do
                groupEvery 123 "" `shouldBe` []
                --groupEvery 3 [] `shouldBe` []
            it "should group by n elements" $ do
                groupEvery 4 "1234567890ab" `shouldBe` ["1234", "5678", "90ab"]
                groupEvery 1 "abcd" `shouldBe` ["a", "b", "c", "d"]
                groupEvery 2 [4, 2, 6, 2] `shouldBe` [[4, 2], [6, 2]]
            it "should return a partial last group in some cases" $ do
                groupEvery 3 "1234lkjgsy" `shouldBe` ["123", "4lk", "jgs", "y"]
                groupEvery 2 "1" `shouldBe` ["1"]
                groupEvery 9 [9, 1, 45] `shouldBe` [[9, 1, 45]]
Jefffrey/Solidran
test/Solidran/ListSpec.hs
mit
2,445
0
19
1,047
681
372
309
50
1
euler1 :: Int
euler1 = sum [ x | x <- [1..999], mod x 3 == 0 || mod x 5 == 0]

main = print euler1
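-- A quick sanity check (not part of the original solution): restricting the
-- same comprehension to [1..9] keeps 3, 5, 6 and 9, whose sum is 23, the
-- example given in the Project Euler problem statement.
--
-- > sum [ x | x <- [1..9], mod x 3 == 0 || mod x 5 == 0 ]   -- 23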
neutronest/eulerproject-douby
e1/e1.hs
mit
95
0
11
24
64
32
32
3
1
module Test_Ch_09 (ch_09Suite_Props,ch_09Suite_Units) where

import Test.Tasty
import Test.Tasty.SmallCheck as SC
--import Test.Tasty.QuickCheck as SC
import Test.Tasty.HUnit

import Ch_09 (boundedMin)

ch_09Suite_Props = testGroup "CH 9 Properties" []

ch_09Suite_Units = testGroup "Ch 9 Units" [boundedMinUnits]

-- boundedMinProperties = testGroup "Golden section search properties"
--   [
--     SC.testProperty "f(x) = x^2+x has a min at -1/2"
--   ]

err :: Double
err = 2**(-6)

boundedMinUnits = testGroup "Golden section search unit tests"
  [ testCase "f(x) = x^2+x has a min at -1/2 between -1 and 1 <err" $
      boundedMin (\x -> x*x+x) (-1) 1 err `compare` ((-1)/2 + err) @?= LT,
    testCase "f(x) = x^2+x has a min at -1/2 between -1 and 1 >err" $
      boundedMin (\x -> x*x+x) (-1) 1 err `compare` ((-1)/2 - err) @?= GT,
    testCase "f(x) = x^2+x has a min at -1/2 between -10 and 10 <err" $
      boundedMin (\x -> x*x+x) (-10) 10 err `compare` ((-1)/2 + err) @?= LT,
    testCase "f(x) = x^2+x has a min at -1/2 between -10 and 10 >err" $
      boundedMin (\x -> x*x+x) (-10) 10 err `compare` ((-1)/2 - err) @?= GT,
    testCase "f(x) = x^2+x has a min at -1/2 between -10 and 0 <err" $
      boundedMin (\x -> x*x+x) (-10) 0 err `compare` ((-1)/2 + err) @?= LT,
    testCase "f(x) = x^2+x has a min at -1/2 between -10 and 0 >err" $
      boundedMin (\x -> x*x+x) (-10) 0 err `compare` ((-1)/2 - err) @?= GT,
    testCase "f(x) = x^2+x has a min at 0 between 0 and 10 <err" $
      boundedMin (\x -> x*x+x) 0 10 err `compare` (0 + err) @?= LT,
    testCase "f(x) = x^2+x has a min at 0 between 0 and 10 >err" $
      boundedMin (\x -> x*x+x) 0 10 err `compare` (0 - err) @?= GT ]
Marcus-Rosti/numerical-methods
test/Test_Ch_09.hs
gpl-2.0
1,707
0
14
393
598
332
266
27
1
{-
Copyright (C) 2005 John Goerzen <jgoerzen@complete.org>

    This program is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program; if not, write to the Free Software
    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
-}

module Utils where

import Control.Monad
import System.IO

counter :: (Int -> IO ()) -> Int -> [a] -> IO [a]
counter dispf interval inplist =
    zipWithM writeit inplist [0..]
    where writeit item count =
              do if count `mod` interval == 0
                     then do dispf count
                             hFlush stdout
                     else return ()
                 return item

mb :: Num a => a
mb = 1048576
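-- A hypothetical usage sketch (not part of the original module): print a
-- progress count every 1000 items while forcing a list of work items. The
-- display action and the 'items' list are made up for illustration.
--
-- > processed <- counter (\n -> putStr ('\r' : show n)) 1000 items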
jgoerzen/media-index
Utils.hs
gpl-2.0
1,172
0
12
300
148
76
72
14
2
-- | -- Copyright : (c) 2010-2012 Benedikt Schmidt -- License : GPL v3 (see LICENSE) -- -- Maintainer : Benedikt Schmidt <beschmi@gmail.com> -- -- Convenience abbreviations, mostly used for testing and debugging. module Term.Builtin.Convenience where import Term.Term import Term.LTerm import Term.Builtin.Signature ---------------------------------------------------------------------- -- Shorter syntax for Term constructors ---------------------------------------------------------------------- (*:) :: Ord a => Term a -> Term a -> Term a b *: e = fAppAC Mult [b,e] (#) :: Ord a => Term a -> Term a -> Term a b # e = fAppAC Union [b,e] (+:) :: Ord a => Term a -> Term a -> Term a b +: e = fAppAC Xor [b,e] adec, aenc, sdec, senc, sign, revealSign, rep, check_rep:: (Term a,Term a) -> Term a adec (a,b) = fAppNoEq adecSym [a,b] aenc (a,b) = fAppNoEq aencSym [a,b] sdec (a,b) = fAppNoEq sdecSym [a,b] senc (a,b) = fAppNoEq sencSym [a,b] sign (a,b) = fAppNoEq signSym [a,b] revealSign (a,b) = fAppNoEq revealSignSym [a,b] rep (a,b) = fAppNoEq repSym [a,b] check_rep (a,b) = fAppNoEq checkRepSym [a,b] verify, revealVerify :: (Term a,Term a,Term a) -> Term a verify (a,b,c) = fAppNoEq verifySym [a,b,c] revealVerify (a,b,c) = fAppNoEq revealVerifySym [a,b,c] pk, extractMessage, get_rep:: Term a -> Term a pk a = fAppNoEq pkSym [a] extractMessage a = fAppNoEq extractMessageSym [a] get_rep a = fAppNoEq getRepSym [a] trueC :: Term a trueC = fAppNoEq trueSym [] var :: String -> Integer -> LNTerm var s i = varTerm $ LVar s LSortMsg i ---------------------------------------------------------------------- -- Predefined variables and names ---------------------------------------------------------------------- x0,x1,x2,x3,x4,x5,x6,x7,x8,x9,x10 :: LNTerm x0 = var "x" 0 x1 = var "x" 1 x2 = var "x" 2 x3 = var "x" 3 x4 = var "x" 4 x5 = var "x" 5 x6 = var "x" 6 x7 = var "x" 7 x8 = var "x" 8 x9 = var "x" 9 x10 = var "x" 10 y0,y1,y2,y3,y4,y5,y6,y7,y8,y9 :: LNTerm y0 = var "y" 0 y1 = var "y" 1 y2 = var "y" 2 y3 = var "y" 3 y4 = var "y" 4 y5 = var "y" 5 y6 = var "y" 6 y7 = var "y" 7 y8 = var "y" 8 y9 = var "y" 9 freshVar :: String -> Integer -> LNTerm freshVar s i = varTerm $ LVar s LSortFresh i fx0,fx1,fx2,fx3,fx4,fx5,fx6,fx7,fx8,fx9,fx10 :: LNTerm fx0 = freshVar "fx" 0 fx1 = freshVar "fx" 1 fx2 = freshVar "fx" 2 fx3 = freshVar "fx" 3 fx4 = freshVar "fx" 4 fx5 = freshVar "fx" 5 fx6 = freshVar "fx" 6 fx7 = freshVar "fx" 7 fx8 = freshVar "fx" 8 fx9 = freshVar "fx" 9 fx10 = freshVar "fx" 10 pubVar :: String -> Integer -> LNTerm pubVar s i = varTerm $ LVar s LSortPub i px0,px1,px2,px3,px4,px5,px6,px7,px8,px9,px10 :: LNTerm px0 = pubVar "px" 0 px1 = pubVar "px" 1 px2 = pubVar "px" 2 px3 = pubVar "px" 3 px4 = pubVar "px" 4 px5 = pubVar "px" 5 px6 = pubVar "px" 6 px7 = pubVar "px" 7 px8 = pubVar "px" 8 px9 = pubVar "px" 9 px10 = pubVar "px" 10 lx1,lx2,lx3,lx4,lx5,lx6,lx7,lx8,lx9,lx10 :: LVar lx1 = LVar "x" LSortMsg 1 lx2 = LVar "x" LSortMsg 2 lx3 = LVar "x" LSortMsg 3 lx4 = LVar "x" LSortMsg 4 lx5 = LVar "x" LSortMsg 5 lx6 = LVar "x" LSortMsg 6 lx7 = LVar "x" LSortMsg 7 lx8 = LVar "x" LSortMsg 8 lx9 = LVar "x" LSortMsg 9 lx10 = LVar "x" LSortMsg 10 f1,f2,f3,f4,f5,f6,f7,f8,f9 :: LNTerm f1 = freshTerm "f1" f2 = freshTerm "f2" f3 = freshTerm "f3" f4 = freshTerm "f4" f5 = freshTerm "f5" f6 = freshTerm "f6" f7 = freshTerm "f7" f8 = freshTerm "f8" f9 = freshTerm "f9" p1,p2,p3,p4,p5,p6,p7,p8,p9 :: LNTerm p1 = pubTerm "p1" p2 = pubTerm "p2" p3 = pubTerm "p3" p4 = pubTerm "p4" p5 = pubTerm "p5" p6 = pubTerm "p6" p7 = pubTerm "p7" p8 = pubTerm 
"p8" p9 = pubTerm "p9" lv1,lv2,lv3,lv4,lv5,lv6,lv7,lv8,lv9 :: LVar lv1 = LVar "v1" LSortMsg 0 lv2 = LVar "v2" LSortMsg 0 lv3 = LVar "v3" LSortMsg 0 lv4 = LVar "v4" LSortMsg 0 lv5 = LVar "v5" LSortMsg 0 lv6 = LVar "v6" LSortMsg 0 lv7 = LVar "v7" LSortMsg 0 lv8 = LVar "v8" LSortMsg 0 lv9 = LVar "v9" LSortMsg 0 v1,v2,v3,v4,v5,v6,v7,v8,v9 :: LNTerm v1 = lit $ Var $ lv1 v2 = lit $ Var $ lv2 v3 = lit $ Var $ lv3 v4 = lit $ Var $ lv4 v5 = lit $ Var $ lv5 v6 = lit $ Var $ lv6 v7 = lit $ Var $ lv7 v8 = lit $ Var $ lv8 v9 = lit $ Var $ lv9 li1,li2,li3,li4,li5,li6,li7,li8,li9 :: LVar li1 = LVar "i1" LSortNode 0 li2 = LVar "i2" LSortNode 0 li3 = LVar "i3" LSortNode 0 li4 = LVar "i4" LSortNode 0 li5 = LVar "i5" LSortNode 0 li6 = LVar "i6" LSortNode 0 li7 = LVar "i7" LSortNode 0 li8 = LVar "i8" LSortNode 0 li9 = LVar "i9" LSortNode 0 i1,i2,i3,i4,i5,i6,i7,i8,i9 :: LNTerm i1 = lit $ Var $ li1 i2 = lit $ Var $ li2 i3 = lit $ Var $ li3 i4 = lit $ Var $ li4 i5 = lit $ Var $ li5 i6 = lit $ Var $ li6 i7 = lit $ Var $ li7 i8 = lit $ Var $ li8 i9 = lit $ Var $ li9
tamarin-prover/tamarin-prover
lib/term/src/Term/Builtin/Convenience.hs
gpl-3.0
4,724
0
8
1,013
2,107
1,199
908
152
1
{- This file is part of the Haskell Term Rewriting Library. The Haskell Term Rewriting Library is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. The Haskell Term Rewriting Library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with the Haskell Term Rewriting Library. If not, see <http://www.gnu.org/licenses/>. -} {-# LANGUAGE TypeSynonymInstances #-} {-# LANGUAGE FlexibleInstances #-} module Termlib.Repl ( -- | This Module provides enlists most common used functions. -- * Data Structures containing terms WithTerms(..) , isSubtermOf -- | 't ``isSubtermOf`` a' checks if 't' occurs in the list of subterms of 'a' , nonVariableSubterms -- | filters variables from the list of subterms -- * Terms , Term.Term(..) -- ** Querying , Term.depth -- | returns the depth of a term. Variables and constants admit depth '0' , Term.size -- | returns the size of a term. Variables and constants admit size '1' , Term.fsize -- | returns the number of function symbols , Term.root -- | returns the root of the term , Term.immediateSubterms -- | returns the list of direct subterms or '[]' if argument is a variable , Term.properSubterms -- | returns the list of subterms, excluding the argument , Term.isSupertermOf -- | converse of 'isSubtermOf' , Term.cardinality -- | returns the number of occurences of the first argument , Subst.isUnifiable -- | returns 'True' iff the arguments are unifiable , Subst.isRenamedUnifiable -- | returns 'True' iff renamings without common variables are unifiable , Subst.matches -- | 's ``matches`` t' returns 'True' iff 't' is an instance of 's' , Subst.subsumes -- | inverse of matches , Subst.encompasses -- | 's ``encomapsses`` t' returns 'True' iff 's' encompasses 't' , Subst.variant -- | two terms are variants if they subsume eatch other , Term.isVariable -- | returns 'True' if the argument is a variable , Rule.Overlap (..) -- * Term Rewriting Rule , Rule.Rule(..) -- ** Predicates -- | See module "Termlib.Rule" for further predicates. Predicates can be lifted -- with @all@ and @any@ from module 'Data.Foldable' to 'Trs.Trs's. , Rule.isNonErasing -- | rule is non-erasing if every occurence of a variable in the left-hand -- side occurs in the right-hand side , Rule.isErasing -- | inverse of 'isNonErasing' , Rule.isNonDuplicating -- | a rule is non-duplicating if no variable appears more often -- in the right-hand side than in the left-hand side , Rule.isDuplicating -- | inverse of 'isNonDuplicating' -- ** Modification , Rule.invert -- | convert a rule 'l -> r' to 'r -> l' , Rule.canonise -- | renames variables to a canonical form. -- Two rules 'r1' and 'r2' are equal modulo variable renaming, -- iff 'canonise r1' and 'canonise r2' are syntactically equal. -- * Term Rewrite System , Trs.Trs -- | A TRS is a list of 'Rule.Rule'. , emptyTrs -- | The empty 'Trs.Trs'.. , Trs.fromRules -- | translates a list of rewrite rules to a 'Trs.Trs'. 
, Trs.toRules -- | returns the list of rewrite rules in the TRS -- ** Set Operations , Trs.union -- | union operator on the set of rules, removing duplicates , Trs.append -- | like union, but does not remove duplicates , (Trs.\\) -- | difference on the set of rules , Trs.intersect -- | intersection on the set of rules , Trs.member -- | checks if a rule is contained in the TRS, does not perform -- variable conversion , Trs.insert -- | inserts a rule into a TRS, if the rule is not already contained -- ** Querying , Trs.lhss -- | returns the list of left-hand sides , Trs.rhss -- | returns the list of left-hand sides , Trs.definedSymbols -- | returns roots of right-hand sides , Trs.constructors -- | returns all symbols which are not roots of right-hand sides , Trs.overlaps -- | returns the 'Rule.Overlap' of a term rewrite system -- ** Predicates -- | Many predicates are defined in "Termlib.Rule". These can be lifted to -- 'Trs.Trs' by 'Fold.all' and 'Fold.any' , Trs.isEmpty -- | checks if the list of rules is empty , Trs.wellFormed -- | checks that no left-hand side is a variable -- and all variables of a right-hand side are included -- in the corresponding left-hand side , Trs.isOverlapping -- | returns 'True' iff it contains two overlapping rules , Trs.isOverlay -- | returns 'True' iff all overlaps are only root overlaps , Trs.isOrthogonal -- | returns 'True' iff the given TRS is orthogonal , Trs.isNestedRecursive -- | returns 'True' iff there exists a rule 'f(..) -> C[f(..C[f(..)]..)]'. -- ** Modification , Trs.filterRules -- | removes rules from the TRS matching the predicate , Trs.mapRules -- | map the given function over the rules -- * Complexity Problem , Prob.Problem(..) , Prob.StartTerms (..) , Prob.Ruleset (..) , Prob.emptyRuleset , Prob.Strategy (..) -- ** Querying , Prob.weakComponents -- | returns weak dependency pairs and weak rewrite rules , Prob.strictComponents -- | returns strict dependency pairs and strict rewrite rules , Prob.dpComponents -- | returns all dependency pairs , Prob.trsComponents -- | returns all dependency rules which are not dependency pairs , Prob.allComponents -- | returns all dependency pairs and rules -- ** Predicates , Prob.isRCProblem -- | returns 'True' iff the set of start-terms is basic , Prob.isDCProblem -- | converse of 'isRCProblem' , Prob.isDPProblem -- | returns 'True' iff the set of start-terms is basic, and all -- defined symbols are marked in the set of basic terms -- ** Modification , Prob.withFreshCompounds -- | replaces all compound symbols by fresh compound symbols, -- and removes unary compound symbols -- * Parsing Utilities , parseFromString ) where import qualified Termlib.Term.Parser as TParser import qualified Termlib.Problem as Prob import qualified Termlib.Trs as Trs import qualified Termlib.Substitution as Subst import qualified Termlib.FunctionSymbol as F import qualified Termlib.Variable as V import qualified Termlib.Term as Term import qualified Termlib.Rule as Rule import qualified Data.Foldable as Fold import Data.Set (Set) import qualified Data.Set as Set import Termlib.Utils (PrettyPrintable(..)) class WithTerms a where -- | extracts the set of variables vars :: a -> Set V.Variable -- | extracts the set of function symbols symbols :: a -> Set F.Symbol -- | returns the list of subterms subterms :: a -> Set Term.Term isLinear :: a -> Bool isGround :: a -> Bool isFlat :: a -> Bool isShallow :: a -> Bool nonVariableSubterms :: WithTerms a => a -> Set Term.Term nonVariableSubterms = Set.filter (not . Term.isVariable) . 
subterms isSubtermOf :: WithTerms a => Term.Term -> a -> Bool isSubtermOf t a = t `Set.member` subterms a instance WithTerms Term.Term where vars = Term.variables symbols = Term.functionSymbols subterms = Set.fromList . Term.subterms isLinear = Term.isLinear isGround = Term.isGround isFlat = Term.isFlat isShallow = Term.isShallow instance WithTerms Rule.Rule where vars = Rule.variables symbols = Rule.functionSymbols subterms r = subterms (Rule.lhs r) `Set.union` subterms (Rule.rhs r) isLinear = Rule.isLinear isGround = Rule.isGround isFlat = Rule.isFlat isShallow = Rule.isShallow instance WithTerms Trs.Trs where vars = Trs.variables symbols = Trs.functionSymbols subterms r = Fold.foldl (\ ss rl -> subterms rl `Set.union` ss) Set.empty r isLinear = Fold.all isLinear isGround = Fold.all isGround isFlat = Fold.all isFlat isShallow = Fold.all isShallow emptyTrs :: Trs.Trs emptyTrs = Trs.empty parseFromString :: TParser.TermParser a -> String -> Prob.Problem -> (a,Prob.Problem) parseFromString parser str prob = case TParser.parseFromString (Prob.signature prob) (Prob.variables prob) parser str of Right ((t,fs,vs),_) -> (t, prob { Prob.variables = vs, Prob.signature = fs }) Left e -> error $ "Failed Parsing:\n" ++ show (pprint e)
mzini/termlib
Termlib/Repl.hs
gpl-3.0
10,151
0
11
3,343
1,149
702
447
124
2
module Main where

import System.Environment(getArgs)

processLine :: String -> Maybe String
processLine line =
    let (ms:list) = reverse $ words line
        m = read ms - 1
    in if m < length list
        then Just (list !! m)
        else Nothing

maybeIO :: (a -> IO ()) -> Maybe a -> IO ()
maybeIO _ Nothing = return ()
maybeIO f (Just x) = f x

main :: IO ()
main = do
    [inputFile] <- getArgs
    input <- readFile inputFile
    mapM_ (maybeIO putStrLn) $ map processLine $ lines input
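-- A worked example (not part of the original submission): for the input line
-- "a b c d 2", the reversed word list is ["2","d","c","b","a"], so m = 1 and
-- the 2nd-to-last element is returned:
--
-- > processLine "a b c d 2"   -- Just "c"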
cryptica/CodeEval
Challenges/10_MthToLastElement/main.hs
gpl-3.0
486
0
11
123
225
110
115
15
2
-- Haskell Music Player, client for the MPD (Music Player Daemon) -- Copyright (C) 2011 Ivan Vitjuk <v@iv.id.au> -- This program is free software: you can redistribute it and/or modify -- it under the terms of the GNU General Public License as published by -- the Free Software Foundation, either version 3 of the License, or -- (at your option) any later version. -- This program is distributed in the hope that it will be useful, -- but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -- GNU General Public License for more details. -- You should have received a copy of the GNU General Public License -- along with this program. If not, see <http://www.gnu.org/licenses/>. import qualified Network.MPD as MPD import qualified Network.MPD.Core as MPDC import Data.IORef import Control.Monad.Trans (liftIO) import Data.Maybe (fromMaybe) import Data.Either (rights) import Graphics.UI.Gtk import Graphics.UI.Gtk.Glade import qualified Playlist as PL import qualified Progressbar as PB import qualified StatusUpdate as SU import qualified CurrentSong as CS import qualified GuiData as GD import qualified Util as U import qualified HMP as HMP execMpdCommand gdref cmd = do gd <- readIORef gdref MPDC.withMPDPersistent (GD.mpd gd) cmd setupPlayButton' btn MPD.Paused = buttonSetLabel btn "Play" setupPlayButton' btn MPD.Stopped = buttonSetLabel btn "Play" setupPlayButton' btn MPD.Playing = buttonSetLabel btn "Pause" setupPlayButton :: GD.GuiDataRef -> IO () setupPlayButton gdref = do gd <- readIORef gdref setupPlayButton' (GD.playButton gd) (MPD.stState (GD.currentStatus gd)) processPlayButton' gdref MPD.Paused = execMpdCommand gdref (MPD.pause False) processPlayButton' gdref MPD.Stopped = execMpdCommand gdref (MPD.play Nothing) processPlayButton' gdref MPD.Playing = execMpdCommand gdref (MPD.pause True) processPlayButton :: GD.GuiDataRef -> Button -> IO () processPlayButton gdref btn = do gd <- readIORef gdref processPlayButton' gdref (MPD.stState (GD.currentStatus gd)) return () setupUi :: GD.GuiDataRef -> GladeXML -> HMP.HMP () setupUi gdref xml = do gd <- liftIO $ readIORef gdref -- main window window <- liftIO $ xmlGetWidget xml castToWindow "mainWindow" liftIO $ onDestroy window mainQuit -- buttons prevButton <- liftIO $ xmlGetWidget xml castToButton "prevButton" liftIO $ onClicked prevButton $ do execMpdCommand gdref MPD.previous return () nextButton <- liftIO $ xmlGetWidget xml castToButton "nextButton" liftIO $ onClicked nextButton $ do execMpdCommand gdref MPD.next return () playButton <- liftIO $ xmlGetWidget xml castToButton "playButton" liftIO $ onClicked playButton $ do processPlayButton gdref playButton stopButton <- liftIO $ xmlGetWidget xml castToButton "stopButton" liftIO $ onClicked stopButton $ do execMpdCommand gdref MPD.stop return () liftIO $ PB.setup gdref liftIO $ PL.setup gdref xml liftIO $ SU.setup gdref xml setupPlayButton -- show liftIO $ widgetShowAll window return () makeGuiRef :: GladeXML -> HMP.HMP GD.GuiDataRef makeGuiRef xml = do mpd <- HMP.getMpd playButton <- liftIO $ xmlGetWidget xml castToButton "playButton" plist <- liftIO $ PL.init xml pbar <- liftIO $ PB.init xml csong <- liftIO $ CS.init xml gdref <- liftIO $ newIORef $ GD.GData mpd playButton MPD.defaultStatus MPD.defaultStatus pbar plist csong return gdref gui :: HMP.HMP () gui = do liftIO initGUI Just xml <- liftIO $ xmlNew "HaskellMusicPlayer.glade" gdref <- makeGuiRef xml setupUi gdref xml liftIO mainGUI return () main :: IO () main = do 
HMP.execHMP "localhost" 6600 "" gui
ivitjuk/Haskell-Music-Player
src/Main.hs
gpl-3.0
3,908
0
12
839
994
483
511
82
1
module Lamdu.CharClassification
    ( operatorChars, alphaNumericChars
    ) where

operatorChars :: [Char]
operatorChars = "\\+-*/^=><&|%$:."

alphaNumericChars :: [Char]
alphaNumericChars = ['a'..'z'] ++ ['0'..'9']
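-- An illustrative note (not part of the original module): both lists are meant
-- for simple membership tests when classifying input characters, e.g.
--
-- > '+' `elem` operatorChars       -- True
-- > 'q' `elem` alphaNumericChars   -- True
-- > 'Q' `elem` alphaNumericChars   -- False (only lower-case letters are listed)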
Mathnerd314/lamdu
src/Lamdu/CharClassification.hs
gpl-3.0
213
0
6
27
54
33
21
6
1
{-# LANGUAGE MultiWayIf #-} {-# LANGUAGE PatternSynonyms #-} module GraphAlgorithms( ParentType(..), annotateGraph, collapseAnnotatedGraph ) where import qualified Data.Graph.Inductive as ING import Data.List(foldl', find) import qualified Data.Set as Set import Data.Tuple(swap) import GHC.Stack(HasCallStack) import Constants(pattern ResultPortConst, pattern InputPortConst) import Types(SyntaxNode(..), IngSyntaxGraph, Edge(..), CaseOrMultiIfTag(..), Port(..), NameAndPort(..), SgNamedNode , AnnotatedGraph, EmbedInfo(..), EmbedDirection(..), NodeInfo(..) , Embedder(..), Named(..), EmbedderSyntaxNode, NodeName) import Util(fromMaybeError) {-# ANN module "HLint: ignore Use record patterns" #-} data ParentType = ApplyParent | CaseParent | MultiIfParent | LambdaParent | NotAParent deriving (Eq, Show) -- Helper functions parentAndChild :: EmbedDirection -> (a, a) -- ^ (from, to) -> (a, a) -- ^ (parent, child) parentAndChild embedDirection = case embedDirection of EdEmbedTo -> id EdEmbedFrom -> swap -- End helper functions -- START annotateGraph -- -- TODO Use pattern synonyms here -- | A syntaxNodeIsEmbeddable if it can be collapsed into another node syntaxNodeIsEmbeddable :: ParentType -> SyntaxNode -> Maybe Port -> Maybe Port -> Bool syntaxNodeIsEmbeddable parentType syntaxNode mParentPort mChildPort = case (parentType, syntaxNode) of (ApplyParent, ApplyNode _ _) -> parentPortNotResult (ApplyParent, LiteralNode _) -> parentPortNotResult (ApplyParent, FunctionDefNode _ _) -> parentPortNotResult && isResult mChildPort -- The match below works, but can make messy drawings with the current -- icon for lambdas. -- (LambdaParent, ApplyNode _ _ _) -> parentPortIsInput (LambdaParent, LiteralNode _) -> parentPortIsInput (LambdaParent, FunctionDefNode _ _) -> parentPortIsInput && isResult mChildPort (CaseParent, LiteralNode _) -> parentPortNotResult (CaseParent, ApplyNode _ _) -> parentPortNotResult && parentPortNotInput (CaseParent, PatternApplyNode _ _) -> parentPortNotResult && parentPortNotInput (MultiIfParent, LiteralNode _) -> parentPortNotResult (MultiIfParent, ApplyNode _ _) -> parentPortNotResult && parentPortNotInput _ -> False where isInput mPort = case mPort of Just InputPortConst -> True _ -> False isResult mPort = case mPort of Nothing -> True Just ResultPortConst -> True Just _ -> False parentPortIsInput = isInput mParentPort parentPortNotInput = not $ isInput mParentPort parentPortNotResult = not $ isResult mParentPort parentTypeForNode :: SyntaxNode -> ParentType parentTypeForNode n = case n of (ApplyNode _ _) -> ApplyParent CaseOrMultiIfNode CaseTag _ -> CaseParent CaseOrMultiIfNode MultiIfTag _ -> MultiIfParent (FunctionDefNode _ _) -> LambdaParent _ -> NotAParent lookupSyntaxNode :: ING.Graph gr => IngSyntaxGraph gr -> ING.Node -> Maybe EmbedderSyntaxNode lookupSyntaxNode gr node = naVal <$> ING.lab gr node lookupParentType :: ING.Graph gr => IngSyntaxGraph gr -> ING.Node -> ParentType lookupParentType graph node = maybe NotAParent parentTypeForNode $ emNode <$> lookupSyntaxNode graph node {-# ANN edgeIsSingular "HLint: ignore Redundant bracket" #-} edgeIsSingular :: ING.Graph gr => gr a Edge -> ING.Node -> Edge -> Bool edgeIsSingular graph node edge = numEdges <= 1 where (childNamePort, _) = edgeConnection edge edgeLabels = filter (childNamePort ==) ((fst . edgeConnection . 
snd) <$> ING.lsuc graph node) numEdges = length edgeLabels parentCanEmbedChild :: ING.Graph gr => IngSyntaxGraph gr -> ING.Node -> ING.Node -> Edge -> EmbedDirection -> Bool parentCanEmbedChild graph parent child edge embedDirection = case lookupSyntaxNode graph child of Nothing -> False Just childSyntaxNode -> edgeIsSingular graph child edge && syntaxNodeIsEmbeddable parentType (emNode childSyntaxNode) parentPort childPort where parentType = lookupParentType graph parent (NameAndPort _ fromPort, NameAndPort _ toPort) = edgeConnection edge (parentPort, childPort) = parentAndChild embedDirection (fromPort, toPort) findEmbedDir :: ING.Graph gr => IngSyntaxGraph gr -> ING.Node -> ING.Node -> Edge -> Maybe EmbedDirection findEmbedDir gr fromNode toNode e = if | parentCanEmbedChild gr fromNode toNode e EdEmbedTo -> Just EdEmbedTo | parentCanEmbedChild gr toNode fromNode e EdEmbedFrom -> Just EdEmbedFrom | otherwise -> Nothing annotateGraph :: ING.DynGraph gr => IngSyntaxGraph gr -> gr SgNamedNode (EmbedInfo Edge) annotateGraph gr = ING.gmap edgeMapper gr where edgeMapper :: ING.Context SgNamedNode Edge -> ING.Context SgNamedNode (EmbedInfo Edge) edgeMapper (inEdges, node, nodeLabel, outEdges) = (getInEmbedInfo node inEdges , node , nodeLabel , getOutEmbedInfo node outEdges) getInEmbedInfo toNode = fmap (\(e, fromNode) -> (EmbedInfo (findEmbedDir gr fromNode toNode e) e, fromNode)) getOutEmbedInfo fromNode = fmap (\(e, toNode) -> (EmbedInfo (findEmbedDir gr fromNode toNode e) e, toNode)) -- END annotateGraph -- -- START collapseAnnotatedGraph -- findEdgeLabel :: ING.Graph gr => gr a b -> ING.Node -> ING.Node -> Maybe b findEdgeLabel graph node1 node2 = fmap fst matchingEdges where labelledEdges = ING.lneighbors graph node1 matchingEdges = find ((== node2) . snd) labelledEdges -- | Replace the a node's label changeNodeLabel :: ING.DynGraph gr => ING.Node -> a -> gr a b -> gr a b changeNodeLabel node newLabel graph = case ING.match node graph of (Just (inEdges, _, _, outEdges), restOfTheGraph) -> (inEdges, node, newLabel, outEdges) ING.& restOfTheGraph (Nothing, _) -> graph addChildToNodeLabel :: (NodeName, Edge) -> EmbedderSyntaxNode -> EmbedderSyntaxNode addChildToNodeLabel child (Embedder existingNodes oldSyntaxNode) = Embedder (Set.insert child existingNodes) oldSyntaxNode -- | Change the node label of the parent to be nested. embedChildSyntaxNode :: ING.DynGraph gr => ING.Node -> ING.Node -> AnnotatedGraph gr -> AnnotatedGraph gr embedChildSyntaxNode parentNode childNode oldGraph = newGraph where mChildAndEdge = (,) <$> ING.lab oldGraph childNode <*> findEdgeLabel oldGraph parentNode childNode newGraph = case ING.lab oldGraph parentNode of Nothing -> error "embedChildSyntaxNode: parentNode not found" Just (NodeInfo isChild oldNodeLabel) -> -- TODO Refactor with the Maybe Monad? case mChildAndEdge of Nothing -> error "embedChildSyntaxNode: childNode not found." 
Just (NodeInfo _ childNodeLab, EmbedInfo _ edge) -> changeNodeLabel childNode (NodeInfo (Just parentNode) childNodeLab) $ changeNodeLabel parentNode newNodeLabel oldGraph where Named nodeName oldSyntaxNode = oldNodeLabel newSyntaxNode = addChildToNodeLabel (naName childNodeLab, edge) oldSyntaxNode newNodeLabel = NodeInfo isChild (Named nodeName newSyntaxNode) collapseEdge :: (HasCallStack, ING.DynGraph gr) => AnnotatedGraph gr -> ING.LEdge (EmbedInfo Edge) -> AnnotatedGraph gr collapseEdge oldGraph lEdge@(fromNode, toNode, EmbedInfo mEmbedDir _) = case mEmbedDir of Nothing -> oldGraph Just embedDir -> ING.delLEdge lEdge childEmbeddedGraph where (parentNode, childNode) = parentAndChild embedDir (fromNode, toNode) childEmbeddedGraph = embedChildSyntaxNode parentNode childNode oldGraph mapEdges :: (ING.Graph gr1, ING.Graph gr2) => (ING.LEdge b1 -> ING.LEdge b2) -> gr1 a b1 -> gr2 a b2 mapEdges f gr = ING.mkGraph nodes mappedEdges where nodes = ING.labNodes gr mappedEdges = f <$> ING.labEdges gr findRootAncestor :: ING.Graph gr => gr (NodeInfo a) b -> ING.Node -> ING.Node findRootAncestor gr node = let nodeLab = fromMaybeError "findRootAncestor: node does not exist" (ING.lab gr node) in case niParent nodeLab of Nothing -> node Just parentNode -> findRootAncestor gr parentNode -- Note: modifying the edges could probably be eliminated if the algorithms in -- Rendering were re-written to us the node's parent. -- | For all of the graph edges, this function moves edge to from and to nodes -- of the edge to be root (the parents's parent parent etc.) of the edge's -- from and to nodes. moveEdges :: (ING.Graph gr1, ING.Graph gr2) => gr1 (NodeInfo a) b -> gr2 (NodeInfo a) b moveEdges gr = mapEdges moveEdge gr where moveEdge (fromNode, toNode, label) = (newFrom, newTo, label) where newFrom = findRootAncestor gr fromNode newTo = findRootAncestor gr toNode collapseAnnotatedGraph :: (HasCallStack, ING.DynGraph gr) => gr SgNamedNode (EmbedInfo Edge) -> AnnotatedGraph gr collapseAnnotatedGraph origGraph = moveEdges newGraph where defaultNodeInfoGraph = ING.nmap (NodeInfo Nothing) origGraph -- TODO Check that there are no embedded edges left. newGraph = foldl' collapseEdge defaultNodeInfoGraph (ING.labEdges origGraph)
rgleichman/glance
app/GraphAlgorithms.hs
gpl-3.0
9,788
0
18
2,435
2,455
1,278
1,177
-1
-1
module Utils ( runAcceptLoop, bindPort, readRequestUri ) where

import Control.Monad ( forever )
import Control.Concurrent ( forkIO )
import Control.Exception ( bracketOnError, finally )
import Network.Socket ( Socket, socket, sClose
                      , Family(..), SocketType(..)
                      , AddrInfo(..), AddrInfoFlag(..), getAddrInfo
                      , defaultHints, defaultProtocol, maxListenQueue
                      , accept, bindSocket, listen, recv
                      , SocketOption(..), setSocketOption )

-- | Accept connections on the given socket and spawn a new handler
-- for each.  Close the socket even if the handler throws an
-- exception.
runAcceptLoop :: Socket -> (Socket -> IO ()) -> IO ()
runAcceptLoop lsocket handler = forever $ do
    (sock, _addr) <- accept lsocket
    _ <- forkIO $ finally (handler sock) (sClose sock)
    return ()

-- | Create a socket and bind it to the given port.
bindPort :: Int -> IO Socket
bindPort port = do
    addrInfos <- getAddrInfo
                 (Just (defaultHints { addrFlags = [AI_PASSIVE]
                                     , addrFamily = AF_INET }))
                 Nothing (Just (show port))
    let addr = head addrInfos
    bracketOnError
        (socket (addrFamily addr) Stream defaultProtocol)
        sClose
        (\sock -> do
             setSocketOption sock ReuseAddr 1
             bindSocket sock (addrAddress addr)
             listen sock maxListenQueue
             return sock)

-- | Read an HTTP request and return its uri.  Fails if the request
-- isn't a @GET@ method, or if the client is too slow in sending the
-- request.  Does no sanitisation of the request (i.e. a malicious
-- client could request arbitrary files from the filesystem).
readRequestUri :: Socket -> IO FilePath
readRequestUri sock = do
    -- FIXME Do a better job of reading the request's first line.
    requestLine <- recv sock 1024
    let ("GET" : uri : _) = words requestLine
    return ('.' : uri)
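-- A hypothetical usage sketch (not part of the original module): bind a
-- listening socket on port 8080 and handle each connection by reading the
-- requested URI. 'serveFile' is a made-up placeholder for whatever the caller
-- does with that path.
--
-- > main :: IO ()
-- > main = do
-- >     lsock <- bindPort 8080
-- >     runAcceptLoop lsock $ \sock -> do
-- >         path <- readRequestUri sock
-- >         serveFile sock path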
scvalex/dissemina2
Utils.hs
gpl-3.0
2,018
0
14
596
456
245
211
36
1
{-#LANGUAGE OverloadedStrings#-}

module Math.Matrix.IO where

import Control.Monad
import Data.Text hiding (map)
import Data.Text.IO
import Math.Matrix
import Prelude hiding (lines, words, putStrLn, getContents, getLine, readFile)

-- | Reads from a file
--   Returns single Text of contents
open :: FilePath -> IO Text
open = readFile

readCSV :: FilePath -> IO [[Text]]
readCSV file = liftM (map (splitOn ",") . lines) (open file)

-- | Converts a Matrix to a list of comma separated lines.
toCSV :: Show a => Matrix a -> [Text]
toCSV = map (intercalate ",") . toLists . fmap (pack . show)

-- | Prints a matrix in CSV format.
printCSV :: Show a => Matrix a -> IO ()
printCSV = mapM_ putStrLn . toCSV
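-- A hypothetical usage sketch (not part of the original module): print a small
-- matrix as CSV. It assumes 'Math.Matrix' exports a 'fromLists' constructor
-- for building a 'Matrix' from row lists; substitute the real constructor name
-- if it differs.
--
-- > printCSV (fromLists [[1, 2], [3, 4]])
-- > -- 1,2
-- > -- 3,4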
Jiggins/Matrix
Math/Matrix/IO.hs
gpl-3.0
701
0
10
124
223
123
100
15
1
{-# LANGUAGE OverloadedStrings #-} module Controller.Permission ( checkPermissionOld , checkPermission , userCanReadData , authAccount , checkMemberADMIN , checkVerfHeader , guardVerfHeader ) where import Control.Monad (void, unless, liftM2) import Has (Has, view, peek, peeks) import Model.Permission hiding (checkPermission) import Model.Release import Model.Party import Model.Identity import HTTP.Request import Action -- TODO: use Model.checkPermission everywhere instead {-# DEPRECATED checkPermissionOld "Use checkPermission instead" #-} checkPermissionOld :: Has Permission a => Permission -> a -> Handler a checkPermissionOld requiredPermissionLevel objectWithCurrentUserPermLevel = checkPermission view requiredPermissionLevel objectWithCurrentUserPermLevel -- | Determine if the requested permission is granted, or throw an HTTP 403. -- -- This function is probably due for another 3 or 4 rewrites: it's a bit -- abstract, serving mostly as a description for its arguments. -- TODO: Maybe replace with requestAccess checkPermission :: (a -> Permission) -- ^ How to extract the granted permission for current user -> Permission -- ^ Requested permission permission -> a -- ^ Object under scrutiny -> Handler a -- ^ Just returns the 3rd arg, unless it short-circuits with a 403. checkPermission getCurrentUserPermLevel requestingAccessAtPermLevel obj = do unless (getCurrentUserPermLevel obj >= requestingAccessAtPermLevel) $ do resp <- peeks forbiddenResponse result resp return obj userCanReadData :: (a -> EffectiveRelease) -> (a -> VolumeRolePolicy) -> a -> Handler a userCanReadData getObjEffectiveRelease getCurrentUserPermLevel obj = do unless (canReadData2 getObjEffectiveRelease getCurrentUserPermLevel obj) $ do resp <- peeks forbiddenResponse result resp return obj -- | -- Pulls the Account out of the Handler context authAccount :: Handler Account authAccount = do ident <- peek case ident of NotLoggedIn -> result =<< peeks forbiddenResponse IdentityNotNeeded -> result =<< peeks forbiddenResponse Identified s -> return $ view s ReIdentified u -> return $ view u -- newtype Handler a = Handler { unHandler :: ReaderT RequestContext IO a } -- deriving (Functor, Applicative, Alternative, Monad, MonadPlus, MonadIO, -- MonadBase IO, MonadThrow, MonadReader RequestContext) -- A: Handler satisfies a (MonadHas Access) constraint because... -- 1. it has a MonadReader RequestContext -- 2. RequestContext satisfies (Has Access) -- -- B: (A.2) is true because... -- 1. RequestContext satisfies (Has Identity) by concretely carrying an Identity -- value -- 2. It "inherits" the (Has Access) of its Identity -- -- C: Identity satisfies (Has Access) because... -- 1. It satisfies (Has SiteAuth) by *building* a SiteAuth in different ways -- a. Generate a 'nobody' -- b. Reach into a sub-sub-field, not using the Has mechanism (although it -- should?) -- c. 1 constructor has a concrete SiteAuth field -- 2. It "inherits" the (Has Access) of the SiteAuth -- -- D: SiteAuth satisfies (Has Access) because it has a concrete Access field. -- | (Maybe) tests whether someone is a superadmin? checkMemberADMIN :: Handler () checkMemberADMIN = do a :: Access <- peek let admin = accessMember' a void $ checkPermissionOld PermissionADMIN admin checkVerfHeader :: Handler Bool checkVerfHeader = do header <- peeks $ lookupRequestHeader "x-csverf" peeks $ or . liftM2 (==) header . identityVerf guardVerfHeader :: Handler () guardVerfHeader = do c <- checkVerfHeader unless c $ result =<< peeks forbiddenResponse
databrary/databrary
src/Controller/Permission.hs
agpl-3.0
3,659
0
11
643
566
299
267
-1
-1
-- brittany {lconfig_indentPolicy: IndentPolicyFree }
data EnterpriseGrantsForCompanyResponse = EnterpriseGrantsForCompanyResponse
  Types.Company
  [EnterpriseGrantResponse]
lspitzner/brittany
data/Test71.hs
agpl-3.0
259
0
7
99
18
10
8
3
0
-- GSoC 2015 - Haskell bindings for OpenCog. {-# LANGUAGE GADTs #-} {-# LANGUAGE ExistentialQuantification #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE AutoDeriveTypeable #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE StandaloneDeriving #-} -- | This Module defines the main data types for Haskell bindings. module OpenCog.AtomSpace.Types ( TruthVal (..) , AtomName (..) , Atom (..) , AtomGen (..) , Gen (..) , appGen , getType ) where import OpenCog.AtomSpace.Inheritance (type (<~)) import OpenCog.AtomSpace.AtomType (AtomType(..)) import Data.Typeable (Typeable,typeRep) import Control.Monad (Functor,Monad) -- | Atom name type. type AtomName = String -- | 'TruthVal' represent the different types of TruthValues. data TruthVal = SimpleTV { tvMean :: Double , tvConfidence :: Double } | CountTV { tvMean :: Double , tvCount :: Double , tvConfidence :: Double } | IndefTV { tvMean :: Double , tvL :: Double , tvU :: Double , tvConfLevel :: Double , tvDiff :: Double } | FuzzyTV { tvMean :: Double , tvConfidence :: Double } | ProbTV { tvMean :: Double , tvCount :: Double , tvConfidence :: Double } deriving (Show,Eq) type TVal = Maybe TruthVal -- | 'Gen' groups all the atoms that are children of the atom type a. data Gen a where Gen :: (Typeable a,b <~ a) => Atom b -> Gen a deriving instance Show (Gen a) -- | 'appGen' evaluates a given function with the atom type instance -- wrapped inside the 'Gen' type. appGen :: (forall b. (Typeable a,b <~ a) => Atom b -> c) -> Gen a -> c appGen f (Gen at) = f at -- | 'AtomGen' is a general atom type hiding the type variables. -- (necessary when working with many instances of different atoms, -- for example, for lists of general atoms) type AtomGen = Gen AtomT -- | 'Atom' is the main data type to represent the different types of atoms. -- -- Here we impose type constraints in how atoms relate between them. -- -- The '<~' type operator means that the type on the left "inherits" from the -- type on the right. -- -- DEFINING NEW ATOM TYPES: -- -- If it is a node: -- We add a new data constructor such as: -- NewAtomTypeNode :: AtomName -> TVal -> Atom NewAtomTypeT -- where NewAtomTypeT is a phantom type (automatically generated by temp. hask.). -- -- If it is a link: -- If it is of a fixed arity: -- We impose the type constraints on each of the members of its outgoing set. -- NewAtomTypeLink :: (a <~ t1,b <~ t2,c <~ t3) => -- TVal -> Atom a -> Atom b -> Atom c -> Atom NewAtomTypeT -- -- If it is of unlimited arity: -- We define it as a data constructor that takes a list of atoms as first argument. -- All of the members of its outgoing set will satisfy the same constraints. -- For example suppose NewAtomTypeLink accepts nodes that are concepts: -- NewAtomTypeLink :: TVal -> [Gen ConceptT] -> Atom NewAtomTypeT -- -- Also, you have to modify the module Internal. Adding proper case clauses for -- this new atom type to the functions "toRaw" and "fromRaw". 
-- data Atom (a :: AtomType) where PredicateNode :: AtomName -> TVal -> Atom PredicateT AndLink :: TVal -> [AtomGen] -> Atom AndT OrLink :: TVal -> [AtomGen] -> Atom OrT ImplicationLink :: (a <~ AtomT,b <~ AtomT) => TVal -> Atom a -> Atom b -> Atom ImplicationT EquivalenceLink :: (a <~ AtomT,b <~ AtomT) => TVal -> Atom a -> Atom b -> Atom EquivalenceT EvaluationLink :: (p <~ PredicateT,l <~ ListT) => TVal -> Atom p -> Atom l -> Atom EvaluationT ConceptNode :: AtomName -> TVal -> Atom ConceptT InheritanceLink :: (c1 <~ ConceptT,c2 <~ ConceptT) => TVal -> Atom c1 -> Atom c2 -> Atom InheritanceT SimilarityLink :: (c1 <~ ConceptT,c2 <~ ConceptT) => TVal -> Atom c1 -> Atom c2 -> Atom SimilarityT MemberLink :: (c1 <~ NodeT,c2 <~ NodeT) => TVal -> Atom c1 -> Atom c2 -> Atom MemberT SatisfyingSetLink :: (p <~ PredicateT) => Atom p -> Atom SatisfyingSetT NumberNode :: Double -> Atom NumberT ListLink :: [AtomGen] -> Atom ListT SetLink :: [AtomGen] -> Atom SetT SchemaNode :: AtomName -> Atom SchemaT GroundedSchemaNode :: AtomName -> Atom GroundedSchemaT ExecutionLink :: (s <~ SchemaT,l <~ ListT,a <~ AtomT) => Atom s -> Atom l -> Atom a -> Atom ExecutionT VariableNode :: AtomName -> Atom VariableT VariableList :: [Gen VariableT] -> Atom VariableT SatisfactionLink :: (v <~ VariableT,l <~ LinkT) => Atom v -> Atom l -> Atom SatisfactionT ForAllLink :: (v <~ ListT,i <~ ImplicationT) => TVal -> Atom v -> Atom i -> Atom ForAllT AverageLink :: (v <~ VariableT,a <~ AtomT) => TVal -> Atom v -> Atom a -> Atom AverageT QuoteLink :: (a <~ AtomT) => Atom a -> Atom a BindLink :: (v <~ VariableT,p <~ AtomT,q <~ AtomT) => Atom v -> Atom p -> Atom q -> Atom BindT deriving instance Show (Atom a) deriving instance Typeable Atom -- TODO: improve this code, defining an instance of Data for Atom GADT -- and use the Data type class interface to get the constructor. getType :: (Typeable a) => Atom a -> String getType = head . words . show
printedheart/atomspace
opencog/haskell/OpenCog/AtomSpace/Types.hs
agpl-3.0
6,314
0
10
2,259
1,226
677
549
-1
-1
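-- The "DEFINING NEW ATOM TYPES" recipe in the module above can be made concrete
-- with a small sketch. Everything named here (AnimalT, EatsT, AnimalNode,
-- EatsLink) is a hypothetical illustration of the node and fixed-arity-link
-- patterns that comment describes, not part of the real bindings; an actual
-- extension would also need the AtomType phantom types and the
-- Internal.toRaw/fromRaw cases mentioned there.
--
--   AnimalNode :: AtomName -> TVal -> Atom AnimalT
--   EatsLink   :: (a <~ ConceptT, b <~ ConceptT)
--              => TVal -> Atom a -> Atom b -> Atom EatsT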
-- types and typeclasses

removeNonUpperCase st = [c | c <- st, c `elem` ['A'..'Z']]
-- now, we can call removeNonUpperCase "AaBb"

addThree x y z = x + y + z

factorial n = product [1..n]

circumference r = 2 * pi * r

-- :t head
-- head has a generic type (it takes a list of any type and returns an element of that type)

-- typeclasses
-- :t (==)

lengthPlusVal = fromIntegral (length [1,2,3,4]) + 3.2
pwittchen/learning-haskell
tasks/03_types_and_typeclasses.hs
apache-2.0
392
0
9
80
123
68
55
5
1
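-- The snippet above leaves all types to be inferred; as a hedged illustration
-- (worked out by hand, not taken from the original file), the signatures GHC
-- infers for those definitions are roughly:
--
--   removeNonUpperCase :: [Char] -> [Char]
--   addThree           :: Num a => a -> a -> a -> a
--   factorial          :: (Num a, Enum a) => a -> a
--   circumference      :: Floating a => a -> a
--   lengthPlusVal      :: Double   -- Fractional a => a before defaulting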
-- Knight's quest
-- example from chapter 12 of LYAH
-- ex: runhaskell --ghc-arg="-package sort" haskell/knights_quest.hs

import Control.Monad ((<=<))
import Data.Sort (sort)
import Data.List (nub)

type KnightPos = (Int,Int)

valid :: KnightPos -> Bool
valid (c,r) = c `elem` [1..8] && r `elem` [1..8]

generateMoves :: KnightPos -> [KnightPos]
generateMoves (c,r) =
  [(c + fst m * a, r + snd m * b) | m <- [(1,2), (2,1)], a <- [1,-1], b <- [1,-1]]

moveKnight :: KnightPos -> [KnightPos]
moveKnight pos = filter valid $ generateMoves pos

in3 :: KnightPos -> [KnightPos]
in3 start = return start >>= moveKnight >>= moveKnight >>= moveKnight

canReachIn3 :: KnightPos -> KnightPos -> Bool
canReachIn3 start end = end `elem` in3 start

inMany :: Int -> KnightPos -> [KnightPos]
inMany x start = return start >>= foldr (<=<) return (replicate x moveKnight)

canReachIn :: Int -> KnightPos -> KnightPos -> Bool
canReachIn x start end = end `elem` inMany x start

main = do
  putStrLn "From (4,4) a knight can move to:"
  print $ moveKnight (4,4)
  putStrLn "From (1,1) a knight can move to:"
  print $ moveKnight (1,1)
  putStrLn "From (1,3) a knight can move to:"
  print $ moveKnight (1,3)
  putStrLn "Can reach in 3 moves"
  putStrLn (show (nub $ sort $ in3 (4,4)))
  putStrLn $ "Can reach in 1 moves: " ++ (show (length (nub $ sort $ inMany 1 (1,2))))
  putStrLn $ "Can reach in 2 moves: " ++ (show (length (nub $ sort $ inMany 2 (1,2))))
  putStrLn $ "Can reach in 3 moves: " ++ (show (length (nub $ sort $ inMany 3 (1,2))))
  putStrLn $ "Can reach in 4 moves: " ++ (show (length (nub $ sort $ inMany 4 (1,2))))
  putStrLn $ "Can reach in 5 moves: " ++ (show (length (nub $ sort $ inMany 5 (1,2))))
cbare/Etudes
haskell/knights_quest.hs
apache-2.0
1,754
0
15
389
733
388
345
35
1
----------------------------------------------------------------------------- -- | -- Module : Parsimony.IO -- Copyright : (c) Iavor S. Diatchki 2009 -- License : BSD3 -- -- Maintainer : iavor.diatchki@gmail.com -- Stability : provisional -- -- Utilities for parsing content from files. -- ----------------------------------------------------------------------------- module Parsimony.IO ( parseFile , parseLargeFile , parseBinaryFile , parseLargeBinaryFile , uparseFile , uparseLargeFile , uparseBinaryFile , uparseLargeBinaryFile ) where import Parsimony.Prim import Parsimony.Error import Parsimony.Combinator import Parsimony.UserState import qualified Data.ByteString as Strict import qualified Data.ByteString.Lazy as Lazy import qualified Data.Text as T import qualified Data.Text.IO as T import qualified Data.Text.Lazy as LT import qualified Data.Text.Lazy.IO as LT -- | Parse a text file in one go. -- This functions loads the whole file in memory. parseFile :: FilePath -> Parser T.Text a -> IO (Either ParseError a) parseFile f p = parseSource p f `fmap` T.readFile f -- | Parse a text file in chunks. -- This functions loads the file in chunks. parseLargeFile :: FilePath -> Parser LT.Text a -> IO (Either ParseError a) parseLargeFile f p = parseSource p f `fmap` LT.readFile f -- | Parse a binary file in one go. -- This functions loads the whole file in memory. parseBinaryFile :: FilePath -> Parser Strict.ByteString a -> IO (Either ParseError a) parseBinaryFile f p = parseSource p f `fmap` Strict.readFile f -- | Parse a text file in chunks. -- This functions loads the file in chunks. parseLargeBinaryFile :: FilePath -> Parser Lazy.ByteString a -> IO (Either ParseError a) parseLargeBinaryFile f p = parseSource p f `fmap` Lazy.readFile f -- With user state ------------------------------------------------------------- -- | Parse a text file in one go, using user state. -- This functions loads the whole file in memory. uparseFile :: FilePath -> ParserU u T.Text a -> u -> IO (Either ParseError a) uparseFile f p u = uparseSource p u f `fmap` T.readFile f -- | Parse a text file in chunks, using user state. -- This functions loads the file in chunks. uparseLargeFile :: FilePath -> ParserU u LT.Text a -> u -> IO (Either ParseError a) uparseLargeFile f p u = uparseSource p u f `fmap` LT.readFile f -- | Parse a binary file in one go, using user state. -- This functions loads the whole file in memory. uparseBinaryFile :: FilePath -> ParserU u Strict.ByteString a -> u -> IO (Either ParseError a) uparseBinaryFile f p u = uparseSource p u f `fmap` Strict.readFile f -- | Parse a text file in chunks, using user state. -- This functions loads the file in chunks. uparseLargeBinaryFile :: FilePath -> ParserU u Lazy.ByteString a -> u -> IO (Either ParseError a) uparseLargeBinaryFile f p u = uparseSource p u f `fmap` Lazy.readFile f
yav/parsimony
src/Parsimony/IO.hs
bsd-2-clause
3,237
0
10
826
630
347
283
40
1
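-- A minimal usage sketch for the strict-text variant above (an assumed wrapper,
-- not part of Parsimony; `p` stands for any Parser T.Text a built from the
-- library's combinators): run the parser over a file and fold the Either result.
--
--   parseOrFail :: FilePath -> Parser T.Text a -> IO a
--   parseOrFail path p = parseFile path p >>= either (fail . show) return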
module Main (main) where

import Saturnin.Server

main :: IO ()
main = runYBServer
yaccz/saturnin
executable/main.hs
bsd-3-clause
82
0
6
14
29
17
12
4
1
module PgRecorder.Config
  ( prettyVersion
  , minimumPgVersion
  , readOptions
  , AppConfig (..)
  ) where

import PgRecorder.Prelude
import qualified Data.Text as T
import Data.Version (versionBranch)
import Options.Applicative
import Options.Applicative.Text
import Paths_pg_recorder (version)

-- | Data type to store all command line options
data AppConfig = AppConfig
  { configDatabase :: Text
  , channel :: Text
  , dispatcherFunction :: Text
  }

argParser :: Parser AppConfig
argParser = AppConfig
  <$> argument text (help "(REQUIRED) database connection string, e.g. postgres://user:pass@host:port/db" <> metavar "DB_URL")
  <*> textOption (long "channel" <> short 'c' <> help "(REQUIRED) channel to listen to notifications for async commands" <> metavar "CHANNEL")
  <*> textOption (long "dispatcher-function" <> short 'f' <> help "(REQUIRED) function called to dispatch notifications for async commands" <> metavar "DISPATCHER_FUNCTION")

-- | User friendly version number
prettyVersion :: Text
prettyVersion = T.intercalate "." $ show <$> versionBranch version

-- | Tells the minimum PostgreSQL version required by this version of Haskell Tools
minimumPgVersion :: Integer
minimumPgVersion = 90500

-- | Function to read and parse options from the command line
readOptions :: IO AppConfig
readOptions = customExecParser parserPrefs opts
  where
    opts = info (helper <*> argParser) $
      fullDesc <> (progDesc . toS)
        ( ("pg-recorder " :: Text)
          <> prettyVersion
          <> (" / Records database notifications" :: Text)
        )
    parserPrefs = prefs showHelpOnError
diogob/pg-recorder
src/PgRecorder/Config.hs
bsd-3-clause
1,947
0
12
630
320
175
145
31
1
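-- A small usage sketch (an assumption about intended use, not shipped with
-- pg-recorder): parse the options defined above and print the chosen channel.
module Main (main) where

import PgRecorder.Config

main :: IO ()
main = do
  conf <- readOptions
  print (channel conf)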
{-# LANGUAGE FlexibleContexts, OverloadedStrings #-}

module Application where

import Imports hiding ((</>))
import Routes
import System.Directory
import Path.Extended
import qualified Data.Text as T
import Network.Wai.Session

data AuthRole = NeedsLogin

authorize :: ( MonadApp m
             ) => Request -> [AuthRole] -> m (Response -> Response)
authorize req ss = return id -- no auth roles yet

sessionLayer :: MiddlewareT AppM
sessionLayer app req respond = do
  cfg <- sessionCfg
  env <- ask
  liftMiddleware (`runAppT` env) (sessionMiddleware cfg) app req respond

-- * Middlewares

securityLayer :: MonadApp m => MiddlewareT m
securityLayer = extractAuth authorize routes

loginLayer :: MonadApp m => MiddlewareT m
loginLayer = route loginRoutes

contentLayer :: MonadApp m => MiddlewareT m
contentLayer = route routes

staticLayer :: MonadApp m => MiddlewareT m
staticLayer app req respond = do
  let fileRequested = T.unpack . T.intercalate "/" $ pathInfo req
  basePath <- envStatic <$> ask
  let file = toFilePath basePath ++ fileRequested
  fileExists <- liftIO (doesFileExist file)
  if fileExists
    then respond $ responseFile status200 [] file Nothing
    else app req respond

defApp :: Application
defApp _ respond = respond $ textOnlyStatus status404 "Not Found! :("
athanclark/nested-routes-website
src/Application.hs
bsd-3-clause
1,334
0
13
274
383
196
187
37
2
{-# LANGUAGE TypeOperators, MultiParamTypeClasses #-} -- | In most cases, point-free programming is quite lovely. However, -- when I find mysefl expressing deep structural manipulations, such -- as `(bfirst . bsecond . bfirst . bfirst . bsecond) action`, it -- can feel verbose. Use of local `where` clauses will help, but I -- would often prefer to have pre-defined names for quick access. -- -- This module, BStruct, provides many convenience operations. These -- can be grouped into four classes: -- -- deep application of behaviors (similar to Lisp setcaddr!) -- deep extraction of signals from products (like Lisp caddr) -- deep injection of signals that model choices -- treating complex signals as a FORTH-like stack. -- -- BStruct thus provides convenient deep structural abstractions. -- -- See Also: -- FRP.Sirea.Behavior -- module Sirea.BDeep ( bxf, bxs, bxff, bxfs, bxsf, bxss -- DEEP EXTRACTION (30) , bxfff, bxffs, bxfsf, bxfss, bxsff, bxsfs, bxssf, bxsss , bxffff, bxfffs, bxffsf, bxffss, bxfsff, bxfsfs, bxfssf, bxfsss , bxsfff, bxsffs, bxsfsf, bxsfss, bxssff, bxssfs, bxsssf, bxssss , binl, binr, binll, binlr, binrl, binrr -- DEEP INJECTION (30) , binlll, binllr, binlrl, binlrr, binrll, binrlr, binrrl, binrrr , binllll, binlllr, binllrl, binllrr, binlrll, binlrlr, binlrrl, binlrrr , binrlll, binrllr, binrlrl, binrlrr, binrrll, binrrlr, binrrrl, binrrrr , bonf, bons, bonff, bonfs, bonsf, bonss -- DEEP (:&:) APPLICATION (30) , bonfff, bonffs, bonfsf, bonfss, bonsff, bonsfs, bonssf, bonsss , bonffff, bonfffs, bonffsf, bonffss, bonfsff, bonfsfs, bonfssf, bonfsss , bonsfff, bonsffs, bonsfsf, bonsfss, bonssff, bonssfs, bonsssf, bonssss , bonl, bonr, bonll, bonlr, bonrl, bonrr -- DEEP (:|:) APPLICATION (30) , bonlll, bonllr, bonlrl, bonlrr, bonrll, bonrlr, bonrrl, bonrrr , bonllll, bonlllr, bonllrl, bonllrr, bonlrll, bonlrlr, bonlrrl, bonlrrr , bonrlll, bonrllr, bonrlrl, bonrlrr, bonrrll, bonrrlr, bonrrrl, bonrrrr ) where -- TODO: Also consider design of auto-wiring based on type tags. 
import Control.Category ((<<<)) import Sirea.Behavior -- | DEEP EXTRACTION FOR PRODUCTS (:&:) bxf :: (BProd b) => b (e :&: s0) e bxs :: (BProd b) => b (f0 :&: e ) e bxff :: (BProd b) => b ((e :&: s0) :&: s1) e bxfs :: (BProd b) => b ((f0 :&: e ) :&: s1) e bxsf :: (BProd b) => b (f1 :&: (e :&: s0)) e bxss :: (BProd b) => b (f1 :&: (f0 :&: e )) e bxfff :: (BProd b) => b (((e :&: s0) :&: s1) :&: s2) e bxffs :: (BProd b) => b (((f0 :&: e ) :&: s1) :&: s2) e bxfsf :: (BProd b) => b ((f1 :&: (e :&: s0)) :&: s2) e bxfss :: (BProd b) => b ((f1 :&: (f0 :&: e )) :&: s2) e bxsff :: (BProd b) => b (f2 :&: ((e :&: s0) :&: s1)) e bxsfs :: (BProd b) => b (f2 :&: ((f0 :&: e ) :&: s1)) e bxssf :: (BProd b) => b (f2 :&: (f1 :&: (e :&: s0))) e bxsss :: (BProd b) => b (f2 :&: (f1 :&: (f0 :&: e ))) e bxffff :: (BProd b) => b ((((e :&: s0) :&: s1) :&: s2) :&: s3) e bxfffs :: (BProd b) => b ((((f0 :&: e ) :&: s1) :&: s2) :&: s3) e bxffsf :: (BProd b) => b (((f1 :&: (e :&: s0)) :&: s2) :&: s3) e bxffss :: (BProd b) => b (((f1 :&: (f0 :&: e )) :&: s2) :&: s3) e bxfsff :: (BProd b) => b ((f2 :&: ((e :&: s0) :&: s1)) :&: s3) e bxfsfs :: (BProd b) => b ((f2 :&: ((f0 :&: e ) :&: s1)) :&: s3) e bxfssf :: (BProd b) => b ((f2 :&: (f1 :&: (e :&: s0))) :&: s3) e bxfsss :: (BProd b) => b ((f2 :&: (f1 :&: (f0 :&: e ))) :&: s3) e bxsfff :: (BProd b) => b (f3 :&: (((e :&: s0) :&: s1) :&: s2)) e bxsffs :: (BProd b) => b (f3 :&: (((f0 :&: e ) :&: s1) :&: s2)) e bxsfsf :: (BProd b) => b (f3 :&: ((f1 :&: (e :&: s0)) :&: s2)) e bxsfss :: (BProd b) => b (f3 :&: ((f1 :&: (f0 :&: e )) :&: s2)) e bxssff :: (BProd b) => b (f3 :&: (f2 :&: ((e :&: s0) :&: s1))) e bxssfs :: (BProd b) => b (f3 :&: (f2 :&: ((f0 :&: e ) :&: s1))) e bxsssf :: (BProd b) => b (f3 :&: (f2 :&: (f1 :&: (e :&: s0)))) e bxssss :: (BProd b) => b (f3 :&: (f2 :&: (f1 :&: (f0 :&: e )))) e bxf = bfst -- for consistent naming bxs = bsnd -- for consistent naming bxff = bxf >>> bxf bxfs = bxf >>> bxs bxsf = bxs >>> bxf bxss = bxs >>> bxs bxfff = bxf >>> bxff bxffs = bxf >>> bxfs bxfsf = bxf >>> bxsf bxfss = bxf >>> bxss bxsff = bxs >>> bxff bxsfs = bxs >>> bxfs bxssf = bxs >>> bxsf bxsss = bxs >>> bxss bxffff = bxf >>> bxfff bxfffs = bxf >>> bxffs bxffsf = bxf >>> bxfsf bxffss = bxf >>> bxfss bxfsff = bxf >>> bxsff bxfsfs = bxf >>> bxsfs bxfssf = bxf >>> bxssf bxfsss = bxf >>> bxsss bxsfff = bxs >>> bxfff bxsffs = bxs >>> bxffs bxsfsf = bxs >>> bxfsf bxsfss = bxs >>> bxfss bxssff = bxs >>> bxsff bxssfs = bxs >>> bxsfs bxsssf = bxs >>> bxssf bxssss = bxs >>> bxsss -- | DEEP INJECTION FOR SUMS (:|:) -- binl is defined in FRP.Sirea.Behavior. -- binr is defined in FRP.Sirea.Behavior. 
binll :: (BSum b) => b e ((e :|: r0) :|: r1) binlr :: (BSum b) => b e ((l0 :|: e ) :|: r1) binrl :: (BSum b) => b e (l1 :|: (e :|: r0)) binrr :: (BSum b) => b e (l1 :|: (l0 :|: e )) binlll :: (BSum b) => b e (((e :|: r0) :|: r1) :|: r2) binllr :: (BSum b) => b e (((l0 :|: e ) :|: r1) :|: r2) binlrl :: (BSum b) => b e ((l1 :|: (e :|: r0)) :|: r2) binlrr :: (BSum b) => b e ((l1 :|: (l0 :|: e )) :|: r2) binrll :: (BSum b) => b e (l2 :|: ((e :|: r0) :|: r1)) binrlr :: (BSum b) => b e (l2 :|: ((l0 :|: e ) :|: r1)) binrrl :: (BSum b) => b e (l2 :|: (l1 :|: (e :|: r0))) binrrr :: (BSum b) => b e (l2 :|: (l1 :|: (l0 :|: e ))) binllll :: (BSum b) => b e ((((e :|: r0) :|: r1) :|: r2) :|: r3) binlllr :: (BSum b) => b e ((((l0 :|: e ) :|: r1) :|: r2) :|: r3) binllrl :: (BSum b) => b e (((l1 :|: (e :|: r0)) :|: r2) :|: r3) binllrr :: (BSum b) => b e (((l1 :|: (l0 :|: e )) :|: r2) :|: r3) binlrll :: (BSum b) => b e ((l2 :|: ((e :|: r0) :|: r1)) :|: r3) binlrlr :: (BSum b) => b e ((l2 :|: ((l0 :|: e ) :|: r1)) :|: r3) binlrrl :: (BSum b) => b e ((l2 :|: (l1 :|: (e :|: r0))) :|: r3) binlrrr :: (BSum b) => b e ((l2 :|: (l1 :|: (l0 :|: e ))) :|: r3) binrlll :: (BSum b) => b e (l3 :|: (((e :|: r0) :|: r1) :|: r2)) binrllr :: (BSum b) => b e (l3 :|: (((l0 :|: e ) :|: r1) :|: r2)) binrlrl :: (BSum b) => b e (l3 :|: ((l1 :|: (e :|: r0)) :|: r2)) binrlrr :: (BSum b) => b e (l3 :|: ((l1 :|: (l0 :|: e )) :|: r2)) binrrll :: (BSum b) => b e (l3 :|: (l2 :|: ((e :|: r0) :|: r1))) binrrlr :: (BSum b) => b e (l3 :|: (l2 :|: ((l0 :|: e ) :|: r1))) binrrrl :: (BSum b) => b e (l3 :|: (l2 :|: (l1 :|: (e :|: r0)))) binrrrr :: (BSum b) => b e (l3 :|: (l2 :|: (l1 :|: (l0 :|: e )))) -- binl already defined in FRP.Sirea.Behavior. -- binr already defined in FRP.Sirea.Behavior. binll = binl <<< binl binlr = binl <<< binr binrl = binr <<< binl binrr = binr <<< binr binlll = binl <<< binll binllr = binl <<< binlr binlrl = binl <<< binrl binlrr = binl <<< binrr binrll = binr <<< binll binrlr = binr <<< binlr binrrl = binr <<< binrl binrrr = binr <<< binrr binllll = binl <<< binlll binlllr = binl <<< binllr binllrl = binl <<< binlrl binllrr = binl <<< binlrr binlrll = binl <<< binrll binlrlr = binl <<< binrlr binlrrl = binl <<< binrrl binlrrr = binl <<< binrrr binrlll = binr <<< binlll binrllr = binr <<< binllr binrlrl = binr <<< binlrl binrlrr = binr <<< binlrr binrrll = binr <<< binrll binrrlr = binr <<< binrlr binrrrl = binr <<< binrrl binrrrr = binr <<< binrrr -- | DEEP APPLICATIONS FOR SUMS (:|:) bonl :: (BSum b) => b e e' -> b (e :|: r0) {- ~> -} (e' :|: r0) bonr :: (BSum b) => b e e' -> b (l0 :|: e) {- ~> -} (l0 :|: e') bonll :: (BSum b) => b e e' -> b ((e :|: r0) :|: r1) {- ~> -} ((e' :|: r0) :|: r1) bonlr :: (BSum b) => b e e' -> b ((l0 :|: e ) :|: r1) {- ~> -} ((l0 :|: e') :|: r1) bonrl :: (BSum b) => b e e' -> b (l1 :|: (e :|: r0)) {- ~> -} (l1 :|: (e' :|: r0)) bonrr :: (BSum b) => b e e' -> b (l1 :|: (l0 :|: e )) {- ~> -} (l1 :|: (l0 :|: e')) bonlll :: (BSum b) => b e e' -> b (((e :|: r0) :|: r1) :|: r2) {- ~> -} (((e' :|: r0) :|: r1) :|: r2) bonllr :: (BSum b) => b e e' -> b (((l0 :|: e ) :|: r1) :|: r2) {- ~> -} (((l0 :|: e') :|: r1) :|: r2) bonlrl :: (BSum b) => b e e' -> b ((l1 :|: (e :|: r0)) :|: r2) {- ~> -} ((l1 :|: (e' :|: r0)) :|: r2) bonlrr :: (BSum b) => b e e' -> b ((l1 :|: (l0 :|: e )) :|: r2) {- ~> -} ((l1 :|: (l0 :|: e')) :|: r2) bonrll :: (BSum b) => b e e' -> b (l2 :|: ((e :|: r0) :|: r1)) {- ~> -} (l2 :|: ((e' :|: r0) :|: r1)) bonrlr :: (BSum b) => b e e' -> b (l2 :|: ((l0 :|: e ) :|: r1)) {- ~> -} (l2 
:|: ((l0 :|: e') :|: r1)) bonrrl :: (BSum b) => b e e' -> b (l2 :|: (l1 :|: (e :|: r0))) {- ~> -} (l2 :|: (l1 :|: (e' :|: r0))) bonrrr :: (BSum b) => b e e' -> b (l2 :|: (l1 :|: (l0 :|: e ))) {- ~> -} (l2 :|: (l1 :|: (l0 :|: e'))) bonllll :: (BSum b) => b e e' -> b ((((e :|: r0) :|: r1) :|: r2) :|: r3) {- ~> -} ((((e' :|: r0) :|: r1) :|: r2) :|: r3) bonlllr :: (BSum b) => b e e' -> b ((((l0 :|: e ) :|: r1) :|: r2) :|: r3) {- ~> -} ((((l0 :|: e') :|: r1) :|: r2) :|: r3) bonllrl :: (BSum b) => b e e' -> b (((l1 :|: (e :|: r0)) :|: r2) :|: r3) {- ~> -} (((l1 :|: (e' :|: r0)) :|: r2) :|: r3) bonllrr :: (BSum b) => b e e' -> b (((l1 :|: (l0 :|: e )) :|: r2) :|: r3) {- ~> -} (((l1 :|: (l0 :|: e')) :|: r2) :|: r3) bonlrll :: (BSum b) => b e e' -> b ((l2 :|: ((e :|: r0) :|: r1)) :|: r3) {- ~> -} ((l2 :|: ((e' :|: r0) :|: r1)) :|: r3) bonlrlr :: (BSum b) => b e e' -> b ((l2 :|: ((l0 :|: e ) :|: r1)) :|: r3) {- ~> -} ((l2 :|: ((l0 :|: e') :|: r1)) :|: r3) bonlrrl :: (BSum b) => b e e' -> b ((l2 :|: (l1 :|: (e :|: r0))) :|: r3) {- ~> -} ((l2 :|: (l1 :|: (e' :|: r0))) :|: r3) bonlrrr :: (BSum b) => b e e' -> b ((l2 :|: (l1 :|: (l0 :|: e ))) :|: r3) {- ~> -} ((l2 :|: (l1 :|: (l0 :|: e'))) :|: r3) bonrlll :: (BSum b) => b e e' -> b (l3 :|: (((e :|: r0) :|: r1) :|: r2)) {- ~> -} (l3 :|: (((e' :|: r0) :|: r1) :|: r2)) bonrllr :: (BSum b) => b e e' -> b (l3 :|: (((l0 :|: e ) :|: r1) :|: r2)) {- ~> -} (l3 :|: (((l0 :|: e') :|: r1) :|: r2)) bonrlrl :: (BSum b) => b e e' -> b (l3 :|: ((l1 :|: (e :|: r0)) :|: r2)) {- ~> -} (l3 :|: ((l1 :|: (e' :|: r0)) :|: r2)) bonrlrr :: (BSum b) => b e e' -> b (l3 :|: ((l1 :|: (l0 :|: e )) :|: r2)) {- ~> -} (l3 :|: ((l1 :|: (l0 :|: e')) :|: r2)) bonrrll :: (BSum b) => b e e' -> b (l3 :|: (l2 :|: ((e :|: r0) :|: r1))) {- ~> -} (l3 :|: (l2 :|: ((e' :|: r0) :|: r1))) bonrrlr :: (BSum b) => b e e' -> b (l3 :|: (l2 :|: ((l0 :|: e ) :|: r1))) {- ~> -} (l3 :|: (l2 :|: ((l0 :|: e') :|: r1))) bonrrrl :: (BSum b) => b e e' -> b (l3 :|: (l2 :|: (l1 :|: (e :|: r0)))) {- ~> -} (l3 :|: (l2 :|: (l1 :|: (e' :|: r0)))) bonrrrr :: (BSum b) => b e e' -> b (l3 :|: (l2 :|: (l1 :|: (l0 :|: e )))) {- ~> -} (l3 :|: (l2 :|: (l1 :|: (l0 :|: e')))) bonl = bleft -- for consistent naming bonr = bright -- for consistent naming bonll = bonl . bonl bonlr = bonl . bonr bonrl = bonr . bonl bonrr = bonr . bonr bonlll = bonl . bonll bonllr = bonl . bonlr bonlrl = bonl . bonrl bonlrr = bonl . bonrr bonrll = bonr . bonll bonrlr = bonr . bonlr bonrrl = bonr . bonrl bonrrr = bonr . bonrr bonllll = bonl . bonlll bonlllr = bonl . bonllr bonllrl = bonl . bonlrl bonllrr = bonl . bonlrr bonlrll = bonl . bonrll bonlrlr = bonl . bonrlr bonlrrl = bonl . bonrrl bonlrrr = bonl . bonrrr bonrlll = bonr . bonlll bonrllr = bonr . bonllr bonrlrl = bonr . bonlrl bonrlrr = bonr . bonlrr bonrrll = bonr . bonrll bonrrlr = bonr . bonrlr bonrrrl = bonr . bonrrl bonrrrr = bonr . 
bonrrr -- | DEEP APPLICATIONS FOR PRODUCTS (:&:) bonf :: (BProd b) => b e e' -> b (e :&: s0) {- ~> -} (e' :&: s0) bons :: (BProd b) => b e e' -> b (f0 :&: e) {- ~> -} (f0 :&: e') bonff :: (BProd b) => b e e' -> b ((e :&: s0) :&: s1) {- ~> -} ((e' :&: s0) :&: s1) bonfs :: (BProd b) => b e e' -> b ((f0 :&: e ) :&: s1) {- ~> -} ((f0 :&: e') :&: s1) bonsf :: (BProd b) => b e e' -> b (f1 :&: (e :&: s0)) {- ~> -} (f1 :&: (e' :&: s0)) bonss :: (BProd b) => b e e' -> b (f1 :&: (f0 :&: e )) {- ~> -} (f1 :&: (f0 :&: e')) bonfff :: (BProd b) => b e e' -> b (((e :&: s0) :&: s1) :&: s2) {- ~> -} (((e' :&: s0) :&: s1) :&: s2) bonffs :: (BProd b) => b e e' -> b (((f0 :&: e ) :&: s1) :&: s2) {- ~> -} (((f0 :&: e') :&: s1) :&: s2) bonfsf :: (BProd b) => b e e' -> b ((f1 :&: (e :&: s0)) :&: s2) {- ~> -} ((f1 :&: (e' :&: s0)) :&: s2) bonfss :: (BProd b) => b e e' -> b ((f1 :&: (f0 :&: e )) :&: s2) {- ~> -} ((f1 :&: (f0 :&: e')) :&: s2) bonsff :: (BProd b) => b e e' -> b (f2 :&: ((e :&: s0) :&: s1)) {- ~> -} (f2 :&: ((e' :&: s0) :&: s1)) bonsfs :: (BProd b) => b e e' -> b (f2 :&: ((f0 :&: e ) :&: s1)) {- ~> -} (f2 :&: ((f0 :&: e') :&: s1)) bonssf :: (BProd b) => b e e' -> b (f2 :&: (f1 :&: (e :&: s0))) {- ~> -} (f2 :&: (f1 :&: (e' :&: s0))) bonsss :: (BProd b) => b e e' -> b (f2 :&: (f1 :&: (f0 :&: e ))) {- ~> -} (f2 :&: (f1 :&: (f0 :&: e'))) bonffff :: (BProd b) => b e e' -> b ((((e :&: s0) :&: s1) :&: s2) :&: s3) {- ~> -} ((((e' :&: s0) :&: s1) :&: s2) :&: s3) bonfffs :: (BProd b) => b e e' -> b ((((f0 :&: e ) :&: s1) :&: s2) :&: s3) {- ~> -} ((((f0 :&: e') :&: s1) :&: s2) :&: s3) bonffsf :: (BProd b) => b e e' -> b (((f1 :&: (e :&: s0)) :&: s2) :&: s3) {- ~> -} (((f1 :&: (e' :&: s0)) :&: s2) :&: s3) bonffss :: (BProd b) => b e e' -> b (((f1 :&: (f0 :&: e )) :&: s2) :&: s3) {- ~> -} (((f1 :&: (f0 :&: e')) :&: s2) :&: s3) bonfsff :: (BProd b) => b e e' -> b ((f2 :&: ((e :&: s0) :&: s1)) :&: s3) {- ~> -} ((f2 :&: ((e' :&: s0) :&: s1)) :&: s3) bonfsfs :: (BProd b) => b e e' -> b ((f2 :&: ((f0 :&: e ) :&: s1)) :&: s3) {- ~> -} ((f2 :&: ((f0 :&: e') :&: s1)) :&: s3) bonfssf :: (BProd b) => b e e' -> b ((f2 :&: (f1 :&: (e :&: s0))) :&: s3) {- ~> -} ((f2 :&: (f1 :&: (e' :&: s0))) :&: s3) bonfsss :: (BProd b) => b e e' -> b ((f2 :&: (f1 :&: (f0 :&: e ))) :&: s3) {- ~> -} ((f2 :&: (f1 :&: (f0 :&: e'))) :&: s3) bonsfff :: (BProd b) => b e e' -> b (f3 :&: (((e :&: s0) :&: s1) :&: s2)) {- ~> -} (f3 :&: (((e' :&: s0) :&: s1) :&: s2)) bonsffs :: (BProd b) => b e e' -> b (f3 :&: (((f0 :&: e ) :&: s1) :&: s2)) {- ~> -} (f3 :&: (((f0 :&: e') :&: s1) :&: s2)) bonsfsf :: (BProd b) => b e e' -> b (f3 :&: ((f1 :&: (e :&: s0)) :&: s2)) {- ~> -} (f3 :&: ((f1 :&: (e' :&: s0)) :&: s2)) bonsfss :: (BProd b) => b e e' -> b (f3 :&: ((f1 :&: (f0 :&: e )) :&: s2)) {- ~> -} (f3 :&: ((f1 :&: (f0 :&: e')) :&: s2)) bonssff :: (BProd b) => b e e' -> b (f3 :&: (f2 :&: ((e :&: s0) :&: s1))) {- ~> -} (f3 :&: (f2 :&: ((e' :&: s0) :&: s1))) bonssfs :: (BProd b) => b e e' -> b (f3 :&: (f2 :&: ((f0 :&: e ) :&: s1))) {- ~> -} (f3 :&: (f2 :&: ((f0 :&: e') :&: s1))) bonsssf :: (BProd b) => b e e' -> b (f3 :&: (f2 :&: (f1 :&: (e :&: s0)))) {- ~> -} (f3 :&: (f2 :&: (f1 :&: (e' :&: s0)))) bonssss :: (BProd b) => b e e' -> b (f3 :&: (f2 :&: (f1 :&: (f0 :&: e )))) {- ~> -} (f3 :&: (f2 :&: (f1 :&: (f0 :&: e')))) bonf = bfirst -- for consistent naming bons = bsecond -- for consistent naming bonff = bonf . bonf bonfs = bonf . bons bonsf = bons . bonf bonss = bons . bons bonfff = bonf . bonff bonffs = bonf . bonfs bonfsf = bonf . bonsf bonfss = bonf . 
bonss bonsff = bons . bonff bonsfs = bons . bonfs bonssf = bons . bonsf bonsss = bons . bonss bonffff = bonf . bonfff bonfffs = bonf . bonffs bonffsf = bonf . bonfsf bonffss = bonf . bonfss bonfsff = bonf . bonsff bonfsfs = bonf . bonsfs bonfssf = bonf . bonssf bonfsss = bonf . bonsss bonsfff = bons . bonfff bonsffs = bons . bonffs bonsfsf = bons . bonfsf bonsfss = bons . bonfss bonssff = bons . bonsff bonssfs = bons . bonsfs bonsssf = bons . bonssf bonssss = bons . bonsss -- | The stack-based operations treat complex signals like a stack. -- The complex signal: -- -- (x :&: (y :&: (z :&: ... -- -- Is treated as a stack with at least 3 elements. The last element -- is not accessible, so if we have a stack with exactly three -- elements: -- -- (x :&: (y :&: (z :&: omega))) -- -- Then the omega element is not accessible to stack operations. The -- usual action is to "create" the initial stack by duplicating -- some behavior. -- TODO: convenience operators? -- I've added Bdeep - eqvs. of bcadadr and setf bcadadr from Lisp -- Need some stack-like operators -- on (x :&: (y :&: (z :& ... -- kswap, krotl(3,4,5,6,7), krotr(3,4,5,6,7), kdup, kover, -- kdisjoin would be feasible for some number of arguments. -- ktake,kput -- Maybe some support for data-driven dynamic patterns. -- folds, recursion --
dmbarbour/Sirea
src/Sirea/BDeep.hs
bsd-3-clause
16,395
0
15
4,148
8,430
4,721
3,709
256
1
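-- A reading aid for the naming scheme in the module above (an informal gloss,
-- not taken from the source): each suffix letter picks one step into the
-- structure, outermost step first, for example
--
--   bxfs      -- extract: first component, then its second    (bxf >>> bxs)
--   bonsf f   -- apply f to the first element of the second pair (bons . bonf)
--   binrl     -- inject into the left branch of the right alternative (binr <<< binl)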
{-# LANGUAGE OverloadedStrings #-}

module Main where

import Data.Maybe (fromMaybe)
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Network
import Options.Applicative

data Options = Options
  { optionsPort :: Maybe Int
  }

main :: IO ()
main = do
  Options port <- execParser opts
  cmd <- T.getLine
  client cmd (PortNumber . fromIntegral $ fromMaybe 4242 port) >>= print
  where
    parser = Options <$> optional (option auto (long "port" <> short 'p'))
    opts = info parser mempty

client :: T.Text -> PortID -> IO T.Text
client cmd port = do
  h <- connectTo "localhost" port
  T.hPutStrLn h cmd
  T.hGetLine h
passy/psc-ide
src/Main.hs
bsd-3-clause
711
0
13
197
228
117
111
21
1
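-- A hedged GHCi sketch of driving the client above directly (the command
-- string is a placeholder, not a real psc-ide message, and a server is
-- assumed to already be listening on the default port 4242):
--
--   >>> client "<command>" (PortNumber 4242) >>= T.putStrLn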
module ResultWorthy.Tests.SwiftParsers where

import ResultWorthy.SwiftParsers
import ResultWorthy.Parsers

import Test.HUnit
import Data.Either
import Text.Parsec.String

assertDidParse :: Parser a -> String -> Assertion
assertDidParse p s = assertBool ("Did Not Parse " ++ s) (isRight (parseString p s))

assertDidNotParse :: Parser a -> String -> Assertion
assertDidNotParse p s = assertBool ("Did Parse " ++ s) (isLeft (parseString p s))

-- Function Parsing
tests :: Test
tests = TestList
  [ TestLabel "Parses Functions" $ TestCase $
      assertDidParse functionDeclParser "func foo() -> Bool"
  , TestLabel "Fails To Parse Bad Functions" $ TestCase $
      assertDidNotParse functionDeclParser "func fooa aa() -> Bool"
  ]
lawrencelomax/ResultWorthy
ResultWorthy/Tests/SwiftParsers.hs
bsd-3-clause
715
0
9
122
184
96
88
15
1
module Database.HongoDB.Base (
  DB(..),
  Action(..),
  ) where

import qualified Data.ByteString.Char8 as B
import qualified Data.Enumerator as E
import Data.Maybe

data Action
  = Replace B.ByteString
  | Remove
  | Nop

class (Monad m) => DB m where
  accept :: B.ByteString -> (Maybe B.ByteString -> m (Action, a)) -> m a

  get :: B.ByteString -> m (Maybe B.ByteString)
  get key = accept key $ \r -> return (Nop, r)
  {-# INLINABLE get #-}

  set :: B.ByteString -> B.ByteString -> m ()
  set key val = accept key $ \_ -> return (Replace val, ())
  {-# INLINABLE set #-}

  add :: B.ByteString -> B.ByteString -> m Bool
  add key val = accept key f
    where
      f Nothing = return (Replace val, True)
      f _       = return (Nop, False)
  {-# INLINABLE add #-}

  remove :: B.ByteString -> m Bool
  remove key = accept key $ \m -> return (Remove, isJust m)
  {-# INLINABLE remove #-}

  count :: m Int
  clear :: m ()

  enum :: m (E.Enumerator (B.ByteString, B.ByteString) m a)
tanakh/HongoDB
Database/HongoDB/Base.hs
bsd-3-clause
1,009
0
12
259
401
216
185
27
0
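-- A derived helper written only against the DB class above, as an
-- illustration (the module name is hypothetical and this is not part of
-- HongoDB): look a key up and fall back to a default value.
module Database.HongoDB.Extras (getWithDefault) where

import Database.HongoDB.Base
import qualified Data.ByteString.Char8 as B
import Data.Maybe (fromMaybe)

-- | Return the stored value for 'key', or 'def' when the key is absent.
getWithDefault :: DB m => B.ByteString -> B.ByteString -> m B.ByteString
getWithDefault def key = do
  mv <- get key
  return (fromMaybe def mv)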
{-# LANGUAGE OverloadedStrings         #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE RecordWildCards           #-}

module Web.UAParser
    ( -- * Readying parser
      UAConfig
    , loadUAParser

    -- * Parsing browser (user agent)
    , parseUA
    , UAResult (..)
    , uarVersion

    -- * Parsing OS
    , parseOS
    , OSResult (..)
    , osrVersion
    ) where

-------------------------------------------------------------------------------
import           Control.Applicative
import           Control.Monad
import           Data.Aeson
import           Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as B
import           Data.Text             (Text)
import qualified Data.Text             as T
import qualified Data.Text.Encoding    as T
import           Data.Yaml
import           Text.Regex.PCRE.Light
import           System.FilePath.Posix
-------------------------------------------------------------------------------
import           Paths_ua_parser
import           Web.UAParser.Core
-------------------------------------------------------------------------------

-------------------------------------------------------------------------------
-- | Load a user agent string parser state, ready to be used with one
-- of the parsing functions.
--
-- This function will load the YAML parser definitions stored in
-- package's cabal 'getDataDir'.
loadUAParser :: IO UAConfig
loadUAParser = do
    dir <- getDataDir
    loadConfig $ dir </> "resources" </> "user_agent_parser.yaml"
ozataman/ua-parser-standalone
src/Web/UAParser.hs
bsd-3-clause
1,527
0
9
337
179
119
60
30
1
module Singletons.Sections where

import Data.Singletons
import Data.Singletons.Prelude.List
import Data.Singletons.SuppressUnusedWarnings
import Data.Singletons.TH
import Singletons.Nat

$(singletons [d|
  (+) :: Nat -> Nat -> Nat
  Zero + m = m
  (Succ n) + m = Succ (n + m)

  foo1 :: [Nat]
  foo1 = map ((Succ Zero)+) [Zero, Succ Zero]

  foo2 :: [Nat]
  foo2 = map (+(Succ Zero)) [Zero, Succ Zero]

  foo3 :: [Nat]
  foo3 = zipWith (+) [Succ Zero, Succ Zero] [Zero, Succ Zero]
 |])

foo1a :: Proxy Foo1
foo1a = Proxy

foo1b :: Proxy [Succ Zero, Succ (Succ Zero)]
foo1b = foo1a

foo2a :: Proxy Foo2
foo2a = Proxy

foo2b :: Proxy [Succ Zero, Succ (Succ Zero)]
foo2b = foo2a

foo3a :: Proxy Foo3
foo3a = Proxy

foo3b :: Proxy [Succ Zero, Succ (Succ Zero)]
foo3b = foo3a
int-index/singletons
tests/compile-and-dump/Singletons/Sections.hs
bsd-3-clause
772
0
9
157
181
100
81
-1
-1
module ACO where import Data.Graph.Inductive as G import Control.Applicative ((<$>)) import Text.Printf import Data.Maybe import Data.Map as M import Prelude as P import Data.List as L import System.Random as R import System.Environment (getArgs) import Control.Parallel import Control.DeepSeq import Control.Monad.Par import System.Console.ArgParser import System.FilePath instance NFData StdGen data Arguments = Arguments Int Int FilePath FilePath Int Bool runArgParser :: ParserSpec Arguments runArgParser = Arguments `parsedBy`reqPos "from_node" `Descr` "a starting point node represented by an integer" `andBy` reqPos "to_node" `Descr` "a destination point node represented by an integer" `andBy` reqPos "path_file" `Descr` "a file defining the paths (routs) in the graph" `andBy` reqPos "node_file" `Descr` "a file defining the nodes (stations) in the graph" `andBy` optPos 500 "number_of_ants" `andBy` boolFlag "p" `Descr` "run in parallel" defaultMain :: IO () defaultMain = mkApp runArgParser >>= \args -> runApp args setup setup :: Arguments -> IO () setup (Arguments from to pathFile nodeFile n p) = do s1 <- mkLNodes . lines <$> readFile nodeFile m <- return . M.fromList $ P.map (\(a,b) -> (b,a)) s1 priNodes <-return . fromList $ P.map (\(a,b) -> (a,1)) s1 s2 <- mkLEdges pathFile m . lines <$> readFile pathFile let g = mkGraph s1 s2 :: Gr String Integer in let totLength = P.foldr (\(a,b,c) -> (+) c) 0 s2 in if not p then do (priNodes,path) <- newStdGen >>= \gen -> return (runIt (from,to) n 0 totLength gen priNodes g []) printRes g path else do (priNodes,path) <- newStdGen >>= \gen -> return (runItPar (from,to) n 0 totLength gen priNodes g []) printRes g path printRes :: Graph gr => gr String Integer -> Path -> IO () printRes g path = do mapM_ (uncurry $ printf "%-20s %-3i\n") $ reverse (pathToStrings path g) `zip` scanl (+) 0 (reverse $ pathTimes path g) printf "\ntotal length = %i\n" $ pathLength path g putStrLn (replicate 80 '#') --Graph Creation -------------------------------------------------------------------------------- mkLNodes :: [String] -> [LNode String] mkLNodes ss = [0 .. ] `zip` ss mkLEdges :: String -> Map String Node -> [String] -> [LEdge Integer] mkLEdges p m xs | snd (splitFileName p) == "paths.txt" = mkLEdgesGbg m xs | otherwise = mkLEdgesBvs m xs mkLEdgesBvs :: Map String Node -> [String] -> [LEdge Integer] mkLEdgesBvs _ [] = [] mkLEdgesBvs m (x:xs) = (n1,n2,d):(n2,n1,d) : mkLEdgesBvs m xs where (n1,n2,d) = (fromJust $ M.lookup s1 m, fromJust $ M.lookup s2 m,read s3) [s1,s2,s3] = words x mkLEdgesGbg :: Map String Node -> [String] -> [LEdge Integer] mkLEdgesGbg m ss = nub . 
concatMap addOneLine $ readLinePaths m ss addOneLine :: [(LNode String,Integer)] -> [LEdge Integer] addOneLine [] = [] addOneLine [x] = [] addOneLine (x:y:xs) = newEdge : addOneLine (y:xs) where newEdge = (\((node1,label1),i1) ((node2,label2),i2) -> (node1,node2,i2)) x y readLinePath :: Map String Node -> [String] -> [(LNode String,Integer)] readLinePath m [] = [] readLinePath m (s:ss) = ((findId label m,label) , read (drop (length label) s)) : readLinePath m ss where label = takeWhile (\x -> x /= ' ' && x /= '\t') s readLinePaths :: Map String Node -> [String] -> [[(LNode String,Integer)]] readLinePaths _ [] = [] readLinePaths m xs = lst : readLinePaths m (drop (length lst+1) xs) where lst = readLinePath m $ takeWhile (/= "") xs findId :: String -> Map String Node -> Int findId s xs = if i == (-1) then error ("The node " ++ s ++ " doesn't exist in graph") else i where i = fromMaybe (-1) $ M.lookup s xs --Run algorithm -------------------------------------------------------------------------------- --Sequential version runIt :: Graph gr => (Int,Int) -> Int -> Int -> Integer -> StdGen -> Map Node Integer -> gr String Integer -> [Node] -> ([(Node,Integer)],Path) runIt (f,t) n c len gen m g ims | c >= n = (toList m,p) | otherwise = if len' <= len then runIt (f,t) n (c+1) len' gen' (incAttraction p m) g ims' else runIt (f,t) n (c+1) len gen' m g ims' where len' = pathLength p g (p,ims',gen') = findPath gen m f f t [] g (ims,[]) --Parallel version runItPar :: Graph gr => (Int,Int) -> Int -> Int -> Integer -> StdGen -> Map Node Integer -> gr String Integer -> [Node] -> ([(Node,Integer)],Path) runItPar (f,t) n c len gen m g ims | c >= n = (toList m,p) | otherwise = if len' <= len then runItPar (f,t) n (c+1) len' gen' (incAttraction p m) (delNodes ims' g) [] else runItPar (f,t) n (c+1) len gen' m (delNodes ims' g) [] where ((p,ims',gen'),len') = getMinRes xs g xs = findPathPar $ createArgs gen m f f t [] g (ims,[]) --Scout for a path from one node to another -------------------------------------------------------------------------------- findPath :: Graph gr => StdGen -> Map Node Integer -> Node -> Node -> Node -> Path -> gr String Integer -> ([Node],[Node]) -> (Path,[Node],StdGen) findPath gen m sn tn dn p g t@(ims,vs) | tn == dn = (tn:p,ims,gen') | isNothing mNode = if p /= [] then findPath gen' m sn (head p) dn (tail p) g (ims',tn:vs) else findPath gen' m sn sn dn p g (ims',tn:vs) | otherwise = findPath gen' m sn (fromJust mNode) dn (tn:p) g (ims',vs) where (mNode,ims',gen') = shuffleNode gen m tn (suc g tn) p t findPathPar :: Graph gr => [(StdGen, Map Node Integer, Node,Node,Node,Path, gr String Integer, ([Node],[Node]))] -> [(Path,[Node],StdGen)] findPathPar xs = runPar $ parMap (\(gen, m, sn, tn, dn, p, g, t) -> findPath gen m sn tn dn p g t) xs shuffleNode :: StdGen -> Map Node Integer -> Node -> [Node] -> Path -> ([Node],[Node]) -> (Maybe Node,[Node],StdGen) shuffleNode gen _ n [] _ (ims,vs) = (Nothing,n:ims,gen) shuffleNode gen m n xs [] t = shuffleNode' gen m xs [] t shuffleNode gen m n xs pa@(p:pp) t@(ims,vs) | nodSel `elem` ims || nodSel == p = shuffleNode gen' m n (L.delete nodSel xs) pa t | otherwise = shuffleNode' gen' m xs pa t where (nodSel,gen') = selNode gen (genPN m xs) shuffleNode' :: StdGen -> Map Node Integer -> [Node] -> Path -> ([Node],[Node]) -> (Maybe Node,[Node],StdGen) shuffleNode' gen _ [] p (ims,vs) = (Nothing,ims,gen) shuffleNode' gen m xs p t@(ims,vs) | nodSel `notElem` p && nodSel `notElem` ims && nodSel `notElem` vs = (Just nodSel,ims,gen') | otherwise = 
shuffleNode' gen' m (L.delete nodSel xs) p t where (nodSel,gen') = selNode gen (genPN m xs) genPN :: Map Node Integer -> [Node] -> [(Integer,Node)] genPN _ [] = [] genPN m (x:xs) = (fromJust (M.lookup x m),x) : genPN m xs --Help functions -------------------------------------------------------------------------------- pathLength :: Graph gr => Path -> gr String Integer -> Integer pathLength [x] _ = 0 pathLength (x:y:xs) g = el + pathLength (y:xs) g where el = (\(a,b,c) -> a) . minimum . P.map (\(a,b,c) -> (c,a,b)) $ L.filter (`eqEdge` (x,y)) (out g x) eqEdge :: LEdge Integer -> Edge -> Bool eqEdge (a,b,_) (d,e) = (a,b) == (d,e) getMinRes :: Graph gr => [(Path,[Node],StdGen)] -> gr String Integer -> ((Path,[Node],StdGen),Integer) getMinRes xs gr = (xs !! index,len) where index = fromJust $ L.findIndex (== len) xs' len = minimum xs' xs' = (P.map (\(x,y,z) -> pathLength x gr) xs) mkStdGens :: Int -> StdGen -> [StdGen] mkStdGens 0 _ = [] mkStdGens n g = g:g1: mkStdGens (n-1) g2 where (g1,g2) = R.split g createArgs :: StdGen -> Map Node Integer -> Node -> Node -> Node -> Path -> gr String Integer -> ([Node],[Node]) -> [(StdGen, Map Node Integer, Node,Node,Node,Path, gr String Integer, ([Node],[Node]))] createArgs genC m sn tn dn p g t = [(gen,m,sn,tn,dn,p,g,t) | gen <- xs] where xs = mkStdGens 2 genC --Pheromone operation -------------------------------------------------------------------------------- --Choose a random node with attraction mind selNode :: StdGen -> [(Integer,Node)] -> (Node,StdGen) selNode _ [] = error "node seems to have no neighbors" selNode g xs = (selNode' sel (sort xs),g') where (sel,g') = randomR(1,range) g range = P.foldr (\(a,b) -> (+) a) 0 xs selNode' :: Integer -> [(Integer,Node)] -> Node selNode' _ [(p,n)] = n selNode' s ((p1,n1):(p2,n2):xs) | p1 >= s = n1 | otherwise = selNode' s ((p2+p1,n2):xs) --Increase attraction incAttraction :: Path -> Map Node Integer -> Map Node Integer incAttraction xs m = P.foldl (flip $ adjust succ) m xs --Easy print -------------------------------------------------------------------------------- pathTimes :: Graph gr => Path -> gr String Integer -> [Integer] pathTimes [x] _ = [] pathTimes (x:y:xs) g = el : pathTimes (y:xs) g where el = (\(a,b,c) -> a) . minimum . P.map (\(a,b,c) -> (c,a,b)) $ L.filter (`eqEdge` (x,y)) (out g x) pathToStrings :: Graph gr => Path -> gr String Integer -> [String] pathToStrings xs g = P.map (lab' . context g) xs
istehem/ACO-for-vasttrafik
ACO.hs
bsd-3-clause
9,603
0
20
2,427
4,305
2,322
1,983
166
2
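-- A hedged invocation sketch for the ACO module above (the node ids and file
-- names are placeholders): calling `setup` directly is equivalent to what
-- `defaultMain` does after parsing the same six values from the command line,
-- with the final flag selecting the parallel variant.
--
--   >>> setup (Arguments 0 5 "paths.txt" "nodes.txt" 500 False)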
-- | An Applicative functor encapsulating a monadic computation that doesn't carry its parameter.
module Control.Applicative.Effect
  ( Effect (..), effect
  ) where

-- | An Applicative that encapsulates some monadic effect and doesn't
-- care about its argument.  (It's not 'Data.Functor.Const.Const'
-- because that would require @m r@ to be a Monoid which is weird).
-- We need this in order to apply a natural transformation over a
-- 'Constraint' without actually having to solve an 'Exist' constraint
-- (because the GADT wants a result term in that case)
newtype Effect r m a = Effect { runEffect :: m r }

effect :: m r -> Effect r m a
effect = Effect

instance Functor (Effect r m) where
  fmap _ = effect . runEffect

instance (Applicative m, Monoid r) => Applicative (Effect r m) where
  pure _ = effect (pure mempty)
  ef <*> ex = effect (mappend <$> runEffect ef <*> runEffect ex)
lambdageek/small
src/Control/Applicative/Effect.hs
bsd-3-clause
898
0
10
173
176
97
79
11
1
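-- A tiny usage sketch (an assumption about intended use, not part of the
-- package): traverse a list with printing effects, relying on the Monoid
-- instance of () for the accumulated result and discarding the phantom type.
module Main (main) where

import Control.Applicative.Effect

main :: IO ()
main = runEffect effects
  where
    effects :: Effect () IO [()]
    effects = traverse (effect . print) [1, 2, 3 :: Int]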
module Stats.HammingDistance
  ( bytestrings
  , strings
  ) where

import qualified Data.ByteString as B
import Data.Word (Word8)

import Utils.Bytes (c2w)
import Utils.Elmify ((|>))

import qualified Data.Bits as BB

bytestrings :: (B.ByteString, B.ByteString) -> Int
bytestrings (bs1, bs2) =
  B.zip bs1 bs2
  |> hamming

hamming :: [(Word8, Word8)] -> Int
hamming bss =
  map (uncurry BB.xor) bss
  |> map BB.popCount
  |> sum

strings :: [Char] -> [Char] -> Int
strings str1 str2 =
  zip (map c2w str1) (map c2w str2)
  |> hamming
eelcoh/cryptochallenge
src/Stats/HammingDistance.hs
bsd-3-clause
534
0
10
104
212
120
92
21
1
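-- A usage sketch for the module above; the expected value comes from the
-- cryptopals challenge description that this repository targets, not from
-- the module itself:
--
--   >>> strings "this is a test" "wokka wokka!!!"
--   37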
{-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE ExistentialQuantification #-} module Channel where import Control.Applicative import Control.Monad.IO.Class (MonadIO, liftIO) import Control.Monad.Trans.Free import Data.Foldable (forM_) import Data.IORef import Data.Sequence (Seq, viewl, ViewL(..), (|>)) import qualified Data.Sequence as S -------------------------------------------------------------------------------- type ChanState a = IORef (Seq a) data Chan a = Chan { putChan :: a -> IO () , getChan :: IO (Maybe a) } newChan :: IO (Chan a) newChan = do q <- newIORef S.empty return $ Chan (putQ q) (getQ q) -- (putQ _) (getQ q) getQ :: ChanState a -> IO (Maybe a) getQ q = do chan <- readIORef q case viewl chan of EmptyL -> return Nothing a :< as -> do writeIORef q as return $ Just a putQ :: ChanState a -> a -> IO () putQ q a = do modifyIORef q (|> a) -------------------------------------------------------------------------------- data ChannelF next = forall a. Put (Chan a) a next | forall a. Recv (Chan a) (Maybe a -> next) | forall a. MkChan (Chan a -> next) instance Functor ChannelF where fmap f (Put chan a next) = Put chan a (f next) fmap f (Recv chan next) = Recv chan (f . next) fmap f (MkChan next) = MkChan (f . next) type Channel m = FreeT ChannelF m putC :: MonadFree ChannelF m => Chan a -> a -> m () putC chan a = liftF $ Put chan a () recvC :: MonadFree ChannelF m => Chan a -> m (Maybe a) recvC chan = liftF $ Recv chan id mkChan :: MonadFree ChannelF m => m (Chan a) mkChan = liftF $ MkChan id -------------------------------------------------------------------------------- prog :: (MonadIO m) => Channel m (Maybe (Int, String)) prog = do chan <- mkChan chan2 <- mkChan putC chan (1 :: Int) el <- recvC chan el2 <- recvC chan liftIO $ print (el, el2) forM_ [5..10 :: Int] $ (putC chan2) . show sequence_ $ replicate 6 $ do e <- recvC chan2 case e of Nothing -> return () Just n -> liftIO $ putStrLn ("Chan2 got: " ++ n) putC chan 10 putC chan2 "Bye" eInt <- recvC chan eStr <- recvC chan2 return $ (,) <$> eInt <*> eStr -------------------------------------------------------------------------------- interpret :: MonadIO m => FreeT ChannelF m b -> m b interpret chan' = do c <- runFreeT chan' case c of Pure a -> return a (Free (MkChan nxt)) -> do interpret $ nxt =<< liftIO newChan (Free (Put chan a nxt)) -> do liftIO $ putChan chan a interpret nxt (Free (Recv chan nxt)) -> do interpret $ nxt =<< liftIO (getChan chan) test :: IO (Maybe (Int, String)) test = interpret prog
boothead/free-channels
Channel.hs
bsd-3-clause
2,833
0
16
741
1,094
546
548
77
4
{-# LANGUAGE CPP #-} {-# LANGUAGE DefaultSignatures #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiWayIf #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE StandaloneDeriving #-} -- | The Config type. module Stack.Types.Config ( -- * Main configuration types and classes -- ** HasPlatform & HasStackRoot HasPlatform(..) ,HasStackRoot(..) ,PlatformVariant(..) -- ** Config & HasConfig ,Config(..) ,HasConfig(..) ,askConfig ,askLatestSnapshotUrl ,explicitSetupDeps ,getMinimalEnvOverride -- ** BuildConfig & HasBuildConfig ,BuildConfig(..) ,bcRoot ,bcWorkDir ,bcWantedCompiler ,HasBuildConfig(..) -- ** GHCVariant & HasGHCVariant ,GHCVariant(..) ,ghcVariantName ,ghcVariantSuffix ,parseGHCVariant ,HasGHCVariant(..) ,snapshotsDir -- ** EnvConfig & HasEnvConfig ,EnvConfig(..) ,HasEnvConfig(..) ,getWhichCompiler ,getCompilerPath -- * Details -- ** ApplyGhcOptions ,ApplyGhcOptions(..) -- ** ConfigException ,ConfigException(..) -- ** WhichSolverCmd ,WhichSolverCmd(..) -- ** ConfigMonoid ,ConfigMonoid(..) -- ** EnvSettings ,EnvSettings(..) ,minimalEnvSettings -- ** GlobalOpts & GlobalOptsMonoid ,GlobalOpts(..) ,GlobalOptsMonoid(..) ,defaultLogLevel -- ** LoadConfig ,LoadConfig(..) -- ** PackageEntry & PackageLocation ,PackageEntry(..) ,TreatLikeExtraDep ,PackageLocation(..) ,RemotePackageType(..) -- ** PackageIndex, IndexName & IndexLocation ,PackageIndex(..) ,IndexName(..) ,configPackageIndex ,configPackageIndexCache ,configPackageIndexGz ,configPackageIndexRoot ,configPackageIndexRepo ,configPackageTarball ,indexNameText ,IndexLocation(..) -- ** Project & ProjectAndConfigMonoid ,Project(..) ,ProjectAndConfigMonoid(..) -- ** PvpBounds ,PvpBounds(..) ,parsePvpBounds -- ** Resolver & AbstractResolver ,Resolver ,LoadedResolver ,ResolverThat's(..) ,parseResolverText ,resolverDirName ,resolverName ,customResolverHash ,toResolverNotLoaded ,AbstractResolver(..) -- ** SCM ,SCM(..) -- ** CustomSnapshot ,CustomSnapshot(..) -- ** GhcOptions ,GhcOptions(..) ,ghcOptionsFor -- ** PackageFlags ,PackageFlags(..) -- * Paths ,bindirSuffix ,configInstalledCache ,configMiniBuildPlanCache ,getProjectWorkDir ,docDirSuffix ,flagCacheLocal ,extraBinDirs ,hpcReportDir ,installationRootDeps ,installationRootLocal ,packageDatabaseDeps ,packageDatabaseExtra ,packageDatabaseLocal ,platformOnlyRelDir ,platformGhcRelDir ,useShaPathOnWindows ,getWorkDir -- * Command-specific types -- ** Eval ,EvalOpts(..) -- ** Exec ,ExecOpts(..) ,SpecialExecCmd(..) ,ExecOptsExtra(..) -- ** Setup ,DownloadInfo(..) ,VersionedDownloadInfo(..) ,SetupInfo(..) ,SetupInfoLocation(..) -- ** Docker entrypoint ,DockerEntrypoint(..) ,DockerUser(..) 
,module X ) where import Control.Applicative import Control.Arrow ((&&&)) import Control.Exception import Control.Monad (liftM, mzero, forM, join) import Control.Monad.Catch (MonadThrow, throwM) import Control.Monad.Logger (LogLevel(..)) import Control.Monad.Reader (MonadReader, ask, asks, MonadIO, liftIO) import Data.Aeson.Extended (ToJSON, toJSON, FromJSON, parseJSON, withText, object, (.=), (..:), (..:?), (..!=), Value(String, Object), withObjectWarnings, WarningParser, Object, jsonSubWarnings, jsonSubWarningsT, jsonSubWarningsTT, WithJSONWarnings(..), noJSONWarnings) import Data.Attoparsec.Args import Data.ByteString (ByteString) import qualified Data.ByteString.Char8 as S8 import Data.Either (partitionEithers) import Data.IORef (IORef) import Data.List (stripPrefix) import Data.List.NonEmpty (NonEmpty) import qualified Data.List.NonEmpty as NonEmpty import Data.Hashable (Hashable) import Data.Map (Map) import qualified Data.Map as Map import qualified Data.Map.Strict as M import Data.Maybe import Data.Monoid.Extra import Data.Set (Set) import qualified Data.Set as Set import Data.Store (Store) import Data.Text (Text) import qualified Data.Text as T import Data.Text.Encoding (encodeUtf8, decodeUtf8) import Data.Typeable import Data.Yaml (ParseException) import Distribution.System (Platform) import qualified Distribution.Text import Distribution.Version (anyVersion) import GHC.Generics (Generic) import Generics.Deriving.Monoid (memptydefault, mappenddefault) import Network.HTTP.Client (parseUrl) import Path import qualified Paths_stack as Meta import Stack.Types.BuildPlan (MiniBuildPlan(..), SnapName, renderSnapName, parseSnapName, SnapshotHash (..), trimmedSnapshotHash) import Stack.Types.Urls import Stack.Types.Compiler import Stack.Types.Docker import Stack.Types.Nix import Stack.Types.FlagName import Stack.Types.Image import Stack.Types.PackageIdentifier import Stack.Types.PackageIndex import Stack.Types.PackageName import Stack.Types.TemplateName import Stack.Types.Version import System.FilePath (takeBaseName) import System.PosixCompat.Types (UserID, GroupID, FileMode) import System.Process.Read (EnvOverride, findExecutable) -- Re-exports import Stack.Types.Config.Build as X #ifdef mingw32_HOST_OS import qualified Crypto.Hash.SHA1 as SHA1 import qualified Data.ByteString.Base16 as B16 #endif -- | The top-level Stackage configuration. data Config = Config {configStackRoot :: !(Path Abs Dir) -- ^ ~/.stack more often than not ,configWorkDir :: !(Path Rel Dir) -- ^ this allows to override .stack-work directory ,configUserConfigPath :: !(Path Abs File) -- ^ Path to user configuration file (usually ~/.stack/config.yaml) ,configBuild :: !BuildOpts -- ^ Build configuration ,configDocker :: !DockerOpts -- ^ Docker configuration ,configNix :: !NixOpts -- ^ Execution environment (e.g nix-shell) configuration ,configEnvOverride :: !(EnvSettings -> IO EnvOverride) -- ^ Environment variables to be passed to external tools ,configLocalProgramsBase :: !(Path Abs Dir) -- ^ Non-platform-specific path containing local installations ,configLocalPrograms :: !(Path Abs Dir) -- ^ Path containing local installations (mainly GHC) ,configConnectionCount :: !Int -- ^ How many concurrent connections are allowed when downloading ,configHideTHLoading :: !Bool -- ^ Hide the Template Haskell "Loading package ..." 
messages from the -- console ,configPlatform :: !Platform -- ^ The platform we're building for, used in many directory names ,configPlatformVariant :: !PlatformVariant -- ^ Variant of the platform, also used in directory names ,configGHCVariant0 :: !(Maybe GHCVariant) -- ^ The variant of GHC requested by the user. -- In most cases, use 'BuildConfig' or 'MiniConfig's version instead, -- which will have an auto-detected default. ,configUrls :: !Urls -- ^ URLs for other files used by stack. -- TODO: Better document -- e.g. The latest snapshot file. -- A build plan name (e.g. lts5.9.yaml) is appended when downloading -- the build plan actually. ,configPackageIndices :: ![PackageIndex] -- ^ Information on package indices. This is left biased, meaning that -- packages in an earlier index will shadow those in a later index. -- -- Warning: if you override packages in an index vs what's available -- upstream, you may correct your compiled snapshots, as different -- projects may have different definitions of what pkg-ver means! This -- feature is primarily intended for adding local packages, not -- overriding. Overriding is better accomplished by adding to your -- list of packages. -- -- Note that indices specified in a later config file will override -- previous indices, /not/ extend them. -- -- Using an assoc list instead of a Map to keep track of priority ,configSystemGHC :: !Bool -- ^ Should we use the system-installed GHC (on the PATH) if -- available? Can be overridden by command line options. ,configInstallGHC :: !Bool -- ^ Should we automatically install GHC if missing or the wrong -- version is available? Can be overridden by command line options. ,configSkipGHCCheck :: !Bool -- ^ Don't bother checking the GHC version or architecture. ,configSkipMsys :: !Bool -- ^ On Windows: don't use a locally installed MSYS ,configCompilerCheck :: !VersionCheck -- ^ Specifies which versions of the compiler are acceptable. ,configLocalBin :: !(Path Abs Dir) -- ^ Directory we should install executables into ,configRequireStackVersion :: !VersionRange -- ^ Require a version of stack within this range. ,configJobs :: !Int -- ^ How many concurrent jobs to run, defaults to number of capabilities ,configExtraIncludeDirs :: !(Set Text) -- ^ --extra-include-dirs arguments ,configExtraLibDirs :: !(Set Text) -- ^ --extra-lib-dirs arguments ,configConcurrentTests :: !Bool -- ^ Run test suites concurrently ,configImage :: !ImageOpts ,configTemplateParams :: !(Map Text Text) -- ^ Parameters for templates. ,configScmInit :: !(Maybe SCM) -- ^ Initialize SCM (e.g. git) when creating new projects. ,configGhcOptions :: !GhcOptions -- ^ Additional GHC options to apply to either all packages (Nothing) -- or a specific package (Just). ,configSetupInfoLocations :: ![SetupInfoLocation] -- ^ Additional SetupInfo (inline or remote) to use to find tools. ,configPvpBounds :: !PvpBounds -- ^ How PVP upper bounds should be added to packages ,configModifyCodePage :: !Bool -- ^ Force the code page to UTF-8 on Windows ,configExplicitSetupDeps :: !(Map (Maybe PackageName) Bool) -- ^ See 'explicitSetupDeps'. 'Nothing' provides the default value. ,configRebuildGhcOptions :: !Bool -- ^ Rebuild on GHC options changes ,configApplyGhcOptions :: !ApplyGhcOptions -- ^ Which packages to ghc-options on the command line apply to? ,configAllowNewer :: !Bool -- ^ Ignore version ranges in .cabal files. Funny naming chosen to -- match cabal. ,configDefaultTemplate :: !(Maybe TemplateName) -- ^ The default template to use when none is specified. 
-- (If Nothing, the default default is used.) ,configAllowDifferentUser :: !Bool -- ^ Allow users other than the stack root owner to use the stack -- installation. ,configPackageCaches :: !(IORef (Maybe (Map PackageIdentifier (PackageIndex, PackageCache)))) -- ^ In memory cache of hackage index. ,configMaybeProject :: !(Maybe (Project, Path Abs File)) } -- | Which packages to ghc-options on the command line apply to? data ApplyGhcOptions = AGOTargets -- ^ all local targets | AGOLocals -- ^ all local packages, even non-targets | AGOEverything -- ^ every package deriving (Show, Read, Eq, Ord, Enum, Bounded) instance FromJSON ApplyGhcOptions where parseJSON = withText "ApplyGhcOptions" $ \t -> case t of "targets" -> return AGOTargets "locals" -> return AGOLocals "everything" -> return AGOEverything _ -> fail $ "Invalid ApplyGhcOptions: " ++ show t -- | Information on a single package index data PackageIndex = PackageIndex { indexName :: !IndexName , indexLocation :: !IndexLocation , indexDownloadPrefix :: !Text -- ^ URL prefix for downloading packages , indexGpgVerify :: !Bool -- ^ GPG-verify the package index during download. Only applies to Git -- repositories for now. , indexRequireHashes :: !Bool -- ^ Require that hashes and package size information be available for packages in this index } deriving Show instance FromJSON (WithJSONWarnings PackageIndex) where parseJSON = withObjectWarnings "PackageIndex" $ \o -> do name <- o ..: "name" prefix <- o ..: "download-prefix" mgit <- o ..:? "git" mhttp <- o ..:? "http" loc <- case (mgit, mhttp) of (Nothing, Nothing) -> fail $ "Must provide either Git or HTTP URL for " ++ T.unpack (indexNameText name) (Just git, Nothing) -> return $ ILGit git (Nothing, Just http) -> return $ ILHttp http (Just git, Just http) -> return $ ILGitHttp git http gpgVerify <- o ..:? "gpg-verify" ..!= False reqHashes <- o ..:? "require-hashes" ..!= False return PackageIndex { indexName = name , indexLocation = loc , indexDownloadPrefix = prefix , indexGpgVerify = gpgVerify , indexRequireHashes = reqHashes } -- | Unique name for a package index newtype IndexName = IndexName { unIndexName :: ByteString } deriving (Show, Eq, Ord, Hashable, Store) indexNameText :: IndexName -> Text indexNameText = decodeUtf8 . unIndexName instance ToJSON IndexName where toJSON = toJSON . indexNameText instance FromJSON IndexName where parseJSON = withText "IndexName" $ \t -> case parseRelDir (T.unpack t) of Left e -> fail $ "Invalid index name: " ++ show e Right _ -> return $ IndexName $ encodeUtf8 t -- | Location of the package index. This ensures that at least one of Git or -- HTTP is available. 
data IndexLocation = ILGit !Text | ILHttp !Text | ILGitHttp !Text !Text deriving (Show, Eq, Ord) -- | Controls which version of the environment is used data EnvSettings = EnvSettings { esIncludeLocals :: !Bool -- ^ include local project bin directory, GHC_PACKAGE_PATH, etc , esIncludeGhcPackagePath :: !Bool -- ^ include the GHC_PACKAGE_PATH variable , esStackExe :: !Bool -- ^ set the STACK_EXE variable to the current executable name , esLocaleUtf8 :: !Bool -- ^ set the locale to C.UTF-8 } deriving (Show, Eq, Ord) data ExecOpts = ExecOpts { eoCmd :: !SpecialExecCmd , eoArgs :: ![String] , eoExtra :: !ExecOptsExtra } deriving (Show) data SpecialExecCmd = ExecCmd String | ExecGhc | ExecRunGhc deriving (Show, Eq) data ExecOptsExtra = ExecOptsPlain | ExecOptsEmbellished { eoEnvSettings :: !EnvSettings , eoPackages :: ![String] } deriving (Show) data EvalOpts = EvalOpts { evalArg :: !String , evalExtra :: !ExecOptsExtra } deriving (Show) -- | Parsed global command-line options. data GlobalOpts = GlobalOpts { globalReExecVersion :: !(Maybe String) -- ^ Expected re-exec in container version , globalDockerEntrypoint :: !(Maybe DockerEntrypoint) -- ^ Data used when stack is acting as a Docker entrypoint (internal use only) , globalLogLevel :: !LogLevel -- ^ Log level , globalConfigMonoid :: !ConfigMonoid -- ^ Config monoid, for passing into 'loadConfig' , globalResolver :: !(Maybe AbstractResolver) -- ^ Resolver override , globalCompiler :: !(Maybe CompilerVersion) -- ^ Compiler override , globalTerminal :: !Bool -- ^ We're in a terminal? , globalStackYaml :: !(Maybe FilePath) -- ^ Override project stack.yaml } deriving (Show) -- | Parsed global command-line options monoid. data GlobalOptsMonoid = GlobalOptsMonoid { globalMonoidReExecVersion :: !(First String) -- ^ Expected re-exec in container version , globalMonoidDockerEntrypoint :: !(First DockerEntrypoint) -- ^ Data used when stack is acting as a Docker entrypoint (internal use only) , globalMonoidLogLevel :: !(First LogLevel) -- ^ Log level , globalMonoidConfigMonoid :: !ConfigMonoid -- ^ Config monoid, for passing into 'loadConfig' , globalMonoidResolver :: !(First AbstractResolver) -- ^ Resolver override , globalMonoidCompiler :: !(First CompilerVersion) -- ^ Compiler override , globalMonoidTerminal :: !(First Bool) -- ^ We're in a terminal? , globalMonoidStackYaml :: !(First FilePath) -- ^ Override project stack.yaml } deriving (Show, Generic) instance Monoid GlobalOptsMonoid where mempty = memptydefault mappend = mappenddefault -- | Either an actual resolver value, or an abstract description of one (e.g., -- latest nightly). data AbstractResolver = ARLatestNightly | ARLatestLTS | ARLatestLTSMajor !Int | ARResolver !Resolver | ARGlobal deriving Show -- | Default logging level should be something useful but not crazy. defaultLogLevel :: LogLevel defaultLogLevel = LevelInfo -- | A superset of 'Config' adding information on how to build code. The reason -- for this breakdown is because we will need some of the information from -- 'Config' in order to determine the values here. data BuildConfig = BuildConfig { bcConfig :: !Config , bcResolver :: !LoadedResolver -- ^ How we resolve which dependencies to install given a set of -- packages. , bcWantedMiniBuildPlan :: !MiniBuildPlan -- ^ Compiler version wanted for this build , bcPackageEntries :: ![PackageEntry] -- ^ Local packages , bcExtraDeps :: !(Map PackageName Version) -- ^ Extra dependencies specified in configuration. 
-- -- These dependencies will not be installed to a shared location, and -- will override packages provided by the resolver. , bcExtraPackageDBs :: ![Path Abs Dir] -- ^ Extra package databases , bcStackYaml :: !(Path Abs File) -- ^ Location of the stack.yaml file. -- -- Note: if the STACK_YAML environment variable is used, this may be -- different from bcRoot </> "stack.yaml" , bcFlags :: !PackageFlags -- ^ Per-package flag overrides , bcImplicitGlobal :: !Bool -- ^ Are we loading from the implicit global stack.yaml? This is useful -- for providing better error messages. , bcGHCVariant :: !GHCVariant -- ^ The variant of GHC used to select a GHC bindist. } -- | Directory containing the project's stack.yaml file bcRoot :: BuildConfig -> Path Abs Dir bcRoot = parent . bcStackYaml -- | @"'bcRoot'/.stack-work"@ bcWorkDir :: (MonadReader env m, HasConfig env) => BuildConfig -> m (Path Abs Dir) bcWorkDir bconfig = do workDir <- getWorkDir return (bcRoot bconfig </> workDir) bcWantedCompiler :: BuildConfig -> CompilerVersion bcWantedCompiler = mbpCompilerVersion . bcWantedMiniBuildPlan -- | Configuration after the environment has been setup. data EnvConfig = EnvConfig {envConfigBuildConfig :: !BuildConfig ,envConfigCabalVersion :: !Version ,envConfigCompilerVersion :: !CompilerVersion ,envConfigPackages :: !(Map (Path Abs Dir) TreatLikeExtraDep)} instance HasBuildConfig EnvConfig where getBuildConfig = envConfigBuildConfig instance HasConfig EnvConfig instance HasPlatform EnvConfig instance HasGHCVariant EnvConfig instance HasStackRoot EnvConfig class (HasBuildConfig r, HasGHCVariant r) => HasEnvConfig r where getEnvConfig :: r -> EnvConfig instance HasEnvConfig EnvConfig where getEnvConfig = id -- | Value returned by 'Stack.Config.loadConfig'. data LoadConfig m = LoadConfig { lcConfig :: !Config -- ^ Top-level Stack configuration. , lcLoadBuildConfig :: !(Maybe CompilerVersion -> m BuildConfig) -- ^ Action to load the remaining 'BuildConfig'. , lcProjectRoot :: !(Maybe (Path Abs Dir)) -- ^ The project root directory, if in a project. } data PackageEntry = PackageEntry { peExtraDep :: !TreatLikeExtraDep , peLocation :: !PackageLocation , peSubdirs :: ![FilePath] } deriving Show -- | Should a package be treated just like an extra-dep? -- -- 'True' means, it will only be built as a dependency -- for others, and its test suite/benchmarks will not be run. -- -- Useful modifying an upstream package, see: -- https://github.com/commercialhaskell/stack/issues/219 -- https://github.com/commercialhaskell/stack/issues/386 type TreatLikeExtraDep = Bool instance ToJSON PackageEntry where toJSON pe | not (peExtraDep pe) && null (peSubdirs pe) = toJSON $ peLocation pe toJSON pe = object [ "extra-dep" .= peExtraDep pe , "location" .= peLocation pe , "subdirs" .= peSubdirs pe ] instance FromJSON (WithJSONWarnings PackageEntry) where parseJSON (String t) = do WithJSONWarnings loc _ <- parseJSON $ String t return $ noJSONWarnings (PackageEntry { peExtraDep = False , peLocation = loc , peSubdirs = [] }) parseJSON v = withObjectWarnings "PackageEntry" (\o -> PackageEntry <$> o ..:? "extra-dep" ..!= False <*> jsonSubWarnings (o ..: "location") <*> o ..:? "subdirs" ..!= []) v data PackageLocation = PLFilePath FilePath -- ^ Note that we use @FilePath@ and not @Path@s. The goal is: first parse -- the value raw, and then use @canonicalizePath@ and @parseAbsDir@. 
| PLRemote Text RemotePackageType -- ^ URL and further details deriving Show data RemotePackageType = RPTHttp | RPTGit Text -- ^ Commit | RPTHg Text -- ^ Commit deriving Show instance ToJSON PackageLocation where toJSON (PLFilePath fp) = toJSON fp toJSON (PLRemote t RPTHttp) = toJSON t toJSON (PLRemote x (RPTGit y)) = toJSON $ T.unwords ["git", x, y] toJSON (PLRemote x (RPTHg y)) = toJSON $ T.unwords ["hg", x, y] instance FromJSON (WithJSONWarnings PackageLocation) where parseJSON v = (noJSONWarnings <$> withText "PackageLocation" (\t -> http t <|> file t) v) <|> git v <|> hg v where file t = pure $ PLFilePath $ T.unpack t http t = case parseUrl $ T.unpack t of Left _ -> mzero Right _ -> return $ PLRemote t RPTHttp git = withObjectWarnings "PackageGitLocation" $ \o -> PLRemote <$> o ..: "git" <*> (RPTGit <$> o ..: "commit") hg = withObjectWarnings "PackageHgLocation" $ \o -> PLRemote <$> o ..: "hg" <*> (RPTHg <$> o ..: "commit") -- | A project is a collection of packages. We can have multiple stack.yaml -- files, but only one of them may contain project information. data Project = Project { projectUserMsg :: !(Maybe String) -- ^ A warning message to display to the user when the auto generated -- config may have issues. , projectPackages :: ![PackageEntry] -- ^ Components of the package list , projectExtraDeps :: !(Map PackageName Version) -- ^ Components of the package list referring to package/version combos, -- see: https://github.com/fpco/stack/issues/41 , projectFlags :: !PackageFlags -- ^ Per-package flag overrides , projectResolver :: !Resolver -- ^ How we resolve which dependencies to use , projectCompiler :: !(Maybe CompilerVersion) -- ^ When specified, overrides which compiler to use , projectExtraPackageDBs :: ![FilePath] } deriving Show instance ToJSON Project where toJSON p = object $ (maybe id (\cv -> (("compiler" .= cv) :)) (projectCompiler p)) ((maybe id (\msg -> (("user-message" .= msg) :)) (projectUserMsg p)) [ "packages" .= projectPackages p , "extra-deps" .= map fromTuple (Map.toList $ projectExtraDeps p) , "flags" .= projectFlags p , "resolver" .= projectResolver p , "extra-package-dbs" .= projectExtraPackageDBs p ]) data IsLoaded = Loaded | NotLoaded type LoadedResolver = ResolverThat's 'Loaded type Resolver = ResolverThat's 'NotLoaded -- TODO: once GHC 8.0 is the lowest version we support, make these into -- actual haddock comments... -- | How we resolve which dependencies to install given a set of packages. data ResolverThat's (l :: IsLoaded) where -- Use an official snapshot from the Stackage project, either an LTS -- Haskell or Stackage Nightly. ResolverSnapshot :: !SnapName -> ResolverThat's l -- Require a specific compiler version, but otherwise provide no -- build plan. Intended for use cases where end user wishes to -- specify all upstream dependencies manually, such as using a -- dependency solver. ResolverCompiler :: !CompilerVersion -> ResolverThat's l -- A custom resolver based on the given name and URL. When a URL is -- provided, it file is to be completely immutable. Filepaths are -- always loaded. This constructor is used before the build-plan has -- been loaded, as we do not yet know the custom snapshot's hash. ResolverCustom :: !Text -> !Text -> ResolverThat's 'NotLoaded -- Like 'ResolverCustom', but after loading the build-plan, so we -- have a hash. This is necessary in order to identify the location -- files are stored for the resolver. 
ResolverCustomLoaded :: !Text -> !Text -> !SnapshotHash -> ResolverThat's 'Loaded deriving instance Show (ResolverThat's k) instance ToJSON (ResolverThat's k) where toJSON x = case x of ResolverSnapshot{} -> toJSON $ resolverName x ResolverCompiler{} -> toJSON $ resolverName x ResolverCustom n l -> handleCustom n l ResolverCustomLoaded n l _ -> handleCustom n l where handleCustom n l = object [ "name" .= n , "location" .= l ] instance FromJSON (WithJSONWarnings (ResolverThat's 'NotLoaded)) where -- Strange structuring is to give consistent error messages parseJSON v@(Object _) = withObjectWarnings "Resolver" (\o -> ResolverCustom <$> o ..: "name" <*> o ..: "location") v parseJSON (String t) = either (fail . show) return (noJSONWarnings <$> parseResolverText t) parseJSON _ = fail $ "Invalid Resolver, must be Object or String" -- | Convert a Resolver into its @Text@ representation, as will be used by -- directory names resolverDirName :: LoadedResolver -> Text resolverDirName (ResolverSnapshot name) = renderSnapName name resolverDirName (ResolverCompiler v) = compilerVersionText v resolverDirName (ResolverCustomLoaded name _ hash) = "custom-" <> name <> "-" <> decodeUtf8 (trimmedSnapshotHash hash) -- | Convert a Resolver into its @Text@ representation for human -- presentation. resolverName :: ResolverThat's l -> Text resolverName (ResolverSnapshot name) = renderSnapName name resolverName (ResolverCompiler v) = compilerVersionText v resolverName (ResolverCustom name _) = "custom-" <> name resolverName (ResolverCustomLoaded name _ _) = "custom-" <> name customResolverHash :: LoadedResolver-> Maybe SnapshotHash customResolverHash (ResolverCustomLoaded _ _ hash) = Just hash customResolverHash _ = Nothing -- | Try to parse a @Resolver@ from a @Text@. Won't work for complex resolvers (like custom). parseResolverText :: MonadThrow m => Text -> m Resolver parseResolverText t | Right x <- parseSnapName t = return $ ResolverSnapshot x | Just v <- parseCompilerVersion t = return $ ResolverCompiler v | otherwise = throwM $ ParseResolverException t toResolverNotLoaded :: LoadedResolver -> Resolver toResolverNotLoaded r = case r of ResolverSnapshot s -> ResolverSnapshot s ResolverCompiler v -> ResolverCompiler v ResolverCustomLoaded n l _ -> ResolverCustom n l -- | Class for environment values which have access to the stack root class HasStackRoot env where getStackRoot :: env -> Path Abs Dir default getStackRoot :: HasConfig env => env -> Path Abs Dir getStackRoot = configStackRoot . getConfig {-# INLINE getStackRoot #-} -- | Class for environment values which have a Platform class HasPlatform env where getPlatform :: env -> Platform default getPlatform :: HasConfig env => env -> Platform getPlatform = configPlatform . getConfig {-# INLINE getPlatform #-} getPlatformVariant :: env -> PlatformVariant default getPlatformVariant :: HasConfig env => env -> PlatformVariant getPlatformVariant = configPlatformVariant . getConfig {-# INLINE getPlatformVariant #-} instance HasPlatform (Platform,PlatformVariant) where getPlatform (p,_) = p getPlatformVariant (_,v) = v -- | Class for environment values which have a GHCVariant class HasGHCVariant env where getGHCVariant :: env -> GHCVariant default getGHCVariant :: HasBuildConfig env => env -> GHCVariant getGHCVariant = bcGHCVariant . getBuildConfig {-# INLINE getGHCVariant #-} instance HasGHCVariant GHCVariant where getGHCVariant = id -- | Class for environment values that can provide a 'Config'. 
class (HasStackRoot env, HasPlatform env) => HasConfig env where getConfig :: env -> Config default getConfig :: HasBuildConfig env => env -> Config getConfig = bcConfig . getBuildConfig {-# INLINE getConfig #-} instance HasStackRoot Config instance HasPlatform Config instance HasConfig Config where getConfig = id {-# INLINE getConfig #-} -- | Class for environment values that can provide a 'BuildConfig'. class HasConfig env => HasBuildConfig env where getBuildConfig :: env -> BuildConfig instance HasStackRoot BuildConfig instance HasPlatform BuildConfig instance HasGHCVariant BuildConfig instance HasConfig BuildConfig instance HasBuildConfig BuildConfig where getBuildConfig = id {-# INLINE getBuildConfig #-} -- An uninterpreted representation of configuration options. -- Configurations may be "cascaded" using mappend (left-biased). data ConfigMonoid = ConfigMonoid { configMonoidStackRoot :: !(First (Path Abs Dir)) -- ^ See: 'configStackRoot' , configMonoidWorkDir :: !(First FilePath) -- ^ See: 'configWorkDir'. , configMonoidBuildOpts :: !BuildOptsMonoid -- ^ build options. , configMonoidDockerOpts :: !DockerOptsMonoid -- ^ Docker options. , configMonoidNixOpts :: !NixOptsMonoid -- ^ Options for the execution environment (nix-shell or container) , configMonoidConnectionCount :: !(First Int) -- ^ See: 'configConnectionCount' , configMonoidHideTHLoading :: !(First Bool) -- ^ See: 'configHideTHLoading' , configMonoidLatestSnapshotUrl :: !(First Text) -- ^ Deprecated in favour of 'urlsMonoidLatestSnapshot' , configMonoidUrls :: !UrlsMonoid -- ^ See: 'configUrls , configMonoidPackageIndices :: !(First [PackageIndex]) -- ^ See: 'configPackageIndices' , configMonoidSystemGHC :: !(First Bool) -- ^ See: 'configSystemGHC' ,configMonoidInstallGHC :: !(First Bool) -- ^ See: 'configInstallGHC' ,configMonoidSkipGHCCheck :: !(First Bool) -- ^ See: 'configSkipGHCCheck' ,configMonoidSkipMsys :: !(First Bool) -- ^ See: 'configSkipMsys' ,configMonoidCompilerCheck :: !(First VersionCheck) -- ^ See: 'configCompilerCheck' ,configMonoidRequireStackVersion :: !IntersectingVersionRange -- ^ See: 'configRequireStackVersion' ,configMonoidOS :: !(First String) -- ^ Used for overriding the platform ,configMonoidArch :: !(First String) -- ^ Used for overriding the platform ,configMonoidGHCVariant :: !(First GHCVariant) -- ^ Used for overriding the GHC variant ,configMonoidJobs :: !(First Int) -- ^ See: 'configJobs' ,configMonoidExtraIncludeDirs :: !(Set Text) -- ^ See: 'configExtraIncludeDirs' ,configMonoidExtraLibDirs :: !(Set Text) -- ^ See: 'configExtraLibDirs' ,configMonoidConcurrentTests :: !(First Bool) -- ^ See: 'configConcurrentTests' ,configMonoidLocalBinPath :: !(First FilePath) -- ^ Used to override the binary installation dir ,configMonoidImageOpts :: !ImageOptsMonoid -- ^ Image creation options. ,configMonoidTemplateParameters :: !(Map Text Text) -- ^ Template parameters. ,configMonoidScmInit :: !(First SCM) -- ^ Initialize SCM (e.g. git init) when making new projects? 
,configMonoidGhcOptions :: !GhcOptions -- ^ See 'configGhcOptions' ,configMonoidExtraPath :: ![Path Abs Dir] -- ^ Additional paths to search for executables in ,configMonoidSetupInfoLocations :: ![SetupInfoLocation] -- ^ Additional setup info (inline or remote) to use for installing tools ,configMonoidPvpBounds :: !(First PvpBounds) -- ^ See 'configPvpBounds' ,configMonoidModifyCodePage :: !(First Bool) -- ^ See 'configModifyCodePage' ,configMonoidExplicitSetupDeps :: !(Map (Maybe PackageName) Bool) -- ^ See 'configExplicitSetupDeps' ,configMonoidRebuildGhcOptions :: !(First Bool) -- ^ See 'configMonoidRebuildGhcOptions' ,configMonoidApplyGhcOptions :: !(First ApplyGhcOptions) -- ^ See 'configApplyGhcOptions' ,configMonoidAllowNewer :: !(First Bool) -- ^ See 'configMonoidAllowNewer' ,configMonoidDefaultTemplate :: !(First TemplateName) -- ^ The default template to use when none is specified. -- (If Nothing, the default default is used.) , configMonoidAllowDifferentUser :: !(First Bool) -- ^ Allow users other than the stack root owner to use the stack -- installation. } deriving (Show, Generic) instance Monoid ConfigMonoid where mempty = memptydefault mappend = mappenddefault instance FromJSON (WithJSONWarnings ConfigMonoid) where parseJSON = withObjectWarnings "ConfigMonoid" parseConfigMonoidJSON -- | Parse a partial configuration. Used both to parse both a standalone config -- file and a project file, so that a sub-parser is not required, which would interfere with -- warnings for missing fields. parseConfigMonoidJSON :: Object -> WarningParser ConfigMonoid parseConfigMonoidJSON obj = do -- Parsing 'stackRoot' from 'stackRoot'/config.yaml would be nonsensical let configMonoidStackRoot = First Nothing configMonoidWorkDir <- First <$> obj ..:? configMonoidWorkDirName configMonoidBuildOpts <- jsonSubWarnings (obj ..:? configMonoidBuildOptsName ..!= mempty) configMonoidDockerOpts <- jsonSubWarnings (obj ..:? configMonoidDockerOptsName ..!= mempty) configMonoidNixOpts <- jsonSubWarnings (obj ..:? configMonoidNixOptsName ..!= mempty) configMonoidConnectionCount <- First <$> obj ..:? configMonoidConnectionCountName configMonoidHideTHLoading <- First <$> obj ..:? configMonoidHideTHLoadingName configMonoidLatestSnapshotUrl <- First <$> obj ..:? configMonoidLatestSnapshotUrlName configMonoidUrls <- jsonSubWarnings (obj ..:? configMonoidUrlsName ..!= mempty) configMonoidPackageIndices <- First <$> jsonSubWarningsTT (obj ..:? configMonoidPackageIndicesName) configMonoidSystemGHC <- First <$> obj ..:? configMonoidSystemGHCName configMonoidInstallGHC <- First <$> obj ..:? configMonoidInstallGHCName configMonoidSkipGHCCheck <- First <$> obj ..:? configMonoidSkipGHCCheckName configMonoidSkipMsys <- First <$> obj ..:? configMonoidSkipMsysName configMonoidRequireStackVersion <- IntersectingVersionRange <$> unVersionRangeJSON <$> obj ..:? configMonoidRequireStackVersionName ..!= VersionRangeJSON anyVersion configMonoidOS <- First <$> obj ..:? configMonoidOSName configMonoidArch <- First <$> obj ..:? configMonoidArchName configMonoidGHCVariant <- First <$> obj ..:? configMonoidGHCVariantName configMonoidJobs <- First <$> obj ..:? configMonoidJobsName configMonoidExtraIncludeDirs <- obj ..:? configMonoidExtraIncludeDirsName ..!= Set.empty configMonoidExtraLibDirs <- obj ..:? configMonoidExtraLibDirsName ..!= Set.empty configMonoidConcurrentTests <- First <$> obj ..:? configMonoidConcurrentTestsName configMonoidLocalBinPath <- First <$> obj ..:? 
configMonoidLocalBinPathName configMonoidImageOpts <- jsonSubWarnings (obj ..:? configMonoidImageOptsName ..!= mempty) templates <- obj ..:? "templates" (configMonoidScmInit,configMonoidTemplateParameters) <- case templates of Nothing -> return (First Nothing,M.empty) Just tobj -> do scmInit <- tobj ..:? configMonoidScmInitName params <- tobj ..:? configMonoidTemplateParametersName return (First scmInit,fromMaybe M.empty params) configMonoidCompilerCheck <- First <$> obj ..:? configMonoidCompilerCheckName configMonoidGhcOptions <- obj ..:? configMonoidGhcOptionsName ..!= mempty extraPath <- obj ..:? configMonoidExtraPathName ..!= [] configMonoidExtraPath <- forM extraPath $ either (fail . show) return . parseAbsDir . T.unpack configMonoidSetupInfoLocations <- maybeToList <$> jsonSubWarningsT (obj ..:? configMonoidSetupInfoLocationsName) configMonoidPvpBounds <- First <$> obj ..:? configMonoidPvpBoundsName configMonoidModifyCodePage <- First <$> obj ..:? configMonoidModifyCodePageName configMonoidExplicitSetupDeps <- (obj ..:? configMonoidExplicitSetupDepsName ..!= mempty) >>= fmap Map.fromList . mapM handleExplicitSetupDep . Map.toList configMonoidRebuildGhcOptions <- First <$> obj ..:? configMonoidRebuildGhcOptionsName configMonoidApplyGhcOptions <- First <$> obj ..:? configMonoidApplyGhcOptionsName configMonoidAllowNewer <- First <$> obj ..:? configMonoidAllowNewerName configMonoidDefaultTemplate <- First <$> obj ..:? configMonoidDefaultTemplateName configMonoidAllowDifferentUser <- First <$> obj ..:? configMonoidAllowDifferentUserName return ConfigMonoid {..} where handleExplicitSetupDep :: Monad m => (Text, Bool) -> m (Maybe PackageName, Bool) handleExplicitSetupDep (name', b) = do name <- if name' == "*" then return Nothing else case parsePackageNameFromString $ T.unpack name' of Left e -> fail $ show e Right x -> return $ Just x return (name, b) configMonoidWorkDirName :: Text configMonoidWorkDirName = "work-dir" configMonoidBuildOptsName :: Text configMonoidBuildOptsName = "build" configMonoidDockerOptsName :: Text configMonoidDockerOptsName = "docker" configMonoidNixOptsName :: Text configMonoidNixOptsName = "nix" configMonoidConnectionCountName :: Text configMonoidConnectionCountName = "connection-count" configMonoidHideTHLoadingName :: Text configMonoidHideTHLoadingName = "hide-th-loading" configMonoidLatestSnapshotUrlName :: Text configMonoidLatestSnapshotUrlName = "latest-snapshot-url" configMonoidUrlsName :: Text configMonoidUrlsName = "urls" configMonoidPackageIndicesName :: Text configMonoidPackageIndicesName = "package-indices" configMonoidSystemGHCName :: Text configMonoidSystemGHCName = "system-ghc" configMonoidInstallGHCName :: Text configMonoidInstallGHCName = "install-ghc" configMonoidSkipGHCCheckName :: Text configMonoidSkipGHCCheckName = "skip-ghc-check" configMonoidSkipMsysName :: Text configMonoidSkipMsysName = "skip-msys" configMonoidRequireStackVersionName :: Text configMonoidRequireStackVersionName = "require-stack-version" configMonoidOSName :: Text configMonoidOSName = "os" configMonoidArchName :: Text configMonoidArchName = "arch" configMonoidGHCVariantName :: Text configMonoidGHCVariantName = "ghc-variant" configMonoidJobsName :: Text configMonoidJobsName = "jobs" configMonoidExtraIncludeDirsName :: Text configMonoidExtraIncludeDirsName = "extra-include-dirs" configMonoidExtraLibDirsName :: Text configMonoidExtraLibDirsName = "extra-lib-dirs" configMonoidConcurrentTestsName :: Text configMonoidConcurrentTestsName = "concurrent-tests" 
configMonoidLocalBinPathName :: Text configMonoidLocalBinPathName = "local-bin-path" configMonoidImageOptsName :: Text configMonoidImageOptsName = "image" configMonoidScmInitName :: Text configMonoidScmInitName = "scm-init" configMonoidTemplateParametersName :: Text configMonoidTemplateParametersName = "params" configMonoidCompilerCheckName :: Text configMonoidCompilerCheckName = "compiler-check" configMonoidGhcOptionsName :: Text configMonoidGhcOptionsName = "ghc-options" configMonoidExtraPathName :: Text configMonoidExtraPathName = "extra-path" configMonoidSetupInfoLocationsName :: Text configMonoidSetupInfoLocationsName = "setup-info" configMonoidPvpBoundsName :: Text configMonoidPvpBoundsName = "pvp-bounds" configMonoidModifyCodePageName :: Text configMonoidModifyCodePageName = "modify-code-page" configMonoidExplicitSetupDepsName :: Text configMonoidExplicitSetupDepsName = "explicit-setup-deps" configMonoidRebuildGhcOptionsName :: Text configMonoidRebuildGhcOptionsName = "rebuild-ghc-options" configMonoidApplyGhcOptionsName :: Text configMonoidApplyGhcOptionsName = "apply-ghc-options" configMonoidAllowNewerName :: Text configMonoidAllowNewerName = "allow-newer" configMonoidDefaultTemplateName :: Text configMonoidDefaultTemplateName = "default-template" configMonoidAllowDifferentUserName :: Text configMonoidAllowDifferentUserName = "allow-different-user" data ConfigException = ParseConfigFileException (Path Abs File) ParseException | ParseCustomSnapshotException Text ParseException | ParseResolverException Text | NoProjectConfigFound (Path Abs Dir) (Maybe Text) | UnexpectedArchiveContents [Path Abs Dir] [Path Abs File] | UnableToExtractArchive Text (Path Abs File) | BadStackVersionException VersionRange | NoMatchingSnapshot WhichSolverCmd (NonEmpty SnapName) | forall l. ResolverMismatch WhichSolverCmd (ResolverThat's l) String | ResolverPartial WhichSolverCmd Resolver String | NoSuchDirectory FilePath | ParseGHCVariantException String | BadStackRoot (Path Abs Dir) | Won'tCreateStackRootInDirectoryOwnedByDifferentUser (Path Abs Dir) (Path Abs Dir) -- ^ @$STACK_ROOT@, parent dir | UserDoesn'tOwnDirectory (Path Abs Dir) deriving Typeable instance Show ConfigException where show (ParseConfigFileException configFile exception) = concat [ "Could not parse '" , toFilePath configFile , "':\n" , show exception , "\nSee http://docs.haskellstack.org/en/stable/yaml_configuration/." ] show (ParseCustomSnapshotException url exception) = concat [ "Could not parse '" , T.unpack url , "':\n" , show exception -- FIXME: Link to docs about custom snapshots -- , "\nSee http://docs.haskellstack.org/en/stable/yaml_configuration/." ] show (ParseResolverException t) = concat [ "Invalid resolver value: " , T.unpack t , ". Possible valid values include lts-2.12, nightly-YYYY-MM-DD, ghc-7.10.2, and ghcjs-0.1.0_ghc-7.10.2. " , "See https://www.stackage.org/snapshots for a complete list." ] show (NoProjectConfigFound dir mcmd) = concat [ "Unable to find a stack.yaml file in the current directory (" , toFilePath dir , ") or its ancestors" , case mcmd of Nothing -> "" Just cmd -> "\nRecommended action: stack " ++ T.unpack cmd ] show (UnexpectedArchiveContents dirs files) = concat [ "When unpacking an archive specified in your stack.yaml file, " , "did not find expected contents. Expected: a single directory. Found: " , show ( map (toFilePath . dirname) dirs , map (toFilePath . filename) files ) ] show (UnableToExtractArchive url file) = concat [ "Archive extraction failed. 
We support tarballs and zip, couldn't handle the following URL, " , T.unpack url, " downloaded to the file ", toFilePath $ filename file ] show (BadStackVersionException requiredRange) = concat [ "The version of stack you are using (" , show (fromCabalVersion Meta.version) , ") is outside the required\n" ,"version range specified in stack.yaml (" , T.unpack (versionRangeText requiredRange) , ")." ] show (NoMatchingSnapshot whichCmd names) = concat $ [ "None of the following snapshots provides a compiler matching " , "your package(s):\n" , unlines $ map (\name -> " - " <> T.unpack (renderSnapName name)) (NonEmpty.toList names) , showOptions whichCmd ] show (ResolverMismatch whichCmd resolver errDesc) = concat [ "Resolver '" , T.unpack (resolverName resolver) , "' does not have a matching compiler to build some or all of your " , "package(s).\n" , errDesc , showOptions whichCmd ] show (ResolverPartial whichCmd resolver errDesc) = concat [ "Resolver '" , T.unpack (resolverName resolver) , "' does not have all the packages to match your requirements.\n" , unlines $ fmap (" " <>) (lines errDesc) , showOptions whichCmd ] show (NoSuchDirectory dir) = concat ["No directory could be located matching the supplied path: " ,dir ] show (ParseGHCVariantException v) = concat [ "Invalid ghc-variant value: " , v ] show (BadStackRoot stackRoot) = concat [ "Invalid stack root: '" , toFilePath stackRoot , "'. Please provide a valid absolute path." ] show (Won'tCreateStackRootInDirectoryOwnedByDifferentUser envStackRoot parentDir) = concat [ "Preventing creation of stack root '" , toFilePath envStackRoot , "'. Parent directory '" , toFilePath parentDir , "' is owned by someone else." ] show (UserDoesn'tOwnDirectory dir) = concat [ "You are not the owner of '" , toFilePath dir , "'. Aborting to protect file permissions." , "\nRetry with '--" , T.unpack configMonoidAllowDifferentUserName , "' to disable this precaution." ] instance Exception ConfigException showOptions :: WhichSolverCmd -> String showOptions whichCmd = unlines $ ["\nThis may be resolved by:"] ++ options where options = case whichCmd of IsSolverCmd -> [useResolver] IsInitCmd -> both IsNewCmd -> both both = [omitPackages, useResolver] omitPackages = " - Using '--omit-packages to exclude mismatching package(s)." useResolver = " - Using '--resolver' to specify a matching snapshot/resolver" data WhichSolverCmd = IsInitCmd | IsSolverCmd | IsNewCmd -- | Helper function to ask the environment and apply getConfig askConfig :: (MonadReader env m, HasConfig env) => m Config askConfig = liftM getConfig ask -- | Get the URL to request the information on the latest snapshots askLatestSnapshotUrl :: (MonadReader env m, HasConfig env) => m Text askLatestSnapshotUrl = asks (urlsLatestSnapshot . configUrls . getConfig) -- | Root for a specific package index configPackageIndexRoot :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> m (Path Abs Dir) configPackageIndexRoot (IndexName name) = do config <- asks getConfig dir <- parseRelDir $ S8.unpack name return (configStackRoot config </> $(mkRelDir "indices") </> dir) -- | Git repo directory for a specific package index, returns 'Nothing' if not -- a Git repo configPackageIndexRepo :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> m (Maybe (Path Abs Dir)) configPackageIndexRepo name = do indices <- asks $ configPackageIndices . 
getConfig case filter (\p -> indexName p == name) indices of [index] -> do let murl = case indexLocation index of ILGit x -> Just x ILHttp _ -> Nothing ILGitHttp x _ -> Just x case murl of Nothing -> return Nothing Just url -> do sDir <- configPackageIndexRoot name repoName <- parseRelDir $ takeBaseName $ T.unpack url let suDir = sDir </> $(mkRelDir "git-update") return $ Just $ suDir </> repoName _ -> assert False $ return Nothing -- | Location of the 00-index.cache file configPackageIndexCache :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> m (Path Abs File) configPackageIndexCache = liftM (</> $(mkRelFile "00-index.cache")) . configPackageIndexRoot -- | Location of the 00-index.tar file configPackageIndex :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> m (Path Abs File) configPackageIndex = liftM (</> $(mkRelFile "00-index.tar")) . configPackageIndexRoot -- | Location of the 00-index.tar.gz file configPackageIndexGz :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> m (Path Abs File) configPackageIndexGz = liftM (</> $(mkRelFile "00-index.tar.gz")) . configPackageIndexRoot -- | Location of a package tarball configPackageTarball :: (MonadReader env m, HasConfig env, MonadThrow m) => IndexName -> PackageIdentifier -> m (Path Abs File) configPackageTarball iname ident = do root <- configPackageIndexRoot iname name <- parseRelDir $ packageNameString $ packageIdentifierName ident ver <- parseRelDir $ versionString $ packageIdentifierVersion ident base <- parseRelFile $ packageIdentifierString ident ++ ".tar.gz" return (root </> $(mkRelDir "packages") </> name </> ver </> base) -- | @".stack-work"@ getWorkDir :: (MonadReader env m, HasConfig env) => m (Path Rel Dir) getWorkDir = configWorkDir `liftM` asks getConfig -- | Per-project work dir getProjectWorkDir :: (HasBuildConfig env, MonadReader env m) => m (Path Abs Dir) getProjectWorkDir = do bc <- asks getBuildConfig workDir <- getWorkDir return (bcRoot bc </> workDir) -- | File containing the installed cache, see "Stack.PackageDump" configInstalledCache :: (HasBuildConfig env, MonadReader env m) => m (Path Abs File) configInstalledCache = liftM (</> $(mkRelFile "installed-cache.bin")) getProjectWorkDir -- | Relative directory for the platform identifier platformOnlyRelDir :: (MonadReader env m, HasPlatform env, MonadThrow m) => m (Path Rel Dir) platformOnlyRelDir = do platform <- asks getPlatform platformVariant <- asks getPlatformVariant parseRelDir (Distribution.Text.display platform ++ platformVariantSuffix platformVariant) -- | Directory containing snapshots snapshotsDir :: (MonadReader env m, HasConfig env, HasGHCVariant env, MonadThrow m) => m (Path Abs Dir) snapshotsDir = do config <- asks getConfig platform <- platformGhcRelDir return $ configStackRoot config </> $(mkRelDir "snapshots") </> platform -- | Installation root for dependencies installationRootDeps :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir) installationRootDeps = do config <- asks getConfig -- TODO: also useShaPathOnWindows here, once #1173 is resolved. 
psc <- platformSnapAndCompilerRel return $ configStackRoot config </> $(mkRelDir "snapshots") </> psc -- | Installation root for locals installationRootLocal :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir) installationRootLocal = do bc <- asks getBuildConfig psc <- useShaPathOnWindows =<< platformSnapAndCompilerRel return $ getProjectWorkDir bc </> $(mkRelDir "install") </> psc -- | Path for platform followed by snapshot name followed by compiler -- name. platformSnapAndCompilerRel :: (MonadReader env m, HasEnvConfig env, MonadThrow m) => m (Path Rel Dir) platformSnapAndCompilerRel = do bc <- asks getBuildConfig platform <- platformGhcRelDir name <- parseRelDir $ T.unpack $ resolverDirName $ bcResolver bc ghc <- compilerVersionDir useShaPathOnWindows (platform </> name </> ghc) -- | Relative directory for the platform identifier platformGhcRelDir :: (MonadReader env m, HasPlatform env, HasGHCVariant env, MonadThrow m) => m (Path Rel Dir) platformGhcRelDir = do platform <- asks getPlatform platformVariant <- asks getPlatformVariant ghcVariant <- asks getGHCVariant parseRelDir (mconcat [ Distribution.Text.display platform , platformVariantSuffix platformVariant , ghcVariantSuffix ghcVariant ]) -- | This is an attempt to shorten stack paths on Windows to decrease our -- chances of hitting 260 symbol path limit. The idea is to calculate -- SHA1 hash of the path used on other architectures, encode with base -- 16 and take first 8 symbols of it. useShaPathOnWindows :: MonadThrow m => Path Rel Dir -> m (Path Rel Dir) useShaPathOnWindows = #ifdef mingw32_HOST_OS parseRelDir . S8.unpack . S8.take 8 . B16.encode . SHA1.hash . encodeUtf8 . T.pack . toFilePath #else return #endif compilerVersionDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Rel Dir) compilerVersionDir = do compilerVersion <- asks (envConfigCompilerVersion . 
getEnvConfig) parseRelDir $ case compilerVersion of GhcVersion version -> versionString version GhcjsVersion {} -> compilerVersionString compilerVersion -- | Package database for installing dependencies into packageDatabaseDeps :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir) packageDatabaseDeps = do root <- installationRootDeps return $ root </> $(mkRelDir "pkgdb") -- | Package database for installing local packages into packageDatabaseLocal :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir) packageDatabaseLocal = do root <- installationRootLocal return $ root </> $(mkRelDir "pkgdb") -- | Extra package databases packageDatabaseExtra :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m [Path Abs Dir] packageDatabaseExtra = do bc <- asks getBuildConfig return $ bcExtraPackageDBs bc -- | Directory for holding flag cache information flagCacheLocal :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir) flagCacheLocal = do root <- installationRootLocal return $ root </> $(mkRelDir "flag-cache") -- | Where to store mini build plan caches configMiniBuildPlanCache :: (MonadThrow m, MonadReader env m, HasConfig env, HasGHCVariant env) => SnapName -> m (Path Abs File) configMiniBuildPlanCache name = do root <- asks getStackRoot platform <- platformGhcRelDir file <- parseRelFile $ T.unpack (renderSnapName name) ++ ".cache" -- Yes, cached plans differ based on platform return (root </> $(mkRelDir "build-plan-cache") </> platform </> file) -- | Suffix applied to an installation root to get the bin dir bindirSuffix :: Path Rel Dir bindirSuffix = $(mkRelDir "bin") -- | Suffix applied to an installation root to get the doc dir docDirSuffix :: Path Rel Dir docDirSuffix = $(mkRelDir "doc") -- | Where HPC reports and tix files get stored. hpcReportDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir) hpcReportDir = do root <- installationRootLocal return $ root </> $(mkRelDir "hpc") -- | Get the extra bin directories (for the PATH). Puts more local first -- -- Bool indicates whether or not to include the locals extraBinDirs :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Bool -> [Path Abs Dir]) extraBinDirs = do deps <- installationRootDeps local <- installationRootLocal return $ \locals -> if locals then [local </> bindirSuffix, deps </> bindirSuffix] else [deps </> bindirSuffix] -- | Get the minimal environment override, useful for just calling external -- processes like git or ghc getMinimalEnvOverride :: (MonadReader env m, HasConfig env, MonadIO m) => m EnvOverride getMinimalEnvOverride = do config <- asks getConfig liftIO $ configEnvOverride config minimalEnvSettings minimalEnvSettings :: EnvSettings minimalEnvSettings = EnvSettings { esIncludeLocals = False , esIncludeGhcPackagePath = False , esStackExe = False , esLocaleUtf8 = False } getWhichCompiler :: (MonadReader env m, HasEnvConfig env) => m WhichCompiler getWhichCompiler = asks (whichCompiler . envConfigCompilerVersion . getEnvConfig) -- | Get the path for the given compiler ignoring any local binaries. 
-- -- https://github.com/commercialhaskell/stack/issues/1052 getCompilerPath :: (MonadIO m, MonadThrow m, MonadReader env m, HasConfig env) => WhichCompiler -> m (Path Abs File) getCompilerPath wc = do config <- asks getConfig eoWithoutLocals <- liftIO $ configEnvOverride config minimalEnvSettings { esLocaleUtf8 = True } join (findExecutable eoWithoutLocals (compilerExeName wc)) data ProjectAndConfigMonoid = ProjectAndConfigMonoid !Project !ConfigMonoid instance FromJSON (WithJSONWarnings ProjectAndConfigMonoid) where parseJSON = withObjectWarnings "ProjectAndConfigMonoid" $ \o -> do dirs <- jsonSubWarningsTT (o ..:? "packages") ..!= [packageEntryCurrDir] extraDeps' <- o ..:? "extra-deps" ..!= [] extraDeps <- case partitionEithers $ goDeps extraDeps' of ([], x) -> return $ Map.fromList x (errs, _) -> fail $ unlines errs flags <- o ..:? "flags" ..!= mempty resolver <- jsonSubWarnings (o ..: "resolver") compiler <- o ..:? "compiler" msg <- o ..:? "user-message" config <- parseConfigMonoidJSON o extraPackageDBs <- o ..:? "extra-package-dbs" ..!= [] let project = Project { projectUserMsg = msg , projectPackages = dirs , projectExtraDeps = extraDeps , projectFlags = flags , projectResolver = resolver , projectCompiler = compiler , projectExtraPackageDBs = extraPackageDBs } return $ ProjectAndConfigMonoid project config where goDeps = map toSingle . Map.toList . Map.unionsWith Set.union . map toMap where toMap i = Map.singleton (packageIdentifierName i) (Set.singleton (packageIdentifierVersion i)) toSingle (k, s) = case Set.toList s of [x] -> Right (k, x) xs -> Left $ concat [ "Multiple versions for package " , packageNameString k , ": " , unwords $ map versionString xs ] -- | A PackageEntry for the current directory, used as a default packageEntryCurrDir :: PackageEntry packageEntryCurrDir = PackageEntry { peExtraDep = False , peLocation = PLFilePath "." , peSubdirs = [] } -- | A software control system. data SCM = Git deriving (Show) instance FromJSON SCM where parseJSON v = do s <- parseJSON v case s of "git" -> return Git _ -> fail ("Unknown or unsupported SCM: " <> s) instance ToJSON SCM where toJSON Git = toJSON ("git" :: Text) -- | A variant of the platform, used to differentiate Docker builds from host data PlatformVariant = PlatformVariantNone | PlatformVariant String -- | Render a platform variant to a String suffix. platformVariantSuffix :: PlatformVariant -> String platformVariantSuffix PlatformVariantNone = "" platformVariantSuffix (PlatformVariant v) = "-" ++ v -- | Specialized bariant of GHC (e.g. libgmp4 or integer-simple) data GHCVariant = GHCStandard -- ^ Standard bindist | GHCGMP4 -- ^ Bindist that supports libgmp4 (centos66) | GHCArch -- ^ Bindist built on Arch Linux (bleeding-edge) | GHCIntegerSimple -- ^ Bindist that uses integer-simple | GHCCustom String -- ^ Other bindists deriving (Show) instance FromJSON GHCVariant where -- Strange structuring is to give consistent error messages parseJSON = withText "GHCVariant" (either (fail . show) return . parseGHCVariant . T.unpack) -- | Render a GHC variant to a String. ghcVariantName :: GHCVariant -> String ghcVariantName GHCStandard = "standard" ghcVariantName GHCGMP4 = "gmp4" ghcVariantName GHCArch = "arch" ghcVariantName GHCIntegerSimple = "integersimple" ghcVariantName (GHCCustom name) = "custom-" ++ name -- | Render a GHC variant to a String suffix. ghcVariantSuffix :: GHCVariant -> String ghcVariantSuffix GHCStandard = "" ghcVariantSuffix v = "-" ++ ghcVariantName v -- | Parse GHC variant from a String. 
parseGHCVariant :: (MonadThrow m) => String -> m GHCVariant parseGHCVariant s = case stripPrefix "custom-" s of Just name -> return (GHCCustom name) Nothing | s == "" -> return GHCStandard | s == "standard" -> return GHCStandard | s == "gmp4" -> return GHCGMP4 | s == "arch" -> return GHCArch | s == "integersimple" -> return GHCIntegerSimple | otherwise -> return (GHCCustom s) -- | Information for a file to download. data DownloadInfo = DownloadInfo { downloadInfoUrl :: Text , downloadInfoContentLength :: Maybe Int , downloadInfoSha1 :: Maybe ByteString } deriving (Show) instance FromJSON (WithJSONWarnings DownloadInfo) where parseJSON = withObjectWarnings "DownloadInfo" parseDownloadInfoFromObject -- | Parse JSON in existing object for 'DownloadInfo' parseDownloadInfoFromObject :: Object -> WarningParser DownloadInfo parseDownloadInfoFromObject o = do url <- o ..: "url" contentLength <- o ..:? "content-length" sha1TextMay <- o ..:? "sha1" return DownloadInfo { downloadInfoUrl = url , downloadInfoContentLength = contentLength , downloadInfoSha1 = fmap encodeUtf8 sha1TextMay } data VersionedDownloadInfo = VersionedDownloadInfo { vdiVersion :: Version , vdiDownloadInfo :: DownloadInfo } deriving Show instance FromJSON (WithJSONWarnings VersionedDownloadInfo) where parseJSON = withObjectWarnings "VersionedDownloadInfo" $ \o -> do version <- o ..: "version" downloadInfo <- parseDownloadInfoFromObject o return VersionedDownloadInfo { vdiVersion = version , vdiDownloadInfo = downloadInfo } data SetupInfo = SetupInfo { siSevenzExe :: Maybe DownloadInfo , siSevenzDll :: Maybe DownloadInfo , siMsys2 :: Map Text VersionedDownloadInfo , siGHCs :: Map Text (Map Version DownloadInfo) , siGHCJSs :: Map Text (Map CompilerVersion DownloadInfo) , siStack :: Map Text (Map Version DownloadInfo) } deriving Show instance FromJSON (WithJSONWarnings SetupInfo) where parseJSON = withObjectWarnings "SetupInfo" $ \o -> do siSevenzExe <- jsonSubWarningsT (o ..:? "sevenzexe-info") siSevenzDll <- jsonSubWarningsT (o ..:? "sevenzdll-info") siMsys2 <- jsonSubWarningsT (o ..:? "msys2" ..!= mempty) siGHCs <- jsonSubWarningsTT (o ..:? "ghc" ..!= mempty) siGHCJSs <- jsonSubWarningsTT (o ..:? "ghcjs" ..!= mempty) siStack <- jsonSubWarningsTT (o ..:? "stack" ..!= mempty) return SetupInfo {..} -- | For @siGHCs@ and @siGHCJSs@ fields maps are deeply merged. -- For all fields the values from the last @SetupInfo@ win. instance Monoid SetupInfo where mempty = SetupInfo { siSevenzExe = Nothing , siSevenzDll = Nothing , siMsys2 = Map.empty , siGHCs = Map.empty , siGHCJSs = Map.empty , siStack = Map.empty } mappend l r = SetupInfo { siSevenzExe = siSevenzExe r <|> siSevenzExe l , siSevenzDll = siSevenzDll r <|> siSevenzDll l , siMsys2 = siMsys2 r <> siMsys2 l , siGHCs = Map.unionWith (<>) (siGHCs r) (siGHCs l) , siGHCJSs = Map.unionWith (<>) (siGHCJSs r) (siGHCJSs l) , siStack = Map.unionWith (<>) (siStack l) (siStack r) } -- | Remote or inline 'SetupInfo' data SetupInfoLocation = SetupInfoFileOrURL String | SetupInfoInline SetupInfo deriving (Show) instance FromJSON (WithJSONWarnings SetupInfoLocation) where parseJSON v = (noJSONWarnings <$> withText "SetupInfoFileOrURL" (pure . SetupInfoFileOrURL . 
T.unpack) v) <|> inline where inline = do WithJSONWarnings si w <- parseJSON v return $ WithJSONWarnings (SetupInfoInline si) w -- | How PVP bounds should be added to .cabal files data PvpBounds = PvpBoundsNone | PvpBoundsUpper | PvpBoundsLower | PvpBoundsBoth deriving (Show, Read, Eq, Typeable, Ord, Enum, Bounded) pvpBoundsText :: PvpBounds -> Text pvpBoundsText PvpBoundsNone = "none" pvpBoundsText PvpBoundsUpper = "upper" pvpBoundsText PvpBoundsLower = "lower" pvpBoundsText PvpBoundsBoth = "both" parsePvpBounds :: Text -> Either String PvpBounds parsePvpBounds t = case Map.lookup t m of Nothing -> Left $ "Invalid PVP bounds: " ++ T.unpack t Just x -> Right x where m = Map.fromList $ map (pvpBoundsText &&& id) [minBound..maxBound] instance ToJSON PvpBounds where toJSON = toJSON . pvpBoundsText instance FromJSON PvpBounds where parseJSON = withText "PvpBounds" (either fail return . parsePvpBounds) -- | Provide an explicit list of package dependencies when running a custom Setup.hs explicitSetupDeps :: (MonadReader env m, HasConfig env) => PackageName -> m Bool explicitSetupDeps name = do m <- asks $ configExplicitSetupDeps . getConfig return $ -- Yes there are far cleverer ways to write this. I honestly consider -- the explicit pattern matching much easier to parse at a glance. case Map.lookup (Just name) m of Just b -> b Nothing -> case Map.lookup Nothing m of Just b -> b Nothing -> False -- default value -- | Data passed into Docker container for the Docker entrypoint's use data DockerEntrypoint = DockerEntrypoint { deUser :: !(Maybe DockerUser) -- ^ UID/GID/etc of host user, if we wish to perform UID/GID switch in container } deriving (Read,Show) -- | Docker host user info data DockerUser = DockerUser { duUid :: UserID -- ^ uid , duGid :: GroupID -- ^ gid , duGroups :: [GroupID] -- ^ Supplemantal groups , duUmask :: FileMode -- ^ File creation mask } } deriving (Read,Show) -- TODO: See section of -- https://github.com/commercialhaskell/stack/issues/1265 about -- rationalizing the config. It would also be nice to share more code. -- For now it's more convenient just to extend this type. However, it's -- unpleasant that it has overlap with both 'Project' and 'Config'. data CustomSnapshot = CustomSnapshot { csCompilerVersion :: !(Maybe CompilerVersion) , csPackages :: !(Set PackageIdentifier) , csDropPackages :: !(Set PackageName) , csFlags :: !PackageFlags , csGhcOptions :: !GhcOptions } instance FromJSON (WithJSONWarnings (CustomSnapshot, Maybe Resolver)) where parseJSON = withObjectWarnings "CustomSnapshot" $ \o -> (,) <$> (CustomSnapshot <$> o ..:? "compiler" <*> o ..:? "packages" ..!= mempty <*> o ..:? "drop-packages" ..!= mempty <*> o ..:? "flags" ..!= mempty <*> o ..:? configMonoidGhcOptionsName ..!= mempty) <*> jsonSubWarningsT (o ..:? "resolver") newtype GhcOptions = GhcOptions { unGhcOptions :: Map (Maybe PackageName) [Text] } deriving Show instance FromJSON GhcOptions where parseJSON val = do ghcOptions <- parseJSON val fmap (GhcOptions . Map.fromList) $ mapM handleGhcOptions $ Map.toList ghcOptions where handleGhcOptions :: Monad m => (Text, Text) -> m (Maybe PackageName, [Text]) handleGhcOptions (name', vals') = do name <- if name' == "*" then return Nothing else case parsePackageNameFromString $ T.unpack name' of Left e -> fail $ show e Right x -> return $ Just x case parseArgs Escaping vals' of Left e -> fail e Right vals -> return (name, map T.pack vals) instance Monoid GhcOptions where mempty = GhcOptions mempty -- FIXME: Should GhcOptions really monoid like this? 
Keeping it this -- way preserves the behavior of the ConfigMonoid. However, this -- means there isn't the ability to fully override snapshot -- ghc-options in the same way there is for flags. Do we want to -- change the semantics here? (particularly for extensible -- snapshots) mappend (GhcOptions l) (GhcOptions r) = GhcOptions (Map.unionWith (++) l r) ghcOptionsFor :: PackageName -> GhcOptions -> [Text] ghcOptionsFor name (GhcOptions mp) = M.findWithDefault [] Nothing mp ++ M.findWithDefault [] (Just name) mp newtype PackageFlags = PackageFlags { unPackageFlags :: Map PackageName (Map FlagName Bool) } deriving Show instance FromJSON PackageFlags where parseJSON val = PackageFlags <$> parseJSON val instance ToJSON PackageFlags where toJSON = toJSON . unPackageFlags instance Monoid PackageFlags where mempty = PackageFlags mempty mappend (PackageFlags l) (PackageFlags r) = PackageFlags (Map.unionWith Map.union l r)
sjakobi/stack
src/Stack/Types/Config.hs
bsd-3-clause
71,171
0
24
17,039
13,721
7,389
6,332
-1
-1
{-# LANGUAGE OverloadedStrings, TemplateHaskell #-}

import Web.Scotty

import Control.Applicative ((<|>))
import Data.Maybe (fromJust)
import Control.Monad (liftM)
import Control.Monad.Trans (liftIO)
import Data.List (find)
import Data.Monoid (mconcat)
import Data.Text.Lazy (pack)
import System.Environment (getEnvironment)
import Text.Hamlet (shamletFile)
import Text.Blaze.Html.Renderer.Text (renderHtml)

import Channels (DiffChannel (..), channels, jobset)

main = do
  env <- getEnvironment
  let port = maybe 3000 read $ lookup "PORT" env
  scotty port $ do
    get "/:channel" $ do
      allChannels <- liftIO $ channels
      channelName <- param "channel"
      let mainChannel = findChannel channelName allChannels
      html $ renderHtml $(shamletFile "index.hamlet")

findChannel :: String -> [DiffChannel] -> DiffChannel
findChannel channelName chans =
    fromJust $ lookup channelName <|> lookup "nixos-unstable" <|> Just (head chans)
  where lookup n = find (\c -> dname c == n) chans
domenkozar/howoldis
Main.hs
bsd-3-clause
1,097
0
17
260
322
170
152
28
1
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}

--
-- Quasiquote Ivory areas.
--
-- Copyright (C) 2014, Galois, Inc.
-- All rights reserved.
--

module Ivory.Language.Syntax.Concrete.QQ.AreaQQ
  ( fromArea
  , fromAreaImport
  ) where

import Language.Haskell.TH hiding (Exp, Stmt, Type)

import Ivory.Language.Syntax.Concrete.ParseAST
import Ivory.Language.Syntax.Concrete.QQ.Common
import Ivory.Language.Syntax.Concrete.QQ.ExprQQ
import Ivory.Language.Syntax.Concrete.QQ.TypeQQ

import qualified Ivory.Language.Init    as I
import qualified Ivory.Language.MemArea as I

fromArea :: AreaDef -> Q [Dec]
fromArea a = do
    (t, _) <- runToQ (fromType (areaType a))
    let ty = AppT (ConT (if c then ''I.ConstMemArea else ''I.MemArea)) t
    return [SigD (mkName nm) ty, d]
  where
  c    = areaConst a
  nm   = allocRefVar (areaInit a)
  d    = ValD (VarP $ mkName nm) (NormalB imp) []
  cntr = VarE (if c then 'I.constArea else 'I.area)
  imp  = AppE (AppE cntr (LitE (StringL nm))) ins
  conIns z = if c then z else AppE (ConE 'Just) z
  -- conIns = if c then ins else AppE (ConE 'Just) ins
  ins = case areaInit a of
    AllocBase _ mi -> case mi of
      Nothing -> if c then VarE 'I.izero else (ConE 'Nothing)
      Just i  -> conIns (AppE (VarE 'I.ival) (toExp [] i))
    AllocArr _ i -> case i of
      [] -> if c then VarE 'I.izero else (ConE 'Nothing)
      es -> let mkIval = AppE (VarE 'I.ival) in
            let is  = map (toExp []) es in
            let lis = ListE (map mkIval is) in
            conIns (AppE (VarE 'I.iarray) lis)
    AllocStruct _ i -> case i of
      Empty -> if c then AppE (VarE 'I.istruct) (ListE []) else (ConE 'Nothing)
      MacroInit (fn,args) ->
        let es = map (toExp []) args in
        conIns (callit (mkVar fn) es)
      FieldInits fieldAssigns ->
        let es = map (toExp [] . snd) fieldAssigns in
        let ls = ListE $ map assign (zip (fst $ unzip fieldAssigns) es) in
        conIns (AppE (VarE 'I.istruct) ls)
        where
        assign (fnm, e) = InfixE (Just $ mkVar fnm) (VarE '(I..=)) (Just $ mkIval e)
        mkIval = AppE (VarE 'I.ival)

fromAreaImport :: AreaImportDef -> Q [Dec]
fromAreaImport a = do
    (t, _) <- runToQ (fromType (aiType a))
    let ty = AppT (ConT (if c then ''I.ConstMemArea else ''I.MemArea)) t
    return [SigD (mkName nm) ty, d]
  where
  c    = aiConst a
  nm   = aiSym a
  d    = ValD (VarP $ mkName nm) (NormalB imp) []
  cntr = VarE (if c then 'I.importConstArea else 'I.importArea)
  imp  = AppE (AppE cntr (LitE (StringL nm))) (LitE $ StringL (aiFile a))
GaloisInc/ivory
ivory/src/Ivory/Language/Syntax/Concrete/QQ/AreaQQ.hs
bsd-3-clause
2,966
0
25
1,037
1,092
578
514
60
13
{-# LANGUAGE FlexibleInstances     #-}
{-# LANGUAGE MultiParamTypeClasses #-}

module FPNLA.Matrix.Instances.RepaMatrix (RepaMatrix, RepaVector) where

import Control.DeepSeq (NFData (rnf))
import FPNLA.Matrix (Matrix (generate_m, fromList_m, transpose_m, dim_m, elem_m, map_m, zipWith_m, subMatrix_m, fromBlocks_m),
                     MatrixVector (row_vm, col_vm, fromCols_vm),
                     Vector (generate_v, fromList_v, concat_v, elem_v, length_v, foldr_v, map_v, zipWith_v))

import Data.Array.Repa ((:.) (..), All (..), Any (..), Array, D, DIM1, DIM2, Shape, Source, Z (..),
                        append, computeUnboxedS, deepSeqArray, delay, extend, extent, extract,
                        fromFunction, fromFunction, fromListUnboxed, index, map, size, slice,
                        toList, transpose, zipWith)
import Data.Array.Repa.Eval ()
import Data.Array.Repa.Repr.Unboxed (Unbox ())
import Data.Foldable (foldr')
import Prelude (Int, Show (..), foldr1, length, ($), (.))
import qualified Prelude as P hiding (Show)

type RepaVector = Array D DIM1
type RepaMatrix = Array D DIM2

instance (Shape sh, Show sh, Show e, Unbox e) => Show (Array D sh e) where
    show = show . computeUnboxedS

instance (Source r e, Shape sh, NFData e) => NFData (Array r sh e) where
    rnf m = deepSeqArray m ()

instance (Unbox e) => Vector RepaVector e where
    generate_v size gen = fromFunction (Z:.size) (\(Z:.pos) -> gen pos)
    fromList_v l = delay $ fromListUnboxed (Z :. length l) l
    concat_v = foldr1 append
    elem_v pos v = index v (Z:.pos)
    length_v v = size $ extent v
    --foldr_v cons zero v = foldAllS cons zero v -- foldAllS does not work because of its type
    foldr_v cons zero v = foldr' cons zero (toList v) -- it is what it is...
    map_v = map
    zipWith_v = zipWith

instance (Unbox e) => Matrix RepaMatrix e where
    generate_m rows cols gen = fromFunction (Z:.rows:.cols) (\(Z:.i:.j) -> gen i j)
    fromList_m m n l = delay $ fromListUnboxed (Z:.m:.n) l
    transpose_m = transpose
    dim_m = (\(Z:.rows:.cols) -> (rows, cols)) . extent
    elem_m i j m = index m (Z:.i:.j)
    map_m = map
    zipWith_m = zipWith
    subMatrix_m posI posJ cantRows cantCols = extract (Z:.posI:.posJ) (Z:.cantRows:.cantCols)
    fromBlocks_m = transpose . foldr1 append . P.map (transpose . foldr1 append)
    --toBlocks_m = -- the default is kept, using submatrices (extract)

instance (Unbox e) => MatrixVector RepaMatrix RepaVector e where
    row_vm pos m = slice m (Any:.pos:.All)
    col_vm pos m = slice m (Any:.pos)
    fromCols_vm = foldr1 append . P.map (transpose . extend (Any:.(1::Int):.All))
    --toCols_vm = -- the default is kept, using submatrices (extract)
mauroblanco/fpnla-examples
src/FPNLA/Matrix/Instances/RepaMatrix.hs
bsd-3-clause
3,173
0
14
1,080
947
549
398
49
0
{-# LANGUAGE RecordWildCards #-}
module Inspection.Data.TaskQueue
  ( TaskQueue(..)
  , addTask
  , selectTasks
  , singleTask
  , completedTasks
  , difference
  ) where

import Prelude ()
import MyLittlePrelude

import qualified Data.Set as Set

import Data.Aeson.Extra (ToJSON, FromJSON)

import Inspection.Data
import Inspection.BuildMatrix as BuildMatrix

newtype TaskQueue = TaskQueue (Set Task)
  deriving (Show, Eq, Generic, Typeable)

instance Monoid TaskQueue where
  mempty = TaskQueue mempty
  mappend (TaskQueue a) (TaskQueue b) = TaskQueue (mappend a b)

instance ToJSON TaskQueue
instance FromJSON TaskQueue

addTask :: Task -> TaskQueue -> TaskQueue
addTask task (TaskQueue queue) = TaskQueue $ Set.insert task queue

difference :: TaskQueue -> TaskQueue -> TaskQueue
difference (TaskQueue a) (TaskQueue b) = TaskQueue $ Set.difference a b

selectTasks :: Maybe Compiler -> Maybe (ReleaseTag Compiler)
            -> Maybe PackageName -> Maybe (ReleaseTag Package)
            -> TaskQueue -> TaskQueue
selectTasks mCompiler mCompilerVersion mPackageName mPackageVersion (TaskQueue queue) =
    TaskQueue $ Set.filter match queue
  where
    match Task{ taskBuildConfig = BuildConfig{..}
              , taskTarget = Target packageName packageVersion } =
      and (fromMaybe True <$>
             [ fmap (buildConfigCompiler ==) mCompiler
             , fmap (buildConfigCompilerRelease ==) mCompilerVersion
             , fmap (packageName ==) mPackageName
             , fmap (packageVersion ==) mPackageVersion
             ])

singleTask :: TaskQueue -> Maybe Task
singleTask (TaskQueue queue) = fst <$> Set.minView queue

completedTasks :: BuildMatrix -> TaskQueue
completedTasks matrix =
    TaskQueue $ Set.map fromEntry $ BuildMatrix.entries Nothing Nothing Nothing Nothing matrix
  where
    fromEntry :: Entry -> Task
    fromEntry Entry{..} = Task entryBuildConfig entryTarget
zudov/purescript-inspection
src/Inspection/Data/TaskQueue.hs
bsd-3-clause
2,034
0
12
520
544
287
257
48
1