The dataset has the following columns (each row below shows the `code` cell as a block of Haskell source, followed by a one-line summary of the remaining columns):

| column | type | range |
|---|---|---|
| code | string | lengths 5 to 1.03M |
| repo_name | string | lengths 5 to 90 |
| path | string | lengths 4 to 158 |
| license | string | 15 classes |
| size | int64 | 5 to 1.03M |
| n_ast_errors | int64 | 0 to 53.9k |
| ast_max_depth | int64 | 2 to 4.17k |
| n_whitespaces | int64 | 0 to 365k |
| n_ast_nodes | int64 | 3 to 317k |
| n_ast_terminals | int64 | 1 to 171k |
| n_ast_nonterminals | int64 | 1 to 146k |
| loc | int64 | -1 to 37.3k |
| cycloplexity | int64 | -1 to 1.31k |
----------------------------------------------------------------------------
-- |
-- Module : ModuleWithMultilineImportList
-- Copyright : (c) Sergey Vinokurov 2015
-- License : BSD3-style (see LICENSE)
-- Maintainer : serg.foo@gmail.com
----------------------------------------------------------------------------
module ModuleWithMultilineImportList where
import Imported1 ( foo
, bar
)
-- somewhat degenerate but still syntactically valid import
import
Imported2
( foo2
, bar2
)
baz :: a -> a
baz x = x
| repo_name: sergv/tags-server | path: test-data/0001module_with_imports/ModuleWithMultilineImportList.hs | license: bsd-3-clause | size: 563 | n_ast_errors: 0 | ast_max_depth: 5 | n_whitespaces: 116 | n_ast_nodes: 51 | n_ast_terminals: 34 | n_ast_nonterminals: 17 | loc: 9 | cycloplexity: 1 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE QuasiQuotes, TemplateHaskell, TypeFamilies #-}
{-# LANGUAGE OverloadedStrings, GADTs, FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving, MultiParamTypeClasses #-}
module DirectoryServer where
import Network hiding (accept, sClose)
import Network.Socket hiding (send, recv, sendTo, recvFrom, Broadcast)
import Network.Socket.ByteString
import Data.ByteString.Char8 (pack, unpack)
import System.Environment
import System.IO
import Control.Concurrent
import Control.Concurrent.STM
import Control.Exception
import Control.Monad (forever, when, join)
import Data.List.Split
import Data.Word
import Text.Printf (printf)
import System.Directory
import Data.Map (Map) -- from the `containers` library
import Data.Time
import System.Random
import qualified Data.Map as M
type Uuid = Int
type Address = String
type Port = String
type Filename = String
type Timestamp = IO String
--The DirectoryServer data type lets me pass address and port details around easily
data DirectoryServer = DirectoryServer
{ address :: String
, port :: String
, filemappings :: TVar (M.Map Filename Filemapping)
, fileservers :: TVar (M.Map Uuid Fileserver)
, fileservercount :: TVar Int
}
--Constructor
newDirectoryServer :: String -> String -> IO DirectoryServer
newDirectoryServer address port = atomically $ do DirectoryServer <$> return address <*> return port <*> newTVar M.empty <*> newTVar M.empty <*> newTVar 0
addFilemapping :: DirectoryServer -> Filename -> Uuid -> Address -> Port -> Timestamp -> STM ()
addFilemapping DirectoryServer{..} filename uuid fmaddress fmport timestamp = do
fm <- newFilemapping filename uuid fmaddress fmport timestamp
modifyTVar filemappings . M.insert filename $ fm
addFileserver :: DirectoryServer -> Uuid -> Address -> Port -> STM ()
addFileserver DirectoryServer{..} uuid fsaddress fsport = do
fs <- newFileserver uuid fsaddress fsport
modifyTVar fileservers . M.insert uuid $ fs
lookupFilemapping :: DirectoryServer -> Filename -> STM (Maybe Filemapping)
lookupFilemapping DirectoryServer{..} filename = M.lookup filename <$> readTVar filemappings
lookupFileserver :: DirectoryServer -> Uuid -> STM (Maybe Fileserver)
lookupFileserver DirectoryServer{..} uuid = M.lookup uuid <$> readTVar fileservers
data Filemapping = Filemapping
{ fmfilename :: Filename
, fmuuid :: Uuid
, fmaddress :: Address
, fmport :: Port
, fmtimestamp :: Timestamp
}
newFilemapping :: Filename -> Uuid -> Address -> Port -> Timestamp -> STM Filemapping
newFilemapping fmfilename fmuuid fmaddress fmport fmtimestamp = Filemapping <$> return fmfilename <*> return fmuuid <*> return fmaddress <*> return fmport <*> return fmtimestamp
getFilemappinguuid :: Filemapping -> Uuid
getFilemappinguuid Filemapping{..} = fmuuid
getFilemappingaddress :: Filemapping -> Address
getFilemappingaddress Filemapping{..} = fmaddress
getFilemappingport :: Filemapping -> Port
getFilemappingport Filemapping{..} = fmport
getFilemappingtimestamp :: Filemapping -> Timestamp
getFilemappingtimestamp Filemapping{..} = fmtimestamp
data Fileserver = Fileserver
{ fsuuid :: Uuid
, fsaddress :: HostName
, fsport :: Port
}
newFileserver :: Uuid -> Address -> Port -> STM Fileserver
newFileserver fsuuid fsaddress fsport = Fileserver <$> return fsuuid <*> return fsaddress <*> return fsport
getFileserveraddress :: Fileserver -> HostName
getFileserveraddress Fileserver{..} = fsaddress
getFileserverport :: Fileserver -> Port
getFileserverport Fileserver{..} = fsport
--4 is easy for testing the pooling
maxnumThreads = 4
serverport :: String
serverport = "7008"
serverhost :: String
serverhost = "localhost"
dirrun:: IO ()
dirrun = withSocketsDo $ do
--Command line arguments for port and address
--args <- getArgs
server <- newDirectoryServer serverhost serverport
--sock <- listenOn (PortNumber (fromIntegral serverport))
addrinfos <- getAddrInfo
(Just (defaultHints {addrFlags = [AI_PASSIVE]}))
Nothing (Just serverport)
let serveraddr = head addrinfos
sock <- socket (addrFamily serveraddr) Stream defaultProtocol
bindSocket sock (addrAddress serveraddr)
listen sock 5
_ <- printf "Listening on port %s\n" serverport
--Listen on port from command line argument
--New Abstract FIFO Channel
chan <- newChan
--TVars are transactional variables stored in memory; this way we can read numThreads from any thread
numThreads <- atomically $ newTVar 0
--Spawns a new thread to handle the clientconnectHandler method, passes socket, channel, numThreads and server
forkIO $ clientconnectHandler sock chan numThreads server
--Calls the mainHandler which will monitor the FIFO channel
mainHandler sock chan
mainHandler :: Socket -> Chan String -> IO ()
mainHandler sock chan = do
--Read current message on the FIFO channel
chanMsg <- readChan chan
--If the message is KILL_SERVICE, stop mainHandler; otherwise call mainHandler again to keep the service running
case (chanMsg) of
("KILL_SERVICE") -> putStrLn "Terminating the Service!"
_ -> mainHandler sock chan
clientconnectHandler :: Socket -> Chan String -> TVar Int -> DirectoryServer -> IO ()
clientconnectHandler sock chan numThreads server = do
--Accept the socket which returns a handle, host and port
--(handle, host, port) <- accept sock
(s,a) <- accept sock
--handle <- socketToHandle s ReadWriteMode
--Read numThreads from memory and print it on server console
count <- atomically $ readTVar numThreads
putStrLn $ "numThreads = " ++ show count
--If thread capacity remains, fork a new handler and increment the count (decrementing it when the thread finishes or dies); otherwise tell the client that capacity has been reached
if (count < maxnumThreads) then do
forkFinally (clientHandler s chan server) (\_ -> atomically $ decrementTVar numThreads)
atomically $ incrementTVar numThreads
else do
send s (pack ("Maximum number of threads in use. try again soon"++"\n\n"))
sClose s
clientconnectHandler sock chan numThreads server
clientHandler :: Socket -> Chan String -> DirectoryServer -> IO ()
clientHandler sock chan server@DirectoryServer{..} =
forever $ do
message <- recv sock 1024
let msg = unpack message
print $ msg ++ "!ENDLINE!"
let cmd = head $ words $ head $ splitOn ":" msg
print cmd
case cmd of
("HELO") -> heloCommand sock server $ (words msg) !! 1
("KILL_SERVICE") -> killCommand chan sock
("DOWNLOAD") -> downloadCommand sock server msg
("UPLOAD") -> uploadCommand sock server msg
("JOIN") -> joinCommand sock server msg
_ -> do send sock (pack ("Unknown Command - " ++ msg ++ "\n\n")) ; return ()
--Function called when HELO text command received
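--Example exchange (read directly off the handler below): a client sending "HELO abc"
--gets back "HELO abc", the hard-coded IP 192.168.6.129, the server port and the
--student ID, terminated by a blank line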
heloCommand :: Socket -> DirectoryServer -> String -> IO ()
heloCommand sock DirectoryServer{..} msg = do
send sock $ pack $ "HELO " ++ msg ++ "\n" ++
"IP:" ++ "192.168.6.129" ++ "\n" ++
"Port:" ++ port ++ "\n" ++
"StudentID:12306421\n\n"
return ()
killCommand :: Chan String -> Socket -> IO ()
killCommand chan sock = do
send sock $ pack $ "Service is now terminating!"
writeChan chan "KILL_SERVICE"
downloadCommand :: Socket -> DirectoryServer ->String -> IO ()
downloadCommand sock server@DirectoryServer{..} command = do
let clines = splitOn "\\n" command
filename = (splitOn ":" $ clines !! 1) !! 1
fm <- atomically $ lookupFilemapping server filename
case fm of
(Nothing) -> send sock $ pack $ "DOWNLOAD: " ++ filename ++ "\n" ++
"STATUS: " ++ "File not found" ++ "\n\n"
(Just fm) -> do print (getFilemappingaddress fm)
print (getFilemappingport fm)
forkIO $ downloadmsg filename (getFilemappingaddress fm) (getFilemappingport fm) sock
send sock $ pack $ "DOWNLOAD: " ++ filename ++ "\n" ++
"STATUS: " ++ "SUCCESSFUL" ++ "\n\n"
return ()
downloadmsg :: String -> String -> String -> Socket -> IO()
downloadmsg filename host port sock = do
addrInfo <- getAddrInfo (Just (defaultHints {addrFlags = [AI_PASSIVE]})) Nothing (Just "7007")
let serverAddr = head addrInfo
clsock <- socket (addrFamily serverAddr) Stream defaultProtocol
connect clsock (addrAddress serverAddr)
send clsock $ pack $ "DOWNLOAD:FILE" ++ "\\n" ++
"FILENAME:" ++ filename ++ "\n\n"
resp <- recv clsock 1024
let msg = unpack resp
let clines = splitOn "\\n" msg
fdata = (splitOn ":" $ clines !! 1) !! 1
sClose clsock
send sock $ pack $ "DOWNLOAD: " ++ filename ++ "\n" ++
"DATA: " ++ fdata ++ "\n\n"
-- forkIO $ returndata filename sock fdata
return ()
returndata :: String -> Socket -> String -> IO ()
returndata filename sock fdata = do
send sock $ pack $ "DOWNLOAD: " ++ filename ++ "\\n" ++
"DATA: " ++ fdata ++ "\n\n"
return ()
uploadCommand :: Socket -> DirectoryServer ->String -> IO ()
uploadCommand sock server@DirectoryServer{..} command = do
let clines = splitOn "\\n" command
filename = (splitOn ":" $ clines !! 1) !! 1
fdata = (splitOn ":" $ clines !! 2) !! 1
fm <- atomically $ lookupFilemapping server filename
case fm of
(Just fm) -> send sock $ pack $ "UPLOAD: " ++ filename ++ "\n" ++
"STATUS: " ++ "File Already Exists" ++ "\n\n"
(Nothing) -> do numfs <- atomically $ M.size <$> readTVar fileservers
rand <- randomRIO (0, (numfs-1))
fs <- atomically $ lookupFileserver server rand
case fs of
(Nothing) -> send sock $ pack $ "UPLOAD: " ++ filename ++ "\n"++
"FAILED: " ++ "No valid Fileserver found to host" ++ "\n\n"
(Just fs) -> do forkIO $ uploadmsg sock filename fdata fs rand server
fm <- atomically $ newFilemapping filename rand (getFileserveraddress fs) (getFileserverport fs) (fmap show getZonedTime)
atomically $ addFilemapping server filename rand (getFileserveraddress fs) (getFileserverport fs) (fmap show getZonedTime)
send sock $ pack $ "UPLOAD: " ++ filename ++ "\\n" ++
"STATUS: " ++ "Successfull" ++ "\n\n"
return ()
uploadmsg :: Socket -> String -> String -> Fileserver -> Int -> DirectoryServer -> IO ()
uploadmsg sock filename fdata fs rand server@DirectoryServer{..} = withSocketsDo $ do
addrInfo <- getAddrInfo (Just (defaultHints {addrFlags = [AI_PASSIVE]})) Nothing (Just "7007")
let serverAddr = head addrInfo
clsock <- socket (addrFamily serverAddr) Stream defaultProtocol
connect clsock (addrAddress serverAddr)
send clsock $ pack $ "UPLOAD:FILE" ++ "\\n" ++
"FILENAME:" ++ filename ++ "\\n" ++
"DATA:" ++ fdata ++ "\\n"
resp <- recv clsock 1024
sClose clsock
let msg = unpack resp
print $ msg ++ "!ENDLINE!"
let clines = splitOn "\\n" msg
status = (splitOn ":" $ clines !! 1) !! 1
return ()
joinCommand :: Socket -> DirectoryServer ->String -> IO ()
joinCommand sock server@DirectoryServer{..} command = do
let clines = splitOn "\\n" command
newaddress = (splitOn ":" $ clines !! 1) !! 1
newport = (splitOn ":" $ clines !! 2) !! 1
nodeID <- atomically $ readTVar fileservercount
fs <- atomically $ newFileserver nodeID newaddress newport
atomically $ addFileserver server nodeID newaddress newport
atomically $ incrementFileserverCount fileservercount
send sock $ pack $ "JOINED DISTRIBUTED FILE SERVICE as fileserver: " ++ (show nodeID) ++ "\n\n"
return ()
--Increment Tvar stored in memory i.e. numThreads
incrementTVar :: TVar Int -> STM ()
incrementTVar tv = modifyTVar tv ((+) 1)
--Decrement Tvar stored in memory i.e. numThreads
decrementTVar :: TVar Int -> STM ()
decrementTVar tv = modifyTVar tv (subtract 1)
incrementFileserverCount :: TVar Int -> STM ()
incrementFileserverCount tv = modifyTVar tv ((+) 1)
| repo_name: Garygunn94/DFS | path: .stack-work/intero/intero41173CXn.hs | license: bsd-3-clause | size: 12,595 | n_ast_errors: 315 | ast_max_depth: 15 | n_whitespaces: 3,101 | n_ast_nodes: 3,311 | n_ast_terminals: 1,710 | n_ast_nonterminals: 1,601 | loc: 228 | cycloplexity: 6 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE LambdaCase #-}
module Duckling.Ordinal.TA.Rules
( rules ) where
import Control.Monad (join)
import Data.HashMap.Strict ( HashMap)
import Data.String
import Data.Text (Text)
import Prelude
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Text as Text
import Duckling.Dimensions.Types
import Duckling.Numeral.Helpers (parseInt)
import Duckling.Ordinal.Helpers
import Duckling.Regex.Types
import Duckling.Types
oneToNineMap :: HashMap Text Int
oneToNineMap = HashMap.fromList
[ ( "முதல்", 1 )
, ( "இரண்டாம்", 2 )
, ( "மூன்றாம்", 3 )
, ( "நான்காம்", 4 )
, ( "ஐந்தாம்", 5 )
, ( "ஆறாம்", 6 )
, ( "ஏழாம்", 7 )
, ( "எட்டாம்", 8 )
, ( "ஒன்பதாம்", 9 )
]
ruleOneToNine :: Rule
ruleOneToNine = Rule
{ name = "integer (1..9)"
, pattern =
[ regex "(முதல்|இரண்டாம்|மூன்றாம்|நான்காம்|ஐந்தாம்|ஆறாம்|ஏழாம்|எட்டாம்|ஒன்பதாம்)"
]
, prod = \case
(Token RegexMatch (GroupMatch (match:_)):_) ->
ordinal <$> HashMap.lookup (Text.toLower match) oneToNineMap
_ -> Nothing
}
tenToNineteenMap :: HashMap Text Int
tenToNineteenMap = HashMap.fromList
[ ( "பத்தாம்", 10 )
, ( "பதினொன்றாம்", 11 )
, ( "பன்னிரண்டாம்", 12 )
, ( "பதின்மூன்றாம்", 13 )
, ( "பதினான்காம்", 14 )
, ( "பதினைந்தாம்", 15 )
, ( "பதினாறாம்", 16 )
, ( "பதினேழாம்", 17 )
, ( "பதினெட்டாம்", 18 )
, ( "பத்தொன்பதாம்", 19 )
]
ruleTenToNineteen :: Rule
ruleTenToNineteen = Rule
{ name = "integer (10..19)"
, pattern =
[ regex "(பத்தாம்|பதினொன்றாம்|பன்னிரண்டாம்|பதின்மூன்றாம்|பதினான்காம்|பதினைந்தாம்|பதினாறாம்|பதினேழாம்|பதினெட்டாம்|பத்தொன்பதாம்)"
]
, prod = \case
(Token RegexMatch (GroupMatch (match:_)):_) ->
ordinal <$> HashMap.lookup (Text.toLower match) tenToNineteenMap
_ -> Nothing
}
tensMap :: HashMap Text Int
tensMap = HashMap.fromList
[ ( "இருபதாம்", 20 )
, ( "முப்பதாம்", 30 )
, ( "நாற்பதாம்", 40 )
, ( "ஐம்பதாம்", 50 )
, ( "அறுபதாம்", 60 )
, ( "எழுபதாம்", 70 )
, ( "எண்பதாம்", 80 )
, ( "தொண்ணூறாம்", 90 )
]
ruleTens :: Rule
ruleTens = Rule
{ name = "integer (20..90)"
, pattern =
[ regex "(இருபதாம்|முப்பதாம்|நாற்பதாம்|ஐம்பதாம்|அறுபதாம்|எழுபதாம்|எண்பதாம்|தொண்ணூறாம்)"
]
, prod = \case
(Token RegexMatch (GroupMatch (match:_)):_) ->
ordinal <$> HashMap.lookup (Text.toLower match) tensMap
_ -> Nothing
}
tensOrdinalMap :: HashMap Text Int
tensOrdinalMap = HashMap.fromList
[ ( "இருபத்தி", 20 )
, ( "முப்பத்து", 30 )
, ( "நாற்பத்து", 40 )
, ( "ஐம்பத்தி", 50 )
, ( "அறுபத்", 60 )
, ( "எழுபத்தி", 70 )
, ( "எண்பத்தி", 80 )
, ( "தொண்ணுற்று", 90 )
]
oneToNineOrdinalMap :: HashMap Text Int
oneToNineOrdinalMap = HashMap.fromList
[ ( "ஒன்றாம்", 1 )
, ( "இரண்டாம்", 2 )
, ( "மூன்றாம்", 3 )
, ( "நான்காம்", 4 )
, ( "ஐந்தாம்", 5 )
, ( "ஆறாம்", 6 )
, ( "ஏழாம்", 7 )
, ( "எட்டாம்", 8 )
, ( "ஒன்பதாம்", 9 )
]
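-- Example (combining the two maps above, as ruleCompositeTens does below):
-- "இருபத்தி" (20) followed by "ஒன்றாம்" (1) parses as the ordinal 21.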
ruleCompositeTens :: Rule
ruleCompositeTens = Rule
{ name = "integer ([2-9][1-9])"
, pattern =
[ regex "(இருபத்தி|முப்பத்து|நாற்பத்து|ஐம்பத்தி|அறுபத்|எழுபத்தி|எண்பத்தி|தொண்ணுற்று)(ஒன்றாம்|இரண்டாம்|மூன்றாம்|நான்காம்|ஐந்தாம்|ஆறாம்|ஏழாம்|எட்டாம்|ஒன்பதாம்)"
]
, prod = \case
(Token RegexMatch (GroupMatch (m1:m2:_)):_) -> do
v1 <- HashMap.lookup (Text.toLower m1) tensOrdinalMap
v2 <- HashMap.lookup (Text.toLower m2) oneToNineOrdinalMap
Just $ ordinal $ (v1 + v2)
_ -> Nothing
}
ruleOrdinalDigits :: Rule
ruleOrdinalDigits = Rule
{ name = "ordinal (digits)"
, pattern =
[ regex "0*(\\d+)\\."
]
, prod = \case
( Token RegexMatch (GroupMatch (match :_)) : _) -> ordinal <$> parseInt match
_ -> Nothing
}
rules :: [Rule]
rules =
[ ruleOrdinalDigits
, ruleOneToNine
, ruleTenToNineteen
, ruleTens
, ruleCompositeTens
]
| repo_name: facebookincubator/duckling | path: Duckling/Ordinal/TA/Rules.hs | license: bsd-3-clause | size: 5,524 | n_ast_errors: 0 | ast_max_depth: 17 | n_whitespaces: 884 | n_ast_nodes: 1,140 | n_ast_terminals: 679 | n_ast_nonterminals: 461 | loc: 124 | cycloplexity: 2 |
module Main where
import Lib
main :: IO ()
main = someFunc
{-99 Haskell Problems-}
{-| Get the last element of a list-}
myLast :: [a] -> a
myLast [x] = x
myLast (_:xs) = myLast xs
{-| Get the second to last element of a list-}
myButtLast :: [a] -> a
myButtLast [x, _] = x
myButtLast (_:xs) = myButtLast xs
{-| Get the kth element of a list-}
elementAt :: [a] -> Int -> a
elementAt (x:_) 0 = x
elementAt (_:xs) k = elementAt xs (k - 1)
{-| Get the length of a list-}
myLength :: [a] -> Int
myLength [] = 0
myLength (_:xs) = 1 + (myLength xs)
{-| Reverse a list-}
myReverse :: [a] -> [a]
myReverse [] = []
myReverse (x:xs) = (myReverse xs) ++ [x]
{-| Checks if a list is a palindrome-}
myPalindrome :: (Eq a) => [a] -> Bool
myPalindrome x
| x == (reverse x) = True
| otherwise = False
{-| Remove dupes in list-}
compress :: (Eq a) => [a] -> [a]
compress [] = []
compress (x:xs) = [x] ++ compress (clean x xs)
where clean _ [] = []
clean y (x:xs)
| y == x = clean y xs
| otherwise = [x] ++ clean y xs
{-| Put duplicates in sublists-}
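{- Example, worked by hand from the definition below:
pack "aaabcc" == ["aaa","b","cc"] -}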
pack :: (Eq a) => [a] -> [[a]]
pack [] = []
pack [x] = [[x]]
pack (x:xs) = combine x xs ++ pack (clean x xs)
where
combine _ [] = []
combine x s = [[z | z <- x:s, z == x]]
clean _ [] = []
clean y (x:xs)
| y == x = clean y xs
| otherwise = [x] ++ clean y xs
{-| Run-length encode a list into (count, element) pairs-}
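{- Example, following on from pack above:
encode "aaabcc" == [(3,'a'),(1,'b'),(2,'c')] -}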
encode :: (Eq a) => [a] -> [(Int, a)]
encode [] = []
encode s = map (\(x:xs) -> (length (x:xs), x)) (pack s)
data List a = Single a | Multiple Int a
deriving Show
{-| Similar to before-}
encodeModified :: (Eq a) => [a] -> [List a]
encodeModified s = map f (encode s)
where f (1, x) = Single x
f (n, x) = Multiple n x
decode :: [List a] -> [a]
decode s = foldr (++) [] (map f s)
where f (Single x) = [x]
f (Multiple n x) = replicate n x
encodeDirect :: (Eq a) => [a] -> [List a]
encodeDirect [] = []
encodeDirect (x:xs) = [toList (count x (x:xs)) x] ++
encodeDirect (filter (x /=) xs)
where count x s = length (filter (x==) s)
toList 1 x = Single x
toList n x = Multiple n x
dupl :: [a] -> [a]
dupl [] = []
dupl (x:xs) = [x,x] ++ dupl xs
repli :: [a] -> Int -> [a]
repli [] _ = []
repli (x:xs) n = replicate n x ++ repli xs n
dropEvery :: [a] -> Int -> [a]
dropEvery [] _ = []
dropEvery s n = foldr (++) [] (map (f n) (zip [1..] s))
where f n (m, x)
| m `mod` n == 0 = []
| otherwise = [x]
spliter :: [a] -> Int -> [[a]]
spliter [] _ = []
spliter s n = [reverse (drop ((length s) - n) (reverse s))] ++ [drop n s]
slice :: [a] -> Int -> Int -> [a]
slice [] _ _ = []
slice s start stop = reverse (drop (((length s)) - stop) (reverse (drop (start - 1) s)))
rotate :: [a] -> Int -> [a]
rotate [] _ = []
rotate s n = slice s ((f n (length s)) + 1) (length s) ++ slice s 1 (f n (length s))
where f n m
| n > m = f (n - m) m
| n < 0 = f (m + n) m
| otherwise = n
removeAt :: [a] -> Int -> (a, [a])
removeAt s n = (elementAt (slice s (n + 1) (n + 2)) 0,
slice s 1 n ++ slice s (n+2) (length s))
insertAt :: [a] -> a -> Int -> [a]
insertAt xs x n = slice xs 1 (n-1) ++ [x] ++ slice xs n (length xs)
range :: Int -> Int -> [Int]
range n1 n2 = [n1..n2]
listEq :: (Eq a) => [a] -> [a] -> Bool
listEq [] [] = True
listEq [] _ = False
listEq _ [] = False
listEq s1 s2 = False `notElem` (map (`elem`s1) s2 ++ map (`elem`s2) s1)
listNeq :: (Eq a) => [a] -> [a] -> Bool
listNeq s1 s2
| listEq s1 s2 = False
| otherwise = True
listRemoveDupes :: (Eq a) => [[a]] -> [[a]]
listRemoveDupes [[]] = [[]]
listRemoveDupes [] = []
listRemoveDupes (x:xs) = [x] ++ listRemoveDupes (filter (listNeq x) xs)
combinations :: (Eq a) => Int -> [a] -> [[a]]
combinations 0 _ = [[]]
combinations _ [] = [[]]
combinations n s = f n 1 s (map (\x -> [x]) s)
where f n1 n2 s1 s2
| n1 == n2 = s2
| otherwise = f n1 (n2 + 1) s1 (listRemoveDupes
[x ++ [y] |
x <- s2,
y <- s1,
y `notElem` x])
{- TODO the second combinatorics problem on the haskell website.-}
isDisjoint :: (Eq a) => [a] -> [a] -> Bool
isDisjoint [] [] = False
isDisjoint [] _ = True
isDisjoint _ [] = True
isDisjoint (x:xs) s2
| x `elem` s2 = False
| otherwise = isDisjoint xs s2
{-| TODO Finish this.-}
{-grouper :: (Eq a) => [Int] -> [a] -> [[[a]]]
grouper n s = g (map (`combinations`s) n)
where f x s = filter (isDisjoint x) s
g (x:y:s)
|y == [] = []
|otherwise = map (\z -> g (f z y) (y:s)) x -}
sortOnLength :: [[a]] -> [[a]]
sortOnLength [] = []
sortOnLength (x:xs) =
sortOnLength [y | y <- xs, (length y) < (length x)]
++ [x]
++ sortOnLength [y | y <- xs, (length y) > (length x)]
sieveEratosthenes :: Int -> [Int]
sieveEratosthenes n = f n [2..n]
where f n [] = []
f n (x:xs) = [x] ++ f n [y | y <- xs,
y `notElem` (map (x*) [2..n])]
isPrime :: Int -> Bool
isPrime n = n `elem` (sieveEratosthenes n)
gcd' :: Int -> Int -> Int
gcd' n1 n2
| n1 == n2 = n1
| n1 > n2 = gcd' (n1 - n2) n2
| otherwise = gcd' (n2 - n1) n1
isCoPrime :: Int -> Int -> Bool
isCoPrime n1 n2
| (gcd' n1 n2) == 1 = True
| otherwise = False
eulerTotient :: Int -> Int
eulerTotient n = length (filter id (map (isCoPrime n) [1..n]))
primeFactors :: Int -> [Int]
primeFactors n
|isPrime n = [n]
|otherwise = [f] ++ primeFactors (n `div` f)
where f = fst (head (filter (\(x,y) ->
y == 0) (map (\x ->
(x, (n `mod` x)))
(sieveEratosthenes n))))
encodePrimeFactors :: Int -> [(Int, Int)]
encodePrimeFactors = encode . primeFactors
eulerTotient' :: Int -> Int
eulerTotient' n = foldr (*) 1
. map (\(x, y) ->
(y-1) * (y^(x - 1)))
. encodePrimeFactors $ n
primesRange :: Int -> Int -> [Int]
primesRange l u = filter (>=l) (sieveEratosthenes u)
combinationsWithDupes :: (Eq a) => Int -> [a] -> [[a]]
combinationsWithDupes 0 _ = [[]]
combinationsWithDupes _ [] = [[]]
combinationsWithDupes n s = f n 1 s (map (\x -> [x]) s)
where f n1 n2 s1 s2
| n1 == n2 = s2
| otherwise = f n1 (n2 + 1) s1 [x ++ [y] |
x <- s2,
y <- s1,
y `notElem` x]
{-| Fix empty list issue-}
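{- Example, worked by hand from the definition below:
goldbach 28 == (5,23); the head call fails when no prime pair exists,
which is the empty-list issue flagged above -}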
goldbach :: Int -> (Int,Int)
goldbach n = snd
. head
. filter (\(x, _) -> x == n)
. map (\[x,y] -> ((x+y),(x,y)))
. combinationsWithDupes 2
. sieveEratosthenes $ n
goldbachList :: Int -> Int -> [(Int,Int)]
goldbachList l u = map goldbach
. dropWhile (<= l) $ [2,4 .. u]
grayC :: Int -> [String]
grayC n = combinationsWithDupes n
$ replicate n '1' ++ replicate n '0'
data BSTNode a = Branch a (BSTNode a) (BSTNode a)
| Empty
deriving (Show, Eq)
leaf :: a -> BSTNode a
leaf x = Branch x Empty Empty
permutations :: Int -> BSTNode a
permutations n =
| repo_name: MauriceIsAG/HaskellScratch | path: .stack-work/intero/intero122446ZD.hs | license: bsd-3-clause | size: 7,385 | n_ast_errors: 1 | ast_max_depth: 19 | n_whitespaces: 2,419 | n_ast_nodes: 3,712 | n_ast_terminals: 1,965 | n_ast_nonterminals: 1,747 | loc: -1 | cycloplexity: -1 |
module MPS.MathExt where
import Data.List
import Data.Ratio
import MPS.Hack.Dot
import MPS.Math.CombinatoricsGeneration
import MPS.Math.NumberTheoryFundamentals
import qualified MPS.Math.PermutationGroups as Group
import MPS.Math.Primes
import MPS.Snippets
import Prelude hiding ((.), (^))
phi = (sqrt(5)+1)/2
fib n = (( phi^n - (1-phi)^n ) / sqrt(5)).round
fibs = 1 : 1 : zipWith (+) fibs (fibs.tail)
primes = primesTo10000 ++ [10007, 10009 ..].filter(isPrime)
m_indices m n = [ (i,j) | i <- [0..m-1], j <- [0..n-1] ]
at2 (i,j) m = m.at(i).at(j)
factors = primePowerFactors
divisors n = n.factors.map(\(p,q) -> [ p ^ i | i <- [0..q] ]).cartProd.map(product).sort.init
perm_divisors 1 = [[]]
perm_divisors n = [i:xs |
i <- (n: n.divisors.tail.reverse),
let j = n `div` i,
let xss = perm_divisors j,
xs <- xss
].map sort .unique .rsort
totient m = m.factors.map phi_token.product
where phi_token (p, l) = (p-1)*p^(l-1)
combinations_for 0 xs = [[]]
combinations_for n xs = [y:ys | y <- xs, ys <- combinations_for (n - 1) xs]
is_prime = isPrime
power_permutations xs = xs.powerset.concatMap(permutationsOf)
is_proper_integer x = x.floor .is ( x.ceiling )
is_square x = x.fromIntegral.sqrt.is_proper_integer
is_pythagorean a b c = a^2 + b^2 == c^2
cf_iter x y = x + 1 / y
cf xs = xs.map(%1) .foldr1 cf_iter
cf_e = 2: ([[1,n,1] | n <- [2,4..]] .join')
-- farey sequence
median_q x y = ( x.numerator + y.numerator ) % ( x.denominator + y.denominator )
farey_iter xs = xs
.zip (xs.tail)
.map (splash median_q)
.zip (xs.tail)
.map swap
.map list2
.join'
.(xs.first : )
fareys' xs 1 = xs
fareys' xs n = fareys' (farey_iter xs) (n-1)
fareys n = fareys' [0,1] n
bounded_fareys' xs 1 _ = xs
bounded_fareys' xs n c = bounded_fareys' filtered (n-1) (c+1) where
filtered = farey_iter xs
.reject (\q -> q.denominator > c)
bounded_fareys n = bounded_fareys' [0,1] n 2 .tail .init
-- pythagoreans generators
pythagoreans = [ (s, (a, b, c), (m, n) ) |
m <- [2..],
n <- [1..m-1],
(m + n).odd,
gcd m n == 1,
let a = m^2 - n ^2,
let b = 2 * m * n,
let c = m^2 + n ^ 2,
let s = a + b + c
]
pythagoreans' x = pythagoreans
.ub(\(_, _, (m,_)) -> m <= bound)
.filter(\(s, _, _) -> s <= x)
where bound = x.from_i.sqrt.floor
turns xs = xs.iterate Group.rotateL .take(xs.length)
| repo_name: nfjinjing/mps | path: src/MPS/MathExt.hs | license: bsd-3-clause | size: 2,430 | n_ast_errors: 18 | ast_max_depth: 14 | n_whitespaces: 569 | n_ast_nodes: 1,270 | n_ast_terminals: 688 | n_ast_nonterminals: 582 | loc: 68 | cycloplexity: 1 |
{-# LANGUAGE GADTs, DeriveDataTypeable, PatternGuards, Rank2Types, BangPatterns #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Pass.L
-- Copyright : (C) 2012-2013 Edward Kmett
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GADTs, Rank2Types)
--
----------------------------------------------------------------------------
module Data.Pass.L
( L(..)
, getL
, callL
, ordL
, eqL
, selectM
, breakdown
, (@#)
) where
import Control.Monad (liftM, liftM2)
import Data.Typeable
import Data.Hashable
import Data.Pass.Named
import Data.IntMap (IntMap)
import Data.Key (foldrWithKey)
import Data.List (sort)
import qualified Data.IntMap as IM
import Data.Pass.L.Estimator
import Data.Pass.Eval
import Data.Pass.Eval.Naive
import Data.Pass.L.By
import Data.Pass.Util (clamp)
import Data.Binary
-- | An L-Estimator represents a linear combination of order statistics
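--
-- For example, 'LMean' over @n@ inputs gives every order statistic the weight
-- @1\/n@, while @'NthSmallest' 0@ puts all of its weight on the minimum.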
data L a b where
LTotal :: (Num a, Ord a) => L a a
LMean :: (Fractional a, Ord a) => L a a
LScale :: (Fractional a, Ord a) => L a a
NthLargest :: (Num a, Ord a) => Int -> L a a
NthSmallest :: (Num a, Ord a) => Int -> L a a
QuantileBy :: (Fractional a, Ord a) => Estimator -> Rational -> L a a
Winsorized :: (Fractional b, Ord b) => Rational -> L a b -> L a b
Trimmed :: (Fractional b, Ord b) => Rational -> L a b -> L a b
Jackknifed :: (Fractional b, Ord b) => L a b -> L a b
(:*) :: Fractional b => Rational -> L a b -> L a b
(:+) :: Num b => L a b -> L a b -> L a b
deriving Typeable
instance By L where
QuantileBy _ q `by` r = QuantileBy r q
Trimmed p f `by` r = Trimmed p (by f r)
Winsorized p f `by` r = Winsorized p (by f r)
Jackknifed f `by` r = Jackknifed (by f r)
(n :* f) `by` r = n :* by f r
(f :+ g) `by` r = by f r :+ by g r
f `by` _ = f
infixl 7 :*
infixl 6 :+
instance Named L where
showsFun _ LTotal = showString "LTotal"
showsFun _ LMean = showString "LMean"
showsFun _ LScale = showString "LScale"
showsFun d (QuantileBy e q) = showParen (d > 10) $ showString "QuantileBy " . showsPrec 10 e . showChar ' ' . showsPrec 10 q
showsFun d (Winsorized p f) = showParen (d > 10) $ showString "Winsorized " . showsPrec 10 p . showChar ' ' . showsFun 10 f
showsFun d (Trimmed p f) = showParen (d > 10) $ showString "Trimmed " . showsPrec 10 p . showChar ' ' . showsFun 10 f
showsFun d (Jackknifed f) = showParen (d > 10) $ showString "Jackknifed " . showsFun 10 f
showsFun d (NthLargest n) = showParen (d > 10) $ showString "NthLargest " . showsPrec 10 n
showsFun d (NthSmallest n) = showParen (d > 10) $ showString "NthSmallest " . showsPrec 10 n
showsFun d (x :* y) = showParen (d > 7) $ showsPrec 8 x . showString " :* " . showsPrec 7 y
showsFun d (x :+ y) = showParen (d > 6) $ showsPrec 7 x . showString " :+ " . showsPrec 6 y
hashFunWithSalt n LTotal = 0 `hashWithSalt` n
hashFunWithSalt n LMean = 1 `hashWithSalt` n
hashFunWithSalt n LScale = 2 `hashWithSalt` n
hashFunWithSalt n (QuantileBy e q) = 4 `hashWithSalt` n `hashWithSalt` e `hashWithSalt` q
hashFunWithSalt n (Winsorized p f) = 5 `hashWithSalt` n `hashWithSalt` p `hashFunWithSalt` f
hashFunWithSalt n (Trimmed p f) = 6 `hashWithSalt` n `hashWithSalt` p `hashFunWithSalt` f
hashFunWithSalt n (Jackknifed f) = 7 `hashWithSalt` n `hashFunWithSalt` f
hashFunWithSalt n (NthLargest m) = 8 `hashWithSalt` n `hashWithSalt` m
hashFunWithSalt n (NthSmallest m) = 9 `hashWithSalt` n `hashWithSalt` m
hashFunWithSalt n (x :* y) = 10 `hashWithSalt` n `hashWithSalt` x `hashFunWithSalt` y
hashFunWithSalt n (x :+ y) = 11 `hashWithSalt` n `hashFunWithSalt` x `hashFunWithSalt` y
equalFun LTotal LTotal = True
equalFun LMean LMean = True
equalFun LScale LScale = True
equalFun (QuantileBy e p) (QuantileBy f q) = e == f && p == q
equalFun (Winsorized p f) (Winsorized q g) = p == q && equalFun f g
equalFun (Trimmed p f) (Trimmed q g) = p == q && equalFun f g
equalFun (Jackknifed f) (Jackknifed g) = equalFun f g
equalFun (NthLargest n) (NthLargest m) = n == m
equalFun (NthSmallest n) (NthSmallest m) = n == m
equalFun (a :+ b) (c :+ d) = equalFun a c && equalFun b d
equalFun (a :* b) (c :* d) = typeOf a == typeOf c && cast a == Just c && equalFun b d
equalFun _ _ = False
putFun LTotal = put (0 :: Word8)
putFun LMean = put (1 :: Word8)
putFun LScale = put (2 :: Word8)
putFun (QuantileBy e r) = put (4 :: Word8) >> put e >> put r
putFun (Winsorized p f) = put (5 :: Word8) >> put p >> putFun f
putFun (Trimmed p f) = put (6 :: Word8) >> put p >> putFun f
putFun (Jackknifed f) = put (7 :: Word8) >> putFun f
putFun (NthLargest n) = put (8 :: Word8) >> put n
putFun (NthSmallest n) = put (9 :: Word8) >> put n
putFun (x :* y) = put (10 :: Word8) >> put x >> putFun y
putFun (x :+ y) = put (11 :: Word8) >> putFun x >> putFun y
getL :: (Fractional a, Ord a) => Get (L a a)
getL = do
i <- get :: Get Word8
case i of
0 -> return LTotal
1 -> return LMean
2 -> return LScale
4 -> liftM2 QuantileBy get get
5 -> liftM2 Winsorized get getL
6 -> liftM2 Trimmed get getL
7 -> liftM Jackknifed getL
8 -> liftM NthLargest get
9 -> liftM NthSmallest get
10 -> liftM2 (:*) get getL
11 -> liftM2 (:+) getL getL
_ -> error "getL: Unknown L-estimator"
instance Show (L a b) where
showsPrec = showsFun
instance Hashable (L a b) where
hashWithSalt = hashFunWithSalt
instance Eq (L a b) where
(==) = equalFun
-- | A common measure of how robust an L estimator is in the presence of outliers.
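--
-- For instance (worked from 'callL' below), @breakdown LMean@ is 0: the mean
-- puts nonzero weight on every order statistic, so a single extreme value
-- already shifts it.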
breakdown :: (Num b, Eq b) => L a b -> Int
breakdown f
| IM.null m = 50
| otherwise = fst (IM.findMin m) `min` (100 - fst (IM.findMax m))
where m = IM.filter (/= 0) $ callL f 101
infixl 0 @#
-- | @f \@# n@ Return a list of the coefficients that would be used by an L-Estimator for an input of length @n@
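--
-- For example, with the 'LMean' estimator defined above,
-- @LMean \@# 4 == [0.25,0.25,0.25,0.25]@, since 'callL' assigns every order
-- statistic the weight @1\/n@.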
(@#) :: Num a => L a a -> Int -> [a]
f @# n = [ IM.findWithDefault 0 k fn | k <- [0..n-1] ]
where fn = callL f n
callL :: L a b -> Int -> IntMap b
callL LTotal n = IM.fromList [ (i,1) | i <- [0..n-1]]
callL LMean n = IM.fromList [ (i, oon) | i <- [0..n-1]]
where oon = recip (fromIntegral n)
callL LScale n = IM.fromList [ (i - 1, scale * (2 * fromIntegral i - 1 - r)) | i <- [1..n]]
where r = fromIntegral n
scale = 1 / (r *(r-1))
callL (QuantileBy f p) n = case estimateBy f p n of
Estimate h qp -> case properFraction h of
(w, 0) -> IM.singleton (clamp n (w - 1)) 1
_ -> qp
callL (Winsorized p g) n = case properFraction (fromIntegral n * p) of
(w, 0) -> IM.fromAscListWith (+) [ (w `max` min (n - 1 - w) k, v) | (k,v) <- IM.toAscList (callL g n) ]
(w, f) | w' <- w + 1 -> IM.fromListWith (+) $ do
(k,v) <- IM.toList (callL g n)
[ (w `max` min (n - 1 - w ) k, v * fromRational (1 - f)),
(w' `max` min (n - 1 - w') k, v * fromRational f)]
callL (Trimmed p g) n = case properFraction (fromIntegral n * p) of
(w, 0) -> IM.fromDistinctAscList [ (k + w, v) | (k, v) <- IM.toAscList $ callL g (n - w*2)]
(w, f) | w' <- w + 1 -> IM.fromListWith (+) $ [ (k + w, fromRational (1 - f) * v) | (k,v) <- IM.toList $ callL g (n - w*2)] ++
[ (k + w', fromRational f * v) | (k,v) <- IM.toList $ callL g (n - w'*2)]
callL (Jackknifed g) n = IM.fromAscListWith (+) $ do
let n' = fromIntegral n
(k, v) <- IM.toAscList $ callL g (n - 1)
let k' = fromIntegral k + 1
[(k, (n' - k') * v / n'), (k + 1, k' * v / n')]
callL (NthLargest m) n = IM.singleton (clamp n (n - m - 1)) 1
callL (NthSmallest m) n = IM.singleton (clamp n m) 1
callL (x :+ y) n = IM.unionWith (+) (callL x n) (callL y n)
callL (s :* y) n = fmap (r *) (callL y n) where r = fromRational s
instance Naive L where
naive m n xs = ordL m $ foldrWithKey step 0 $ sort $ eqL m xs where
coefs = callL m n
step = ordL m $ \g v x -> IM.findWithDefault 0 g coefs * v + x
instance Eval L where
-- eval m n xs = ordL m $ callL m n `selectM` eqL m xs
eval = naive -- faster for now
-- perform a hedged quickselect, using the keys of the sparse coefficient map
selectM :: (Num a, Ord a) => IntMap a -> [a] -> a
selectM = go 0 where
go !_ !_ [] = 0
go !b !m (x:xs) = i + j + k
where (lo,n, hi) = partitionAndCount (<x) xs
(lm,mm,hm) = IM.splitLookup (b+n) m
i = if IM.null lm then 0 else go b lm lo
j = maybe 0 (x*) mm
k = if IM.null hm then 0 else go (b+n+1) hm hi
-- NB: unstable, reverses the sub-lists each time
partitionAndCount :: (a -> Bool) -> [a] -> ([a],Int,[a])
partitionAndCount f = go [] 0 [] where
go !ts !n !fs [] = (ts,n,fs)
go !ts !n !fs (x:xs)
| f x = go (x:ts) (n + 1) fs xs
| otherwise = go ts n (x:fs) xs
eqL :: L a b -> p a -> p b
eqL LTotal a = a
eqL LMean a = a
eqL LScale a = a
eqL (NthLargest _) a = a
eqL (NthSmallest _) a = a
eqL (QuantileBy _ _) a = a
eqL (Winsorized _ x) a = eqL x a
eqL (Jackknifed x) a = eqL x a
eqL (Trimmed _ x) a = eqL x a
eqL (x :+ _) a = eqL x a
eqL (_ :* x) a = eqL x a
ordL :: L a b -> ((Ord b, Num b) => r) -> r
ordL LTotal a = a
ordL LMean a = a
ordL LScale a = a
ordL (NthLargest _) a = a
ordL (NthSmallest _) a = a
ordL (QuantileBy _ _) a = a
ordL (Winsorized _ x) a = ordL x a
ordL (Trimmed _ x) a = ordL x a
ordL (Jackknifed x) a = ordL x a
ordL (x :+ _) a = ordL x a
ordL (_ :* x) a = ordL x a
| repo_name: ekmett/multipass | path: Data/Pass/L.hs | license: bsd-3-clause | size: 9,853 | n_ast_errors: 0 | ast_max_depth: 18 | n_whitespaces: 2,733 | n_ast_nodes: 4,680 | n_ast_terminals: 2,415 | n_ast_nonterminals: 2,265 | loc: 199 | cycloplexity: 12 |
module Utilities
( wordPair ) where
import Control.Lens
import Data.Bits
import Data.Word
byteToWord :: (Word8, Word8) -> Word16
byteToWord (x, y) =
(fromIntegral x `shiftL` 8) + fromIntegral y
wordToByte :: Word16 -> (Word8, Word8)
wordToByte x =
(fromIntegral x, fromIntegral $ x `shiftR` 8)
wordPair :: Iso' Word16 (Word8, Word8)
wordPair = iso wordToByte byteToWord
| repo_name: intolerable/gbc | path: src/Utilities.hs | license: bsd-3-clause | size: 380 | n_ast_errors: 0 | ast_max_depth: 8 | n_whitespaces: 66 | n_ast_nodes: 140 | n_ast_terminals: 80 | n_ast_nonterminals: 60 | loc: 13 | cycloplexity: 1 |
module Database.DSH.Backend.Sql.Opt.Properties.Card1 where
import Database.Algebra.Table.Lang
import Database.DSH.Backend.Sql.Opt.Properties.Types
inferCard1NullOp :: NullOp -> Card1
inferCard1NullOp op =
case op of
LitTable (vals, _) -> length vals == 1
TableRef (_, _, _) -> False
inferCard1UnOp :: Card1 -> Empty -> UnOp -> Card1
inferCard1UnOp childCard1 childEmpty op =
case op of
WinFun _ -> childCard1
RowNum (_, _, _) -> childCard1
RowRank (_, _) -> childCard1
Rank (_, _) -> childCard1
Project _ -> childCard1
Select _ -> False
Distinct _ -> childCard1
Aggr (_, _ : _) -> childCard1
Aggr (_, []) -> not childEmpty
Serialize _ -> childCard1
inferCard1BinOp :: Card1 -> Card1 -> BinOp -> Card1
inferCard1BinOp leftCard1 rightCard1 op =
case op of
Cross _ -> leftCard1 && rightCard1
ThetaJoin _ -> False
LeftOuterJoin _ -> False
SemiJoin _ -> False
AntiJoin _ -> False
DisjUnion _ -> False
Difference _ -> False
| repo_name: ulricha/dsh-sql | path: src/Database/DSH/Backend/Sql/Opt/Properties/Card1.hs | license: bsd-3-clause | size: 1,193 | n_ast_errors: 0 | ast_max_depth: 10 | n_whitespaces: 420 | n_ast_nodes: 354 | n_ast_terminals: 184 | n_ast_nonterminals: 170 | loc: 31 | cycloplexity: 10 |
module InputText (
Codepoint
, InputText
, toCodepoints
, charToCodepoint
)
where
import qualified Data.ByteString as B
import Data.Traversable (traverse)
import Data.Char (ord)
import Data.Word (Word8)
import Data.List (intercalate)
import Utils
type Codepoint = Word8
type InputText = B.ByteString
toCodepoints :: [String] -> Maybe InputText
toCodepoints = fmap (B.pack . intercalate [fromInteger stringSeparationCP])
. traverse (traverse charToCodepoint)
validCodepoint :: Char -> Bool
validCodepoint x = (x >= '\x20') && (x < '\xa0')
charToCodepoint :: Char -> Maybe Codepoint
charToCodepoint x | validCodepoint x = Just . fromIntegral $ ord x
| otherwise = Nothing
stringToCodepoint :: String -> Maybe [Codepoint]
stringToCodepoint xs =
fmap (++[0]) $ traverse charToCodepoint xs
| repo_name: pcrama/message-compiler | path: src/InputText.hs | license: bsd-3-clause | size: 829 | n_ast_errors: 0 | ast_max_depth: 11 | n_whitespaces: 153 | n_ast_nodes: 263 | n_ast_terminals: 143 | n_ast_nonterminals: 120 | loc: 24 | cycloplexity: 1 |
--
-- Data/Diff/Algorithm/ONP.hs - O(NP) diff algorithm module
--
-- Copyright (C) 2008, Hiroki Hattori
-- Licensed under BSD3, see COPYING
--
-- References:
-- [1] E.W.Myers, "An O(ND) difference algorithm and its variations", Algorithmica, 1 (1986), pp.251-266
-- [2] S.Wu, U.Manber, G.Myers, W.Miller, "An O(NP) Sequence Comparison Algorithm", August 1989
-- [3] Document comparison algorithms (in Japanese) http://hp.vector.co.jp/authors/VA007799/viviProg/doc5.htm
-- [4] diff (3) http://www.slash-zero.jp/archives/program/476
--
module Data.Diff.Algorithm.ONP
(
module Data.Diff,
genericDiff, diff,
) where
import Control.Monad.Fix (fix)
import Data.Diff
diff :: Eq a => DiffAlgorithm a
diff = genericDiff (==)
genericDiff :: GenericDiffAlgorithm a
genericDiff cmp oldlist newlist = let ((_, _, _, path): _, _) = onp (fp_ab_0, []) in reverse path
where
--
snake = genericSnake cmp
-- normalized input values (shorter sequence first)
(xn, xs, insopr, yn, ys, delopr)
| oldlen <= newlen = (oldlen, oldlist, addInsertPath, newlen, newlist, addDeletePath)
| otherwise = (newlen, newlist, addDeletePath, oldlen, oldlist, addInsertPath)
where
oldlen = length oldlist
newlen = length newlist
-- Initial state
-- fp_a_p=fp[delta, p], fp_b_p=fp[(delta-1)..-p, p], fp_c_p=fp[delta+1..delta+p.p]
-- With those definitions, this is the state at p=0: fp_ab_0=(fp_a_0:fp_b_0). By definition, fp_c_0 is always [].
fp_ab_0 = reverse $ fix (\f (n, fp) -> fp:if n == 0 then [] else f ((n - 1), newfp_ins fp) ) (yn - xn {-delta-}, fp_0_0)
where fp_0_0 = let (s, xs', ys') = snake xs ys in (s, xs', ys', addCommonPath s [])
--
newfp_ins (s, xs', (_:ys'), path) = let (n, x, y) = snake xs' ys' in (n + s + 1, x, y, addCommonPath n $ insopr 1 path)
newfp_del (s, (_:xs'), ys', path) = let (n, x, y) = snake xs' ys' in (n + s, x, y, addCommonPath n $ delopr 1 path)
--
-- main O(NP) loop
--
onp fps@(fp_ab@(fp_a@(_, _, s_y, _):_), fp_c)
| null s_y = fps
| otherwise = onp (newfp_a:newfp_b, newfp_c)
where
newfp_a = newfp (head newfp_b) (head newfp_c) -- k == delta
newfp_b = newfp_b' fp_ab -- k < delta
where
newfp_b' ((_, [], _, _):[]) = [] -- prune: x would exceed xn
newfp_b' (fp:[]) = [newfp_del fp]
newfp_b' (fp:fps') = let newfps = newfp_b' fps' in newfp (head newfps) fp : newfps
newfp_c = newfp_c' (fp_a:fp_c) -- k > delta
where
newfp_c' ((_, _, [], _):[]) = [] -- prune: y would exceed yn
newfp_c' (fp:[]) = [newfp_ins fp]
newfp_c' (fp:fps') = let newfps = newfp_c' fps' in newfp fp (head newfps) : newfps
--
newfp fp_l@(s_l, _, _, _) fp_r@(s_r, _, _, _)
| s_l + 1 >= s_r = newfp_ins fp_l
| otherwise = newfp_del fp_r
| repo_name: seagull-kamome/Data-Diff | path: Data/Diff/Algorithm/ONP.hs | license: bsd-3-clause | size: 3,083 | n_ast_errors: 0 | ast_max_depth: 16 | n_whitespaces: 925 | n_ast_nodes: 955 | n_ast_terminals: 532 | n_ast_nonterminals: 423 | loc: 35 | cycloplexity: 6 |
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : dave.laing.80@gmail.com
Stability : experimental
Portability : non-portable
-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TemplateHaskell #-}
module Fragment.Record.Ast.Pattern (
PtFRecord
, AsPtRecord(..)
) where
import Data.Functor.Classes (showsUnaryWith)
import Text.Show (showListWith)
import Bound (Bound(..))
import Control.Lens.Prism (Prism')
import Control.Lens.TH (makePrisms)
import Data.Deriving (deriveEq1, deriveOrd1, deriveShow1)
import qualified Data.Text as T
import Ast.Pattern
import Data.Bitransversable
import Data.Functor.Rec
data PtFRecord pt a =
PtRecordF [(T.Text, pt a)]
deriving (Eq, Ord, Show, Functor, Foldable, Traversable)
makePrisms ''PtFRecord
deriveEq1 ''PtFRecord
deriveOrd1 ''PtFRecord
deriveShow1 ''PtFRecord
instance EqRec PtFRecord where
liftEqRec eR _ (PtRecordF xs) (PtRecordF ys) =
let
f (l1, x1) (l2, x2) = l1 == l2 && eR x1 x2
in
and $ zipWith f xs ys
instance OrdRec PtFRecord where
liftCompareRec _ _ (PtRecordF []) (PtRecordF []) = EQ
liftCompareRec _ _ (PtRecordF []) (PtRecordF (_ : _)) = LT
liftCompareRec _ _ (PtRecordF (_ : _)) (PtRecordF []) = GT
liftCompareRec cR c (PtRecordF ((lx, x) : xs)) (PtRecordF ((ly, y) : ys)) =
case compare lx ly of
EQ -> case cR x y of
EQ -> liftCompareRec cR c (PtRecordF xs) (PtRecordF ys)
z -> z
z -> z
instance ShowRec PtFRecord where
liftShowsPrecRec sR _ _ _ n (PtRecordF xs) =
let
g m (l, x) = showString ("(" ++ T.unpack l ++ ", ") .
sR m x .
showString ")"
f _ ps = showListWith (g 0) ps
in
showsUnaryWith f "PtRecordF" n xs
instance Bound PtFRecord where
PtRecordF pts >>>= f = PtRecordF (fmap (fmap (>>= f)) pts)
instance Bitransversable PtFRecord where
bitransverse fT fL (PtRecordF rs) = PtRecordF <$> traverse (traverse (fT fL)) rs
class AsPtRecord pt where
_PtRecordP :: Prism' (pt k a) (PtFRecord k a)
_PtRecord :: Prism' (Pattern pt a) [(T.Text, Pattern pt a)]
_PtRecord = _PtTree . _PtRecordP . _PtRecordF
instance AsPtRecord PtFRecord where
_PtRecordP = id
instance {-# OVERLAPPABLE #-} AsPtRecord (PtSum xs) => AsPtRecord (PtSum (x ': xs)) where
_PtRecordP = _PtNext . _PtRecordP
instance {-# OVERLAPPING #-} AsPtRecord (PtSum (PtFRecord ': xs)) where
_PtRecordP = _PtNow . _PtRecordP
| repo_name: dalaing/type-systems | path: src/Fragment/Record/Ast/Pattern.hs | license: bsd-3-clause | size: 2,660 | n_ast_errors: 0 | ast_max_depth: 17 | n_whitespaces: 559 | n_ast_nodes: 901 | n_ast_terminals: 475 | n_ast_nonterminals: 426 | loc: 66 | cycloplexity: 0 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Fetch
-- Copyright : (c) David Himmelstrup 2005
-- Duncan Coutts 2011
-- License : BSD-like
--
-- Maintainer : cabal-devel@gmail.com
-- Stability : provisional
-- Portability : portable
--
-- The cabal fetch command
-----------------------------------------------------------------------------
module Distribution.Client.Fetch (
fetch,
) where
import Distribution.Client.Types
import Distribution.Client.Targets
import Distribution.Client.FetchUtils hiding (fetchPackage)
import Distribution.Client.Dependency
import Distribution.Client.IndexUtils as IndexUtils
( getSourcePackages, getInstalledPackages )
import Distribution.Client.HttpUtils
( configureTransport, HttpTransport(..) )
import qualified Distribution.Client.InstallPlan as InstallPlan
import Distribution.Client.Setup
( GlobalFlags(..), FetchFlags(..) )
import Distribution.Package
( packageId )
import Distribution.Simple.Compiler
( Compiler, compilerInfo, PackageDBStack )
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import Distribution.Simple.Program
( ProgramConfiguration )
import Distribution.Simple.Setup
( fromFlag, flagToMaybe )
import Distribution.Simple.Utils
( die, notice, debug )
import Distribution.System
( Platform )
import Distribution.Text
( display )
import Distribution.Verbosity
( Verbosity )
import Control.Monad
( filterM )
-- ------------------------------------------------------------
-- * The fetch command
-- ------------------------------------------------------------
--TODO:
-- * add fetch -o support
-- * support tarball URLs via ad-hoc download cache (or in -o mode?)
-- * suggest using --no-deps, unpack or fetch -o if deps cannot be satisfied
-- * Port various flags from install:
-- * --upgrade-dependencies
-- * --constraint and --preference
-- * --only-dependencies, but note it conflicts with --no-deps
-- | Fetch a list of packages and their dependencies.
--
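-- For illustration (the exact flag spellings here are assumptions, inferred
-- from the 'FetchFlags' handled below): @cabal fetch --dry-run foo@ only
-- reports what would be downloaded, while @cabal fetch --no-deps foo@ fetches
-- just the named packages without running the dependency solver.
--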
fetch :: Verbosity
-> PackageDBStack
-> [Repo]
-> Compiler
-> Platform
-> ProgramConfiguration
-> GlobalFlags
-> FetchFlags
-> [UserTarget]
-> IO ()
fetch verbosity _ _ _ _ _ _ _ [] =
notice verbosity "No packages requested. Nothing to do."
fetch verbosity packageDBs repos comp platform conf
globalFlags fetchFlags userTargets = do
mapM_ checkTarget userTargets
installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf
sourcePkgDb <- getSourcePackages verbosity repos
transport <- configureTransport verbosity (flagToMaybe (globalHttpTransport globalFlags))
pkgSpecifiers <- resolveUserTargets transport verbosity
(fromFlag $ globalWorldFile globalFlags)
(packageIndex sourcePkgDb)
userTargets
pkgs <- planPackages
verbosity comp platform fetchFlags
installedPkgIndex sourcePkgDb pkgSpecifiers
pkgs' <- filterM (fmap not . isFetched . packageSource) pkgs
if null pkgs'
--TODO: when we add support for remote tarballs then this message
-- will need to be changed because for remote tarballs we fetch them
-- at the earlier phase.
then notice verbosity $ "No packages need to be fetched. "
++ "All the requested packages are already local "
++ "or cached locally."
else if dryRun
then notice verbosity $ unlines $
"The following packages would be fetched:"
: map (display . packageId) pkgs'
else mapM_ (fetchPackage transport verbosity . packageSource) pkgs'
where
dryRun = fromFlag (fetchDryRun fetchFlags)
planPackages :: Verbosity
-> Compiler
-> Platform
-> FetchFlags
-> InstalledPackageIndex
-> SourcePackageDb
-> [PackageSpecifier SourcePackage]
-> IO [SourcePackage]
planPackages verbosity comp platform fetchFlags
installedPkgIndex sourcePkgDb pkgSpecifiers
| includeDependencies = do
solver <- chooseSolver verbosity
(fromFlag (fetchSolver fetchFlags)) (compilerInfo comp)
notice verbosity "Resolving dependencies..."
installPlan <- foldProgress logMsg die return $
resolveDependencies
platform (compilerInfo comp)
solver
resolverParams
-- The packages we want to fetch are those packages the 'InstallPlan'
-- that are in the 'InstallPlan.Configured' state.
return
[ pkg
| (InstallPlan.Configured (InstallPlan.ConfiguredPackage pkg _ _ _))
<- InstallPlan.toList installPlan ]
| otherwise =
either (die . unlines . map show) return $
resolveWithoutDependencies resolverParams
where
resolverParams =
setMaxBackjumps (if maxBackjumps < 0 then Nothing
else Just maxBackjumps)
. setIndependentGoals independentGoals
. setReorderGoals reorderGoals
. setShadowPkgs shadowPkgs
. setStrongFlags strongFlags
-- Reinstall the targets given on the command line so that the dep
-- resolver will decide that they need fetching, even if they're
-- already installed. Since we want to get the source packages of
-- things we might have installed (but not have the sources for).
. reinstallTargets
$ standardInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers
includeDependencies = fromFlag (fetchDeps fetchFlags)
logMsg message rest = debug verbosity message >> rest
reorderGoals = fromFlag (fetchReorderGoals fetchFlags)
independentGoals = fromFlag (fetchIndependentGoals fetchFlags)
shadowPkgs = fromFlag (fetchShadowPkgs fetchFlags)
strongFlags = fromFlag (fetchStrongFlags fetchFlags)
maxBackjumps = fromFlag (fetchMaxBackjumps fetchFlags)
checkTarget :: UserTarget -> IO ()
checkTarget target = case target of
UserTargetRemoteTarball _uri
-> die $ "The 'fetch' command does not yet support remote tarballs. "
++ "In the meantime you can use the 'unpack' commands."
_ -> return ()
fetchPackage :: HttpTransport -> Verbosity -> PackageLocation a -> IO ()
fetchPackage transport verbosity pkgsrc = case pkgsrc of
LocalUnpackedPackage _dir -> return ()
LocalTarballPackage _file -> return ()
RemoteTarballPackage _uri _ ->
die $ "The 'fetch' command does not yet support remote tarballs. "
++ "In the meantime you can use the 'unpack' commands."
RepoTarballPackage repo pkgid _ -> do
_ <- fetchRepoTarball transport verbosity repo pkgid
return ()
| repo_name: fugyk/cabal | path: cabal-install/Distribution/Client/Fetch.hs | license: bsd-3-clause | size: 7,079 | n_ast_errors: 0 | ast_max_depth: 16 | n_whitespaces: 1,865 | n_ast_nodes: 1,185 | n_ast_terminals: 623 | n_ast_nonterminals: 562 | loc: 126 | cycloplexity: 4 |
-- | Find the K'th element of a list
--
-- > elementAt [1,2,3] 2 == 2
-- > elementAt "haskell" 5 == 'e'
--
module Main where
main :: IO()
main = undefined
elementAt :: [a] -> Int -> a
elementAt [] _ = error "Usage: elementAt xs n where xs is non-empty and n > 0."
elementAt ls n
| n > length ls = error "Out of bounds, try a larger list or a smaller number, exiting..."
| n == 1 = head ls
| otherwise = elementAt (drop 1 ls) (n - 1)
| repo_name: Jaso-N7/H99-solutions | path: 1-10/3.hs | license: bsd-3-clause | size: 453 | n_ast_errors: 0 | ast_max_depth: 9 | n_whitespaces: 117 | n_ast_nodes: 126 | n_ast_terminals: 64 | n_ast_nonterminals: 62 | loc: 9 | cycloplexity: 1 |
{-# language CPP #-}
-- | = Name
--
-- VK_ARM_rasterization_order_attachment_access - device extension
--
-- == VK_ARM_rasterization_order_attachment_access
--
-- [__Name String__]
-- @VK_ARM_rasterization_order_attachment_access@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 343
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- - Requires @VK_KHR_get_physical_device_properties2@
--
-- [__Contact__]
--
-- - Jan-Harald Fredriksen
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_ARM_rasterization_order_attachment_access] @janharaldfredriksen-arm%0A<<Here describe the issue or question you have about the VK_ARM_rasterization_order_attachment_access extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2021-11-12
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Contributors__]
--
-- - Tobias Hector, AMD
--
-- - Jan-Harald Fredriksen, Arm
--
-- == Description
--
-- Renderpasses, and specifically
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#synchronization-pipeline-barriers-subpass-self-dependencies subpass self-dependencies>
-- enable much of the same functionality as the framebuffer fetch and pixel
-- local storage extensions did for OpenGL ES. But certain techniques such
-- as programmable blending are awkward or impractical to implement with
-- these alone, in part because a self-dependency is required every time a
-- fragment will read a value at a given sample coordinate.
--
-- This extension extends the mechanism of input attachments to allow
-- access to framebuffer attachments when used as both input and color, or
-- depth\/stencil, attachments from one fragment to the next, in
-- rasterization order, without explicit synchronization.
--
-- See
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#renderpass-feedbackloop renderpass feedback loops>
-- for more information.
--
-- == New Structures
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2',
-- 'Vulkan.Core10.Device.DeviceCreateInfo':
--
-- - 'PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM'
--
-- == New Enums
--
-- - 'Vulkan.Core10.Enums.PipelineColorBlendStateCreateFlagBits.PipelineColorBlendStateCreateFlagBits'
--
-- - 'Vulkan.Core10.Enums.PipelineDepthStencilStateCreateFlagBits.PipelineDepthStencilStateCreateFlagBits'
--
-- == New Enum Constants
--
-- - 'ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME'
--
-- - 'ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION'
--
-- - Extending
-- 'Vulkan.Core10.Enums.PipelineColorBlendStateCreateFlagBits.PipelineColorBlendStateCreateFlagBits':
--
-- - 'Vulkan.Core10.Enums.PipelineColorBlendStateCreateFlagBits.PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM'
--
-- - Extending
-- 'Vulkan.Core10.Enums.PipelineDepthStencilStateCreateFlagBits.PipelineDepthStencilStateCreateFlagBits':
--
-- - 'Vulkan.Core10.Enums.PipelineDepthStencilStateCreateFlagBits.PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM'
--
-- - 'Vulkan.Core10.Enums.PipelineDepthStencilStateCreateFlagBits.PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM'
--
-- - Extending
-- 'Vulkan.Core10.Enums.SubpassDescriptionFlagBits.SubpassDescriptionFlagBits':
--
-- - 'Vulkan.Core10.Enums.SubpassDescriptionFlagBits.SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM'
--
-- - 'Vulkan.Core10.Enums.SubpassDescriptionFlagBits.SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM'
--
-- - 'Vulkan.Core10.Enums.SubpassDescriptionFlagBits.SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM'
--
-- == Issues
--
-- 1) Is there any interaction with the @VK_KHR_dynamic_rendering@
-- extension?
--
-- No. This extension only affects reads from input attachments. Render
-- pass instances begun with
-- 'Vulkan.Extensions.VK_KHR_dynamic_rendering.cmdBeginRenderingKHR' do not
-- have input attachments and a different mechanism will be needed to
-- provide similar functionality in this case.
--
-- == Examples
--
-- None.
--
-- == Version History
--
-- - Revision 1, 2021-11-12 (Jan-Harald Fredriksen)
--
-- - Initial draft
--
-- == See Also
--
-- 'PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM',
-- 'Vulkan.Core10.Enums.PipelineColorBlendStateCreateFlagBits.PipelineColorBlendStateCreateFlagBits',
-- 'Vulkan.Core10.Enums.PipelineDepthStencilStateCreateFlagBits.PipelineDepthStencilStateCreateFlagBits'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_ARM_rasterization_order_attachment_access Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_ARM_rasterization_order_attachment_access ( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM(..)
, ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION
, pattern ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION
, ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME
, pattern ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME
) where
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import Foreign.Ptr (Ptr)
import Data.Kind (Type)
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM))
-- | VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM -
-- Structure describing whether rasterization order attachment access can
-- be supported by an implementation
--
-- = Members
--
-- The members of the
-- 'PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM' structure
-- describe the following features:
--
-- = Description
--
-- If the 'PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM'
-- structure is included in the @pNext@ chain of
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2',
-- it is filled with values indicating whether the feature is supported.
-- 'PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM' /can/ also
-- be used in the @pNext@ chain of 'Vulkan.Core10.Device.DeviceCreateInfo'
-- to enable features.
--
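-- A minimal construction sketch (the field values here are chosen purely for
-- illustration), to be placed in the @pNext@ chain as described above:
--
-- > PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM
-- >   { rasterizationOrderColorAttachmentAccess   = True
-- >   , rasterizationOrderDepthAttachmentAccess   = False
-- >   , rasterizationOrderStencilAttachmentAccess = False
-- >   }
--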
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_ARM_rasterization_order_attachment_access VK_ARM_rasterization_order_attachment_access>,
-- 'Vulkan.Core10.FundamentalTypes.Bool32',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM
{ -- | #features-rasterizationOrderColorAttachmentAccess#
-- @rasterizationOrderColorAttachmentAccess@ indicates that rasterization
-- order access to color and input attachments is supported by the
-- implementation.
rasterizationOrderColorAttachmentAccess :: Bool
, -- | #features-rasterizationOrderDepthAttachmentAccess#
-- @rasterizationOrderDepthAttachmentAccess@ indicates that rasterization
-- order access to the depth aspect of depth\/stencil and input attachments
-- is supported by the implementation.
rasterizationOrderDepthAttachmentAccess :: Bool
, -- | #features-rasterizationOrderStencilAttachmentAccess#
-- @rasterizationOrderStencilAttachmentAccess@ indicates that rasterization
-- order access to the stencil aspect of depth\/stencil and input
-- attachments is supported by the implementation.
rasterizationOrderStencilAttachmentAccess :: Bool
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM)
#endif
deriving instance Show PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM
instance ToCStruct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM where
withCStruct x f = allocaBytes 32 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (rasterizationOrderColorAttachmentAccess))
poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (rasterizationOrderDepthAttachmentAccess))
poke ((p `plusPtr` 24 :: Ptr Bool32)) (boolToBool32 (rasterizationOrderStencilAttachmentAccess))
f
cStructSize = 32
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 20 :: Ptr Bool32)) (boolToBool32 (zero))
poke ((p `plusPtr` 24 :: Ptr Bool32)) (boolToBool32 (zero))
f
instance FromCStruct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM where
peekCStruct p = do
rasterizationOrderColorAttachmentAccess <- peek @Bool32 ((p `plusPtr` 16 :: Ptr Bool32))
rasterizationOrderDepthAttachmentAccess <- peek @Bool32 ((p `plusPtr` 20 :: Ptr Bool32))
rasterizationOrderStencilAttachmentAccess <- peek @Bool32 ((p `plusPtr` 24 :: Ptr Bool32))
pure $ PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM
(bool32ToBool rasterizationOrderColorAttachmentAccess) (bool32ToBool rasterizationOrderDepthAttachmentAccess) (bool32ToBool rasterizationOrderStencilAttachmentAccess)
instance Storable PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM where
sizeOf ~_ = 32
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM where
zero = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM
zero
zero
zero
type ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION"
pattern ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION :: forall a . Integral a => a
pattern ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION = 1
type ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME = "VK_ARM_rasterization_order_attachment_access"
-- No documentation found for TopLevel "VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME"
pattern ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME = "VK_ARM_rasterization_order_attachment_access"
|
expipiplus1/vulkan
|
src/Vulkan/Extensions/VK_ARM_rasterization_order_attachment_access.hs
|
bsd-3-clause
| 12,452
| 0
| 14
| 1,736
| 1,254
| 782
| 472
| -1
| -1
|
module Rewriting.SRS.Steps where
import Rewriting.SRS.Step
import Rewriting.SRS.Raw
import Autolib.TES.Rule
import Autolib.Symbol
import Autolib.Reporter
import Autolib.ToDoc
import Data.List ( inits, tails )
import Data.Maybe
-- | list all possible rewrite steps
-- starting from given term
steps :: ( Symbol c )
=> SRS c
-> [c]
-> [ Step c ]
steps rs w = do
( p, rest ) <- zip [0..] $ tails w
( k, r ) <- zip [ 0 .. ] $ regeln rs
let ( pre, post ) = splitAt ( length $ lhs r ) rest
guard $ lhs r == pre
return $ Step
{ rule_number = k
, position = p
}
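-- Worked example: for an SRS whose only rule is  ab -> ba  (rule number 0)
-- and the input word "aab", 'steps' yields exactly one step,
--     Step { rule_number = 0, position = 1 },
-- because the left-hand side "ab" matches only at position 1 of "aab".
-- (The rule and word are made up for illustration only.)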
-- | execute one rewrite step
exec :: ( Symbol c )
=> SRS c
-> [ c ]
-> Step c
-> Reporter [ c ]
exec srs w step = do
    inform $ vcat
        [ text "Applying the rewrite step"
        , nest 4 $ toDoc step
        , text "to the word"
        , nest 4 $ toDoc w
        ]
    let k = rule_number step
    inform $ text "the rule number" <+> toDoc k
    rule <- if k < length ( regeln srs )
        then do
            let rule = regeln srs !! k
            inform $ text "is" <+> toDoc rule
            return rule
        else reject $ text "does not exist."
    let p = position step
        ( pre, midpost ) = splitAt p w
    inform $ text "the subword at position" <+> toDoc p
    if p > length w
        then reject $ text "does not exist"
        else inform $ text "is" <+> toDoc midpost
    let ( mid, post ) = splitAt ( length $ lhs rule ) midpost
    assert ( mid == lhs rule )
        $ text "is the left-hand side of the rule a prefix of the subword at this position?"
    inform $ text "the suffix is" <+> toDoc post
    let res = pre ++ rhs rule ++ post
    inform $ text "the resulting word is"
        $$ ( nest 4 $ toDoc res )
    return res
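-- Worked example (continuing the made-up rule  ab -> ba  from above):
-- executing  Step { rule_number = 0, position = 1 }  on the word "aab"
-- splits it as "a" ++ "ab" ++ "" and produces "a" ++ "ba" ++ "" = "aba".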
|
Erdwolf/autotool-bonn
|
src/Rewriting/SRS/Steps.hs
|
gpl-2.0
| 1,850
| 0
| 15
| 648
| 637
| 311
| 326
| 54
| 3
|
{- Problem 39: Integer right triangles
If p is the perimeter of a right angle triangle with integral length sides,
{a,b,c}, there are exactly three solutions for p = 120.
{20,48,52}, {24,45,51}, {30,40,50}
For which value of p ≤ 1000, is the number of solutions maximised?
-}
module Problem39 where
import PythagoreanTriples (triplesForPerimeter)
maxTriangles ts1 ts2 =
if length ts1 > length ts2
then ts1
else ts2
mostTriangles = foldr1 maxTriangles [triplesForPerimeter p | p<-[12,14..1000]]
solve = sum $ head mostTriangles
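-- Notes (for clarity; 'triplesForPerimeter' is assumed to return, for a given
-- perimeter, the list of integral right-triangle side triples with that
-- perimeter): the search starts at 12 because {3,4,5} is the smallest such
-- triangle, and steps by 2 because the perimeter of an integral right
-- triangle is always even (a and b cannot both be odd, so a + b + c is even).
-- 'solve' then sums one triple of the best perimeter, which is just that
-- perimeter itself.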
|
ajsmith/project-euler-solutions
|
src/Problem39.hs
|
gpl-2.0
| 546
| 0
| 9
| 98
| 85
| 46
| 39
| 8
| 2
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.EC2.DescribeSpotFleetRequestHistory
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Describes the events for the specified Spot fleet request during the
-- specified time.
--
-- Spot fleet events are delayed by up to 30 seconds before they can be
-- described. This ensures that you can query by the last evaluated time
-- and not miss a recorded event.
--
-- /See:/ <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeSpotFleetRequestHistory.html AWS API Reference> for DescribeSpotFleetRequestHistory.
module Network.AWS.EC2.DescribeSpotFleetRequestHistory
(
-- * Creating a Request
describeSpotFleetRequestHistory
, DescribeSpotFleetRequestHistory
-- * Request Lenses
, dsfrhNextToken
, dsfrhEventType
, dsfrhDryRun
, dsfrhMaxResults
, dsfrhSpotFleetRequestId
, dsfrhStartTime
-- * Destructuring the Response
, describeSpotFleetRequestHistoryResponse
, DescribeSpotFleetRequestHistoryResponse
-- * Response Lenses
, dsfrhrsNextToken
, dsfrhrsResponseStatus
, dsfrhrsSpotFleetRequestId
, dsfrhrsStartTime
, dsfrhrsLastEvaluatedTime
, dsfrhrsHistoryRecords
) where
import Network.AWS.EC2.Types
import Network.AWS.EC2.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | Contains the parameters for DescribeSpotFleetRequestHistory.
--
-- /See:/ 'describeSpotFleetRequestHistory' smart constructor.
data DescribeSpotFleetRequestHistory = DescribeSpotFleetRequestHistory'
{ _dsfrhNextToken :: !(Maybe Text)
, _dsfrhEventType :: !(Maybe EventType)
, _dsfrhDryRun :: !(Maybe Bool)
, _dsfrhMaxResults :: !(Maybe Int)
, _dsfrhSpotFleetRequestId :: !Text
, _dsfrhStartTime :: !ISO8601
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeSpotFleetRequestHistory' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dsfrhNextToken'
--
-- * 'dsfrhEventType'
--
-- * 'dsfrhDryRun'
--
-- * 'dsfrhMaxResults'
--
-- * 'dsfrhSpotFleetRequestId'
--
-- * 'dsfrhStartTime'
describeSpotFleetRequestHistory
:: Text -- ^ 'dsfrhSpotFleetRequestId'
-> UTCTime -- ^ 'dsfrhStartTime'
-> DescribeSpotFleetRequestHistory
describeSpotFleetRequestHistory pSpotFleetRequestId_ pStartTime_ =
DescribeSpotFleetRequestHistory'
{ _dsfrhNextToken = Nothing
, _dsfrhEventType = Nothing
, _dsfrhDryRun = Nothing
, _dsfrhMaxResults = Nothing
, _dsfrhSpotFleetRequestId = pSpotFleetRequestId_
, _dsfrhStartTime = _Time # pStartTime_
}
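-- Illustrative sketch (not part of the generated module; the operators '&'
-- and '?~' are assumed to be in scope, e.g. from "Control.Lens" or the
-- re-exports of "Network.AWS.Prelude"): a request for at most 100 history
-- records of one Spot fleet could be built as
--
-- > describeSpotFleetRequestHistory "sfr-example-id" someStartTime
-- >     & dsfrhMaxResults ?~ 100
--
-- where the request id and @someStartTime@ are placeholders.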
-- | The token for the next set of results.
dsfrhNextToken :: Lens' DescribeSpotFleetRequestHistory (Maybe Text)
dsfrhNextToken = lens _dsfrhNextToken (\ s a -> s{_dsfrhNextToken = a});
-- | The type of events to describe. By default, all events are described.
dsfrhEventType :: Lens' DescribeSpotFleetRequestHistory (Maybe EventType)
dsfrhEventType = lens _dsfrhEventType (\ s a -> s{_dsfrhEventType = a});
-- | Checks whether you have the required permissions for the action, without
-- actually making the request, and provides an error response. If you have
-- the required permissions, the error response is 'DryRunOperation'.
-- Otherwise, it is 'UnauthorizedOperation'.
dsfrhDryRun :: Lens' DescribeSpotFleetRequestHistory (Maybe Bool)
dsfrhDryRun = lens _dsfrhDryRun (\ s a -> s{_dsfrhDryRun = a});
-- | The maximum number of results to return in a single call. Specify a
-- value between 1 and 1000. The default value is 1000. To retrieve the
-- remaining results, make another call with the returned 'NextToken'
-- value.
dsfrhMaxResults :: Lens' DescribeSpotFleetRequestHistory (Maybe Int)
dsfrhMaxResults = lens _dsfrhMaxResults (\ s a -> s{_dsfrhMaxResults = a});
-- | The ID of the Spot fleet request.
dsfrhSpotFleetRequestId :: Lens' DescribeSpotFleetRequestHistory Text
dsfrhSpotFleetRequestId = lens _dsfrhSpotFleetRequestId (\ s a -> s{_dsfrhSpotFleetRequestId = a});
-- | The starting date and time for the events, in UTC format (for example,
-- /YYYY/-/MM/-/DD/T/HH/:/MM/:/SS/Z).
dsfrhStartTime :: Lens' DescribeSpotFleetRequestHistory UTCTime
dsfrhStartTime = lens _dsfrhStartTime (\ s a -> s{_dsfrhStartTime = a}) . _Time;
instance AWSRequest DescribeSpotFleetRequestHistory
where
type Rs DescribeSpotFleetRequestHistory =
DescribeSpotFleetRequestHistoryResponse
request = postQuery eC2
response
= receiveXML
(\ s h x ->
DescribeSpotFleetRequestHistoryResponse' <$>
(x .@? "nextToken") <*> (pure (fromEnum s)) <*>
(x .@ "spotFleetRequestId")
<*> (x .@ "startTime")
<*> (x .@ "lastEvaluatedTime")
<*>
(x .@? "historyRecordSet" .!@ mempty >>=
parseXMLList "item"))
instance ToHeaders DescribeSpotFleetRequestHistory
where
toHeaders = const mempty
instance ToPath DescribeSpotFleetRequestHistory where
toPath = const "/"
instance ToQuery DescribeSpotFleetRequestHistory
where
toQuery DescribeSpotFleetRequestHistory'{..}
= mconcat
["Action" =:
("DescribeSpotFleetRequestHistory" :: ByteString),
"Version" =: ("2015-04-15" :: ByteString),
"NextToken" =: _dsfrhNextToken,
"EventType" =: _dsfrhEventType,
"DryRun" =: _dsfrhDryRun,
"MaxResults" =: _dsfrhMaxResults,
"SpotFleetRequestId" =: _dsfrhSpotFleetRequestId,
"StartTime" =: _dsfrhStartTime]
-- | Contains the output of DescribeSpotFleetRequestHistory.
--
-- /See:/ 'describeSpotFleetRequestHistoryResponse' smart constructor.
data DescribeSpotFleetRequestHistoryResponse = DescribeSpotFleetRequestHistoryResponse'
{ _dsfrhrsNextToken :: !(Maybe Text)
, _dsfrhrsResponseStatus :: !Int
, _dsfrhrsSpotFleetRequestId :: !Text
, _dsfrhrsStartTime :: !ISO8601
, _dsfrhrsLastEvaluatedTime :: !ISO8601
, _dsfrhrsHistoryRecords :: ![HistoryRecord]
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeSpotFleetRequestHistoryResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dsfrhrsNextToken'
--
-- * 'dsfrhrsResponseStatus'
--
-- * 'dsfrhrsSpotFleetRequestId'
--
-- * 'dsfrhrsStartTime'
--
-- * 'dsfrhrsLastEvaluatedTime'
--
-- * 'dsfrhrsHistoryRecords'
describeSpotFleetRequestHistoryResponse
:: Int -- ^ 'dsfrhrsResponseStatus'
-> Text -- ^ 'dsfrhrsSpotFleetRequestId'
-> UTCTime -- ^ 'dsfrhrsStartTime'
-> UTCTime -- ^ 'dsfrhrsLastEvaluatedTime'
-> DescribeSpotFleetRequestHistoryResponse
describeSpotFleetRequestHistoryResponse pResponseStatus_ pSpotFleetRequestId_ pStartTime_ pLastEvaluatedTime_ =
DescribeSpotFleetRequestHistoryResponse'
{ _dsfrhrsNextToken = Nothing
, _dsfrhrsResponseStatus = pResponseStatus_
, _dsfrhrsSpotFleetRequestId = pSpotFleetRequestId_
, _dsfrhrsStartTime = _Time # pStartTime_
, _dsfrhrsLastEvaluatedTime = _Time # pLastEvaluatedTime_
, _dsfrhrsHistoryRecords = mempty
}
-- | The token required to retrieve the next set of results. This value is
-- 'null' when there are no more results to return.
dsfrhrsNextToken :: Lens' DescribeSpotFleetRequestHistoryResponse (Maybe Text)
dsfrhrsNextToken = lens _dsfrhrsNextToken (\ s a -> s{_dsfrhrsNextToken = a});
-- | The response status code.
dsfrhrsResponseStatus :: Lens' DescribeSpotFleetRequestHistoryResponse Int
dsfrhrsResponseStatus = lens _dsfrhrsResponseStatus (\ s a -> s{_dsfrhrsResponseStatus = a});
-- | The ID of the Spot fleet request.
dsfrhrsSpotFleetRequestId :: Lens' DescribeSpotFleetRequestHistoryResponse Text
dsfrhrsSpotFleetRequestId = lens _dsfrhrsSpotFleetRequestId (\ s a -> s{_dsfrhrsSpotFleetRequestId = a});
-- | The starting date and time for the events, in UTC format (for example,
-- /YYYY/-/MM/-/DD/T/HH/:/MM/:/SS/Z).
dsfrhrsStartTime :: Lens' DescribeSpotFleetRequestHistoryResponse UTCTime
dsfrhrsStartTime = lens _dsfrhrsStartTime (\ s a -> s{_dsfrhrsStartTime = a}) . _Time;
-- | The last date and time for the events, in UTC format (for example,
-- /YYYY/-/MM/-/DD/T/HH/:/MM/:/SS/Z). All records up to this time were
-- retrieved.
--
-- If 'nextToken' indicates that there are more results, this value is not
-- present.
dsfrhrsLastEvaluatedTime :: Lens' DescribeSpotFleetRequestHistoryResponse UTCTime
dsfrhrsLastEvaluatedTime = lens _dsfrhrsLastEvaluatedTime (\ s a -> s{_dsfrhrsLastEvaluatedTime = a}) . _Time;
-- | Information about the events in the history of the Spot fleet request.
dsfrhrsHistoryRecords :: Lens' DescribeSpotFleetRequestHistoryResponse [HistoryRecord]
dsfrhrsHistoryRecords = lens _dsfrhrsHistoryRecords (\ s a -> s{_dsfrhrsHistoryRecords = a}) . _Coerce;
|
fmapfmapfmap/amazonka
|
amazonka-ec2/gen/Network/AWS/EC2/DescribeSpotFleetRequestHistory.hs
|
mpl-2.0
| 9,785
| 0
| 17
| 1,906
| 1,292
| 772
| 520
| 152
| 1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
module HERMIT.Shell.Externals where
import Control.Arrow
import Control.Monad (liftM)
import Control.Monad.Reader (asks)
import Control.Monad.State (gets, modify)
import Data.Dynamic (fromDynamic)
import Data.List (intercalate)
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import HERMIT.External
import HERMIT.Kernel
import HERMIT.Kure
import HERMIT.Lemma
import HERMIT.Parser
import HERMIT.Plugin.Renderer
import HERMIT.Plugin.Types
import HERMIT.PrettyPrinter.Common
import HERMIT.Dictionary.Reasoning
import HERMIT.Shell.Dictionary
import HERMIT.Shell.KernelEffect
import HERMIT.Shell.Proof as Proof
import HERMIT.Shell.ScriptToRewrite
import HERMIT.Shell.ShellEffect
import HERMIT.Shell.Types
----------------------------------------------------------------------------------
shell_externals :: [External]
shell_externals = map (.+ Shell)
[ external "resume" Resume -- HERMIT Kernel Exit
[ "stops HERMIT; resumes compile" ]
, external "abort" Abort -- UNIX Exit
[ "hard UNIX-style exit; does not return to GHC; does not save" ]
, external "continue" Continue -- Shell Exit, but not HERMIT
[ "exits shell; resumes HERMIT" ]
, external "gc" Delete
[ "garbage-collect a given AST" ]
, external "gc" (CLSModify gc)
[ "garbage-collect all ASTs except for the initial and current AST" ]
, external "display" (CLSModify $ printWindowAlways Nothing)
[ "redisplays current state" ]
, external "up" (Direction U)
[ "move to the parent node"]
, external "navigate" (CLSModify $ modify $ \ st -> st { cl_nav = True })
[ "switch to navigate mode" ]
, external "command-line" (CLSModify $ modify $ \ st -> st { cl_nav = False })
[ "switch to command line mode" ]
, external "set-window" (CLSModify $ setWindow >> printWindow Nothing)
[ "fix the window to the current focus" ]
, external "top" (Direction T)
[ "move to root of current scope" ]
, external "log" (Inquiry showDerivationTree)
[ "go back in the derivation" ] .+ VersionControl
, external "back" (CLSModify $ versionCmd Back)
[ "go back in the derivation" ] .+ VersionControl
, external "step" (CLSModify $ versionCmd Step)
[ "step forward in the derivation" ] .+ VersionControl
, external "goto" (CLSModify . versionCmd . Goto)
[ "goto a specific step in the derivation" ] .+ VersionControl
, external "goto" (CLSModify . versionCmd . GotoTag)
[ "goto a specific step in the derivation by tag name" ] .+ VersionControl
, external "tag" (CLSModify . versionCmd . Tag)
[ "name the current step in the derivation" ] .+ VersionControl
, external "diff" Diff
[ "show diff of two ASTs" ] .+ VersionControl
, external "set-pp-diffonly" (\ bStr -> CLSModify $
case reads bStr of
[(b,"")] -> modify (\st -> st { cl_diffonly = b }) >> printWindow Nothing
_ -> fail "valid arguments are True and False" )
[ "set-pp-diffonly <True|False>; False by default"
, "print diffs rather than full code after a rewrite" ]
, external "set-fail-hard" (\ bStr -> CLSModify $
case reads bStr of
[(b,"")] -> modify $ \ st -> st { cl_failhard = b }
_ -> fail "valid arguments are True and False" )
[ "set-fail-hard <True|False>; False by default"
, "any rewrite failure causes compilation to abort" ]
, external "set-auto-corelint" (\ bStr -> CLSModify $
case reads bStr of
[(b,"")] -> modify $ flip setCoreLint b
_ -> fail "valid arguments are True and False" )
[ "set-auto-corelint <True|False>; False by default"
, "run core lint type-checker after every rewrite, reverting on failure" ]
, external "set-pp" (\ name -> CLSModify $
case M.lookup name pp_dictionary of
Nothing -> fail $ "List of Pretty Printers: " ++ intercalate ", " (M.keys pp_dictionary)
Just pp -> do modify $ \ st -> setPrettyOpts (setPretty st pp) (cl_pretty_opts st) -- careful to preserve the current options
printWindow Nothing)
[ "set the pretty printer"
, "use 'set-pp ls' to list available pretty printers" ]
, external "set-pp-renderer" (PluginComp . changeRenderer)
[ "set the output renderer mode"]
, external "set-pp-renderer" showRenderers
[ "set the output renderer mode"]
, -- DEPRECATED - this dump behavior uses the current pretty printer selected in the shell
external "dump" (\pp fp r w -> CLSModify (dump fp pp r w))
[ "dump <filename> <renderer> <width> - DEPRECATED"]
, external "dump" (\fp pp r w -> CLSModify (dump fp pp r w))
[ "dump <filename> <pretty-printer> <renderer> <width>"]
, external "dump-lemma" ((\nm fp pp r w -> getLemmaByNameT nm >>> liftPrettyH (pOptions pp) (ppLemmaT pp nm) >>> dumpT fp pp r w) :: LemmaName -> FilePath -> PrettyPrinter -> String -> Int -> TransformH LCoreTC ())
[ "Dump named lemma to a file."
, "dump-lemma <lemma-name> <filename> <pretty-printer> <renderer> <width>" ]
, external "dump-lemma" ((\pp nm fp r w -> getLemmaByNameT nm >>> liftPrettyH (pOptions pp) (ppLemmaT pp nm) >>> dumpT fp pp r w) :: PrettyPrinter -> LemmaName -> FilePath -> String -> Int -> TransformH LCoreTC ())
[ "Dump named lemma to a file."
, "dump-lemma <lemma-name> <filename> <pretty-printer> <renderer> <width>" ]
, external "set-pp-width" (\ w -> CLSModify $ do
modify $ \ st -> setPrettyOpts st (updateWidthOption w (cl_pretty_opts st))
printWindow Nothing)
["set the width of the screen"]
, external "set-pp-type" (\ str -> CLSModify $
case reads str :: [(ShowOption,String)] of
[(opt,"")] -> do modify $ \ st -> setPrettyOpts st (updateTypeShowOption opt (cl_pretty_opts st))
printWindow Nothing
_ -> fail "valid arguments are Show, Abstract, and Omit")
["set how to show expression-level types (Show|Abstact|Omit)"]
, external "set-pp-coercion" (\ str -> CLSModify $
case reads str :: [(ShowOption,String)] of
[(opt,"")] -> do modify $ \ st -> setPrettyOpts st (updateCoShowOption opt (cl_pretty_opts st))
printWindow Nothing
_ -> fail "valid arguments are Show, Abstract, and Omit")
["set how to show coercions (Show|Abstact|Omit)"]
, external "set-pp-uniques" (\ str -> CLSModify $
case reads str of
[(b,"")] -> do modify $ \ st -> setPrettyOpts st ((cl_pretty_opts st) { po_showUniques = b })
printWindow Nothing
_ -> fail "valid arguments are True and False")
["set whether uniques are printed with variable names"]
, external "{" BeginScope
["push current lens onto a stack"] -- tag as internal
, external "}" EndScope
["pop a lens off a stack"] -- tag as internal
, external "load" LoadFile
["load <script-name> <file-name> : load a HERMIT script from a file and save it under the specified name."]
, external "load-and-run" loadAndRun
["load-and-run <file-name> : load a HERMIT script from a file and run it immediately."]
, external "save" (SaveFile False)
["save <filename> : save the current complete derivation into a file."]
, external "save-verbose" (SaveFile True)
["save-verbose <filename> : save the current complete derivation into a file,"
,"including output of each command as a comment."]
, external "save-script" SaveScript
["save-script <filename> <script name> : save a loaded or manually defined script to a file." ]
, external "load-as-rewrite" (\ rewriteName fileName -> SeqMeta [LoadFile rewriteName fileName, ScriptToRewrite rewriteName rewriteName])
["load-as-rewrite <rewrite-name> <filepath> : load a HERMIT script from a file, and convert it to a rewrite."
,"Note that there are significant limitations on the commands the script may contain."] .+ Experiment .+ TODO
, external "script-to-rewrite" ScriptToRewrite
["script-to-rewrite <rewrite-name> <script-name> : create a new rewrite from a pre-loaded (or manually defined) HERMIT script."
,"Note that there are significant limitations on the commands the script may contain."] .+ Experiment .+ TODO
, external "define-script" DefineScript
["Define a new HERMIT script and bind it to a name."
,"Note that any names in the script will not be resolved until the script is *run*."
,"Example usage: define-script \"MyScriptName\" \"any-td beta-reduce ; let-subst ; bash\""]
, external "define-rewrite" (\ name str -> SeqMeta [DefineScript name str, ScriptToRewrite name name])
["Define a new HERMIT rewrite and bind it to a name."
,"Note that this also saves the input script under the same name."
,"Example usage: define-rewrite \"MyRewriteName\" \"let-subst >>> bash\""]
, external "run-script" RunScript
["Run a pre-loaded (or manually defined) HERMIT script."
,"Note that any names in the script will not be resolved until the script is *run*." ]
, external "display-scripts" displayScripts
["Display all loaded scripts."]
, external "stop-script" (CLSModify $ setRunningScript Nothing)
[ "Stop running the current script." ]
--, external "test-rewrites" (testRewrites :: [(ExternalName,RewriteH Core)] -> TransformH Core String)
-- ["Test a given set of rewrites to see if they succeed"] .+ Experiment
, external "possible-rewrites" (testAllT:: CommandLineState -> TransformH LCore String)
["Test all given set of rewrites to see if they succeed"] .+ Experiment
-- TODO: maybe add a "list-scripts" as well that just lists the names of loaded scripts?
] ++ Proof.externals
gc :: CLT IO ()
gc = do
k <- asks pr_kernel
cursor <- gets cl_cursor
asts <- listK k
mapM_ (deleteK k) [ ast | (ast,_,_) <- asts, ast `notElem` [cursor, firstAST] ]
----------------------------------------------------------------------------------
setWindow :: CLT IO ()
setWindow = modify $ \ st ->
let ps = fromMaybe ([],mempty) (M.lookup (cl_cursor st) (cl_foci st))
in st { cl_window = pathStack2Path ps }
showRenderers :: QueryFun ()
showRenderers = message $ "set-renderer " ++ show (map fst shellRenderers)
--------------------------------------------------------
versionCmd :: VersionCmd -> CLT IO ()
versionCmd whereTo = do
k <- asks pr_kernel
all_asts <- listK k
case whereTo of
Goto ast ->
if ast `elem` [ ast' | (ast',_,_) <- all_asts ]
then modify (setCursor ast) >> printWindow Nothing
else fail $ "Cannot find AST #" ++ show ast ++ "."
GotoTag nm -> do
tags <- gets cl_tags
case [ ast | (ast,nms) <- M.toList tags, nm `elem` nms ] of
[] -> fail $ "No tag named: " ++ nm
(ast:_) -> modify (setCursor ast) >> printWindow Nothing
Tag nm -> do
modify $ \st -> st { cl_tags = M.insertWith (++) (cl_cursor st) [nm] (cl_tags st) }
cl_putStrLn $ "Tag: " ++ nm ++ " added."
Step -> do
cursor <- gets cl_cursor
let ns = [ (fromMaybe "unknown" msg, ast) | (ast,msg,Just p) <- all_asts, p == cursor ]
case ns of
[] -> fail "Cannot step forward (no more steps)."
[(cmd,ast)] -> do
cl_putStrLn $ "step : " ++ cmd
modify $ setCursor ast
printWindow Nothing
_ -> fail $ "Cannot step forward (multiple choices), use goto {"
++ intercalate "," (map (show.snd) ns) ++ "}"
Back -> do
cursor <- gets cl_cursor
let ns = [ (fromMaybe "unknown" msg, p) | (ast,msg,Just p) <- all_asts, ast == cursor ]
case ns of
[] -> fail "Cannot step backwards (no more steps)."
[(cmd,ast)] -> do
cl_putStrLn $ "back, unstepping : " ++ cmd
modify $ setCursor ast
printWindow Nothing
_ -> fail "Cannot step backwards (multiple choices, impossible!)."
-------------------------------------------------------------------------------
showDerivationTree :: PluginReader -> CommandLineState -> IO String
showDerivationTree r s = do
let k = pr_kernel r
cursor = cl_cursor s
all_asts <- listK k
let graph = [ (a,[fromMaybe "-- command missing!" b],c) | (c,b,Just a) <- all_asts ]
return $ unlines $ showRefactorTrail graph firstAST cursor
showRefactorTrail :: (Eq a, Show a) => [(a,[String],a)] -> a -> a -> [String]
showRefactorTrail db a me =
case [ (b,c) | (a0,b,c) <- db, a == a0 ] of
[] -> [show' 3 a ++ " " ++ dot]
((b,c):bs) ->
[show' 3 a ++ " " ++ dot ++ (if not (null bs) then "->" else "") ] ++
[" " ++ "| " ++ txt | txt <- b ] ++
showRefactorTrail db c me ++
if null bs
then []
else [] :
showRefactorTrail [ (a',b',c') | (a',b',c') <- db
, not (a == a' && c == c')
] a me
where dot = if a == me then "*" else "o"
show' n x = replicate (n - length (show a)) ' ' ++ show x
-------------------------------------------------------------------------------
displayScripts :: QueryFun ()
displayScripts = Inquiry (const (return . showScripts . cl_scripts))
showScripts :: [(ScriptName,Script)] -> String
showScripts = concatMap (\ (name,script) -> name ++ ": " ++ unparseScript script ++ "\n\n")
-------------------------------------------------------------------------------
testAllT :: CommandLineState -> TransformH LCore String
testAllT st = do
let es = cl_externals st
mbs = map (\d -> (externName d, fromDynamic (externDyn d) :: Maybe RewriteLCoreBox)) es
namedRewrites = [(name ,unbox boxedR) | (name, Just boxedR) <- mbs]
testRewrites False namedRewrites
testRewrites :: Bool -> [(ExternalName, RewriteH LCore)] -> TransformH LCore String
testRewrites debug rewrites =
    if debug
    then let list = mapM (\ (n,r) -> liftM (f n) (testM r)) rewrites
         in liftM unlines list
    else let list     = mapM (\ (n,r) -> liftM (g n) (testM r)) rewrites
             filtered = liftM (filter snd) list
             res      = liftM (map (uncurry f)) filtered
         in liftM unlines res
{-testRewrites rewrites = let list = mapM (\ (n,r) -> liftM (g n) (testM r)) rewrites
filtered = liftM (filter (\ x -> snd x)) list
res = liftM (map (\ (n, b) -> f n b)) filtered
in liftM unlines res
-}
where
f :: ExternalName -> Bool -> String
f x True = x++" would succeed."
f x False = x++" would fail."
g x y = (x,y)
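-- Note on 'testRewrites' (the rewrite name below is only a placeholder):
-- with debug set to False only the rewrites that would succeed are reported,
-- one line each, e.g. "beta-reduce would succeed."; with debug set to True
-- every rewrite is listed, including the "... would fail." lines.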
|
ku-fpg/hermit
|
src/HERMIT/Shell/Externals.hs
|
bsd-2-clause
| 16,050
| 0
| 23
| 4,910
| 3,778
| 1,995
| 1,783
| 259
| 11
|
module WASHExpression where
import Control.Monad
import WASHFlags
import qualified WASHUtil
import WASHData
import WASHOut
code :: FLAGS -> [CodeFrag] -> ShowS
code flags [] = id
code flags (x:xs) = code' flags x . code flags xs
code' :: FLAGS -> CodeFrag -> ShowS
code' flags (HFrag h) =
showString h
code' flags (EFrag e) =
runOut $ element flags e
code' flags (CFrag cnts) =
showChar '(' .
runOut (contents flags [] cnts) .
showChar ')'
code' flags (AFrag attrs) =
showChar '(' .
WASHUtil.itemList (attribute flags) "CGI.empty" " >> " attrs .
showChar ')'
code' flags (VFrag var) =
id
code' flags _ = error "Unknown type: code"
outMode :: Mode -> Out ()
outMode = outShowS . showMode
showMode :: Mode -> ShowS
showMode V = id
showMode S = showString "_T"
showMode F = showString "_S"
element :: FLAGS -> Element -> Out [String]
element flags (Element mode nm ats cnt et) =
do outChar '('
outString "CGI."
outString nm
when (generateBT flags) $ outMode mode
outChar '('
outShowS $ attributes flags ats
rvs <- contents flags [] cnt
outString "))"
return rvs
outRVS :: [String] -> Out ()
outRVS [] = outString "()"
outRVS (x:xs) =
do outChar '('
outString x
mapM_ g xs
outChar ')'
where g x = do { outChar ','; outString x; }
outRVSpat :: [String] -> Out ()
outRVSpat [] = outString "(_)"
outRVSpat xs = outRVS xs
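-- Examples (for clarity): outRVS [] emits "()", outRVS ["x","y"] emits
-- "(x,y)", and outRVSpat [] emits "(_)" so that an empty result tuple still
-- matches as a pattern.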
contents :: FLAGS -> [String] -> [Content] -> Out [String]
contents flags inRVS cts =
case cts of
[] ->
do outString "return"
outRVS inRVS
return inRVS
ct:cts ->
do rvs <- content flags ct
case rvs of
[] ->
case (cts, inRVS) of
([],[]) ->
return []
_ ->
do outString " >> "
contents flags inRVS cts
_ ->
case (cts, inRVS) of
([],[]) ->
return rvs
_ ->
do outString " >>= \\ "
outRVSpat rvs
outString " -> "
contents flags (rvs ++ inRVS) cts
content :: FLAGS -> Content -> Out [String]
content flags (CElement elem) =
element flags elem
content flags (CText txt) =
do text flags txt
return []
content flags (CCode (VFrag var:c)) =
do outShowS $ (showChar '(' . code flags c . showChar ')')
return [var]
content flags (CCode c) =
do outShowS $ (showChar '(' . code flags c . showChar ')')
return []
content flags (CComment cc) =
do outShowS $ (showString "return (const () " . shows cc . showChar ')')
return []
content flags (CReference txt) =
do text flags txt
return []
content flags c =
error $ "Unknown type: content -- " ++ (show c)
text :: FLAGS -> Text -> Out [String]
text flags txt =
do outString "CGI.rawtext"
when (generateBT flags) $ outMode (textMode txt)
outChar ' '
outs (textString txt)
return []
attributes :: FLAGS -> [Attribute] -> ShowS
attributes flags atts =
f atts
where
f [] = id
f (att:atts) =
attribute flags att .
showString " >> " .
f atts
attribute :: FLAGS -> Attribute -> ShowS
attribute flags (Attribute m n v) =
showString "(CGI.attr" .
(if generateBT flags then (attrvalueBT m v) else id) .
showChar ' ' .
shows n .
showString " " .
attrvalue v .
showString ")"
attribute flags (AttrPattern pat) =
showString "( " .
showString pat .
showString " )"
attribute flags a = error $ "Unknown type: attribute -- " ++ (show a)
attrvalue :: AttrValue -> ShowS
attrvalue (AText t) =
shows t
attrvalue (ACode c) =
showString "( " .
showString c .
showString " )"
attrvalue a = error $ "Unknown type: attrvalue -- " ++ (show a)
attrvalueBT :: Mode -> AttrValue -> ShowS
attrvalueBT V _ = id
attrvalueBT m (AText _) = showMode m . showChar 'S'
attrvalueBT m (ACode _) = showMode m . showChar 'D'
attrvalueBT m a = error $ "Unknown type: attrvalueBT -- " ++ (show a)
|
IreneKnapp/Faction
|
libfaction/tests/systemTests/wash2hs/hs/WASHExpression.hs
|
bsd-3-clause
| 3,852
| 20
| 17
| 1,009
| 1,613
| 768
| 845
| 141
| 5
|
module Main where
import GHC (runGhc)
import GHC.Paths (libdir)
import qualified Spec
main :: IO ()
main = do
runGhc (Just libdir) (return ())
Spec.main
|
froozen/dead-code-detection
|
test/Main.hs
|
bsd-3-clause
| 181
| 0
| 10
| 53
| 68
| 37
| 31
| 8
| 1
|
{-# LANGUAGE TemplateHaskellQuotes #-}
-- test the representation of literals and also explicit type annotations
module TH_repE1
where
import Language.Haskell.TH
integralExpr :: ExpQ
integralExpr = [| 42 |]
intExpr :: ExpQ
intExpr = [| 42 :: Int |]
integerExpr :: ExpQ
integerExpr = [| 42 :: Integer |]
charExpr :: ExpQ
charExpr = [| 'x' |]
stringExpr :: ExpQ
stringExpr = [| "A String" |]
fractionalExpr :: ExpQ
fractionalExpr = [| 1.2 |]
floatExpr :: ExpQ
floatExpr = [| 1.2 :: Float |]
doubleExpr :: ExpQ
doubleExpr = [| 1.2 :: Double |]
|
mpickering/ghc-exactprint
|
tests/examples/ghc8/TH_repE1.hs
|
bsd-3-clause
| 551
| 0
| 4
| 103
| 116
| 81
| 35
| -1
| -1
|
{-# LANGUAGE NoMonomorphismRestriction,
ScopedTypeVariables#-}
module ReduceObsidian(reduceKernel) where
import Obsidian
import Data.Word
import Prelude hiding (map,zipWith,sum,replicate,take,drop,iterate)
import Obsidian.Run.CUDA.Exec
blockRed :: Data a => Word32 -> (a -> a -> a) -> SPull a -> BProgram (SPush Block a)
blockRed cutoff f arr
| len arr == cutoff = return $ push $ fold1 f arr
| otherwise = do
let (a1,a2) = halve arr
arr' <- compute (zipWith f a1 a2)
blockRed cutoff f arr'
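-- Note (for clarity): 'blockRed' repeatedly halves the array, combining the
-- two halves element-wise with f, until the length reaches the cutoff, at
-- which point it pushes a fold over the remaining elements. For an array of
-- length 256 and a cutoff of 2 this is 7 halving passes followed by a fold
-- over 2 elements.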
coalesceRed f arr =
do arr' <- compute $ asBlockMap (execThread' . seqReduce f) (coalesce 32 arr)
blockRed 2 f arr'
reduceKernel f arr = asGridMap body arr
where
body arr = execBlock (coalesceRed f arr)
|
aesadde/AccObsBenchmarks
|
Reduce/haskell/src/ReduceObsidian.hs
|
bsd-3-clause
| 750
| 0
| 12
| 166
| 287
| 144
| 143
| 19
| 1
|
module Main where
pasc = iterate expand [1]
where expand lst = let sums = zipWith (+) lst (tail lst)
in 1 : sums ++ [1]
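-- For example: take 4 pasc == [[1],[1,1],[1,2,1],[1,3,3,1]].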
main =
putStr $ (unlines . (map show) . (take 16)) pasc
|
waywardcode/small_programs
|
print_pascaltri/p_pasc2.hs
|
gpl-2.0
| 205
| 0
| 13
| 67
| 99
| 52
| 47
| 6
| 1
|
{- |
Module : $Header$
Description : parsing symbol items
Copyright : (c) Christian Maeder and Uni Bremen 2003
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : experimental
Portability : portable
HasCASL parsable symbol items for structured specs
-}
module HasCASL.SymbItem where
import Common.Id
import Common.Token (colonST)
import Common.Keywords
import Common.Lexer
import Common.AnnoState
import Text.ParserCombinators.Parsec
import HasCASL.ParseTerm
import HasCASL.As
-- * parsers for symbols
-- | parse a (typed) symbol
symb :: AParser st Symb
symb = do
p@(PolyId i tys _) <- parsePolyId
do c <- colonST
sc <- typeScheme p
return $ Symb i (Just $ SymbType sc) $ tokPos c
<|> if null tys then return $ Symb i Nothing $ posOfId i else
fail ("bound type variables for '" ++ showId i "' without type")
-- | parse a mapped symbol
symbMap :: AParser st SymbOrMap
symbMap = do
s <- symb
do f <- asKey mapsTo
optional symbKind
t <- symb
return $ SymbOrMap s (Just t) $ tokPos f
<|> return (SymbOrMap s Nothing nullRange)
-- | parse kind of symbols
symbKind :: AParser st (SymbKind, Token)
symbKind = choice (map ( \ k -> do
q <- pluralKeyword $ drop 3 $ show k
return (k, q)) [SyKop, SyKfun, SyKpred, SyKtype, SyKsort])
<|> do
q <- asKey (classS ++ "es") <|> asKey classS
return (SyKclass, q)
<?> "kind"
-- | parse symbol items
symbItems :: AParser st SymbItems
symbItems = do
s <- symb
return $ SymbItems Implicit [s] [] nullRange
<|> do
(k, p) <- symbKind
(is, ps) <- symbs
return $ SymbItems k is [] $ catRange $ p : ps
symbs :: AParser st ([Symb], [Token])
symbs = do
s <- symb
do c <- anComma `followedWith` symb
(is, ps) <- symbs
return (s : is, c : ps)
<|> return ([s], [])
-- | parse symbol mappings
symbMapItems :: AParser st SymbMapItems
symbMapItems = do
s <- symbMap
return $ SymbMapItems Implicit [s] [] nullRange
<|> do
(k, p) <- symbKind
(is, ps) <- symbMaps
return $ SymbMapItems k is [] $ catRange $ p : ps
symbMaps :: AParser st ([SymbOrMap], [Token])
symbMaps = do
s <- symbMap
do c <- anComma `followedWith` symb
(is, ps) <- symbMaps
return (s : is, c : ps)
<|> return ([s], [])
|
keithodulaigh/Hets
|
HasCASL/SymbItem.hs
|
gpl-2.0
| 2,367
| 0
| 17
| 610
| 815
| 416
| 399
| 63
| 2
|
<?xml version='1.0' encoding='UTF-8' ?>
<!DOCTYPE helpset
PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 1.0//EN"
"http://java.sun.com/products/javahelp/helpset_1_0.dtd">
<?SecureFTP this is data for SecureFTP ?>
<helpset version="1.0">
<!-- title -->
<title>Ajuda do Secure FTP</title>
<!-- maps -->
<maps>
<homeID>intro</homeID>
<mapref location="Map_pt_BR.jhm"/>
</maps>
<!-- views -->
<view>
<name>TOC</name>
<label>Conteúdo</label>
<type>javax.help.TOCView</type>
<data>SecureFTPTOC_pt_BR.xml</data>
</view>
<view>
<name>Index</name>
<label>Índice</label>
<type>javax.help.IndexView</type>
<data>SecureFTPIndex_pt_BR.xml</data>
</view>
<view>
<name>Search</name>
<label>Pesquisa</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch_pt_BR
</data>
</view>
  <presentation default="true">
<size width="700" height="550" />
<location x="100" y="100" />
</presentation>
</helpset>
|
glub/secureftp
|
src/com/glub/secureftp/client/resources/help/SecureFTP_pt_BR.hs
|
apache-2.0
| 1,085
| 89
| 59
| 199
| 434
| 219
| 215
| -1
| -1
|
{-# LANGUAGE DeriveDataTypeable #-}
module Database.HDBC.Statement
(
Statement(..),
SqlError(..),
-- * Re-exported from "Database.HDBC.SqlValue"; this re-exporting is deprecated
nToSql, iToSql, posixToSql, fromSql, safeFromSql, toSql,
SqlValue(..)
)
where
import Data.Dynamic
import Database.HDBC.ColTypes
import Database.HDBC.SqlValue
import Control.Exception
data Statement = Statement
{
{- | Execute the prepared statement, passing in the given positional
parameters (that should take the place of the question marks
in the call to 'prepare').
For non-SELECT queries, the return value is the number of
rows modified, if known. If no rows were modified, you get 0.
If the value is unknown, you get -1. All current HDBC drivers
support this function and should never return -1.
For SELECT queries, you will always get 0.
This function should automatically call finish() to finish the previous
execution, if necessary.
-}
execute :: [SqlValue] -> IO Integer,
{- | Execute the statement as-is, without supplying any
positional parameters. This is intended for statements for
which the results aren't interesting or present (e.g., DDL or
DML commands). If your query contains placeholders, this will
certainly fail; use 'execute' instead. -}
executeRaw :: IO (),
{- | Execute the query with many rows.
The return value is the return value from the final row
as if you had called 'execute' on it.
Due to optimizations that are possible due to different
databases and driver designs, this can often be significantly
faster than using 'execute' multiple times since queries
need to be compiled only once.
This is most useful for non-SELECT statements. -}
executeMany :: [[SqlValue]] -> IO (),
{- | Abort a query in progress -- usually not needed. -}
finish :: IO (),
{- | Fetches one row from the DB. Returns 'Nothing' if there
are no more rows. Will automatically call 'finish' when
the last row is read. -}
fetchRow :: IO (Maybe [SqlValue]),
{- | Returns a list of the column names in the result.
For maximum portability, you should not assume that
information is available until after an 'execute' function
has been run.
Information is returned here directly as returned
by the underlying database layer. Note that different
databases have different rules about capitalization
of return values and about representation of names
of columns that are not simple columns. For this reason,
it is suggested that you treat this information for
display purposes only. Failing that, you should convert
to lower (or upper) case, and use @AS@ clauses for
anything other than simple columns.
A simple getColumnNames implementation could simply
apply @map fst@ to the return value of 'describeResult'.
-}
getColumnNames :: IO [String],
{- | The original query that this 'Statement' was prepared
with. -}
originalQuery :: String,
{- | Obtain information about the columns in the result set.
Must be run only after 'execute'. The String in the result
set is the column name.
You should expect this to be returned in the same manner
as a result from 'Database.HDBC.fetchAllRows''.
All results should be converted to lowercase for you
before you see them.
Please see caveats under 'getColumnNames' for information
on the column name field here.
-}
describeResult :: IO [(String, SqlColDesc)]
}
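{- Illustrative sketch (not part of the original module): a 'Statement' is
   normally obtained from a driver connection via 'Database.HDBC.prepare';
   a typical call sequence then looks like

   > n   <- execute stmt [toSql (42 :: Int), toSql "example"]
   > row <- fetchRow stmt

   where @stmt@ is a placeholder for such a prepared statement and 'execute'
   and 'fetchRow' are the record fields defined above. -}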
{- | The main HDBC exception object. As much information as possible
is passed from the database through to the application through this object.
Errors generated in the Haskell layer will have seNativeError set to -1.
-}
data SqlError = SqlError {seState :: String,
seNativeError :: Int,
seErrorMsg :: String}
deriving (Eq, Show, Read, Typeable)
#if __GLASGOW_HASKELL__ >= 610
--data SqlException
instance Exception SqlError where
{-
toException = SomeException
fromException (SomeException e) = Just e
fromException _ = Nothing
-}
#endif
|
hdbc/hdbc
|
Database/HDBC/Statement.hs
|
bsd-3-clause
| 4,467
| 0
| 12
| 1,250
| 261
| 163
| 98
| 24
| 0
|
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module      : Yi.Buffer.Indent
-- License : GPL-2
-- Maintainer : yi-devel@googlegroups.com
-- Stability : experimental
-- Portability : portable
--
-- Handles indentation in the keymaps. Includes:
--
-- * (TODO) Auto-indentation to the previous line's indentation
-- * Tab-expansion
-- * Shifting of the indentation for a region of text
module Yi.Buffer.Indent
( autoIndentB
, cycleIndentsB
, indentAsNextB
, indentAsPreviousB
, indentAsTheMostIndentedNeighborLineB
, indentOfB
, indentOfCurrentPosB
, indentSettingsB
, indentToB
, modifyIndentB
, newlineAndIndentB
, shiftIndentOfRegionB
, tabB
) where
import Control.Applicative ((<$>))
import Control.Monad ()
import Data.Char (isSpace)
import Data.List (nub, sort)
import Data.Monoid ((<>))
import Yi.Buffer.Basic (Direction (..))
import Yi.Buffer.HighLevel (firstNonSpaceB, getNextLineB, getNextNonBlankLineB, moveToSol, readLnB)
import Yi.Buffer.Misc
import Yi.Buffer.Normal ()
import Yi.Buffer.Region (Region (regionStart), mkRegion, modifyRegionB, readRegionB)
import Yi.Buffer.TextUnit (regionWithTwoMovesB)
import Yi.Rope (YiString)
import qualified Yi.Rope as R
import Yi.String (mapLines)
{- |
Return either a \t or the number of spaces specified by tabSize in the
IndentSettings. Note that if you actually want to insert a tab character
(for example when editing makefiles) then you should use: @insertB '\t'@.
-}
tabB :: BufferM String
tabB = do
indentSettings <- indentSettingsB
return $ if expandTabs indentSettings
then replicate (tabSize indentSettings) ' '
else "\t"
{-|
A specialisation of 'autoIndentHelperB'.
This is the most basic and the user is encouraged to
specialise 'autoIndentHelperB' on their own.
-}
autoIndentB :: IndentBehaviour -> BufferM ()
autoIndentB = autoIndentHelperB fetchPreviousIndentsB indentsOfString
where
-- Returns the indentation hints considering the given
-- string as the line above the current one.
-- The hints added are:
-- The indent of the given string
-- The indent of the given string plus two
-- The offset of the last open bracket if any in the line.
indentsOfString :: YiString -> BufferM [Int]
indentsOfString input = do
indent <- indentOfB input
bracketHints <- lastOpenBracketHint input
indentSettings <- indentSettingsB
return $ indent : (indent + shiftWidth indentSettings) : bracketHints
{-|
  This takes two arguments: the first is a function to
obtain indentation hints from lines above the current one.
The second is a function to obtain a set of indentation hints
from the previous line. Both of these are in the 'BufferM'
monad although the second seems like it is unnecessary.
However we must take into account the length of tabs which come
from the the tab settings and hence we must be in the 'BufferM'
monad.
To get the straightforward behaviour of the indents of all previous
lines until one of them has zero indent call this with:
@autoIndentHelperB fetchPreviousIndentsB (fmap (: []) indentOfB)@
However commonly we wish to have something more interesting for
the second argument, in particular we commonly wish to have the
last opening bracket of the previous line as well as its indent.
-}
autoIndentHelperB :: BufferM [ Int ]
-- ^ Action to fetch hints from previous lines
-> (YiString -> BufferM [ Int ])
-- ^ Action to calculate hints from previous line
-> IndentBehaviour
-- ^ Sets the indent behaviour,
-- see 'Yi.Buffer.IndentBehaviour' for a description
-> BufferM ()
autoIndentHelperB getUpwards getPrevious indentBehave =
do upwardHints <- savingExcursionB getUpwards
previousLine <- getNextLineB Backward
previousHints <- getPrevious previousLine
let allHints = upwardHints ++ previousHints
cycleIndentsB indentBehave allHints
-- | Cycles through the indentation hints. It does this without
-- requiring to set/get any state. We just look at the current
-- indentation of the current line and move to the appropriate
-- indentation hint, as selected by the given 'IndentBehaviour'.
cycleIndentsB :: IndentBehaviour -> [Int] -> BufferM ()
cycleIndentsB _ [] = return ()
cycleIndentsB indentBehave indents =
do currentLine <- readLnB
currentIndent <- indentOfB currentLine
indentToB $ chooseIndent currentIndent (sort $ nub indents)
where
-- Is the function to choose the indent from the given current
-- indent to the given list of indentation hints.
chooseIndent :: Int -> [ Int ] -> Int
chooseIndent =
case indentBehave of
IncreaseCycle -> chooseIncreaseCycle
DecreaseCycle -> chooseDecreaseCycle
IncreaseOnly -> chooseIncreaseOnly
DecreaseOnly -> chooseDecreaseOnly
-- Choose the indentation hint which is one more than the current
-- indentation hint unless the current is the largest or larger than
-- all the indentation hints in which case choose the smallest
-- (which will often be zero)
chooseIncreaseCycle :: Int -> [ Int ] -> Int
chooseIncreaseCycle currentIndent hints =
        -- Similarly to 'chooseDecreaseCycle': if 'above' is null then
-- we will go to the first of below which will be the smallest
-- indentation hint, if above is not null then we are moving to
-- the indentation hint which is one above the current.
head (above ++ below)
where
(below, above) = span (<= currentIndent) hints
-- Note that these functions which follow generally assume that
-- the list of hints which have been given is already sorted
-- and that the list is non-empty
-- Choose the indentation hint one less than the current indentation
-- unless the current indentation is the smallest (usually zero)
-- in which case choose the largest indentation hint.
chooseDecreaseCycle :: Int -> [ Int ] -> Int
chooseDecreaseCycle currentIndent hints =
-- So in particular if 'below' is null then we will
-- go to the largest indentation, if below is not null
-- we go to the largest indentation which is *not* higher
-- than the current one.
last (above ++ below)
where
(below, above) = span (< currentIndent) hints
chooseIncreaseOnly :: Int -> [ Int ] -> Int
chooseIncreaseOnly currentIndent hints =
head $ filter (> currentIndent) hints ++ [ currentIndent ]
chooseDecreaseOnly :: Int -> [ Int ] -> Int
chooseDecreaseOnly currentIndent hints =
last $ currentIndent : filter (< currentIndent) hints
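    -- Worked example: with hints [0,2,4,8] and a current indent of 4,
    -- chooseIncreaseCycle gives 8, chooseDecreaseCycle gives 2,
    -- chooseIncreaseOnly gives 8, and chooseDecreaseOnly gives 2.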
{-|
A function generally useful as the first argument to
'autoIndentHelperB'. This searches the lines above
the current line for the indentations of each line
until we get to a line which has no indentation
*and* is not empty. Indicating that we have reached
the outer scope.
-}
fetchPreviousIndentsB :: BufferM [Int]
fetchPreviousIndentsB = do
-- Move up one line,
moveOffset <- lineMoveRel (-1)
line <- readLnB
indent <- indentOfB line
-- So if we didn't manage to move upwards
-- or the current offset was zero *and* the line
-- was non-blank then we return just the current
-- indent (it might be the first line but indented some.)
if moveOffset == 0 || (indent == 0 && R.any (not . isSpace) line)
then return [ indent ]
else (indent :) <$> fetchPreviousIndentsB
-- | Returns the position of the last opening bracket on the
-- line which is not closed on the same line.
-- Note that if we have unmatched parentheses such as "( ]"
-- then we may not get the correct answer, but in that case
-- then arguably we don't really care if we get the correct
-- answer (at least if we get it wrong the user may notice
-- their error).
-- We return a list here as it's a convenient way of returning
-- no hint in the case of there being no non-closed bracket
-- and normally such a hint will be part of a list of hints
-- anyway.
-- NOTE: this could be easily modified to return the indentations
-- of *all* the non-closed opening brackets. But I think this is
-- not what you generally want.
-- TODO: we also do not care whether or not the bracket is within
-- a string or escaped. If someone feels up to caring about that
-- by all means please fix this.
lastOpenBracketHint :: YiString -> BufferM [ Int ]
lastOpenBracketHint input =
case getOpen 0 $ R.reverse input of
Nothing -> return []
Just s -> return <$> spacingOfB s
where
-- We get the last open bracket by counting through
-- the reversed line, when we see a closed bracket we
-- add one to the count. When we see an opening bracket
-- decrease the count. If we see an opening bracket when the
-- count is 0 we return the remaining (reversed) string
    -- as the part of the line which precedes the last opening bracket.
-- This can then be turned into an indentation by calling 'spacingOfB'
-- on it so that tabs are counted as tab length.
-- NOTE: that this will work even if tab occur in the middle of the line
getOpen :: Int -> YiString -> Maybe YiString
getOpen i s = let rest = R.drop 1 s in case R.head s of
Nothing -> Nothing
Just c
-- If it is opening and we have no closing to match
-- then we return the rest of the line
| isOpening c && i == 0 -> Just rest
-- If i is not zero then we have matched one of the
-- closing parentheses and we can decrease the nesting count.
| isOpening c -> getOpen (i - 1) rest
-- If the character is a closing bracket then we must increase
-- the nesting count
| isClosing c -> getOpen (i + 1) rest
-- If it is just a normal character forget about it and move on.
| otherwise -> getOpen i rest
isOpening :: Char -> Bool
isOpening '(' = True
isOpening '[' = True
isOpening '{' = True
isOpening _ = False
isClosing :: Char -> Bool
isClosing ')' = True
isClosing ']' = True
isClosing '}' = True
isClosing _ = False
-- | Returns the indentation of a given string. Note that this depends
-- on the current indentation settings.
indentOfB :: YiString -> BufferM Int
indentOfB = spacingOfB . R.takeWhile isSpace
makeIndentString :: Int -> BufferM YiString
makeIndentString level = do
IndentSettings et _ sw <- indentSettingsB
let (q, r) = level `quotRem` sw
if et
then return (R.replicate level " ")
else return (R.replicate q "\t" <> R.replicate r " ")
-- | Returns the length of a given string taking into account the
-- white space and the indentation settings.
spacingOfB :: YiString -> BufferM Int
spacingOfB text = do
indentSettings <- indentSettingsB
return $ countIndent indentSettings text
{-| Indents the current line to the given indentation level.
In addition moves the point according to where it was on the
line originally. If we were somewhere within the indentation
(ie at the start of the line or on an empty line) then we want
to just go to the end of the (new) indentation.
However if we are currently pointing somewhere within the text
of the line then we wish to remain pointing to the same character.
-}
indentToB :: Int -> BufferM ()
indentToB = modifyIndentB . const
-- | Modifies current line indent measured in visible spaces.
-- Respects indent settings. Calling this with value (+ 4)
-- will turn "\t" into "\t\t" if shiftwidth is 4 and into
-- "\t " if shiftwidth is 8
-- If current line is empty nothing happens.
modifyIndentB :: (Int -> Int) -> BufferM ()
modifyIndentB f = do
leadingSpaces <- regionWithTwoMovesB moveToSol firstNonSpaceB
newLeadinSpaces <-
readRegionB leadingSpaces >>= indentOfB >>= makeIndentString . f
modifyRegionB (const newLeadinSpaces) leadingSpaces
-- | Indent as much as the previous line
indentAsPreviousB :: BufferM ()
indentAsPreviousB = indentAsNeighborLineB Backward
-- | Indent as much as the next line
indentAsNextB :: BufferM ()
indentAsNextB = indentAsNeighborLineB Forward
indentAsTheMostIndentedNeighborLineB :: BufferM ()
indentAsTheMostIndentedNeighborLineB = do
prevLine <- getNextNonBlankLineB Backward
nextLine <- getNextNonBlankLineB Forward
prevIndent <- indentOfB prevLine
nextIndent <- indentOfB nextLine
indentToB (max prevIndent nextIndent)
indentAsNeighborLineB :: Direction -> BufferM ()
indentAsNeighborLineB dir = do
otherLine <- getNextNonBlankLineB dir
otherIndent <- indentOfB otherLine
indentToB otherIndent
-- | Insert a newline at point and indent the new line as the previous one.
newlineAndIndentB :: BufferM ()
newlineAndIndentB = newlineB >> indentAsPreviousB
-- | Set the padding of the string to newCount columns, using spaces if
-- expandTabs is set in the buffer's IndentSettings, and tabs (plus a
-- remainder of spaces) otherwise.
rePadString :: IndentSettings -> Int -> R.YiString -> R.YiString
rePadString indentSettings newCount input
| newCount <= 0 = rest
| expandTabs indentSettings = R.replicateChar newCount ' ' <> rest
| otherwise = tabs <> spaces <> rest
where (_indents,rest) = R.span isSpace input
tabs = R.replicateChar (newCount `div` tabSize indentSettings) '\t'
spaces = R.replicateChar (newCount `mod` tabSize indentSettings) ' '
-- | Counts the size of the indent in the given text.
--
-- Assumes nothing but tabs and spaces: uses 'isSpace'.
countIndent :: IndentSettings -> R.YiString -> Int
countIndent i t = R.foldl' (\i' c -> i' + spacing c) 0 indents
where
(indents, _) = R.span isSpace t
spacing '\t' = tabSize i
spacing _ = 1
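-- Worked example: with a tab size of 8, countIndent applied to "\t  foo"
-- counts 8 + 1 + 1 = 10 columns of indentation.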
-- | Shifts the indent right (or left if numOfShifts is negative) by
-- numOfShifts shift-widths, re-padding with spaces if expandTabs is set
-- in the buffer's IndentSettings and with tabs otherwise.
indentString :: IndentSettings -> Int -> R.YiString -> R.YiString
indentString is numOfShifts i = rePadString is newCount i
where
newCount = countIndent is i + (shiftWidth is * numOfShifts)
-- | Increases the indentation on the region by the given amount of shiftWidth
shiftIndentOfRegionB :: Int -> Region -> BufferM ()
shiftIndentOfRegionB shiftCount region = do
is <- indentSettingsB
let indentFn :: R.YiString -> R.YiString
indentFn line = if not (R.null line) && line /= "\n"
then indentString is shiftCount line
else line
modifyRegionB (mapLines indentFn) region
moveTo $ regionStart region
firstNonSpaceB
-- | Return the number of spaces at the beginning of the line, up to
-- the point.
indentOfCurrentPosB :: BufferM Int
indentOfCurrentPosB = do
p <- pointB
moveToSol
sol <- pointB
moveTo p
let region = mkRegion p sol
readRegionB region >>= spacingOfB
|
TOSPIO/yi
|
src/library/Yi/Buffer/Indent.hs
|
gpl-2.0
| 14,910
| 0
| 17
| 3,416
| 2,269
| 1,200
| 1,069
| 186
| 9
|
module List00001 where
list = [ "a",
"b"
]
val = 0
|
charleso/intellij-haskforce
|
tests/gold/parser/List00001.hs
|
apache-2.0
| 69
| 0
| 5
| 30
| 20
| 13
| 7
| 4
| 1
|
module Model where
import Prelude
import Yesod
import Data.Text (Text)
import Database.Persist.Quasi
import Data.Time
-- You can define all of your database entities in the entities file.
-- You can find more information on persistent and how to declare entities
-- at:
-- http://www.yesodweb.com/book/persistent/
share [mkPersist sqlSettings, mkMigrate "migrateAll"]
$(persistFileWith lowerCaseSettings "config/models")
|
MasseR/introitu
|
Model.hs
|
bsd-2-clause
| 428
| 0
| 8
| 59
| 64
| 37
| 27
| -1
| -1
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ko-KR">
<title>Directory List v2.3</title>
<maps>
<homeID>directorylistv2_3</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/directorylistv2_3/src/main/javahelp/help_ko_KR/helpset_ko_KR.hs
|
apache-2.0
| 978
| 78
| 66
| 157
| 412
| 209
| 203
| -1
| -1
|
g i = (a,b,c)
where a = False
b = True
c = ()
|
olsner/ghc
|
testsuite/tests/ghci.debugger/scripts/break013.hs
|
bsd-3-clause
| 76
| 0
| 7
| 44
| 37
| 21
| 16
| 4
| 1
|
-- Checks that using the "by" clause in a transform requires a function parameter
{-# OPTIONS_GHC -XMonadComprehensions -XTransformListComp #-}
module ShouldFail where
import Data.List(take)
z = [x | x <- [1..10], then take 5 by x ]
|
forked-upstream-packages-for-ghcjs/ghc
|
testsuite/tests/typecheck/should_fail/mc23.hs
|
bsd-3-clause
| 238
| 0
| 8
| 42
| 50
| 29
| 21
| 4
| 1
|
module Graphics.Urho3D.UI.Internal.DropDownList(
DropDownList
, dropDownListCntx
, sharedDropDownListPtrCntx
, SharedDropDownList
) where
import qualified Language.C.Inline as C
import qualified Language.C.Inline.Context as C
import qualified Language.C.Types as C
import Graphics.Urho3D.Container.Ptr
import qualified Data.Map as Map
data DropDownList
dropDownListCntx :: C.Context
dropDownListCntx = mempty {
C.ctxTypesTable = Map.fromList [
(C.TypeName "DropDownList", [t| DropDownList |])
]
}
sharedPtrImpl "DropDownList"
|
Teaspot-Studio/Urho3D-Haskell
|
src/Graphics/Urho3D/UI/Internal/DropDownList.hs
|
mit
| 559
| 0
| 11
| 88
| 120
| 80
| 40
| -1
| -1
|
module ProjectEuler.Problem50
( problem
) where
import Math.NumberTheory.Primes.Testing
import Control.Monad
import Petbox
import ProjectEuler.Types
problem :: Problem
problem = pureProblem 50 Solved result
-- calculate prefix sums of primes;
-- keep the leading 0 so that the sum of all primes
-- from p[1] to p[n] is just sum_p[n] - sum_p[0] = sum_p[n]
primeSum :: [Integer]
primeSum = 0 : zipWith (+) primeSum primes
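-- Illustration (added; values assumed from primes = [2,3,5,7,11,..]):
--   take 5 primeSum == [0,2,5,10,17]
-- so the sum of the 2nd..4th primes (3+5+7) is
--   primeSum !! 4 - primeSum !! 1 == 17 - 2 == 15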
-- only consider possible sums in the range
primeSumRanged :: [Integer]
primeSumRanged = takeWhile (<=1000000) primeSum
consecPrimeSum :: [Integer]
consecPrimeSum = do
let lenMax = length primeSumRanged
len <- [lenMax, lenMax-1..1]
i <- [lenMax-1, lenMax-2..0]
let j = i - len + 1
guard $ 0 <= j && j < lenMax
-- now we have i and j
-- let's enumerate all possible consecutive sums
  -- random-access list support would be nice here,
  -- but the search space isn't too scary.
pure $ primeSumRanged !! i - primeSumRanged !! j
result :: Integer
result = head $ filter isPrime consecPrimeSum
|
Javran/Project-Euler
|
src/ProjectEuler/Problem50.hs
|
mit
| 1,052
| 0
| 11
| 225
| 245
| 136
| 109
| 22
| 1
|
{-# LANGUAGE ScopedTypeVariables #-}
-- | Data transport handling for nodes capable of acting as clients.
module Haste.App.Transport
( MonadClient (..)
, getNonce
) where
import Control.Monad.IO.Class
import Data.IORef
import Data.Proxy
import Data.Typeable
import Haste.JSON (JSON)
import Haste.Concurrent (MonadConc)
import qualified Haste.JSString as S
import System.IO.Unsafe
import Haste.App.Protocol
{-# NOINLINE nonceRef #-}
nonceRef :: IORef Nonce
nonceRef = unsafePerformIO $ newIORef 0
-- | Get a nonce that's guaranteed to be unique per physical machine, modulo
-- overflow. By extension, this means that the nonce is guaranteed to be
-- unique per node as well.
getNonce :: MonadIO m => m Nonce
getNonce = liftIO $ atomicModifyIORef' nonceRef $ \n -> (n+1, n)
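-- Note (added for clarity): the counter starts at 0 and
-- 'atomicModifyIORef'' returns the pre-increment value, so successive
-- calls on one machine yield 0, 1, 2, ...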
class (Typeable m, MonadConc m) => MonadClient m where
-- | Invoke a remote function: send the RPC call over the network and wait for
-- the response to get back.
-- The message received from the server will be a 'ServerReply'. Instances
-- of this class must return the JSON within that reply.
remoteCall :: Endpoint -> S.JSString -> Nonce -> m JSON
|
valderman/haste-app
|
src/Haste/App/Transport.hs
|
mit
| 1,154
| 0
| 10
| 207
| 203
| 120
| 83
| 20
| 1
|
module HackySDL.Events where
import Graphics.UI.SDL.Video
import Graphics.UI.SDL.Timer
import Graphics.UI.SDL.Basic
import Graphics.UI.SDL.Event
import Graphics.UI.SDL.Types
import Foreign.Marshal.Alloc
import Foreign.Storable
import Control.Applicative
import Control.Monad.IO.Class
import HackySDL.Monad
getEvents :: SDL [Event]
getEvents = liftIO $ alloca getEvents'
where getEvents' ev = do
res <- pollEvent ev
case res of
0 -> return []
_ -> (:) <$> peek ev <*> getEvents' ev
|
LudvikGalois/USBGame
|
src/HackySDL/Events.hs
|
mit
| 536
| 0
| 14
| 113
| 152
| 87
| 65
| 18
| 2
|
module Tfoo.Matrix where
import Data.List
replace :: Int -> a -> [a] -> [a]
replace index element list = (take index list) ++ [element] ++ (drop (index+1) list)
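-- For example (illustrative, added): replace 1 'x' "abc" == "axc"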
type Matrix a = [[a]]
replace' :: Int -> Int -> a -> Matrix a -> Matrix a
replace' x y element matrix = replace x (replace y element (matrix !! x)) matrix
diagonal :: Matrix a -> [a]
diagonal m = zipWith (!!) m [0..]
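-- For example (illustrative, added): diagonal [[1,2],[3,4]] == [1,4]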
diagonals :: Matrix a -> [[a]]
diagonals matrix =
let tails' = tail . tails
diagonalsNW m = map diagonal ([m] ++ tails' m ++ tails' (transpose m))
in diagonalsNW matrix ++ diagonalsNW (map reverse matrix)
differences :: (Eq a) => Matrix a -> Matrix a -> [(Int, Int)]
differences a b =
let enumeratedRows = zip3 a b [0..]
enumerateEntry (a,b,x) = zip4 a b (repeat x) [0..]
enumeratedEntries = concat $ map enumerateEntry enumeratedRows
compareEntries a b = fst a == fst b && snd a == snd b
different (a,b,_,_) = a /= b
differentEntries = filter different enumeratedEntries
entryCoordinates (_,_,x,y) = (x,y)
in map entryCoordinates differentEntries
|
nbartlomiej/tfoo
|
Tfoo/Matrix.hs
|
mit
| 1,084
| 0
| 14
| 242
| 507
| 265
| 242
| 24
| 1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
module SoOSiM.SimMonad
( -- * Basic API
createComponent
, invoke
, invokeAsync
, notify
, respond
, yield
, readMemory
, writeMemory
, componentLookup
, traceMsg
, traceMsgTag
, createNode
, compute
, stop
, expect
-- * Advanced API
, createNodeN
, runSTM
, getComponentId
, getNodeId
, componentCreator
, getTime
, stopSim
-- * Specialized API
, createComponentN
, createComponentNP
, createComponentNPS
, invokeS
, invokeAsyncS
, notifyS
, respondS
, readMemoryN
, writeMemoryN
, componentLookupN
)
where
import Control.Concurrent.STM (STM,newTVar)
import Control.Monad.Coroutine (suspend)
import Control.Monad.State (gets,lift,modify)
import Data.Dynamic (Dynamic,Typeable,toDyn)
import qualified Data.IntMap as IM
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import SoOSiM.Simulator.Util
import SoOSiM.Types
import SoOSiM.Util
{-# INLINE createComponent #-}
-- | Create a new component
createComponent ::
(ComponentInterface iface, Typeable (Receive iface))
=> iface
-- ^ Component Interface
-> Sim ComponentId
-- ^ 'ComponentId' of the created component
createComponent = createComponentNPS Nothing Nothing Nothing
{-# INLINE createComponentN #-}
-- | Create a new component
createComponentN ::
(ComponentInterface iface, Typeable (Receive iface))
=> iface
-- ^ Component Interface
-> NodeId
-- ^ Node to create component on
-> Sim ComponentId
createComponentN iface nId =
createComponentNPS (Just nId) Nothing Nothing iface
{-# INLINE createComponentNP #-}
-- | Create a new component
createComponentNP ::
(ComponentInterface iface, Typeable (Receive iface))
=> NodeId
-- ^ Node to create component on
-> ComponentId
-- ^ ComponentId to set as parent
-> iface
-- ^ Component Interface
-> Sim ComponentId
-- ^ 'ComponentId' of the created component
createComponentNP nodeId parentId iface =
createComponentNPS (Just nodeId) (Just parentId) Nothing iface
-- | Create a new component
createComponentNPS ::
(ComponentInterface iface, Typeable (Receive iface))
=> Maybe NodeId
-- ^ Node to create component on, leave to 'Nothing' to create on current
-- node
-> Maybe ComponentId
-- ^ ComponentId to set as parent, set to 'Nothing' to use own ComponentId
-> Maybe (State iface)
-- ^ Internal State, leave 'Nothing' to set to default
-> iface
-- ^ Component Interface
-> Sim ComponentId
-- ^ 'ComponentId' of the created component
createComponentNPS nodeIdM parentIdM iStateM iface = Sim $ do
nodeId <- fmap (`fromMaybe` nodeIdM) $ gets currentNode
parentId <- fmap (`fromMaybe` parentIdM) $ gets currentComponent
compId <- getUniqueM
statusTV <- (lift . lift) $ newTVar ReadyToRun
let iState = fromMaybe (initState iface) iStateM
stateTV <- (lift . lift) $ newTVar iState
reqBufTV <- (lift . lift) $ newTVar []
respBufTV <- (lift . lift) $ newTVar []
let meta = SimMetaData 0 0 0 Map.empty Map.empty
metaTV <- (lift . lift) $ newTVar meta
let component = (CC iface compId parentId statusTV stateTV reqBufTV respBufTV [] metaTV)
lift $ modifyNode nodeId (addComponent compId component)
return compId
where
cname = componentName iface
addComponent cId comp n@(Node {..}) =
n { nodeComponents = IM.insert cId comp nodeComponents
, nodeComponentLookup = Map.insert cname cId nodeComponentLookup
}
{-# INLINE invoke #-}
-- | Synchronously invoke another component
invoke ::
(ComponentInterface iface, Typeable (Receive iface), Typeable (Send iface), Show (Receive iface))
=> iface
-- ^ Interface type
-> ComponentId
-- ^ ComponentId of callee
-> Receive iface
-- ^ Argument
-> Sim (Send iface)
-- ^ Response from callee
invoke iface recipient content = invokeS iface Nothing recipient content
-- | Synchronously invoke another component
invokeS ::
forall iface
. (ComponentInterface iface
, Typeable (Receive iface)
, Typeable (Send iface)
, Show (Receive iface))
=> iface
-- ^ Interface type
-> Maybe ComponentId
-- ^ Caller, leave 'Nothing' to set to current module
-> ComponentId
-- ^ Callee
-> Receive iface
-- ^ Argument
-> Sim (Send iface)
-- ^ Response from recipient
invokeS _ senderM recipient content = Sim $ do
t <- gets simClk
sender <- fmap (`fromMaybe` senderM) $ gets currentComponent
let message = Message (t,show content) (toDyn content) (RA (sender,sender))
lift $ sendMessage sender recipient requestBuffer message
(varD,varS) <- suspend (Request recipient return)
return (unmarshall ("invoke: " ++ show (sender,recipient,content) ++ varS) varD)
expect ::
forall iface
. ( ComponentInterface iface
, Typeable (Send iface))
=> iface
-> ComponentId
-> Sim (Send iface)
expect _ expectedSender = Sim $ do
(varD,varS) <- suspend (Request expectedSender return)
return (unmarshall ("expect: " ++ show expectedSender ++ " " ++ varS) varD)
{-# INLINE invokeAsync #-}
-- | Invoke another component, handle response asynchronously
invokeAsync ::
(ComponentInterface iface, Typeable (Receive iface), Typeable (Send iface), Show (Receive iface))
=> iface
-- ^ Interface type
-> ComponentId
-- ^ ComponentId of callee
-> Receive iface
-- ^ Argument
-> (Send iface -> Sim ())
-- ^ Response Handler
-> Sim ()
-- ^ Call returns immediately
invokeAsync iface recipient content handler =
invokeAsyncS iface Nothing recipient content handler
-- | Invoke another component, handle response asynchronously
invokeAsyncS ::
forall iface
. (ComponentInterface iface
, Typeable (Receive iface)
, Typeable (Send iface)
, Show (Receive iface))
=> iface
-- ^ Interface type
-> Maybe ComponentId
-- ^ Parent of handler, leave 'Nothing' to set to the current module
-> ComponentId
-- ^ Callee
-> (Receive iface)
-- ^ Argument
-> (Send iface -> Sim ())
-- ^ Handler
-> Sim ()
-- ^ Call returns immediately
invokeAsyncS _ parentIdM recipient content handler = Sim $ do
nodeId <- gets currentNode
parentId <- fmap (`fromMaybe` parentIdM) $ gets currentComponent
sender <- runSim $ createComponentNPS (Just nodeId) parentIdM
(Just (recipient,\(d,_) -> handler $ unmarshallAsync d))
(HS parentId)
t <- gets simClk
let message = Message (t,show content) (toDyn content) (RA (sender,parentId))
lift $ sendMessage parentId recipient requestBuffer message
where
unmarshallAsync :: Dynamic -> Send iface
unmarshallAsync = unmarshall "invokeAsyncS"
{-# INLINE notify #-}
-- | Notify another component
notify ::
(ComponentInterface iface, Typeable (Receive iface), Show (Receive iface))
=> iface
-- ^ Interface type
-> ComponentId
-- ^ ComponentId of callee
-> Receive iface
-- ^ Argument
-> Sim ()
notify iface recipient content = notifyS iface Nothing recipient content
-- | Notify another component
notifyS ::
forall iface
. (ComponentInterface iface
, Typeable (Receive iface)
, Show (Receive iface))
=> iface
-- ^ Interface type
-> Maybe ComponentId
-- ^ Caller, leave 'Nothing' to set to current module
-> ComponentId
-- ^ Callee
-> Receive iface
-- ^ Argument
-> Sim ()
notifyS _ senderM recipient content = Sim $ do
sender <- fmap (`fromMaybe` senderM) $ gets currentComponent
t <- gets simClk
let message = Message (t, show content) (toDyn content) (RA (sender,sender))
lift $ sendMessage sender recipient requestBuffer message
{-# INLINE respond #-}
-- | Respond to an invocation
respond ::
(ComponentInterface iface, Typeable (Send iface), Show (Send iface))
=> iface
-- ^ Interface type
-> ReturnAddress
-- ^ Return address to send response to
-> (Send iface)
-- ^ Value to send as response
-> Sim ()
-- ^ Call returns immediately
respond iface retAddr content = respondS iface Nothing retAddr content
-- | Respond to an invocation
respondS ::
forall iface
. ( ComponentInterface iface
, Typeable (Send iface)
, Show (Send iface))
=> iface
-- ^ Interface type
-> Maybe ComponentId
-- ^ Callee Id, leave 'Nothing' to set to current module
-> ReturnAddress
-- ^ Return address
-> (Send iface)
-- ^ Value to send as response
-> Sim ()
-- ^ Call returns immediately
respondS _ senderM recipient content = Sim $ do
sender <- fmap (`fromMaybe` senderM) $ gets currentComponent
t <- gets simClk
let message = Message (t, show content) (toDyn content) (RA (sender,sender))
lift $ sendMessage sender (fst $ unRA recipient) responseBuffer message
-- | Have a pure computation run for 'n' simulator ticks
compute ::
Int -- ^ The number of ticks the computation should take
-> a -- ^ The pure computation
-> Sim a -- ^ Result of the pure computation
compute i a
| i < 1 = return a
| otherwise = Sim $ suspend (Run i (return a))
-- | Yield internal state to the simulator scheduler
yield ::
a
-> Sim a
yield s = Sim $ suspend (Yield (return s))
-- | Stop the component
-- NB: a stopped component cannot be resumed!
stop ::
Sim a
stop = Sim $ suspend Kill
-- | Get the component id of your component
getComponentId ::
Sim ComponentId
getComponentId = Sim $ gets currentComponent
-- | Get the node id of the node your component is currently running on
getNodeId ::
Sim NodeId
getNodeId = Sim $ gets currentNode
-- | Create a new node
createNode ::
Sim NodeId -- ^ NodeId of the created node
createNode = Sim $ do
nodeId <- getUniqueM
runSim $ createNodeN nodeId
return nodeId
-- | Create a new node with a specific NodeId
createNodeN ::
NodeId
-> Sim ()
createNodeN nodeId = Sim $
modify (\s -> s {nodes = IM.insert nodeId newNode (nodes s)})
where
newNode = Node nodeId NodeInfo Map.empty IM.empty IM.empty []
-- | Write memory of local node
writeMemory ::
Typeable a
=> Int
-- ^ Address to write
-> a
-- ^ Value to write
-> Sim ()
writeMemory = writeMemoryN Nothing
-- | Write memory of a specified node
writeMemoryN ::
Typeable a
=> Maybe NodeId
-- ^ Node you want to write on, leave 'Nothing' to set to current node
-> Int
-- ^ Address to write
-> a
-- ^ Value to write
-> Sim ()
writeMemoryN nodeM addr val = Sim $ do
node <- fmap (`fromMaybe` nodeM) $ gets currentNode
lift $ modifyNode node writeVal
where
writeVal n@(Node {..}) = n { nodeMemory = IM.insert addr (toDyn val)
nodeMemory }
-- | Read memory of local node
readMemory ::
Int
-- ^ Address to read
-> Sim Dynamic
readMemory = readMemoryN Nothing
-- | Read memory of a specified node
readMemoryN ::
Maybe NodeId
-- ^ Node you want to look on, leave 'Nothing' to set to current node
-> Int
-- ^ Address to read
-> Sim Dynamic
readMemoryN nodeM addr = Sim $ do
node <- fmap (`fromMaybe` nodeM) $ gets currentNode
nodeMem <- fmap (nodeMemory . (IM.! node)) $ gets nodes
case (IM.lookup addr nodeMem) of
Just val -> return val
Nothing -> error $ "Trying to read empty memory location: " ++
show addr ++ " from Node: " ++ show node
-- | Return the 'ComponentId' of the component that created the current
-- component
componentCreator ::
Sim ComponentId
componentCreator = Sim $ do
nId <- gets currentNode
cId <- gets currentComponent
ns <- gets nodes
let ces = (nodeComponents (ns IM.! nId))
let ce = ces IM.! cId
let ceCreator = creator ce
return ceCreator
{-# INLINE componentLookup #-}
-- | Get the unique 'ComponentId' of a component implementing an interface
componentLookup ::
ComponentInterface iface
=> iface
-- ^ Interface type of the component you are looking for
-> Sim (Maybe ComponentId)
-- ^ 'Just' 'ComponentId' if a component is found, 'Nothing' otherwise
componentLookup = componentLookupN Nothing
-- | Get the unique 'ComponentId' of a component implementing an interface
componentLookupN ::
ComponentInterface iface
=> Maybe NodeId
-- ^ Node you want to look on, leave 'Nothing' to set to current node
-> iface
-- ^ Interface type of the component you are looking for
-> Sim (Maybe ComponentId)
-- ^ 'Just' 'ComponentId' if a component is found, 'Nothing' otherwise
componentLookupN nodeM iface = Sim $ do
node <- fmap (`fromMaybe` nodeM) $ gets currentNode
idCache <- fmap (nodeComponentLookup . (IM.! node)) $ lift $ gets nodes
return $ Map.lookup (componentName iface) idCache
traceMsg ::
String
-> Sim ()
traceMsg msg = Sim $ do
t <- gets simClk
node <- gets currentNode
comp <- gets currentComponent
lift $ modifyNode node (updateTraceBuffer comp t msg Nothing)
traceMsgTag ::
String
-> String
-> Sim ()
traceMsgTag msg tag = Sim $ do
t <- gets simClk
node <- gets currentNode
comp <- gets currentComponent
lift $ modifyNode node (updateTraceBuffer comp t msg (Just tag))
runSTM ::
STM a
-> Sim a
runSTM = Sim . lift . lift
getTime ::
Sim Int
getTime = Sim $ gets simClk
stopSim ::
Sim ()
stopSim = Sim $ modify (\s -> s {running = False})
newtype HandlerStub = HS ComponentId
instance ComponentInterface HandlerStub where
type State HandlerStub = (ComponentId, (Dynamic,String) -> Sim ())
type Receive HandlerStub = Dynamic
type Send HandlerStub = ()
initState _ = undefined
componentName (HS cId) = "Asynchronous callback for component " ++
show cId
componentBehaviour _ (waitingFor, handler) (Message (_,s) cnt sender)
| returnAddress sender == waitingFor
= Sim $ do
runSim $ handler (cnt,s)
suspend Kill
componentBehaviour _ (waitingFor, handler) _ = Sim $ do
var <- suspend (Request waitingFor return)
runSim $ handler var
suspend Kill
|
christiaanb/SoOSiM
|
src/SoOSiM/SimMonad.hs
|
mit
| 14,215
| 0
| 16
| 3,321
| 3,695
| 1,906
| 1,789
| 344
| 2
|
module Oden.Explode
(
ExplodeError(..),
explodeExpr,
explodeTopLevel,
explodePackage
) where
import qualified Oden.Core.Untyped as Untyped
import Oden.Identifier
import Oden.Metadata
import Oden.QualifiedName (QualifiedName(..))
import Oden.Syntax
import Oden.SourceInfo
import Oden.Type.Signature
import Control.Monad
import Control.Monad.Writer
import qualified Data.Map as Map
data ExplodeError = TypeSignatureWithoutDefinition SourceInfo Identifier (TypeSignature SourceInfo)
| TypeSignatureRedefinition SourceInfo Identifier (Maybe (TypeSignature SourceInfo))
| InvalidMemberAccessExpression SourceInfo Untyped.Expr Untyped.Expr
deriving (Show, Eq)
explodeNameBinding :: NameBinding -> Untyped.NameBinding
explodeNameBinding (NameBinding si name) = Untyped.NameBinding (Metadata si) name
explodeFieldInitializer :: FieldInitializer -> Writer [ExplodeError] Untyped.FieldInitializer
explodeFieldInitializer (FieldInitializer si label expr) =
Untyped.FieldInitializer (Metadata si) label <$> explodeExpr' expr
toEither :: (Eq v, Show v, Show e) => Writer [e] v -> Either [e] v
toEither w = case es of
[] -> Right a
_ -> Left es
where (a, es) = runWriter w
explodeExpr :: Expr -> Either [ExplodeError] Untyped.Expr
explodeExpr = toEither . explodeExpr'
explodePackage :: Package -> Either [ExplodeError] (Untyped.Package [Untyped.ImportReference])
explodePackage = toEither . explodePackage'
explodeTopLevel :: PackageName -> [TopLevel] -> Either [ExplodeError] ([Untyped.ImportReference], [Untyped.Definition])
explodeTopLevel = (.) toEither . explodeTopLevel'
explodeExpr' :: Expr -> Writer [ExplodeError] Untyped.Expr
explodeExpr' (Subscript si es [Singular e]) =
Untyped.Subscript (Metadata si) <$> explodeExpr' es <*> explodeExpr' e
explodeExpr' (Subscript si es [Range e1 e2]) =
Untyped.Subslice (Metadata si) <$> explodeExpr' es <*> (Untyped.Range <$> explodeExpr' e1 <*> explodeExpr' e2)
explodeExpr' (Subscript si es [RangeTo e]) =
Untyped.Subslice (Metadata si) <$> explodeExpr' es <*> (Untyped.RangeTo <$> explodeExpr' e)
explodeExpr' (Subscript si es [RangeFrom e]) =
Untyped.Subslice (Metadata si) <$> explodeExpr' es <*> (Untyped.RangeFrom <$> explodeExpr' e)
explodeExpr' (Subscript si es (i:ir)) =
explodeExpr' (Subscript si (Subscript si es [i]) ir)
explodeExpr' (UnaryOp si o e) =
Untyped.UnaryOp (Metadata si) o <$> explodeExpr' e
explodeExpr' (BinaryOp si o e1 e2) =
Untyped.BinaryOp (Metadata si) o <$> explodeExpr' e1 <*> explodeExpr' e2
explodeExpr' (Symbol si i) =
return $ Untyped.Symbol (Metadata si) i
explodeExpr' (Literal si (Bool b)) =
return $ Untyped.Literal (Metadata si) (Untyped.Bool b)
explodeExpr' (Literal si (Int i)) =
return $ Untyped.Literal (Metadata si) (Untyped.Int i)
explodeExpr' (Literal si (String s)) =
return $ Untyped.Literal (Metadata si) (Untyped.String s)
explodeExpr' (Literal si Unit) =
return $ Untyped.Literal (Metadata si) Untyped.Unit
explodeExpr' (Tuple si f s r) =
Untyped.Tuple (Metadata si) <$> explodeExpr' f <*> explodeExpr' s <*> mapM explodeExpr' r
explodeExpr' (If si c t f) =
Untyped.If (Metadata si) <$> explodeExpr' c <*> explodeExpr' t <*> explodeExpr' f
explodeExpr' (Application si f ps) =
Untyped.Application (Metadata si) <$> explodeExpr' f <*> mapM explodeExpr' ps
explodeExpr' (Fn si [] b) =
Untyped.NoArgFn (Metadata si) <$> explodeExpr' b
explodeExpr' (Fn si [arg] b) =
Untyped.Fn (Metadata si) (explodeNameBinding arg) <$> explodeExpr' b
explodeExpr' (Fn si (arg:args) b) =
Untyped.Fn (Metadata si) (explodeNameBinding arg) <$> explodeExpr' (Fn si args b)
explodeExpr' (RecordInitializer si fields) =
Untyped.RecordInitializer (Metadata si) <$> mapM explodeFieldInitializer fields
explodeExpr' (MemberAccess si expr (Symbol _ name)) =
Untyped.MemberAccess (Metadata si) <$> explodeExpr' expr <*> return name
explodeExpr' (MemberAccess si expr nonName) = do
expr' <- explodeExpr' expr
nonName' <- explodeExpr' nonName
tell [InvalidMemberAccessExpression si expr' nonName']
return expr'
-- invalid, but can be handled anyway
explodeExpr' (Subscript _ a []) = explodeExpr' a
explodeExpr' (Let _ [] b) = explodeExpr' b
explodeExpr' (Let _ [LetPair si n e] b) =
Untyped.Let (Metadata si) (explodeNameBinding n) <$> explodeExpr' e <*> explodeExpr' b
explodeExpr' (Let si (LetPair _ n e:bs) b) =
Untyped.Let (Metadata si) (explodeNameBinding n) <$> explodeExpr' e <*> explodeExpr' (Let si bs b)
explodeExpr' (Slice si es) =
Untyped.Slice (Metadata si) <$> mapM explodeExpr' es
explodeExpr' (Block si es) =
Untyped.Block (Metadata si) <$> mapM explodeExpr' es
-- temporary metadata for top level definitions, used for keeping track
-- of duplications and detecting missing terms for type signatures
data TempTopLevel = TempTop {
tempType :: (SourceInfo, Maybe (TypeSignature SourceInfo)),
-- whether this signature has a corresponding definition
hasValue :: Bool
}
explodeTopLevel' :: PackageName -> [TopLevel] -> Writer [ExplodeError] ([Untyped.ImportReference], [Untyped.Definition])
explodeTopLevel' pkg top = do
(is, scs, defs) <- foldM iter ([], Map.empty, []) top
case filter (not . hasValue . snd) (Map.assocs scs) of
[] -> return ()
us -> mapM_ (tell . (:[]) . typeSigNoValErr) us
return (is, defs)
where
iter (is, ts, defs) (FnDefinition si name args body) = do
def <- Untyped.Definition (Metadata si) name (Map.lookup name ts >>= snd . tempType) <$> explodeExpr' (Fn si args body)
return (is, assignValue name si ts, defs ++ [def])
iter (is, ts, defs) (ValueDefinition si name expr) = do
def <- Untyped.Definition (Metadata si) name (Map.lookup name ts >>= snd . tempType) <$> explodeExpr' expr
return (is, assignValue name si ts, defs ++ [def])
iter (is, ts, defs) (TypeSignatureDeclaration tsi name sc) = do
let (TypeSignature sc' _ _) = sc
case Map.lookup name ts of
Just existing -> tell [typeSigReDefErr (name, existing) sc'] -- type already defined
Nothing -> return ()
return (is, newTypeSig name tsi (Just sc) ts, defs)
iter (is, ts, defs) (ImportDeclaration si name) =
return (is ++ [Untyped.ImportReference (Metadata si) name], ts, defs)
iter (is, ts, defs) (TypeDefinition si name typeSig) =
-- TODO: Add support for type parameters
let def = Untyped.TypeDefinition (Metadata si) (FQN pkg name) [] typeSig
in return (is, ts, defs ++ [def])
newTypeSig name tsi msc
= Map.insertWith (\_ old -> old) name (TempTop (tsi, msc) False)
assignValue name si
-- if there's already a type signature, keep the signature and mark it as defined
= Map.insertWith (\_ (TempTop (_, sc') _) -> (TempTop (si, sc') True))
name (TempTop (si, Nothing) True)
-- type signature doesn't have an assigned term
typeSigNoValErr :: (Identifier, TempTopLevel) -> ExplodeError
typeSigNoValErr (n, TempTop (si, sc) _)
= case sc of
Just j_sc -> TypeSignatureWithoutDefinition si n j_sc
-- if this happens, it's a bug in the compiler, rather than source code
Nothing -> error "Panic: type signature definition present without actual signature"
-- type signature already defined
typeSigReDefErr :: (Identifier, TempTopLevel) -> SourceInfo -> ExplodeError
typeSigReDefErr (n, TempTop (_, sc) _) si'
= TypeSignatureRedefinition si' n sc
explodePackage' :: Package -> Writer [ExplodeError] (Untyped.Package [Untyped.ImportReference])
explodePackage' (Package (PackageDeclaration si name) definitions) = do
(is, ds) <- explodeTopLevel' name definitions
return (Untyped.Package (Untyped.PackageDeclaration (Metadata si) name) is ds)
|
AlbinTheander/oden
|
src/Oden/Explode.hs
|
mit
| 7,811
| 0
| 16
| 1,472
| 2,824
| 1,433
| 1,391
| 136
| 8
|
module Source where
data Name = Name String
deriving (Eq, Show, Ord)
data Kind = Star | KFun Kind Kind | KVar Name
deriving Eq
data PolyType = Forall [Name] Type
data Type = TUnit
| TVar (Maybe Kind) Name
| TCon (Maybe Kind) Name
| TApp Type Type
| TFun
| TTuple
| TList
| TInt
| TDouble
| TBool
| TIO
| TChar
deriving Eq
tfun :: Type -> Type -> Type
tfun l r = TApp (TApp TFun l) r
ttuple :: Type -> Type -> Type
ttuple l r = TApp (TApp TTuple l) r
tlist :: Type -> Type
tlist = TApp TList
data ConD = ConD Name [Type]
data Pat a = WildP
| VarP a Name
| ConP a Name [Pat a]
| IntP a Int
| CharP a Char
| StringP a String
| ListP a [Pat a]
| TupleP a (Pat a) (Pat a)
data Match a = Match Name [Pat a] (Exp a) [NestedDecl a]
data Branch a = Branch (Pat a) (Exp a)
data NestedDecl a = NSig FunSig
| NFun (Fun a)
| NTop (Top a)
data Constr = Constr Name [Name]
data Cxt = Cxt [Constr]
data FunSig = FunSig [Name] Cxt Type
data Fun a = Fun Name [Match a]
data Top a = Top (Pat a) (Exp a) [NestedDecl a]
data Decl a = DFun (Fun a)
| DTop (Top a)
| DClass Cxt Name Name [NestedDecl a]
| DInst Cxt Name Type [NestedDecl a]
| DData Name [(Name, Maybe Kind)] [ConD]
| DSig FunSig
| DType Name [(Name, Maybe Kind)] Type
| DAssoc Name -- Just to ensure that Name exists
data Exp a = Var a Name
| Con a Name
| LitChar a Char
| LitInt a Int
| LitString a String
| App a (Exp a) (Exp a)
| InfixApp a (Exp a) Name (Exp a) -- Operator in the middle
| LeftSection a (Exp a) Name -- Operator on the right
| RightSection a Name (Exp a) -- Operator on the left
| List a [Exp a]
| Let a [NestedDecl a] (Exp a)
| If a (Exp a) (Exp a) (Exp a)
| Tuple a (Exp a) (Exp a)
| Lambda a [Pat a] (Exp a)
| Case a (Exp a) [Branch a]
| Annot a (Exp a) Type
|
jozefg/hi
|
src/Source.hs
|
mit
| 2,241
| 0
| 9
| 907
| 876
| 482
| 394
| 68
| 1
|
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module SimpleBool.Context where
import Control.Monad.Error (throwError)
import Control.Monad.Reader
import qualified Data.Text.Lazy.Builder as LB
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Info
import qualified Data.Evaluator as Eval
import SimpleBool.Type
import SimpleBool.Error
data Binding
= NameBind -- ^ λx.x (identifier binding with name)
| VarBind Type -- ^ λx:T (identifier binding with type)
deriving (Eq, Show)
type Context = [(Name, Binding)]
data Env a = Env { symbols :: Map Name a, context :: Context }
deriving (Eq, Show)
initEnv :: Env a
initEnv = Env { symbols = Map.empty, context = [] }
type Eval b a = Eval.Eval (Env b) Error a
runEval :: Eval b a -> (Either Error a, Env b)
runEval ev = Eval.runEval ev initEnv
class HasType a where
typeof :: a -> Eval b Type
class HasInfo a where
inform :: a -> Eval b Info
class Display a where
buildText :: a -> Eval b LB.Builder
pushContext :: MonadReader (Env v) m => Name -> Binding -> m (Env v)
pushContext x b = do
env <- ask
let tabl = symbols env
let ctx' = (x, b) : (context env)
return $ Env { symbols = tabl, context = ctx' }
getBind :: Info -> Int -> Eval b (Name, Binding)
getBind info i = do
env <- ask
let ctx = context env
let l = length ctx
if l > i
then return $ ctx !! i
else throwError $ OutOfContext info i l
getBinding :: Info -> Int -> Eval b Binding
getBinding info i = getBind info i >>= return . snd
indexToName :: Info -> Int -> Eval b Name
indexToName info i = getBind info i >>= return . fst
nameToIndex :: Info -> Name -> Eval b Int
nameToIndex info x = do
env <- ask
let ctx = context env
search 0 ctx
where
search c = \case
[] -> throwError $ UndefinedSymbol info x
((y,NameBind):ctx)
| x == y -> return c
| otherwise -> search (c + 1) ctx
(_:ctx) -> search (c + 1) ctx
getTypeFromContext :: Info -> Int -> Eval b Type
getTypeFromContext info i = getBinding info i >>= \case
VarBind ty -> return ty
_ -> indexToName info i >>= throwError . WrongBinding info
|
VoQn/tapl-hs
|
src/SimpleBool/Context.hs
|
mit
| 2,177
| 0
| 13
| 492
| 851
| 442
| 409
| 64
| 3
|
module PE0027 where
import Data.List (foldl')
import Benchmark
import Primes (isPrime)
genericQuadratic :: Integer -> Integer -> Integer -> Integer
genericQuadratic a b n = n*n + a*n + b
fortyPrimeQuadratic :: Integer -> Integer
fortyPrimeQuadratic = genericQuadratic 1 41
eightyPrimeQuadratic :: Integer -> Integer
eightyPrimeQuadratic = genericQuadratic (-79) 1601
primeRunLength :: (Integer -> Integer) -> Integer
primeRunLength g =
prl 0 g
where
prl start generator
| isPrime (generator start) = prl (start + 1) generator
| otherwise = start
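-- For example (illustrative, added): Euler's quadratic n^2 + n + 41 is
-- prime for n = 0..39, so primeRunLength fortyPrimeQuadratic should
-- evaluate to 40.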
strictMaximum :: [Integer] -> Integer
strictMaximum = foldl' max 0
strictMaximum1stOf3 :: [(Integer, a, b)] -> (Integer, a, b)
strictMaximum1stOf3 =
foldl' max1Of3 (0,undefined,undefined)
where
max1Of3 a@(a1,_,_) b@(b1,_,_) = if b1 > a1
then b
else a
solve :: Integer -> Integer
solve n = strictMaximum [
primeRunLength (genericQuadratic a b)
| a <- [(-n)..n]
, b <- [(-n)..n]
]
solve2 :: Integer -> (Integer, Integer, Integer, Integer)
solve2 n = (\(l,a,b) -> (l,a,b,a*b)) $ strictMaximum1stOf3 [
(primeRunLength (genericQuadratic a b), a, b)
| a <- [(-n)..n]
, b <- [(-n)..n]
]
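-- Note (added; based on the well-known Project Euler 27 result, not
-- re-verified here): for n = 1000 the best quadratic is n^2 - 61n + 971,
-- giving 71 consecutive primes, so solve2 1000 should be
-- (71, -61, 971, -59231).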
main :: IO ()
main = do
n <- arg 1 "1000"
let n' = read n :: Integer
let result = solve2 n'
print result
|
mvidner/projecteuler
|
src/PE0027.hs
|
mit
| 1,391
| 0
| 12
| 359
| 567
| 308
| 259
| 40
| 2
|
{-|
This module is similar to the Graphics module, except that it provides the
ability to play sounds.
-}
module FRP.Spice.Internal.Sound ( playSound
, loopSound
) where
-------------------
-- Local Imports --
import FRP.Spice.Internal.Types
----------
-- Code --
{-|
Playing a sound.
-}
playSound :: Sound -> Scene
playSound _ = return ()
{-|
Looping a sound.
-}
loopSound :: Sound -> Scene
loopSound _ = return ()
|
crockeo/spice
|
src/FRP/Spice/Internal/Sound.hs
|
mit
| 493
| 0
| 6
| 144
| 74
| 45
| 29
| 7
| 1
|
module BoundedMap ( BoundedMap
, mkBoundedMap
, insert
, update
, pop
, view
) where
import qualified BoundedStack as BS
import qualified Data.Map.Strict as M
-- Bounded map maintaining a FIFO to drop the oldest element when its specified
-- element limit is reached
data BoundedMap k v = BoundedMap (BS.BoundedStack k) (M.Map k v)
deriving (Show)
mkBoundedMap :: Int -> BoundedMap k v
mkBoundedMap limit | limit >= 1 = BoundedMap (BS.mkBoundedStack limit) M.empty
| otherwise = error "limit for BoundedMap needs to be >= 1"
-- Insert a new element into the map, return the new map and the truncated
-- element (if over the limit)
insert :: Ord k => k -> v -> BoundedMap k v -> (BoundedMap k v, Maybe (k, v))
insert k v (BoundedMap st m) =
let isNew = M.notMember k m
(newStack, newMap) = if isNew
then (BS.push k st, M.insert k v m)
else ((st, Nothing), m)
lookupE k' = case M.lookup k' m of
Just v' -> v'
Nothing -> error $ "BoundedMap.insertBoundedMap: "
++ "Key in FIFO but not in Map"
in case newStack of
(st', Nothing ) -> ( BoundedMap st' newMap
, Nothing
)
(st', Just kTrunc) -> ( BoundedMap st' $ M.delete kTrunc newMap
, Just (kTrunc, lookupE kTrunc)
)
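-- Usage sketch (added; assumes BS.push evicts the oldest key as documented
-- above):
--   let (m1, _) = insert 1 "a" (mkBoundedMap 2)
--       (m2, _) = insert 2 "b" m1
--       (m3, dropped) = insert 3 "c" m2
-- then dropped == Just (1, "a") and M.keys (view m3) == [2, 3]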
-- Update an existing element (does nothing if the element is not found)
update :: Ord k => k -> v -> BoundedMap k v -> BoundedMap k v
update k v (BoundedMap st m) =
let m' = M.update (\_ -> Just v) k m
in BoundedMap st m'
-- LIFO pop
pop :: Ord k => BoundedMap k v -> (Maybe (k, v), BoundedMap k v)
pop (BoundedMap st m) =
let (k, st') = BS.pop st
lookupE k' = case M.lookup k' m of
Just v' -> v'
Nothing -> error $ "BoundedMap.popBoundedMap: "
++ "Key in FIFO but not in Map"
in case k of Just trunc -> (Just (trunc, lookupE trunc), BoundedMap st' (M.delete trunc m))
Nothing -> (Nothing, BoundedMap st m)
view :: BoundedMap k v -> M.Map k v
view (BoundedMap _ m) = m
|
blitzcode/jacky
|
src/old_code/BoundedMap.hs
|
mit
| 2,523
| 0
| 14
| 1,048
| 711
| 368
| 343
| 43
| 4
|
-- FunctionWithWhere.hs
module FunctionWithWhere where
printInc n = print plusTwo
where plusTwo = n + 2
|
younggi/books
|
haskellbook/practices/FunctionWithWhere.hs
|
mit
| 110
| 0
| 7
| 22
| 27
| 15
| 12
| 3
| 1
|
module Probability.Convergence.Terms where
import Notes
makeDefs [
"large sample theory"
, "limit theory"
, "asymptotic theory"
, "converge in probability"
, "converge in distribution"
, "converge in quadratic mean"
]
|
NorfairKing/the-notes
|
src/Probability/Convergence/Terms.hs
|
gpl-2.0
| 260
| 0
| 6
| 72
| 34
| 21
| 13
| -1
| -1
|
-- | ObjectStore represents the git repository's collection of objects,
-- mostly found under @.git\/objects@ as loose files or in pack files.
module ObjectStore (
getObject, getRawObject
, findTree
-- Exposed for testing.
, getLooseObject
) where
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString as B
import Codec.Compression.Zlib (decompress)
import Control.Exception
import Control.Monad
import Control.Monad.Error
import Data.Bits
import Data.ByteString.Internal (c2w, w2c)
import Data.Char (ord)
import Data.Word
import System.FilePath
import Commit
import FileMode
import Pack
import Object
import Shared
import State
-- |@splitAround sep str@ finds @sep@ in @str@ and returns the before and after
-- parts.
splitAround :: Word8 -> BL.ByteString -> Maybe (BL.ByteString, BL.ByteString)
splitAround sep input = do
pos <- BL.elemIndex sep input
let (a, b) = BL.splitAt pos input
return (a, BL.tail b)
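-- For example (illustrative, writing byte strings as ordinary strings):
-- splitAround (c2w ' ') "tree 42" would give Just ("tree", "42"),
-- splitting on the first space.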
-- |Parse an int out of a ByteString.Lazy.
-- Best I can figure out is to repack as a Char8. :(
parseIntBL :: BL.ByteString -> Maybe (Int, BC.ByteString)
parseIntBL = BC.readInt . BC.pack . map w2c . BL.unpack
-- |Parse a loose git blob, returning @(type, content)@.
parseLoose :: BL.ByteString -> Either String RawObject
parseLoose loose = do
-- The header looks like "%s %ld\0".
let parse = do
(typestr, loose') <- splitAround (c2w ' ') loose
(sizestr, rest) <- splitAround (c2w '\0') loose'
(size, _) <- parseIntBL sizestr -- XXX Unused?
return (typestr, rest)
case parse of
Nothing -> throwError $ "error parsing loose object header"
Just (typestr, raw) -> do
typ <- objectTypeFromString $ map w2c $ BL.unpack typestr
return (typ, raw)
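-- For example (illustrative, added): a loose blob decompresses to bytes
-- like "blob 11\0hello world", which parseLoose turns into
-- (TypeBlob, "hello world").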
-- |Return the path to a loose object.
objectPath :: Hash -> FilePath
objectPath hash = ".git/objects" </> before </> after
where (before, after) = splitAt 2 (hashAsHex hash)
-- | Get a \"loose\" (found in @.git\/objects\/@...) object.
getLooseObject :: FilePath -> IO (Maybe RawObject)
getLooseObject path = do
compressed <-
catchJust ioErrors (BL.readFile path >>= return . Just)
(\err -> return Nothing)
case compressed of
Nothing -> return Nothing
Just compressed -> do
checkHeader compressed
-- The normal format for loose objects is a compressed blob with a textual
-- header.
let raw = decompress compressed
case parseLoose raw of
Left err -> fail err
Right ok -> return (Just ok)
where
checkHeader raw = do
-- There is an older format that put info in the first few bytes of the
-- file. Git uses the following check to verify it's not this older format.
-- 1) First byte must be 0x78.
-- 2) First 16-bit word (big-endian) divisible by 31.
-- Grab the bytes as Word16s so the left shift works.
let (byte1, byte2) = (fromIntegral $ BL.index raw 0,
fromIntegral $ BL.index raw 1) :: (Word16, Word16)
let word = (byte1 `shiftL` 8) + byte2
unless (byte1 == 0x78 && word `mod` 31 == 0) $
fail "object appears to be in old loose format"
bsToString = map w2c . BL.unpack
-- |Fetch an object, from both the objects/../ dirs and one pack file.
-- TODO: multiple pack files, alternates, etc.
getRawObject :: Hash -> GitM RawObject
getRawObject hash = do
obj <- getPackObject hash
case obj of
Just obj -> return obj
Nothing -> do
obj <- liftIO $ getLooseObject (objectPath hash)
case obj of
Just obj -> return obj
Nothing -> fail $ "can't find object: " ++ hashAsHex hash
-- |Fetch an object and parse it into an 'Object', from both the
-- objects/../ dirs and one pack file.
-- TODO: multiple pack files, alternates, etc.
getObject :: Hash -> GitM Object
getObject hash = do
(objtype, raw) <- getRawObject hash
case objtype of
TypeBlob -> return $ Blob raw
TypeTree -> do
tree <- forceError (parseTree raw)
return (ObTree tree)
TypeCommit -> do
commit <- forceError (parseCommit (strictifyBS raw))
return (ObCommit commit)
-- | @findTree hash@ fetches objects, starting at @hash@, following commits
-- until it finds a Tree object.
findTree :: Hash -> GitM Tree
findTree hash = do
obj <- getObject hash
case obj of
Blob _ -> fail "found blob while looking for tree"
ObCommit commit -> findTree (Hash (fromHex (commit_tree commit)))
ObTree tree -> return tree
-- Parse a raw tree object's bytes into a Tree.
parseTree :: BL.ByteString -> ErrorOr Tree
parseTree raw | BL.null raw = return $ Tree []
| otherwise = do
(entry, rest) <- parseTreeEntry raw
(Tree xs) <- parseTree rest
return $ Tree (entry:xs)
-- Parse a ByteString as an octal integer.
bsToOctal :: BL.ByteString -> ErrorOr Int
bsToOctal str = mapM digit (BL.unpack str) >>= return . foldl octal 0 where
octal cur digit = cur * 8 + digit
digit x =
case fromIntegral x - ord '0' of
value | value >= 0 && value <= 7 -> return value
_ -> throwError $ "bad octal digit: " ++ show x
parseTreeEntry :: BL.ByteString -> Either String (TreeEntry, BL.ByteString)
parseTreeEntry raw = do
let header = do
-- The header looks like "%s %ld\0".
(mode, raw') <- splitAround (c2w ' ') raw
(path, raw'') <- splitAround (c2w '\0') raw'
let (hash, rest) = BL.splitAt 20 raw''
return (mode, bsToString path, Hash (strictifyBS hash), rest)
case header of
Just (modestr, path, hash, rest) -> do
mode <- bsToOctal modestr
return ((modeFromInt mode, path, hash), rest)
Nothing -> throwError "error parsing tree entry"
|
martine/gat
|
ObjectStore.hs
|
gpl-2.0
| 5,698
| 0
| 17
| 1,329
| 1,584
| 799
| 785
| -1
| -1
|
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Database.Design.Ampersand.ADL1.Pair
( Paire(..),Pairs
, mkPair
)
where
import Data.Typeable
import Database.Design.Ampersand.Basics
type Pairs = [Paire]
--srcPaire :: Paire -> String
--trgPaire :: Paire -> String
--type Paire = (String,String)
--mkPair :: String -> String -> Paire
--mkPair a b = (a,b)
--srcPaire = fst
--trgPaire = snd
data Paire = Paire {srcPaire ::String --TODO introduction of AtomValue, and replace these messy Strings
,trgPaire ::String
} deriving (Show ,Eq,Ord, Typeable)
mkPair :: String -> String -> Paire
mkPair = Paire
instance Unique Paire where
showUnique p = "("++srcPaire p++","++trgPaire p++")"
|
DanielSchiavini/ampersand
|
src/Database/Design/Ampersand/ADL1/Pair.hs
|
gpl-3.0
| 846
| 0
| 10
| 204
| 152
| 93
| 59
| 16
| 1
|
fatorial 0 = 1
fatorial n = fatorial (n - 1) * n
|
Gleuton/curso-haskell
|
fatorial.hs
|
gpl-3.0
| 46
| 0
| 8
| 10
| 31
| 15
| 16
| 2
| 1
|
{- ============================================================================
| Copyright 2011 Matthew D. Steele <mdsteele@alum.mit.edu> |
| |
| This file is part of Fallback. |
| |
| Fallback is free software: you can redistribute it and/or modify it under |
| the terms of the GNU General Public License as published by the Free |
| Software Foundation, either version 3 of the License, or (at your option) |
| any later version. |
| |
| Fallback is distributed in the hope that it will be useful, but WITHOUT |
| ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for |
| more details. |
| |
| You should have received a copy of the GNU General Public License along |
| with Fallback. If not, see <http://www.gnu.org/licenses/>. |
============================================================================ -}
module Fallback.View.MultiChoice (newMultiChoiceView) where
import Data.Traversable (for)
import Fallback.Constants (screenHeight, screenWidth)
import Fallback.Data.Color (Color(Color), blackColor)
import Fallback.Data.Point
import Fallback.Draw
import Fallback.Event (Key(KeyEscape), letterKeys)
import Fallback.State.Resources (FontTag(FontChancery14), Resources, rsrcFont)
import Fallback.View.Base (View, compoundView, nullView, subView_)
import Fallback.View.Dialog (newDialogView)
import Fallback.View.Widget
-------------------------------------------------------------------------------
newMultiChoiceView :: (MonadDraw m) => Resources -> View a b -> a -> String
-> [(String, c)] -> Maybe c -> m (View () c)
newMultiChoiceView resources bgView bgInput text choices cancelValue = do
let margin = 20
textW = 512
width = textW + 2 * margin
choiceIndent = 20
choiceW = textW - choiceIndent
choiceSpacing = 6
(textH, textView) <- newStaticTextWrapView resources textW text
topRef <- newDrawRef (margin + textH + 24)
choiceButtons <- for (zip3 choices ['A'..'Z'] letterKeys) $
\((label, value), letter, key) -> do
(choiceH, choicePaint) <-
newStaticTextWrapColorPaint resources choiceW label
let buttonColor ButtonUp = blackColor
buttonColor ButtonHover = Color 0 64 0
buttonColor ButtonDown = Color 128 0 0
buttonColor ButtonDisabled = blackColor
let buttonPaint _ state = do
let color = buttonColor state
drawText (rsrcFont resources FontChancery14) color
(LocTopleft $ Point 0 (0 :: Int)) (letter : "")
withSubCanvas (Rect choiceIndent 0 choiceW choiceH) $ do
choicePaint color
choiceButton <- newButton buttonPaint (const ReadyButton) [key] value
top <- readDrawRef topRef
writeDrawRef topRef (top + choiceH + choiceSpacing)
return $ subView_ (Rect margin top textW choiceH) choiceButton
cancelButton <- maybe (return nullView)
(newSimpleTextButton resources "Done" [KeyEscape]) cancelValue
buttonTop <- readDrawRef topRef
let (buttonWidth, buttonHeight) = (100, 20)
let height = buttonTop + buttonHeight + margin
let rect = Rect (half (screenWidth - width)) (half (screenHeight - height))
width height
let view = compoundView [
subView_ (Rect margin margin textW textH) textView,
compoundView choiceButtons,
subView_ (Rect (width - buttonWidth - margin) buttonTop
buttonWidth buttonHeight) cancelButton]
newDialogView bgView bgInput view rect
-------------------------------------------------------------------------------
|
mdsteele/fallback
|
src/Fallback/View/MultiChoice.hs
|
gpl-3.0
| 4,182
| 0
| 21
| 1,283
| 818
| 425
| 393
| 53
| 4
|
module Main where
addThree :: Int -> Int -> Int -> Int
addThree x y z = (x + y + z)
|
yumerov/haskell-study
|
learnyouahaskell/01-types-typeclasses/function-type.hs
|
gpl-3.0
| 84
| 0
| 7
| 22
| 44
| 24
| 20
| 3
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ServiceNetworking.Services.Projects.Global.Networks.UpdateConsumerConfig
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Service producers use this method to update the configuration of their
-- connection including the import\/export of custom routes and subnetwork
-- routes with public IP.
--
-- /See:/ <https://cloud.google.com/service-infrastructure/docs/service-networking/getting-started Service Networking API Reference> for @servicenetworking.services.projects.global.networks.updateConsumerConfig@.
module Network.Google.Resource.ServiceNetworking.Services.Projects.Global.Networks.UpdateConsumerConfig
(
-- * REST Resource
ServicesProjectsGlobalNetworksUpdateConsumerConfigResource
-- * Creating a Request
, servicesProjectsGlobalNetworksUpdateConsumerConfig
, ServicesProjectsGlobalNetworksUpdateConsumerConfig
-- * Request Lenses
, spgnuccParent
, spgnuccXgafv
, spgnuccUploadProtocol
, spgnuccAccessToken
, spgnuccUploadType
, spgnuccPayload
, spgnuccCallback
) where
import Network.Google.Prelude
import Network.Google.ServiceNetworking.Types
-- | A resource alias for @servicenetworking.services.projects.global.networks.updateConsumerConfig@ method which the
-- 'ServicesProjectsGlobalNetworksUpdateConsumerConfig' request conforms to.
type ServicesProjectsGlobalNetworksUpdateConsumerConfigResource
=
"v1" :>
CaptureMode "parent" "updateConsumerConfig" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] UpdateConsumerConfigRequest :>
Patch '[JSON] Operation
-- | Service producers use this method to update the configuration of their
-- connection including the import\/export of custom routes and subnetwork
-- routes with public IP.
--
-- /See:/ 'servicesProjectsGlobalNetworksUpdateConsumerConfig' smart constructor.
data ServicesProjectsGlobalNetworksUpdateConsumerConfig =
ServicesProjectsGlobalNetworksUpdateConsumerConfig'
{ _spgnuccParent :: !Text
, _spgnuccXgafv :: !(Maybe Xgafv)
, _spgnuccUploadProtocol :: !(Maybe Text)
, _spgnuccAccessToken :: !(Maybe Text)
, _spgnuccUploadType :: !(Maybe Text)
, _spgnuccPayload :: !UpdateConsumerConfigRequest
, _spgnuccCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ServicesProjectsGlobalNetworksUpdateConsumerConfig' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'spgnuccParent'
--
-- * 'spgnuccXgafv'
--
-- * 'spgnuccUploadProtocol'
--
-- * 'spgnuccAccessToken'
--
-- * 'spgnuccUploadType'
--
-- * 'spgnuccPayload'
--
-- * 'spgnuccCallback'
servicesProjectsGlobalNetworksUpdateConsumerConfig
:: Text -- ^ 'spgnuccParent'
-> UpdateConsumerConfigRequest -- ^ 'spgnuccPayload'
-> ServicesProjectsGlobalNetworksUpdateConsumerConfig
servicesProjectsGlobalNetworksUpdateConsumerConfig pSpgnuccParent_ pSpgnuccPayload_ =
ServicesProjectsGlobalNetworksUpdateConsumerConfig'
{ _spgnuccParent = pSpgnuccParent_
, _spgnuccXgafv = Nothing
, _spgnuccUploadProtocol = Nothing
, _spgnuccAccessToken = Nothing
, _spgnuccUploadType = Nothing
, _spgnuccPayload = pSpgnuccPayload_
, _spgnuccCallback = Nothing
}
-- | Required. Parent resource identifying the connection for which the
-- consumer config is being updated in the format:
-- \`services\/{service}\/projects\/{project}\/global\/networks\/{network}\`
-- {service} is the peering service that is managing connectivity for the
-- service producer\'s organization. For Google services that support this
-- functionality, this value is \`servicenetworking.googleapis.com\`.
-- {project} is the number of the project that contains the service
-- consumer\'s VPC network e.g. \`12345\`. {network} is the name of the
-- service consumer\'s VPC network.
spgnuccParent :: Lens' ServicesProjectsGlobalNetworksUpdateConsumerConfig Text
spgnuccParent
= lens _spgnuccParent
(\ s a -> s{_spgnuccParent = a})
-- | V1 error format.
spgnuccXgafv :: Lens' ServicesProjectsGlobalNetworksUpdateConsumerConfig (Maybe Xgafv)
spgnuccXgafv
= lens _spgnuccXgafv (\ s a -> s{_spgnuccXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
spgnuccUploadProtocol :: Lens' ServicesProjectsGlobalNetworksUpdateConsumerConfig (Maybe Text)
spgnuccUploadProtocol
= lens _spgnuccUploadProtocol
(\ s a -> s{_spgnuccUploadProtocol = a})
-- | OAuth access token.
spgnuccAccessToken :: Lens' ServicesProjectsGlobalNetworksUpdateConsumerConfig (Maybe Text)
spgnuccAccessToken
= lens _spgnuccAccessToken
(\ s a -> s{_spgnuccAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
spgnuccUploadType :: Lens' ServicesProjectsGlobalNetworksUpdateConsumerConfig (Maybe Text)
spgnuccUploadType
= lens _spgnuccUploadType
(\ s a -> s{_spgnuccUploadType = a})
-- | Multipart request metadata.
spgnuccPayload :: Lens' ServicesProjectsGlobalNetworksUpdateConsumerConfig UpdateConsumerConfigRequest
spgnuccPayload
= lens _spgnuccPayload
(\ s a -> s{_spgnuccPayload = a})
-- | JSONP
spgnuccCallback :: Lens' ServicesProjectsGlobalNetworksUpdateConsumerConfig (Maybe Text)
spgnuccCallback
= lens _spgnuccCallback
(\ s a -> s{_spgnuccCallback = a})
instance GoogleRequest
ServicesProjectsGlobalNetworksUpdateConsumerConfig
where
type Rs
ServicesProjectsGlobalNetworksUpdateConsumerConfig
= Operation
type Scopes
ServicesProjectsGlobalNetworksUpdateConsumerConfig
=
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/service.management"]
requestClient
ServicesProjectsGlobalNetworksUpdateConsumerConfig'{..}
= go _spgnuccParent _spgnuccXgafv
_spgnuccUploadProtocol
_spgnuccAccessToken
_spgnuccUploadType
_spgnuccCallback
(Just AltJSON)
_spgnuccPayload
serviceNetworkingService
where go
= buildClient
(Proxy ::
Proxy
ServicesProjectsGlobalNetworksUpdateConsumerConfigResource)
mempty
|
brendanhay/gogol
|
gogol-servicenetworking/gen/Network/Google/Resource/ServiceNetworking/Services/Projects/Global/Networks/UpdateConsumerConfig.hs
|
mpl-2.0
| 7,353
| 0
| 16
| 1,454
| 794
| 469
| 325
| 126
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.IAM.Projects.Locations.WorkLoadIdentityPools.Providers.Undelete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Undeletes a WorkloadIdentityPoolProvider, as long as it was deleted
-- fewer than 30 days ago.
--
-- /See:/ <https://cloud.google.com/iam/ Identity and Access Management (IAM) API Reference> for @iam.projects.locations.workloadIdentityPools.providers.undelete@.
module Network.Google.Resource.IAM.Projects.Locations.WorkLoadIdentityPools.Providers.Undelete
(
-- * REST Resource
ProjectsLocationsWorkLoadIdentityPoolsProvidersUndeleteResource
-- * Creating a Request
, projectsLocationsWorkLoadIdentityPoolsProvidersUndelete
, ProjectsLocationsWorkLoadIdentityPoolsProvidersUndelete
-- * Request Lenses
, plwlippuXgafv
, plwlippuUploadProtocol
, plwlippuAccessToken
, plwlippuUploadType
, plwlippuPayload
, plwlippuName
, plwlippuCallback
) where
import Network.Google.IAM.Types
import Network.Google.Prelude
-- | A resource alias for @iam.projects.locations.workloadIdentityPools.providers.undelete@ method which the
-- 'ProjectsLocationsWorkLoadIdentityPoolsProvidersUndelete' request conforms to.
type ProjectsLocationsWorkLoadIdentityPoolsProvidersUndeleteResource
=
"v1" :>
CaptureMode "name" "undelete" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
UndeleteWorkLoadIdentityPoolProviderRequest
:> Post '[JSON] Operation
-- | Undeletes a WorkloadIdentityPoolProvider, as long as it was deleted
-- fewer than 30 days ago.
--
-- /See:/ 'projectsLocationsWorkLoadIdentityPoolsProvidersUndelete' smart constructor.
data ProjectsLocationsWorkLoadIdentityPoolsProvidersUndelete =
ProjectsLocationsWorkLoadIdentityPoolsProvidersUndelete'
{ _plwlippuXgafv :: !(Maybe Xgafv)
, _plwlippuUploadProtocol :: !(Maybe Text)
, _plwlippuAccessToken :: !(Maybe Text)
, _plwlippuUploadType :: !(Maybe Text)
, _plwlippuPayload :: !UndeleteWorkLoadIdentityPoolProviderRequest
, _plwlippuName :: !Text
, _plwlippuCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsWorkLoadIdentityPoolsProvidersUndelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plwlippuXgafv'
--
-- * 'plwlippuUploadProtocol'
--
-- * 'plwlippuAccessToken'
--
-- * 'plwlippuUploadType'
--
-- * 'plwlippuPayload'
--
-- * 'plwlippuName'
--
-- * 'plwlippuCallback'
projectsLocationsWorkLoadIdentityPoolsProvidersUndelete
:: UndeleteWorkLoadIdentityPoolProviderRequest -- ^ 'plwlippuPayload'
-> Text -- ^ 'plwlippuName'
-> ProjectsLocationsWorkLoadIdentityPoolsProvidersUndelete
projectsLocationsWorkLoadIdentityPoolsProvidersUndelete pPlwlippuPayload_ pPlwlippuName_ =
ProjectsLocationsWorkLoadIdentityPoolsProvidersUndelete'
{ _plwlippuXgafv = Nothing
, _plwlippuUploadProtocol = Nothing
, _plwlippuAccessToken = Nothing
, _plwlippuUploadType = Nothing
, _plwlippuPayload = pPlwlippuPayload_
, _plwlippuName = pPlwlippuName_
, _plwlippuCallback = Nothing
}
-- | V1 error format.
plwlippuXgafv :: Lens' ProjectsLocationsWorkLoadIdentityPoolsProvidersUndelete (Maybe Xgafv)
plwlippuXgafv
= lens _plwlippuXgafv
(\ s a -> s{_plwlippuXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plwlippuUploadProtocol :: Lens' ProjectsLocationsWorkLoadIdentityPoolsProvidersUndelete (Maybe Text)
plwlippuUploadProtocol
= lens _plwlippuUploadProtocol
(\ s a -> s{_plwlippuUploadProtocol = a})
-- | OAuth access token.
plwlippuAccessToken :: Lens' ProjectsLocationsWorkLoadIdentityPoolsProvidersUndelete (Maybe Text)
plwlippuAccessToken
= lens _plwlippuAccessToken
(\ s a -> s{_plwlippuAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plwlippuUploadType :: Lens' ProjectsLocationsWorkLoadIdentityPoolsProvidersUndelete (Maybe Text)
plwlippuUploadType
= lens _plwlippuUploadType
(\ s a -> s{_plwlippuUploadType = a})
-- | Multipart request metadata.
plwlippuPayload :: Lens' ProjectsLocationsWorkLoadIdentityPoolsProvidersUndelete UndeleteWorkLoadIdentityPoolProviderRequest
plwlippuPayload
= lens _plwlippuPayload
(\ s a -> s{_plwlippuPayload = a})
-- | Required. The name of the provider to undelete.
plwlippuName :: Lens' ProjectsLocationsWorkLoadIdentityPoolsProvidersUndelete Text
plwlippuName
= lens _plwlippuName (\ s a -> s{_plwlippuName = a})
-- | JSONP
plwlippuCallback :: Lens' ProjectsLocationsWorkLoadIdentityPoolsProvidersUndelete (Maybe Text)
plwlippuCallback
= lens _plwlippuCallback
(\ s a -> s{_plwlippuCallback = a})
instance GoogleRequest
ProjectsLocationsWorkLoadIdentityPoolsProvidersUndelete
where
type Rs
ProjectsLocationsWorkLoadIdentityPoolsProvidersUndelete
= Operation
type Scopes
ProjectsLocationsWorkLoadIdentityPoolsProvidersUndelete
= '["https://www.googleapis.com/auth/cloud-platform"]
requestClient
ProjectsLocationsWorkLoadIdentityPoolsProvidersUndelete'{..}
= go _plwlippuName _plwlippuXgafv
_plwlippuUploadProtocol
_plwlippuAccessToken
_plwlippuUploadType
_plwlippuCallback
(Just AltJSON)
_plwlippuPayload
iAMService
where go
= buildClient
(Proxy ::
Proxy
ProjectsLocationsWorkLoadIdentityPoolsProvidersUndeleteResource)
mempty
|
brendanhay/gogol
|
gogol-iam/gen/Network/Google/Resource/IAM/Projects/Locations/WorkLoadIdentityPools/Providers/Undelete.hs
|
mpl-2.0
| 6,713
| 0
| 16
| 1,360
| 781
| 457
| 324
| 126
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Analytics.Management.WebPropertyUserLinks.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Adds a new user to the given web property.
--
-- /See:/ <https://developers.google.com/analytics/ Google Analytics API Reference> for @analytics.management.webpropertyUserLinks.insert@.
module Network.Google.Resource.Analytics.Management.WebPropertyUserLinks.Insert
(
-- * REST Resource
ManagementWebPropertyUserLinksInsertResource
-- * Creating a Request
, managementWebPropertyUserLinksInsert
, ManagementWebPropertyUserLinksInsert
-- * Request Lenses
, mwpuliWebPropertyId
, mwpuliPayload
, mwpuliAccountId
) where
import Network.Google.Analytics.Types
import Network.Google.Prelude
-- | A resource alias for @analytics.management.webpropertyUserLinks.insert@ method which the
-- 'ManagementWebPropertyUserLinksInsert' request conforms to.
type ManagementWebPropertyUserLinksInsertResource =
"analytics" :>
"v3" :>
"management" :>
"accounts" :>
Capture "accountId" Text :>
"webproperties" :>
Capture "webPropertyId" Text :>
"entityUserLinks" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] EntityUserLink :>
Post '[JSON] EntityUserLink
-- | Adds a new user to the given web property.
--
-- /See:/ 'managementWebPropertyUserLinksInsert' smart constructor.
data ManagementWebPropertyUserLinksInsert = ManagementWebPropertyUserLinksInsert'
{ _mwpuliWebPropertyId :: !Text
, _mwpuliPayload :: !EntityUserLink
, _mwpuliAccountId :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ManagementWebPropertyUserLinksInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mwpuliWebPropertyId'
--
-- * 'mwpuliPayload'
--
-- * 'mwpuliAccountId'
managementWebPropertyUserLinksInsert
:: Text -- ^ 'mwpuliWebPropertyId'
-> EntityUserLink -- ^ 'mwpuliPayload'
-> Text -- ^ 'mwpuliAccountId'
-> ManagementWebPropertyUserLinksInsert
managementWebPropertyUserLinksInsert pMwpuliWebPropertyId_ pMwpuliPayload_ pMwpuliAccountId_ =
ManagementWebPropertyUserLinksInsert'
{ _mwpuliWebPropertyId = pMwpuliWebPropertyId_
, _mwpuliPayload = pMwpuliPayload_
, _mwpuliAccountId = pMwpuliAccountId_
}
-- | Web Property ID to create the user link for.
mwpuliWebPropertyId :: Lens' ManagementWebPropertyUserLinksInsert Text
mwpuliWebPropertyId
= lens _mwpuliWebPropertyId
(\ s a -> s{_mwpuliWebPropertyId = a})
-- | Multipart request metadata.
mwpuliPayload :: Lens' ManagementWebPropertyUserLinksInsert EntityUserLink
mwpuliPayload
= lens _mwpuliPayload
(\ s a -> s{_mwpuliPayload = a})
-- | Account ID to create the user link for.
mwpuliAccountId :: Lens' ManagementWebPropertyUserLinksInsert Text
mwpuliAccountId
= lens _mwpuliAccountId
(\ s a -> s{_mwpuliAccountId = a})
instance GoogleRequest
ManagementWebPropertyUserLinksInsert where
type Rs ManagementWebPropertyUserLinksInsert =
EntityUserLink
type Scopes ManagementWebPropertyUserLinksInsert =
'["https://www.googleapis.com/auth/analytics.manage.users"]
requestClient
ManagementWebPropertyUserLinksInsert'{..}
= go _mwpuliAccountId _mwpuliWebPropertyId
(Just AltJSON)
_mwpuliPayload
analyticsService
where go
= buildClient
(Proxy ::
Proxy ManagementWebPropertyUserLinksInsertResource)
mempty
|
rueshyna/gogol
|
gogol-analytics/gen/Network/Google/Resource/Analytics/Management/WebPropertyUserLinks/Insert.hs
|
mpl-2.0
| 4,475
| 0
| 17
| 1,010
| 470
| 280
| 190
| 84
| 1
|
--
-- Copyright (c) 2005,2009 Stefan Wehr - http://www.stefanwehr.de
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA
--
{- |
Top-level module that re-exports functionality from sub-modules.
Modules that only define unit tests and quickcheck properties typically
only need to import this module.
Your test driver should additionally import 'Test.Framework.TestManager' and,
if needed, 'Test.Framework.BlackBoxTest'.
-}
module Test.Framework (
-- * Unit tests
module Test.Framework.HUnitWrapper, TM.makeUnitTest,
-- * Quickcheck
module Test.Framework.QuickCheckWrapper, TM.makeQuickCheckTest,
-- * Generic assertions
module Test.Framework.AssertM,
-- * Organizing tests
TM.makeTestSuite, TM.TestSuite, TM.htfMain, TM.htfMainWithArgs, Loc.makeLoc
) where
import Test.Framework.HUnitWrapper
import Test.Framework.QuickCheckWrapper
import Test.Framework.AssertM
import qualified Test.Framework.TestManager as TM
import qualified Test.Framework.Location as Loc
|
ekarayel/HTF
|
Test/Framework.hs
|
lgpl-2.1
| 1,673
| 0
| 5
| 255
| 122
| 89
| 33
| 10
| 0
|
--------------------------------------------------------------------------
-- --
-- Matches.hs --
-- --
-- (c) Simon Thompson, 1995, 2000 --
-- --
--------------------------------------------------------------------------
module Matches where
import RegExp
matches :: Reg -> String -> Bool
matches Epsilon st = (st == "")
matches (Literal ch) st = (st == [ch])
matches (Or r1 r2) st
= matches r1 st || matches r2 st
matches (Then r1 r2) st
= or [ matches r1 s1 && matches r2 s2 | (s1,s2) <- splits st ]
--------------------------------------------------------------------------
-- The definition for (Star r) requires some thought. Can think --
-- of (Star r) as Epsilon or (Then r (Star r)), but in the latter --
-- case the match with (Then r (Star r)) needs to be one in which --
-- r matches *non-trivially* at the start of the string; otherwise     --
-- there is a possibility of a recursive call to --
-- matches (Star r) st --
-- a black hole!! --
-- --
-- matches (Star (Or Epsilon (Or (Literal 'a') (Literal 'b')))) --
-- is an example of exactly this. --
--------------------------------------------------------------------------
matches (Star r) st
= matches Epsilon st ||
or [ matches r s1 && matches (Star r) s2 | (s1,s2) <- frontSplits st ]
--------------------------------------------------------------------------
-- All the ways of splitting a list into two halves. --
-- --
-- splits [2,3,4] --
-- = [([],[2,3,4]),([2],[3,4]),([2,3],[4]),([2,3,4],[])] --
--------------------------------------------------------------------------
splits :: [a] -> [ ([a],[a]) ]
splits st = [ splitAt n st | n <- [0 .. length st] ]
--------------------------------------------------------------------------
-- Splits as above, with the first half non-trivial. --
--------------------------------------------------------------------------
frontSplits :: [a] -> [ ([a],[a]) ]
frontSplits st = [ splitAt n st | n <- [1.. length st] ]
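-- Illustrative usage (added as a sketch, not part of the original module):
-- the pattern "ab*" -- an 'a' followed by any number of 'b's -- matches
-- "abbb" but not "ba".
_exampleMatches :: Bool
_exampleMatches =
  matches (Then (Literal 'a') (Star (Literal 'b'))) "abbb"
    && not (matches (Then (Literal 'a') (Star (Literal 'b'))) "ba")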
|
SonomaStatist/CS454_NFA
|
haskellDFA/RegExp/Matches.hs
|
unlicense
| 2,049
| 0
| 11
| 405
| 365
| 205
| 160
| 16
| 1
|
module Tables.A271710Spec (main, spec) where
import Test.Hspec
import Tables.A271710 (a271710)
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "A271710" $
it "correctly computes the first 20 elements" $
take 20 (map a271710 [0..]) `shouldBe` expectedValue where
expectedValue = [0,3,3,5,0,5,9,6,6,9,17,10,0,10,17,33,18,12,12,18]
|
peterokagey/haskellOEIS
|
test/Tables/A271710Spec.hs
|
apache-2.0
| 356
| 0
| 10
| 59
| 160
| 95
| 65
| 10
| 1
|
ans (x:y:b:p:_) =
let a = x*b + y*p
a'= ( x * (max b 5) + y * (max p 2) ) * 8 `div` 10
c = if b >= 5 && p >= 2
then a * 8 `div` 10
else min a a'
in
c
main = do
_ <- getLine
c <- getContents
let i = map (map read) $ map words $ lines c :: [[Int]]
o = map ans i
mapM_ print o
|
a143753/AOJ
|
0278.hs
|
apache-2.0
| 330
| 0
| 16
| 136
| 215
| 109
| 106
| 13
| 2
|
{-# LANGUAGE OverloadedStrings #-}
-- |Example Crawlers for a variety of use-cases.
module Crawling.Hephaestos.Crawlers.Templates (
-- * Crawlers that generate numbered lists of links.
fileList,
fileList',
-- * Crawlers for single files
singleFile,
-- * Crawlers for XPath-expressions
xPathCrawler,
-- * Crawlers that collect specific elements from pages.
allElementsWhere,
allElementsWhereExtension,
allImages,
)where
import Prelude hiding ((++))
import Control.Arrow
import Control.Exception
import Data.Char
import Data.Functor.Monadic
import qualified Data.List.Safe as L
import Data.List.Split
import Data.ListLike (ListLike(append), StringLike(fromString))
import Data.Maybe (mapMaybe)
import Data.Monoid (mempty)
import qualified Data.Text.Lazy as T
import Data.Void
import Crawling.Hephaestos.Crawlers.Utils
import Crawling.Hephaestos.Fetch.Types
import Crawling.Hephaestos.Fetch.Successor
-- Lists of files
-------------------------------------------------------------------------------
-- |Downloads a list of numbered files.
-- Given a list of integers @[i_1,...,i_n]@ and a URL of the form @XmY@,
-- where @m@ is the last decimal number in the
-- URL, this function generates the Blobs
--
-- @
-- Xi_1Y
-- Xi_2Y
-- ...
-- Xi_nY
-- @
--
-- Thus, @m@ only marks where the \"hole\" to be filled in is located. It
-- is thrown away. If the given URL is not of the form @XmY@, then a single
-- 'Failure' node is generated.
--
-- This function does not check whether the generated URLs actually
-- exist.
fileList :: [Int] -> Successor SomeException Void Void
fileList range uri _ _ = return $ case e of Nothing -> [noNum]
Just _ -> map f indices
where
noNum = voidNode $ failure (URLHadNoNumberError $ fromString $ show uri) Nothing
fillIn (x,_,z) i = voidNode
$ makeLink uri blob
$ T.pack $ concat x L.++ i L.++ concat z
(f, e@(Just num)) = fillIn &&& (\(_,y,_) -> y)
$ getLast isNum
$ split (whenElt (not.isDigit))
$ show uri
indices = map (padLeft '0' (length num) . show) range
padLeft :: a -> Int -> [a] -> [a]
padLeft c i cs = replicate (i - length cs) c L.++ cs
-- |Variant of 'fileList' which finds out the range by itself.
-- The first parameter is the number of items to download.
--
-- @
-- fileList' i \"XmY\" = fileList [m..m+i-1] \"XmY\"
-- @
fileList' :: Int -> Successor SomeException Void Void
fileList' num uri = fileList range uri
where
(_,e,_) = getLast isNum $ split (whenElt (not.isDigit)) $ show uri
range = case e of Nothing -> []
Just e' -> [read e'..read e'+num-1]
-- |Returns True iff the string is composed only of digits and is not empty.
isNum :: String -> Bool
isNum x = all isDigit x && not (null x)
-- |Gets the last element of a list which fulfils a given predicate.
-- The elements of the list before and after that element are also
-- returned. Only works for finite lists.
-- @any f xs == True@ implies @getLast f xs == (ys,Just e,zs)@
-- such that @xs == ys ++ [e] ++ zs@, @f e == True@ and @any f zs == False@.
-- On the other hand, @any f xs == False@ implies
-- @getLast f xs == ([],Nothing,xs)@.
getLast :: (a -> Bool) -> [a] -> ([a],Maybe a,[a])
getLast f xs = (concat $ L.init before, L.last before, after xs)
where
after = reverse . takeWhile (not . f) . reverse
before = take (length xs - length (after xs)) xs
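-- Illustrative checks (added as a sketch): 'padLeft' pads to a fixed width
-- and 'getLast' picks out the last element satisfying a predicate, together
-- with the elements before and after it.
_templateExamples :: Bool
_templateExamples =
    padLeft '0' 4 "7" == "0007"
    && getLast even [1,2,3,4,5 :: Int] == ([1,2,3], Just 4, [5])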
-- Single files
-------------------------------------------------------------------------------
-- |Retrieves a single file as a ByteString.
singleFile :: Successor SomeException Void Void
singleFile uri _ _ = return [voidNode $ blob uri mempty]
-- XPath
-------------------------------------------------------------------------------
-- |Runs an XPath-expression that returns a set of text nodes
-- against a page and returns the result set as URLs ('Blob').
-- If the given XPath-expression does not return a set of text,
-- this function returns an empty result set.
xPathCrawler :: T.Text -> Successor SomeException Void Void
xPathCrawler xpath = htmlSuccessor mempty xPathCrawler'
where
xPathCrawler' uri doc _ = return
$ mapMaybe (getText
>=$> makeLink uri blob
>=$> voidNode)
$ getXPath xpath doc
-- Specific elements
-------------------------------------------------------------------------------
-- |Searches the contents of given pairs of tags and attributes on a page.
--
-- Note: if a URL is linked multiple times on a page
-- it will only appear once in the results.
allElementsWhere :: [(T.Text, T.Text)]
-- ^The list of tag/attribute pairs which are to be
-- gathered. E.g. @[("a","href"), ("img", "src")]@.
-> (URL -> Bool)
-- ^The predicate which gathered elements have
-- to pass.
-> Successor SomeException Void Void
allElementsWhere tags pred = htmlSuccessor mempty allWhere'
where
allWhere' uri doc _ = return $ concatMap getRes tags
where
      -- puts (TAG,ATTR) into an XPath expression of the form
      -- "//TAG/@ATTR",
      -- then filters the resulting text values with the given predicate
getRes (tag, attr) =
map (voidNode . makeLink uri blob)
$ L.nub
$ filter (\x -> not ("#" `T.isPrefixOf` x) && pred x)
$ mapMaybe getText
$ getXPath
("//" `append` tag `append` "/@" `append` attr `append` "")
doc
-- |Variant of 'allElementsWhere', but instead of a predicate,
-- all list of acceptable file extensions for the collected URLs
-- (e.g. @[".jpg", ".png"]@) is passed.
allElementsWhereExtension :: [(T.Text,T.Text)]
-> [T.Text]
-> Successor SomeException Void Void
allElementsWhereExtension tags exts = allElementsWhere tags elemOfExsts
where
elemOfExsts t = any (`T.isSuffixOf` stripParams t) exts
stripParams = T.takeWhile ('?'/=)
-- |Variant of 'allElementsWhere' which selects the @src@-attributes of
-- @img@-tags and the @href@-attributes of @a@-tags. Only URLs with the
-- following file extensions are returned:
-- * TIFF: @.tif, .tiff@
-- * GIF: @.gif@
-- * JPG: @.jpeg, .jpg, .jif, .jiff@
-- * JPEG 2000: @.jp2, .jpx, .j2k, .j2c@
-- * Flashpix: @.fpx@
-- * ImagePac: @.pcd@
-- * PNG: @.png@
-- * SVG: @.svg, .svgt@
allImages :: Successor SomeException Void Void
allImages = allElementsWhereExtension tags exts
where
tags = [("img", "src"),
("a", "href")]
exts = [".tif",".tiff",
".gif",
".jpeg", ".jpg", ".jif", ".jiff",
".jp2", ".jpx", ".j2k", ".j2c",
".fpx",
".pcd",
".png",
".svg", ".svgt"]
|
jtapolczai/Hephaestos
|
Crawling/Hephaestos/Crawlers/Templates.hs
|
apache-2.0
| 7,181
| 0
| 18
| 1,978
| 1,329
| 756
| 573
| 89
| 2
|
-- cabal-helper: Simple interface to Cabal's configuration state
-- Copyright (C) 2018 Daniel Gröber <cabal-helper@dxld.at>
--
-- SPDX-License-Identifier: Apache-2.0
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
{-|
Module : CabalHelper.Compiletime.Types.RelativePath
License : Apache-2.0
-}
module CabalHelper.Compiletime.Types.RelativePath
( RelativePath
, mkRelativePath
, unRelativePath
) where
import System.FilePath
-- | A path guaranteed to be relative and not escape the base path. The
-- constructor is not exposed, use the 'mkRelativePath' smart constructor.
newtype RelativePath = RelativePath { unRelativePath :: FilePath }
deriving (Show)
-- | Smart constructor for 'RelativePath'. Checks if the given path
-- satisfies the constraints and calls 'error' if not.
mkRelativePath :: FilePath -> RelativePath
mkRelativePath dir
| isAbsolute dir =
error $ "mkRelativePath: the path given was absolute! got: " ++ dir
| doesRelativePathEscapeCWD dir =
error $ "mkRelativePath: the path given escapes the base dir! got: " ++ dir
| otherwise =
RelativePath dir
doesRelativePathEscapeCWD :: FilePath -> Bool
doesRelativePathEscapeCWD path =
go [] $ splitDirectories $ normalise path
    -- normalise collapses '.' in path; this is very important, or this
    -- check would be trivial to defeat. For example './../' would be
-- able to escape.
where
go (_:xs) ("..":ys) = go xs ys
go [] ("..":__) = True
go xs (y :ys) = go (y:xs) ys
go _ [] = False
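-- Illustrative checks (added as a sketch): "foo/../bar" stays inside the
-- base directory, while "../foo" escapes it.
_escapeExamples :: (Bool, Bool)
_escapeExamples =
  ( doesRelativePathEscapeCWD "foo/../bar"  -- False
  , doesRelativePathEscapeCWD "../foo"      -- True
  )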
|
DanielG/cabal-helper
|
src/CabalHelper/Compiletime/Types/RelativePath.hs
|
apache-2.0
| 1,757
| 0
| 9
| 370
| 252
| 140
| 112
| 22
| 4
|
{- |
Module : Neovim.Config
Description : The user editable and compilable configuration
Copyright : (c) Sebastian Witte
License : Apache-2.0
Maintainer : woozletoff@gmail.com
Stability : experimental
-}
module Neovim.Config (
NeovimConfig(..),
module System.Log,
) where
import Neovim.Context (Neovim)
import Neovim.Plugin.Internal (NeovimPlugin)
import Neovim.Plugin.Startup (StartupConfig)
import System.Log (Priority (..))
-- | This data type contains information about the configuration of neovim. See
-- the fields' documentation for what you possibly want to change. Also, the
-- tutorial in the "Neovim" module should get you started.
data NeovimConfig = Config
{ plugins :: [Neovim (StartupConfig NeovimConfig) () NeovimPlugin]
-- ^ The list of plugins. The IO type inside the list allows the plugin
-- author to run some arbitrary startup code before creating a value of
-- type 'NeovimPlugin'.
, logOptions :: Maybe (FilePath, Priority)
-- ^ Set the general logging options.
, errorMessage :: Maybe String
-- ^ Internally used field. Changing this has no effect.
--
-- Used by 'Dyre' for storing compilation errors.
}
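-- Illustrative sketch (added, not part of the original module): a minimal
-- configuration with no plugins that logs warnings and above to a
-- hypothetical file path.
_exampleConfig :: NeovimConfig
_exampleConfig = Config
    { plugins      = []
    , logOptions   = Just ("/tmp/nvim-hs.log", WARNING)
    , errorMessage = Nothing
    }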
|
gibiansky/nvim-hs
|
library/Neovim/Config.hs
|
apache-2.0
| 1,281
| 0
| 12
| 319
| 136
| 87
| 49
| 11
| 0
|
import Prelude (IO)
import Yesod.Default.Config
import Yesod.Default.Main (defaultMain)
import Application (getAppConfig, makeApplication)
import Settings (parseExtra)
main :: IO ()
main = defaultMain (fromArgs parseExtra) makeApplication
|
nejstastnejsistene/bannerstalker-yesod
|
server/Main.hs
|
bsd-3-clause
| 276
| 0
| 7
| 62
| 71
| 41
| 30
| 7
| 1
|
module Main where
import Logic.Pheasant
main :: IO ()
main = return ()
|
thsutton/pheasant
|
test/Language.hs
|
bsd-3-clause
| 83
| 0
| 6
| 25
| 29
| 16
| 13
| 4
| 1
|
-- -- |
-- -- Module : Data.Config.Types
-- -- Copyright : 2011 Magnus Therning, 2012 Hans Hoglund
-- -- License : BSD3
-- module Data.Config.Types where
--
-- import qualified Data.Map as M
--
-- type OptionName = String
-- type OptionValue = String
-- type Section = M.Map OptionName OptionValue
--
-- type SectionName = (String, Maybe String)
-- type Config = M.Map SectionName Section
--
--
--
-- -- useful since Map doesn't have any Serial instance
-- cfgFromList :: [(SectionName, [(OptionName, OptionValue)])] -> Config
-- cfgFromList = M.map (M.fromList) . M.fromList
--
-- cfgToList :: Config -> [(SectionName, [(OptionName, OptionValue)])]
-- cfgToList = M.toList . M.map (M.toList)
|
hanshoglund/hsconf
|
src/Data/Config/Types.hs
|
bsd-3-clause
| 705
| 0
| 2
| 123
| 25
| 24
| 1
| 1
| 0
|
--------------------------------------------------------------------------------
module Generics.Deriving.Util (
Dir(..),
dir,
) where
--------------------------------------------------------------------------------
data Dir = L | R deriving (Show, Eq)
dir :: Dir -> a -> a -> a
dir L f _ = f
dir R _ g = g
|
spl/generic-deriving-extras
|
src/Generics/Deriving/Util.hs
|
bsd-3-clause
| 317
| 0
| 7
| 48
| 84
| 49
| 35
| 7
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Lib
import Web.Scotty
import Data.Monoid (mconcat)
main = scotty 3000 $ do
get "/:word" $ do
beam <- param "word"
html $ mconcat ["<h1>Scotty, ", beam, " me up!</h1>"]
|
adbrowne/haskell-eb
|
app/Main.hs
|
bsd-3-clause
| 242
| 0
| 13
| 51
| 74
| 39
| 35
| 9
| 1
|
module Test.Parser where
import Test.Hspec
import Parser.ProductRule
import Parser.ProductRule.Internal (Symbol(..))
import Data.Set as S (fromList)
import Data.Map as M (fromList)
data TestNT = A0 | B0 | C0 deriving (Eq, Show, Ord)
tableA :: [ProductRule TestNT]
tableA = productRules $ do
Start ---> A0 & B0 & C0
|> A0 & B0
|/ Null
A0 --> B0 & C0 & A0
|> A0 & C0
|/ Null
data MySym = A
| B
| C'
deriving (Eq, Show, Ord)
tableB :: [ProductRule MySym]
tableB = productRules $ do
Start ---> A & C' & B -- AC'B concatenation
|> A
|> C'
; A --> B
|/ Null
|> A & C'
; B --> C'
tableC :: [ProductRule MySym]
tableC = productRules $
Start >>> Null & C'
|> C'
test :: IO ()
test = hspec $ do
describe "sampleTableA" $
it "productRules" $
S.fromList tableA `shouldBe` S.fromList [
(Start, [NT A0, T B0, T C0])
, (Start, [NT A0, T B0])
, (Start, [Null])
, (NT A0, [T B0, T C0, NT A0])
, (NT A0, [NT A0, T C0])
, (NT A0, [Null])
]
describe "sampleTableB" $
it "productRules" $
S.fromList tableB `shouldBe` S.fromList [
(Start, [NT A, T C', NT B])
, (Start, [NT A])
, (Start, [T C'])
, (NT A, [NT B])
, (NT A, [NT A, T C'])
, (NT A, [Null])
, (NT B, [T C'])
]
describe "sampleTableC" $
it "productRules" $
S.fromList tableC `shouldBe` S.fromList [(Start, [Null, T C']), (Start, [T C'])]
|
YLiLarry/parser241
|
test/Test/Parser.hs
|
bsd-3-clause
| 1,660
| 0
| 13
| 637
| 701
| 382
| 319
| 56
| 1
|
module Main where
import Options.Applicative
import Control.Applicative
import Control.Monad
import Data.Monoid
import qualified Examples.ClientPipeline as ClientPipeline
import qualified Examples.ClientToUpper as ClientToUpper
import qualified Examples.ProxyAuth as ProxyAuth
import qualified Examples.ProxyToUpper as ProxyToUpper
import qualified Examples.ServePipes as ServePipes
import qualified Examples.ServerDouble as ServerDouble
import qualified Examples.ServerToUpper as ServerToUpper
opts :: Parser (IO ())
opts = helper <*> subparser stuff where
stuff = mconcat
[ command "ClientPipeline" (info (pure ClientPipeline.main) idm)
, command "ClientToUpper" (info (pure ClientToUpper.main) idm)
, command "ProxyAuth" (info (pure ProxyAuth.main) idm)
, command "ProxyToUpper" (info (pure ProxyToUpper.main) idm)
, command "ServePipes" (info (pure ServePipes.main) idm)
, command "ServerDouble" (info (pure ServerDouble.main) idm)
, command "ServerToUpper" (info (pure ServerToUpper.main) idm)
]
main :: IO ()
main = join $ execParser (info opts idm)
|
michaelt/pipes-network-tcp-examples
|
Main.hs
|
bsd-3-clause
| 1,128
| 0
| 14
| 195
| 316
| 175
| 141
| 24
| 1
|
module Emulator.Registers where
import Data.Word
import Data.Bits
import Data.Maybe ( catMaybes )
-- | http://www.masswerk.at/6502/6502_instruction_set.html
data Registers = Registers
{ rPC :: !Word16 -- ^ Program Counter
, rAC :: !Word8 -- ^ Accumulator
, rX :: !Word8 -- ^ X register
, rY :: !Word8 -- ^ Y register
, rSR :: !Word8 -- ^ Status register [NV-BDIZC]
, rSP :: !Word8 -- ^ Stack pointer
} deriving (Read, Show, Eq, Ord)
data SRFlag = Carry -- ^ bit 0
| Zero -- ^ bit 1
| Interrupt -- ^ bit 2
| Decimal -- ^ bit 3
| Break -- ^ bit 4
| Ignored -- ^ bit 5
| Overflow -- ^ bit 6
| Negative -- ^ bit 7
deriving (Read, Show, Eq, Ord, Enum)
-- | Construct the registers all with initial value of 0
mkRegisters :: Registers
mkRegisters = Registers
{ rPC = 0
, rAC = 0
, rX = 0
, rY = 0
, rSR = 0
, rSP = 0
}
-- | all the status register flags in a handy list
allSRFlags :: [SRFlag]
allSRFlags = [Carry .. Negative]
-- | Look up the value of a particular flag by name.
-- Nothing means the flag is not set, Just flag means it
-- is set.
lookupSRFlag :: Registers -> SRFlag -> Maybe SRFlag
lookupSRFlag (Registers { rSR = sr }) f
| testBit sr (fromEnum f) = Just f
| otherwise = Nothing
-- | This returns all the SRFlags that are currently set,
-- the return type is morally `Data.Set.Set SRFlag`
getSRFlags :: Registers -> [SRFlag]
getSRFlags rs =
catMaybes (zipWith lookupSRFlag (repeat rs) allSRFlags)
-- | Applies a bit transformation at the specified status register bit
atSRFlag :: (Word8 -> Int -> Word8) -> Registers -> SRFlag -> Registers
atSRFlag f rs@(Registers { rSR = sr }) flag =
rs { rSR = f sr (fromEnum flag) }
-- | Clears a specific status register flag
clearSRFlag :: Registers -> SRFlag -> Registers
clearSRFlag = atSRFlag clearBit
-- | Sets a specific status register flag
setSRFlag :: Registers -> SRFlag -> Registers
setSRFlag = atSRFlag setBit
-- | Complements a specific status register flag
complementSRFlag :: Registers -> SRFlag -> Registers
complementSRFlag = atSRFlag complementBit
-----------------------------------------------------------------
-- With the exception of clearSRFlags these are overkill
-- | Applies a function at every bit
atSRFlags :: (Word8 -> Int -> Word8) -> Registers -> Registers
atSRFlags f rs@(Registers { rSR = sr }) =
rs { rSR = foldl f sr (map fromEnum allSRFlags) }
-- | Clears all the bits in the status register.
clearSRFlags :: Registers -> Registers
clearSRFlags rs = rs { rSR = 0 }
-- | Sets every bit in the status register
setSRFlags :: Registers -> Registers
setSRFlags = atSRFlags setBit
-- | Complement every bit in the status register
complementSRFlags :: Registers -> Registers
complementSRFlags = atSRFlags complementBit
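-- Illustrative usage (added): set the Carry and Zero flags on fresh
-- registers and read the set flags back; the result is [Carry, Zero].
_flagExample :: [SRFlag]
_flagExample = getSRFlags (setSRFlag (setSRFlag mkRegisters Carry) Zero)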
|
dagit/hs6502
|
src/Emulator/Registers.hs
|
bsd-3-clause
| 2,889
| 0
| 10
| 678
| 620
| 356
| 264
| 68
| 1
|
{-# LANGUAGE PackageImports #-}
module Data.Foldable (module M) where
import "base" Data.Foldable as M
|
silkapp/base-noprelude
|
src/Data/Foldable.hs
|
bsd-3-clause
| 108
| 0
| 4
| 18
| 21
| 15
| 6
| 3
| 0
|
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable, FlexibleContexts, TypeFamilies #-}
{-| A simple framework for doing forms-based
    authentication in Happstack.
-}
module Happstack.Server.TinyAuth
(
-- * Required state
AuthConfig(..)
, defaultAuthConfig
-- * Key management
, getKey
, defaultKeyFile
, getDefaultKey
, initKey
-- * System hooks, login/logout
, setLoggedIn
, forwardAfterLogin
, loginData
, requireLoggedIn
, setLoggedOut
, refreshLoggedIn
-- * Monadic interface
, AuthMonad(..)
, setLoggedIn'
, forwardAfterLogin'
, loginData'
, requireLoggedIn'
, setLoggedOut'
, refreshLoggedIn'
) where
import Control.Monad (guard)
import Control.Monad.Trans (MonadIO, liftIO)
import qualified Data.ByteString.Char8 as B8
import Data.Data (Data)
import Data.Functor
import Data.Maybe (fromMaybe)
import Data.Proxy (Proxy, asProxyTypeOf)
import Data.SafeCopy (base, deriveSafeCopy, SafeCopy, safePut, safeGet)
import Data.Serialize (runGet, runPut)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time (UTCTime, getCurrentTime, addUTCTime)
import Data.Typeable (Typeable)
import Happstack.Server
( FilterMonad, ServerMonad, WebMonad, Response, HasRqData, getDataFn,
lookCookie, addCookie, mkCookie, Cookie(..), CookieLife(..),
finishWith, seeOther, toResponse, look, queryString, rqUri, askRq )
import Network.URL
( URL(..), URLType(..), importURL, exportURL, add_param)
import Web.ClientSession
-- | Internal type we serialize to the
-- user cookie.
data SessionInfo u =
MkSessionInfo
{ sess_user :: u
, sess_expires :: UTCTime
}
deriving (Data, Typeable, Eq, Ord)
deriveSafeCopy 1 'base ''SessionInfo
-- | This type is used to customize how authentication
-- works for your site. We recommend building it from the
-- 'defaultAuthConfig' value.
data AuthConfig s =
MkAuthConfig
{ loginForm :: Text -- ^ where to redirect to on authentication failure
, loginSecret :: Maybe Key -- ^ key to use for encryption
, loginCookieName
:: String -- ^ Name of cookie to store authentication data
, loginRedirectParam
:: Maybe String -- ^ Query param used to store redirect information
}
defaultAuthConfig :: AuthConfig s
defaultAuthConfig =
MkAuthConfig
{ loginForm = T.pack "/login"
, loginSecret = Nothing
, loginCookieName = "authData"
, loginRedirectParam = Just "loginRedirect"
}
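-- Illustrative sketch (added): a hypothetical site-specific configuration
-- that keeps the defaults but points at a custom login page and cookie name.
_customAuthConfig :: AuthConfig s
_customAuthConfig = defaultAuthConfig
    { loginForm = T.pack "/account/login"
    , loginCookieName = "mySiteAuth"
    }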
-- | Class to make both the 'AuthConfig' data available and to perform Happstack-server
-- functions.
class ( MonadIO m, FilterMonad Response m, ServerMonad m, HasRqData m, Functor m
, WebMonad Response m)
=> AuthMonad m where
type Session m
getAuthConfig :: m (AuthConfig (Session m))
newExpires :: AuthConfig s -> IO UTCTime
newExpires _ = do
curr <- liftIO getCurrentTime
-- expires in one week
return $ addUTCTime 604800 curr
getKey_ :: AuthConfig s -> IO Key
getKey_ cfg =
case loginSecret cfg of
Just k -> return k
Nothing -> getDefaultKey
-- | The passed in user-data is sent to the client
-- encrypted. Call this in the handler for your login form after
-- the user has successfully provided credentials.
setLoggedIn :: (SafeCopy s, MonadIO m, FilterMonad Response m, Functor m)
=> AuthConfig s -- ^ Config
-> s -- ^ Session state
-> m ()
setLoggedIn cfg user = do
let cName = loginCookieName cfg
expires <- liftIO $ newExpires cfg
let dt = MkSessionInfo user expires
dtBytes = runPut $ safePut dt
key <- liftIO $ getKey_ cfg
cData <- B8.unpack <$> (liftIO $ encryptIO key dtBytes)
let cookie = (mkCookie cName cData) {httpOnly = True}
addCookie (Expires expires) cookie
-- | Wrapper around `setLoggedIn`.
setLoggedIn' :: (AuthMonad m, Session m ~ s, SafeCopy s)
=> s -> m ()
setLoggedIn' s = getAuthConfig >>= flip setLoggedIn s
eitherToMaybe :: Either a b -> Maybe b
eitherToMaybe Left{} = Nothing
eitherToMaybe (Right x) = Just x
-- | If there is a logged in user and the session data
-- is valid return the session data.
loginData :: (SafeCopy s, MonadIO m, HasRqData m, ServerMonad m)
=> AuthConfig s
-> m (Maybe s)
loginData cfg = do
let cName = loginCookieName cfg
cookieE <- getDataFn $ lookCookie cName
key <- liftIO $ getKey_ cfg
currTime <- liftIO getCurrentTime
return $
do cookie <- eitherToMaybe cookieE
bytes <- decrypt key $ B8.pack $ cookieValue cookie
sessData <- eitherToMaybe $ runGet safeGet bytes
guard $ sess_expires sessData > currTime
return $ sess_user sessData
-- | Wrapper around `loginData`.
loginData' :: (AuthMonad m, Session m ~ s, SafeCopy s)
=> m (Maybe s)
loginData' = getAuthConfig >>= loginData
-- | Return a redirect to send the user back where they were
-- trying to go when they were bounced to the login form.
-- NOTE: You'll lose any post-body data the user was trying
-- to submit, so try to require login on the form GET as
-- well as the form POST.
forwardAfterLogin :: (HasRqData m, FilterMonad Response m, Functor m)
=> AuthConfig s
-> String -- ^ default redirect url
-> m Response
forwardAfterLogin cfg defaultUrl = do
urlStr <- redirectUrl cfg defaultUrl
seeOther urlStr (toResponse ())
-- | Wrapper around `forwardAfterLogin`.
forwardAfterLogin' :: (AuthMonad m)
=> String -- ^ default redirect url
-> m Response
forwardAfterLogin' defaultUrl
= getAuthConfig >>= flip forwardAfterLogin defaultUrl
redirectUrl :: (HasRqData m, Monad m, Functor m)
=> AuthConfig s
-> String -- ^ default redirect location
-> m String
redirectUrl cfg def =
case loginRedirectParam cfg of
Nothing -> return def
Just redirectParam
-> do
redirectStr <- queryString $ look redirectParam
case importURL redirectStr of
Just url@(URL {url_type=HostRelative}) ->
return $ exportURL url
_ -> return $ def
-- | Pack the current request path into the login
-- URL provided by the config.
createLoginUrl :: (HasRqData m, ServerMonad m, Functor m)
=> AuthConfig s
-> m String
createLoginUrl cfg = do
let urlStr = T.unpack $ loginForm cfg
let paramM = loginRedirectParam cfg
currUrlStr <- rqUri <$> askRq
let newUrlM = do
redirectParam <- paramM
baseUrl <- importURL urlStr
currUrl <- importURL currUrlStr
let redirectStr =
exportURL $ currUrl {url_type = HostRelative}
return $ add_param baseUrl (redirectParam, redirectStr)
case newUrlM of
Nothing -> return urlStr -- do what the user told us if things don't parse
Just newUrl -> return $ exportURL newUrl
{-| Return the logged-in user. If a user is not
logged in, they are forwarded to your login page.
-}
requireLoggedIn :: (SafeCopy s, MonadIO m, HasRqData m, ServerMonad m,
WebMonad Response m, FilterMonad Response m, Functor m)
=> AuthConfig s
-> m s
requireLoggedIn cfg = do
userM <- loginData cfg
case userM of
Just user -> return user
Nothing -> do
url <- createLoginUrl cfg
seeOther url (toResponse ()) >>=
finishWith
-- | Wrapper around `requireLoggedIn`.
requireLoggedIn' :: (AuthMonad m, Session m ~ s, SafeCopy s)
=> m s
requireLoggedIn' = getAuthConfig >>= requireLoggedIn
-- | If a user is logged in, log them out.
-- We do not guarantee this succeeds for a malicious
-- user agent - it is provided for user-agent convenience only,
-- or for a user-agent choosing to lock itself down.
setLoggedOut :: (SafeCopy s, MonadIO m, FilterMonad Response m, HasRqData m, ServerMonad m)
=> AuthConfig s -> m ()
setLoggedOut cfg = do
userM <- loginData cfg
case userM of
Nothing -> return ()
Just{} -> do
let name = loginCookieName cfg
addCookie Expired $ mkCookie name ""
-- | Wrapper around `setLoggedOut`.
setLoggedOut' :: (AuthMonad m, Session m ~ s, SafeCopy s)
=> m ()
setLoggedOut' = getAuthConfig >>= setLoggedOut
-- | If a user is logged in, refresh their login cookie so they
-- can stay logged in. Used for keeping a user active when you want
-- to log them out after X minutes of inactivity.
refreshLoggedIn :: (SafeCopy s, MonadIO m, FilterMonad Response m, HasRqData m, ServerMonad m, Functor m)
=> AuthConfig s -> m ()
refreshLoggedIn cfg = do
sessM <- loginData cfg
case sessM of
Nothing -> return ()
Just sess -> setLoggedIn cfg sess
-- | Wrapper around `refreshLoggedIn`.
refreshLoggedIn' :: (AuthMonad m, Session m ~ s, SafeCopy s)
=> m ()
refreshLoggedIn' = getAuthConfig >>= refreshLoggedIn
|
aslatter/auth
|
src/Happstack/Server/TinyAuth.hs
|
bsd-3-clause
| 8,930
| 0
| 18
| 2,210
| 2,169
| 1,128
| 1,041
| 193
| 3
|
-- | Comparison circuit from <http://arxiv.org/abs/quant-ph/0410184>
module QACG.CircGen.Comp.Ripple
( rippleComp
,mkLessOutOfPlace
,mkGreaterOutOfPlace
,mkLessThanOrEqualOutOfPlace
,mkGreaterThenOrEqualOutOfPlace
) where
import QACG.CircUtils.Circuit
import QACG.CircUtils.CircuitState
import Control.Monad.State
import Control.Exception(assert)
import Debug.Trace
import QACG.CircGen.Bit.Toffoli
-- The comparison is done by reverse-computing an addition circuit until the carry bit is determined,
-- then undoing the computation. If the carry bit is set then the first input is greater than the second.
rippleComp :: [String] -> [String] -> String -> CircuitState ()
rippleComp a b carry = assert (trace ("rip("++(show.length) a++","++(show.length) b++")") $ length a == length b) $ do
cs <- getConst 1
applyRippleComp (head cs:a) b carry
freeConst [head cs]
return ()
where applyRippleComp (a0:[]) [] z = cnot a0 z
applyRippleComp (a0:a1:as) (b0:bs) z
= do uma a0 b0 a1
applyRippleComp (a1:as) bs z
umaInv a0 b0 a1
applyRippleComp _ _ _ = assert False $ return () --Should never happen!
greaterThan,greaterThanOrEqual,lessThan,lessThanOrEqual
:: [String] -> [String] -> String -> CircuitState ()
greaterThan = rippleComp
greaterThanOrEqual a b c = do rippleComp b a c
notgate c
lessThan a b c = rippleComp b a c
lessThanOrEqual a b c = do rippleComp a b c
notgate c
mkLessOutOfPlace,mkGreaterOutOfPlace,mkLessThanOrEqualOutOfPlace,mkGreaterThenOrEqualOutOfPlace
:: [String] -> [String] -> String -> Circuit
mkLessOutOfPlace = mkComp lessThan
mkGreaterOutOfPlace = mkComp greaterThan
mkLessThanOrEqualOutOfPlace = mkComp lessThanOrEqual
mkGreaterThenOrEqualOutOfPlace = mkComp greaterThanOrEqual
mkComp :: ([String] -> [String] -> String -> CircuitState ()) -> [String] -> [String] -> String -> Circuit
mkComp comp aLns bLns carry = circ
where (_,(_,_,circ)) = runState go ([], ['c':show x|x<-[0::Int .. 10]] , Circuit (LineInfo [] [] [] []) [] [])
go = do comp aLns bLns carry
_ <- initLines aLns
_ <- initLines bLns
_ <- initLines [carry]
setOutputs $ aLns ++ bLns ++ [carry]
uma :: String -> String -> String -> CircuitState ()
uma x y z
= do rightTof x y z
cnot z x
cnot x y
umaInv :: String -> String -> String -> CircuitState ()
umaInv x y z
= do cnot x y
cnot z x
rightTof x y z
|
aparent/qacg
|
src/QACG/CircGen/Comp/Ripple.hs
|
bsd-3-clause
| 2,641
| 0
| 18
| 688
| 867
| 442
| 425
| 56
| 3
|
module P13 (
p13 ) where
import System.IO ( hGetContents, openFile, IOMode ( ReadMode ) )
import System.IO.Unsafe ( unsafePerformIO )
getNums :: IO [String]
getNums = do
handle <- openFile "src/13.txt" ReadMode
contents <- hGetContents handle
--hClose handle
return $ lines contents
findSum' :: [String] -> Integer
findSum' xs = let nums = sum . map read $ xs
in connectDigits . take 10 . reverse . sepDigits $ nums
findSum :: IO [String] -> IO Integer
findSum xs = do
nums <- xs
num <- return $ sum . map read $ nums
return $ connectDigits . take 10 . reverse . sepDigits $ num
sepDigits :: Integer -> [Integer]
sepDigits 0 = []
sepDigits x = x `mod` 10 : sepDigits (x `div` 10)
connectDigits :: [Integer] -> Integer
connectDigits [] = 0
connectDigits (x:xs) = x * (10 ^ fromIntegral (length xs)) + connectDigits xs
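-- Illustrative check (added): 'sepDigits' yields digits least-significant
-- first, and 'connectDigits' reassembles a most-significant-first list.
_digitsExample :: Bool
_digitsExample = sepDigits 123 == [3,2,1] && connectDigits [1,2,3] == 123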
p13 :: Integer
p13 = unsafePerformIO $ getNums >>= return . findSum'
|
pyuk/euler2
|
src/P13.hs
|
bsd-3-clause
| 939
| 0
| 11
| 214
| 369
| 191
| 178
| 25
| 1
|
{-# Language ImpredicativeTypes #-}
{-# Language OverloadedStrings #-}
{-# Language DeriveFunctor #-}
module System.FileSystemOperations where
import Data.IOData(IOData)
import Conduit
import Control.Applicative
import Control.Monad.Free
import Control.Monad.IO.Class
-- import Data.Conduit.Combinators(sourceFile, sinkFile)
import Filesystem hiding (readFile, writeFile)
import qualified Filesystem.Path.CurrentOS as FP
import qualified System.Posix.Files as Posix
import Prelude hiding (readFile, writeFile)
--------------------------------------------------------------------------------
data Operation path writer reader next = WriteFile path writer next
| ReadFile path (reader -> next)
| DeleteFile path next
| CreateDirectory path next
| ReadDirectory path (([path],[path]) -> next) -- ^ Files first, then directories. The returned path is just the *fileName*. Not the full path.
| DeleteDirectory path next
| Move path path next
| Copy path path next
deriving (Functor)
type FSOperation path writer reader = Free (Operation path writer reader)
writeFile :: path -> writer -> FSOperation path writer reader ()
writeFile p w = liftF (WriteFile p w ())
readFile :: path -> FSOperation path writer reader reader
readFile p = liftF (ReadFile p id)
deleteFile :: path -> FSOperation path writer reader ()
deleteFile p = liftF (DeleteFile p ())
createDirectory :: path -> FSOperation path writer reader ()
createDirectory p = liftF (CreateDirectory p ())
readDirectory :: path -> FSOperation path writer reader ([path],[path])
readDirectory p = liftF (ReadDirectory p id)
deleteDirectory :: path -> FSOperation path writer reader ()
deleteDirectory p = liftF (DeleteDirectory p ())
move :: path -> path -> FSOperation path writer reader ()
move p q = liftF (Move p q ())
copy :: path -> path -> FSOperation path writer reader ()
copy p q = liftF (Copy p q ())
-- testProg = do
-- createDirectory "/Users/frank/tmp/dirtest/foo/bar"
-- | The usual implementation of the operations
interpretIO :: (IOData ra, IOData wa, MonadResource rm, MonadResource wm)
=> FSOperation FP.FilePath (Source wm wa) (ConduitM i ra rm ()) b
-> wm b
interpretIO (Pure b) = pure b
interpretIO (Free op) = case op of
WriteFile p w next -> do
liftIO . createTree $ FP.dirname p
w $$ sinkFile p
interpretIO next
ReadFile p next -> interpretIO $ next (sourceFile p)
DeleteFile p next -> liftIO (removeFile p) >> interpretIO next
CreateDirectory p next -> liftIO (createTree p) >> interpretIO next
ReadDirectory p next -> do
entries <- liftIO $ listDirectory p
t <- separateFilesAndDirs p entries
interpretIO $ next t
DeleteDirectory p next -> liftIO (removeTree p) >> interpretIO next
Move p q next -> liftIO (rename p q) >> interpretIO next
Copy p q next -> liftIO (copyFile p q) >> interpretIO next
-- | We never remove stuff, instead we create new versions of all files in the
-- indicated data directory.
interpretPersistentIO :: ( IOData ra, IOData wa
, MonadResource rm, MonadResource wm)
=> (FP.FilePath -> wm FP.FilePath)
-- ^ Function that computes a persistent
-- location where we keep the actual file
-- content
-> FSOperation FP.FilePath (Source wm wa)
(ConduitM i ra rm ()) b
-> wm b
interpretPersistentIO _ (Pure b) = pure b
interpretPersistentIO genPath (Free op) = case op of
WriteFile p w next -> do
contentPath <- genPath p
-- make sure the directories exist
liftIO . createTree $ FP.dirname contentPath
w $$ sinkFile contentPath
-- make sure the dir containing the link exists
-- and create the link
liftIO $ do createTree $ FP.dirname p
createSymbolicLink contentPath p
interpretPersistentIO genPath next
ReadFile p next -> interpretPersistentIO genPath $ next (sourceFile p)
DeleteFile p next -> do
liftIO $ removeLink p
interpretPersistentIO genPath next
CreateDirectory p next -> do
contentPath <- genPath p
-- create the directory as well as the
-- content directory
liftIO $ do createTree $ FP.dirname contentPath
createTree p
interpretPersistentIO genPath next
ReadDirectory p next -> do
entries <- liftIO $ listDirectory p
t <- separateFilesAndDirs p entries
interpretPersistentIO genPath $ next t
DeleteDirectory p next -> do
q <- genPath p
liftIO (rename p q)
interpretPersistentIO genPath next
-- instead of deleting the tree, we move it to the backup
-- location.
Move p q next -> do
contentPathQ <- genPath q
liftIO $ do
-- TODO: This assumes p points to a file!
-- think what to do when p is a dir.
contentPathP <- readSymbolicLink p
createSymbolicLink contentPathP contentPathQ
rename p q
interpretPersistentIO genPath next
Copy p q next -> do
contentPathQ <- genPath q
liftIO $ do
b <- isDirectory p
if b then copyTree p q
else do
contentPathP <- readSymbolicLink p
createSymbolicLink contentPathP contentPathQ
createSymbolicLink contentPathQ q
interpretPersistentIO genPath next
where
copyTree p q = error "copyTree not implemented yet"
-- incremental :: MonadIO m => FP.FilePath -- ^ contenPath dir
-- -> FP.FilePath -- ^ baseDir virtual tree
-- -> FP.FilePath -- ^ Full path to the file for which we want to
-- -- know the content path
-- -> m FP.FilePath
-- incremental contentPath baseDir fp = let Just fp' = stripPrefix baseDir fp
-- in do
-- do
-- | Returns a pair (List of Files, List of Directories)
separateFilesAndDirs :: (MonadIO m, Applicative m)
=> FP.FilePath -> [FP.FilePath]
-> m ([FP.FilePath],[FP.FilePath])
separateFilesAndDirs base entries = foldr collect ([],[]) <$> liftIO (mapM f entries)
where
f n = (\b -> if b then Right n else Left n) <$> isDirectory (base FP.</> n)
collect (Left n) (ls,rs) = (n:ls,rs)
collect (Right n) (ls,rs) = (ls,n:rs)
--------------------------------------------------------------------------------
createSymbolicLink :: FP.FilePath -> FP.FilePath -> IO ()
createSymbolicLink p q = Posix.createSymbolicLink (FP.encodeString p) (FP.encodeString q)
removeLink :: FP.FilePath -> IO ()
removeLink = Posix.removeLink . FP.encodeString
readSymbolicLink :: FP.FilePath -> IO FP.FilePath
readSymbolicLink = fmap FP.decodeString . Posix.readSymbolicLink . FP.encodeString
|
noinia/virtual-persistent-fs
|
src/System/FileSystemOperations.hs
|
bsd-3-clause
| 9,236
| 0
| 17
| 4,138
| 1,872
| 931
| 941
| 121
| 9
|
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.SUN.Vertex
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.SUN.Vertex (
-- * Extension Support
glGetSUNVertex,
gl_SUN_vertex,
-- * Functions
glColor3fVertex3fSUN,
glColor3fVertex3fvSUN,
glColor4fNormal3fVertex3fSUN,
glColor4fNormal3fVertex3fvSUN,
glColor4ubVertex2fSUN,
glColor4ubVertex2fvSUN,
glColor4ubVertex3fSUN,
glColor4ubVertex3fvSUN,
glNormal3fVertex3fSUN,
glNormal3fVertex3fvSUN,
glReplacementCodeuiColor3fVertex3fSUN,
glReplacementCodeuiColor3fVertex3fvSUN,
glReplacementCodeuiColor4fNormal3fVertex3fSUN,
glReplacementCodeuiColor4fNormal3fVertex3fvSUN,
glReplacementCodeuiColor4ubVertex3fSUN,
glReplacementCodeuiColor4ubVertex3fvSUN,
glReplacementCodeuiNormal3fVertex3fSUN,
glReplacementCodeuiNormal3fVertex3fvSUN,
glReplacementCodeuiTexCoord2fColor4fNormal3fVertex3fSUN,
glReplacementCodeuiTexCoord2fColor4fNormal3fVertex3fvSUN,
glReplacementCodeuiTexCoord2fNormal3fVertex3fSUN,
glReplacementCodeuiTexCoord2fNormal3fVertex3fvSUN,
glReplacementCodeuiTexCoord2fVertex3fSUN,
glReplacementCodeuiTexCoord2fVertex3fvSUN,
glReplacementCodeuiVertex3fSUN,
glReplacementCodeuiVertex3fvSUN,
glTexCoord2fColor3fVertex3fSUN,
glTexCoord2fColor3fVertex3fvSUN,
glTexCoord2fColor4fNormal3fVertex3fSUN,
glTexCoord2fColor4fNormal3fVertex3fvSUN,
glTexCoord2fColor4ubVertex3fSUN,
glTexCoord2fColor4ubVertex3fvSUN,
glTexCoord2fNormal3fVertex3fSUN,
glTexCoord2fNormal3fVertex3fvSUN,
glTexCoord2fVertex3fSUN,
glTexCoord2fVertex3fvSUN,
glTexCoord4fColor4fNormal3fVertex4fSUN,
glTexCoord4fColor4fNormal3fVertex4fvSUN,
glTexCoord4fVertex4fSUN,
glTexCoord4fVertex4fvSUN
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Functions
|
haskell-opengl/OpenGLRaw
|
src/Graphics/GL/SUN/Vertex.hs
|
bsd-3-clause
| 2,056
| 0
| 4
| 203
| 161
| 112
| 49
| 45
| 0
|
{-# LANGUAGE RecordWildCards, OverloadedStrings #-}
module OS.Win.WinRules
( copyWinTargetExtras
, pkgrootConfFixup
, winGhcInstall
, winRules
)
where
import qualified Data.ByteString as B
import qualified Data.Text as T
import qualified Data.Text.Encoding as E
import Development.Shake
import Development.Shake.FilePath ( (</>), takeDirectory )
import Dirs
import OS.Internal
import OS.Win.WinNsis
import OS.Win.WinPaths
import OS.Win.WinUtils
import Paths
import Types
import Utils
winRules :: Rules ()
winRules = do
genNsisData
genNsisFile
copyInstExtras
winGhcInstall :: FilePath -> GhcInstallAction
winGhcInstall destDir bc distDir = do
let untarDir = takeDirectory distDir
-- (will this cause some race conditions, removing vs populating?)
command_ [] "mv" [untarDir </> show (bcGhcVersion bc), destDir ]
-- Install the GLUT components into destDir:
-- lib, dll, ...
let winGlutLibSrc = winExternalGlutLibDir bc </> winGlutLibFile
winGlutDllSrc = winExternalGlutLibDir bc </> winGlutDllFile
winGlutLibInstallDir = destDir </> winGlutLibDir
winGlutDllInstallDir = destDir </> winGlutDllDir
command_ [] "cp" [ "-p", winGlutLibSrc, winGlutLibInstallDir ]
command_ [] "cp" [ "-p", winGlutDllSrc, winGlutDllInstallDir ]
-- ... and the include files
let setup = makeDirectory winGlutIncludeInstallDir
winGlutIncludeInstallDir = destDir </> winGlutIncludeDir
winGlutIncSrcs <- getDirectoryContents winExternalGlutIncDir
copyFilesAction setup winExternalGlutIncDir winGlutIncludeInstallDir
winGlutIncSrcs
needContents winGlutIncludeInstallDir
return destDir
copyWinTargetExtras :: BuildConfig -> Action ()
copyWinTargetExtras bc = do
-- copy icons
let mkIconsDir = makeDirectory $ winTargetDir </> "icons"
copyFilesAction mkIconsDir winExtrasSrc winTargetDir winIconsFiles
-- copy user's guide docs: ps, pdf, html, etc....
copyDirAction winExternalDocs winDocTargetDir
-- copy winghci pieces
copyDirAction winExternalWinGhciDir winWinGhciTargetDir
-- copy msys(msys2) pieces
copyDirAction (winExternalMSysDir bc) winMSysTargetDir
-- | These files are needed when building the installer
copyInstExtras :: Rules ()
copyInstExtras = do
let setup = makeDirectory $ installerPartsDir </> "icons"
copyFilesRules setup winExtrasSrc installerPartsDir winInstExtrasFiles
pkgrootConfFixup :: OS -> FilePath -> Action ()
pkgrootConfFixup os confFile = do
putLoud $ "pkgrootConfFixup: " ++ show confFile
let OS{..} = os
confStr <- liftIO . B.readFile $ confFile
-- Fix up the pkgroot.
-- Using brute force, replace the first part of the
-- paths in the conf file with ${pkgroot} so that the conf
-- is portable. n.b., not all packages have conf files.
-- Ref: https://ghc.haskell.org/trac/ghc/ticket/3268
-- implement the Cabal ${pkgroot} spec extension
let pkgRoot = T.pack $ toCabalPrefix "lib"
(return . T.replace pkgRoot "${pkgroot}" . E.decodeUtf8) confStr
>>= liftIO . B.writeFile confFile . E.encodeUtf8
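-- Illustrative example (added): the replacement performed above, in
-- isolation. With a hypothetical prefix of "C:/hp/lib", a conf line such as
-- "import-dirs: C:/hp/lib/foo-1.0" becomes "import-dirs: ${pkgroot}/foo-1.0".
_pkgrootExample :: T.Text
_pkgrootExample =
    T.replace "C:/hp/lib" "${pkgroot}" "import-dirs: C:/hp/lib/foo-1.0"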
-- | Take a setup action, a source dir where the files are, a dest dir
-- to copy them to, and a list of file names, and set up the needs and
-- rules for copying them.
copyFilesRules :: Action () -> FilePath -> FilePath -> [FilePath] -> Rules ()
copyFilesRules setup srcDir dstDir =
mapM_ (\f -> dstDir </> f %> \_ -> copyFileAction setup srcDir dstDir f)
copyFileAction :: Action () -> FilePath -> FilePath -> FilePath -> Action ()
copyFileAction setup srcDir dstDir file = do
need [srcDir </> file]
setup
command_ [] "cp" ["-p", srcDir </> file, dstDir </> file]
copyFilesAction :: Action () -> FilePath -> FilePath -> [FilePath] -> Action ()
copyFilesAction setup srcDir dstDir files = do
setup
mapM_ (copyFileAction (return ()) srcDir dstDir) files
copyDirAction :: FilePath -> FilePath -> Action ()
copyDirAction srcDir dstDir = do
needContents srcDir
makeDirectory dstDir
-- Two problems: seems that (</>) strips the "." out, so use (++);
-- second problem is that using an "*" in the path results in an error,
-- so "/." works better.
command_ [] "cp" ["-pR", srcDir ++ "/.", dstDir]
needContents dstDir
|
ardumont/haskell-platform
|
hptool/src/OS/Win/WinRules.hs
|
bsd-3-clause
| 4,379
| 0
| 15
| 917
| 935
| 477
| 458
| 78
| 1
|
module Network.IRC.ByteString.Config where
import Data.ByteString.Char8
import Data.Attoparsec.Char8 as Char8
data IRCParserConfig = IRCParserConfig
{ nickParser :: Parser ByteString
, userParser :: Parser ByteString
, hostParser :: Parser ByteString
, paramParser :: Parser ByteString
, isIRCSpace :: Char -> Bool
}
|
kallisti-dev/irc-bytestring
|
src/Network/IRC/ByteString/Config.hs
|
bsd-3-clause
| 344
| 0
| 9
| 67
| 77
| 47
| 30
| 9
| 0
|
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
module Game (
GameState,
startNewGame,
continueGame,
) where
import Character
import Control.Monad.State
import Data.Char (toLower)
import qualified Distance as Dst
import qualified Speed as Spd
import qualified Time as Tm
import System.Console.ANSI
import System.IO
processInputWith :: (String -> IO b) -> IO b
processInputWith processor = do
input <- getLine
processor $ map toLower input
data GameState = GameState {
player :: Player,
currentTime :: Tm.Time Float
}
startNewGame :: IO ()
startNewGame = do
clearScreen
putStrLn "Almost three weeks have passed since The Crash.\nLorem ipsum, here comes the back-story, which I'm rather bad at telling."
getLine
clearScreen
putStrLn "You wake up on the floor of what seams like a hut with a throbbing headache. Looking at the ceiling you recognize your home instantly from the familiar ornaments."
putStrLn "You're still dizzy while you try to remember how you got on the floor. You close your eyes and concentrate. You see five man. Two of them are standing next to you. " -- continue story here
putStrLn "she's my [wife]"
putStrLn "he's my [husband]"
gender <- processInputWith selectGender
putStrLn $ "You're a " ++ (if gender == Male then "tribesman" else "tribeswoman") ++ "."
where selectGender input = case input of
"wife" -> return Male
"husband" -> return Female
_ -> do
putStrLn "Invalid choice."
processInputWith selectGender
continueGame :: Handle -> IO ()
continueGame file = return ()
travel dist = do
gameState <- get
let playerChar = player gameState
put $ GameState playerChar ((currentTime gameState) `Tm.offset` (Spd.getTime dist (travelSpeed playerChar)))
|
siliconbrain/khaland
|
src/Game.hs
|
bsd-3-clause
| 1,894
| 0
| 14
| 467
| 384
| 196
| 188
| 44
| 4
|
module Control.Concurrent.STM.TVar.Class where
import Control.Concurrent.STM (STM)
import qualified Control.Concurrent.STM.TVar as TVar
class TVarWrite var where
writeTVar :: var a -> a -> STM ()
instance TVarWrite TVar.TVar where
writeTVar = TVar.writeTVar
{-# INLINE writeTVar #-}
class TVarRead var where
readTVar :: var a -> STM a
instance TVarRead TVar.TVar where
readTVar = TVar.readTVar
{-# INLINE readTVar #-}
|
osa1/privileged-concurrency
|
Control/Concurrent/STM/TVar/Class.hs
|
bsd-3-clause
| 447
| 0
| 10
| 86
| 120
| 67
| 53
| 13
| 0
|
module Data.Selection
( Selection
, includes
, selectBy
, empty
, universal
, select
, union
, intersection
, difference
, allExcept
) where
--------------------------------------------------------------------------------
-- * Data structure for describing selections of values.
--------------------------------------------------------------------------------
-- | Selection is a description of a set of values.
data Selection a = Selection (a -> Bool)
instance Semigroup (Selection a)
where
(<>) = mappend
instance Monoid (Selection a)
where
mempty = empty
mappend = union
-- | Check whether a value is included in a selection.
includes :: Selection a -> a -> Bool
includes (Selection p) = p
-- | Select the values that fulfill a predicate.
selectBy :: (a -> Bool) -> Selection a
selectBy = Selection
-- | Empty selection.
empty :: Selection a
empty = Selection $ \_ -> False
-- | Select all values.
universal :: Selection a
universal = Selection $ \_ -> True
-- | Union of selections.
union :: Selection a -> Selection a -> Selection a
union s t = Selection $ \a -> includes s a || includes t a
-- | Intersection of selections.
intersection :: Selection a -> Selection a -> Selection a
intersection s t = Selection $ \a -> includes s a && includes t a
-- | Difference of selections.
difference :: Selection a -> Selection a -> Selection a
difference s t = Selection $ \a -> includes s a && not (includes t a)
-- | Create a classification from a list of elements.
select :: Eq a => [a] -> Selection a
select as = selectBy (`elem` as)
-- | Select all values except those in the given list.
allExcept :: Eq a => [a] -> Selection a
allExcept = difference universal . select
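-- Illustrative usage (added as a sketch): even numbers, except 4.
_exampleSelection :: Bool
_exampleSelection = includes s 6 && not (includes s 4) && not (includes s 3)
  where s = selectBy even `difference` select [4 :: Int]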
--------------------------------------------------------------------------------
|
markus-git/co-feldspar
|
src/Data/Selection.hs
|
bsd-3-clause
| 1,806
| 0
| 10
| 344
| 456
| 245
| 211
| 35
| 1
|
{-
-- putStrLn $ show $ gr
let str = graphviz gr "test" (1.0,1.0) (1,1) Portrait
writeFile "test.dot" str -}
mai1 = do
putStrLn "welcome to dev-admin"
gdescs <- mapM (readPackageDescription normal . getCabalFileName) projects
let deps = map (combo getPkgName getDependency) gdescs
motherlist = map (combo fst (filter (nameMatch projects). snd)) deps
mapM_ (putStrLn . show ) motherlist
putStrLn "daughter map"
let daughterlist = M.toList ( convertMotherMapToDaughterMap motherlist )
-- mapM_ (putStrLn . show ) )
putStrLn "-----------------------"
writeFile "test.dot" $ dotGraph daughterlist
mai2 = do
args <- getArgs
putStrLn "welcome to dev-admin"
gdescs <- mapM (readPackageDescription normal . getCabalFileName) projects
let deps = map (combo getPkgName getDependency) gdescs
motherlist = map (combo fst (filter (nameMatch projects). snd)) deps
mapM_ (putStrLn . show ) motherlist
putStrLn "daughter map"
let dmap = convertMotherMapToDaughterMap motherlist
mapM_ cabalInstallJob $ fromJust . M.lookup (args !! 0) $ dmap
-- let daughterlist = M.toList ( convertMotherMapToDaughterMap motherlist )
-- mapM_ (putStrLn . show ) )
-- putStrLn "-----------------------"
-- writeFile "test.dot" $ dotGraph daughterlist
|
wavewave/devadmin
|
lib/trash/trash.hs
|
bsd-3-clause
| 1,321
| 0
| 17
| 273
| 315
| 146
| 169
| 20
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Main ( main ) where
import Test.Framework as T
import qualified Tests.Dalvik.SSA as TSSA
import qualified Tests.Dalvik.Labels as LSSA
import qualified Tests.Dalvik.NameDecoding as ND
main :: IO ()
main = do
T.defaultMain $ [
ND.tests
, TSSA.tests
, LSSA.tests
]
|
travitch/dalvik
|
tests/ssa/SSALabels.hs
|
bsd-3-clause
| 326
| 0
| 9
| 67
| 83
| 54
| 29
| 12
| 1
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE Rank2Types #-}
--------------------------------------------------------------------------------
-- |
-- Module : FRP.Yampa.Switches
-- Copyright : (c) Antony Courtney and Henrik Nilsson, Yale University, 2003
-- License : BSD-style (see the LICENSE file in the distribution)
--
-- Maintainer : ivan.perez@keera.co.uk
-- Stability : provisional
-- Portability : non-portable (GHC extensions)
--
-- Switches allow you to change the signal function being applied.
--
-- The basic idea of switching is formed by combining a subordinate signal
-- function and a signal function continuation parameterised over some initial
-- data.
--
-- For example, the most basic switch has the following signature:
--
-- @switch :: SF a (b, Event c) -> (c -> SF a b) -> SF a b@
--
-- which indicates that it has two parameters: a signal function
-- that produces an output and indicates, with an event, when it is time to
-- switch, and a signal function that starts with the residual data left by the
-- first SF in the event and continues onwards.
--
-- Switching occurs, at most, once. If you want something to switch repeatedly,
-- in general, you need to loop, or to switch onto the same signal function
-- again. However, some switches, explained below, are immediate (meaning that
-- the second SF is started at the time of switching). If you use the same SF
-- that originally provoked the switch, you are very likely to fall into an
-- infinite loop. In those cases, the use of 'dSwitch' or '-->' may help.
--
-- Switches vary depending on a number of criteria:
--
-- - /Decoupled/ vs normal switching /(d)/: when an SF is being applied and a
-- different SF needs to be applied next, one question is which one is used
-- for the time in which the switching takes place. In decoupled switching, the
-- old SF is used for the time of switching, and the new SF is only used after
-- that. In normal or instantaneous or coupled switching, the old SF is
-- discarded immediately and a new SF is used for the output already from that
-- point in time.
--
-- - How the switching event is provided /( \/r\/k)/: normally, an 'Event' is
-- used to indicate that a switching must take place. This event can be part of
-- the argument SF (e.g., 'switch'), it can be part of the input (e.g.,
-- 'rSwitch'), or it can be determined by a second argument SF (e.g,
-- 'kSwitch').
--
-- - How many SFs are being handled /( \/p\/par)/: some combinators deal with
-- only one SF, others handle collections, either in the form of a
-- 'Functor' or a list ('[]').
--
-- - How the input is routed /(B\/Z\/ )/: when multiple SFs are being combined,
-- a decision needs to be made about how the input is passed to the internal
-- SFs. In some cases, broadcasting is used to pass the same input to all
-- internal SFs. In others, the input is itself a collection, and each element
-- is passed to one internal SF (i.e., /zipping/). In others, an auxiliary
-- function is used to decide how to route specific inputs to specific SFs in
-- the collection.
--
-- These give a number of different combinations, some of which make no sense,
-- and also help determine the expected behaviour of a combinator by looking
-- at its name. For example, 'drpSwitchB' is the decoupled (/d/), recurrent
-- (/r/), parallel (/p/) switch with broadcasting (/B/).
module FRP.Yampa.Switches (
-- * Basic switching
switch, dSwitch, -- :: SF a (b, Event c) -> (c -> SF a b) -> SF a b
rSwitch, drSwitch, -- :: SF a b -> SF (a,Event (SF a b)) b
kSwitch, dkSwitch, -- :: SF a b
-- -> SF (a,b) (Event c)
-- -> (SF a b -> c -> SF a b)
-- -> SF a b
-- * Parallel composition\/switching (collections)
-- ** With broadcasting
parB, -- :: Functor col => col (SF a b) -> SF a (col b)
pSwitchB,dpSwitchB, -- :: Functor col =>
-- col (SF a b)
-- -> SF (a, col b) (Event c)
-- -> (col (SF a b) -> c -> SF a (col b))
-- -> SF a (col b)
rpSwitchB,drpSwitchB,-- :: Functor col =>
-- col (SF a b)
-- -> SF (a, Event (col (SF a b)->col (SF a b)))
-- (col b)
-- ** With helper routing function
par, -- Functor col =>
-- (forall sf . (a -> col sf -> col (b, sf)))
-- -> col (SF b c)
-- -> SF a (col c)
pSwitch, dpSwitch, -- pSwitch :: Functor col =>
-- (forall sf . (a -> col sf -> col (b, sf)))
-- -> col (SF b c)
-- -> SF (a, col c) (Event d)
-- -> (col (SF b c) -> d -> SF a (col c))
-- -> SF a (col c)
rpSwitch,drpSwitch, -- Functor col =>
-- (forall sf . (a -> col sf -> col (b, sf)))
-- -> col (SF b c)
-- -> SF (a, Event (col (SF b c) -> col (SF b c)))
-- (col c)
--
-- * Parallel composition\/switching (lists)
--
-- ** With "zip" routing
parZ, -- [SF a b] -> SF [a] [b]
pSwitchZ, -- [SF a b] -> SF ([a],[b]) (Event c)
-- -> ([SF a b] -> c -> SF [a] [b]) -> SF [a] [b]
dpSwitchZ, -- [SF a b] -> SF ([a],[b]) (Event c)
-- -> ([SF a b] -> c ->SF [a] [b]) -> SF [a] [b]
rpSwitchZ, -- [SF a b] -> SF ([a], Event ([SF a b]->[SF a b])) [b]
drpSwitchZ, -- [SF a b] -> SF ([a], Event ([SF a b]->[SF a b])) [b]
-- ** With replication
parC, -- SF a b -> SF [a] [b]
) where
import Control.Arrow
import FRP.Yampa.Diagnostics
import FRP.Yampa.InternalCore (DTime, FunDesc (..), SF (..), SF' (..), fdFun,
sfArrG, sfConst, sfTF')
import FRP.Yampa.Basic
import FRP.Yampa.Event
------------------------------------------------------------------------------
-- Basic switches
------------------------------------------------------------------------------
-- !!! Interesting case. It seems we need scoped type variables
-- !!! to be able to write down the local type signatures.
-- !!! On the other hand, the scoped type variables seem to
-- !!! prohibit the kind of unification that is needed for GADTs???
-- !!! Maybe this could be made to work if it actually WAS known
-- !!! that scoped type variables indeed correspond to universally
-- !!! quantified variables? Or if one were to keep track of those
-- !!! scoped type variables that actually do?
-- !!!
-- !!! Find a simpler case to experiment further. For now, elim.
-- !!! the free variable.
{-
-- Basic switch.
switch :: SF a (b, Event c) -> (c -> SF a b) -> SF a b
switch (SF {sfTF = tf10} :: SF a (b, Event c))
(k :: c -> SF a b) = SF {sfTF = tf0}
where
tf0 a0 =
case tf10 a0 of
(sf1, (b0, NoEvent)) -> (switchAux sf1, b0)
(_, (_, Event c0)) -> sfTF (k c0) a0
-- It would be nice to optimize further here. E.g. if it would be
-- possible to observe the event source only.
switchAux :: SF' a (b, Event c) -> SF' a b
switchAux (SFId _) = switchAuxA1 id -- New
switchAux (SFConst _ (b, NoEvent)) = sfConst b
switchAux (SFArr _ f1) = switchAuxA1 f1
switchAux sf1 = SF' tf
where
tf dt a =
case (sfTF' sf1) dt a of
(sf1', (b, NoEvent)) -> (switchAux sf1', b)
(_, (_, Event c)) -> sfTF (k c) a
-- Could be optimized a little bit further by having a case for
-- identity, switchAuxI1
-- Note: While switch behaves as a stateless arrow at this point, that
-- could change after a switch. Hence, SF' overall.
switchAuxA1 :: (a -> (b, Event c)) -> SF' a b
switchAuxA1 f1 = sf
where
sf = SF' tf
tf _ a =
case f1 a of
(b, NoEvent) -> (sf, b)
(_, Event c) -> sfTF (k c) a
-}
-- | Basic switch.
--
-- By default, the first signal function is applied. Whenever the second value
-- in the pair actually is an event, the value carried by the event is used to
-- obtain a new signal function to be applied *at that time and at future
-- times*. Until that happens, the first value in the pair is produced in the
-- output signal.
--
-- Important note: at the time of switching, the second signal function is
-- applied immediately. If that second SF can also switch at time zero, then a
-- double (nested) switch might take place. If the second SF refers to the
-- first one, the switch might take place infinitely many times and never be
-- resolved.
--
-- Remember: The continuation is evaluated strictly at the time
-- of switching!
switch :: SF a (b, Event c) -> (c -> SF a b) -> SF a b
switch (SF {sfTF = tf10}) k = SF {sfTF = tf0}
where
tf0 a0 =
case tf10 a0 of
(sf1, (b0, NoEvent)) -> (switchAux sf1 k, b0)
(_, (_, Event c0)) -> sfTF (k c0) a0
-- It would be nice to optimize further here. E.g. if it would be
-- possible to observe the event source only.
switchAux :: SF' a (b, Event c) -> (c -> SF a b) -> SF' a b
switchAux (SFArr _ (FDC (b, NoEvent))) _ = sfConst b
switchAux (SFArr _ fd1) k = switchAuxA1 (fdFun fd1) k
switchAux sf1 k = SF' tf
{-
if sfIsInv sf1 then
switchInv sf1 k
else
SF' tf False
-}
where
tf dt a =
case (sfTF' sf1) dt a of
(sf1', (b, NoEvent)) -> (switchAux sf1' k, b)
(_, (_, Event c)) -> sfTF (k c) a
{-
-- Note: subordinate signal function being invariant does NOT
-- imply that the overall signal function is.
switchInv :: SF' a (b, Event c) -> (c -> SF a b) -> SF' a b
switchInv sf1 k = SF' tf False
where
tf dt a =
case (sfTF' sf1) dt a of
(sf1', (b, NoEvent)) -> (switchInv sf1' k, b)
(_, (_, Event c)) -> sfTF (k c) a
-}
-- !!! Could be optimized a little bit further by having a case for
-- !!! identity, switchAuxI1. But I'd expect identity is so unlikely
-- !!! that there is no point.
-- Note: While switch behaves as a stateless arrow at this point, that
-- could change after a switch. Hence, SF' overall.
switchAuxA1 :: (a -> (b, Event c)) -> (c -> SF a b) -> SF' a b
switchAuxA1 f1 k = sf
where
sf = SF' tf -- False
tf _ a =
case f1 a of
(b, NoEvent) -> (sf, b)
(_, Event c) -> sfTF (k c) a
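-- A minimal usage sketch (assuming 'embed' and 'deltaEncode' from the public
-- FRP.Yampa API, which are not imported by this module): the continuation is
-- applied already at the switching sample, so its output replaces the old
-- SF's output there.
--
-- >>> let sf = switch (arr (\x -> (x, if x < 0 then Event () else NoEvent))) (\() -> constant 0)
-- >>> embed sf (deltaEncode 0.1 [1, 2, -3, 4])
-- [1,2,0,0]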
-- | Switch with delayed observation.
--
-- By default, the first signal function is applied.
--
-- Whenever the second value in the pair actually is an event,
-- the value carried by the event is used to obtain a new signal
-- function to be applied *at future times*.
--
-- Until that happens, the first value in the pair is produced
-- in the output signal.
--
-- Important note: at the time of switching, the second
-- signal function is used immediately, but the current
-- input is fed to it (even though the actual output signal
-- value at time 0 is discarded).
--
-- If that second SF can also switch at time zero, then a
-- double (nested) switch might take place. If the second SF refers to the
-- first one, the switch might take place infinitely many times and never be
-- resolved.
--
-- Remember: The continuation is evaluated strictly at the time
-- of switching!
-- Alternative name: "decoupled switch"?
-- (The SFId optimization is highly unlikely to be of much use, but it
-- does raise an interesting typing issue.)
dSwitch :: SF a (b, Event c) -> (c -> SF a b) -> SF a b
dSwitch (SF {sfTF = tf10}) k = SF {sfTF = tf0}
where
tf0 a0 =
let (sf1, (b0, ec0)) = tf10 a0
in (case ec0 of
NoEvent -> dSwitchAux sf1 k
Event c0 -> fst (sfTF (k c0) a0),
b0)
-- It would be nice to optimize further here. E.g. if it would be
-- possible to observe the event source only.
dSwitchAux :: SF' a (b, Event c) -> (c -> SF a b) -> SF' a b
dSwitchAux (SFArr _ (FDC (b, NoEvent))) _ = sfConst b
dSwitchAux (SFArr _ fd1) k = dSwitchAuxA1 (fdFun fd1) k
dSwitchAux sf1 k = SF' tf
{-
if sfIsInv sf1 then
dSwitchInv sf1 k
else
SF' tf False
-}
where
tf dt a =
let (sf1', (b, ec)) = (sfTF' sf1) dt a
in (case ec of
NoEvent -> dSwitchAux sf1' k
Event c -> fst (sfTF (k c) a),
b)
{-
-- Note: that the subordinate signal function is invariant does NOT
-- imply that the overall signal function is.
dSwitchInv :: SF' a (b, Event c) -> (c -> SF a b) -> SF' a b
dSwitchInv sf1 k = SF' tf False
where
tf dt a =
let (sf1', (b, ec)) = (sfTF' sf1) dt a
in (case ec of
NoEvent -> dSwitchInv sf1' k
Event c -> fst (sfTF (k c) a),
b)
-}
-- !!! Could be optimized a little bit further by having a case for
-- !!! identity, switchAuxI1
-- Note: While dSwitch behaves as a stateless arrow at this point, that
-- could change after a switch. Hence, SF' overall.
dSwitchAuxA1 :: (a -> (b, Event c)) -> (c -> SF a b) -> SF' a b
dSwitchAuxA1 f1 k = sf
where
sf = SF' tf -- False
tf _ a =
let (b, ec) = f1 a
in (case ec of
NoEvent -> sf
Event c -> fst (sfTF (k c) a),
b)
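-- The same sketch as for 'switch' above (again assuming 'embed' and
-- 'deltaEncode' from the public FRP.Yampa API) shows the delayed
-- observation: at the switching sample the old SF still produces the
-- output, and the continuation only takes over afterwards.
--
-- >>> let sf = dSwitch (arr (\x -> (x, if x < 0 then Event () else NoEvent))) (\() -> constant 0)
-- >>> embed sf (deltaEncode 0.1 [1, 2, -3, 4])
-- [1,2,-3,0]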
-- | Recurring switch.
--
-- Uses the given SF until an event comes in the input, in which case the SF in
-- the event is turned on, until the next event comes in the input, and so on.
--
-- See <https://wiki.haskell.org/Yampa#Switches> for more
-- information on how this switch works.
-- !!! Suboptimal. Overall, the constructor is invarying since rSwitch is
-- !!! being invoked recursively on a switch. In fact, we don't even care
-- !!! whether the subordinate signal function is invarying or not.
-- !!! We could make use of a signal function transformer sfInv to
-- !!! mark the constructor as invarying. Would that make sense?
-- !!! The price would be an extra loop with case analysis.
-- !!! The potential gain is fewer case analyses in superior loops.
rSwitch :: SF a b -> SF (a, Event (SF a b)) b
rSwitch sf = switch (first sf) ((noEventSnd >=-) . rSwitch)
{-
-- Old version. New is more efficient. Which one is clearer?
rSwitch :: SF a b -> SF (a, Event (SF a b)) b
rSwitch sf = switch (first sf) rSwitch'
where
rSwitch' sf = switch (sf *** notYet) rSwitch'
-}
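-- A usage sketch (assuming 'embed' and 'deltaEncode' from the public
-- FRP.Yampa API): the SF carried by an input event replaces the running SF
-- at that very sample.
--
-- >>> let inps = [(1, NoEvent), (2, Event (arr (*10))), (3, NoEvent)]
-- >>> embed (rSwitch (arr (+1))) (deltaEncode 0.1 inps)
-- [2,20,30]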
-- | Recurring switch with delayed observation.
--
-- Uses the given SF until an event comes in the input, in which case the SF in
-- the event is turned on, until the next event comes in the input, and so on.
--
-- Uses decoupled switch ('dSwitch').
--
-- See <https://wiki.haskell.org/Yampa#Switches> for more
-- information on how this switch works.
drSwitch :: SF a b -> SF (a, Event (SF a b)) b
drSwitch sf = dSwitch (first sf) ((noEventSnd >=-) . drSwitch)
{-
-- Old version. New is more efficient. Which one is clearer?
drSwitch :: SF a b -> SF (a, Event (SF a b)) b
drSwitch sf = dSwitch (first sf) drSwitch'
where
drSwitch' sf = dSwitch (sf *** notYet) drSwitch'
-}
-- | Call-with-current-continuation switch.
--
-- Applies the first SF until the input signal and the output signal, when
-- passed to the second SF, produce an event, in which case the original SF and
-- the event are used to build a new SF to switch into.
--
-- See <https://wiki.haskell.org/Yampa#Switches> for more
-- information on how this switch works.
-- !!! Has not been optimized properly.
-- !!! Nor has opts been tested!
-- !!! Don't forget Inv opts!
kSwitch :: SF a b -> SF (a,b) (Event c) -> (SF a b -> c -> SF a b) -> SF a b
kSwitch sf10@(SF {sfTF = tf10}) (SF {sfTF = tfe0}) k = SF {sfTF = tf0}
where
tf0 a0 =
let (sf1, b0) = tf10 a0
in
case tfe0 (a0, b0) of
(sfe, NoEvent) -> (kSwitchAux sf1 sfe, b0)
(_, Event c0) -> sfTF (k sf10 c0) a0
-- Same problem as above: must pass k explicitly???
-- kSwitchAux (SFId _) sfe = kSwitchAuxI1 sfe
kSwitchAux (SFArr _ (FDC b)) sfe = kSwitchAuxC1 b sfe
kSwitchAux (SFArr _ fd1) sfe = kSwitchAuxA1 (fdFun fd1) sfe
-- kSwitchAux (SFArrE _ f1) sfe = kSwitchAuxA1 f1 sfe
-- kSwitchAux (SFArrEE _ f1) sfe = kSwitchAuxA1 f1 sfe
kSwitchAux sf1 (SFArr _ (FDC NoEvent)) = sf1
kSwitchAux sf1 (SFArr _ fde) = kSwitchAuxAE sf1 (fdFun fde)
-- kSwitchAux sf1 (SFArrE _ fe) = kSwitchAuxAE sf1 fe
-- kSwitchAux sf1 (SFArrEE _ fe) = kSwitchAuxAE sf1 fe
kSwitchAux sf1 sfe = SF' tf -- False
where
tf dt a =
let (sf1', b) = (sfTF' sf1) dt a
in
case (sfTF' sfe) dt (a, b) of
(sfe', NoEvent) -> (kSwitchAux sf1' sfe', b)
(_, Event c) -> sfTF (k (freeze sf1 dt) c) a
{-
-- !!! Untested optimization!
kSwitchAuxI1 (SFConst _ NoEvent) = sfId
kSwitchAuxI1 (SFArr _ fe) = kSwitchAuxI1AE fe
kSwitchAuxI1 sfe = SF' tf
where
tf dt a =
case (sfTF' sfe) dt (a, a) of
(sfe', NoEvent) -> (kSwitchAuxI1 sfe', a)
(_, Event c) -> sfTF (k identity c) a
-}
-- !!! Untested optimization!
kSwitchAuxC1 b (SFArr _ (FDC NoEvent)) = sfConst b
kSwitchAuxC1 b (SFArr _ fde) = kSwitchAuxC1AE b (fdFun fde)
-- kSwitchAuxC1 b (SFArrE _ fe) = kSwitchAuxC1AE b fe
-- kSwitchAuxC1 b (SFArrEE _ fe) = kSwitchAuxC1AE b fe
kSwitchAuxC1 b sfe = SF' tf -- False
where
tf dt a =
case (sfTF' sfe) dt (a, b) of
(sfe', NoEvent) -> (kSwitchAuxC1 b sfe', b)
(_, Event c) -> sfTF (k (constant b) c) a
-- !!! Untested optimization!
kSwitchAuxA1 f1 (SFArr _ (FDC NoEvent)) = sfArrG f1
kSwitchAuxA1 f1 (SFArr _ fde) = kSwitchAuxA1AE f1 (fdFun fde)
-- kSwitchAuxA1 f1 (SFArrE _ fe) = kSwitchAuxA1AE f1 fe
-- kSwitchAuxA1 f1 (SFArrEE _ fe) = kSwitchAuxA1AE f1 fe
kSwitchAuxA1 f1 sfe = SF' tf -- False
where
tf dt a =
let b = f1 a
in
case (sfTF' sfe) dt (a, b) of
(sfe', NoEvent) -> (kSwitchAuxA1 f1 sfe', b)
(_, Event c) -> sfTF (k (arr f1) c) a
-- !!! Untested optimization!
-- kSwitchAuxAE (SFId _) fe = kSwitchAuxI1AE fe
kSwitchAuxAE (SFArr _ (FDC b)) fe = kSwitchAuxC1AE b fe
kSwitchAuxAE (SFArr _ fd1) fe = kSwitchAuxA1AE (fdFun fd1) fe
-- kSwitchAuxAE (SFArrE _ f1) fe = kSwitchAuxA1AE f1 fe
-- kSwitchAuxAE (SFArrEE _ f1) fe = kSwitchAuxA1AE f1 fe
kSwitchAuxAE sf1 fe = SF' tf -- False
where
tf dt a =
let (sf1', b) = (sfTF' sf1) dt a
in
case fe (a, b) of
NoEvent -> (kSwitchAuxAE sf1' fe, b)
Event c -> sfTF (k (freeze sf1 dt) c) a
{-
-- !!! Untested optimization!
kSwitchAuxI1AE fe = SF' tf -- False
where
tf dt a =
case fe (a, a) of
NoEvent -> (kSwitchAuxI1AE fe, a)
Event c -> sfTF (k identity c) a
-}
-- !!! Untested optimization!
kSwitchAuxC1AE b fe = SF' tf -- False
where
tf _ a =
case fe (a, b) of
NoEvent -> (kSwitchAuxC1AE b fe, b)
Event c -> sfTF (k (constant b) c) a
-- !!! Untested optimization!
kSwitchAuxA1AE f1 fe = SF' tf -- False
where
tf _ a =
let b = f1 a
in
case fe (a, b) of
NoEvent -> (kSwitchAuxA1AE f1 fe, b)
Event c -> sfTF (k (arr f1) c) a
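-- A usage sketch (illustrative only; 'integral' is assumed to come from the
-- public FRP.Yampa API and is not imported by this module): integrate the
-- input until the running output reaches 10, then keep the frozen
-- integrator (and hence its accumulated state) but negate everything it
-- produces from then on.
--
-- capped :: SF Double Double
-- capped = kSwitch integral
--                  (arr (\(_, y) -> if y >= 10 then Event () else NoEvent))
--                  (\cont () -> cont >>> arr negate)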
-- | 'kSwitch' with delayed observation.
--
-- Applies the first SF until the input signal and the output signal, when
-- passed to the second SF, produce an event, in which case the original SF and
-- the event are used to build a new SF to switch into.
--
-- The switch is decoupled ('dSwitch').
--
-- See <https://wiki.haskell.org/Yampa#Switches> for more
-- information on how this switch works.
-- !!! Has not been optimized properly. Should be like kSwitch.
dkSwitch :: SF a b -> SF (a,b) (Event c) -> (SF a b -> c -> SF a b) -> SF a b
dkSwitch sf10@(SF {sfTF = tf10}) (SF {sfTF = tfe0}) k = SF {sfTF = tf0}
where
tf0 a0 =
let (sf1, b0) = tf10 a0
in (case tfe0 (a0, b0) of
(sfe, NoEvent) -> dkSwitchAux sf1 sfe
(_, Event c0) -> fst (sfTF (k sf10 c0) a0),
b0)
dkSwitchAux sf1 (SFArr _ (FDC NoEvent)) = sf1
dkSwitchAux sf1 sfe = SF' tf -- False
where
tf dt a =
let (sf1', b) = (sfTF' sf1) dt a
in (case (sfTF' sfe) dt (a, b) of
(sfe', NoEvent) -> dkSwitchAux sf1' sfe'
(_, Event c) -> fst (sfTF (k (freeze sf1 dt) c) a),
b)
------------------------------------------------------------------------------
-- Parallel composition and switching over collections with broadcasting
------------------------------------------------------------------------------
-- | Tuple a value up with every element of a collection of signal
-- functions.
broadcast :: Functor col => a -> col sf -> col (a, sf)
broadcast a = fmap (\sf -> (a, sf))
-- !!! Hmm. We should really optimize here.
-- !!! Check for Arr in parallel!
-- !!! Check for Arr FDE in parallel!!!
-- !!! Check for EP in parallel!!!!!
-- !!! Cf &&&.
-- !!! But how??? All we know is that the collection is a functor ...
-- !!! Maybe that kind of generality does not make much sense for
-- !!! par and parB? (Although it is nice to be able to switch into a
-- !!! par or parB from within a pSwitch[B].)
-- !!! If we had a parBList, that could be defined in terms of &&&, surely?
-- !!! E.g.
-- !!! parBList [] = constant []
-- !!! parBList (sf:sfs) = sf &&& parBList sfs >>> arr (\(x,xs) -> x:xs)
-- !!!
-- !!! This ought to optimize quite well. E.g.
-- !!! parBList [arr1,arr2,arr3]
-- !!! = arr1 &&& parBList [arr2,arr3] >>> arrX
-- !!! = arr1 &&& (arr2 &&& parBList [arr3] >>> arrX) >>> arrX
-- !!! = arr1 &&& (arr2 &&& (arr3 &&& parBList [] >>> arrX) >>> arrX) >>> arrX
-- !!! = arr1 &&& (arr2 &&& (arr3C >>> arrX) >>> arrX) >>> arrX
-- !!! = arr1 &&& (arr2 &&& (arr3CcpX) >>> arrX) >>> arrX
-- !!! = arr1 &&& (arr23CcpX >>> arrX) >>> arrX
-- !!! = arr1 &&& (arr23CcpXcpX) >>> arrX
-- !!! = arr123CcpXcpXcpX
-- | Spatial parallel composition of a signal function collection.
-- Given a collection of signal functions, it returns a signal
-- function that broadcasts its input signal to every element
-- of the collection, to return a signal carrying a collection
-- of outputs. See 'par'.
--
-- For more information on how parallel composition works, check
-- <http://haskell.cs.yale.edu/wp-content/uploads/2011/01/yampa-arcade.pdf>
parB :: Functor col => col (SF a b) -> SF a (col b)
parB = par broadcast
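-- A small sketch (assuming 'embed' and 'deltaEncode' from the public
-- FRP.Yampa API): every SF in the collection sees the same input.
--
-- >>> embed (parB [arr (+1), arr (*2)]) (deltaEncode 0.1 [1, 2, 3])
-- [[2,2],[3,4],[4,6]]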
-- | Parallel switch (dynamic collection of signal functions spatially composed
-- in parallel) with broadcasting. See 'pSwitch'.
--
-- For more information on how parallel composition works, check
-- <http://haskell.cs.yale.edu/wp-content/uploads/2011/01/yampa-arcade.pdf>
pSwitchB :: Functor col =>
col (SF a b) -> SF (a,col b) (Event c) -> (col (SF a b)->c-> SF a (col b))
-> SF a (col b)
pSwitchB = pSwitch broadcast
-- | Decoupled parallel switch with broadcasting (dynamic collection of
-- signal functions spatially composed in parallel). See 'dpSwitch'.
--
-- For more information on how parallel composition works, check
-- <http://haskell.cs.yale.edu/wp-content/uploads/2011/01/yampa-arcade.pdf>
dpSwitchB :: Functor col =>
col (SF a b) -> SF (a,col b) (Event c) -> (col (SF a b)->c->SF a (col b))
-> SF a (col b)
dpSwitchB = dpSwitch broadcast
-- | Recurring parallel switch with broadcasting.
--
-- Uses the given collection of SFs, until an event comes in the input, in
-- which case the function in the 'Event' is used to transform the collections
-- of SF to be used with 'rpSwitch' again, until the next event comes in the
-- input, and so on.
--
-- Broadcasting is used to decide which subpart of the input goes to each SF in
-- the collection.
--
-- See 'rpSwitch'.
--
-- For more information on how parallel composition works, check
-- <http://haskell.cs.yale.edu/wp-content/uploads/2011/01/yampa-arcade.pdf>
rpSwitchB :: Functor col =>
col (SF a b) -> SF (a, Event (col (SF a b) -> col (SF a b))) (col b)
rpSwitchB = rpSwitch broadcast
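-- A usage sketch (illustrative only; 'coll' is a hypothetical name and the
-- choice of Double is arbitrary): starting from two SFs, feeding the input
-- (x, Event (++ [arr negate])) at some sample extends the collection to
-- three SFs from that sample onwards.
--
-- coll :: SF (Double, Event ([SF Double Double] -> [SF Double Double])) [Double]
-- coll = rpSwitchB [arr (+1), arr (*2)]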
-- | Decoupled recurring parallel switch with broadcasting.
--
-- Uses the given collection of SFs, until an event comes in the input, in
-- which case the function in the 'Event' is used to transform the collections
-- of SF to be used with 'rpSwitch' again, until the next event comes in the
-- input, and so on.
--
-- Broadcasting is used to decide which subpart of the input goes to each SF in
-- the collection.
--
-- This is the decoupled version of 'rpSwitchB'.
--
-- For more information on how parallel composition works, check
-- <http://haskell.cs.yale.edu/wp-content/uploads/2011/01/yampa-arcade.pdf>
drpSwitchB :: Functor col =>
col (SF a b) -> SF (a, Event (col (SF a b) -> col (SF a b))) (col b)
drpSwitchB = drpSwitch broadcast
------------------------------------------------------------------------------
-- Parallel composition and switching over collections with general routing
------------------------------------------------------------------------------
-- | Spatial parallel composition of a signal function collection parameterized
-- on the routing function.
--
par :: Functor col
=> (forall sf . (a -> col sf -> col (b, sf)))
-- ^ Determines the input to each signal function
-- in the collection. IMPORTANT! The routing function MUST
-- preserve the structure of the signal function collection.
-> col (SF b c)
-- ^ Signal function collection.
-> SF a (col c)
par rf sfs0 = SF {sfTF = tf0}
where
tf0 a0 =
let bsfs0 = rf a0 sfs0
sfcs0 = fmap (\(b0, sf0) -> (sfTF sf0) b0) bsfs0
sfs = fmap fst sfcs0
cs0 = fmap snd sfcs0
in
(parAux rf sfs, cs0)
-- Internal definition. Also used in parallel switchers.
parAux :: Functor col =>
(forall sf . (a -> col sf -> col (b, sf)))
-> col (SF' b c)
-> SF' a (col c)
parAux rf sfs = SF' tf -- True
where
tf dt a =
let bsfs = rf a sfs
sfcs' = fmap (\(b, sf) -> (sfTF' sf) dt b) bsfs
sfs' = fmap fst sfcs'
cs = fmap snd sfcs'
in
(parAux rf sfs', cs)
-- | Parallel switch parameterized on the routing function. This is the most
-- general switch from which all other (non-delayed) switches in principle
-- can be derived. The signal function collection is spatially composed in
-- parallel and run until the event signal function has an occurrence. Once
-- the switching event occurs, all signal functions are "frozen" and their
-- continuations are passed to the continuation function, along with the
-- event value.
--
-- !!! Could be optimized on the event source being SFArr, SFArrE, SFArrEE
pSwitch :: Functor col
=> (forall sf . (a -> col sf -> col (b, sf)))
-- ^ Routing function: determines the input to each signal
-- function in the collection. IMPORTANT! The routing function
-- has an obligation to preserve the structure of the signal
-- function collection.
-> col (SF b c)
-- ^ Signal function collection.
-> SF (a, col c) (Event d)
-- ^ Signal function generating the switching event.
-> (col (SF b c) -> d -> SF a (col c))
-- ^ Continuation to be invoked once event occurs.
-> SF a (col c)
pSwitch rf sfs0 sfe0 k = SF {sfTF = tf0}
where
tf0 a0 =
let bsfs0 = rf a0 sfs0
sfcs0 = fmap (\(b0, sf0) -> (sfTF sf0) b0) bsfs0
sfs = fmap fst sfcs0
cs0 = fmap snd sfcs0
in
case (sfTF sfe0) (a0, cs0) of
(sfe, NoEvent) -> (pSwitchAux sfs sfe, cs0)
(_, Event d0) -> sfTF (k sfs0 d0) a0
pSwitchAux sfs (SFArr _ (FDC NoEvent)) = parAux rf sfs
pSwitchAux sfs sfe = SF' tf -- False
where
tf dt a =
let bsfs = rf a sfs
sfcs' = fmap (\(b, sf) -> (sfTF' sf) dt b) bsfs
sfs' = fmap fst sfcs'
cs = fmap snd sfcs'
in
case (sfTF' sfe) dt (a, cs) of
(sfe', NoEvent) -> (pSwitchAux sfs' sfe', cs)
(_, Event d) -> sfTF (k (freezeCol sfs dt) d) a
-- | Parallel switch with delayed observation parameterized on the routing
-- function.
--
-- The collection argument to the function invoked on the
-- switching event is of particular interest: it captures the
-- continuations of the signal functions running in the collection
-- maintained by 'dpSwitch' at the time of the switching event,
-- thus making it possible to preserve their state across a switch.
-- Since the continuations are plain, ordinary signal functions,
-- they can be resumed, discarded, stored, or combined with
-- other signal functions.
-- !!! Could be optimized on the event source being SFArr, SFArrE, SFArrEE.
--
dpSwitch :: Functor col
=> (forall sf . (a -> col sf -> col (b, sf)))
-- ^ Routing function. Its purpose is to pair up each running
-- signal function in the collection maintained by 'dpSwitch'
-- with the input it is going to see at each point in time. All
-- the routing function can do is specify how the input is
-- distributed.
-> col (SF b c)
-- ^ Initial collection of signal functions.
-> SF (a, col c) (Event d)
-- ^ Signal function that observes the external input signal and
-- the output signals from the collection in order to produce a
-- switching event.
-> (col (SF b c) -> d -> SF a (col c))
-- ^ The fourth argument is a function that is invoked when the
-- switching event occurs, yielding a new signal function to
-- switch into based on the collection of signal functions
-- previously running and the value carried by the switching
-- event. This allows the collection to be updated and then
-- switched back in, typically by employing 'dpSwitch' again.
-> SF a (col c)
dpSwitch rf sfs0 sfe0 k = SF {sfTF = tf0}
where
tf0 a0 =
let bsfs0 = rf a0 sfs0
sfcs0 = fmap (\(b0, sf0) -> (sfTF sf0) b0) bsfs0
cs0 = fmap snd sfcs0
in
(case (sfTF sfe0) (a0, cs0) of
(sfe, NoEvent) -> dpSwitchAux (fmap fst sfcs0) sfe
(_, Event d0) -> fst (sfTF (k sfs0 d0) a0),
cs0)
dpSwitchAux sfs (SFArr _ (FDC NoEvent)) = parAux rf sfs
dpSwitchAux sfs sfe = SF' tf -- False
where
tf dt a =
let bsfs = rf a sfs
sfcs' = fmap (\(b, sf) -> (sfTF' sf) dt b) bsfs
cs = fmap snd sfcs'
in
(case (sfTF' sfe) dt (a, cs) of
(sfe', NoEvent) -> dpSwitchAux (fmap fst sfcs')
sfe'
(_, Event d) -> fst (sfTF (k (freezeCol sfs dt)
d)
a),
cs)
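-- A sketch of the pattern described above (illustrative only; 'integral' is
-- assumed to come from the public FRP.Yampa API, 'growingIntegrals' is a
-- hypothetical name, and the "example" tag given to 'safeZip' is arbitrary):
-- a growing list of integrators whose frozen continuations (and thus their
-- accumulated state) are re-entered after each switch, with one more
-- integrator appended.
--
-- growingIntegrals :: [SF Double Double] -> SF ([Double], Event ()) [Double]
-- growingIntegrals sfs =
--   dpSwitch (\(as, _) sfs' -> safeZip "example" as sfs')
--            sfs
--            (arr (snd . fst))  -- switch whenever the input event fires
--            (\conts () -> growingIntegrals (conts ++ [integral]))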
-- | Recurring parallel switch parameterized on the routing function.
--
-- Uses the given collection of SFs, until an event comes in the input, in
-- which case the function in the 'Event' is used to transform the collections
-- of SF to be used with 'rpSwitch' again, until the next event comes in the
-- input, and so on.
--
-- The routing function is used to decide which subpart of the input
-- goes to each SF in the collection.
--
-- This is the parallel version of 'rSwitch'.
rpSwitch :: Functor col
=> (forall sf . (a -> col sf -> col (b, sf)))
-- ^ Routing function: determines the input to each signal
-- function in the collection. IMPORTANT! The routing function
-- has an obligation to preserve the structure of the signal
-- function collection.
-> col (SF b c)
-- ^ Initial signal function collection.
-> SF (a, Event (col (SF b c) -> col (SF b c))) (col c)
rpSwitch rf sfs =
pSwitch (rf . fst) sfs (arr (snd . fst)) $ \sfs' f ->
noEventSnd >=- rpSwitch rf (f sfs')
{-
rpSwitch rf sfs = pSwitch (rf . fst) sfs (arr (snd . fst)) k
where
k sfs f = rpSwitch' (f sfs)
rpSwitch' sfs = pSwitch (rf . fst) sfs (NoEvent --> arr (snd . fst)) k
-}
-- | Recurring parallel switch with delayed observation parameterized on the
-- routing function.
--
-- Uses the given collection of SFs, until an event comes in the input, in
-- which case the function in the 'Event' is used to transform the collections
-- of SF to be used with 'rpSwitch' again, until the next event comes in the
-- input, and so on.
--
-- The routing function is used to decide which subpart of the input
-- goes to each SF in the collection.
--
-- This is the parallel version of 'drSwitch'.
drpSwitch :: Functor col
=> (forall sf . (a -> col sf -> col (b, sf)))
-- ^ Routing function: determines the input to each signal
-- function in the collection. IMPORTANT! The routing function
-- has an obligation to preserve the structure of the signal
-- function collection.
-> col (SF b c)
-- ^ Initial signal function collection.
-> SF (a, Event (col (SF b c) -> col (SF b c))) (col c)
drpSwitch rf sfs =
dpSwitch (rf . fst) sfs (arr (snd . fst)) $ \sfs' f ->
noEventSnd >=- drpSwitch rf (f sfs')
{-
drpSwitch rf sfs = dpSwitch (rf . fst) sfs (arr (snd . fst)) k
where
k sfs f = drpSwitch' (f sfs)
drpSwitch' sfs = dpSwitch (rf . fst) sfs (NoEvent-->arr (snd . fst)) k
-}
------------------------------------------------------------------------------
-- * Parallel composition/switchers with "zip" routing
------------------------------------------------------------------------------
-- | Parallel composition of a list of SFs.
--
-- Given a list of SFs, returns an SF that takes a list of inputs, applies
-- each SF to each input in order, and returns the SFs' outputs.
--
-- >>> embed (parZ [arr (+1), arr (+2)]) (deltaEncode 0.1 [[0, 0], [1, 1]])
-- [[1,2],[2,3]]
--
-- If there are more SFs than inputs, an exception is thrown.
--
-- >>> embed (parZ [arr (+1), arr (+1), arr (+2)]) (deltaEncode 0.1 [[0, 0], [1, 1]])
-- [[1,1,*** Exception: FRP.Yampa.Switches.parZ: Input list too short.
--
-- If there are more inputs than SFs, the unused inputs are ignored.
--
-- >>> embed (parZ [arr (+1)]) (deltaEncode 0.1 [[0, 0], [1, 1]])
-- [[1],[2]]
parZ :: [SF a b] -> SF [a] [b]
parZ = par (safeZip "parZ")
-- | Parallel switch (dynamic collection of signal functions spatially composed
-- in parallel). See 'pSwitch'.
--
-- For more information on how parallel composition works, check
-- <http://haskell.cs.yale.edu/wp-content/uploads/2011/01/yampa-arcade.pdf>
pSwitchZ :: [SF a b] -> SF ([a],[b]) (Event c) -> ([SF a b] -> c -> SF [a] [b])
-> SF [a] [b]
pSwitchZ = pSwitch (safeZip "pSwitchZ")
-- | Decoupled parallel switch with "zip" routing (dynamic collection of
-- signal functions spatially composed in parallel). See 'dpSwitch'.
--
-- For more information on how parallel composition works, check
-- <http://haskell.cs.yale.edu/wp-content/uploads/2011/01/yampa-arcade.pdf>
dpSwitchZ :: [SF a b] -> SF ([a],[b]) (Event c) -> ([SF a b] -> c ->SF [a] [b])
-> SF [a] [b]
dpSwitchZ = dpSwitch (safeZip "dpSwitchZ")
-- | Recurring parallel switch with "zip" routing.
--
-- Uses the given list of SFs, until an event comes in the input, in which case
-- the function in the 'Event' is used to transform the list of SF to be used
-- with 'rpSwitchZ' again, until the next event comes in the input, and so on.
--
-- Zip routing is used to decide which subpart of the input goes to each SF in
-- the list.
--
-- See 'rpSwitch'.
--
-- For more information on how parallel composition works, check
-- <http://haskell.cs.yale.edu/wp-content/uploads/2011/01/yampa-arcade.pdf>
rpSwitchZ :: [SF a b] -> SF ([a], Event ([SF a b] -> [SF a b])) [b]
rpSwitchZ = rpSwitch (safeZip "rpSwitchZ")
-- | Decoupled recurring parallel switch with "zip" routing.
--
-- Uses the given list of SFs, until an event comes in the input, in which case
-- the function in the 'Event' is used to transform the list of SF to be used
-- with 'rpSwitchZ' again, until the next event comes in the input, and so on.
--
-- Zip routing is used to decide which subpart of the input goes to each SF in
-- the list.
--
-- See 'rpSwitchZ' and 'drpSwitch'.
--
-- For more information on how parallel composition works, check
-- <http://haskell.cs.yale.edu/wp-content/uploads/2011/01/yampa-arcade.pdf>
drpSwitchZ :: [SF a b] -> SF ([a], Event ([SF a b] -> [SF a b])) [b]
drpSwitchZ = drpSwitch (safeZip "drpSwitchZ")
-- IPerez: This is actually unsafezip. Zip is actually safe. It works
-- regardless of which list is smallest. This version of zip is right-biased:
-- the second list determines the size of the final list.
safeZip :: String -> [a] -> [b] -> [(a,b)]
safeZip fn l1 l2 = safeZip' l1 l2
where
safeZip' :: [a] -> [b] -> [(a, b)]
safeZip' _ [] = []
safeZip' as (b:bs) = (head' as, b) : safeZip' (tail' as) bs
head' :: [a] -> a
head' [] = err
head' (a:_) = a
tail' :: [a] -> [a]
tail' [] = err
tail' (_:as) = as
err :: a
err = usrErr "FRP.Yampa.Switches" fn "Input list too short."
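-- For illustration (behaviour of the helper above): the second list dictates
-- the length of the result, surplus elements of the first list are dropped,
-- and a too-short first list triggers the usrErr above.
--
-- >>> safeZip "demo" [1, 2, 3] "ab"
-- [(1,'a'),(2,'b')]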
-- Freezes a "running" signal function, i.e., turns it into a continuation in
-- the form of a plain signal function.
freeze :: SF' a b -> DTime -> SF a b
freeze sf dt = SF {sfTF = (sfTF' sf) dt}
freezeCol :: Functor col => col (SF' a b) -> DTime -> col (SF a b)
freezeCol sfs dt = fmap (`freeze` dt) sfs
-- | Apply an SF to every element of a list.
--
-- Example:
--
-- >>> embed (parC integral) (deltaEncode 0.1 [[1, 2], [2, 4], [3, 6], [4.0, 8.0 :: Float]])
-- [[0.0,0.0],[0.1,0.2],[0.3,0.6],[0.6,1.2]]
--
-- The number of SFs or expected inputs is determined by the first input
-- list, and not expected to vary over time.
--
-- If more inputs come in a subsequent list, they are ignored.
--
-- >>> embed (parC (arr (+1))) (deltaEncode 0.1 [[0], [1, 1], [3, 4], [6, 7, 8], [1, 1], [0, 0], [1, 9, 8]])
-- [[1],[2],[4],[7],[2],[1],[2]]
--
-- If fewer inputs come in a subsequent list, an exception is thrown.
--
-- >>> embed (parC (arr (+1))) (deltaEncode 0.1 [[0, 0], [1, 1], [3, 4], [6, 7, 8], [1, 1], [0, 0], [1, 9, 8]])
-- [[1,1],[2,2],[4,5],[7,8],[2,2],[1,1],[2,10]]
parC :: SF a b -> SF [a] [b]
parC sf = SF $ \as -> let os = map (sfTF sf) as
bs = map snd os
sfs = map fst os
in (parCAux sfs, bs)
-- Internal definition. Also used in parallel switchers.
parCAux :: [SF' a b] -> SF' [a] [b]
parCAux sfs = SF' tf
where
tf dt as =
let os = map (\(a,sf) -> sfTF' sf dt a) $ safeZip "parC" as sfs
bs = map snd os
sfcs = map fst os
in
(listSeq sfcs `seq` parCAux sfcs, listSeq bs)
listSeq :: [a] -> [a]
listSeq x = x `seq` (listSeq' x)
listSeq' :: [a] -> [a]
listSeq' [] = []
listSeq' rs@(a:as) = a `seq` listSeq' as `seq` rs
-- Vim modeline
-- vim:set tabstop=8 expandtab:
|
ivanperez-keera/Yampa
|
yampa/src/FRP/Yampa/Switches.hs
|
bsd-3-clause
| 42,438
| 0
| 21
| 13,713
| 6,585
| 3,636
| 2,949
| 297
| 18
|
module BackslashPatternTests where
import Data.Either.Extra (fromRight)
import Helpers
import Test.QuickCheck.Regex.PCRE
import Test.QuickCheck.Regex.PCRE.Types
import Test.Tasty
import Test.Tasty.HUnit
bigBackslashPattern :: String
bigBackslashPattern =
"\\&\\\\\\^\\$\\.\\[\\|\\(\\)\\?\\*\\+\\{\\-\\]\\a\\e\\f\\n\\r\\t\\0\\15\\o{15}\\xAF\\x{AF}\\d\\D\\h\\H\\v\\V\\w\\W"
backslashPatterns :: [TestTree]
backslashPatterns =
[ testCase
"\\D"
("\\D" `shouldBe` Regex (Alternative [Quant (Backslash NonDigit)] [])),
testCase
"\\h"
("\\h" `shouldBe` Regex (Alternative [Quant (Backslash HorizontalWhiteSpace)] [])),
testCase
"\\H"
("\\H" `shouldBe` Regex (Alternative [Quant (Backslash NotHorizontalWhiteSpace)] [])),
testCase
"\\s"
("\\s" `shouldBe` Regex (Alternative [Quant (Backslash WhiteSpace)] [])),
testCase
"\\S"
("\\S" `shouldBe` Regex (Alternative [Quant (Backslash NotWhiteSpace)] [])),
testCase
"\\v"
("\\v" `shouldBe` Regex (Alternative [Quant (Backslash VerticalWhiteSpace)] [])),
testCase
"\\V"
("\\V" `shouldBe` Regex (Alternative [Quant (Backslash NotVerticalWhiteSpace)] [])),
testCase
"\\w"
("\\w" `shouldBe` Regex (Alternative [Quant (Backslash WordCharacter)] [])),
testCase
"\\W"
("\\W" `shouldBe` Regex (Alternative [Quant (Backslash NonWordCharacter)] [])),
testCase
"\\Q...*\\E"
("\\Q...*\\E" `shouldBe` Regex (Alternative [Quoted "...*"] [])),
testCase
"[\\Q^.\\E]"
( "[\\Q^.\\E]"
`shouldBe` Regex
( Alternative
[ Quant
( CharacterClass
( fromRight
(error "Left")
(characterClassCharacter "\\Q^.\\E")
)
[]
)
]
[]
)
),
testCase
"render [\\Q^.\\E]"
test_render_quotedclassliterals,
testCase
"\a"
("\a" `shouldBe` Regex (Alternative [Quant (Character '\a')] [])),
testCase
"\\01"
("\\01" `shouldBe` Regex (Alternative [Quant (Backslash (NonprintingOctalCode 1))] [])),
testCase
"\\013"
("\\013" `shouldBe` Regex (Alternative [Quant (Backslash (NonprintingOctalCode 11))] [])),
testCase
"\\11"
("\\11" `shouldBe` Regex (Alternative [Quant (Backslash (NonprintingOctalCode 9))] [])),
testCase
"()()()()()()()()()()()\\11"
( "()()()()()()()()()()()\\11"
`shouldBe` Regex
( Alternative
[ Quant (Subpattern (Alternative [] [])),
Quant (Subpattern (Alternative [] [])),
Quant (Subpattern (Alternative [] [])),
Quant (Subpattern (Alternative [] [])),
Quant (Subpattern (Alternative [] [])),
Quant (Subpattern (Alternative [] [])),
Quant (Subpattern (Alternative [] [])),
Quant (Subpattern (Alternative [] [])),
Quant (Subpattern (Alternative [] [])),
Quant (Subpattern (Alternative [] [])),
Quant (Subpattern (Alternative [] [])),
Quant (BackReference 11 (Alternative [] []))
]
[]
)
),
testCase
"\\113"
("\\113" `shouldBe` Regex (Alternative [Quant (Backslash (NonprintingOctalCode 75))] [])),
testCase
"\\o{013}"
("\\o{013}" `shouldBe` Regex (Alternative [Quant (Backslash (NonprintingOctalCodeBraces 11))] [])),
testCase
"\\xFF"
("\\xFF" `shouldBe` Regex (Alternative [Quant (Backslash (NonprintingHexCode 255))] [])),
testCase
bigBackslashPattern
( bigBackslashPattern
`shouldBe` Regex
( Alternative
[ Quant (Backslash (Nonalphanumeric '&')),
Quant (Backslash BackslashChar),
Quant (Backslash Caret),
Quant (Backslash Dollar),
Quant (Backslash Dot),
Quant (Backslash OpenSquareBracket),
Quant (Backslash Pipe),
Quant (Backslash OpenParens),
Quant (Backslash CloseParens),
Quant (Backslash QuestionMark),
Quant (Backslash Asterisk),
Quant (Backslash Plus),
Quant (Backslash OpenBrace),
Quant (Backslash Hyphen),
Quant (Backslash CloseSquareBracket),
Quant (Backslash NonprintingAlarm),
Quant (Backslash NonprintingEscape),
Quant (Backslash NonprintingFormFeed),
Quant (Backslash NonprintingLineFeed),
Quant (Backslash NonprintingCarriageReturn),
Quant (Backslash NonprintingTab),
Quant (Backslash (NonprintingOctalCode 0)),
Quant (Backslash (NonprintingOctalCode 13)),
Quant (Backslash (NonprintingOctalCodeBraces 13)),
Quant (Backslash (NonprintingHexCode 175)),
Quant (Backslash (NonprintingHexCodeBraces 175)),
Quant (Backslash Digit),
Quant (Backslash NonDigit),
Quant (Backslash HorizontalWhiteSpace),
Quant (Backslash NotHorizontalWhiteSpace),
Quant (Backslash VerticalWhiteSpace),
Quant (Backslash NotVerticalWhiteSpace),
Quant (Backslash WordCharacter),
Quant
( Backslash
NonWordCharacter
)
]
[]
)
),
testCase
"\\x{FF}"
("\\x{FF}" `shouldBe` Regex (Alternative [Quant (Backslash (NonprintingHexCodeBraces 255))] [])),
testCase
"\\0\\x\\015"
( "\\0\\x\\015"
`shouldBe` Regex
( Alternative
[ Quant (Backslash (NonprintingOctalCode 0)),
Quant (Backslash NonprintingHexZero),
Quant (Backslash (NonprintingOctalCode 13))
]
[]
)
)
]
test_render_quotedclassliterals :: Assertion
test_render_quotedclassliterals =
assertEqual
"parse and render did not preserve original regex string"
"[\\Q^.\\E]"
(toText . fromRight (Regex (Alternative [] [])) . parseRegex $ "[\\Q^.\\E]")
|
lorcanmcdonald/regexicon
|
src/Tests/BackslashPatternTests.hs
|
mit
| 6,589
| 0
| 19
| 2,292
| 1,858
| 962
| 896
| 156
| 1
|
-----------------------------------------------------------------------------
--
-- Module : Network.Google.Books
-- Copyright : (c) 2012-13 Brian W Bush
-- License : MIT
--
-- Maintainer : Brian W Bush <b.w.bush@acm.org>
-- Stability : Stable
-- Portability : Portable
--
-- | Functions for the Google Books API, see <https://developers.google.com/books/docs/v1/using#WorkingMyBookshelves>.
--
-----------------------------------------------------------------------------
module Network.Google.Books (
-- * Types
ShelfId
-- * Functions
, listBooks
, listBookshelves
) where
import Control.Monad (liftM)
import Data.Maybe (fromMaybe)
import Network.Google (AccessToken, appendQuery, doRequest, makeRequest)
import Network.HTTP.Conduit (Request)
import Text.JSON (JSObject, JSValue(..), Result(Ok), decode, fromJSObject, toJSObject, valFromObj)
-- | The host for API access.
booksHost :: String
booksHost = "www.googleapis.com"
-- | The API version used here.
booksApi :: (String, String)
booksApi = ("Gdata-version", "2")
-- | Bookshelf ID.
type ShelfId = String
-- | List the bookshelves, see <https://developers.google.com/books/docs/v1/using#RetrievingMyBookshelves>.
listBookshelves ::
AccessToken -- ^ The OAuth 2.0 access token.
-> IO JSValue -- ^ The action returning the bookshelves' metadata in JSON format.
listBookshelves accessToken =
doRequest $ booksRequest accessToken Nothing 0
-- | List the bookshelf IDs, see <https://developers.google.com/books/docs/v1/using#RetrievingMyBookshelves>.
listBookshelfIds ::
AccessToken -- ^ The OAuth 2.0 access token.
-> IO [String] -- ^ The action returning list of bookshelf IDs.
listBookshelfIds accessToken =
do
JSObject result <- listBookshelves accessToken
let
extractId :: JSValue -> String
extractId (JSObject x) =
let
y :: Rational
Ok (JSRational _ y) = valFromObj "id" x
in
show (round y :: Int)
items :: [JSValue]
Ok (JSArray items) = valFromObj "items" result
return $ map extractId items
-- | List the books, see <https://developers.google.com/books/docs/v1/using#RetrievingMyBookshelfVolumes>.
listBooks ::
AccessToken -- ^ The OAuth 2.0 access token.
-> [ShelfId] -- ^ The bookshelf IDs.
-> IO JSValue -- ^ The action returning the books' metadata in JSON format.
listBooks accessToken shelves =
do
shelves' <-
if null shelves
then listBookshelfIds accessToken
else return shelves
results <- mapM (listShelfBooks accessToken) shelves'
return $ JSArray results
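-- A usage sketch (illustrative only; obtaining the OAuth 2.0 'AccessToken'
-- is outside the scope of this module and is assumed to have happened
-- already, and 'dumpLibrary' is a hypothetical name):
--
-- dumpLibrary :: AccessToken -> IO ()
-- dumpLibrary token = do
--   shelves <- listBookshelves token
--   print shelves
--   books <- listBooks token []   -- an empty list walks every bookshelf
--   print books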
-- | List the books in a shelf, see <https://developers.google.com/books/docs/v1/using#RetrievingMyBookshelfVolumes>.
listShelfBooks ::
AccessToken -- ^ The OAuth 2.0 access token.
-> ShelfId -- ^ The bookshelf ID.
-> IO JSValue -- ^ The action returning the books' metadata in JSON format.
listShelfBooks accessToken shelf =
do
x <- listShelfBooks' accessToken shelf Nothing
let
y :: [JSValue]
y = concatMap items x
JSObject o = head x
z :: [(String, JSValue)]
z = fromJSObject o
u :: [(String, JSValue)]
u = filter (\w -> fst w /= "items") z
v :: [(String, JSValue)]
v = ("items", JSArray y) : u
return $ JSObject $ toJSObject v
-- | List the books in a shelf, see <https://developers.google.com/books/docs/v1/using#RetrievingMyBookshelfVolumes>.
listShelfBooks' ::
AccessToken -- ^ The OAuth 2.0 access token.
-> ShelfId -- ^ The bookshelf ID.
-> Maybe Int -- ^ The start index in the list of metadata.
-> IO [JSValue] -- ^ The action returning the books' metadata in JSON format.
listShelfBooks' accessToken shelf startIndex =
do
let
startIndex' :: Int
startIndex' = fromMaybe 0 startIndex
books <- doRequest $ booksRequest accessToken (Just shelf) startIndex'
let
startIndex'' = startIndex' + length (items books)
liftM (books :) $
if startIndex' + 1 <= totalItems books
then listShelfBooks' accessToken shelf $ Just startIndex''
else return []
-- | Find the total number of items in a shelf.
totalItems ::
JSValue -- ^ The books' metadata.
-> Int -- ^ The total number of books in the shelf.
totalItems (JSObject books) =
let
Ok count = "totalItems" `valFromObj` books
in
count
-- | Find the items in a list of books' metadata.
items ::
JSValue -- ^ The books' metadata
-> [JSValue] -- ^ The books in the metadata.
items (JSObject books) =
let
list = "items" `valFromObj` books
f (Ok x) = x
f _ = []
in
f list
-- | Make an HTTP request for Google Books.
booksRequest ::
AccessToken -- ^ The OAuth 2.0 access token.
-> Maybe ShelfId -- ^ The bookshelf ID.
-> Int -- ^ The starting index
-> Request -- ^ The request.
booksRequest accessToken shelf startIndex =
appendQuery
[
("maxResults", "40")
, ("startIndex", show startIndex)
]
$
makeRequest accessToken booksApi "GET"
(
booksHost
, "/books/v1/mylibrary/bookshelves" ++ maybe "" (\x -> "/" ++ x ++ "/volumes") shelf
)
|
rrnewton/hgdata
|
src/Network/Google/Books.hs
|
mit
| 5,213
| 0
| 16
| 1,201
| 1,000
| 542
| 458
| 113
| 2
|
-- |
-- Module: SwiftNav.SBP.Navigation
-- Copyright: Copyright (C) 2015 Swift Navigation, Inc.
-- License: LGPL-3
-- Maintainer: Mark Fine <dev@swiftnav.com>
-- Stability: experimental
-- Portability: portable
--
-- Geodetic navigation messages reporting GPS time, position, velocity, and
-- baseline position solutions. For position solutions, these messages define
-- several different position solutions: single-point (SPP), RTK, and pseudo-
-- absolute position solutions. The SPP is the standalone, absolute GPS
-- position solution using only a single receiver. The RTK solution is the
-- differential GPS solution, which can use either a fixed/integer or floating
-- carrier phase ambiguity. The pseudo-absolute position solution uses a user-
-- provided, well-surveyed base station position (if available) and the RTK
-- solution in tandem.
module SwiftNav.SBP.Navigation where
import BasicPrelude
import Control.Monad
import Control.Monad.Loops
import Data.Aeson.TH (deriveJSON, defaultOptions, fieldLabelModifier)
import Data.Binary
import Data.Binary.Get
import Data.Binary.IEEE754
import Data.Binary.Put
import Data.ByteString
import Data.ByteString.Lazy hiding ( ByteString )
import Data.Int
import Data.Word
import SwiftNav.SBP.Encoding
msgGpsTime :: Word16
msgGpsTime = 0x0100
-- | SBP class for message MSG_GPS_TIME (0x0100).
--
-- This message reports the GPS time, representing the time since the GPS epoch
-- began on midnight January 6, 1980 UTC. GPS time counts the weeks and seconds
-- of the week. The weeks begin at the Saturday/Sunday transition. GPS week 0
-- began at the beginning of the GPS time scale. Within each week number, the
-- GPS time of the week is between 0 and 604800 seconds (=60*60*24*7).
-- Note that GPS time does not accumulate leap seconds, and as of now, has a
-- small offset from UTC. In a message stream, this message precedes a set of
-- other navigation messages referenced to the same time (but lacking the ns
-- field) and indicates a more precise time of these messages.
data MsgGpsTime = MsgGpsTime
{ msgGpsTime_wn :: Word16
-- ^ GPS week number
, msgGpsTime_tow :: Word32
-- ^ GPS time of week rounded to the nearest millisecond
, msgGpsTime_ns :: Int32
-- ^ Nanosecond residual of millisecond-rounded TOW (ranges from -500000 to
-- 500000)
, msgGpsTime_flags :: Word8
-- ^ Status flags (reserved)
} deriving ( Show, Read, Eq )
instance Binary MsgGpsTime where
get = do
msgGpsTime_wn <- getWord16le
msgGpsTime_tow <- getWord32le
msgGpsTime_ns <- liftM fromIntegral getWord32le
msgGpsTime_flags <- getWord8
return MsgGpsTime {..}
put MsgGpsTime {..} = do
putWord16le msgGpsTime_wn
putWord32le msgGpsTime_tow
putWord32le $ fromIntegral msgGpsTime_ns
putWord8 msgGpsTime_flags
$(deriveJSON defaultOptions {fieldLabelModifier = fromMaybe "msgGpsTime_" . stripPrefix "msgGpsTime_"}
''MsgGpsTime)
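-- A round-trip sketch (illustrative only, not part of the generated
-- bindings): the Binary instance above writes the little-endian fields in
-- declaration order, so encoding followed by decoding is expected to be the
-- identity on well-formed values.
--
-- >>> let t = MsgGpsTime { msgGpsTime_wn = 1867, msgGpsTime_tow = 510191000, msgGpsTime_ns = -250, msgGpsTime_flags = 0 }
-- >>> decode (encode t) == t
-- True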
msgDops :: Word16
msgDops = 0x0206
-- | SBP class for message MSG_DOPS (0x0206).
--
-- This dilution of precision (DOP) message describes the effect of navigation
-- satellite geometry on positional measurement precision.
data MsgDops = MsgDops
{ msgDops_tow :: Word32
-- ^ GPS Time of Week
, msgDops_gdop :: Word16
-- ^ Geometric Dilution of Precision
, msgDops_pdop :: Word16
-- ^ Position Dilution of Precision
, msgDops_tdop :: Word16
-- ^ Time Dilution of Precision
, msgDops_hdop :: Word16
-- ^ Horizontal Dilution of Precision
, msgDops_vdop :: Word16
-- ^ Vertical Dilution of Precision
} deriving ( Show, Read, Eq )
instance Binary MsgDops where
get = do
msgDops_tow <- getWord32le
msgDops_gdop <- getWord16le
msgDops_pdop <- getWord16le
msgDops_tdop <- getWord16le
msgDops_hdop <- getWord16le
msgDops_vdop <- getWord16le
return MsgDops {..}
put MsgDops {..} = do
putWord32le msgDops_tow
putWord16le msgDops_gdop
putWord16le msgDops_pdop
putWord16le msgDops_tdop
putWord16le msgDops_hdop
putWord16le msgDops_vdop
$(deriveJSON defaultOptions {fieldLabelModifier = fromMaybe "msgDops_" . stripPrefix "msgDops_"}
''MsgDops)
msgPosEcef :: Word16
msgPosEcef = 0x0200
-- | SBP class for message MSG_POS_ECEF (0x0200).
--
-- The position solution message reports absolute Earth Centered Earth Fixed
-- (ECEF) coordinates and the status (single point vs pseudo-absolute RTK) of
-- the position solution. If the rover receiver knows the surveyed position of
-- the base station and has an RTK solution, this reports a pseudo-absolute
-- position solution using the base station position and the rover's RTK
-- baseline vector. The full GPS time is given by the preceding MSG_GPS_TIME
-- with the matching time-of-week (tow).
data MsgPosEcef = MsgPosEcef
{ msgPosEcef_tow :: Word32
-- ^ GPS Time of Week
, msgPosEcef_x :: Double
-- ^ ECEF X coordinate
, msgPosEcef_y :: Double
-- ^ ECEF Y coordinate
, msgPosEcef_z :: Double
-- ^ ECEF Z coordinate
, msgPosEcef_accuracy :: Word16
-- ^ Position accuracy estimate (not implemented). Defaults to 0.
, msgPosEcef_n_sats :: Word8
-- ^ Number of satellites used in solution
, msgPosEcef_flags :: Word8
-- ^ Status flags
} deriving ( Show, Read, Eq )
instance Binary MsgPosEcef where
get = do
msgPosEcef_tow <- getWord32le
msgPosEcef_x <- getFloat64le
msgPosEcef_y <- getFloat64le
msgPosEcef_z <- getFloat64le
msgPosEcef_accuracy <- getWord16le
msgPosEcef_n_sats <- getWord8
msgPosEcef_flags <- getWord8
return MsgPosEcef {..}
put MsgPosEcef {..} = do
putWord32le msgPosEcef_tow
putFloat64le msgPosEcef_x
putFloat64le msgPosEcef_y
putFloat64le msgPosEcef_z
putWord16le msgPosEcef_accuracy
putWord8 msgPosEcef_n_sats
putWord8 msgPosEcef_flags
$(deriveJSON defaultOptions {fieldLabelModifier = fromMaybe "msgPosEcef_" . stripPrefix "msgPosEcef_"}
''MsgPosEcef)
msgPosLlh :: Word16
msgPosLlh = 0x0201
-- | SBP class for message MSG_POS_LLH (0x0201).
--
-- This position solution message reports the absolute geodetic coordinates and
-- the status (single point vs pseudo-absolute RTK) of the position solution.
-- If the rover receiver knows the surveyed position of the base station and
-- has an RTK solution, this reports a pseudo-absolute position solution using
-- the base station position and the rover's RTK baseline vector. The full GPS
-- time is given by the preceding MSG_GPS_TIME with the matching time-of-week
-- (tow).
data MsgPosLlh = MsgPosLlh
{ msgPosLlh_tow :: Word32
-- ^ GPS Time of Week
, msgPosLlh_lat :: Double
-- ^ Latitude
, msgPosLlh_lon :: Double
-- ^ Longitude
, msgPosLlh_height :: Double
-- ^ Height
, msgPosLlh_h_accuracy :: Word16
-- ^ Horizontal position accuracy estimate (not implemented). Defaults to 0.
, msgPosLlh_v_accuracy :: Word16
-- ^ Vertical position accuracy estimate (not implemented). Defaults to 0.
, msgPosLlh_n_sats :: Word8
-- ^ Number of satellites used in solution.
, msgPosLlh_flags :: Word8
-- ^ Status flags
} deriving ( Show, Read, Eq )
instance Binary MsgPosLlh where
get = do
msgPosLlh_tow <- getWord32le
msgPosLlh_lat <- getFloat64le
msgPosLlh_lon <- getFloat64le
msgPosLlh_height <- getFloat64le
msgPosLlh_h_accuracy <- getWord16le
msgPosLlh_v_accuracy <- getWord16le
msgPosLlh_n_sats <- getWord8
msgPosLlh_flags <- getWord8
return MsgPosLlh {..}
put MsgPosLlh {..} = do
putWord32le msgPosLlh_tow
putFloat64le msgPosLlh_lat
putFloat64le msgPosLlh_lon
putFloat64le msgPosLlh_height
putWord16le msgPosLlh_h_accuracy
putWord16le msgPosLlh_v_accuracy
putWord8 msgPosLlh_n_sats
putWord8 msgPosLlh_flags
$(deriveJSON defaultOptions {fieldLabelModifier = fromMaybe "msgPosLlh_" . stripPrefix "msgPosLlh_"}
''MsgPosLlh)
msgBaselineEcef :: Word16
msgBaselineEcef = 0x0202
-- | SBP class for message MSG_BASELINE_ECEF (0x0202).
--
-- This message reports the baseline solution in Earth Centered Earth Fixed
-- (ECEF) coordinates. This baseline is the relative vector distance from the
-- base station to the rover receiver. The full GPS time is given by the
-- preceding MSG_GPS_TIME with the matching time-of-week (tow).
data MsgBaselineEcef = MsgBaselineEcef
{ msgBaselineEcef_tow :: Word32
-- ^ GPS Time of Week
, msgBaselineEcef_x :: Int32
-- ^ Baseline ECEF X coordinate
, msgBaselineEcef_y :: Int32
-- ^ Baseline ECEF Y coordinate
, msgBaselineEcef_z :: Int32
-- ^ Baseline ECEF Z coordinate
, msgBaselineEcef_accuracy :: Word16
-- ^ Position accuracy estimate (not implemented). Defaults to 0.
, msgBaselineEcef_n_sats :: Word8
-- ^ Number of satellites used in solution
, msgBaselineEcef_flags :: Word8
-- ^ Status flags
} deriving ( Show, Read, Eq )
instance Binary MsgBaselineEcef where
get = do
msgBaselineEcef_tow <- getWord32le
msgBaselineEcef_x <- liftM fromIntegral getWord32le
msgBaselineEcef_y <- liftM fromIntegral getWord32le
msgBaselineEcef_z <- liftM fromIntegral getWord32le
msgBaselineEcef_accuracy <- getWord16le
msgBaselineEcef_n_sats <- getWord8
msgBaselineEcef_flags <- getWord8
return MsgBaselineEcef {..}
put MsgBaselineEcef {..} = do
putWord32le msgBaselineEcef_tow
putWord32le $ fromIntegral msgBaselineEcef_x
putWord32le $ fromIntegral msgBaselineEcef_y
putWord32le $ fromIntegral msgBaselineEcef_z
putWord16le msgBaselineEcef_accuracy
putWord8 msgBaselineEcef_n_sats
putWord8 msgBaselineEcef_flags
$(deriveJSON defaultOptions {fieldLabelModifier = fromMaybe "msgBaselineEcef_" . stripPrefix "msgBaselineEcef_"}
''MsgBaselineEcef)
msgBaselineNed :: Word16
msgBaselineNed = 0x0203
-- | SBP class for message MSG_BASELINE_NED (0x0203).
--
-- This message reports the baseline solution in North East Down (NED)
-- coordinates. This baseline is the relative vector distance from the base
-- station to the rover receiver, and NED coordinate system is defined at the
-- local tangent plane centered at the base station position. The full GPS
-- time is given by the preceding MSG_GPS_TIME with the matching time-of-week
-- (tow).
data MsgBaselineNed = MsgBaselineNed
{ msgBaselineNed_tow :: Word32
-- ^ GPS Time of Week
, msgBaselineNed_n :: Int32
-- ^ Baseline North coordinate
, msgBaselineNed_e :: Int32
-- ^ Baseline East coordinate
, msgBaselineNed_d :: Int32
-- ^ Baseline Down coordinate
, msgBaselineNed_h_accuracy :: Word16
-- ^ Horizontal position accuracy estimate (not implemented). Defaults to 0.
, msgBaselineNed_v_accuracy :: Word16
-- ^ Vertical position accuracy estimate (not implemented). Defaults to 0.
, msgBaselineNed_n_sats :: Word8
-- ^ Number of satellites used in solution
, msgBaselineNed_flags :: Word8
-- ^ Status flags
} deriving ( Show, Read, Eq )
instance Binary MsgBaselineNed where
get = do
msgBaselineNed_tow <- getWord32le
msgBaselineNed_n <- liftM fromIntegral getWord32le
msgBaselineNed_e <- liftM fromIntegral getWord32le
msgBaselineNed_d <- liftM fromIntegral getWord32le
msgBaselineNed_h_accuracy <- getWord16le
msgBaselineNed_v_accuracy <- getWord16le
msgBaselineNed_n_sats <- getWord8
msgBaselineNed_flags <- getWord8
return MsgBaselineNed {..}
put MsgBaselineNed {..} = do
putWord32le msgBaselineNed_tow
putWord32le $ fromIntegral msgBaselineNed_n
putWord32le $ fromIntegral msgBaselineNed_e
putWord32le $ fromIntegral msgBaselineNed_d
putWord16le msgBaselineNed_h_accuracy
putWord16le msgBaselineNed_v_accuracy
putWord8 msgBaselineNed_n_sats
putWord8 msgBaselineNed_flags
$(deriveJSON defaultOptions {fieldLabelModifier = fromMaybe "msgBaselineNed_" . stripPrefix "msgBaselineNed_"}
''MsgBaselineNed)
msgVelEcef :: Word16
msgVelEcef = 0x0204
-- | SBP class for message MSG_VEL_ECEF (0x0204).
--
-- This message reports the velocity in Earth Centered Earth Fixed (ECEF)
-- coordinates. The full GPS time is given by the preceding MSG_GPS_TIME with
-- the matching time-of-week (tow).
data MsgVelEcef = MsgVelEcef
{ msgVelEcef_tow :: Word32
-- ^ GPS Time of Week
, msgVelEcef_x :: Int32
-- ^ Velocity ECEF X coordinate
, msgVelEcef_y :: Int32
-- ^ Velocity ECEF Y coordinate
, msgVelEcef_z :: Int32
-- ^ Velocity ECEF Z coordinate
, msgVelEcef_accuracy :: Word16
-- ^ Velocity accuracy estimate (not implemented). Defaults to 0.
, msgVelEcef_n_sats :: Word8
-- ^ Number of satellites used in solution
, msgVelEcef_flags :: Word8
-- ^ Status flags (reserved)
} deriving ( Show, Read, Eq )
instance Binary MsgVelEcef where
get = do
msgVelEcef_tow <- getWord32le
msgVelEcef_x <- liftM fromIntegral getWord32le
msgVelEcef_y <- liftM fromIntegral getWord32le
msgVelEcef_z <- liftM fromIntegral getWord32le
msgVelEcef_accuracy <- getWord16le
msgVelEcef_n_sats <- getWord8
msgVelEcef_flags <- getWord8
return MsgVelEcef {..}
put MsgVelEcef {..} = do
putWord32le msgVelEcef_tow
putWord32le $ fromIntegral msgVelEcef_x
putWord32le $ fromIntegral msgVelEcef_y
putWord32le $ fromIntegral msgVelEcef_z
putWord16le msgVelEcef_accuracy
putWord8 msgVelEcef_n_sats
putWord8 msgVelEcef_flags
$(deriveJSON defaultOptions {fieldLabelModifier = fromMaybe "msgVelEcef_" . stripPrefix "msgVelEcef_"}
''MsgVelEcef)
msgVelNed :: Word16
msgVelNed = 0x0205
-- | SBP class for message MSG_VEL_NED (0x0205).
--
-- This message reports the velocity in local North East Down (NED)
-- coordinates. The full GPS time is given by the preceding MSG_GPS_TIME with
-- the matching time-of-week (tow).
data MsgVelNed = MsgVelNed
{ msgVelNed_tow :: Word32
-- ^ GPS Time of Week
, msgVelNed_n :: Int32
-- ^ Velocity North coordinate
, msgVelNed_e :: Int32
-- ^ Velocity East coordinate
, msgVelNed_d :: Int32
-- ^ Velocity Down coordinate
, msgVelNed_h_accuracy :: Word16
-- ^ Horizontal velocity accuracy estimate (not implemented). Defaults to 0.
, msgVelNed_v_accuracy :: Word16
-- ^ Vertical velocity accuracy estimate (not implemented). Defaults to 0.
, msgVelNed_n_sats :: Word8
-- ^ Number of satellites used in solution
, msgVelNed_flags :: Word8
-- ^ Status flags (reserved)
} deriving ( Show, Read, Eq )
instance Binary MsgVelNed where
get = do
msgVelNed_tow <- getWord32le
msgVelNed_n <- liftM fromIntegral getWord32le
msgVelNed_e <- liftM fromIntegral getWord32le
msgVelNed_d <- liftM fromIntegral getWord32le
msgVelNed_h_accuracy <- getWord16le
msgVelNed_v_accuracy <- getWord16le
msgVelNed_n_sats <- getWord8
msgVelNed_flags <- getWord8
return MsgVelNed {..}
put MsgVelNed {..} = do
putWord32le msgVelNed_tow
putWord32le $ fromIntegral msgVelNed_n
putWord32le $ fromIntegral msgVelNed_e
putWord32le $ fromIntegral msgVelNed_d
putWord16le msgVelNed_h_accuracy
putWord16le msgVelNed_v_accuracy
putWord8 msgVelNed_n_sats
putWord8 msgVelNed_flags
$(deriveJSON defaultOptions {fieldLabelModifier = fromMaybe "msgVelNed_" . stripPrefix "msgVelNed_"}
''MsgVelNed)
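-- Editor's sketch (hypothetical, not part of the generated bindings): the
-- Binary instances above are written to be symmetric, so a message should
-- survive an encode/decode round trip. Assumes Data.Binary's encode/decode
-- are in scope in this module, as the surrounding instances suggest.
_exampleVelNedRoundTrip :: Bool
_exampleVelNedRoundTrip = decode (encode msg) == msg
  where
    msg = MsgVelNed
      { msgVelNed_tow        = 501867000
      , msgVelNed_n          = 12
      , msgVelNed_e          = -3
      , msgVelNed_d          = 1
      , msgVelNed_h_accuracy = 0
      , msgVelNed_v_accuracy = 0
      , msgVelNed_n_sats     = 9
      , msgVelNed_flags      = 0
      }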
|
mfine/libsbp
|
haskell/src/SwiftNav/SBP/Navigation.hs
|
lgpl-3.0
| 15,551
| 0
| 11
| 3,151
| 2,218
| 1,162
| 1,056
| -1
| -1
|
module Digits (digits, digitsCount, digitsToNum) where
digits :: Integral a => a -> [a]
digits 0 = []
digits n = digits(n `div` 10) ++ [n `mod` 10]
digitsCount :: Integral a => a -> Int
digitsCount x = length (digits x)
digitsToNum :: [Int] -> Int
digitsToNum = foldl (\predecessor x -> predecessor*10 + x) 0
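-- A small usage sketch added for illustration; `reverseNum` is hypothetical
-- and not part of the original module. Note that `digits 0 = []`, so both
-- `digitsCount 0` and `reverseNum 0` evaluate to 0 with these definitions.
reverseNum :: Int -> Int
reverseNum = digitsToNum . reverse . digits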
|
nothiphop/project-euler
|
lib/Digits.hs
|
apache-2.0
| 312
| 0
| 9
| 60
| 150
| 82
| 68
| 8
| 1
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ms-MY">
<title>Forced Browse Add-On</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
veggiespam/zap-extensions
|
addOns/bruteforce/src/main/javahelp/org/zaproxy/zap/extension/bruteforce/resources/help_ms_MY/helpset_ms_MY.hs
|
apache-2.0
| 966
| 79
| 67
| 158
| 415
| 210
| 205
| -1
| -1
|
module Main where
import Control.Monad
import Control.Monad.IO.Class
import FileIO (TransMethod (Naive))
import RuntimeProcessManager (withRuntimeProcess)
import System.Console.Haskeline (runInputT, defaultSettings)
import System.Directory (doesFileExist)
import System.IO
import qualified Data.Map as Map
-- REPL-specific modules
import BackEnd
import Loop
import qualified Environment as Env
import qualified History as Hist
main :: IO ()
main = do
withRuntimeProcess "FileServer" LineBuffering
(\(inP,outP) ->
do liftIO printHelp
runInputT defaultSettings
(Loop.loop (inP, outP) (0, compileN, [Naive])
Map.empty Env.empty Hist.empty Hist.empty 0 False False False False 0))
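-- Editor's note: fileExist below blocks until the named file appears by
-- polling doesFileExist in a tight loop, with no delay between checks.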
fileExist :: String -> IO ()
fileExist name = do
exist <- doesFileExist name
unless exist $ fileExist name
printFile :: IO ()
printFile = do
f <- getLine
contents <- readFile f
putStr contents
|
zhiyuanshi/fcore
|
repl/Main.hs
|
bsd-2-clause
| 1,085
| 0
| 16
| 328
| 284
| 157
| 127
| 30
| 1
|
module Gadgets.PEFile where
import Data.PE.Parser
import Data.PE.Structures
import Data.PE.Utils
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import Data.Maybe
import System.Environment
import Data.Word
import System.IO.Unsafe
import Data.Binary
import Data.Binary.Get
import Data.Char
import Data.Bits
import Data.Array.Unboxed
import Data.List
type Filename = String
type Secname = String
type SectionMeta = (SectionTable, LBS.ByteString)
getsecandinfo :: Filename -> Secname -> IO ((Maybe SectionMeta, MachineType))
getsecandinfo fn sn = buildFile fn >>= \pefile -> return (getsection pefile sn, getmachinetype pefile)
getsec :: Filename -> Secname -> IO (Maybe SectionMeta)
getsec fn sn = buildFile fn >>= \pefile -> return $ getsection pefile sn
getsecs :: Filename -> [SectionMeta]
getsecs fn = unsafePerformIO (buildFile fn >>= \pefile -> return $ (sectionTables.peHeader) pefile)
getary fn = arrayrep $ getsecs fn
getdirs :: Filename -> [DirectoryEntry]
getdirs fn = unsafePerformIO (buildFile fn >>= \pefile -> return $ (dataDirectories.peHeader) pefile)
getsection :: PEFile -> Secname -> Maybe SectionMeta
getsection pefile secn = let sections = (sectionTables.peHeader) pefile in
find (\x -> secn == (sectionHeaderName $ fst x)) sections
getmachinetype :: PEFile -> MachineType
getmachinetype pe = targetMachine $ coffHeader $ peHeader pe
showsections :: Filename -> IO ()
showsections filename = do
pefile <- buildFile filename
let sections = (sectionTables.peHeader) pefile
let coff = (coffHeader.peHeader) pefile
let std = (standardFields.peHeader) pefile
let showme = \x -> (sectionHeaderName $ fst x)
--putStr $ show datadirs
putStr $ show $ coff
putStr $ show $ std
putStr $ show $ map showme sections
--putStr $ show $ (numberOfRVAandSizes.windowsSpecFields.peHeader) pefile
--putStr $ show pefile
return ()
--Import Table Parsing stuff. This should eventually move to the PE library.
type ImportDirectory = [ImportDirectoryEntry]
type ImportLookupTable = [ImportLookupTableEntry]
data ImportDirectoryEntry = ID {
lookupTableRVA :: Word32,
timeStamp :: Word32,
forwarderChain :: Word32,
nameRVA :: Word32,
importAddressTableRVA :: Word32
} | IDNull deriving (Show,Eq)
data HintNameEntry = HNE {
hint :: Word16,
name :: String
} deriving (Show, Eq)
data ImportLookupTableEntry = ILTOrd Word16 | ILTHint Word32 | ILTNull deriving (Show,Eq)
getImpDir :: Get ImportDirectory
getImpDir = do
entry <- get
case (entry) of
IDNull -> return [IDNull]
x -> getImpDir >>= \y -> return (x : y)
getLT :: Get ImportLookupTable
getLT = do
entry <- get
case (entry) of
ILTNull -> return [ILTNull]
x -> getLT >>= \y -> return (x : y)
instance Binary HintNameEntry where
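  -- Editor's note: this `put` relies on Data.Binary's default encodings
  -- (big-endian Word16, length-prefixed [Word8]), while `get` below reads a
  -- little-endian Word16 followed by a string via getAStr, so put and get
  -- are not inverses of each other here.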
put (HNE h n) = let words = (map fromIntegral $ map ord n)::[Word8] in
do
put h
put words
if (length words `mod` 2 == 0)
then put (0x0::Word8)
else return ()
get = do
ordinal <- getWord16le
astr <- getAStr
if (length astr `mod` 2 == 0)
then getWord8 >>= \_ -> return (HNE ordinal astr)
else return (HNE ordinal astr)
instance Binary ImportDirectoryEntry where
put (ID lut ts fc nrva iarva) = put lut >> put ts >> put fc >> put nrva >> put iarva
put (IDNull) = put (0x0::Word32) >> put (0x0::Word32) >> put (0x0::Word32) >> put (0x0::Word32) >> put (0x0::Word32)
get = do
lut <- getWord32le
ts <- getWord32le
fc <- getWord32le
nrva <- getWord32le
iarva <- getWord32le
case (lut + ts + fc + nrva + iarva) of
0 -> return IDNull
_ -> return (ID lut ts fc nrva iarva)
instance Binary ImportLookupTableEntry where
put (ILTOrd ord ) = put (0x80::Word8) >> put ord >> put (0x00::Word8)
put (ILTHint rva) = put (setBit rva 31)
put ILTNull = put (0x0::Word32)
get = do
word <- getWord32le
case (word) of
0 -> return ILTNull
_ -> case (testBit word 31) of
True -> return $ ILTOrd $ fromIntegral word
False -> return $ ILTHint (clearBit word 31)
--More PE Data structure stuff
importInfo fn = importInfo' (getsecs fn) (getdirs fn)
importInfo' secns dirs = map infos ientries
where ary = arrayrep secns
ientries = delete IDNull $ buildImport ary dirs
lookups = (buildLookup ary)
hnts = (buildHintName ary)
infos = \x -> (getdllname ary x, map name $ map hnts $ delete ILTNull $ lookups x)
--Build the Import table.
buildImport ary dirs = runGet getImpDir bstr
where itaddr = virtualAddr (dirs !! 1)
bstr = grabAt (fromIntegral itaddr) ary
buildLookup ary ientry = runGet getLT (grabAt (fromIntegral rva) ary)
where rva = lookupTableRVA ientry
buildHintName ary ltentry = case (ltentry) of
(ILTHint x) -> runGet hnte (grabAt (fromIntegral x) ary)
(ILTNull) -> error "Null encountered"
_ -> error "Not working with ords today"
where hnte = get >>= \x -> return x::Get HintNameEntry
getdllname ary ientry = case (ientry) of
(IDNull) -> ""
_ -> runGet getAStr (grabAt (fromIntegral rva) ary)
where rva = nameRVA ientry
--Building an array to represent the file structure
sectoblist (secn, bytes) = let words = LBS.unpack bytes in
let indxs x = x : indxs (x+1) in
zip (indxs $ fromIntegral $ virtualAddress secn) words
arrayrep :: [SectionMeta] -> UArray Word32 Word8
arrayrep secn = array (0,maxaddr) words
where
words = concat $ map sectoblist secn
maxaddr = maximum $ map fst words
--Ask for an address to begin a new head for a bytestring to build from, simple enough.
{-
grabAt :: Word32 -> UArray Word32 Word8 -> LBS.ByteString
grabAt indx ary = LBS.pack $ elems newarray
where maxdx = maximum $ indices ary
newarray = ixmap (0,maxdx-indx) (\i -> i - indx) ary --remap the array
-}
grabAt :: Int -> UArray Word32 Word8 -> LBS.ByteString
grabAt indx ary = LBS.pack $ drop (indx) $ elems ary
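-- Editor's sketch (hypothetical helper, not part of the original module):
-- printing each imported DLL together with the symbol names pulled from it,
-- using importInfo defined above.
showImports :: Filename -> IO ()
showImports fn =
  mapM_ (\(dll, names) -> putStrLn (dll ++ ": " ++ show names)) (importInfo fn)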
|
igraves/peparser-haskell
|
PEFile.hs
|
bsd-3-clause
| 7,355
| 0
| 17
| 2,666
| 2,047
| 1,053
| 994
| 137
| 3
|
-----------------------------------------------------------------------------
-- |
-- Module : Plugins.Monitors.Mem
-- Copyright : (c) Andrea Rossato
-- License : BSD-style (see LICENSE)
--
-- Maintainer : Jose A. Ortega Ruiz <jao@gnu.org>
-- Stability : unstable
-- Portability : unportable
--
-- A memory monitor for Xmobar
--
-----------------------------------------------------------------------------
module Plugins.Monitors.Mem (memConfig, runMem, totalMem, usedMem) where
import Plugins.Monitors.Common
memConfig :: IO MConfig
memConfig = mkMConfig
"Mem: <usedratio>% (<cache>M)" -- template
["usedbar", "freebar", "usedratio", "freeratio", "total",
"free", "buffer", "cache", "rest", "used"] -- available replacements
fileMEM :: IO String
fileMEM = readFile "/proc/meminfo"
parseMEM :: IO [Float]
parseMEM =
do file <- fileMEM
let content = map words $ take 4 $ lines file
[total, free, buffer, cache] = map (\line -> (read $ line !! 1 :: Float) / 1024) content
rest = free + buffer + cache
used = total - rest
usedratio = used / total
freeratio = free / total
       return [usedratio, freeratio, total, free, buffer, cache, rest, used]
totalMem :: IO Float
totalMem = fmap ((*1024) . (!!2)) parseMEM
usedMem :: IO Float
usedMem = fmap ((*1024) . (!!7)) parseMEM
formatMem :: [Float] -> Monitor [String]
formatMem (r:fr:xs) =
do let f = showDigits 0
rr = 100 * r
ub <- showPercentBar rr r
fb <- showPercentBar (100 - rr) (1 - r)
rs <- showPercentWithColors r
fs <- showPercentWithColors fr
s <- mapM (showWithColors f) xs
return (ub:fb:rs:fs:s)
formatMem _ = return $ replicate 10 "N/A"
runMem :: [String] -> Monitor String
runMem _ =
do m <- io parseMEM
l <- formatMem m
parseTemplate l
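-- Editor's note (illustrative configuration, not part of this module): the
-- monitor is typically referenced from an xmobar config roughly like
--   Run Memory ["-t", "Mem: <usedratio>% (<cache>M)"] 10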
|
tsiliakis/xmobar
|
src/Plugins/Monitors/Mem.hs
|
bsd-3-clause
| 1,901
| 0
| 16
| 457
| 568
| 307
| 261
| 39
| 1
|
{-# LANGUAGE OverloadedStrings #-}
-- | The purpose of this test is to make sure that if we run lots of parses on
-- multiple threads, that they all give the correct answers. This is important,
-- because this implementation is imperative code hidden inside an unsafePerformIO.
module Text.XML.Expat.ParallelTest where
import Text.XML.Expat.Tests -- Arbitrary instance
import Text.XML.Expat.ParseFormat (normalizeText)
import Text.XML.Expat.Tree
import Text.XML.Expat.Format
import Control.Concurrent
import Control.Exception
import Control.Monad.State.Strict
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as L
import Test.QuickCheck
import Test.QuickCheck.Gen
import Test.HUnit hiding (Node)
import System.IO
import System.Random
import Test.Framework.Providers.HUnit (hUnitTestToTests)
import Prelude hiding (catch)
tests = hUnitTestToTests $
TestList [
TestLabel "parallel (forkIO)" $ TestCase (testParallel forkOS),
TestLabel "parallel (forkOS)" $ TestCase (testParallel forkOS)
]
chunkSize = 512
breakUp :: B.ByteString -> L.ByteString
breakUp = L.fromChunks . bu
where
bu bs | B.length bs < chunkSize = [bs]
bu bs = bs1:bu bs2
where
(bs1, bs2) = B.splitAt chunkSize bs
nthreads = 5
nloops = 500
testParallel :: (IO () -> IO ThreadId) -> IO ()
testParallel fork = do
resultMVs <- replicateM nthreads $ do
resultMV <- newEmptyMVar
fork $ do
g <- newStdGen
flip evalStateT g $ do
replicateM_ nloops $ do
(g, g2) <- gets split
put g
let treeIn = normalizeText $ unGen (arbitrary :: Gen TNode) g 0
xml = breakUp $ format' treeIn
treeOut = normalizeText $ parseThrowing defaultParseOptions xml
lift $ assertEqual "tree match" treeIn treeOut
`catch` \exc -> do
putStrLn $ "failing XML: "++concat (map B.unpack $ L.toChunks xml)
throwIO (exc :: SomeException)
putMVar resultMV Nothing
`catch` \exc -> do
putMVar resultMV $ Just (exc :: SomeException)
return resultMV
forM_ resultMVs $ \resultMV -> do
mExc <- takeMVar resultMV
case mExc of
Just exc -> throwIO exc
Nothing -> return ()
|
sol/hexpat
|
test/suite/Text/XML/Expat/ParallelTest.hs
|
bsd-3-clause
| 2,435
| 0
| 30
| 702
| 626
| 328
| 298
| 56
| 2
|
-- | See "Control.Super.Monad.Prelude".
module Control.Supermonad.Prelude
( module Control.Super.Monad.Prelude
) where
import Control.Super.Monad.Prelude
|
jbracker/supermonad-plugin
|
src/Control/Supermonad/Prelude.hs
|
bsd-3-clause
| 159
| 0
| 5
| 17
| 25
| 18
| 7
| 3
| 0
|
{-# LANGUAGE OverloadedStrings #-}
module Test.Hspec.AttoparsecSpec where
import Control.Applicative
import Data.Attoparsec.Text
import Data.Text
import Test.Hspec
import Test.Hspec.Attoparsec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "shouldParse" $
it "works on: \"x\" ~> char 'x'" $
("x" :: Text) ~> char 'x'
`shouldParse` 'x'
describe "parseSatisfies" $ do
it "works on: \"x\" and (=='x')" $
("x" :: Text) ~> char 'x'
`parseSatisfies` (=='x')
it "\">>>\" satisfies length == 3 when parser as a list of char" $
(">>>" :: Text) ~> many (char '>')
`parseSatisfies` ((==3) . Prelude.length)
describe "shouldFailOn" $
it "char 'x' fails on \"ha\"" $
char 'x' `shouldFailOn` ("ha" :: Text)
describe "shouldSucceedOn" $
it "char 'x' succeeds on \"x\"" $
char 'x' `shouldSucceedOn` ("x" :: Text)
describe "leavesUnconsumed" $ do
it "works on \"xa\" ~?> char 'x'" $
("xa" :: Text) ~?> char 'x'
`leavesUnconsumed` "a"
it "char 'x' leaves nothing unconsumed on \"x\"" $
("x" :: Text) ~?> char 'x'
`leavesUnconsumed` ""
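  -- Editor's extra example (hypothetical, not in the original suite):
  -- shouldFailOn also covers the empty-input case.
  describe "shouldFailOn (empty input)" $
    it "char 'x' fails on \"\"" $
      char 'x' `shouldFailOn` ("" :: Text)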
|
alpmestan/hspec-attoparsec
|
tests/Test/Hspec/AttoparsecSpec.hs
|
bsd-3-clause
| 1,164
| 0
| 14
| 293
| 322
| 170
| 152
| 35
| 1
|
{-# LANGUAGE TypeFamilies #-}
-- | Doubly-linked list
module Data.Mutable.DLList
( DLList
, asDLList
, module Data.Mutable.Class
) where
import Data.Mutable.Class
data Node s a = Node
a
(MutVar s (Maybe (Node s a))) -- previous
(MutVar s (Maybe (Node s a))) -- next
-- | A doubly-linked list.
--
-- Since 0.3.0
data DLList s a = DLList (MutVar s (Maybe (Node s a))) (MutVar s (Maybe (Node s a)))
-- |
-- Since 0.2.0
asDLList :: DLList s a -> DLList s a
asDLList = id
{-# INLINE asDLList #-}
instance MutableContainer (DLList s a) where
type MCState (DLList s a) = s
instance MutableCollection (DLList s a) where
type CollElement (DLList s a) = a
newColl = do
x <- newRef $! Nothing
y <- newRef $! Nothing
return $! DLList x y
{-# INLINE newColl #-}
instance MutablePopFront (DLList s a) where
popFront (DLList frontRef backRef) = do
mfront <- readRef frontRef
case mfront of
Nothing -> return Nothing
Just (Node val _ nextRef) -> do
mnext <- readRef nextRef
case mnext of
Nothing -> do
writeRef frontRef $! Nothing
writeRef backRef $! Nothing
Just next@(Node _ prevRef _) -> do
writeRef prevRef $! Nothing
writeRef frontRef $! Just next
return $ Just val
{-# INLINE popFront #-}
instance MutablePopBack (DLList s a) where
popBack (DLList frontRef backRef) = do
mback <- readRef backRef
case mback of
Nothing -> return Nothing
Just (Node val prevRef _) -> do
mprev <- readRef prevRef
case mprev of
Nothing -> do
writeRef frontRef $! Nothing
writeRef backRef $! Nothing
Just prev@(Node _ _ nextRef) -> do
writeRef nextRef $! Nothing
writeRef backRef (Just prev)
return $ Just val
{-# INLINE popBack #-}
instance MutablePushFront (DLList s a) where
pushFront (DLList frontRef backRef) val = do
mfront <- readRef frontRef
case mfront of
Nothing -> do
prevRef <- newRef $! Nothing
nextRef <- newRef $! Nothing
let node = Just $ Node val prevRef nextRef
writeRef frontRef node
writeRef backRef node
Just front@(Node _ prevRef _) -> do
prevRefNew <- newRef $! Nothing
nextRef <- newRef $ Just front
let node = Just $ Node val prevRefNew nextRef
writeRef prevRef node
writeRef frontRef node
{-# INLINE pushFront #-}
instance MutablePushBack (DLList s a) where
pushBack (DLList frontRef backRef) val = do
mback <- readRef backRef
case mback of
Nothing -> do
prevRef <- newRef $! Nothing
nextRef <- newRef $! Nothing
let node = Just $! Node val prevRef nextRef
writeRef frontRef $! node
writeRef backRef $! node
Just back@(Node _ _ nextRef) -> do
nextRefNew <- newRef $! Nothing
prevRef <- newRef $! Just back
let node = Just $! Node val prevRef nextRefNew
writeRef nextRef $! node
writeRef backRef $! node
{-# INLINE pushBack #-}
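-- Editor's sketch (hypothetical, not part of the original module): DLList
-- behaves as a deque; this demo assumes the Data.Mutable.Class operations
-- (newColl, pushFront, pushBack, popFront, popBack) specialised to IO.
_dllistDemo :: IO [Maybe Int]
_dllistDemo = do
    dl <- fmap asDLList newColl
    pushBack dl 1
    pushBack dl 2
    pushFront dl 0
    a <- popFront dl -- Just 0
    b <- popBack dl  -- Just 2
    c <- popFront dl -- Just 1
    d <- popFront dl -- Nothing
    return [a, b, c, d]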
|
bitemyapp/mutable-containers
|
Data/Mutable/DLList.hs
|
mit
| 3,574
| 0
| 20
| 1,410
| 1,043
| 498
| 545
| 89
| 1
|
{-# LANGUAGE OverloadedStrings #-}
-- | Fundamental built-in style. Defines no additional extensions or
-- configurations beyond the default printer.
module HIndent.Styles.Fundamental where
import HIndent.Types
-- | Empty state.
data State = State
-- | The printer style.
fundamental :: Style
fundamental =
Style {styleName = "fundamental"
,styleAuthor = "Chris Done"
,styleDescription = "This style adds no extensions to the built-in printer."
,styleInitialState = State
,styleExtenders = []
,styleDefConfig = defaultConfig
,styleCommentPreprocessor = return}
|
lunaris/hindent
|
src/HIndent/Styles/Fundamental.hs
|
bsd-3-clause
| 617
| 0
| 7
| 130
| 80
| 53
| 27
| 13
| 1
|
{-# LANGUAGE CPP #-}
{-
Copyright (C) 2010-2014 John MacFarlane <jgm@berkeley.edu>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.UTF8
Copyright : Copyright (C) 2010-2014 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <jgm@berkeley.edu>
Stability : alpha
Portability : portable
UTF-8 aware string IO functions that will work with GHC 6.10, 6.12, or 7.
-}
module Text.Pandoc.UTF8 ( readFile
, writeFile
, getContents
, putStr
, putStrLn
, hPutStr
, hPutStrLn
, hGetContents
, toString
, fromString
, toStringLazy
, fromStringLazy
, encodePath
, decodeArg
)
where
import System.IO hiding (readFile, writeFile, getContents,
putStr, putStrLn, hPutStr, hPutStrLn, hGetContents)
import Prelude hiding (readFile, writeFile, getContents, putStr, putStrLn)
import qualified System.IO as IO
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text.Encoding as T
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TL
readFile :: FilePath -> IO String
readFile f = do
h <- openFile (encodePath f) ReadMode
hGetContents h
writeFile :: FilePath -> String -> IO ()
writeFile f s = withFile (encodePath f) WriteMode $ \h -> hPutStr h s
getContents :: IO String
getContents = hGetContents stdin
putStr :: String -> IO ()
putStr s = hPutStr stdout s
putStrLn :: String -> IO ()
putStrLn s = hPutStrLn stdout s
hPutStr :: Handle -> String -> IO ()
hPutStr h s = hSetEncoding h utf8 >> IO.hPutStr h s
hPutStrLn :: Handle -> String -> IO ()
hPutStrLn h s = hSetEncoding h utf8 >> IO.hPutStrLn h s
hGetContents :: Handle -> IO String
hGetContents = fmap toString . B.hGetContents
-- hGetContents h = hSetEncoding h utf8_bom
-- >> hSetNewlineMode h universalNewlineMode
-- >> IO.hGetContents h
-- | Drop BOM (byte order marker) if present at beginning of string.
-- Note that Data.Text converts the BOM to code point FEFF, zero-width
-- no-break space, so if the string begins with this we strip it off.
dropBOM :: String -> String
dropBOM ('\xFEFF':xs) = xs
dropBOM xs = xs
-- | Convert UTF8-encoded ByteString to String, also
-- removing '\r' characters.
toString :: B.ByteString -> String
toString = filter (/='\r') . dropBOM . T.unpack . T.decodeUtf8
fromString :: String -> B.ByteString
fromString = T.encodeUtf8 . T.pack
-- | Convert UTF8-encoded ByteString to String, also
-- removing '\r' characters.
toStringLazy :: BL.ByteString -> String
toStringLazy = filter (/='\r') . dropBOM . TL.unpack . TL.decodeUtf8
fromStringLazy :: String -> BL.ByteString
fromStringLazy = TL.encodeUtf8 . TL.pack
encodePath :: FilePath -> FilePath
decodeArg :: String -> String
#if MIN_VERSION_base(4,4,0)
encodePath = id
decodeArg = id
#else
encodePath = B.unpack . fromString
decodeArg = toString . B.pack
#endif
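-- Editor's sketch (hypothetical property, not part of the module): for
-- strings without surrogate code points, decoding what fromString produced
-- gives the input back, modulo the BOM and '\r' stripping done by toString.
_roundTripsVia :: String -> Bool
_roundTripsVia s = toString (fromString s) == filter (/= '\r') (dropBOM s)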
|
peter-fogg/pardoc
|
src/Text/Pandoc/UTF8.hs
|
gpl-2.0
| 3,950
| 0
| 10
| 1,011
| 649
| 369
| 280
| 58
| 1
|
{-# LANGUAGE MultiParamTypeClasses, TypeInType, ConstrainedClassMethods, ScopedTypeVariables #-}
module T16342 where
import Data.Proxy
class C (a::ka) x where
cop :: D a x => x -> Proxy a -> Proxy a
cop _ x = x :: Proxy (a::ka)
class D (b::kb) y where
dop :: C b y => y -> Proxy b -> Proxy b
dop _ x = x :: Proxy (b::kb)
|
sdiehl/ghc
|
testsuite/tests/polykinds/T16342.hs
|
bsd-3-clause
| 333
| 0
| 10
| 76
| 144
| 75
| 69
| -1
| -1
|
{-# OPTIONS -fglasgow-exts #-}
-- Trac #1445
module Bug where
f :: () -> (?p :: ()) => () -> ()
f _ _ = ()
g :: (?p :: ()) => ()
g = f () ()
|
hvr/jhc
|
regress/tests/1_typecheck/2_pass/ghc/uncat/tc230.hs
|
mit
| 145
| 0
| 8
| 42
| 80
| 46
| 34
| -1
| -1
|
module WhereIn1 where
data Tree a = Leaf a | Branch (Tree a) (Tree a)
fringe_global x = fringe x
where
fringe :: Tree a -> [a]
fringe (Leaf x ) = [x]
fringe (Branch left right) = fringe left ++ fringe right
|
kmate/HaRe
|
old/testing/subIntroPattern/WhereIn1.hs
|
bsd-3-clause
| 275
| 0
| 9
| 111
| 104
| 54
| 50
| 6
| 2
|