Dataset columns and types:

| column | type |
|---|---|
| repo_name | string |
| path | string |
| license | string |
| full_code | string |
| full_size | int64 |
| uncommented_code | string |
| uncommented_size | int64 |
| function_only_code | string |
| function_only_size | int64 |
| is_commented | bool |
| is_signatured | bool |
| n_ast_errors | int64 |
| ast_max_depth | int64 |
| n_whitespaces | int64 |
| n_ast_nodes | int64 |
| n_ast_terminals | int64 |
| n_ast_nonterminals | int64 |
| loc | int64 |
| cycloplexity | int64 |
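For a typed view of the schema, the columns map onto a plain record. The sketch below is an illustration only: the type name `CodeSample`, the field names, and the use of `Maybe` for the columns that appear as null in the rows are assumptions, not something defined by the dataset itself.

```haskell
-- Hypothetical record mirroring the columns above; names and Maybe-wrapping
-- are illustrative, not part of the dataset's own schema.
data CodeSample = CodeSample
  { repoName         :: String
  , path             :: String
  , license          :: String
  , fullCode         :: String
  , fullSize         :: Int
  , uncommentedCode  :: String
  , uncommentedSize  :: Int
  , functionOnlyCode :: String
  , functionOnlySize :: Int
  , isCommented      :: Bool
  , isSignatured     :: Bool
  , nAstErrors       :: Int
  , astMaxDepth      :: Int
  , nWhitespaces     :: Int
  , nAstNodes        :: Int
  , nAstTerminals    :: Int
  , nAstNonterminals :: Int
  , loc              :: Maybe Int   -- null in the rows shown below
  , cycloplexity     :: Maybe Int   -- null in the rows shown below
  } deriving (Show, Eq)
```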
asib/cryptopals-haskell
|
s2ch14.hs
|
mit
|
main :: IO ()
main = do
k <- getEntropy 16
let key = aesKey k
blockSize = getBlockSizeSimple aesByteAtATimeECBEncryptHarder key
ecbMode = isAESInECB . aesByteAtATimeECBEncryptSimple key $ C8.pack "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
(pad, prefix) = findPrefixLength aesByteAtATimeECBEncryptHarder key
{-print (pad,prefix)-}
putStrLn . C8.unpack . aesByteAtATimeECBDecrypt (aesByteAtATimeECBEncryptHarderWrapper pad prefix) blockSize $ key
| 530
|
main :: IO ()
main = do
k <- getEntropy 16
let key = aesKey k
blockSize = getBlockSizeSimple aesByteAtATimeECBEncryptHarder key
ecbMode = isAESInECB . aesByteAtATimeECBEncryptSimple key $ C8.pack "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
(pad, prefix) = findPrefixLength aesByteAtATimeECBEncryptHarder key
{-print (pad,prefix)-}
putStrLn . C8.unpack . aesByteAtATimeECBDecrypt (aesByteAtATimeECBEncryptHarderWrapper pad prefix) blockSize $ key
| 530
|
main = do
k <- getEntropy 16
let key = aesKey k
blockSize = getBlockSizeSimple aesByteAtATimeECBEncryptHarder key
ecbMode = isAESInECB . aesByteAtATimeECBEncryptSimple key $ C8.pack "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
(pad, prefix) = findPrefixLength aesByteAtATimeECBEncryptHarder key
{-print (pad,prefix)-}
putStrLn . C8.unpack . aesByteAtATimeECBDecrypt (aesByteAtATimeECBEncryptHarderWrapper pad prefix) blockSize $ key
| 516
| false
| true
| 0
| 12
| 74
| 123
| 57
| 66
| null | null |
apyrgio/ganeti
|
src/Ganeti/Constants.hs
|
bsd-2-clause
|
hvKvmCdromDiskType :: String
hvKvmCdromDiskType = "cdrom_disk_type"
| 67
|
hvKvmCdromDiskType :: String
hvKvmCdromDiskType = "cdrom_disk_type"
| 67
|
hvKvmCdromDiskType = "cdrom_disk_type"
| 38
| false
| true
| 0
| 4
| 5
| 11
| 6
| 5
| null | null |
higgsd/euler
|
hs/127.hs
|
bsd-2-clause
|
rr = sortBy (compare `on` snd) $ drop 1 $ radicalSieve cc
| 57
|
rr = sortBy (compare `on` snd) $ drop 1 $ radicalSieve cc
| 57
|
rr = sortBy (compare `on` snd) $ drop 1 $ radicalSieve cc
| 57
| false
| false
| 1
| 8
| 11
| 35
| 16
| 19
| null | null |
tsani/tortoise
|
src/Language/Prism/Module.hs
|
mit
|
(.=!) :: Name -> Value -> Declaration
(.=!) = constantDecl
| 58
|
(.=!) :: Name -> Value -> Declaration
(.=!) = constantDecl
| 58
|
(.=!) = constantDecl
| 20
| false
| true
| 0
| 6
| 9
| 23
| 14
| 9
| null | null |
HJvT/hdirect
|
src/Parser.hs
|
bsd-3-clause
|
happyReduce_357 = happySpecReduce_1 115# happyReduction_357
| 59
|
happyReduce_357 = happySpecReduce_1 115# happyReduction_357
| 59
|
happyReduce_357 = happySpecReduce_1 115# happyReduction_357
| 59
| false
| false
| 0
| 5
| 4
| 11
| 5
| 6
| null | null |
nomeata/cryptonite
|
Crypto/Cipher/Camellia/Primitive.hs
|
bsd-3-clause
|
getKeyK Decrypt key i = k key `arrayRead64` (17 - i)
| 52
|
getKeyK Decrypt key i = k key `arrayRead64` (17 - i)
| 52
|
getKeyK Decrypt key i = k key `arrayRead64` (17 - i)
| 52
| false
| false
| 0
| 7
| 11
| 31
| 15
| 16
| null | null |
mydaum/cabal
|
cabal-install/Distribution/Client/CmdTest.hs
|
bsd-3-clause
|
selectComponentTarget pkgid cname subtarget _
= Left (TargetProblemIsSubComponent pkgid cname subtarget)
| 106
|
selectComponentTarget pkgid cname subtarget _
= Left (TargetProblemIsSubComponent pkgid cname subtarget)
| 106
|
selectComponentTarget pkgid cname subtarget _
= Left (TargetProblemIsSubComponent pkgid cname subtarget)
| 106
| false
| false
| 0
| 6
| 12
| 28
| 13
| 15
| null | null |
Thhethssmuz/ppp
|
src/PreProcess.hs
|
mit
|
processMacros :: [String] -> IO String
processMacros [] = return ""
| 67
|
processMacros :: [String] -> IO String
processMacros [] = return ""
| 67
|
processMacros [] = return ""
| 28
| false
| true
| 0
| 6
| 10
| 29
| 14
| 15
| null | null |
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/ECSTaskDefinitionTmpfs.hs
|
mit
|
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-taskdefinition-tmpfs.html#cfn-ecs-taskdefinition-tmpfs-containerpath
ecstdtContainerPath :: Lens' ECSTaskDefinitionTmpfs (Maybe (Val Text))
ecstdtContainerPath = lens _eCSTaskDefinitionTmpfsContainerPath (\s a -> s { _eCSTaskDefinitionTmpfsContainerPath = a })
| 346
|
ecstdtContainerPath :: Lens' ECSTaskDefinitionTmpfs (Maybe (Val Text))
ecstdtContainerPath = lens _eCSTaskDefinitionTmpfsContainerPath (\s a -> s { _eCSTaskDefinitionTmpfsContainerPath = a })
| 191
|
ecstdtContainerPath = lens _eCSTaskDefinitionTmpfsContainerPath (\s a -> s { _eCSTaskDefinitionTmpfsContainerPath = a })
| 120
| true
| true
| 0
| 9
| 22
| 52
| 28
| 24
| null | null |
GaloisInc/halvm-ghc
|
compiler/types/Coercion.hs
|
bsd-3-clause
|
pprParendCo co = ppr_co TyConPrec co
| 36
|
pprParendCo co = ppr_co TyConPrec co
| 36
|
pprParendCo co = ppr_co TyConPrec co
| 36
| false
| false
| 1
| 5
| 5
| 17
| 6
| 11
| null | null |
KarimxD/Evolverbetert
|
src/MyRandom.hs
|
mit
|
-- {-# INLINE runRand #-}
evalRand :: Rand a -> PureMT -> a
evalRand = evalState
| 81
|
evalRand :: Rand a -> PureMT -> a
evalRand = evalState
| 54
|
evalRand = evalState
| 20
| true
| true
| 0
| 8
| 16
| 30
| 13
| 17
| null | null |
Chouffe/skelme
|
test/SpecUtils.hs
|
bsd-3-clause
|
symbolGen :: Gen Char
symbolGen = elements "!#$%&|*+-/:<=>?@^_~"
| 64
|
symbolGen :: Gen Char
symbolGen = elements "!#$%&|*+-/:<=>?@^_~"
| 64
|
symbolGen = elements "!#$%&|*+-/:<=>?@^_~"
| 42
| false
| true
| 0
| 5
| 7
| 17
| 8
| 9
| null | null |
alexander-at-github/eta
|
compiler/ETA/Interactive/ObjLink.hs
|
bsd-3-clause
|
loadObj :: String -> IO ()
loadObj str = do
withFilePath str $ \c_str -> do
r <- c_loadObj c_str
when (r == 0) (panic ("loadObj " ++ show str ++ ": failed"))
| 170
|
loadObj :: String -> IO ()
loadObj str = do
withFilePath str $ \c_str -> do
r <- c_loadObj c_str
when (r == 0) (panic ("loadObj " ++ show str ++ ": failed"))
| 170
|
loadObj str = do
withFilePath str $ \c_str -> do
r <- c_loadObj c_str
when (r == 0) (panic ("loadObj " ++ show str ++ ": failed"))
| 143
| false
| true
| 0
| 18
| 45
| 88
| 40
| 48
| null | null |
Lainepress/hledger
|
hledger-lib/Hledger/Data/Posting.hs
|
gpl-3.0
|
-- | Join account names into one. If any of them has () or [] posting type
-- indicators, these (the first type encountered) will also be applied to
-- the resulting account name.
concatAccountNames :: [AccountName] -> AccountName
concatAccountNames as = accountNameWithPostingType t $ intercalate ":" $ map accountNameWithoutPostingType as
where t = headDef RegularPosting $ filter (/= RegularPosting) $ map accountNamePostingType as
-- | Rewrite an account name using the first applicable alias from the given list, if any.
| 530
|
concatAccountNames :: [AccountName] -> AccountName
concatAccountNames as = accountNameWithPostingType t $ intercalate ":" $ map accountNameWithoutPostingType as
where t = headDef RegularPosting $ filter (/= RegularPosting) $ map accountNamePostingType as
-- | Rewrite an account name using the first applicable alias from the given list, if any.
| 350
|
concatAccountNames as = accountNameWithPostingType t $ intercalate ":" $ map accountNameWithoutPostingType as
where t = headDef RegularPosting $ filter (/= RegularPosting) $ map accountNamePostingType as
-- | Rewrite an account name using the first applicable alias from the given list, if any.
| 299
| true
| true
| 1
| 8
| 85
| 83
| 39
| 44
| null | null |
forked-upstream-packages-for-ghcjs/ghc
|
compiler/utils/Util.hs
|
bsd-3-clause
|
golden :: Int32
golden = 1013904242
| 35
|
golden :: Int32
golden = 1013904242
| 35
|
golden = 1013904242
| 19
| false
| true
| 0
| 6
| 5
| 18
| 7
| 11
| null | null |
rueshyna/gogol
|
gogol-tagmanager/gen/Network/Google/Resource/TagManager/Accounts/Containers/Variables/Update.hs
|
mpl-2.0
|
-- | Multipart request metadata.
acvuPayload :: Lens' AccountsContainersVariablesUpdate Variable
acvuPayload
= lens _acvuPayload (\ s a -> s{_acvuPayload = a})
| 161
|
acvuPayload :: Lens' AccountsContainersVariablesUpdate Variable
acvuPayload
= lens _acvuPayload (\ s a -> s{_acvuPayload = a})
| 128
|
acvuPayload
= lens _acvuPayload (\ s a -> s{_acvuPayload = a})
| 64
| true
| true
| 0
| 9
| 22
| 42
| 22
| 20
| null | null |
myShoggoth/shakespeare
|
Text/Hamlet.hs
|
mit
|
bindingPattern :: Binding -> Q (Pat, [(Ident, Exp)])
bindingPattern (BindAs i@(Ident s) b) = do
name <- newName s
(pattern, scope) <- bindingPattern b
return (AsP name pattern, (i, VarE name):scope)
| 210
|
bindingPattern :: Binding -> Q (Pat, [(Ident, Exp)])
bindingPattern (BindAs i@(Ident s) b) = do
name <- newName s
(pattern, scope) <- bindingPattern b
return (AsP name pattern, (i, VarE name):scope)
| 210
|
bindingPattern (BindAs i@(Ident s) b) = do
name <- newName s
(pattern, scope) <- bindingPattern b
return (AsP name pattern, (i, VarE name):scope)
| 157
| false
| true
| 0
| 11
| 42
| 109
| 56
| 53
| null | null |
vTurbine/ghc
|
compiler/ghci/ByteCodeGen.hs
|
bsd-3-clause
|
getCurrentModule :: BcM Module
getCurrentModule = BcM $ \st -> return (st, thisModule st)
| 89
|
getCurrentModule :: BcM Module
getCurrentModule = BcM $ \st -> return (st, thisModule st)
| 89
|
getCurrentModule = BcM $ \st -> return (st, thisModule st)
| 58
| false
| true
| 0
| 9
| 13
| 35
| 18
| 17
| null | null |
DaMSL/K3
|
src/Language/K3/Parser/SQL.hs
|
apache-2.0
|
fieldE (Just (Right tp)) _ e = projectPathE e tp
| 48
|
fieldE (Just (Right tp)) _ e = projectPathE e tp
| 48
|
fieldE (Just (Right tp)) _ e = projectPathE e tp
| 48
| false
| false
| 0
| 9
| 9
| 30
| 14
| 16
| null | null |
Shumush/SNIHs
|
src/Text/Shaun/Sweeper.hs
|
gpl-3.0
|
getObject s (SObject o) = case lookup s o of
Nothing -> throwM AttributeNotFound
Just r -> return r
| 103
|
getObject s (SObject o) = case lookup s o of
Nothing -> throwM AttributeNotFound
Just r -> return r
| 103
|
getObject s (SObject o) = case lookup s o of
Nothing -> throwM AttributeNotFound
Just r -> return r
| 103
| false
| false
| 0
| 8
| 22
| 47
| 21
| 26
| null | null |
sopvop/cabal
|
Cabal/Distribution/Verbosity.hs
|
bsd-3-clause
|
parseVerbosity :: ReadP r (Either Int Verbosity)
parseVerbosity = parseIntVerbosity <++ parseStringVerbosity
where
parseIntVerbosity = fmap Left (readS_to_P reads)
parseStringVerbosity = fmap Right $ do
level <- parseVerbosityLevel
_ <- skipSpaces
extras <- sepBy parseExtra skipSpaces
return (foldr (.) id extras (mkVerbosity level))
parseVerbosityLevel = choice
[ string "silent" >> return Silent
, string "normal" >> return Normal
, string "verbose" >> return Verbose
, string "debug" >> return Deafening
, string "deafening" >> return Deafening
]
parseExtra = char '+' >> choice
[ string "callsite" >> return verboseCallSite
, string "callstack" >> return verboseCallStack
]
| 802
|
parseVerbosity :: ReadP r (Either Int Verbosity)
parseVerbosity = parseIntVerbosity <++ parseStringVerbosity
where
parseIntVerbosity = fmap Left (readS_to_P reads)
parseStringVerbosity = fmap Right $ do
level <- parseVerbosityLevel
_ <- skipSpaces
extras <- sepBy parseExtra skipSpaces
return (foldr (.) id extras (mkVerbosity level))
parseVerbosityLevel = choice
[ string "silent" >> return Silent
, string "normal" >> return Normal
, string "verbose" >> return Verbose
, string "debug" >> return Deafening
, string "deafening" >> return Deafening
]
parseExtra = char '+' >> choice
[ string "callsite" >> return verboseCallSite
, string "callstack" >> return verboseCallStack
]
| 802
|
parseVerbosity = parseIntVerbosity <++ parseStringVerbosity
where
parseIntVerbosity = fmap Left (readS_to_P reads)
parseStringVerbosity = fmap Right $ do
level <- parseVerbosityLevel
_ <- skipSpaces
extras <- sepBy parseExtra skipSpaces
return (foldr (.) id extras (mkVerbosity level))
parseVerbosityLevel = choice
[ string "silent" >> return Silent
, string "normal" >> return Normal
, string "verbose" >> return Verbose
, string "debug" >> return Deafening
, string "deafening" >> return Deafening
]
parseExtra = char '+' >> choice
[ string "callsite" >> return verboseCallSite
, string "callstack" >> return verboseCallStack
]
| 753
| false
| true
| 2
| 12
| 219
| 232
| 106
| 126
| null | null |
yalpul/CENG242
|
H99/31-41/p35.hs
|
gpl-3.0
|
-- find the prime factors of a number
p35 n = f n primes where
f n p@(x:xs)
| n < 2 = []
| mod n x == 0 = x : f (div n x) p
| otherwise = f n xs
primes = sieve [2..] where
sieve (x:xs) = x : sieve [ z | z <- xs, mod z x /= 0 ]
| 307
|
p35 n = f n primes where
f n p@(x:xs)
| n < 2 = []
| mod n x == 0 = x : f (div n x) p
| otherwise = f n xs
primes = sieve [2..] where
sieve (x:xs) = x : sieve [ z | z <- xs, mod z x /= 0 ]
| 268
|
p35 n = f n primes where
f n p@(x:xs)
| n < 2 = []
| mod n x == 0 = x : f (div n x) p
| otherwise = f n xs
primes = sieve [2..] where
sieve (x:xs) = x : sieve [ z | z <- xs, mod z x /= 0 ]
| 268
| true
| false
| 0
| 14
| 147
| 157
| 76
| 81
| null | null |
vaibhavsagar/duffer.hs
|
duffer/src/Duffer/Pack/Bits.hs
|
bsd-3-clause
|
setMSB :: Bits t => t -> t
setMSB = (`setBit` 7)
| 48
|
setMSB :: Bits t => t -> t
setMSB = (`setBit` 7)
| 48
|
setMSB = (`setBit` 7)
| 21
| false
| true
| 0
| 8
| 11
| 35
| 17
| 18
| null | null |
wavewave/madgraph-auto-dataset
|
src/HEP/Automation/MadGraph/Dataset/Set20110303set1.hs
|
gpl-3.0
|
psetuplist :: [ProcessSetup]
psetuplist = [ psetup_trip_ttbar01j ]
| 66
|
psetuplist :: [ProcessSetup]
psetuplist = [ psetup_trip_ttbar01j ]
| 66
|
psetuplist = [ psetup_trip_ttbar01j ]
| 37
| false
| true
| 0
| 5
| 7
| 17
| 10
| 7
| null | null |
ddssff/lens
|
src/Control/Lens/TH.hs
|
bsd-3-clause
|
-- | Generate "simple" optics even when type-changing optics are possible.
-- (e.g. 'Lens'' instead of 'Lens')
simpleLenses :: Lens' LensRules Bool
simpleLenses f r = fmap (\x -> r { _simpleLenses = x}) (f (_simpleLenses r))
| 224
|
simpleLenses :: Lens' LensRules Bool
simpleLenses f r = fmap (\x -> r { _simpleLenses = x}) (f (_simpleLenses r))
| 113
|
simpleLenses f r = fmap (\x -> r { _simpleLenses = x}) (f (_simpleLenses r))
| 76
| true
| true
| 0
| 9
| 36
| 56
| 30
| 26
| null | null |
romanb/amazonka
|
amazonka-ec2/gen/Network/AWS/EC2/RegisterImage.hs
|
mpl-2.0
|
-- | A description for your AMI.
ri1Description :: Lens' RegisterImage (Maybe Text)
ri1Description = lens _ri1Description (\s a -> s { _ri1Description = a })
| 157
|
ri1Description :: Lens' RegisterImage (Maybe Text)
ri1Description = lens _ri1Description (\s a -> s { _ri1Description = a })
| 124
|
ri1Description = lens _ri1Description (\s a -> s { _ri1Description = a })
| 73
| true
| true
| 0
| 9
| 25
| 46
| 25
| 21
| null | null |
AndrewRademacher/dp-tests
|
src/Supervisor.hs
|
mit
|
-- END: Clock Process
rtable :: RemoteTable
rtable = Main.__remoteTable initRemoteTable
| 88
|
rtable :: RemoteTable
rtable = Main.__remoteTable initRemoteTable
| 65
|
rtable = Main.__remoteTable initRemoteTable
| 43
| true
| true
| 0
| 6
| 11
| 17
| 9
| 8
| null | null |
brendanhay/gogol
|
gogol-searchconsole/gen/Network/Google/SearchConsole/Types/Product.hs
|
mpl-2.0
|
-- | Creates a value of 'SearchAnalyticsQueryRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'saqrAggregationType'
--
-- * 'saqrDataState'
--
-- * 'saqrRowLimit'
--
-- * 'saqrEndDate'
--
-- * 'saqrSearchType'
--
-- * 'saqrDimensionFilterGroups'
--
-- * 'saqrStartDate'
--
-- * 'saqrStartRow'
--
-- * 'saqrDimensions'
searchAnalyticsQueryRequest
:: SearchAnalyticsQueryRequest
searchAnalyticsQueryRequest =
SearchAnalyticsQueryRequest'
{ _saqrAggregationType = Nothing
, _saqrDataState = Nothing
, _saqrRowLimit = Nothing
, _saqrEndDate = Nothing
, _saqrSearchType = Nothing
, _saqrDimensionFilterGroups = Nothing
, _saqrStartDate = Nothing
, _saqrStartRow = Nothing
, _saqrDimensions = Nothing
}
| 835
|
searchAnalyticsQueryRequest
:: SearchAnalyticsQueryRequest
searchAnalyticsQueryRequest =
SearchAnalyticsQueryRequest'
{ _saqrAggregationType = Nothing
, _saqrDataState = Nothing
, _saqrRowLimit = Nothing
, _saqrEndDate = Nothing
, _saqrSearchType = Nothing
, _saqrDimensionFilterGroups = Nothing
, _saqrStartDate = Nothing
, _saqrStartRow = Nothing
, _saqrDimensions = Nothing
}
| 424
|
searchAnalyticsQueryRequest =
SearchAnalyticsQueryRequest'
{ _saqrAggregationType = Nothing
, _saqrDataState = Nothing
, _saqrRowLimit = Nothing
, _saqrEndDate = Nothing
, _saqrSearchType = Nothing
, _saqrDimensionFilterGroups = Nothing
, _saqrStartDate = Nothing
, _saqrStartRow = Nothing
, _saqrDimensions = Nothing
}
| 361
| true
| true
| 0
| 6
| 154
| 88
| 64
| 24
| null | null |
alexvong1995/pandoc
|
src/Text/Pandoc/Readers/MediaWiki.hs
|
gpl-2.0
|
syntaxhighlight :: String -> [Attribute String] -> MWParser Blocks
syntaxhighlight tag attrs = try $ do
let mblang = lookup "lang" attrs
let mbstart = lookup "start" attrs
let mbline = lookup "line" attrs
let classes = maybe [] (:[]) mblang ++ maybe [] (const ["numberLines"]) mbline
let kvs = maybe [] (\x -> [("startFrom",x)]) mbstart
contents <- charsInTags tag
return $ B.codeBlockWith ("",classes,kvs) $ trimCode contents
| 440
|
syntaxhighlight :: String -> [Attribute String] -> MWParser Blocks
syntaxhighlight tag attrs = try $ do
let mblang = lookup "lang" attrs
let mbstart = lookup "start" attrs
let mbline = lookup "line" attrs
let classes = maybe [] (:[]) mblang ++ maybe [] (const ["numberLines"]) mbline
let kvs = maybe [] (\x -> [("startFrom",x)]) mbstart
contents <- charsInTags tag
return $ B.codeBlockWith ("",classes,kvs) $ trimCode contents
| 440
|
syntaxhighlight tag attrs = try $ do
let mblang = lookup "lang" attrs
let mbstart = lookup "start" attrs
let mbline = lookup "line" attrs
let classes = maybe [] (:[]) mblang ++ maybe [] (const ["numberLines"]) mbline
let kvs = maybe [] (\x -> [("startFrom",x)]) mbstart
contents <- charsInTags tag
return $ B.codeBlockWith ("",classes,kvs) $ trimCode contents
| 373
| false
| true
| 6
| 10
| 80
| 183
| 95
| 88
| null | null |
Teaspot-Studio/Urho3D-Haskell
|
app/sample01/Main.hs
|
mit
|
main :: IO ()
main = withObject () $ \cntx -> do
newSample cntx "HelloWorld" joysticPatch customStart >>= runSample
-- | Setup after engine initialization and before running the main loop.
| 192
|
main :: IO ()
main = withObject () $ \cntx -> do
newSample cntx "HelloWorld" joysticPatch customStart >>= runSample
-- | Setup after engine initialization and before running the main loop.
| 192
|
main = withObject () $ \cntx -> do
newSample cntx "HelloWorld" joysticPatch customStart >>= runSample
-- | Setup after engine initialization and before running the main loop.
| 178
| false
| true
| 2
| 9
| 34
| 54
| 24
| 30
| null | null |
Fermat/higher-rank
|
src/Syntax.hs
|
bsd-3-clause
|
erase (App a1 a2) = App (erase a1) (erase a2)
| 45
|
erase (App a1 a2) = App (erase a1) (erase a2)
| 45
|
erase (App a1 a2) = App (erase a1) (erase a2)
| 45
| false
| false
| 0
| 7
| 9
| 34
| 16
| 18
| null | null |
azadbolour/boardgame
|
haskell-server/test/BoardGame/Server/Domain/CrossWordFinderSpec.hs
|
agpl-3.0
|
pce :: Char -> Maybe Piece
pce s = Just $ Piece s ""
| 52
|
pce :: Char -> Maybe Piece
pce s = Just $ Piece s ""
| 52
|
pce s = Just $ Piece s ""
| 25
| false
| true
| 0
| 7
| 13
| 34
| 15
| 19
| null | null |
silkapp/pickler.js
|
src/Pickler.hs
|
bsd-3-clause
|
fmap :: (b -> o) -> Point i j b -> Point i j o
fmap = (<$>)
| 59
|
fmap :: (b -> o) -> Point i j b -> Point i j o
fmap = (<$>)
| 59
|
fmap = (<$>)
| 12
| false
| true
| 0
| 7
| 17
| 42
| 22
| 20
| null | null |
ghcjs/jsaddle-dom
|
src/JSDOM/Generated/SVGTransform.hs
|
mit
|
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransform.type Mozilla SVGTransform.type documentation>
getType :: (MonadDOM m) => SVGTransform -> m Word
getType self
= liftDOM (round <$> ((self ^. js "type") >>= valToNumber))
| 239
|
getType :: (MonadDOM m) => SVGTransform -> m Word
getType self
= liftDOM (round <$> ((self ^. js "type") >>= valToNumber))
| 124
|
getType self
= liftDOM (round <$> ((self ^. js "type") >>= valToNumber))
| 74
| true
| true
| 0
| 12
| 29
| 58
| 30
| 28
| null | null |
crdueck/rgb-gen
|
RGB.hs
|
bsd-3-clause
|
neighbours :: DIM2 -> [DIM2]
neighbours (Z :. y :. x) =
[ Z :. y + dy :. x + dx | dx <- [-1..1], dy <- [-1..1], dx /= 0 || dy /= 0 ]
| 136
|
neighbours :: DIM2 -> [DIM2]
neighbours (Z :. y :. x) =
[ Z :. y + dy :. x + dx | dx <- [-1..1], dy <- [-1..1], dx /= 0 || dy /= 0 ]
| 136
|
neighbours (Z :. y :. x) =
[ Z :. y + dy :. x + dx | dx <- [-1..1], dy <- [-1..1], dx /= 0 || dy /= 0 ]
| 107
| false
| true
| 0
| 9
| 40
| 97
| 51
| 46
| null | null |
unthingable/Tidal
|
Sound/Tidal/Dirt.hs
|
gpl-3.0
|
pan = makeF dirt "pan"
| 31
|
pan = makeF dirt "pan"
| 31
|
pan = makeF dirt "pan"
| 31
| false
| false
| 1
| 5
| 13
| 16
| 5
| 11
| null | null |
brendanhay/gogol
|
gogol-cloudshell/gen/Network/Google/CloudShell/Types/Product.hs
|
mpl-2.0
|
-- | Public keys that should be added to the environment before it is
-- started.
serPublicKeys :: Lens' StartEnvironmentRequest [Text]
serPublicKeys
= lens _serPublicKeys
(\ s a -> s{_serPublicKeys = a})
. _Default
. _Coerce
| 245
|
serPublicKeys :: Lens' StartEnvironmentRequest [Text]
serPublicKeys
= lens _serPublicKeys
(\ s a -> s{_serPublicKeys = a})
. _Default
. _Coerce
| 163
|
serPublicKeys
= lens _serPublicKeys
(\ s a -> s{_serPublicKeys = a})
. _Default
. _Coerce
| 109
| true
| true
| 2
| 8
| 55
| 58
| 29
| 29
| null | null |
brendanhay/gogol
|
gogol-vision/gen/Network/Google/Vision/Types/Product.hs
|
mpl-2.0
|
-- | RGB color values with their score and pixel fraction.
dcaColors :: Lens' DominantColorsAnnotation [ColorInfo]
dcaColors
= lens _dcaColors (\ s a -> s{_dcaColors = a}) .
_Default
. _Coerce
| 206
|
dcaColors :: Lens' DominantColorsAnnotation [ColorInfo]
dcaColors
= lens _dcaColors (\ s a -> s{_dcaColors = a}) .
_Default
. _Coerce
| 147
|
dcaColors
= lens _dcaColors (\ s a -> s{_dcaColors = a}) .
_Default
. _Coerce
| 91
| true
| true
| 2
| 9
| 44
| 58
| 28
| 30
| null | null |
mariefarrell/Hets
|
OWL2/ShipSyntax.hs
|
gpl-2.0
|
tbox :: CharParser st TBox
tbox = (key "Disjoint" >> fmap DisjointCs
(parent $ concept <:> many (commaP >> concept)))
<|> try (liftM2 ConceptDecl concept
(liftM2 ConceptRel eqOrLess concept
<|> fmap ADTCons
((tryString "::=" >> skip >> sepBy tboxCons (skipChar '|'))
<|> (char ':' >> pzero)
<|> (string "==" >> pzero)
<|> (string "!=" >> pzero)
<|> return [])))
| 409
|
tbox :: CharParser st TBox
tbox = (key "Disjoint" >> fmap DisjointCs
(parent $ concept <:> many (commaP >> concept)))
<|> try (liftM2 ConceptDecl concept
(liftM2 ConceptRel eqOrLess concept
<|> fmap ADTCons
((tryString "::=" >> skip >> sepBy tboxCons (skipChar '|'))
<|> (char ':' >> pzero)
<|> (string "==" >> pzero)
<|> (string "!=" >> pzero)
<|> return [])))
| 409
|
tbox = (key "Disjoint" >> fmap DisjointCs
(parent $ concept <:> many (commaP >> concept)))
<|> try (liftM2 ConceptDecl concept
(liftM2 ConceptRel eqOrLess concept
<|> fmap ADTCons
((tryString "::=" >> skip >> sepBy tboxCons (skipChar '|'))
<|> (char ':' >> pzero)
<|> (string "==" >> pzero)
<|> (string "!=" >> pzero)
<|> return [])))
| 382
| false
| true
| 3
| 20
| 106
| 170
| 84
| 86
| null | null |
dhrosa/shiny
|
Shiny/FS.hs
|
gpl-3.0
|
helloPath :: FilePath
helloPath = "/hello"
| 42
|
helloPath :: FilePath
helloPath = "/hello"
| 42
|
helloPath = "/hello"
| 20
| false
| true
| 0
| 6
| 5
| 18
| 7
| 11
| null | null |
trskop/cabal
|
cabal-install/Main.hs
|
bsd-3-clause
|
replAction :: (ReplFlags, BuildExFlags) -> [String] -> Action
replAction (replFlags, buildExFlags) extraArgs globalFlags = do
cwd <- getCurrentDirectory
pkgDesc <- findPackageDesc cwd
either (const onNoPkgDesc) (const onPkgDesc) pkgDesc
where
verbosity = fromFlagOrDefault normal (replVerbosity replFlags)
-- There is a .cabal file in the current directory: start a REPL and load
-- the project's modules.
onPkgDesc = do
let noAddSource = case replReload replFlags of
Flag True -> SkipAddSourceDepsCheck
_ -> fromFlagOrDefault DontSkipAddSourceDepsCheck
(buildOnly buildExFlags)
-- Calls 'configureAction' to do the real work, so nothing special has to
-- be done to support sandboxes.
(useSandbox, _config, distPref) <-
reconfigure verbosity (replDistPref replFlags)
mempty [] globalFlags noAddSource NoFlag
(const Nothing)
let progConf = defaultProgramConfiguration
setupOptions = defaultSetupScriptOptions
{ useCabalVersion = orLaterVersion $ Version [1,18,0] []
, useDistPref = distPref
}
replFlags' = replFlags
{ replVerbosity = toFlag verbosity
, replDistPref = toFlag distPref
}
maybeWithSandboxDirOnSearchPath useSandbox $
setupWrapper verbosity setupOptions Nothing
(Cabal.replCommand progConf) (const replFlags') extraArgs
-- No .cabal file in the current directory: just start the REPL (possibly
-- using the sandbox package DB).
onNoPkgDesc = do
(_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags
let configFlags = savedConfigureFlags config
(comp, _platform, programDb) <- configCompilerAux' configFlags
programDb' <- reconfigurePrograms verbosity
(replProgramPaths replFlags)
(replProgramArgs replFlags)
programDb
startInterpreter verbosity programDb' comp (configPackageDB' configFlags)
-- | Re-configure the package in the current directory if needed. Deciding
-- when to reconfigure and with which options is convoluted:
--
-- If we are reconfiguring, we must always run @configure@ with the
-- verbosity option we are given; however, that a previous configuration
-- uses a different verbosity setting is not reason enough to reconfigure.
--
-- The package should be configured to use the same \"dist\" prefix as
-- given to the @build@ command, otherwise the build will probably
-- fail. Not only does this determine the \"dist\" prefix setting if we
-- need to reconfigure anyway, but an existing configuration should be
-- invalidated if its \"dist\" prefix differs.
--
-- If the package has never been configured (i.e., there is no
-- LocalBuildInfo), we must configure first, using the default options.
--
-- If the package has been configured, there will be a 'LocalBuildInfo'.
-- If there no package description file, we assume that the
-- 'PackageDescription' is up to date, though the configuration may need
-- to be updated for other reasons (see above). If there is a package
-- description file, and it has been modified since the 'LocalBuildInfo'
-- was generated, then we need to reconfigure.
--
-- The caller of this function may also have specific requirements
-- regarding the flags the last configuration used. For example,
-- 'testAction' requires that the package be configured with test suites
-- enabled. The caller may pass the required settings to this function
-- along with a function to check the validity of the saved 'ConfigFlags';
-- these required settings will be checked first upon determining that
-- a previous configuration exists.
| 3861
|
replAction :: (ReplFlags, BuildExFlags) -> [String] -> Action
replAction (replFlags, buildExFlags) extraArgs globalFlags = do
cwd <- getCurrentDirectory
pkgDesc <- findPackageDesc cwd
either (const onNoPkgDesc) (const onPkgDesc) pkgDesc
where
verbosity = fromFlagOrDefault normal (replVerbosity replFlags)
-- There is a .cabal file in the current directory: start a REPL and load
-- the project's modules.
onPkgDesc = do
let noAddSource = case replReload replFlags of
Flag True -> SkipAddSourceDepsCheck
_ -> fromFlagOrDefault DontSkipAddSourceDepsCheck
(buildOnly buildExFlags)
-- Calls 'configureAction' to do the real work, so nothing special has to
-- be done to support sandboxes.
(useSandbox, _config, distPref) <-
reconfigure verbosity (replDistPref replFlags)
mempty [] globalFlags noAddSource NoFlag
(const Nothing)
let progConf = defaultProgramConfiguration
setupOptions = defaultSetupScriptOptions
{ useCabalVersion = orLaterVersion $ Version [1,18,0] []
, useDistPref = distPref
}
replFlags' = replFlags
{ replVerbosity = toFlag verbosity
, replDistPref = toFlag distPref
}
maybeWithSandboxDirOnSearchPath useSandbox $
setupWrapper verbosity setupOptions Nothing
(Cabal.replCommand progConf) (const replFlags') extraArgs
-- No .cabal file in the current directory: just start the REPL (possibly
-- using the sandbox package DB).
onNoPkgDesc = do
(_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags
let configFlags = savedConfigureFlags config
(comp, _platform, programDb) <- configCompilerAux' configFlags
programDb' <- reconfigurePrograms verbosity
(replProgramPaths replFlags)
(replProgramArgs replFlags)
programDb
startInterpreter verbosity programDb' comp (configPackageDB' configFlags)
-- | Re-configure the package in the current directory if needed. Deciding
-- when to reconfigure and with which options is convoluted:
--
-- If we are reconfiguring, we must always run @configure@ with the
-- verbosity option we are given; however, that a previous configuration
-- uses a different verbosity setting is not reason enough to reconfigure.
--
-- The package should be configured to use the same \"dist\" prefix as
-- given to the @build@ command, otherwise the build will probably
-- fail. Not only does this determine the \"dist\" prefix setting if we
-- need to reconfigure anyway, but an existing configuration should be
-- invalidated if its \"dist\" prefix differs.
--
-- If the package has never been configured (i.e., there is no
-- LocalBuildInfo), we must configure first, using the default options.
--
-- If the package has been configured, there will be a 'LocalBuildInfo'.
-- If there no package description file, we assume that the
-- 'PackageDescription' is up to date, though the configuration may need
-- to be updated for other reasons (see above). If there is a package
-- description file, and it has been modified since the 'LocalBuildInfo'
-- was generated, then we need to reconfigure.
--
-- The caller of this function may also have specific requirements
-- regarding the flags the last configuration used. For example,
-- 'testAction' requires that the package be configured with test suites
-- enabled. The caller may pass the required settings to this function
-- along with a function to check the validity of the saved 'ConfigFlags';
-- these required settings will be checked first upon determining that
-- a previous configuration exists.
| 3861
|
replAction (replFlags, buildExFlags) extraArgs globalFlags = do
cwd <- getCurrentDirectory
pkgDesc <- findPackageDesc cwd
either (const onNoPkgDesc) (const onPkgDesc) pkgDesc
where
verbosity = fromFlagOrDefault normal (replVerbosity replFlags)
-- There is a .cabal file in the current directory: start a REPL and load
-- the project's modules.
onPkgDesc = do
let noAddSource = case replReload replFlags of
Flag True -> SkipAddSourceDepsCheck
_ -> fromFlagOrDefault DontSkipAddSourceDepsCheck
(buildOnly buildExFlags)
-- Calls 'configureAction' to do the real work, so nothing special has to
-- be done to support sandboxes.
(useSandbox, _config, distPref) <-
reconfigure verbosity (replDistPref replFlags)
mempty [] globalFlags noAddSource NoFlag
(const Nothing)
let progConf = defaultProgramConfiguration
setupOptions = defaultSetupScriptOptions
{ useCabalVersion = orLaterVersion $ Version [1,18,0] []
, useDistPref = distPref
}
replFlags' = replFlags
{ replVerbosity = toFlag verbosity
, replDistPref = toFlag distPref
}
maybeWithSandboxDirOnSearchPath useSandbox $
setupWrapper verbosity setupOptions Nothing
(Cabal.replCommand progConf) (const replFlags') extraArgs
-- No .cabal file in the current directory: just start the REPL (possibly
-- using the sandbox package DB).
onNoPkgDesc = do
(_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags
let configFlags = savedConfigureFlags config
(comp, _platform, programDb) <- configCompilerAux' configFlags
programDb' <- reconfigurePrograms verbosity
(replProgramPaths replFlags)
(replProgramArgs replFlags)
programDb
startInterpreter verbosity programDb' comp (configPackageDB' configFlags)
-- | Re-configure the package in the current directory if needed. Deciding
-- when to reconfigure and with which options is convoluted:
--
-- If we are reconfiguring, we must always run @configure@ with the
-- verbosity option we are given; however, that a previous configuration
-- uses a different verbosity setting is not reason enough to reconfigure.
--
-- The package should be configured to use the same \"dist\" prefix as
-- given to the @build@ command, otherwise the build will probably
-- fail. Not only does this determine the \"dist\" prefix setting if we
-- need to reconfigure anyway, but an existing configuration should be
-- invalidated if its \"dist\" prefix differs.
--
-- If the package has never been configured (i.e., there is no
-- LocalBuildInfo), we must configure first, using the default options.
--
-- If the package has been configured, there will be a 'LocalBuildInfo'.
-- If there no package description file, we assume that the
-- 'PackageDescription' is up to date, though the configuration may need
-- to be updated for other reasons (see above). If there is a package
-- description file, and it has been modified since the 'LocalBuildInfo'
-- was generated, then we need to reconfigure.
--
-- The caller of this function may also have specific requirements
-- regarding the flags the last configuration used. For example,
-- 'testAction' requires that the package be configured with test suites
-- enabled. The caller may pass the required settings to this function
-- along with a function to check the validity of the saved 'ConfigFlags';
-- these required settings will be checked first upon determining that
-- a previous configuration exists.
| 3799
| false
| true
| 0
| 15
| 940
| 439
| 237
| 202
| null | null |
Mathnerd314/atomo
|
src/Atomo/Pattern.hs
|
bsd-3-clause
|
-- | Match multiple patterns with multiple values.
matchAll :: IDs -> Maybe ORef -> [Pattern] -> [Value] -> Bool
matchAll _ _ [] [] = True
| 138
|
matchAll :: IDs -> Maybe ORef -> [Pattern] -> [Value] -> Bool
matchAll _ _ [] [] = True
| 87
|
matchAll _ _ [] [] = True
| 25
| true
| true
| 0
| 9
| 26
| 50
| 26
| 24
| null | null |
brendanhay/gogol
|
gogol-logging/gen/Network/Google/Resource/Logging/Folders/Locations/Operations/Cancel.hs
|
mpl-2.0
|
-- | OAuth access token.
flocAccessToken :: Lens' FoldersLocationsOperationsCancel (Maybe Text)
flocAccessToken
= lens _flocAccessToken
(\ s a -> s{_flocAccessToken = a})
| 178
|
flocAccessToken :: Lens' FoldersLocationsOperationsCancel (Maybe Text)
flocAccessToken
= lens _flocAccessToken
(\ s a -> s{_flocAccessToken = a})
| 153
|
flocAccessToken
= lens _flocAccessToken
(\ s a -> s{_flocAccessToken = a})
| 82
| true
| true
| 0
| 9
| 29
| 48
| 25
| 23
| null | null |
keera-studios/hsQt
|
Qtc/Enums/Gui/QListView.hs
|
bsd-2-clause
|
ieMovement :: Int -> Movement
ieMovement x = QEnum (CMovement x)
| 64
|
ieMovement :: Int -> Movement
ieMovement x = QEnum (CMovement x)
| 64
|
ieMovement x = QEnum (CMovement x)
| 34
| false
| true
| 0
| 7
| 10
| 32
| 14
| 18
| null | null |
vialette/ppattern-tmp
|
src/PPatternExactTrackBenchmark.hs
|
mit
|
options :: Options
options = Options { psize = def &= help "The pattern permutation size"
, qsize = def &= help "The target permutation size"
, pTrack = def &= help "p is the union of at most pTrack increasingss"
, qTrack = def &= help "q is the union of at most qTrack increasings"
, seed = def &= help "The seed of the random generator"
}
&= verbosity
&= summary "ppattern-Track-benchmark v0.1.0.0, (C) Laurent Bulteau, Romeo Rizzi, Stéphane Vialette, 2016-1017"
&= program "ppattern-Track-benchmark"
| 650
|
options :: Options
options = Options { psize = def &= help "The pattern permutation size"
, qsize = def &= help "The target permutation size"
, pTrack = def &= help "p is the union of at most pTrack increasingss"
, qTrack = def &= help "q is the union of at most qTrack increasings"
, seed = def &= help "The seed of the random generator"
}
&= verbosity
&= summary "ppattern-Track-benchmark v0.1.0.0, (C) Laurent Bulteau, Romeo Rizzi, Stéphane Vialette, 2016-1017"
&= program "ppattern-Track-benchmark"
| 650
|
options = Options { psize = def &= help "The pattern permutation size"
, qsize = def &= help "The target permutation size"
, pTrack = def &= help "p is the union of at most pTrack increasingss"
, qTrack = def &= help "q is the union of at most qTrack increasings"
, seed = def &= help "The seed of the random generator"
}
&= verbosity
&= summary "ppattern-Track-benchmark v0.1.0.0, (C) Laurent Bulteau, Romeo Rizzi, Stéphane Vialette, 2016-1017"
&= program "ppattern-Track-benchmark"
| 631
| false
| true
| 0
| 11
| 233
| 96
| 50
| 46
| null | null |
olsner/ghc
|
compiler/stgSyn/StgSyn.hs
|
bsd-3-clause
|
pprStgOp (StgPrimCallOp op)= ppr op
| 35
|
pprStgOp (StgPrimCallOp op)= ppr op
| 35
|
pprStgOp (StgPrimCallOp op)= ppr op
| 35
| false
| false
| 0
| 7
| 4
| 18
| 8
| 10
| null | null |
snoyberg/ghc
|
libraries/base/GHC/Event/PSQ.hs
|
bsd-3-clause
|
lsingleRight _ _ _ _ _ _ = moduleError "lsingleRight" "malformed tree"
| 70
|
lsingleRight _ _ _ _ _ _ = moduleError "lsingleRight" "malformed tree"
| 70
|
lsingleRight _ _ _ _ _ _ = moduleError "lsingleRight" "malformed tree"
| 70
| false
| false
| 1
| 5
| 11
| 21
| 10
| 11
| null | null |
jmn/goodreads
|
app/Main.hs
|
bsd-3-clause
|
parseFindBook :: Parser Command
parseFindBook = FindBook
<$> argument str (metavar "TITLE")
| 95
|
parseFindBook :: Parser Command
parseFindBook = FindBook
<$> argument str (metavar "TITLE")
| 95
|
parseFindBook = FindBook
<$> argument str (metavar "TITLE")
| 63
| false
| true
| 2
| 7
| 15
| 30
| 14
| 16
| null | null |
snoyberg/ghc
|
compiler/basicTypes/Module.hs
|
bsd-3-clause
|
generalizeIndefUnitId :: IndefUnitId -> IndefUnitId
generalizeIndefUnitId IndefUnitId{ indefUnitIdComponentId = cid
, indefUnitIdInsts = insts } =
newIndefUnitId cid (map (\(m,_) -> (m, mkHoleModule m)) insts)
| 246
|
generalizeIndefUnitId :: IndefUnitId -> IndefUnitId
generalizeIndefUnitId IndefUnitId{ indefUnitIdComponentId = cid
, indefUnitIdInsts = insts } =
newIndefUnitId cid (map (\(m,_) -> (m, mkHoleModule m)) insts)
| 246
|
generalizeIndefUnitId IndefUnitId{ indefUnitIdComponentId = cid
, indefUnitIdInsts = insts } =
newIndefUnitId cid (map (\(m,_) -> (m, mkHoleModule m)) insts)
| 194
| false
| true
| 0
| 11
| 61
| 69
| 38
| 31
| null | null |
tonyday567/web-play
|
src/Web/Socket.hs
|
mit
|
echoClient :: WS.ClientApp ()
echoClient conn = do
putStrLn "."
-- Fork a thread that writes WS data to stdout
a <- async $ forever $ do
WS.sendTextData conn (encode [99::Int])
msg <- WS.receiveData conn :: IO ByteString
putStrLn ".1"
WS.sendTextData conn msg
putStrLn $ ".2:" <> show msg
link a
| 351
|
echoClient :: WS.ClientApp ()
echoClient conn = do
putStrLn "."
-- Fork a thread that writes WS data to stdout
a <- async $ forever $ do
WS.sendTextData conn (encode [99::Int])
msg <- WS.receiveData conn :: IO ByteString
putStrLn ".1"
WS.sendTextData conn msg
putStrLn $ ".2:" <> show msg
link a
| 351
|
echoClient conn = do
putStrLn "."
-- Fork a thread that writes WS data to stdout
a <- async $ forever $ do
WS.sendTextData conn (encode [99::Int])
msg <- WS.receiveData conn :: IO ByteString
putStrLn ".1"
WS.sendTextData conn msg
putStrLn $ ".2:" <> show msg
link a
| 321
| false
| true
| 0
| 14
| 106
| 119
| 53
| 66
| null | null |
a143753/AOJ
|
0502.hs
|
apache-2.0
|
ans :: [String] -> [Int]
ans [] = []
| 36
|
ans :: [String] -> [Int]
ans [] = []
| 36
|
ans [] = []
| 11
| false
| true
| 0
| 6
| 8
| 28
| 15
| 13
| null | null |
nevrenato/HetsAlloy
|
Common/Lexer.hs
|
gpl-2.0
|
scanAnyWords :: CharParser st String
scanAnyWords = flat (scanLetterWord <:> many scanUnderlineWord) <?> "words"
| 112
|
scanAnyWords :: CharParser st String
scanAnyWords = flat (scanLetterWord <:> many scanUnderlineWord) <?> "words"
| 112
|
scanAnyWords = flat (scanLetterWord <:> many scanUnderlineWord) <?> "words"
| 75
| false
| true
| 0
| 8
| 13
| 39
| 17
| 22
| null | null |
edom/sound
|
src/Sound/Time.hs
|
bsd-3-clause
|
-- | Convert number of samples to number of seconds.
sampleToSecond :: (Num a) => Precision b a -> a -> a
sampleToSecond !x =
let
!p = _prPeriod x
in
(p *)
| 179
|
sampleToSecond :: (Num a) => Precision b a -> a -> a
sampleToSecond !x =
let
!p = _prPeriod x
in
(p *)
| 126
|
sampleToSecond !x =
let
!p = _prPeriod x
in
(p *)
| 73
| true
| true
| 0
| 9
| 56
| 58
| 28
| 30
| null | null |
caperren/Archives
|
OSU Coursework/CS 381 - Programming Language Fundamentals/Homework 3/MiniMiniLogoSem.perrenc.hs
|
gpl-3.0
|
cmd (Move mx my) (Down, (start_x, start_y)) = ((Down, (mx, my)), Just ((start_x, start_y),(mx, my)))
| 100
|
cmd (Move mx my) (Down, (start_x, start_y)) = ((Down, (mx, my)), Just ((start_x, start_y),(mx, my)))
| 100
|
cmd (Move mx my) (Down, (start_x, start_y)) = ((Down, (mx, my)), Just ((start_x, start_y),(mx, my)))
| 100
| false
| false
| 0
| 8
| 14
| 72
| 42
| 30
| null | null |
tjakway/ghcjvm
|
compiler/nativeGen/SPARC/Ppr.hs
|
bsd-3-clause
|
-- | Pretty print a format for an instruction suffix.
pprFormat :: Format -> SDoc
pprFormat x
= ptext
(case x of
II8 -> sLit "ub"
II16 -> sLit "uh"
II32 -> sLit ""
II64 -> sLit "d"
FF32 -> sLit ""
FF64 -> sLit "d"
_ -> panic "SPARC.Ppr.pprFormat: no match")
| 343
|
pprFormat :: Format -> SDoc
pprFormat x
= ptext
(case x of
II8 -> sLit "ub"
II16 -> sLit "uh"
II32 -> sLit ""
II64 -> sLit "d"
FF32 -> sLit ""
FF64 -> sLit "d"
_ -> panic "SPARC.Ppr.pprFormat: no match")
| 289
|
pprFormat x
= ptext
(case x of
II8 -> sLit "ub"
II16 -> sLit "uh"
II32 -> sLit ""
II64 -> sLit "d"
FF32 -> sLit ""
FF64 -> sLit "d"
_ -> panic "SPARC.Ppr.pprFormat: no match")
| 261
| true
| true
| 0
| 9
| 137
| 92
| 43
| 49
| null | null |
flowbox-public/mainland-pretty
|
Text/PrettyPrint/Mainland.hs
|
bsd-3-clause
|
-- | The document @star@ consists of an asterisk, \"*\".
star :: Doc
star = char '*'
| 84
|
star :: Doc
star = char '*'
| 27
|
star = char '*'
| 15
| true
| true
| 0
| 5
| 16
| 15
| 8
| 7
| null | null |
bacchanalia/KitchenSink
|
KitchenSink/Qualified.hs
|
gpl-3.0
|
-- |'VU.zip5'
vu_zip5 = VU.zip5
| 31
|
vu_zip5 = VU.zip5
| 17
|
vu_zip5 = VU.zip5
| 17
| true
| false
| 0
| 5
| 4
| 9
| 5
| 4
| null | null |
allonsy/chirp
|
src/Message.hs
|
bsd-3-clause
|
parsePrefix :: String -> Either CmdErr (Prefix, String)
parsePrefix str = resPrefix >>= (\val -> Right (val, rest)) where
first = (words str) !! 0
rest = stripWhiteLead $ drop (length first) str
(nickname, afterNick) = breakAt '!' first
(username, hostname)
| afterNick == "" = ("", snd (breakAt '@' nickname))
| otherwise = breakAt '@' afterNick
resPrefix = case [nickname, username, hostname] of
["", _, _] -> Left $ MessageError "Invalid Prefix"
[nick, "", ""] -> Right $ UserName nick Nothing Nothing
[nick, "", host@(c:cs)] -> Right $ UserName nick Nothing (Just host)
[nick, user@(x:xs), host@(c:cs)] -> Right $ UserName nick (Just user) (Just host)
_ -> Left $ MessageError "Invalid Prefix"
| 735
|
parsePrefix :: String -> Either CmdErr (Prefix, String)
parsePrefix str = resPrefix >>= (\val -> Right (val, rest)) where
first = (words str) !! 0
rest = stripWhiteLead $ drop (length first) str
(nickname, afterNick) = breakAt '!' first
(username, hostname)
| afterNick == "" = ("", snd (breakAt '@' nickname))
| otherwise = breakAt '@' afterNick
resPrefix = case [nickname, username, hostname] of
["", _, _] -> Left $ MessageError "Invalid Prefix"
[nick, "", ""] -> Right $ UserName nick Nothing Nothing
[nick, "", host@(c:cs)] -> Right $ UserName nick Nothing (Just host)
[nick, user@(x:xs), host@(c:cs)] -> Right $ UserName nick (Just user) (Just host)
_ -> Left $ MessageError "Invalid Prefix"
| 735
|
parsePrefix str = resPrefix >>= (\val -> Right (val, rest)) where
first = (words str) !! 0
rest = stripWhiteLead $ drop (length first) str
(nickname, afterNick) = breakAt '!' first
(username, hostname)
| afterNick == "" = ("", snd (breakAt '@' nickname))
| otherwise = breakAt '@' afterNick
resPrefix = case [nickname, username, hostname] of
["", _, _] -> Left $ MessageError "Invalid Prefix"
[nick, "", ""] -> Right $ UserName nick Nothing Nothing
[nick, "", host@(c:cs)] -> Right $ UserName nick Nothing (Just host)
[nick, user@(x:xs), host@(c:cs)] -> Right $ UserName nick (Just user) (Just host)
_ -> Left $ MessageError "Invalid Prefix"
| 679
| false
| true
| 0
| 13
| 150
| 336
| 179
| 157
| null | null |
rfranek/duckling
|
Duckling/Dimensions.hs
|
bsd-3-clause
|
dependents (This Quantity) = HashSet.singleton (This Numeral)
| 61
|
dependents (This Quantity) = HashSet.singleton (This Numeral)
| 61
|
dependents (This Quantity) = HashSet.singleton (This Numeral)
| 61
| false
| false
| 0
| 7
| 6
| 26
| 12
| 14
| null | null |
miguelpagano/equ
|
Equ/TypeChecker/State.hs
|
gpl-3.0
|
mkCtxQuan :: PreExpr -> TyState ()
mkCtxQuan = mapM_ updCtx . getQuants
where updCtx quan = renTy M.empty (quantTy quan) >>= extCtxQuan quan . fst
-- | Dado un tipo, reemplaza todas las variables libres del
-- tipo por variables frescas.
| 242
|
mkCtxQuan :: PreExpr -> TyState ()
mkCtxQuan = mapM_ updCtx . getQuants
where updCtx quan = renTy M.empty (quantTy quan) >>= extCtxQuan quan . fst
-- | Dado un tipo, reemplaza todas las variables libres del
-- tipo por variables frescas.
| 242
|
mkCtxQuan = mapM_ updCtx . getQuants
where updCtx quan = renTy M.empty (quantTy quan) >>= extCtxQuan quan . fst
-- | Dado un tipo, reemplaza todas las variables libres del
-- tipo por variables frescas.
| 207
| false
| true
| 0
| 9
| 45
| 70
| 32
| 38
| null | null |
krdlab/haskell-ltsv
|
Data/LTSV/String.hs
|
bsd-3-clause
|
serialize :: Record -> String
serialize [] = ""
| 47
|
serialize :: Record -> String
serialize [] = ""
| 47
|
serialize [] = ""
| 17
| false
| true
| 0
| 6
| 8
| 20
| 10
| 10
| null | null |
VictorDenisov/jdi
|
src/Language/Java/Jdwp.hs
|
gpl-2.0
|
classPathsCommand :: PacketId -> Packet
classPathsCommand packetId = CommandPacket 11 packetId 0 1 13 B.empty
| 109
|
classPathsCommand :: PacketId -> Packet
classPathsCommand packetId = CommandPacket 11 packetId 0 1 13 B.empty
| 109
|
classPathsCommand packetId = CommandPacket 11 packetId 0 1 13 B.empty
| 69
| false
| true
| 0
| 7
| 14
| 38
| 17
| 21
| null | null |
beni55/bayes-stack
|
network-topic-models/DumpCI.hs
|
bsd-3-clause
|
readDumper "edge-mixtures" = Just $ \opts nd m showItem showNode ->
let showArc (Arc (Citing d) (Cited c)) = showNode d <> " -> " <> showNode c
formatMixture a =
let ps = sortBy (flip compare `on` snd)
$ map (\t->(t, arcTopicMixture nd m a t))
$ S.toList $ dTopics nd
norm = Numeric.Log.sum $ map snd ps
in foldMap (\(t,p)->"\t" <> showTopic t <> "\t" <> formatProb p <> "\n")
$ maybe id take (nElems opts)
$ map (\(t,p)->(t, p / norm)) ps
in foldMap (\a->"\n" <> showArc a <> "\n" <> formatMixture a)
$ S.toList $ dArcs nd
| 654
|
readDumper "edge-mixtures" = Just $ \opts nd m showItem showNode ->
let showArc (Arc (Citing d) (Cited c)) = showNode d <> " -> " <> showNode c
formatMixture a =
let ps = sortBy (flip compare `on` snd)
$ map (\t->(t, arcTopicMixture nd m a t))
$ S.toList $ dTopics nd
norm = Numeric.Log.sum $ map snd ps
in foldMap (\(t,p)->"\t" <> showTopic t <> "\t" <> formatProb p <> "\n")
$ maybe id take (nElems opts)
$ map (\(t,p)->(t, p / norm)) ps
in foldMap (\a->"\n" <> showArc a <> "\n" <> formatMixture a)
$ S.toList $ dArcs nd
| 654
|
readDumper "edge-mixtures" = Just $ \opts nd m showItem showNode ->
let showArc (Arc (Citing d) (Cited c)) = showNode d <> " -> " <> showNode c
formatMixture a =
let ps = sortBy (flip compare `on` snd)
$ map (\t->(t, arcTopicMixture nd m a t))
$ S.toList $ dTopics nd
norm = Numeric.Log.sum $ map snd ps
in foldMap (\(t,p)->"\t" <> showTopic t <> "\t" <> formatProb p <> "\n")
$ maybe id take (nElems opts)
$ map (\(t,p)->(t, p / norm)) ps
in foldMap (\a->"\n" <> showArc a <> "\n" <> formatMixture a)
$ S.toList $ dArcs nd
| 654
| false
| false
| 4
| 22
| 231
| 302
| 150
| 152
| null | null |
bos/criterion
|
Criterion/Types.hs
|
bsd-2-clause
|
addOutliers :: Outliers -> Outliers -> Outliers
addOutliers (Outliers s a b c d) (Outliers t w x y z) =
Outliers (s+t) (a+w) (b+x) (c+y) (d+z)
| 146
|
addOutliers :: Outliers -> Outliers -> Outliers
addOutliers (Outliers s a b c d) (Outliers t w x y z) =
Outliers (s+t) (a+w) (b+x) (c+y) (d+z)
| 146
|
addOutliers (Outliers s a b c d) (Outliers t w x y z) =
Outliers (s+t) (a+w) (b+x) (c+y) (d+z)
| 98
| false
| true
| 0
| 7
| 30
| 98
| 51
| 47
| null | null |
shlevy/ghc
|
compiler/vectorise/Vectorise/Generic/PData.hs
|
bsd-3-clause
|
buildPDataTyConRhs :: Name -> TyCon -> TyCon -> SumRepr -> VM AlgTyConRhs
buildPDataTyConRhs orig_name vect_tc repr_tc repr
= do data_con <- buildPDataDataCon orig_name vect_tc repr_tc repr
return $ mkDataTyConRhs [data_con]
| 231
|
buildPDataTyConRhs :: Name -> TyCon -> TyCon -> SumRepr -> VM AlgTyConRhs
buildPDataTyConRhs orig_name vect_tc repr_tc repr
= do data_con <- buildPDataDataCon orig_name vect_tc repr_tc repr
return $ mkDataTyConRhs [data_con]
| 231
|
buildPDataTyConRhs orig_name vect_tc repr_tc repr
= do data_con <- buildPDataDataCon orig_name vect_tc repr_tc repr
return $ mkDataTyConRhs [data_con]
| 157
| false
| true
| 0
| 9
| 37
| 68
| 32
| 36
| null | null |
fehu/haskell-java-bridge-fork
|
src/Foreign/Java/Types.hs
|
mit
|
(-->) :: a -> x -> P a x
a --> x = P a x
| 40
|
(-->) :: a -> x -> P a x
a --> x = P a x
| 40
|
a --> x = P a x
| 15
| false
| true
| 0
| 9
| 15
| 37
| 18
| 19
| null | null |
spechub/Hets
|
CspCASLProver/IsabelleUtils.hs
|
gpl-2.0
|
{- | Prepare a theory for writing it out to a file. This function is based off
the function Isabelle.IsaProve.prepareTheory. The difference being that
this function does not mark axioms nor theorms as to be added to the
simplifier in Isabelle. -}
prepareTheory :: Theory Sign Sentence ()
-> (Sign, [Named Sentence], [Named Sentence], Map.Map String String)
prepareTheory (Theory sig nSens) = let
oSens = toNamedList nSens
nSens' = prepareSenNames transString oSens
(disAxs, disGoals) = getAxioms nSens'
in (sig, disAxs, disGoals,
Map.fromList $ zip (map senAttr nSens') $ map senAttr oSens)
| 617
|
prepareTheory :: Theory Sign Sentence ()
-> (Sign, [Named Sentence], [Named Sentence], Map.Map String String)
prepareTheory (Theory sig nSens) = let
oSens = toNamedList nSens
nSens' = prepareSenNames transString oSens
(disAxs, disGoals) = getAxioms nSens'
in (sig, disAxs, disGoals,
Map.fromList $ zip (map senAttr nSens') $ map senAttr oSens)
| 370
|
prepareTheory (Theory sig nSens) = let
oSens = toNamedList nSens
nSens' = prepareSenNames transString oSens
(disAxs, disGoals) = getAxioms nSens'
in (sig, disAxs, disGoals,
Map.fromList $ zip (map senAttr nSens') $ map senAttr oSens)
| 256
| true
| true
| 0
| 13
| 117
| 150
| 75
| 75
| null | null |
alexandersgreen/hoodlums
|
Arch/Synacor.hs
|
apache-2.0
|
getChar :: Syn Word16
getChar = do
c <- lift P.getChar
return $ fromIntegral . C.ord $ c
| 90
|
getChar :: Syn Word16
getChar = do
c <- lift P.getChar
return $ fromIntegral . C.ord $ c
| 90
|
getChar = do
c <- lift P.getChar
return $ fromIntegral . C.ord $ c
| 68
| false
| true
| 0
| 9
| 19
| 43
| 20
| 23
| null | null |
burz/Feval
|
FVL/Algebra.hs
|
mit
|
lazyCata :: Functor (f (LazyFix f)) => Algebra (f (LazyFix f)) a -> LazyFix f -> a
lazyCata alg = alg . fmap (lazyCata alg) . lazyUnFix
| 135
|
lazyCata :: Functor (f (LazyFix f)) => Algebra (f (LazyFix f)) a -> LazyFix f -> a
lazyCata alg = alg . fmap (lazyCata alg) . lazyUnFix
| 135
|
lazyCata alg = alg . fmap (lazyCata alg) . lazyUnFix
| 52
| false
| true
| 0
| 11
| 26
| 78
| 37
| 41
| null | null |
sopvop/cabal
|
cabal-install/Distribution/Client/Sandbox/Timestamp.hs
|
bsd-3-clause
|
withTimestampFile :: FilePath
-> ([TimestampFileRecord] -> IO [TimestampFileRecord])
-> IO ()
withTimestampFile sandboxDir process = do
let timestampFile = sandboxDir </> timestampFileName
timestampRecords <- readTimestampFile timestampFile >>= process
writeTimestampFile timestampFile timestampRecords
-- | Given a list of 'AddSourceTimestamp's, a list of paths to add-source deps
-- we've added and an initial timestamp, add an 'AddSourceTimestamp' to the list
-- for each path. If a timestamp for a given path already exists in the list,
-- update it.
| 603
|
withTimestampFile :: FilePath
-> ([TimestampFileRecord] -> IO [TimestampFileRecord])
-> IO ()
withTimestampFile sandboxDir process = do
let timestampFile = sandboxDir </> timestampFileName
timestampRecords <- readTimestampFile timestampFile >>= process
writeTimestampFile timestampFile timestampRecords
-- | Given a list of 'AddSourceTimestamp's, a list of paths to add-source deps
-- we've added and an initial timestamp, add an 'AddSourceTimestamp' to the list
-- for each path. If a timestamp for a given path already exists in the list,
-- update it.
| 603
|
withTimestampFile sandboxDir process = do
let timestampFile = sandboxDir </> timestampFileName
timestampRecords <- readTimestampFile timestampFile >>= process
writeTimestampFile timestampFile timestampRecords
-- | Given a list of 'AddSourceTimestamp's, a list of paths to add-source deps
-- we've added and an initial timestamp, add an 'AddSourceTimestamp' to the list
-- for each path. If a timestamp for a given path already exists in the list,
-- update it.
| 467
| false
| true
| 1
| 10
| 125
| 84
| 42
| 42
| null | null |
robinp/haskell-indexer
|
haskell-indexer-backend-ghc/tests/Language/Haskell/Indexer/Backend/Ghc/Test/TranslateAssert.hs
|
apache-2.0
|
extraAlternateIdSpanContainsPos :: (MonadIO m) => (Int, Int) -> Decl -> m ()
extraAlternateIdSpanContainsPos p decl =
let idSpan = declExtra decl >>= alternateIdSpan
in unless ((containsPos p <$> idSpan) == Just True) $ failConcat
[ "alternateIdSpan of ", prettyDecl decl, " doesn't contain pas ", show p ]
| 324
|
extraAlternateIdSpanContainsPos :: (MonadIO m) => (Int, Int) -> Decl -> m ()
extraAlternateIdSpanContainsPos p decl =
let idSpan = declExtra decl >>= alternateIdSpan
in unless ((containsPos p <$> idSpan) == Just True) $ failConcat
[ "alternateIdSpan of ", prettyDecl decl, " doesn't contain pas ", show p ]
| 324
|
extraAlternateIdSpanContainsPos p decl =
let idSpan = declExtra decl >>= alternateIdSpan
in unless ((containsPos p <$> idSpan) == Just True) $ failConcat
[ "alternateIdSpan of ", prettyDecl decl, " doesn't contain pas ", show p ]
| 247
| false
| true
| 0
| 13
| 65
| 110
| 55
| 55
| null | null |
kawu/nerf
|
src/NLP/Nerf/Dict/Prolexbase.hs
|
bsd-2-clause
|
parseLine :: L.Text -> Entry
parseLine row = case map L.toStrict (L.split (=='\t') row) of
[_form, _base, _tag, _cat] -> Entry _form _cat
_ -> error $ "parseLine: invalid row \"" ++ L.unpack row ++ "\""
-- | Read the dictionary from the file.
| 253
|
parseLine :: L.Text -> Entry
parseLine row = case map L.toStrict (L.split (=='\t') row) of
[_form, _base, _tag, _cat] -> Entry _form _cat
_ -> error $ "parseLine: invalid row \"" ++ L.unpack row ++ "\""
-- | Read the dictionary from the file.
| 253
|
parseLine row = case map L.toStrict (L.split (=='\t') row) of
[_form, _base, _tag, _cat] -> Entry _form _cat
_ -> error $ "parseLine: invalid row \"" ++ L.unpack row ++ "\""
-- | Read the dictionary from the file.
| 224
| false
| true
| 0
| 11
| 54
| 92
| 48
| 44
| null | null |
christiaanb/clash-compiler
|
clash-lib/src/CLaSH/Normalize/Transformations.hs
|
bsd-2-clause
|
reduceConst :: NormRewrite
reduceConst _ e@(App _ _)
| isConstant e
, (conPrim, _) <- collectArgs e
, isPrim conPrim
= R $ do
tcm <- Lens.use tcCache
reduceConstant <- Lens.use evaluator
case reduceConstant tcm e of
e'@(Data _) -> changed e'
e'@(Literal _) -> changed e'
_ -> return e
| 338
|
reduceConst :: NormRewrite
reduceConst _ e@(App _ _)
| isConstant e
, (conPrim, _) <- collectArgs e
, isPrim conPrim
= R $ do
tcm <- Lens.use tcCache
reduceConstant <- Lens.use evaluator
case reduceConstant tcm e of
e'@(Data _) -> changed e'
e'@(Literal _) -> changed e'
_ -> return e
| 338
|
reduceConst _ e@(App _ _)
| isConstant e
, (conPrim, _) <- collectArgs e
, isPrim conPrim
= R $ do
tcm <- Lens.use tcCache
reduceConstant <- Lens.use evaluator
case reduceConstant tcm e of
e'@(Data _) -> changed e'
e'@(Literal _) -> changed e'
_ -> return e
| 311
| false
| true
| 0
| 14
| 104
| 145
| 68
| 77
| null | null |
neothemachine/monadiccp
|
src/Control/CP/FD/Interface.hs
|
bsd-3-clause
|
val :: Tree DummySolver () -> ModelInt
val = Sugar.toExpr . treeToModel
| 71
|
val :: Tree DummySolver () -> ModelInt
val = Sugar.toExpr . treeToModel
| 71
|
val = Sugar.toExpr . treeToModel
| 32
| false
| true
| 1
| 8
| 11
| 36
| 15
| 21
| null | null |
imalsogreg/hComedi
|
src/System/HComedi/Units.hs
|
gpl-3.0
|
getMaxData :: Handle -> SubDevice -> Channel -> B.LSample
getMaxData (Handle fn p) (SubDevice s) (Channel c) =
unsafePerformIO $
throwErrnoIf (<= 0)
(unwords ["Comedi error getting max data for "
, fn, "subdevice", show s])
(B.c_comedi_get_maxdata p s c)
| 275
|
getMaxData :: Handle -> SubDevice -> Channel -> B.LSample
getMaxData (Handle fn p) (SubDevice s) (Channel c) =
unsafePerformIO $
throwErrnoIf (<= 0)
(unwords ["Comedi error getting max data for "
, fn, "subdevice", show s])
(B.c_comedi_get_maxdata p s c)
| 275
|
getMaxData (Handle fn p) (SubDevice s) (Channel c) =
unsafePerformIO $
throwErrnoIf (<= 0)
(unwords ["Comedi error getting max data for "
, fn, "subdevice", show s])
(B.c_comedi_get_maxdata p s c)
| 217
| false
| true
| 0
| 8
| 60
| 102
| 52
| 50
| null | null |
sdiehl/ghc
|
testsuite/tests/codeGen/should_run/T13825-unit.hs
|
bsd-3-clause
|
fmt (Padding len off) = "P(" ++ show len ++ "," ++ show off ++ ")"
| 66
|
fmt (Padding len off) = "P(" ++ show len ++ "," ++ show off ++ ")"
| 66
|
fmt (Padding len off) = "P(" ++ show len ++ "," ++ show off ++ ")"
| 66
| false
| false
| 2
| 8
| 15
| 42
| 18
| 24
| null | null |
vTurbine/ghc
|
compiler/prelude/PrelNames.hs
|
bsd-3-clause
|
staticPtrDataConKey :: Unique
staticPtrDataConKey = mkPreludeDataConUnique 33
| 97
|
staticPtrDataConKey :: Unique
staticPtrDataConKey = mkPreludeDataConUnique 33
| 97
|
staticPtrDataConKey = mkPreludeDataConUnique 33
| 67
| false
| true
| 0
| 5
| 26
| 14
| 7
| 7
| null | null |
eryx67/haskell-libtorrent
|
src/Network/Libtorrent/CreateTorrent.hs
|
bsd-3-clause
|
-- | Can throw 'LibtorrentException'.
createTorrentSetPieceHashes :: MonadIO m => CreateTorrent -> Text -> (C.CInt -> IO ()) -> m ()
createTorrentSetPieceHashes ho fp cb =
liftIO . withPtr ho $ \hoPtr -> do
fpStr <- textToStdString fp
withPtr fpStr $ \fpPtr ->
withErrorCode CreateTorrentError $ \ePtr ->
bracket
($(C.mkFunPtr [t| C.CInt -> IO () |]) cb)
freeHaskellFunPtr $ \cbPtr ->
[C.block| void {
boost::function<void(int)> cb = $(SetPieceHashesCb cbPtr);
set_piece_hashes(*$(create_torrent * hoPtr), *$(string * fpPtr), cb, *$(error_code * ePtr)) ;
}
|]
| 612
|
createTorrentSetPieceHashes :: MonadIO m => CreateTorrent -> Text -> (C.CInt -> IO ()) -> m ()
createTorrentSetPieceHashes ho fp cb =
liftIO . withPtr ho $ \hoPtr -> do
fpStr <- textToStdString fp
withPtr fpStr $ \fpPtr ->
withErrorCode CreateTorrentError $ \ePtr ->
bracket
($(C.mkFunPtr [t| C.CInt -> IO () |]) cb)
freeHaskellFunPtr $ \cbPtr ->
[C.block| void {
boost::function<void(int)> cb = $(SetPieceHashesCb cbPtr);
set_piece_hashes(*$(create_torrent * hoPtr), *$(string * fpPtr), cb, *$(error_code * ePtr)) ;
}
|]
| 574
|
createTorrentSetPieceHashes ho fp cb =
liftIO . withPtr ho $ \hoPtr -> do
fpStr <- textToStdString fp
withPtr fpStr $ \fpPtr ->
withErrorCode CreateTorrentError $ \ePtr ->
bracket
($(C.mkFunPtr [t| C.CInt -> IO () |]) cb)
freeHaskellFunPtr $ \cbPtr ->
[C.block| void {
boost::function<void(int)> cb = $(SetPieceHashesCb cbPtr);
set_piece_hashes(*$(create_torrent * hoPtr), *$(string * fpPtr), cb, *$(error_code * ePtr)) ;
}
|]
| 479
| true
| true
| 0
| 20
| 134
| 149
| 76
| 73
| null | null |
rsasse/tamarin-prover
|
lib/term/src/Term/Term/Raw.hs
|
gpl-3.0
|
fAppList :: [Term a] -> Term a
fAppList = FAPP List
| 51
|
fAppList :: [Term a] -> Term a
fAppList = FAPP List
| 51
|
fAppList = FAPP List
| 20
| false
| true
| 0
| 8
| 10
| 33
| 14
| 19
| null | null |
astro/haskell-couchdb
|
src/Database/CouchDB.hs
|
bsd-3-clause
|
-- |Creates a new database. Throws an exception if the database already
-- exists.
createDB :: String -> CouchMonad ()
createDB = U.createDB
| 142
|
createDB :: String -> CouchMonad ()
createDB = U.createDB
| 57
|
createDB = U.createDB
| 21
| true
| true
| 0
| 7
| 24
| 24
| 13
| 11
| null | null |
coursestitch/coursestitch-api
|
lib/CourseStitch/Templates/Resource.hs
|
apache-2.0
|
resourceCreated :: Entity Resource -> Html ()
resourceCreated r = do
p_ $ mconcat [resourceUri r, " was created successfully"]
resource r
| 145
|
resourceCreated :: Entity Resource -> Html ()
resourceCreated r = do
p_ $ mconcat [resourceUri r, " was created successfully"]
resource r
| 145
|
resourceCreated r = do
p_ $ mconcat [resourceUri r, " was created successfully"]
resource r
| 99
| false
| true
| 0
| 11
| 29
| 56
| 24
| 32
| null | null |
nkaretnikov/titlecase
|
tests/Test/Unit.hs
|
bsd-3-clause
|
testLast t = testTitlecase $ "This Sentence Capitalizes" <#> toTitleLast t
| 77
|
testLast t = testTitlecase $ "This Sentence Capitalizes" <#> toTitleLast t
| 77
|
testLast t = testTitlecase $ "This Sentence Capitalizes" <#> toTitleLast t
| 77
| false
| false
| 0
| 6
| 13
| 20
| 9
| 11
| null | null |
comonoidial/ALFIN
|
Alfin/CoreConvert.hs
|
mit
|
usedFunsExp (SCApply a xs) = usedFunsExp a ++ concatMap usedFunsExp xs
| 73
|
usedFunsExp (SCApply a xs) = usedFunsExp a ++ concatMap usedFunsExp xs
| 73
|
usedFunsExp (SCApply a xs) = usedFunsExp a ++ concatMap usedFunsExp xs
| 73
| false
| false
| 0
| 7
| 13
| 29
| 13
| 16
| null | null |
ghc-android/ghc
|
compiler/prelude/PrelNames.hs
|
bsd-3-clause
|
rEAD_PREC = mkBaseModule (fsLit "Text.ParserCombinators.ReadPrec")
| 72
|
rEAD_PREC = mkBaseModule (fsLit "Text.ParserCombinators.ReadPrec")
| 72
|
rEAD_PREC = mkBaseModule (fsLit "Text.ParserCombinators.ReadPrec")
| 72
| false
| false
| 0
| 7
| 10
| 15
| 7
| 8
| null | null |
IanConnolly/aws-sdk-fork
|
AWS/Credential.hs
|
bsd-3-clause
|
mkConfig "configParser" [config|
Credential
accessKey ByteString
secretAccessKey ByteString
|]
type AccessKey = ByteString
| 137
|
mkConfig "configParser" [config|
Credential
accessKey ByteString
secretAccessKey ByteString
|]
type AccessKey = ByteString
| 137
|
mkConfig "configParser" [config|
Credential
accessKey ByteString
secretAccessKey ByteString
|]
type AccessKey = ByteString
| 137
| false
| false
| 0
| 5
| 27
| 19
| 11
| 8
| null | null |
markus1189/xmonad-contrib-710
|
XMonad/Actions/Plane.hs
|
bsd-3-clause
|
shift' ::
(Eq s, Eq i, Ord a) => i -> StackSet i l a s sd -> StackSet i l a s sd
shift' area = greedyView area . shift area
| 127
|
shift' ::
(Eq s, Eq i, Ord a) => i -> StackSet i l a s sd -> StackSet i l a s sd
shift' area = greedyView area . shift area
| 127
|
shift' area = greedyView area . shift area
| 42
| false
| true
| 0
| 8
| 35
| 78
| 37
| 41
| null | null |
scott-fleischman/greek-grammar
|
haskell/greek-grammar/src/Data/Unicode/DecomposeChar.hs
|
mit
|
decomposeChar '\x2F951' = "\x40E3"
| 34
|
decomposeChar '\x2F951' = "\x40E3"
| 34
|
decomposeChar '\x2F951' = "\x40E3"
| 34
| false
| false
| 0
| 4
| 3
| 10
| 4
| 6
| null | null |
lambdataro/Dive
|
TypeInf.hs
|
mit
|
{- same as fun -}
makeClos (TList t) ftv = makeClos t ftv
| 53
|
makeClos (TList t) ftv = makeClos t ftv
| 39
|
makeClos (TList t) ftv = makeClos t ftv
| 39
| true
| false
| 0
| 7
| 11
| 23
| 11
| 12
| null | null |
nushio3/ghc
|
compiler/main/DynFlags.hs
|
bsd-3-clause
|
pgm_c :: DynFlags -> (String,[Option])
pgm_c dflags = sPgm_c (settings dflags)
| 94
|
pgm_c :: DynFlags -> (String,[Option])
pgm_c dflags = sPgm_c (settings dflags)
| 94
|
pgm_c dflags = sPgm_c (settings dflags)
| 39
| false
| true
| 0
| 7
| 26
| 36
| 19
| 17
| null | null |
thiagoarrais/gtk2hs
|
gtk/Graphics/UI/Gtk/ModelView/TreeStoreStatic.hs
|
lgpl-2.1
|
setBitSlice :: TreeIter -> Int -> Int -> Word -> TreeIter
setBitSlice (TreeIter stamp a b c) off count value =
assert (value < 1 `shiftL` count) $
TreeIter stamp
(setBitSliceWord a off count value)
(setBitSliceWord b (off-32) count value)
(setBitSliceWord c (off-64) count value)
where setBitSliceWord :: Word -> Int -> Int -> Word -> Word
setBitSliceWord word off count value =
let mask = (1 `shiftL` count - 1) `shiftL` off
in (word .&. complement mask) .|. (value `shiftL` off)
-------------------
-- testing
--
| 589
|
setBitSlice :: TreeIter -> Int -> Int -> Word -> TreeIter
setBitSlice (TreeIter stamp a b c) off count value =
assert (value < 1 `shiftL` count) $
TreeIter stamp
(setBitSliceWord a off count value)
(setBitSliceWord b (off-32) count value)
(setBitSliceWord c (off-64) count value)
where setBitSliceWord :: Word -> Int -> Int -> Word -> Word
setBitSliceWord word off count value =
let mask = (1 `shiftL` count - 1) `shiftL` off
in (word .&. complement mask) .|. (value `shiftL` off)
-------------------
-- testing
--
| 589
|
setBitSlice (TreeIter stamp a b c) off count value =
assert (value < 1 `shiftL` count) $
TreeIter stamp
(setBitSliceWord a off count value)
(setBitSliceWord b (off-32) count value)
(setBitSliceWord c (off-64) count value)
where setBitSliceWord :: Word -> Int -> Int -> Word -> Word
setBitSliceWord word off count value =
let mask = (1 `shiftL` count - 1) `shiftL` off
in (word .&. complement mask) .|. (value `shiftL` off)
-------------------
-- testing
--
| 531
| false
| true
| 0
| 12
| 160
| 235
| 119
| 116
| null | null |
gnn/Hets
|
Logic/Comorphism.hs
|
gpl-2.0
|
-- * Properties of comorphisms
-- | Test whether a comorphism is model-transportable
isModelTransportable :: AnyComorphism -> Bool
isModelTransportable (Comorphism cid) = is_model_transportable cid
| 198
|
isModelTransportable :: AnyComorphism -> Bool
isModelTransportable (Comorphism cid) = is_model_transportable cid
| 112
|
isModelTransportable (Comorphism cid) = is_model_transportable cid
| 66
| true
| true
| 0
| 7
| 24
| 29
| 15
| 14
| null | null |
ocharles/hackage-server
|
Distribution/Server/Features/Upload/State.hs
|
bsd-3-clause
|
addHackageTrustee :: UserId -> Update HackageTrustees ()
addHackageTrustee uid = modifyHackageTrustees (Group.insert uid)
| 121
|
addHackageTrustee :: UserId -> Update HackageTrustees ()
addHackageTrustee uid = modifyHackageTrustees (Group.insert uid)
| 121
|
addHackageTrustee uid = modifyHackageTrustees (Group.insert uid)
| 64
| false
| true
| 0
| 8
| 12
| 36
| 17
| 19
| null | null |
fumieval/free-game
|
FreeGame/Internal/GLFW.hs
|
bsd-3-clause
|
trim1 :: S.Seq Double -> S.Seq Double
trim1 s0 = go zs s0 (sum zs) where
go (x:xs) s a
| a < 1 = s
| otherwise = go xs (S.drop 1 s) (a - x)
go [] s _ = s
zs = F.toList s0
| 204
|
trim1 :: S.Seq Double -> S.Seq Double
trim1 s0 = go zs s0 (sum zs) where
go (x:xs) s a
| a < 1 = s
| otherwise = go xs (S.drop 1 s) (a - x)
go [] s _ = s
zs = F.toList s0
| 203
|
trim1 s0 = go zs s0 (sum zs) where
go (x:xs) s a
| a < 1 = s
| otherwise = go xs (S.drop 1 s) (a - x)
go [] s _ = s
zs = F.toList s0
| 165
| false
| true
| 0
| 11
| 80
| 129
| 62
| 67
| null | null |
andyarvanitis/idris-cplusplus
|
src/Main.hs
|
bsd-2-clause
|
main :: IO ()
main = do opts <- getOpts
if (null (inputs opts))
then showUsage
else runMain (cpp_main opts)
| 144
|
main :: IO ()
main = do opts <- getOpts
if (null (inputs opts))
then showUsage
else runMain (cpp_main opts)
| 144
|
main = do opts <- getOpts
if (null (inputs opts))
then showUsage
else runMain (cpp_main opts)
| 130
| false
| true
| 0
| 12
| 56
| 61
| 28
| 33
| null | null |
newhoggy/monadic-regions
|
src/Test/SafeHandlesTest.hs
|
bsd-3-clause
|
-- testp1 :: (MonadRaise m1 m2, RMonadIO m2) => SHandle m1 -> m2 String
-- The following, essentially equivalent, code however gives problem
-- testp2 h = newRgn (shGetLine h)
-- Could not deduce (MonadRaise m1 (IORT s1 m)) from the context ()
-- And so does this
-- testp3 h = shGetLine h >> newRgn (shGetLine h)
-- But the following is OK:
-- We can provide the explicit signature
testp4 :: (SMonad1IO m2, m2 ~ (IORT s' m'), MonadRaise m1 m2) =>
SHandle m1 -> m2 String
testp4 h = newRgn (liftSIO $ shGetLine h)
| 525
|
testp4 :: (SMonad1IO m2, m2 ~ (IORT s' m'), MonadRaise m1 m2) =>
SHandle m1 -> m2 String
testp4 h = newRgn (liftSIO $ shGetLine h)
| 140
|
testp4 h = newRgn (liftSIO $ shGetLine h)
| 41
| true
| true
| 0
| 10
| 108
| 83
| 43
| 40
| null | null |
iquiw/wai
|
warp/Network/Wai/Handler/Warp.hs
|
mit
|
-- | Get the listening port.
--
-- Since 2.1.1
getPort :: Settings -> Port
getPort = settingsPort
| 97
|
getPort :: Settings -> Port
getPort = settingsPort
| 50
|
getPort = settingsPort
| 22
| true
| true
| 0
| 5
| 17
| 18
| 11
| 7
| null | null |
HaskellForCats/HaskellForCats
|
factorial.hs
|
mit
|
------
factorial' n | n < 2 = 1
| 32
|
factorial' n | n < 2 = 1
| 24
|
factorial' n | n < 2 = 1
| 24
| true
| false
| 0
| 8
| 9
| 24
| 10
| 14
| null | null |
GaloisInc/stack
|
src/Stack/Types/Config.hs
|
bsd-3-clause
|
packageDatabaseDeps :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir)
packageDatabaseDeps = do
root <- installationRootDeps
return $ root </> $(mkRelDir "pkgdb")
-- | Package database for installing local packages into
| 252
|
packageDatabaseDeps :: (MonadThrow m, MonadReader env m, HasEnvConfig env) => m (Path Abs Dir)
packageDatabaseDeps = do
root <- installationRootDeps
return $ root </> $(mkRelDir "pkgdb")
-- | Package database for installing local packages into
| 252
|
packageDatabaseDeps = do
root <- installationRootDeps
return $ root </> $(mkRelDir "pkgdb")
-- | Package database for installing local packages into
| 157
| false
| true
| 0
| 11
| 43
| 77
| 36
| 41
| null | null |
alexander-at-github/eta
|
compiler/ETA/Utils/Bag.hs
|
bsd-3-clause
|
foldBag :: (r -> r -> r) -- Replace TwoBags with this; should be associative
-> (a -> r) -- Replace UnitBag with this
-> r -- Replace EmptyBag with this
-> Bag a
-> r
{- Standard definition
foldBag t u e EmptyBag = e
foldBag t u e (UnitBag x) = u x
foldBag t u e (TwoBags b1 b2) = (foldBag t u e b1) `t` (foldBag t u e b2)
foldBag t u e (ListBag xs) = foldr (t.u) e xs
-}
-- More tail-recursive definition, exploiting associativity of "t"
foldBag _ _ e EmptyBag = e
| 537
|
foldBag :: (r -> r -> r) -- Replace TwoBags with this; should be associative
-> (a -> r) -- Replace UnitBag with this
-> r -- Replace EmptyBag with this
-> Bag a
-> r
foldBag _ _ e EmptyBag = e
| 249
|
foldBag _ _ e EmptyBag = e
| 33
| true
| true
| 0
| 10
| 172
| 68
| 35
| 33
| null | null |
fmapfmapfmap/amazonka
|
amazonka-directconnect/gen/Network/AWS/DirectConnect/AllocateConnectionOnInterconnect.hs
|
mpl-2.0
|
-- | Bandwidth of the connection.
--
-- Example: \"/500Mbps/\"
--
-- Default: None
acoiBandwidth :: Lens' AllocateConnectionOnInterconnect Text
acoiBandwidth = lens _acoiBandwidth (\ s a -> s{_acoiBandwidth = a})
| 212
|
acoiBandwidth :: Lens' AllocateConnectionOnInterconnect Text
acoiBandwidth = lens _acoiBandwidth (\ s a -> s{_acoiBandwidth = a})
| 129
|
acoiBandwidth = lens _acoiBandwidth (\ s a -> s{_acoiBandwidth = a})
| 68
| true
| true
| 1
| 9
| 29
| 49
| 26
| 23
| null | null |