Column               Type
repo_name            string
path                 string
license              string
full_code            string
full_size            int64
uncommented_code     string
uncommented_size     int64
function_only_code   string
function_only_size   int64
is_commented         bool
is_signatured        bool
n_ast_errors         int64
ast_max_depth        int64
n_whitespaces        int64
n_ast_nodes          int64
n_ast_terminals      int64
n_ast_nonterminals   int64
loc                  int64
cycloplexity         int64
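The rows below follow this 19-column schema, one value per line in column order. As a minimal sketch of how a single record could be modeled in Haskell (the language of the dataset's code samples), the type below mirrors the columns; the type name, the camelCase field names, and the Maybe wrapping for loc and cycloplexity (which appear as null in the rows shown here) are illustrative assumptions, not part of the dataset itself.

```haskell
import Data.Int  (Int64)
import Data.Text (Text)

-- Hypothetical record mirroring the 19 columns listed above.
-- 'loc' and 'cycloplexity' are wrapped in Maybe because they are
-- null in every row displayed in this preview.
data CodeRow = CodeRow
  { repoName         :: Text
  , path             :: Text
  , license          :: Text
  , fullCode         :: Text
  , fullSize         :: Int64
  , uncommentedCode  :: Text
  , uncommentedSize  :: Int64
  , functionOnlyCode :: Text
  , functionOnlySize :: Int64
  , isCommented      :: Bool
  , isSignatured     :: Bool
  , nAstErrors       :: Int64
  , astMaxDepth      :: Int64
  , nWhitespaces     :: Int64
  , nAstNodes        :: Int64
  , nAstTerminals    :: Int64
  , nAstNonterminals :: Int64
  , loc              :: Maybe Int64
  , cycloplexity     :: Maybe Int64
  } deriving (Show)
```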
erantapaa/read-word32
src/Lib.hs
bsd-3-clause
wanted :: BL.ByteString -> Bool wanted bs = BL.index bs 0 == 0xC0
65
wanted :: BL.ByteString -> Bool wanted bs = BL.index bs 0 == 0xC0
65
wanted bs = BL.index bs 0 == 0xC0
33
false
true
0
7
12
38
16
22
null
null
alexander-at-github/eta
compiler/ETA/Types/TyCon.hs
bsd-3-clause
-- | Kind constructors mkKindTyCon :: Name -> Kind -> TyCon mkKindTyCon name kind = mkPrimTyCon' name kind [] VoidRep True
124
mkKindTyCon :: Name -> Kind -> TyCon mkKindTyCon name kind = mkPrimTyCon' name kind [] VoidRep True
101
mkKindTyCon name kind = mkPrimTyCon' name kind [] VoidRep True
64
true
true
0
6
22
38
19
19
null
null
grtlr/wyas
src/Wyas/Parser.hs
bsd-3-clause
parseQuasiQuoted :: Parser LispVal parseQuasiQuoted = do char '`' x <- parseExpr return $ List [Atom "quasiquote", x]
161
parseQuasiQuoted :: Parser LispVal parseQuasiQuoted = do char '`' x <- parseExpr return $ List [Atom "quasiquote", x]
161
parseQuasiQuoted = do char '`' x <- parseExpr return $ List [Atom "quasiquote", x]
126
false
true
0
10
61
46
21
25
null
null
asilvestre/haskell-neo4j-rest-client
tests/IntegrationTests.hs
mit
case_uniquenessTraversalNodes :: Assertion case_uniquenessTraversalNodes = withAuthConnection host port creds $ do g <- setUpTraversalTest let start = fromJust $ G.getNamedNode "Tobias" g let desc = def {T.travDepth = Left 2, T.travRelFilter = [("loves", Any), ("hates", Any), ("admires", Any)]} ns <- T.traverseGetNodes desc start let expected = map (\n -> fromJust $ G.getNamedNode n g) ["Tobias", "Sara", "Gumersindo", "Sara", "Gumersindo"] neo4jEqual (L.sort expected) (L.sort ns) let desc2 = desc {T.travUniqueness = Just T.NodeGlobal} ns2 <- T.traverseGetNodes desc2 start let expected2 = map (\n -> fromJust $ G.getNamedNode n g) ["Tobias", "Sara", "Gumersindo"] neo4jEqual (L.sort expected2) (L.sort ns2) let desc3 = desc {T.travUniqueness = Just T.RelationshipGlobal} ns3 <- T.traverseGetNodes desc3 start let expected3 = map (\n -> fromJust $ G.getNamedNode n g) ["Tobias", "Gumersindo", "Sara", "Gumersindo"] neo4jEqual (L.sort expected3) (L.sort ns3) let desc4 = desc {T.travUniqueness = Just T.NodePathUnique} ns4 <- T.traverseGetNodes desc4 start let expected4 = map (\n -> fromJust $ G.getNamedNode n g) ["Tobias", "Sara", "Gumersindo", "Sara", "Gumersindo"] neo4jEqual (L.sort expected4) (L.sort ns4) let desc5 = desc {T.travUniqueness = Just T.RelationshipPath} ns5 <- T.traverseGetNodes desc5 start let expected5 = map (\n -> fromJust $ G.getNamedNode n g) ["Tobias", "Sara", "Gumersindo", "Sara", "Gumersindo"] neo4jEqual (L.sort expected5) (L.sort ns5) cleanUpTraversalTest g -- | Test traversal with a javascript depth expression
1,643
case_uniquenessTraversalNodes :: Assertion case_uniquenessTraversalNodes = withAuthConnection host port creds $ do g <- setUpTraversalTest let start = fromJust $ G.getNamedNode "Tobias" g let desc = def {T.travDepth = Left 2, T.travRelFilter = [("loves", Any), ("hates", Any), ("admires", Any)]} ns <- T.traverseGetNodes desc start let expected = map (\n -> fromJust $ G.getNamedNode n g) ["Tobias", "Sara", "Gumersindo", "Sara", "Gumersindo"] neo4jEqual (L.sort expected) (L.sort ns) let desc2 = desc {T.travUniqueness = Just T.NodeGlobal} ns2 <- T.traverseGetNodes desc2 start let expected2 = map (\n -> fromJust $ G.getNamedNode n g) ["Tobias", "Sara", "Gumersindo"] neo4jEqual (L.sort expected2) (L.sort ns2) let desc3 = desc {T.travUniqueness = Just T.RelationshipGlobal} ns3 <- T.traverseGetNodes desc3 start let expected3 = map (\n -> fromJust $ G.getNamedNode n g) ["Tobias", "Gumersindo", "Sara", "Gumersindo"] neo4jEqual (L.sort expected3) (L.sort ns3) let desc4 = desc {T.travUniqueness = Just T.NodePathUnique} ns4 <- T.traverseGetNodes desc4 start let expected4 = map (\n -> fromJust $ G.getNamedNode n g) ["Tobias", "Sara", "Gumersindo", "Sara", "Gumersindo"] neo4jEqual (L.sort expected4) (L.sort ns4) let desc5 = desc {T.travUniqueness = Just T.RelationshipPath} ns5 <- T.traverseGetNodes desc5 start let expected5 = map (\n -> fromJust $ G.getNamedNode n g) ["Tobias", "Sara", "Gumersindo", "Sara", "Gumersindo"] neo4jEqual (L.sort expected5) (L.sort ns5) cleanUpTraversalTest g -- | Test traversal with a javascript depth expression
1,643
case_uniquenessTraversalNodes = withAuthConnection host port creds $ do g <- setUpTraversalTest let start = fromJust $ G.getNamedNode "Tobias" g let desc = def {T.travDepth = Left 2, T.travRelFilter = [("loves", Any), ("hates", Any), ("admires", Any)]} ns <- T.traverseGetNodes desc start let expected = map (\n -> fromJust $ G.getNamedNode n g) ["Tobias", "Sara", "Gumersindo", "Sara", "Gumersindo"] neo4jEqual (L.sort expected) (L.sort ns) let desc2 = desc {T.travUniqueness = Just T.NodeGlobal} ns2 <- T.traverseGetNodes desc2 start let expected2 = map (\n -> fromJust $ G.getNamedNode n g) ["Tobias", "Sara", "Gumersindo"] neo4jEqual (L.sort expected2) (L.sort ns2) let desc3 = desc {T.travUniqueness = Just T.RelationshipGlobal} ns3 <- T.traverseGetNodes desc3 start let expected3 = map (\n -> fromJust $ G.getNamedNode n g) ["Tobias", "Gumersindo", "Sara", "Gumersindo"] neo4jEqual (L.sort expected3) (L.sort ns3) let desc4 = desc {T.travUniqueness = Just T.NodePathUnique} ns4 <- T.traverseGetNodes desc4 start let expected4 = map (\n -> fromJust $ G.getNamedNode n g) ["Tobias", "Sara", "Gumersindo", "Sara", "Gumersindo"] neo4jEqual (L.sort expected4) (L.sort ns4) let desc5 = desc {T.travUniqueness = Just T.RelationshipPath} ns5 <- T.traverseGetNodes desc5 start let expected5 = map (\n -> fromJust $ G.getNamedNode n g) ["Tobias", "Sara", "Gumersindo", "Sara", "Gumersindo"] neo4jEqual (L.sort expected5) (L.sort ns5) cleanUpTraversalTest g -- | Test traversal with a javascript depth expression
1,600
false
true
0
16
300
630
317
313
null
null
fros1y/umbral
src/Coord.hs
bsd-3-clause
insetBounds :: Integer -> Bounds -> Bounds insetBounds i (Bounds l u) = (Bounds l' u') where offset = (Coord i i) l' = l + offset u' = u - offset
159
insetBounds :: Integer -> Bounds -> Bounds insetBounds i (Bounds l u) = (Bounds l' u') where offset = (Coord i i) l' = l + offset u' = u - offset
159
insetBounds i (Bounds l u) = (Bounds l' u') where offset = (Coord i i) l' = l + offset u' = u - offset
116
false
true
2
7
45
80
38
42
null
null
coolhacks/scripts-hacks
examples/shellcheck-master/ShellCheck/Parser.hs
mit
parseProblem level code msg = do pos <- getPosition parseProblemAt pos level code msg
93
parseProblem level code msg = do pos <- getPosition parseProblemAt pos level code msg
93
parseProblem level code msg = do pos <- getPosition parseProblemAt pos level code msg
93
false
false
0
7
21
32
14
18
null
null
jgoerzen/dtmconv
HaXml-1.12/src/Text/XML/HaXml/Combinators.hs
gpl-2.0
-- FILTER COMBINATORS -- | Sequential (/Irish/,/backwards/) composition o :: CFilter -> CFilter -> CFilter f `o` g = concatMap f . g
134
o :: CFilter -> CFilter -> CFilter f `o` g = concatMap f . g
60
f `o` g = concatMap f . g
25
true
true
0
6
24
43
21
22
null
null
leshchevds/ganeti
src/Ganeti/Constants.hs
bsd-2-clause
exportModeRemote :: String exportModeRemote = Types.exportModeToRaw ExportModeRemote
84
exportModeRemote :: String exportModeRemote = Types.exportModeToRaw ExportModeRemote
84
exportModeRemote = Types.exportModeToRaw ExportModeRemote
57
false
true
0
6
6
16
8
8
null
null
nushio3/ghc
compiler/deSugar/TmOracle.hs
bsd-3-clause
exprDeepLookup env (PmExprEq e1 e2) = PmExprEq (exprDeepLookup env e1) (exprDeepLookup env e2)
141
exprDeepLookup env (PmExprEq e1 e2) = PmExprEq (exprDeepLookup env e1) (exprDeepLookup env e2)
141
exprDeepLookup env (PmExprEq e1 e2) = PmExprEq (exprDeepLookup env e1) (exprDeepLookup env e2)
141
false
false
0
7
59
42
19
23
null
null
lpsmith/aeson
benchmarks/CompareWithJSON.hs
bsd-3-clause
decodeJ :: String -> J.JSValue decodeJ s = case J.decodeStrict s of J.Ok v -> v J.Error _ -> error "fail to parse via JSON"
133
decodeJ :: String -> J.JSValue decodeJ s = case J.decodeStrict s of J.Ok v -> v J.Error _ -> error "fail to parse via JSON"
133
decodeJ s = case J.decodeStrict s of J.Ok v -> v J.Error _ -> error "fail to parse via JSON"
102
false
true
0
9
34
54
25
29
null
null
wavewave/xournal-types
src/Data/Xournal/Predefined.hs
bsd-2-clause
predefined_highlighter_opacity :: Double predefined_highlighter_opacity = 0.5
78
predefined_highlighter_opacity :: Double predefined_highlighter_opacity = 0.5
77
predefined_highlighter_opacity = 0.5
36
false
true
0
4
6
11
6
5
null
null
hecrj/haskell-format
src/Language/Haskell/Format/Declaration.hs
bsd-3-clause
assertion :: Asst CommentedSrc -> Format assertion asst = case asst of ClassA _ qname types -> Format.intercalate " " (Atom.qname qname : map type' types) ParenA _ parenAsst -> "(" <> assertion parenAsst <> ")" _ -> error (show asst)
299
assertion :: Asst CommentedSrc -> Format assertion asst = case asst of ClassA _ qname types -> Format.intercalate " " (Atom.qname qname : map type' types) ParenA _ parenAsst -> "(" <> assertion parenAsst <> ")" _ -> error (show asst)
299
assertion asst = case asst of ClassA _ qname types -> Format.intercalate " " (Atom.qname qname : map type' types) ParenA _ parenAsst -> "(" <> assertion parenAsst <> ")" _ -> error (show asst)
258
false
true
6
12
106
97
46
51
null
null
randen/cabal
Cabal/Distribution/Simple/Build.hs
bsd-3-clause
-- | Translate a lib-style 'TestSuite' component into a lib + exe for building testSuiteLibV09AsLibAndExe :: PackageDescription -> TestSuite -> ComponentLocalBuildInfo -> LocalBuildInfo -> FilePath -> FilePath -> (PackageDescription, Library, ComponentLocalBuildInfo, LocalBuildInfo, IPI.InstalledPackageInfo, Executable, ComponentLocalBuildInfo) testSuiteLibV09AsLibAndExe pkg_descr test@TestSuite { testInterface = TestSuiteLibV09 _ m } clbi lbi distPref pwd = (pkg, lib, libClbi, lbi, ipi, exe, exeClbi) where bi = testBuildInfo test lib = Library { exposedModules = [ m ], reexportedModules = [], requiredSignatures = [], exposedSignatures = [], libExposed = True, libBuildInfo = bi } libClbi = LibComponentLocalBuildInfo { componentPackageDeps = componentPackageDeps clbi , componentPackageRenaming = componentPackageRenaming clbi , componentId = ComponentId $ display (packageId pkg) , componentCompatPackageKey = ComponentId $ display (packageId pkg) , componentExposedModules = [IPI.ExposedModule m Nothing Nothing] } pkg = pkg_descr { package = (package pkg_descr) { pkgName = PackageName (testName test) } , buildDepends = targetBuildDepends $ testBuildInfo test , executables = [] , testSuites = [] , library = Just lib } ipi = inplaceInstalledPackageInfo pwd distPref pkg (IPI.AbiHash "") lib lbi libClbi testDir = buildDir lbi </> stubName test </> stubName test ++ "-tmp" testLibDep = thisPackageVersion $ package pkg exe = Executable { exeName = stubName test, modulePath = stubFilePath test, buildInfo = (testBuildInfo test) { hsSourceDirs = [ testDir ], targetBuildDepends = testLibDep : (targetBuildDepends $ testBuildInfo test), targetBuildRenaming = Map.empty } } -- | The stub executable needs a new 'ComponentLocalBuildInfo' -- that exposes the relevant test suite library. exeClbi = ExeComponentLocalBuildInfo { componentPackageDeps = (IPI.installedComponentId ipi, packageId ipi) : (filter (\(_, x) -> let PackageName name = pkgName x in name == "Cabal" || name == "base") (componentPackageDeps clbi)), componentPackageRenaming = Map.insert (packageName ipi) defaultRenaming (componentPackageRenaming clbi) }
3,233
testSuiteLibV09AsLibAndExe :: PackageDescription -> TestSuite -> ComponentLocalBuildInfo -> LocalBuildInfo -> FilePath -> FilePath -> (PackageDescription, Library, ComponentLocalBuildInfo, LocalBuildInfo, IPI.InstalledPackageInfo, Executable, ComponentLocalBuildInfo) testSuiteLibV09AsLibAndExe pkg_descr test@TestSuite { testInterface = TestSuiteLibV09 _ m } clbi lbi distPref pwd = (pkg, lib, libClbi, lbi, ipi, exe, exeClbi) where bi = testBuildInfo test lib = Library { exposedModules = [ m ], reexportedModules = [], requiredSignatures = [], exposedSignatures = [], libExposed = True, libBuildInfo = bi } libClbi = LibComponentLocalBuildInfo { componentPackageDeps = componentPackageDeps clbi , componentPackageRenaming = componentPackageRenaming clbi , componentId = ComponentId $ display (packageId pkg) , componentCompatPackageKey = ComponentId $ display (packageId pkg) , componentExposedModules = [IPI.ExposedModule m Nothing Nothing] } pkg = pkg_descr { package = (package pkg_descr) { pkgName = PackageName (testName test) } , buildDepends = targetBuildDepends $ testBuildInfo test , executables = [] , testSuites = [] , library = Just lib } ipi = inplaceInstalledPackageInfo pwd distPref pkg (IPI.AbiHash "") lib lbi libClbi testDir = buildDir lbi </> stubName test </> stubName test ++ "-tmp" testLibDep = thisPackageVersion $ package pkg exe = Executable { exeName = stubName test, modulePath = stubFilePath test, buildInfo = (testBuildInfo test) { hsSourceDirs = [ testDir ], targetBuildDepends = testLibDep : (targetBuildDepends $ testBuildInfo test), targetBuildRenaming = Map.empty } } -- | The stub executable needs a new 'ComponentLocalBuildInfo' -- that exposes the relevant test suite library. exeClbi = ExeComponentLocalBuildInfo { componentPackageDeps = (IPI.installedComponentId ipi, packageId ipi) : (filter (\(_, x) -> let PackageName name = pkgName x in name == "Cabal" || name == "base") (componentPackageDeps clbi)), componentPackageRenaming = Map.insert (packageName ipi) defaultRenaming (componentPackageRenaming clbi) }
3,154
testSuiteLibV09AsLibAndExe pkg_descr test@TestSuite { testInterface = TestSuiteLibV09 _ m } clbi lbi distPref pwd = (pkg, lib, libClbi, lbi, ipi, exe, exeClbi) where bi = testBuildInfo test lib = Library { exposedModules = [ m ], reexportedModules = [], requiredSignatures = [], exposedSignatures = [], libExposed = True, libBuildInfo = bi } libClbi = LibComponentLocalBuildInfo { componentPackageDeps = componentPackageDeps clbi , componentPackageRenaming = componentPackageRenaming clbi , componentId = ComponentId $ display (packageId pkg) , componentCompatPackageKey = ComponentId $ display (packageId pkg) , componentExposedModules = [IPI.ExposedModule m Nothing Nothing] } pkg = pkg_descr { package = (package pkg_descr) { pkgName = PackageName (testName test) } , buildDepends = targetBuildDepends $ testBuildInfo test , executables = [] , testSuites = [] , library = Just lib } ipi = inplaceInstalledPackageInfo pwd distPref pkg (IPI.AbiHash "") lib lbi libClbi testDir = buildDir lbi </> stubName test </> stubName test ++ "-tmp" testLibDep = thisPackageVersion $ package pkg exe = Executable { exeName = stubName test, modulePath = stubFilePath test, buildInfo = (testBuildInfo test) { hsSourceDirs = [ testDir ], targetBuildDepends = testLibDep : (targetBuildDepends $ testBuildInfo test), targetBuildRenaming = Map.empty } } -- | The stub executable needs a new 'ComponentLocalBuildInfo' -- that exposes the relevant test suite library. exeClbi = ExeComponentLocalBuildInfo { componentPackageDeps = (IPI.installedComponentId ipi, packageId ipi) : (filter (\(_, x) -> let PackageName name = pkgName x in name == "Cabal" || name == "base") (componentPackageDeps clbi)), componentPackageRenaming = Map.insert (packageName ipi) defaultRenaming (componentPackageRenaming clbi) }
2,600
true
true
0
16
1,381
588
325
263
null
null
plow-technologies/distributed-simple-cell
src/SimpleStore/Cell/Distributed/Migration.hs
mit
-- | Insert a new store with a value, or overwrite the stored value if the value's key is already present in the cell getOrInsertStore :: (SimpleCellConstraint cell k src dst tm st) => st -> DistributedHandlerM urllist st () getOrInsertStore state = do localCell <- asks localCell (liftIO $ getStore localCell state) >>= flip maybe (liftIO . flip putSimpleStore state) (void $ makeHandler $ bimapEitherT show id $ EitherT $ insertStore localCell state) -- | Handler for migrations: Write or overwrite
509
getOrInsertStore :: (SimpleCellConstraint cell k src dst tm st) => st -> DistributedHandlerM urllist st () getOrInsertStore state = do localCell <- asks localCell (liftIO $ getStore localCell state) >>= flip maybe (liftIO . flip putSimpleStore state) (void $ makeHandler $ bimapEitherT show id $ EitherT $ insertStore localCell state) -- | Handler for migrations: Write or overwrite
391
getOrInsertStore state = do localCell <- asks localCell (liftIO $ getStore localCell state) >>= flip maybe (liftIO . flip putSimpleStore state) (void $ makeHandler $ bimapEitherT show id $ EitherT $ insertStore localCell state) -- | Handler for migrations: Write or overwrite
284
true
true
0
13
90
125
61
64
null
null
gcampax/ghc
compiler/utils/Outputable.hs
bsd-3-clause
qualPackage :: PprStyle -> QueryQualifyPackage qualPackage (PprUser q _) m = queryQualifyPackage q m
101
qualPackage :: PprStyle -> QueryQualifyPackage qualPackage (PprUser q _) m = queryQualifyPackage q m
101
qualPackage (PprUser q _) m = queryQualifyPackage q m
54
false
true
0
9
14
38
17
21
null
null
lukexi/ghc
compiler/nativeGen/X86/Regs.hs
bsd-3-clause
rcx = regSingle 2
19
rcx = regSingle 2
19
rcx = regSingle 2
19
false
false
1
5
5
12
4
8
null
null
emc2/chill
src/IR/FlatIR/LLVMGen/Metadata.hs
bsd-3-clause
mutabilityValue VolatileOnce = LLVM.mdString "writeonce"
56
mutabilityValue VolatileOnce = LLVM.mdString "writeonce"
56
mutabilityValue VolatileOnce = LLVM.mdString "writeonce"
56
false
false
0
6
4
14
6
8
null
null
asr/fotc
notes/fixed-points/Functors.hs
mit
-- The stream destructors. headS ∷ Stream a → a headS xs = case out xs of St x _ → x
97
headS ∷ Stream a → a headS xs = case out xs of St x _ → x
70
headS xs = case out xs of St x _ → x
49
true
true
0
8
34
46
20
26
null
null
felixsch/moonbase-ng
src/Moonbase/DBus.hs
lgpl-2.1
dbusMethod :: ObjectPath -> (Ref -> [Method]) -> Moon () dbusMethod objPath generator = do client <- view dbus <$> get ref <- ask liftIO $ export client objPath $ generator ref
185
dbusMethod :: ObjectPath -> (Ref -> [Method]) -> Moon () dbusMethod objPath generator = do client <- view dbus <$> get ref <- ask liftIO $ export client objPath $ generator ref
185
dbusMethod objPath generator = do client <- view dbus <$> get ref <- ask liftIO $ export client objPath $ generator ref
128
false
true
0
10
40
84
38
46
null
null
xarts19/GoHaskell
Main.hs
mit
keyPressed = do isKey <- c_kbhit if isKey then Just <$> c_getch else return Nothing
124
keyPressed = do isKey <- c_kbhit if isKey then Just <$> c_getch else return Nothing
124
keyPressed = do isKey <- c_kbhit if isKey then Just <$> c_getch else return Nothing
124
false
false
0
8
55
31
15
16
null
null
szehk/Haskell-Carbonara-Library
src/Data/Carbonara/Char.hs
bsd-3-clause
isNotOctDigit :: Char -> Bool isNotOctDigit = not . isOctDigit
62
isNotOctDigit :: Char -> Bool isNotOctDigit = not . isOctDigit
62
isNotOctDigit = not . isOctDigit
32
false
true
0
5
9
19
10
9
null
null
agocorona/ghcjs-perch
src/Internal/Perch.hs
mit
img = nelem "img"
22
img = nelem "img"
22
img = nelem "img"
22
false
false
1
5
8
13
4
9
null
null
copumpkin/vector-static
Data/Vector/Unboxed/Static.hs
bsd-3-clause
zip4 (Vec as) (Vec bs) (Vec cs) (Vec ds) = Vec (G.zip4 as bs cs ds)
67
zip4 (Vec as) (Vec bs) (Vec cs) (Vec ds) = Vec (G.zip4 as bs cs ds)
67
zip4 (Vec as) (Vec bs) (Vec cs) (Vec ds) = Vec (G.zip4 as bs cs ds)
67
false
false
0
8
15
56
27
29
null
null
kazu-yamamoto/http-types
Network/HTTP/Types/Status.hs
bsd-3-clause
-- | Use Proxy 305 useProxy305 :: Status useProxy305 = status305
64
useProxy305 :: Status useProxy305 = status305
45
useProxy305 = status305
23
true
true
0
6
10
19
8
11
null
null
apyrgio/snf-ganeti
src/Ganeti/HTools/Node.hs
bsd-2-clause
-- | Calculate the new spindle usage calcSpindleUse :: Bool -- Action: True = adding instance, False = removing it -> Node -> Instance.Instance -> Double calcSpindleUse _ (Node {exclStorage = True}) _ = 0.0
239
calcSpindleUse :: Bool -- Action: True = adding instance, False = removing it -> Node -> Instance.Instance -> Double calcSpindleUse _ (Node {exclStorage = True}) _ = 0.0
202
calcSpindleUse _ (Node {exclStorage = True}) _ = 0.0
52
true
true
4
9
67
54
26
28
null
null
Arguggi/Frinfo
src/Frinfo/Config.hs
mit
downSpeedIcon :: T.Text downSpeedIcon = "/home/arguggi/dotfiles/icons/xbm8x8/net_down_03.xbm"
93
downSpeedIcon :: T.Text downSpeedIcon = "/home/arguggi/dotfiles/icons/xbm8x8/net_down_03.xbm"
93
downSpeedIcon = "/home/arguggi/dotfiles/icons/xbm8x8/net_down_03.xbm"
69
false
true
0
5
5
13
7
6
null
null
donnie4w/tim
protocols/gen-hs/ITim_Client.hs
apache-2.0
send_timAck op arg_ab = do seq <- seqid seqn <- R.readIORef seq T.writeMessageBegin op ("timAck", T.M_ONEWAY, seqn) write_TimAck_args op (TimAck_args{timAck_args_ab=arg_ab}) T.writeMessageEnd op T.tFlush (T.getTransport op)
235
send_timAck op arg_ab = do seq <- seqid seqn <- R.readIORef seq T.writeMessageBegin op ("timAck", T.M_ONEWAY, seqn) write_TimAck_args op (TimAck_args{timAck_args_ab=arg_ab}) T.writeMessageEnd op T.tFlush (T.getTransport op)
235
send_timAck op arg_ab = do seq <- seqid seqn <- R.readIORef seq T.writeMessageBegin op ("timAck", T.M_ONEWAY, seqn) write_TimAck_args op (TimAck_args{timAck_args_ab=arg_ab}) T.writeMessageEnd op T.tFlush (T.getTransport op)
235
false
false
1
11
36
99
44
55
null
null
ollef/Grempa
Data/Parser/Grempa/Grammar.hs
bsd-3-clause
severalInter :: ( ToSym s x, ToSymT s x ~ a , ToSym s t, ToSymT s t ~ s , Typeable a, Typeable s) => t -> x -> Grammar s [a] severalInter tok x = do rec xs <- rule [ (:[]) <@> x , (:) <@> x <# tok <#> xs] return xs -- | Takes two symbols and combines them with @(:)@. -- -- Creates one new rule. -- -- This can for example be used instead of using both 'several' and 'several0' -- on the same symbol, as that will create three new rules, whereas the -- equivalent using 'cons' will only create two new rules. Example -- transformation: -- -- > xs0 <- several0 x -- > xs <- several x -- > ==> -- > xs0 <- several0 x -- > xs <- x `cons` xs0
724
severalInter :: ( ToSym s x, ToSymT s x ~ a , ToSym s t, ToSymT s t ~ s , Typeable a, Typeable s) => t -> x -> Grammar s [a] severalInter tok x = do rec xs <- rule [ (:[]) <@> x , (:) <@> x <# tok <#> xs] return xs -- | Takes two symbols and combines them with @(:)@. -- -- Creates one new rule. -- -- This can for example be used instead of using both 'several' and 'several0' -- on the same symbol, as that will create three new rules, whereas the -- equivalent using 'cons' will only create two new rules. Example -- transformation: -- -- > xs0 <- several0 x -- > xs <- several x -- > ==> -- > xs0 <- several0 x -- > xs <- x `cons` xs0
724
severalInter tok x = do rec xs <- rule [ (:[]) <@> x , (:) <@> x <# tok <#> xs] return xs -- | Takes two symbols and combines them with @(:)@. -- -- Creates one new rule. -- -- This can for example be used instead of using both 'several' and 'several0' -- on the same symbol, as that will create three new rules, whereas the -- equivalent using 'cons' will only create two new rules. Example -- transformation: -- -- > xs0 <- several0 x -- > xs <- several x -- > ==> -- > xs0 <- several0 x -- > xs <- x `cons` xs0
554
false
true
0
15
228
157
84
73
null
null
uuhan/Idris-dev
src/Idris/Output.hs
bsd-3-clause
iPrintFunTypes bnd n overloads = do ist <- getIState let ppo = ppOptionIst ist let infixes = idris_infixes ist let output = vsep (map (uncurry (ppOverload ppo infixes)) overloads) iRenderResult output where fullName ppo n | length overloads > 1 = prettyName True True bnd n | otherwise = prettyName True (ppopt_impl ppo) bnd n ppOverload ppo infixes n tm = fullName ppo n <+> colon <+> align tm
581
iPrintFunTypes bnd n overloads = do ist <- getIState let ppo = ppOptionIst ist let infixes = idris_infixes ist let output = vsep (map (uncurry (ppOverload ppo infixes)) overloads) iRenderResult output where fullName ppo n | length overloads > 1 = prettyName True True bnd n | otherwise = prettyName True (ppopt_impl ppo) bnd n ppOverload ppo infixes n tm = fullName ppo n <+> colon <+> align tm
581
iPrintFunTypes bnd n overloads = do ist <- getIState let ppo = ppOptionIst ist let infixes = idris_infixes ist let output = vsep (map (uncurry (ppOverload ppo infixes)) overloads) iRenderResult output where fullName ppo n | length overloads > 1 = prettyName True True bnd n | otherwise = prettyName True (ppopt_impl ppo) bnd n ppOverload ppo infixes n tm = fullName ppo n <+> colon <+> align tm
581
false
false
5
16
255
183
77
106
null
null
tolysz/yesod
yesod-core/Yesod/Core/Content.hs
mit
typeJpeg :: ContentType typeJpeg = "image/jpeg"
47
typeJpeg :: ContentType typeJpeg = "image/jpeg"
47
typeJpeg = "image/jpeg"
23
false
true
0
6
5
18
7
11
null
null
ashishnegi/hsalgos
src/LongestEdgePathInDAG.hs
bsd-3-clause
isValidPath :: [NodeId] -> DAG -> (Int, Int) -> Bool isValidPath path graph (width, height) = let heights = fmap (heightOfNode . flip nodeData graph) path incOrderHeights = any (\(a,b) -> a <= b) $ zip heights (drop 1 heights) validTransitions = all (isValidTransition width) $ zip path (drop 1 path) in incOrderHeights && validTransitions where isValidTransition width (nodeId1, nodeId2) = case nodeId1 - nodeId2 of 1 -> nodeId1 `mod` width /= 0 -- left -1 -> nodeId1 `mod` width /= (width - 1) -- right w -> (w == -width) || (w == width) --up/down
601
isValidPath :: [NodeId] -> DAG -> (Int, Int) -> Bool isValidPath path graph (width, height) = let heights = fmap (heightOfNode . flip nodeData graph) path incOrderHeights = any (\(a,b) -> a <= b) $ zip heights (drop 1 heights) validTransitions = all (isValidTransition width) $ zip path (drop 1 path) in incOrderHeights && validTransitions where isValidTransition width (nodeId1, nodeId2) = case nodeId1 - nodeId2 of 1 -> nodeId1 `mod` width /= 0 -- left -1 -> nodeId1 `mod` width /= (width - 1) -- right w -> (w == -width) || (w == width) --up/down
601
isValidPath path graph (width, height) = let heights = fmap (heightOfNode . flip nodeData graph) path incOrderHeights = any (\(a,b) -> a <= b) $ zip heights (drop 1 heights) validTransitions = all (isValidTransition width) $ zip path (drop 1 path) in incOrderHeights && validTransitions where isValidTransition width (nodeId1, nodeId2) = case nodeId1 - nodeId2 of 1 -> nodeId1 `mod` width /= 0 -- left -1 -> nodeId1 `mod` width /= (width - 1) -- right w -> (w == -width) || (w == width) --up/down
548
false
true
0
13
147
248
132
116
null
null
jwiegley/ghc-release
libraries/Cabal/cabal-install/Distribution/Client/Setup.hs
gpl-3.0
defaultMaxBackjumps :: Int defaultMaxBackjumps = 200
52
defaultMaxBackjumps :: Int defaultMaxBackjumps = 200
52
defaultMaxBackjumps = 200
25
false
true
0
4
5
11
6
5
null
null
peterokagey/haskellOEIS
src/External/A065880.hs
apache-2.0
a065880 n = n * a272760 n
25
a065880 n = n * a272760 n
25
a065880 n = n * a272760 n
25
false
false
0
6
6
16
7
9
null
null
sdiehl/ghc
compiler/GHC/Cmm/Info/Build.hs
bsd-3-clause
doSRTs :: DynFlags -> ModuleSRTInfo -> [(CAFEnv, [CmmDecl])] -> [(CAFSet, CmmDecl)] -> IO (ModuleSRTInfo, [CmmDeclSRTs]) doSRTs dflags moduleSRTInfo procs data_ = do us <- mkSplitUniqSupply 'u' -- Ignore the original grouping of decls, and combine all the -- CAFEnvs into a single CAFEnv. let static_data_env :: Map CLabel CAFSet static_data_env = Map.fromList $ flip map data_ $ \(set, decl) -> case decl of CmmProc{} -> pprPanic "doSRTs" (text "Proc in static data list:" <+> ppr decl) CmmData _ static -> case static of CmmStatics lbl _ _ _ -> (lbl, set) CmmStaticsRaw lbl _ -> (lbl, set) static_data :: Set CLabel static_data = Map.keysSet static_data_env (proc_envs, procss) = unzip procs cafEnv = mapUnions proc_envs decls = map snd data_ ++ concat procss staticFuns = mapFromList (getStaticFuns decls) -- Put the decls in dependency order. Why? So that we can implement -- [Inline] and [Filter]. If we need to refer to an SRT that has -- a single entry, we use the entry itself, which means that we -- don't need to generate the singleton SRT in the first place. But -- to do this we need to process blocks before things that depend on -- them. let sccs :: [SCC (SomeLabel, CAFLabel, Set CAFLabel)] sccs = {-# SCC depAnalSRTs #-} depAnalSRTs cafEnv static_data_env decls cafsWithSRTs :: [(Label, CAFLabel, Set CAFLabel)] cafsWithSRTs = getCAFs cafEnv decls srtTraceM "doSRTs" (text "data:" <+> ppr data_ $$ text "procs:" <+> ppr procs $$ text "static_data_env:" <+> ppr static_data_env $$ text "sccs:" <+> ppr sccs $$ text "cafsWithSRTs:" <+> ppr cafsWithSRTs) -- On each strongly-connected group of decls, construct the SRT -- closures and the SRT fields for info tables. let result :: [ ( [CmmDeclSRTs] -- generated SRTs , [(Label, CLabel)] -- SRT fields for info tables , [(Label, [SRTEntry])] -- SRTs to attach to static functions , Bool -- Whether the group has CAF references ) ] (result, moduleSRTInfo') = initUs_ us $ flip runStateT moduleSRTInfo $ do nonCAFs <- mapM (doSCC dflags staticFuns static_data) sccs cAFs <- forM cafsWithSRTs $ \(l, cafLbl, cafs) -> oneSRT dflags staticFuns [BlockLabel l] [cafLbl] True{-is a CAF-} cafs static_data return (nonCAFs ++ cAFs) (srt_declss, pairs, funSRTs, has_caf_refs) = unzip4 result srt_decls = concat srt_declss unless (null srt_decls) $ dumpIfSet_dyn dflags Opt_D_dump_srts "SRTs" FormatCMM (ppr srt_decls) -- Next, update the info tables with the SRTs let srtFieldMap = mapFromList (concat pairs) funSRTMap = mapFromList (concat funSRTs) has_caf_refs' = or has_caf_refs decls' = concatMap (updInfoSRTs dflags srtFieldMap funSRTMap has_caf_refs') decls -- Finally update CafInfos for raw static literals (CmmStaticsRaw). Those are -- not analysed in oneSRT so we never add entries for them to the SRTMap. let srtMap_w_raws = foldl' (\(srtMap :: SRTMap) (_, decl) -> case decl of CmmData _ CmmStatics{} -> -- already updated by oneSRT srtMap CmmData _ (CmmStaticsRaw lbl _) | isIdLabel lbl -> -- not analysed by oneSRT, declare it non-CAFFY here Map.insert (mkCAFLabel lbl) Nothing srtMap | otherwise -> -- Not an IdLabel, ignore srtMap CmmProc{} -> pprPanic "doSRTs" (text "Found Proc in static data list:" <+> ppr decl)) (moduleSRTMap moduleSRTInfo') data_ return (moduleSRTInfo'{ moduleSRTMap = srtMap_w_raws }, srt_decls ++ decls') -- | Build the SRT for a strongly-connected component of blocks
4,213
doSRTs :: DynFlags -> ModuleSRTInfo -> [(CAFEnv, [CmmDecl])] -> [(CAFSet, CmmDecl)] -> IO (ModuleSRTInfo, [CmmDeclSRTs]) doSRTs dflags moduleSRTInfo procs data_ = do us <- mkSplitUniqSupply 'u' -- Ignore the original grouping of decls, and combine all the -- CAFEnvs into a single CAFEnv. let static_data_env :: Map CLabel CAFSet static_data_env = Map.fromList $ flip map data_ $ \(set, decl) -> case decl of CmmProc{} -> pprPanic "doSRTs" (text "Proc in static data list:" <+> ppr decl) CmmData _ static -> case static of CmmStatics lbl _ _ _ -> (lbl, set) CmmStaticsRaw lbl _ -> (lbl, set) static_data :: Set CLabel static_data = Map.keysSet static_data_env (proc_envs, procss) = unzip procs cafEnv = mapUnions proc_envs decls = map snd data_ ++ concat procss staticFuns = mapFromList (getStaticFuns decls) -- Put the decls in dependency order. Why? So that we can implement -- [Inline] and [Filter]. If we need to refer to an SRT that has -- a single entry, we use the entry itself, which means that we -- don't need to generate the singleton SRT in the first place. But -- to do this we need to process blocks before things that depend on -- them. let sccs :: [SCC (SomeLabel, CAFLabel, Set CAFLabel)] sccs = {-# SCC depAnalSRTs #-} depAnalSRTs cafEnv static_data_env decls cafsWithSRTs :: [(Label, CAFLabel, Set CAFLabel)] cafsWithSRTs = getCAFs cafEnv decls srtTraceM "doSRTs" (text "data:" <+> ppr data_ $$ text "procs:" <+> ppr procs $$ text "static_data_env:" <+> ppr static_data_env $$ text "sccs:" <+> ppr sccs $$ text "cafsWithSRTs:" <+> ppr cafsWithSRTs) -- On each strongly-connected group of decls, construct the SRT -- closures and the SRT fields for info tables. let result :: [ ( [CmmDeclSRTs] -- generated SRTs , [(Label, CLabel)] -- SRT fields for info tables , [(Label, [SRTEntry])] -- SRTs to attach to static functions , Bool -- Whether the group has CAF references ) ] (result, moduleSRTInfo') = initUs_ us $ flip runStateT moduleSRTInfo $ do nonCAFs <- mapM (doSCC dflags staticFuns static_data) sccs cAFs <- forM cafsWithSRTs $ \(l, cafLbl, cafs) -> oneSRT dflags staticFuns [BlockLabel l] [cafLbl] True{-is a CAF-} cafs static_data return (nonCAFs ++ cAFs) (srt_declss, pairs, funSRTs, has_caf_refs) = unzip4 result srt_decls = concat srt_declss unless (null srt_decls) $ dumpIfSet_dyn dflags Opt_D_dump_srts "SRTs" FormatCMM (ppr srt_decls) -- Next, update the info tables with the SRTs let srtFieldMap = mapFromList (concat pairs) funSRTMap = mapFromList (concat funSRTs) has_caf_refs' = or has_caf_refs decls' = concatMap (updInfoSRTs dflags srtFieldMap funSRTMap has_caf_refs') decls -- Finally update CafInfos for raw static literals (CmmStaticsRaw). Those are -- not analysed in oneSRT so we never add entries for them to the SRTMap. let srtMap_w_raws = foldl' (\(srtMap :: SRTMap) (_, decl) -> case decl of CmmData _ CmmStatics{} -> -- already updated by oneSRT srtMap CmmData _ (CmmStaticsRaw lbl _) | isIdLabel lbl -> -- not analysed by oneSRT, declare it non-CAFFY here Map.insert (mkCAFLabel lbl) Nothing srtMap | otherwise -> -- Not an IdLabel, ignore srtMap CmmProc{} -> pprPanic "doSRTs" (text "Found Proc in static data list:" <+> ppr decl)) (moduleSRTMap moduleSRTInfo') data_ return (moduleSRTInfo'{ moduleSRTMap = srtMap_w_raws }, srt_decls ++ decls') -- | Build the SRT for a strongly-connected component of blocks
4,212
doSRTs dflags moduleSRTInfo procs data_ = do us <- mkSplitUniqSupply 'u' -- Ignore the original grouping of decls, and combine all the -- CAFEnvs into a single CAFEnv. let static_data_env :: Map CLabel CAFSet static_data_env = Map.fromList $ flip map data_ $ \(set, decl) -> case decl of CmmProc{} -> pprPanic "doSRTs" (text "Proc in static data list:" <+> ppr decl) CmmData _ static -> case static of CmmStatics lbl _ _ _ -> (lbl, set) CmmStaticsRaw lbl _ -> (lbl, set) static_data :: Set CLabel static_data = Map.keysSet static_data_env (proc_envs, procss) = unzip procs cafEnv = mapUnions proc_envs decls = map snd data_ ++ concat procss staticFuns = mapFromList (getStaticFuns decls) -- Put the decls in dependency order. Why? So that we can implement -- [Inline] and [Filter]. If we need to refer to an SRT that has -- a single entry, we use the entry itself, which means that we -- don't need to generate the singleton SRT in the first place. But -- to do this we need to process blocks before things that depend on -- them. let sccs :: [SCC (SomeLabel, CAFLabel, Set CAFLabel)] sccs = {-# SCC depAnalSRTs #-} depAnalSRTs cafEnv static_data_env decls cafsWithSRTs :: [(Label, CAFLabel, Set CAFLabel)] cafsWithSRTs = getCAFs cafEnv decls srtTraceM "doSRTs" (text "data:" <+> ppr data_ $$ text "procs:" <+> ppr procs $$ text "static_data_env:" <+> ppr static_data_env $$ text "sccs:" <+> ppr sccs $$ text "cafsWithSRTs:" <+> ppr cafsWithSRTs) -- On each strongly-connected group of decls, construct the SRT -- closures and the SRT fields for info tables. let result :: [ ( [CmmDeclSRTs] -- generated SRTs , [(Label, CLabel)] -- SRT fields for info tables , [(Label, [SRTEntry])] -- SRTs to attach to static functions , Bool -- Whether the group has CAF references ) ] (result, moduleSRTInfo') = initUs_ us $ flip runStateT moduleSRTInfo $ do nonCAFs <- mapM (doSCC dflags staticFuns static_data) sccs cAFs <- forM cafsWithSRTs $ \(l, cafLbl, cafs) -> oneSRT dflags staticFuns [BlockLabel l] [cafLbl] True{-is a CAF-} cafs static_data return (nonCAFs ++ cAFs) (srt_declss, pairs, funSRTs, has_caf_refs) = unzip4 result srt_decls = concat srt_declss unless (null srt_decls) $ dumpIfSet_dyn dflags Opt_D_dump_srts "SRTs" FormatCMM (ppr srt_decls) -- Next, update the info tables with the SRTs let srtFieldMap = mapFromList (concat pairs) funSRTMap = mapFromList (concat funSRTs) has_caf_refs' = or has_caf_refs decls' = concatMap (updInfoSRTs dflags srtFieldMap funSRTMap has_caf_refs') decls -- Finally update CafInfos for raw static literals (CmmStaticsRaw). Those are -- not analysed in oneSRT so we never add entries for them to the SRTMap. let srtMap_w_raws = foldl' (\(srtMap :: SRTMap) (_, decl) -> case decl of CmmData _ CmmStatics{} -> -- already updated by oneSRT srtMap CmmData _ (CmmStaticsRaw lbl _) | isIdLabel lbl -> -- not analysed by oneSRT, declare it non-CAFFY here Map.insert (mkCAFLabel lbl) Nothing srtMap | otherwise -> -- Not an IdLabel, ignore srtMap CmmProc{} -> pprPanic "doSRTs" (text "Found Proc in static data list:" <+> ppr decl)) (moduleSRTMap moduleSRTInfo') data_ return (moduleSRTInfo'{ moduleSRTMap = srtMap_w_raws }, srt_decls ++ decls') -- | Build the SRT for a strongly-connected component of blocks
4,081
false
true
0
20
1,382
910
468
442
null
null
anttisalonen/nix
src/Filter.hs
gpl-3.0
filterAll (f:fs) xs = filterAll fs (filter f xs)
48
filterAll (f:fs) xs = filterAll fs (filter f xs)
48
filterAll (f:fs) xs = filterAll fs (filter f xs)
48
false
false
0
7
8
31
15
16
null
null
acowley/ghc
compiler/prelude/PrelNames.hs
bsd-3-clause
ordClassKey = mkPreludeClassUnique 12
49
ordClassKey = mkPreludeClassUnique 12
49
ordClassKey = mkPreludeClassUnique 12
49
false
false
0
5
15
9
4
5
null
null
fumieval/audiovisual
src/Data/Graphics/Bitmap.hs
bsd-3-clause
bbox :: Bitmap -> B.Box V2 Int bbox (Bitmap (C.Image w h _) (V2 x y) _) = B.Box (V2 x y) (V2 (x+w) (y+h))
105
bbox :: Bitmap -> B.Box V2 Int bbox (Bitmap (C.Image w h _) (V2 x y) _) = B.Box (V2 x y) (V2 (x+w) (y+h))
105
bbox (Bitmap (C.Image w h _) (V2 x y) _) = B.Box (V2 x y) (V2 (x+w) (y+h))
74
false
true
0
9
24
96
47
49
null
null
gazay/faker
src/Faker/Business.hs
mit
-- | Returns random business-like credit card number, i.e. "1234-2121-1221-1211" creditCardNumber :: Faker String creditCardNumber = randomBusinessWord "credit_card_number"
172
creditCardNumber :: Faker String creditCardNumber = randomBusinessWord "credit_card_number"
91
creditCardNumber = randomBusinessWord "credit_card_number"
58
true
true
0
5
17
18
9
9
null
null
guilt/webify
src/Utils.hs
mit
toStrict :: BL.ByteString -> B.ByteString toStrict = B.concat . BL.toChunks
75
toStrict :: BL.ByteString -> B.ByteString toStrict = B.concat . BL.toChunks
75
toStrict = B.concat . BL.toChunks
33
false
true
0
8
9
34
15
19
null
null
codeq/language-py
src/Language/Py/LexerUtils.hs
bsd-3-clause
utf8Encode :: Char -> [Word8] utf8Encode = map fromIntegral . go . ord where go oc | oc <= 0x7f = [oc] | oc <= 0x7ff = [ 0xc0 + (oc `Data.Bits.shiftR` 6) , 0x80 + oc Data.Bits..&. 0x3f ] | oc <= 0xffff = [ 0xe0 + (oc `Data.Bits.shiftR` 12) , 0x80 + ((oc `Data.Bits.shiftR` 6) Data.Bits..&. 0x3f) , 0x80 + oc Data.Bits..&. 0x3f ] | otherwise = [ 0xf0 + (oc `Data.Bits.shiftR` 18) , 0x80 + ((oc `Data.Bits.shiftR` 12) Data.Bits..&. 0x3f) , 0x80 + ((oc `Data.Bits.shiftR` 6) Data.Bits..&. 0x3f) , 0x80 + oc Data.Bits..&. 0x3f ]
821
utf8Encode :: Char -> [Word8] utf8Encode = map fromIntegral . go . ord where go oc | oc <= 0x7f = [oc] | oc <= 0x7ff = [ 0xc0 + (oc `Data.Bits.shiftR` 6) , 0x80 + oc Data.Bits..&. 0x3f ] | oc <= 0xffff = [ 0xe0 + (oc `Data.Bits.shiftR` 12) , 0x80 + ((oc `Data.Bits.shiftR` 6) Data.Bits..&. 0x3f) , 0x80 + oc Data.Bits..&. 0x3f ] | otherwise = [ 0xf0 + (oc `Data.Bits.shiftR` 18) , 0x80 + ((oc `Data.Bits.shiftR` 12) Data.Bits..&. 0x3f) , 0x80 + ((oc `Data.Bits.shiftR` 6) Data.Bits..&. 0x3f) , 0x80 + oc Data.Bits..&. 0x3f ]
821
utf8Encode = map fromIntegral . go . ord where go oc | oc <= 0x7f = [oc] | oc <= 0x7ff = [ 0xc0 + (oc `Data.Bits.shiftR` 6) , 0x80 + oc Data.Bits..&. 0x3f ] | oc <= 0xffff = [ 0xe0 + (oc `Data.Bits.shiftR` 12) , 0x80 + ((oc `Data.Bits.shiftR` 6) Data.Bits..&. 0x3f) , 0x80 + oc Data.Bits..&. 0x3f ] | otherwise = [ 0xf0 + (oc `Data.Bits.shiftR` 18) , 0x80 + ((oc `Data.Bits.shiftR` 12) Data.Bits..&. 0x3f) , 0x80 + ((oc `Data.Bits.shiftR` 6) Data.Bits..&. 0x3f) , 0x80 + oc Data.Bits..&. 0x3f ]
791
false
true
0
11
394
273
156
117
null
null
adinapoli/Shelly.hs
src/Shelly.hs
bsd-3-clause
traceCanonicPath :: (Text -> Text) -> FilePath -> Sh FilePath traceCanonicPath = tracePath canonic
98
traceCanonicPath :: (Text -> Text) -> FilePath -> Sh FilePath traceCanonicPath = tracePath canonic
98
traceCanonicPath = tracePath canonic
36
false
true
0
7
13
32
16
16
null
null
ssaavedra/liquidhaskell
tests/pos/StructRec.hs
bsd-3-clause
insert y (Ln x xs) | y <= x = Ln y (Ln x xs) | otherwise = Ln x (insert y xs)
94
insert y (Ln x xs) | y <= x = Ln y (Ln x xs) | otherwise = Ln x (insert y xs)
94
insert y (Ln x xs) | y <= x = Ln y (Ln x xs) | otherwise = Ln x (insert y xs)
94
false
false
0
8
39
68
29
39
null
null
jcpetruzza/haskell-ast
src/Language/Haskell/AST/HSE.hs
bsd-3-clause
fromHseActivation (E.ActiveUntil l k) = Pragmas.ActiveUntil l k
64
fromHseActivation (E.ActiveUntil l k) = Pragmas.ActiveUntil l k
64
fromHseActivation (E.ActiveUntil l k) = Pragmas.ActiveUntil l k
64
false
false
0
7
8
27
12
15
null
null
damianfral/clay
src/Clay/Font.hs
bsd-3-clause
monospace = GenericFontFamily "monospace"
41
monospace = GenericFontFamily "monospace"
41
monospace = GenericFontFamily "monospace"
41
false
false
0
5
3
9
4
5
null
null
Teino1978-Corp/Teino1978-Corp-alex
src/NFA.hs
bsd-3-clause
bytesEdge _ _ _ _ = undefined
29
bytesEdge _ _ _ _ = undefined
29
bytesEdge _ _ _ _ = undefined
29
false
false
0
5
6
15
7
8
null
null
dorchard/camfort
tests/Camfort/Specification/StencilsSpec.hs
apache-2.0
-- Indices for the 2D five point stencil (deliberately in an odd order) fivepoint = [ Cons (-1) (Cons 0 Nil), Cons 0 (Cons (-1) Nil) , Cons 1 (Cons 0 Nil) , Cons 0 (Cons 1 Nil), Cons 0 (Cons 0 Nil) ]
223
fivepoint = [ Cons (-1) (Cons 0 Nil), Cons 0 (Cons (-1) Nil) , Cons 1 (Cons 0 Nil) , Cons 0 (Cons 1 Nil), Cons 0 (Cons 0 Nil) ]
151
fivepoint = [ Cons (-1) (Cons 0 Nil), Cons 0 (Cons (-1) Nil) , Cons 1 (Cons 0 Nil) , Cons 0 (Cons 1 Nil), Cons 0 (Cons 0 Nil) ]
151
true
false
0
10
67
97
50
47
null
null
dmjio/CompactMap
src/Data/CompactMap/Index.hs
bsd-3-clause
newKeyCursor :: Buffer -> Lazy.ByteString -> IO (Ptr KeyCursor) newKeyCursor buffer keyE = withBytes buffer (intSize*2 + keyLen) $ \keyPtr -> do poke (castPtr keyPtr) nullPtr putByteString (keyPtr `plusPtr` intSize) keyE keyLen return keyPtr where keyLen = fromIntegral $ Lazy.length keyE
320
newKeyCursor :: Buffer -> Lazy.ByteString -> IO (Ptr KeyCursor) newKeyCursor buffer keyE = withBytes buffer (intSize*2 + keyLen) $ \keyPtr -> do poke (castPtr keyPtr) nullPtr putByteString (keyPtr `plusPtr` intSize) keyE keyLen return keyPtr where keyLen = fromIntegral $ Lazy.length keyE
320
newKeyCursor buffer keyE = withBytes buffer (intSize*2 + keyLen) $ \keyPtr -> do poke (castPtr keyPtr) nullPtr putByteString (keyPtr `plusPtr` intSize) keyE keyLen return keyPtr where keyLen = fromIntegral $ Lazy.length keyE
256
false
true
3
11
72
125
57
68
null
null
anand-singh/either
src/Data/Either/Validation.hs
bsd-3-clause
validationToEither :: Validation e a -> Either e a validationToEither x = case x of Failure e -> Left e Success a -> Right a
129
validationToEither :: Validation e a -> Either e a validationToEither x = case x of Failure e -> Left e Success a -> Right a
129
validationToEither x = case x of Failure e -> Left e Success a -> Right a
78
false
true
0
8
29
56
25
31
null
null
ptitfred/ftv-vods
src/YouTube/Services.hs
bsd-3-clause
listVideos :: [YouTubeId] -> Client Videos listVideos vIds = toList <$> listVideosBatch (take 50 vIds) (drop 50 vIds)
117
listVideos :: [YouTubeId] -> Client Videos listVideos vIds = toList <$> listVideosBatch (take 50 vIds) (drop 50 vIds)
117
listVideos vIds = toList <$> listVideosBatch (take 50 vIds) (drop 50 vIds)
74
false
true
0
8
17
49
24
25
null
null
holzensp/ghc
compiler/cmm/CLabel.hs
bsd-3-clause
labelType (SRTLabel _) = DataLabel
59
labelType (SRTLabel _) = DataLabel
59
labelType (SRTLabel _) = DataLabel
59
false
false
0
6
29
16
7
9
null
null
WraithM/CoreCompiler
src/Core/LambdaLift.hs
bsd-3-clause
collectCsE (Constr i j) = ([], Constr i j)
42
collectCsE (Constr i j) = ([], Constr i j)
42
collectCsE (Constr i j) = ([], Constr i j)
42
false
false
0
7
8
30
15
15
null
null
thalerjonathan/phd
coding/learning/haskell/grahambook/Code_Solutions/tictactoe.hs
gpl-3.0
won :: Grid -> Bool won g = wins O g || wins X g
48
won :: Grid -> Bool won g = wins O g || wins X g
48
won g = wins O g || wins X g
28
false
true
0
6
14
35
16
19
null
null
romanb/amazonka
amazonka-redshift/gen/Network/AWS/Redshift/DescribeClusterSnapshots.hs
mpl-2.0
-- | The type of snapshots for which you are requesting information. By default, -- snapshots of all types are returned. -- -- Valid Values: 'automated' | 'manual' dcs1SnapshotType :: Lens' DescribeClusterSnapshots (Maybe Text) dcs1SnapshotType = lens _dcs1SnapshotType (\s a -> s { _dcs1SnapshotType = a })
307
dcs1SnapshotType :: Lens' DescribeClusterSnapshots (Maybe Text) dcs1SnapshotType = lens _dcs1SnapshotType (\s a -> s { _dcs1SnapshotType = a })
143
dcs1SnapshotType = lens _dcs1SnapshotType (\s a -> s { _dcs1SnapshotType = a })
79
true
true
0
9
46
49
28
21
null
null
lucasdicioccio/haskell-paris-src
app.hs
apache-2.0
deleteAndRedirect db path model oId = do act <- liftIO $ db $ delete' model oId liftIO $ print act either (\_ -> raise "could not delete from DB") (\_ -> redirect path) act -- Like MongoDB's insert but on a BDoc instance
259
deleteAndRedirect db path model oId = do act <- liftIO $ db $ delete' model oId liftIO $ print act either (\_ -> raise "could not delete from DB") (\_ -> redirect path) act -- Like MongoDB's insert but on a BDoc instance
259
deleteAndRedirect db path model oId = do act <- liftIO $ db $ delete' model oId liftIO $ print act either (\_ -> raise "could not delete from DB") (\_ -> redirect path) act -- Like MongoDB's insert but on a BDoc instance
259
false
false
0
10
81
78
37
41
null
null
iblumenfeld/saw-core
src/Verifier/SAW/Typechecker/Context.hs
bsd-3-clause
ppTCTermGen d _ (TCLocalDef i) | 0 <= i && i < length d = d !! i | otherwise = text $ "Bad local var index " ++ show i
149
ppTCTermGen d _ (TCLocalDef i) | 0 <= i && i < length d = d !! i | otherwise = text $ "Bad local var index " ++ show i
149
ppTCTermGen d _ (TCLocalDef i) | 0 <= i && i < length d = d !! i | otherwise = text $ "Bad local var index " ++ show i
149
false
false
0
10
61
64
29
35
null
null
phylake/avm3
vm/store.hs
mit
xform_opCode {- 0x5F -} i u d s m t (Abc.FindDef) = [FindDef]
61
xform_opCode {- 0x5F -} i u d s m t (Abc.FindDef) = [FindDef]
61
xform_opCode {- 0x5F -} i u d s m t (Abc.FindDef) = [FindDef]
61
false
false
0
7
12
30
16
14
null
null
yiannist/ganeti
src/Ganeti/Constants.hs
bsd-2-clause
sshsSshHostKey :: String sshsSshHostKey = "ssh_host_key"
56
sshsSshHostKey :: String sshsSshHostKey = "ssh_host_key"
56
sshsSshHostKey = "ssh_host_key"
31
false
true
0
4
5
11
6
5
null
null
saturday06/FrameworkBenchmarks
frameworks/Haskell/yesod/yesod-postgres/src/Main.hs
bsd-3-clause
getFortunesR :: Handler Html getFortunesR = do fortunesFromDb <- runPg $ selectList [] [] let fortunes = sortBy (compare `on` fortuneMessage . entityVal) $ (Entity (toSqlKey 0) Fortune{fortuneMessage="Additional fortune added at request time."}):fortunesFromDb defaultLayout $ do setTitle "Fortunes" [whamlet| <table> <tr> <th>id <th>message $forall fortune <- fortunes <tr> <td>#{entityKey fortune} <td>#{fortuneMessage $ entityVal fortune} |]
622
getFortunesR :: Handler Html getFortunesR = do fortunesFromDb <- runPg $ selectList [] [] let fortunes = sortBy (compare `on` fortuneMessage . entityVal) $ (Entity (toSqlKey 0) Fortune{fortuneMessage="Additional fortune added at request time."}):fortunesFromDb defaultLayout $ do setTitle "Fortunes" [whamlet| <table> <tr> <th>id <th>message $forall fortune <- fortunes <tr> <td>#{entityKey fortune} <td>#{fortuneMessage $ entityVal fortune} |]
622
getFortunesR = do fortunesFromDb <- runPg $ selectList [] [] let fortunes = sortBy (compare `on` fortuneMessage . entityVal) $ (Entity (toSqlKey 0) Fortune{fortuneMessage="Additional fortune added at request time."}):fortunesFromDb defaultLayout $ do setTitle "Fortunes" [whamlet| <table> <tr> <th>id <th>message $forall fortune <- fortunes <tr> <td>#{entityKey fortune} <td>#{fortuneMessage $ entityVal fortune} |]
593
false
true
0
15
232
115
56
59
null
null
CindyLinz/Haskell-HVG
src/HVG/ContextState.hs
bsd-3-clause
setTextAlign :: TextAlign -> Builder info ContextState draw () setTextAlign val = modifyStructState $ \ctx -> ctx{ctxTextAlign = val}
133
setTextAlign :: TextAlign -> Builder info ContextState draw () setTextAlign val = modifyStructState $ \ctx -> ctx{ctxTextAlign = val}
133
setTextAlign val = modifyStructState $ \ctx -> ctx{ctxTextAlign = val}
70
false
true
0
8
18
46
24
22
null
null
mainland/nikola
src/Data/Vector/CUDA/UnboxedForeign.hs
bsd-3-clause
map = G.map
11
map = G.map
11
map = G.map
11
false
false
1
6
2
12
4
8
null
null
Ming-Tang/FP15
tests/ArbitraryTokens.hs
mit
a <:- b = a <:> pure b
22
a <:- b = a <:> pure b
22
a <:- b = a <:> pure b
22
false
false
2
6
7
21
9
12
null
null
eijian/deeplearning
CNN/ActLayer.hs
bsd-3-clause
{- | deactivate IN : activation function image difference from previous layer OUT: difference and updated layer >>> fst $ deactivate relu [fromLists [[1.5,(-2.0)]]] [fromLists [[0.5,0.1]]] [(1><2) [ 0.5, 0.0 ]] -} m0 :: Matrix R m0 = fromLists [[0.0]]
275
m0 :: Matrix R m0 = fromLists [[0.0]]
37
m0 = fromLists [[0.0]]
22
true
true
0
6
63
29
14
15
null
null
kmilner/tamarin-prover
lib/theory/src/Theory/Tools/Wellformedness.hs
gpl-3.0
-- | Lower-case a string. lowerCase :: String -> String lowerCase = map toLower
79
lowerCase :: String -> String lowerCase = map toLower
53
lowerCase = map toLower
23
true
true
0
5
13
19
10
9
null
null
grandpascorpion/canon
Math/NumberTheory/Canon/AurifCyclo.hs
gpl-3.0
aurCandDec :: Integer -> Integer -> Bool -> Maybe (Integer, Integer) aurCandDec xi yi b = f (fst $ crFromI xi) (fst $ crFromI yi) where f xp yp = aurCandDecI x y n (fst $ crFromI n) b where n = gcd (crMaxRoot $ crAbs x) (crMaxRoot $ crAbs y) gxy = crGCD xp yp (x, y) = (crDivStrict xp gxy, crDivStrict yp gxy) -- this will fix the input to be relatively prime -- toDo: incorporate the factorization status when determining the correct gcd. -- Don't call this I(nternal) function directly. The function assumes that x and y are relatively prime. Currently uses Brent logic only.
736
aurCandDec :: Integer -> Integer -> Bool -> Maybe (Integer, Integer) aurCandDec xi yi b = f (fst $ crFromI xi) (fst $ crFromI yi) where f xp yp = aurCandDecI x y n (fst $ crFromI n) b where n = gcd (crMaxRoot $ crAbs x) (crMaxRoot $ crAbs y) gxy = crGCD xp yp (x, y) = (crDivStrict xp gxy, crDivStrict yp gxy) -- this will fix the input to be relatively prime -- toDo: incorporate the factorization status when determining the correct gcd. -- Don't call this I(nternal) function directly. The function assumes that x and y are relatively prime. Currently uses Brent logic only.
736
aurCandDec xi yi b = f (fst $ crFromI xi) (fst $ crFromI yi) where f xp yp = aurCandDecI x y n (fst $ crFromI n) b where n = gcd (crMaxRoot $ crAbs x) (crMaxRoot $ crAbs y) gxy = crGCD xp yp (x, y) = (crDivStrict xp gxy, crDivStrict yp gxy) -- this will fix the input to be relatively prime -- toDo: incorporate the factorization status when determining the correct gcd. -- Don't call this I(nternal) function directly. The function assumes that x and y are relatively prime. Currently uses Brent logic only.
667
false
true
0
10
266
176
88
88
null
null
DavidAlphaFox/sblog
src/App/Types.hs
bsd-3-clause
setTplLayout :: FilePath -> Response () setTplLayout layout = do lift $ modify $ (\s -> s {tplLayout = layout} )
115
setTplLayout :: FilePath -> Response () setTplLayout layout = do lift $ modify $ (\s -> s {tplLayout = layout} )
115
setTplLayout layout = do lift $ modify $ (\s -> s {tplLayout = layout} )
75
false
true
0
11
23
50
26
24
null
null
diku-dk/futhark
src/Futhark/CodeGen/Backends/MulticoreWASM.hs
isc
fRepMyRep :: Imp.Definitions Imp.Multicore -> [JSEntryPoint] fRepMyRep prog = let Imp.Functions fs = Imp.defFuns prog function (Imp.Function entry _ _ _ res args) = do n <- entry Just $ JSEntryPoint { name = nameToString n, parameters = map (extToString . snd) args, ret = map extToString res } in mapMaybe (function . snd) fs
415
fRepMyRep :: Imp.Definitions Imp.Multicore -> [JSEntryPoint] fRepMyRep prog = let Imp.Functions fs = Imp.defFuns prog function (Imp.Function entry _ _ _ res args) = do n <- entry Just $ JSEntryPoint { name = nameToString n, parameters = map (extToString . snd) args, ret = map extToString res } in mapMaybe (function . snd) fs
415
fRepMyRep prog = let Imp.Functions fs = Imp.defFuns prog function (Imp.Function entry _ _ _ res args) = do n <- entry Just $ JSEntryPoint { name = nameToString n, parameters = map (extToString . snd) args, ret = map extToString res } in mapMaybe (function . snd) fs
354
false
true
0
16
143
149
71
78
null
null
stumped2/school
CS381/hw1/Hw1.hs
apache-2.0
ppBread :: Bread -> String ppBread Rye = "rye"
46
ppBread :: Bread -> String ppBread Rye = "rye"
46
ppBread Rye = "rye"
19
false
true
0
7
8
24
10
14
null
null
onponomarev/ganeti
src/Ganeti/Constants.hs
bsd-2-clause
osScriptExport :: String osScriptExport = "export"
50
osScriptExport :: String osScriptExport = "export"
50
osScriptExport = "export"
25
false
true
0
6
5
18
7
11
null
null
piyush-kurur/yesod
yesod/main.hs
mit
main :: IO () main = do o <- execParser =<< injectDefaults "yesod" [ ("yesod.devel.extracabalarg" , \o args -> o { optCommand = case optCommand o of d@Devel{} -> d { develExtraArgs = args } c -> c }) , ("yesod.devel.ignore" , \o args -> o { optCommand = case optCommand o of d@Devel{} -> d { develIgnore = args } c -> c }) , ("yesod.build.extracabalarg" , \o args -> o { optCommand = case optCommand o of b@Build{} -> b { buildExtraArgs = args } c -> c }) ] optParser' let cabal xs = rawSystem' (cabalCommand o) xs case optCommand o of Init -> scaffold Configure -> cabal ["configure"] Build es -> touch' >> cabal ("build":es) Touch -> touch' Devel da s f r b _ig es -> devel (DevelOpts (optCabalPgm o == CabalDev) da (optVerbose o) r s f b) es Keter noRebuild -> keter (cabalCommand o) noRebuild Version -> do putStrLn ("yesod-core version:" ++ yesodVersion) putStrLn ("yesod version:" ++ showVersion Paths_yesod.version) AddHandler -> addHandler Test -> do touch' cabal ["configure", "--enable-tests", "-flibrary-only"] cabal ["build"] cabal ["test"]
2,142
main :: IO () main = do o <- execParser =<< injectDefaults "yesod" [ ("yesod.devel.extracabalarg" , \o args -> o { optCommand = case optCommand o of d@Devel{} -> d { develExtraArgs = args } c -> c }) , ("yesod.devel.ignore" , \o args -> o { optCommand = case optCommand o of d@Devel{} -> d { develIgnore = args } c -> c }) , ("yesod.build.extracabalarg" , \o args -> o { optCommand = case optCommand o of b@Build{} -> b { buildExtraArgs = args } c -> c }) ] optParser' let cabal xs = rawSystem' (cabalCommand o) xs case optCommand o of Init -> scaffold Configure -> cabal ["configure"] Build es -> touch' >> cabal ("build":es) Touch -> touch' Devel da s f r b _ig es -> devel (DevelOpts (optCabalPgm o == CabalDev) da (optVerbose o) r s f b) es Keter noRebuild -> keter (cabalCommand o) noRebuild Version -> do putStrLn ("yesod-core version:" ++ yesodVersion) putStrLn ("yesod version:" ++ showVersion Paths_yesod.version) AddHandler -> addHandler Test -> do touch' cabal ["configure", "--enable-tests", "-flibrary-only"] cabal ["build"] cabal ["test"]
2,142
main = do o <- execParser =<< injectDefaults "yesod" [ ("yesod.devel.extracabalarg" , \o args -> o { optCommand = case optCommand o of d@Devel{} -> d { develExtraArgs = args } c -> c }) , ("yesod.devel.ignore" , \o args -> o { optCommand = case optCommand o of d@Devel{} -> d { develIgnore = args } c -> c }) , ("yesod.build.extracabalarg" , \o args -> o { optCommand = case optCommand o of b@Build{} -> b { buildExtraArgs = args } c -> c }) ] optParser' let cabal xs = rawSystem' (cabalCommand o) xs case optCommand o of Init -> scaffold Configure -> cabal ["configure"] Build es -> touch' >> cabal ("build":es) Touch -> touch' Devel da s f r b _ig es -> devel (DevelOpts (optCabalPgm o == CabalDev) da (optVerbose o) r s f b) es Keter noRebuild -> keter (cabalCommand o) noRebuild Version -> do putStrLn ("yesod-core version:" ++ yesodVersion) putStrLn ("yesod version:" ++ showVersion Paths_yesod.version) AddHandler -> addHandler Test -> do touch' cabal ["configure", "--enable-tests", "-flibrary-only"] cabal ["build"] cabal ["test"]
2,128
false
true
0
20
1,270
460
231
229
null
null
bergmark/mmdoc
src/Types.hs
bsd-3-clause
protectImport :: Protection -> Import -> Import
protectImport p (Import _ a b c) = Import (Just p) a b c
104
protectImport :: Protection -> Import -> Import
protectImport p (Import _ a b c) = Import (Just p) a b c
104
protectImport p (Import _ a b c) = Import (Just p) a b c
56
false
true
0
7
20
56
26
30
null
null
nushio3/ghc
compiler/stranal/DmdAnal.hs
bsd-3-clause
updSigEnv :: AnalEnv -> SigEnv -> AnalEnv
updSigEnv env sigs = env { ae_sigs = sigs }
85
updSigEnv :: AnalEnv -> SigEnv -> AnalEnv
updSigEnv env sigs = env { ae_sigs = sigs }
85
updSigEnv env sigs = env { ae_sigs = sigs }
43
false
true
0
8
16
38
18
20
null
null
MarcusVoelker/MuCurse
Function.hs
mit
resolveFunction defs (MRek g) = MRek (resolveFunction defs g)
61
resolveFunction defs (MRek g) = MRek (resolveFunction defs g)
61
resolveFunction defs (MRek g) = MRek (resolveFunction defs g)
61
false
false
0
7
8
28
13
15
null
null
HJvT/GeBoP
Game.hs
bsd-3-clause
step (i:is) t | filled t = let u = step is (children t ! i)
                           in Game.update i $ t {children = children t // [(i, u)]}
              | otherwise = grow t
160
step (i:is) t | filled t = let u = step is (children t ! i)
                           in Game.update i $ t {children = children t // [(i, u)]}
              | otherwise = grow t
160
step (i:is) t | filled t = let u = step is (children t ! i)
                           in Game.update i $ t {children = children t // [(i, u)]}
              | otherwise = grow t
160
false
false
0
13
55
104
49
55
null
null
psfblair/freepalace
src/FreePalace/Handlers/Incoming.hs
apache-2.0
handleInboundEvent clientState (InboundMessages.UserListMessage userListing) = handleUserList clientState userListing
117
handleInboundEvent clientState (InboundMessages.UserListMessage userListing) = handleUserList clientState userListing
117
handleInboundEvent clientState (InboundMessages.UserListMessage userListing) = handleUserList clientState userListing
117
false
false
0
8
7
24
11
13
null
null
jaapweel/piffle
src/Syntax/PPrint.hs
gpl-2.0
isEmpty _ = False
21
isEmpty _ = False
21
isEmpty _ = False
21
false
false
0
5
7
9
4
5
null
null
keithodulaigh/Hets
Comorphisms/CASL2TopSort.hs
gpl-2.0
genDisjunction :: [VAR_DECL] -> Set.Set [Maybe PRED_NAME] -> Maybe (FORMULA f)
genDisjunction vars spn
  | Set.size spn == 1 = case disjs of
      [] -> Nothing
      [x] -> Just x
      _ -> error "CASL2TopSort.genDisjunction: this cannot happen"
  | null disjs = Nothing
  | otherwise = Just (disjunct disjs)
  where
    disjs = foldl genConjunction [] (Set.toList spn)
    genConjunction acc pns
      | null conjs = acc
      | otherwise = conjunct (reverse conjs) : acc
      where conjs = foldl genPred [] (zip vars pns)
    genPred acc (v, mpn) = maybe acc (\ pn -> genPredication pn [v] : acc) mpn

{- | Each membership test of a subsort is transformed to a predication
of the corresponding unary predicate. Variables quantified over a
subsort yield a premise to the quantified formula that the
corresponding predicate holds. All typings are adjusted according to
the subsortmap and sort generation constraints are translated to
disjointness axioms. -}
1,042
genDisjunction :: [VAR_DECL] -> Set.Set [Maybe PRED_NAME] -> Maybe (FORMULA f)
genDisjunction vars spn
  | Set.size spn == 1 = case disjs of
      [] -> Nothing
      [x] -> Just x
      _ -> error "CASL2TopSort.genDisjunction: this cannot happen"
  | null disjs = Nothing
  | otherwise = Just (disjunct disjs)
  where
    disjs = foldl genConjunction [] (Set.toList spn)
    genConjunction acc pns
      | null conjs = acc
      | otherwise = conjunct (reverse conjs) : acc
      where conjs = foldl genPred [] (zip vars pns)
    genPred acc (v, mpn) = maybe acc (\ pn -> genPredication pn [v] : acc) mpn

{- | Each membership test of a subsort is transformed to a predication
of the corresponding unary predicate. Variables quantified over a
subsort yield a premise to the quantified formula that the
corresponding predicate holds. All typings are adjusted according to
the subsortmap and sort generation constraints are translated to
disjointness axioms. -}
1,042
genDisjunction vars spn
  | Set.size spn == 1 = case disjs of
      [] -> Nothing
      [x] -> Just x
      _ -> error "CASL2TopSort.genDisjunction: this cannot happen"
  | null disjs = Nothing
  | otherwise = Just (disjunct disjs)
  where
    disjs = foldl genConjunction [] (Set.toList spn)
    genConjunction acc pns
      | null conjs = acc
      | otherwise = conjunct (reverse conjs) : acc
      where conjs = foldl genPred [] (zip vars pns)
    genPred acc (v, mpn) = maybe acc (\ pn -> genPredication pn [v] : acc) mpn

{- | Each membership test of a subsort is transformed to a predication
of the corresponding unary predicate. Variables quantified over a
subsort yield a premise to the quantified formula that the
corresponding predicate holds. All typings are adjusted according to
the subsortmap and sort generation constraints are translated to
disjointness axioms. -}
963
false
true
0
12
288
258
124
134
null
null
amutake/imp
src/Main.hs
bsd-3-clause
help :: IO ()
help = putStr $ unlines
  [ "Usage: imp [COMMAND or FILEPATH]"
  , ""
  , "Commands:"
  , " help show this help text"
  , " repl start REPL"
  ]
182
help :: IO ()
help = putStr $ unlines
  [ "Usage: imp [COMMAND or FILEPATH]"
  , ""
  , "Commands:"
  , " help show this help text"
  , " repl start REPL"
  ]
182
help = putStr $ unlines
  [ "Usage: imp [COMMAND or FILEPATH]"
  , ""
  , "Commands:"
  , " help show this help text"
  , " repl start REPL"
  ]
168
false
true
0
6
67
38
21
17
null
null
diku-kmc/repg
src/KMC/Kleenex/Parser.hs
mit
lexeme :: Parser a -> Parser a
lexeme p = p <* whiteSpace
57
lexeme :: Parser a -> Parser a
lexeme p = p <* whiteSpace
57
lexeme p = p <* whiteSpace
26
false
true
2
7
12
35
15
20
null
null
gridaphobe/ghc
compiler/hsSyn/HsExpr.hs
bsd-3-clause
matchContextErrString IfAlt = text "multi-way if"
64
matchContextErrString IfAlt = text "multi-way if"
64
matchContextErrString IfAlt = text "multi-way if"
64
false
false
0
5
20
12
5
7
null
null
deontologician/orbRPG
Game/OrbRPG/Combinations.hs
gpl-3.0
P Green @>> E Deuterium = L Lambda
34
P Green @>> E Deuterium = L Lambda
34
P Green @>> E Deuterium = L Lambda
34
false
false
0
6
7
21
8
13
null
null
sbditto85/parsedsltest
test/Generators.hs
apache-2.0
genJsonValueToParsed :: JsonValue -> JsonValue
genJsonValueToParsed (JsonValue strConcat) = JsonValue $ genStrConcatToParsed strConcat
134
genJsonValueToParsed :: JsonValue -> JsonValue
genJsonValueToParsed (JsonValue strConcat) = JsonValue $ genStrConcatToParsed strConcat
134
genJsonValueToParsed (JsonValue strConcat) = JsonValue $ genStrConcatToParsed strConcat
87
false
true
0
7
12
31
15
16
null
null
mdsteele/fallback
src/Fallback/State/Item.hs
gpl-3.0
weaponName NastyKnife = "Nasty Knife"
37
weaponName NastyKnife = "Nasty Knife"
37
weaponName NastyKnife = "Nasty Knife"
37
false
false
1
5
4
13
4
9
null
null
ComputationWithBoundedResources/ara-inference
doc/tpdb_trs/Haskell/basic_haskell/toEnum_3.hs
mit
primEqInt (Pos Zero) (Neg Zero) = MyTrue
40
primEqInt (Pos Zero) (Neg Zero) = MyTrue
40
primEqInt (Pos Zero) (Neg Zero) = MyTrue
40
false
false
0
7
6
23
11
12
null
null
shotofi/homepage
hs/homepage.hs
apache-2.0
fiTwoColumnLeft :: Context String -> Compiler (Item String)
fiTwoColumnLeft = applyTemplates "templates/two-column.html" fiTemplate
131
fiTwoColumnLeft :: Context String -> Compiler (Item String)
fiTwoColumnLeft = applyTemplates "templates/two-column.html" fiTemplate
131
fiTwoColumnLeft = applyTemplates "templates/two-column.html" fiTemplate
71
false
true
0
9
12
37
16
21
null
null
miniBill/entangle
src/exe/Examples.hs
mit
groverNaive :: (Qubit, Qubit, Qubit) -> Circ (Bit, Bit)
groverNaive (q1,q2,q3) = do
    hadamard_at q1
    hadamard_at q2
    hadamard_at q3
    --gate_X_at q2
    qnot_at q3 `controlled` [q1, q2]
    --gate_X_at q2
    hadamard_at q1
    hadamard_at q2
    gate_X_at q1
    gate_X_at q2
    hadamard_at q2
    qnot_at q2 `controlled` q1
    hadamard_at q2
    gate_X_at q1
    gate_X_at q2
    hadamard_at q1
    hadamard_at q2
    hadamard_at q3
    measure (q1,q2)
467
groverNaive :: (Qubit, Qubit, Qubit) -> Circ (Bit, Bit)
groverNaive (q1,q2,q3) = do
    hadamard_at q1
    hadamard_at q2
    hadamard_at q3
    --gate_X_at q2
    qnot_at q3 `controlled` [q1, q2]
    --gate_X_at q2
    hadamard_at q1
    hadamard_at q2
    gate_X_at q1
    gate_X_at q2
    hadamard_at q2
    qnot_at q2 `controlled` q1
    hadamard_at q2
    gate_X_at q1
    gate_X_at q2
    hadamard_at q1
    hadamard_at q2
    hadamard_at q3
    measure (q1,q2)
467
groverNaive (q1,q2,q3) = do
    hadamard_at q1
    hadamard_at q2
    hadamard_at q3
    --gate_X_at q2
    qnot_at q3 `controlled` [q1, q2]
    --gate_X_at q2
    hadamard_at q1
    hadamard_at q2
    gate_X_at q1
    gate_X_at q2
    hadamard_at q2
    qnot_at q2 `controlled` q1
    hadamard_at q2
    gate_X_at q1
    gate_X_at q2
    hadamard_at q1
    hadamard_at q2
    hadamard_at q3
    measure (q1,q2)
411
false
true
0
8
131
173
78
95
null
null
AlexeyRaga/eta
compiler/ETA/TypeCheck/TcForeign.hs
bsd-3-clause
checkForeignRes :: Bool -> Bool -> (Type -> Validity) -> Type -> TcM ()
checkForeignRes nonIOResultOk checkSafe predResType ty
  | Just (_, resType) <- tcSplitIOType_maybe ty
  = do traceTc "checkForeignRes[IO]" $ ppr resType
       check (predResType resType) (illegalForeignTyErr result)
  | Just (_, tagType, resType) <- tcSplitJavaType_maybe ty
  = do traceTc "checkForeignRes[Java]" $ ppr tagType <+> ppr resType
       check (predResType resType) (illegalForeignTyErr result)
  -- Case for non-IO result type with FFI Import
  | not nonIOResultOk
  = addErrTc . illegalForeignTyErr result $ str "IO result type expected"
  | otherwise
  = do traceTc "checkForeignRes[Other]" $ ppr ty
       dflags <- getDynFlags
       case predResType ty of
         -- Handle normal typecheck fail, we want to handle this first and
         -- only report safe haskell errors if the normal type check is OK.
         NotValid msg -> addErrTc $ illegalForeignTyErr result msg
         -- handle safe infer fail
         _ | checkSafe && safeInferOn dflags -> recordUnsafeInfer
         -- handle safe language typecheck fail
         _ | checkSafe && safeLanguageOn dflags ->
             addErrTc $ illegalForeignTyErr result safeHsErr
         -- sucess! non-IO return is fine
         _ -> return ()
  where
    safeHsErr = str $ "Safe Haskell is on, all FFI imports must be in the"
                   ++ " IO monad"
1,454
checkForeignRes :: Bool -> Bool -> (Type -> Validity) -> Type -> TcM ()
checkForeignRes nonIOResultOk checkSafe predResType ty
  | Just (_, resType) <- tcSplitIOType_maybe ty
  = do traceTc "checkForeignRes[IO]" $ ppr resType
       check (predResType resType) (illegalForeignTyErr result)
  | Just (_, tagType, resType) <- tcSplitJavaType_maybe ty
  = do traceTc "checkForeignRes[Java]" $ ppr tagType <+> ppr resType
       check (predResType resType) (illegalForeignTyErr result)
  -- Case for non-IO result type with FFI Import
  | not nonIOResultOk
  = addErrTc . illegalForeignTyErr result $ str "IO result type expected"
  | otherwise
  = do traceTc "checkForeignRes[Other]" $ ppr ty
       dflags <- getDynFlags
       case predResType ty of
         -- Handle normal typecheck fail, we want to handle this first and
         -- only report safe haskell errors if the normal type check is OK.
         NotValid msg -> addErrTc $ illegalForeignTyErr result msg
         -- handle safe infer fail
         _ | checkSafe && safeInferOn dflags -> recordUnsafeInfer
         -- handle safe language typecheck fail
         _ | checkSafe && safeLanguageOn dflags ->
             addErrTc $ illegalForeignTyErr result safeHsErr
         -- sucess! non-IO return is fine
         _ -> return ()
  where
    safeHsErr = str $ "Safe Haskell is on, all FFI imports must be in the"
                   ++ " IO monad"
1,454
checkForeignRes nonIOResultOk checkSafe predResType ty
  | Just (_, resType) <- tcSplitIOType_maybe ty
  = do traceTc "checkForeignRes[IO]" $ ppr resType
       check (predResType resType) (illegalForeignTyErr result)
  | Just (_, tagType, resType) <- tcSplitJavaType_maybe ty
  = do traceTc "checkForeignRes[Java]" $ ppr tagType <+> ppr resType
       check (predResType resType) (illegalForeignTyErr result)
  -- Case for non-IO result type with FFI Import
  | not nonIOResultOk
  = addErrTc . illegalForeignTyErr result $ str "IO result type expected"
  | otherwise
  = do traceTc "checkForeignRes[Other]" $ ppr ty
       dflags <- getDynFlags
       case predResType ty of
         -- Handle normal typecheck fail, we want to handle this first and
         -- only report safe haskell errors if the normal type check is OK.
         NotValid msg -> addErrTc $ illegalForeignTyErr result msg
         -- handle safe infer fail
         _ | checkSafe && safeInferOn dflags -> recordUnsafeInfer
         -- handle safe language typecheck fail
         _ | checkSafe && safeLanguageOn dflags ->
             addErrTc $ illegalForeignTyErr result safeHsErr
         -- sucess! non-IO return is fine
         _ -> return ()
  where
    safeHsErr = str $ "Safe Haskell is on, all FFI imports must be in the"
                   ++ " IO monad"
1,382
false
true
2
14
402
335
154
181
null
null
keera-studios/hsQt
Qtc/Gui/QStyleOptionToolBar.hs
bsd-2-clause
setToolBarArea :: QStyleOptionToolBar a -> ((ToolBarArea)) -> IO ()
setToolBarArea x0 (x1) = withObjectPtr x0 $ \cobj_x0 -> qtc_QStyleOptionToolBar_setToolBarArea cobj_x0 (toCLong $ qEnum_toInt x1)
203
setToolBarArea :: QStyleOptionToolBar a -> ((ToolBarArea)) -> IO ()
setToolBarArea x0 (x1) = withObjectPtr x0 $ \cobj_x0 -> qtc_QStyleOptionToolBar_setToolBarArea cobj_x0 (toCLong $ qEnum_toInt x1)
203
setToolBarArea x0 (x1) = withObjectPtr x0 $ \cobj_x0 -> qtc_QStyleOptionToolBar_setToolBarArea cobj_x0 (toCLong $ qEnum_toInt x1)
135
false
true
0
10
29
68
34
34
null
null
lambdageek/insomnia
tests/TestToAST.hs
bsd-3-clause
units :: TestTree
units = testGroup "ToAST" [ infixParsingUnits ]
69
units :: TestTree
units = testGroup "ToAST" [ infixParsingUnits ]
69
units = testGroup "ToAST" [ infixParsingUnits ]
51
false
true
0
6
13
19
10
9
null
null
imeckler/mote
Mote.hs
bsd-3-clause
respond' :: Ref MoteState -> FromClient -> M ToClient respond' stRef = \case Load p -> do loadFile stRef p liftIO . forkIO $ do fmap fileData (gReadRef stRef) >>= \case Nothing -> return () Just (FileData {holesInfo=_}) -> return () -- no idea if this is kosher -- void . runGhc (Just libdir) $ runErrorT (mapM_ (getAndMemoizeSuggestions stRef) (M.elems holesInfo)) return Ok NextHole (ClientState {path, cursorPos=(line,col)}) -> getHoles stRef >>= \holes -> let mh = case dropWhile ((currPosLoc >=) . srcSpanStart) holes of [] -> case holes of {[] -> Nothing; (h:_) -> Just h } (h:_) -> Just h in maybe (throwError NoHole) (return . SetCursor . srcLocPos . srcSpanStart) mh where currPosLoc = mkSrcLoc (fsLit path) line col -- inefficient PrevHole (ClientState {path, cursorPos=(line, col)}) -> getHoles stRef >>= \holes -> let mxs = case takeWhile (< currPosSpan) holes of [] -> case holes of {[] -> Nothing; _ -> Just holes} xs -> Just xs in maybe (throwError NoHole) (return . SetCursor . srcLocPos . srcSpanStart . last) mxs where currPosSpan = srcLocSpan (mkSrcLoc (fsLit path) line col) EnterHole (ClientState {..}) -> do FileData {path=p} <- getFileDataErr stRef when (p /= path) (loadFile stRef path) mh <- getEnclosingHole stRef cursorPos gModifyRef stRef (\st -> st { currentHole = mh }) return $ case mh of Nothing -> SetInfoWindow "No Hole found" Just _ -> Ok GetHoleInfo (ClientState {..}) (HoleInfoOptions{..}) -> do ahi@(AugmentedHoleInfo {holeInfo=hi}) <- getCurrentHoleErr stRef fs <- lift getSessionDynFlags let env = map (\(id,t) -> (occNameToString (getOccName id), showType fs t)) (holeEnv hi) case sendOutputAsData of True -> do suggsJSON <- if withSuggestions then mkSuggsJSON <$> Mote.Suggest.getAndMemoizeSuggestions stRef ahi else return [] return $ HoleInfoJSON . Aeson.object $ [ "environment" .= map (\(x, t) -> Aeson.object ["name" .= x, "type" .= t]) env , "goal" .= Aeson.object ["name" .= holeNameString hi, "type" .= showType fs (holeType hi) ] ] ++ suggsJSON where mkSuggJSON (n, t) = Aeson.object ["name" .= occNameToString (occName n), "type" .= showType fs t] mkSuggsJSON suggs = [ "suggestions" .= map mkSuggJSON suggs ] False -> do suggsStr <- if withSuggestions then mkSuggsStr <$> Mote.Suggest.getAndMemoizeSuggestions stRef ahi else return "" let goalStr = "Goal: " ++ holeNameString hi ++ " :: " ++ showType fs (holeType hi) ++ "\n" ++ replicate 40 '-' envStr = -- TODO: Wow, this is total for the strangest reason. If env -- is empty then maxIdentLength never gets used to pad so -- maximum doesn't fail. let maxIdentLength = maximum $ map (\(x,_) -> length x) env in unlines $ map (\(x, t) -> take maxIdentLength (x ++ repeat ' ') ++ " :: " ++ t) env return . SetInfoWindow $ unlines [goalStr, envStr, "", suggsStr] where mkSuggsStr suggs = let heading = "Suggestions:\n" ++ replicate 40 '-' in unlines (heading : map (\(n, t) -> (occNameToString . occName) n ++ " :: " ++ showType fs t) suggs) Refine exprStr (ClientState {..}) -> do hi <- getCurrentHoleErr stRef expr' <- refine stRef exprStr fs <- lift getSessionDynFlags unqual <- lift getPrintUnqual return $ Replace (toSpan . 
holeSpan $ holeInfo hi) path (showSDocForUser fs unqual (ppr expr')) SendStop -> return Stop -- Precondition here: Hole has already been entered CaseFurther var ClientState {} -> do MoteState {..} <- gReadRef stRef FileData {path, hsModule} <- getFileDataErr stRef hi@(HoleInfo {holeEnv}) <- holeInfo <$> getCurrentHoleErr stRef (id, ty) <- maybeThrow (NoVariable var) $ List.find (\(id,_) -> var == occNameToString (getOccName id)) holeEnv expansions stRef (occNameToString (getOccName id)) ty (holeSpan hi) hsModule >>= \case Nothing -> return (Error "Variable not found") Just ((L sp _mg, mi), matches) -> do fs <- lift getSessionDynFlags unqual <- lift getPrintUnqual let span = toSpan sp indentLevel = subtract 1 . snd . fst $ span indentTail [] = error "indentTail got []" indentTail (s:ss) = s : map (replicate indentLevel ' ' ++) ss showMatch :: HsMatchContext RdrName -> Match RdrName (LHsExpr RdrName) -> String showMatch ctx = showSDocForUser fs unqual . pprMatch ctx return $ case mi of Equation (L _l name) -> Replace (toSpan sp) path . unlines . indentTail $ map (showMatch (FunRhs name False)) matches CaseBranch -> -- TODO shouldn't always unlines. sometimes should be ; and {} Replace (toSpan sp) path . unlines . indentTail $ map (showMatch CaseAlt) matches SingleLambda _loc -> Error "SingleLambda case expansion not yet implemented" CaseOn exprStr (ClientState {..}) -> do expr <- parseExpr exprStr -- Should actually have general mechanism for getting the scope at -- a point... FileData {..} <- getFileDataErr stRef ty <- getEnclosingHole stRef cursorPos >>= \case Nothing -> hsExprType expr Just hi -> inHoleEnv typecheckedModule (holeInfo hi) $ tcRnExprTc expr let (line, col) = cursorPos ms <- matchesForTypeAt stRef ty (mkSrcLoc (fsLit "") line col) indentLevel <- liftIO $ LB8.length . LB8.takeWhile (== ' ') . (!! (line - 1)) . LB8.lines <$> LB8.readFile path fs <- lift getSessionDynFlags unqual <- lift getPrintUnqual let indent n = (replicate n ' ' ++) showMatch = showSDocForUser fs unqual . pprMatch (CaseAlt :: HsMatchContext RdrName) return . Insert cursorPos path . unlines $ ("case " ++ exprStr ++ " of") : map (indent (2 + fromIntegral indentLevel) . showMatch) ms -- every message should really send current file name (ClientState) and -- check if it matches the currently loaded file GetType e -> do fs <- lift getSessionDynFlags x <- exprType e unqual <- lift getPrintUnqual return . SetInfoWindow . showSDocForUser fs unqual $ ppr x Search {} -> return Ok -- TODO
6,719
respond' :: Ref MoteState -> FromClient -> M ToClient respond' stRef = \case Load p -> do loadFile stRef p liftIO . forkIO $ do fmap fileData (gReadRef stRef) >>= \case Nothing -> return () Just (FileData {holesInfo=_}) -> return () -- no idea if this is kosher -- void . runGhc (Just libdir) $ runErrorT (mapM_ (getAndMemoizeSuggestions stRef) (M.elems holesInfo)) return Ok NextHole (ClientState {path, cursorPos=(line,col)}) -> getHoles stRef >>= \holes -> let mh = case dropWhile ((currPosLoc >=) . srcSpanStart) holes of [] -> case holes of {[] -> Nothing; (h:_) -> Just h } (h:_) -> Just h in maybe (throwError NoHole) (return . SetCursor . srcLocPos . srcSpanStart) mh where currPosLoc = mkSrcLoc (fsLit path) line col -- inefficient PrevHole (ClientState {path, cursorPos=(line, col)}) -> getHoles stRef >>= \holes -> let mxs = case takeWhile (< currPosSpan) holes of [] -> case holes of {[] -> Nothing; _ -> Just holes} xs -> Just xs in maybe (throwError NoHole) (return . SetCursor . srcLocPos . srcSpanStart . last) mxs where currPosSpan = srcLocSpan (mkSrcLoc (fsLit path) line col) EnterHole (ClientState {..}) -> do FileData {path=p} <- getFileDataErr stRef when (p /= path) (loadFile stRef path) mh <- getEnclosingHole stRef cursorPos gModifyRef stRef (\st -> st { currentHole = mh }) return $ case mh of Nothing -> SetInfoWindow "No Hole found" Just _ -> Ok GetHoleInfo (ClientState {..}) (HoleInfoOptions{..}) -> do ahi@(AugmentedHoleInfo {holeInfo=hi}) <- getCurrentHoleErr stRef fs <- lift getSessionDynFlags let env = map (\(id,t) -> (occNameToString (getOccName id), showType fs t)) (holeEnv hi) case sendOutputAsData of True -> do suggsJSON <- if withSuggestions then mkSuggsJSON <$> Mote.Suggest.getAndMemoizeSuggestions stRef ahi else return [] return $ HoleInfoJSON . Aeson.object $ [ "environment" .= map (\(x, t) -> Aeson.object ["name" .= x, "type" .= t]) env , "goal" .= Aeson.object ["name" .= holeNameString hi, "type" .= showType fs (holeType hi) ] ] ++ suggsJSON where mkSuggJSON (n, t) = Aeson.object ["name" .= occNameToString (occName n), "type" .= showType fs t] mkSuggsJSON suggs = [ "suggestions" .= map mkSuggJSON suggs ] False -> do suggsStr <- if withSuggestions then mkSuggsStr <$> Mote.Suggest.getAndMemoizeSuggestions stRef ahi else return "" let goalStr = "Goal: " ++ holeNameString hi ++ " :: " ++ showType fs (holeType hi) ++ "\n" ++ replicate 40 '-' envStr = -- TODO: Wow, this is total for the strangest reason. If env -- is empty then maxIdentLength never gets used to pad so -- maximum doesn't fail. let maxIdentLength = maximum $ map (\(x,_) -> length x) env in unlines $ map (\(x, t) -> take maxIdentLength (x ++ repeat ' ') ++ " :: " ++ t) env return . SetInfoWindow $ unlines [goalStr, envStr, "", suggsStr] where mkSuggsStr suggs = let heading = "Suggestions:\n" ++ replicate 40 '-' in unlines (heading : map (\(n, t) -> (occNameToString . occName) n ++ " :: " ++ showType fs t) suggs) Refine exprStr (ClientState {..}) -> do hi <- getCurrentHoleErr stRef expr' <- refine stRef exprStr fs <- lift getSessionDynFlags unqual <- lift getPrintUnqual return $ Replace (toSpan . 
holeSpan $ holeInfo hi) path (showSDocForUser fs unqual (ppr expr')) SendStop -> return Stop -- Precondition here: Hole has already been entered CaseFurther var ClientState {} -> do MoteState {..} <- gReadRef stRef FileData {path, hsModule} <- getFileDataErr stRef hi@(HoleInfo {holeEnv}) <- holeInfo <$> getCurrentHoleErr stRef (id, ty) <- maybeThrow (NoVariable var) $ List.find (\(id,_) -> var == occNameToString (getOccName id)) holeEnv expansions stRef (occNameToString (getOccName id)) ty (holeSpan hi) hsModule >>= \case Nothing -> return (Error "Variable not found") Just ((L sp _mg, mi), matches) -> do fs <- lift getSessionDynFlags unqual <- lift getPrintUnqual let span = toSpan sp indentLevel = subtract 1 . snd . fst $ span indentTail [] = error "indentTail got []" indentTail (s:ss) = s : map (replicate indentLevel ' ' ++) ss showMatch :: HsMatchContext RdrName -> Match RdrName (LHsExpr RdrName) -> String showMatch ctx = showSDocForUser fs unqual . pprMatch ctx return $ case mi of Equation (L _l name) -> Replace (toSpan sp) path . unlines . indentTail $ map (showMatch (FunRhs name False)) matches CaseBranch -> -- TODO shouldn't always unlines. sometimes should be ; and {} Replace (toSpan sp) path . unlines . indentTail $ map (showMatch CaseAlt) matches SingleLambda _loc -> Error "SingleLambda case expansion not yet implemented" CaseOn exprStr (ClientState {..}) -> do expr <- parseExpr exprStr -- Should actually have general mechanism for getting the scope at -- a point... FileData {..} <- getFileDataErr stRef ty <- getEnclosingHole stRef cursorPos >>= \case Nothing -> hsExprType expr Just hi -> inHoleEnv typecheckedModule (holeInfo hi) $ tcRnExprTc expr let (line, col) = cursorPos ms <- matchesForTypeAt stRef ty (mkSrcLoc (fsLit "") line col) indentLevel <- liftIO $ LB8.length . LB8.takeWhile (== ' ') . (!! (line - 1)) . LB8.lines <$> LB8.readFile path fs <- lift getSessionDynFlags unqual <- lift getPrintUnqual let indent n = (replicate n ' ' ++) showMatch = showSDocForUser fs unqual . pprMatch (CaseAlt :: HsMatchContext RdrName) return . Insert cursorPos path . unlines $ ("case " ++ exprStr ++ " of") : map (indent (2 + fromIntegral indentLevel) . showMatch) ms -- every message should really send current file name (ClientState) and -- check if it matches the currently loaded file GetType e -> do fs <- lift getSessionDynFlags x <- exprType e unqual <- lift getPrintUnqual return . SetInfoWindow . showSDocForUser fs unqual $ ppr x Search {} -> return Ok -- TODO
6,719
respond' stRef = \case Load p -> do loadFile stRef p liftIO . forkIO $ do fmap fileData (gReadRef stRef) >>= \case Nothing -> return () Just (FileData {holesInfo=_}) -> return () -- no idea if this is kosher -- void . runGhc (Just libdir) $ runErrorT (mapM_ (getAndMemoizeSuggestions stRef) (M.elems holesInfo)) return Ok NextHole (ClientState {path, cursorPos=(line,col)}) -> getHoles stRef >>= \holes -> let mh = case dropWhile ((currPosLoc >=) . srcSpanStart) holes of [] -> case holes of {[] -> Nothing; (h:_) -> Just h } (h:_) -> Just h in maybe (throwError NoHole) (return . SetCursor . srcLocPos . srcSpanStart) mh where currPosLoc = mkSrcLoc (fsLit path) line col -- inefficient PrevHole (ClientState {path, cursorPos=(line, col)}) -> getHoles stRef >>= \holes -> let mxs = case takeWhile (< currPosSpan) holes of [] -> case holes of {[] -> Nothing; _ -> Just holes} xs -> Just xs in maybe (throwError NoHole) (return . SetCursor . srcLocPos . srcSpanStart . last) mxs where currPosSpan = srcLocSpan (mkSrcLoc (fsLit path) line col) EnterHole (ClientState {..}) -> do FileData {path=p} <- getFileDataErr stRef when (p /= path) (loadFile stRef path) mh <- getEnclosingHole stRef cursorPos gModifyRef stRef (\st -> st { currentHole = mh }) return $ case mh of Nothing -> SetInfoWindow "No Hole found" Just _ -> Ok GetHoleInfo (ClientState {..}) (HoleInfoOptions{..}) -> do ahi@(AugmentedHoleInfo {holeInfo=hi}) <- getCurrentHoleErr stRef fs <- lift getSessionDynFlags let env = map (\(id,t) -> (occNameToString (getOccName id), showType fs t)) (holeEnv hi) case sendOutputAsData of True -> do suggsJSON <- if withSuggestions then mkSuggsJSON <$> Mote.Suggest.getAndMemoizeSuggestions stRef ahi else return [] return $ HoleInfoJSON . Aeson.object $ [ "environment" .= map (\(x, t) -> Aeson.object ["name" .= x, "type" .= t]) env , "goal" .= Aeson.object ["name" .= holeNameString hi, "type" .= showType fs (holeType hi) ] ] ++ suggsJSON where mkSuggJSON (n, t) = Aeson.object ["name" .= occNameToString (occName n), "type" .= showType fs t] mkSuggsJSON suggs = [ "suggestions" .= map mkSuggJSON suggs ] False -> do suggsStr <- if withSuggestions then mkSuggsStr <$> Mote.Suggest.getAndMemoizeSuggestions stRef ahi else return "" let goalStr = "Goal: " ++ holeNameString hi ++ " :: " ++ showType fs (holeType hi) ++ "\n" ++ replicate 40 '-' envStr = -- TODO: Wow, this is total for the strangest reason. If env -- is empty then maxIdentLength never gets used to pad so -- maximum doesn't fail. let maxIdentLength = maximum $ map (\(x,_) -> length x) env in unlines $ map (\(x, t) -> take maxIdentLength (x ++ repeat ' ') ++ " :: " ++ t) env return . SetInfoWindow $ unlines [goalStr, envStr, "", suggsStr] where mkSuggsStr suggs = let heading = "Suggestions:\n" ++ replicate 40 '-' in unlines (heading : map (\(n, t) -> (occNameToString . occName) n ++ " :: " ++ showType fs t) suggs) Refine exprStr (ClientState {..}) -> do hi <- getCurrentHoleErr stRef expr' <- refine stRef exprStr fs <- lift getSessionDynFlags unqual <- lift getPrintUnqual return $ Replace (toSpan . 
holeSpan $ holeInfo hi) path (showSDocForUser fs unqual (ppr expr')) SendStop -> return Stop -- Precondition here: Hole has already been entered CaseFurther var ClientState {} -> do MoteState {..} <- gReadRef stRef FileData {path, hsModule} <- getFileDataErr stRef hi@(HoleInfo {holeEnv}) <- holeInfo <$> getCurrentHoleErr stRef (id, ty) <- maybeThrow (NoVariable var) $ List.find (\(id,_) -> var == occNameToString (getOccName id)) holeEnv expansions stRef (occNameToString (getOccName id)) ty (holeSpan hi) hsModule >>= \case Nothing -> return (Error "Variable not found") Just ((L sp _mg, mi), matches) -> do fs <- lift getSessionDynFlags unqual <- lift getPrintUnqual let span = toSpan sp indentLevel = subtract 1 . snd . fst $ span indentTail [] = error "indentTail got []" indentTail (s:ss) = s : map (replicate indentLevel ' ' ++) ss showMatch :: HsMatchContext RdrName -> Match RdrName (LHsExpr RdrName) -> String showMatch ctx = showSDocForUser fs unqual . pprMatch ctx return $ case mi of Equation (L _l name) -> Replace (toSpan sp) path . unlines . indentTail $ map (showMatch (FunRhs name False)) matches CaseBranch -> -- TODO shouldn't always unlines. sometimes should be ; and {} Replace (toSpan sp) path . unlines . indentTail $ map (showMatch CaseAlt) matches SingleLambda _loc -> Error "SingleLambda case expansion not yet implemented" CaseOn exprStr (ClientState {..}) -> do expr <- parseExpr exprStr -- Should actually have general mechanism for getting the scope at -- a point... FileData {..} <- getFileDataErr stRef ty <- getEnclosingHole stRef cursorPos >>= \case Nothing -> hsExprType expr Just hi -> inHoleEnv typecheckedModule (holeInfo hi) $ tcRnExprTc expr let (line, col) = cursorPos ms <- matchesForTypeAt stRef ty (mkSrcLoc (fsLit "") line col) indentLevel <- liftIO $ LB8.length . LB8.takeWhile (== ' ') . (!! (line - 1)) . LB8.lines <$> LB8.readFile path fs <- lift getSessionDynFlags unqual <- lift getPrintUnqual let indent n = (replicate n ' ' ++) showMatch = showSDocForUser fs unqual . pprMatch (CaseAlt :: HsMatchContext RdrName) return . Insert cursorPos path . unlines $ ("case " ++ exprStr ++ " of") : map (indent (2 + fromIntegral indentLevel) . showMatch) ms -- every message should really send current file name (ClientState) and -- check if it matches the currently loaded file GetType e -> do fs <- lift getSessionDynFlags x <- exprType e unqual <- lift getPrintUnqual return . SetInfoWindow . showSDocForUser fs unqual $ ppr x Search {} -> return Ok -- TODO
6,665
false
true
0
29
1,997
2,219
1,089
1,130
null
null
vTurbine/ghc
compiler/hsSyn/HsPat.hs
bsd-3-clause
hsRecUpdFieldOcc :: HsRecField' (AmbiguousFieldOcc Id) arg -> LFieldOcc Id
hsRecUpdFieldOcc = fmap unambiguousFieldOcc . hsRecFieldLbl
134
hsRecUpdFieldOcc :: HsRecField' (AmbiguousFieldOcc Id) arg -> LFieldOcc Id
hsRecUpdFieldOcc = fmap unambiguousFieldOcc . hsRecFieldLbl
134
hsRecUpdFieldOcc = fmap unambiguousFieldOcc . hsRecFieldLbl
59
false
true
0
8
14
36
17
19
null
null
jmct/IterativeCompiler
frontend/Parser.hs
mit
second :: a -> b -> b
second a b = b
36
second :: a -> b -> b
second a b = b
36
second a b = b
14
false
true
0
8
11
30
13
17
null
null
motiz88/postgrest
test/Feature/StructureSpec.hs
mit
spec :: DbStructure -> H.Connection -> Spec spec struct c = around (withApp cfgDefault struct c) $ do describe "GET /" $ do it "lists views in schema" $ request methodGet "/" [] "" `shouldRespondWith` [json| [ {"schema":"test","name":"Escap3e;","insertable":true} , {"schema":"test","name":"articleStars","insertable":true} , {"schema":"test","name":"articles","insertable":true} , {"schema":"test","name":"auto_incrementing_pk","insertable":true} , {"schema":"test","name":"clients","insertable":true} , {"schema":"test","name":"comments","insertable":true} , {"schema":"test","name":"complex_items","insertable":true} , {"schema":"test","name":"compound_pk","insertable":true} , {"schema":"test","name":"ghostBusters","insertable":true} , {"schema":"test","name":"has_count_column","insertable":false} , {"schema":"test","name":"has_fk","insertable":true} , {"schema":"test","name":"insertable_view_with_join","insertable":true} , {"schema":"test","name":"insertonly","insertable":true} , {"schema":"test","name":"items","insertable":true} , {"schema":"test","name":"json","insertable":true} , {"schema":"test","name":"materialized_view","insertable":false} , {"schema":"test","name":"menagerie","insertable":true} , {"schema":"test","name":"no_pk","insertable":true} , {"schema":"test","name":"nullable_integer","insertable":true} , {"schema":"test","name":"projects","insertable":true} , {"schema":"test","name":"projects_view","insertable":true} , {"schema":"test","name":"simple_pk","insertable":true} , {"schema":"test","name":"tasks","insertable":true} , {"schema":"test","name":"tsearch","insertable":true} , {"schema":"test","name":"users","insertable":true} , {"schema":"test","name":"users_projects","insertable":true} , {"schema":"test","name":"users_tasks","insertable":true} , {"schema":"test","name":"withUnique","insertable":true} ] |] {matchStatus = 200} it "lists only views user has permission to see" $ do let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoicG9zdGdyZXN0X3Rlc3RfYXV0aG9yIiwiaWQiOiJqZG9lIn0.y4vZuu1dDdwAl0-S00MCRWRYMlJ5YAMSir6Es6WtWx0" request methodGet "/" [auth] "" `shouldRespondWith` [json| [ {"schema":"test","name":"authors_only","insertable":true} ] |] {matchStatus = 200} describe "Table info" $ do it "is available with OPTIONS verb" $ request methodOptions "/menagerie" [] "" `shouldRespondWith` [json| { "pkey":["integer"], "columns":[ { "default": null, "precision": 32, "updatable": true, "schema": "test", "name": "integer", "type": "integer", "maxLen": null, "enum": [], "nullable": false, "position": 1, "references": null, "default": null }, { "default": null, "precision": 53, "updatable": true, "schema": "test", "name": "double", "type": "double precision", "maxLen": null, "enum": [], "nullable": false, "references": null, "position": 2 }, { "default": null, "precision": null, "updatable": true, "schema": "test", "name": "varchar", "type": "character varying", "maxLen": null, "enum": [], "nullable": false, "position": 3, "references": null, "default": null }, { "default": null, "precision": null, "updatable": true, "schema": "test", "name": "boolean", "type": "boolean", "maxLen": null, "enum": [], "nullable": false, "references": null, "position": 4 }, { "default": null, "precision": null, "updatable": true, "schema": "test", "name": "date", "type": "date", "maxLen": null, "enum": [], "nullable": false, "references": null, "position": 5 }, { "default": null, "precision": null, "updatable": true, "schema": "test", "name": "money", "type": "money", "maxLen": 
null, "enum": [], "nullable": false, "position": 6, "references": null, "default": null }, { "default": null, "precision": null, "updatable": true, "schema": "test", "name": "enum", "type": "USER-DEFINED", "maxLen": null, "enum": [ "foo", "bar" ], "nullable": false, "position": 7, "references": null, "default": null } ] } |] it "it includes primary and foreign keys for views" $ request methodOptions "/projects_view" [] "" `shouldRespondWith` [json| { "pkey":[ "id" ], "columns":[ { "references":null, "default":null, "precision":32, "updatable":true, "schema":"test", "name":"id", "type":"integer", "maxLen":null, "enum":[], "nullable":true, "position":1 }, { "references":null, "default":null, "precision":null, "updatable":true, "schema":"test", "name":"name", "type":"text", "maxLen":null, "enum":[], "nullable":true, "position":2 }, { "references": { "schema":"test", "column":"id", "table":"clients" }, "default":null, "precision":32, "updatable":true, "schema":"test", "name":"client_id", "type":"integer", "maxLen":null, "enum":[], "nullable":true, "position":3 } ] } |] it "includes foreign key data" $ request methodOptions "/has_fk" [] "" `shouldRespondWith` [json| { "pkey": ["id"], "columns":[ { "default": "nextval('test.has_fk_id_seq'::regclass)", "precision": 64, "updatable": true, "schema": "test", "name": "id", "type": "bigint", "maxLen": null, "nullable": false, "position": 1, "enum": [], "references": null }, { "default": null, "precision": 32, "updatable": true, "schema": "test", "name": "auto_inc_fk", "type": "integer", "maxLen": null, "nullable": true, "position": 2, "enum": [], "references": {"schema":"test", "table": "auto_incrementing_pk", "column": "id"} }, { "default": null, "precision": null, "updatable": true, "schema": "test", "name": "simple_fk", "type": "character varying", "maxLen": 255, "nullable": true, "position": 3, "enum": [], "references": {"schema":"test", "table": "simple_pk", "column": "k"} } ] } |] it "includes all information on views for renamed columns, and raises relations to correct schema" $ request methodOptions "/articleStars" [] "" `shouldRespondWith` [json| { "pkey": [ "articleId", "userId" ], "columns": [ { "references": { "schema": "test", "column": "id", "table": "articles" }, "default": null, "precision": 32, "updatable": true, "schema": "test", "name": "articleId", "type": "integer", "maxLen": null, "enum": [], "nullable": true, "position": 1 }, { "references": { "schema": "test", "column": "id", "table": "users" }, "default": null, "precision": 32, "updatable": true, "schema": "test", "name": "userId", "type": "integer", "maxLen": null, "enum": [], "nullable": true, "position": 2 }, { "references": null, "default": null, "precision": null, "updatable": true, "schema": "test", "name": "createdAt", "type": "timestamp without time zone", "maxLen": null, "enum": [], "nullable": true, "position": 3 } ] } |] it "errors for non existant tables" $ request methodOptions "/dne" [] "" `shouldRespondWith` 404
9,989
spec :: DbStructure -> H.Connection -> Spec spec struct c = around (withApp cfgDefault struct c) $ do describe "GET /" $ do it "lists views in schema" $ request methodGet "/" [] "" `shouldRespondWith` [json| [ {"schema":"test","name":"Escap3e;","insertable":true} , {"schema":"test","name":"articleStars","insertable":true} , {"schema":"test","name":"articles","insertable":true} , {"schema":"test","name":"auto_incrementing_pk","insertable":true} , {"schema":"test","name":"clients","insertable":true} , {"schema":"test","name":"comments","insertable":true} , {"schema":"test","name":"complex_items","insertable":true} , {"schema":"test","name":"compound_pk","insertable":true} , {"schema":"test","name":"ghostBusters","insertable":true} , {"schema":"test","name":"has_count_column","insertable":false} , {"schema":"test","name":"has_fk","insertable":true} , {"schema":"test","name":"insertable_view_with_join","insertable":true} , {"schema":"test","name":"insertonly","insertable":true} , {"schema":"test","name":"items","insertable":true} , {"schema":"test","name":"json","insertable":true} , {"schema":"test","name":"materialized_view","insertable":false} , {"schema":"test","name":"menagerie","insertable":true} , {"schema":"test","name":"no_pk","insertable":true} , {"schema":"test","name":"nullable_integer","insertable":true} , {"schema":"test","name":"projects","insertable":true} , {"schema":"test","name":"projects_view","insertable":true} , {"schema":"test","name":"simple_pk","insertable":true} , {"schema":"test","name":"tasks","insertable":true} , {"schema":"test","name":"tsearch","insertable":true} , {"schema":"test","name":"users","insertable":true} , {"schema":"test","name":"users_projects","insertable":true} , {"schema":"test","name":"users_tasks","insertable":true} , {"schema":"test","name":"withUnique","insertable":true} ] |] {matchStatus = 200} it "lists only views user has permission to see" $ do let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoicG9zdGdyZXN0X3Rlc3RfYXV0aG9yIiwiaWQiOiJqZG9lIn0.y4vZuu1dDdwAl0-S00MCRWRYMlJ5YAMSir6Es6WtWx0" request methodGet "/" [auth] "" `shouldRespondWith` [json| [ {"schema":"test","name":"authors_only","insertable":true} ] |] {matchStatus = 200} describe "Table info" $ do it "is available with OPTIONS verb" $ request methodOptions "/menagerie" [] "" `shouldRespondWith` [json| { "pkey":["integer"], "columns":[ { "default": null, "precision": 32, "updatable": true, "schema": "test", "name": "integer", "type": "integer", "maxLen": null, "enum": [], "nullable": false, "position": 1, "references": null, "default": null }, { "default": null, "precision": 53, "updatable": true, "schema": "test", "name": "double", "type": "double precision", "maxLen": null, "enum": [], "nullable": false, "references": null, "position": 2 }, { "default": null, "precision": null, "updatable": true, "schema": "test", "name": "varchar", "type": "character varying", "maxLen": null, "enum": [], "nullable": false, "position": 3, "references": null, "default": null }, { "default": null, "precision": null, "updatable": true, "schema": "test", "name": "boolean", "type": "boolean", "maxLen": null, "enum": [], "nullable": false, "references": null, "position": 4 }, { "default": null, "precision": null, "updatable": true, "schema": "test", "name": "date", "type": "date", "maxLen": null, "enum": [], "nullable": false, "references": null, "position": 5 }, { "default": null, "precision": null, "updatable": true, "schema": "test", "name": "money", "type": "money", "maxLen": 
null, "enum": [], "nullable": false, "position": 6, "references": null, "default": null }, { "default": null, "precision": null, "updatable": true, "schema": "test", "name": "enum", "type": "USER-DEFINED", "maxLen": null, "enum": [ "foo", "bar" ], "nullable": false, "position": 7, "references": null, "default": null } ] } |] it "it includes primary and foreign keys for views" $ request methodOptions "/projects_view" [] "" `shouldRespondWith` [json| { "pkey":[ "id" ], "columns":[ { "references":null, "default":null, "precision":32, "updatable":true, "schema":"test", "name":"id", "type":"integer", "maxLen":null, "enum":[], "nullable":true, "position":1 }, { "references":null, "default":null, "precision":null, "updatable":true, "schema":"test", "name":"name", "type":"text", "maxLen":null, "enum":[], "nullable":true, "position":2 }, { "references": { "schema":"test", "column":"id", "table":"clients" }, "default":null, "precision":32, "updatable":true, "schema":"test", "name":"client_id", "type":"integer", "maxLen":null, "enum":[], "nullable":true, "position":3 } ] } |] it "includes foreign key data" $ request methodOptions "/has_fk" [] "" `shouldRespondWith` [json| { "pkey": ["id"], "columns":[ { "default": "nextval('test.has_fk_id_seq'::regclass)", "precision": 64, "updatable": true, "schema": "test", "name": "id", "type": "bigint", "maxLen": null, "nullable": false, "position": 1, "enum": [], "references": null }, { "default": null, "precision": 32, "updatable": true, "schema": "test", "name": "auto_inc_fk", "type": "integer", "maxLen": null, "nullable": true, "position": 2, "enum": [], "references": {"schema":"test", "table": "auto_incrementing_pk", "column": "id"} }, { "default": null, "precision": null, "updatable": true, "schema": "test", "name": "simple_fk", "type": "character varying", "maxLen": 255, "nullable": true, "position": 3, "enum": [], "references": {"schema":"test", "table": "simple_pk", "column": "k"} } ] } |] it "includes all information on views for renamed columns, and raises relations to correct schema" $ request methodOptions "/articleStars" [] "" `shouldRespondWith` [json| { "pkey": [ "articleId", "userId" ], "columns": [ { "references": { "schema": "test", "column": "id", "table": "articles" }, "default": null, "precision": 32, "updatable": true, "schema": "test", "name": "articleId", "type": "integer", "maxLen": null, "enum": [], "nullable": true, "position": 1 }, { "references": { "schema": "test", "column": "id", "table": "users" }, "default": null, "precision": 32, "updatable": true, "schema": "test", "name": "userId", "type": "integer", "maxLen": null, "enum": [], "nullable": true, "position": 2 }, { "references": null, "default": null, "precision": null, "updatable": true, "schema": "test", "name": "createdAt", "type": "timestamp without time zone", "maxLen": null, "enum": [], "nullable": true, "position": 3 } ] } |] it "errors for non existant tables" $ request methodOptions "/dne" [] "" `shouldRespondWith` 404
9,989
spec struct c = around (withApp cfgDefault struct c) $ do describe "GET /" $ do it "lists views in schema" $ request methodGet "/" [] "" `shouldRespondWith` [json| [ {"schema":"test","name":"Escap3e;","insertable":true} , {"schema":"test","name":"articleStars","insertable":true} , {"schema":"test","name":"articles","insertable":true} , {"schema":"test","name":"auto_incrementing_pk","insertable":true} , {"schema":"test","name":"clients","insertable":true} , {"schema":"test","name":"comments","insertable":true} , {"schema":"test","name":"complex_items","insertable":true} , {"schema":"test","name":"compound_pk","insertable":true} , {"schema":"test","name":"ghostBusters","insertable":true} , {"schema":"test","name":"has_count_column","insertable":false} , {"schema":"test","name":"has_fk","insertable":true} , {"schema":"test","name":"insertable_view_with_join","insertable":true} , {"schema":"test","name":"insertonly","insertable":true} , {"schema":"test","name":"items","insertable":true} , {"schema":"test","name":"json","insertable":true} , {"schema":"test","name":"materialized_view","insertable":false} , {"schema":"test","name":"menagerie","insertable":true} , {"schema":"test","name":"no_pk","insertable":true} , {"schema":"test","name":"nullable_integer","insertable":true} , {"schema":"test","name":"projects","insertable":true} , {"schema":"test","name":"projects_view","insertable":true} , {"schema":"test","name":"simple_pk","insertable":true} , {"schema":"test","name":"tasks","insertable":true} , {"schema":"test","name":"tsearch","insertable":true} , {"schema":"test","name":"users","insertable":true} , {"schema":"test","name":"users_projects","insertable":true} , {"schema":"test","name":"users_tasks","insertable":true} , {"schema":"test","name":"withUnique","insertable":true} ] |] {matchStatus = 200} it "lists only views user has permission to see" $ do let auth = authHeaderJWT "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoicG9zdGdyZXN0X3Rlc3RfYXV0aG9yIiwiaWQiOiJqZG9lIn0.y4vZuu1dDdwAl0-S00MCRWRYMlJ5YAMSir6Es6WtWx0" request methodGet "/" [auth] "" `shouldRespondWith` [json| [ {"schema":"test","name":"authors_only","insertable":true} ] |] {matchStatus = 200} describe "Table info" $ do it "is available with OPTIONS verb" $ request methodOptions "/menagerie" [] "" `shouldRespondWith` [json| { "pkey":["integer"], "columns":[ { "default": null, "precision": 32, "updatable": true, "schema": "test", "name": "integer", "type": "integer", "maxLen": null, "enum": [], "nullable": false, "position": 1, "references": null, "default": null }, { "default": null, "precision": 53, "updatable": true, "schema": "test", "name": "double", "type": "double precision", "maxLen": null, "enum": [], "nullable": false, "references": null, "position": 2 }, { "default": null, "precision": null, "updatable": true, "schema": "test", "name": "varchar", "type": "character varying", "maxLen": null, "enum": [], "nullable": false, "position": 3, "references": null, "default": null }, { "default": null, "precision": null, "updatable": true, "schema": "test", "name": "boolean", "type": "boolean", "maxLen": null, "enum": [], "nullable": false, "references": null, "position": 4 }, { "default": null, "precision": null, "updatable": true, "schema": "test", "name": "date", "type": "date", "maxLen": null, "enum": [], "nullable": false, "references": null, "position": 5 }, { "default": null, "precision": null, "updatable": true, "schema": "test", "name": "money", "type": "money", "maxLen": null, "enum": [], "nullable": false, 
"position": 6, "references": null, "default": null }, { "default": null, "precision": null, "updatable": true, "schema": "test", "name": "enum", "type": "USER-DEFINED", "maxLen": null, "enum": [ "foo", "bar" ], "nullable": false, "position": 7, "references": null, "default": null } ] } |] it "it includes primary and foreign keys for views" $ request methodOptions "/projects_view" [] "" `shouldRespondWith` [json| { "pkey":[ "id" ], "columns":[ { "references":null, "default":null, "precision":32, "updatable":true, "schema":"test", "name":"id", "type":"integer", "maxLen":null, "enum":[], "nullable":true, "position":1 }, { "references":null, "default":null, "precision":null, "updatable":true, "schema":"test", "name":"name", "type":"text", "maxLen":null, "enum":[], "nullable":true, "position":2 }, { "references": { "schema":"test", "column":"id", "table":"clients" }, "default":null, "precision":32, "updatable":true, "schema":"test", "name":"client_id", "type":"integer", "maxLen":null, "enum":[], "nullable":true, "position":3 } ] } |] it "includes foreign key data" $ request methodOptions "/has_fk" [] "" `shouldRespondWith` [json| { "pkey": ["id"], "columns":[ { "default": "nextval('test.has_fk_id_seq'::regclass)", "precision": 64, "updatable": true, "schema": "test", "name": "id", "type": "bigint", "maxLen": null, "nullable": false, "position": 1, "enum": [], "references": null }, { "default": null, "precision": 32, "updatable": true, "schema": "test", "name": "auto_inc_fk", "type": "integer", "maxLen": null, "nullable": true, "position": 2, "enum": [], "references": {"schema":"test", "table": "auto_incrementing_pk", "column": "id"} }, { "default": null, "precision": null, "updatable": true, "schema": "test", "name": "simple_fk", "type": "character varying", "maxLen": 255, "nullable": true, "position": 3, "enum": [], "references": {"schema":"test", "table": "simple_pk", "column": "k"} } ] } |] it "includes all information on views for renamed columns, and raises relations to correct schema" $ request methodOptions "/articleStars" [] "" `shouldRespondWith` [json| { "pkey": [ "articleId", "userId" ], "columns": [ { "references": { "schema": "test", "column": "id", "table": "articles" }, "default": null, "precision": 32, "updatable": true, "schema": "test", "name": "articleId", "type": "integer", "maxLen": null, "enum": [], "nullable": true, "position": 1 }, { "references": { "schema": "test", "column": "id", "table": "users" }, "default": null, "precision": 32, "updatable": true, "schema": "test", "name": "userId", "type": "integer", "maxLen": null, "enum": [], "nullable": true, "position": 2 }, { "references": null, "default": null, "precision": null, "updatable": true, "schema": "test", "name": "createdAt", "type": "timestamp without time zone", "maxLen": null, "enum": [], "nullable": true, "position": 3 } ] } |] it "errors for non existant tables" $ request methodOptions "/dne" [] "" `shouldRespondWith` 404
9,945
false
true
0
17
4,083
314
162
152
null
null
fmapfmapfmap/amazonka
amazonka-iam/gen/Network/AWS/IAM/Types/Product.hs
mpl-2.0
-- | The contents of the public key certificate chain.
sCertificateChain :: Lens' ServerCertificate (Maybe Text)
sCertificateChain = lens _sCertificateChain (\ s a -> s{_sCertificateChain = a})
193
sCertificateChain :: Lens' ServerCertificate (Maybe Text)
sCertificateChain = lens _sCertificateChain (\ s a -> s{_sCertificateChain = a})
138
sCertificateChain = lens _sCertificateChain (\ s a -> s{_sCertificateChain = a})
80
true
true
0
9
26
46
25
21
null
null
pgreze/7languages7weeks
haskell/day2.hs
mit
lstripWith chars = dropWhile (\x -> any (== x) chars)
53
lstripWith chars = dropWhile (\x -> any (== x) chars)
53
lstripWith chars = dropWhile (\x -> any (== x) chars)
53
false
false
0
9
9
29
15
14
null
null
slasser/AllStar
ParserGenerator/AtnToDot.hs
bsd-3-clause
-- Took these from an online example
createImage :: PrintDotRepr dg n => FilePath -> dg n -> IO FilePath
createImage n g = createImageInDir "." n Png g
151
createImage :: PrintDotRepr dg n => FilePath -> dg n -> IO FilePath
createImage n g = createImageInDir "." n Png g
114
createImage n g = createImageInDir "." n Png g
46
true
true
0
8
28
49
23
26
null
null
brendanhay/gogol
gogol-books/gen/Network/Google/Books/Types/Product.hs
mpl-2.0
-- | Creates a value of 'VolumeVolumeInfoIndustryIdentifiersItem' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'vviiiiIdentifier'
--
-- * 'vviiiiType'
volumeVolumeInfoIndustryIdentifiersItem :: VolumeVolumeInfoIndustryIdentifiersItem
volumeVolumeInfoIndustryIdentifiersItem =
  VolumeVolumeInfoIndustryIdentifiersItem' {_vviiiiIdentifier = Nothing, _vviiiiType = Nothing}
467
volumeVolumeInfoIndustryIdentifiersItem :: VolumeVolumeInfoIndustryIdentifiersItem
volumeVolumeInfoIndustryIdentifiersItem =
  VolumeVolumeInfoIndustryIdentifiersItem' {_vviiiiIdentifier = Nothing, _vviiiiType = Nothing}
228
volumeVolumeInfoIndustryIdentifiersItem = VolumeVolumeInfoIndustryIdentifiersItem' {_vviiiiIdentifier = Nothing, _vviiiiType = Nothing}
141
true
true
1
7
59
39
23
16
null
null
davidfontenot/haskell-hashtag-viewer
src/WebServer.hs
mit
getPortFromArgs :: (Num a) => Map.Map String String -> a
getPortFromArgs argMap = case Map.lookup "port" argMap of
                           Just port -> fromIntegral (read port :: Integer)
                           Nothing -> 8001
233
getPortFromArgs :: (Num a) => Map.Map String String -> a
getPortFromArgs argMap = case Map.lookup "port" argMap of
                           Just port -> fromIntegral (read port :: Integer)
                           Nothing -> 8001
233
getPortFromArgs argMap = case Map.lookup "port" argMap of
                           Just port -> fromIntegral (read port :: Integer)
                           Nothing -> 8001
176
false
true
0
10
82
72
35
37
null
null
ghcjs/ghcjs
src/Gen2/Optimizer.hs
mit
intInfixOp GeOp i1 i2 = eBool (i1 >= i2)
49
intInfixOp GeOp i1 i2 = eBool (i1 >= i2)
49
intInfixOp GeOp i1 i2 = eBool (i1 >= i2)
49
false
false
0
7
17
23
11
12
null
null
knotman90/haskell-tutorial
code/Problem55/Problem55.hs
gpl-2.0
--a smarter solution involves the fact that a number that is NOT liry produces a chain of number that are not liry themself and should not be tested! This approach is Not needed here for 10000 numbers. Brute force works in milliseconds.
--Brute Force solution
solve :: Int
solve = let vals = (map (isLiry) [1..9999]) in length $ filter (==True) vals
350
solve :: Int
solve = let vals = (map (isLiry) [1..9999]) in length $ filter (==True) vals
89
solve = let vals = (map (isLiry) [1..9999]) in length $ filter (==True) vals
76
true
true
0
11
62
59
30
29
null
null
nickspinale/aether
src/Network/Aether/RW.hs
mit
buildWant :: Want -> B.ByteString
buildWant N4 = "n4"
53
buildWant :: Want -> B.ByteString
buildWant N4 = "n4"
53
buildWant N4 = "n4"
19
false
true
0
8
8
26
11
15
null
null