Search is not available for this dataset
repo_name
string
path
string
license
string
full_code
string
full_size
int64
uncommented_code
string
uncommented_size
int64
function_only_code
string
function_only_size
int64
is_commented
bool
is_signatured
bool
n_ast_errors
int64
ast_max_depth
int64
n_whitespaces
int64
n_ast_nodes
int64
n_ast_terminals
int64
n_ast_nonterminals
int64
loc
int64
cycloplexity
int64
diogob/postgrest
test/Feature/UpsertSpec.hs
mit
-- | Hspec integration tests for UPSERT support:
--
--   * POST with a @Prefer: resolution=merge-duplicates@ or
--     @resolution=ignore-duplicates@ header, and
--   * PUT upserting a single full row addressed by primary-key @eq@ filters.
spec :: SpecWith Application
spec =
  describe "UPSERT" $ do
    context "with POST" $ do

      context "when Prefer: resolution=merge-duplicates is specified" $ do
        it "INSERTs and UPDATEs rows on pk conflict" $
          request methodPost "/tiobe_pls"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=merge-duplicates")]
                  [json| [
                    { "name": "Javascript", "rank": 6 },
                    { "name": "Java", "rank": 2 },
                    { "name": "C", "rank": 1 }
                  ]|]
            `shouldRespondWith`
              [json| [
                { "name": "Javascript", "rank": 6 },
                { "name": "Java", "rank": 2 },
                { "name": "C", "rank": 1 }
              ]|]
              { matchStatus = 201
              , matchHeaders = ["Preference-Applied" <:> "resolution=merge-duplicates", matchContentTypeJson]
              }

        it "INSERTs and UPDATEs row on composite pk conflict" $
          request methodPost "/employees"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=merge-duplicates")]
                  [json| [
                    { "first_name": "Frances M.", "last_name": "Roe", "salary": "30000" },
                    { "first_name": "Peter S.", "last_name": "Yang", "salary": 42000 }
                  ]|]
            `shouldRespondWith`
              [json| [
                { "first_name": "Frances M.", "last_name": "Roe", "salary": "$30,000.00", "company": "One-Up Realty", "occupation": "Author" },
                { "first_name": "Peter S.", "last_name": "Yang", "salary": "$42,000.00", "company": null, "occupation": null }
              ]|]
              { matchStatus = 201
              , matchHeaders = ["Preference-Applied" <:> "resolution=merge-duplicates", matchContentTypeJson]
              }

        it "succeeds when the payload has no elements" $
          request methodPost "/articles"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=merge-duplicates")]
                  [json|[]|]
            `shouldRespondWith`
              [json|[]|]
              { matchStatus = 201
              , matchHeaders = [matchContentTypeJson]
              }

      context "when Prefer: resolution=ignore-duplicates is specified" $ do
        it "INSERTs and ignores rows on pk conflict" $
          request methodPost "/tiobe_pls"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=ignore-duplicates")]
                  [json|[
                    { "name": "PHP", "rank": 9 },
                    { "name": "Python", "rank": 10 }
                  ]|]
            `shouldRespondWith`
              [json|[ { "name": "PHP", "rank": 9 } ]|]
              { matchStatus = 201
              , matchHeaders = ["Preference-Applied" <:> "resolution=ignore-duplicates", matchContentTypeJson]
              }

        it "INSERTs and ignores rows on composite pk conflict" $
          request methodPost "/employees"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=ignore-duplicates")]
                  [json|[
                    { "first_name": "Daniel B.", "last_name": "Lyon", "salary": "72000", "company": null, "occupation": null },
                    { "first_name": "Sara M.", "last_name": "Torpey", "salary": 60000, "company": "Burstein-Applebee", "occupation": "Soil scientist" }
                  ]|]
            `shouldRespondWith`
              [json|[ { "first_name": "Sara M.", "last_name": "Torpey", "salary": "$60,000.00", "company": "Burstein-Applebee", "occupation": "Soil scientist" } ]|]
              { matchStatus = 201
              , matchHeaders = ["Preference-Applied" <:> "resolution=ignore-duplicates", matchContentTypeJson]
              }

        it "succeeds if the table has only PK cols and no other cols" $ do
          request methodPost "/only_pk"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=ignore-duplicates")]
                  [json|[ { "id": 1 }, { "id": 2 }, { "id": 3} ]|]
            `shouldRespondWith`
              [json|[ { "id": 3} ]|]
              { matchStatus = 201
              , matchHeaders = ["Preference-Applied" <:> "resolution=ignore-duplicates", matchContentTypeJson]
              }
          request methodPost "/only_pk"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=merge-duplicates")]
                  [json|[ { "id": 1 }, { "id": 2 }, { "id": 4} ]|]
            `shouldRespondWith`
              [json|[ { "id": 1 }, { "id": 2 }, { "id": 4} ]|]
              { matchStatus = 201
              , matchHeaders = ["Preference-Applied" <:> "resolution=merge-duplicates", matchContentTypeJson]
              }

        it "succeeds and ignores the Prefer: resolution header(no Preference-Applied present) if the table has no PK" $
          request methodPost "/no_pk"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=merge-duplicates")]
                  [json|[ { "a": "1", "b": "0" } ]|]
            `shouldRespondWith`
              [json|[ { "a": "1", "b": "0" } ]|]
              { matchStatus = 201
              , matchHeaders = [matchContentTypeJson]
              }

        it "succeeds if not a single resource is created" $ do
          request methodPost "/tiobe_pls"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=ignore-duplicates")]
                  [json|[ { "name": "Java", "rank": 1 } ]|]
            `shouldRespondWith`
              [json|[]|]
              { matchStatus = 201
              , matchHeaders = [matchContentTypeJson]
              }
          request methodPost "/tiobe_pls"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=ignore-duplicates")]
                  [json|[ { "name": "Java", "rank": 1 }, { "name": "C", "rank": 2 } ]|]
            `shouldRespondWith`
              [json|[]|]
              { matchStatus = 201
              , matchHeaders = [matchContentTypeJson]
              }

    context "with PUT" $ do
      context "Restrictions" $ do
        it "fails if Range is specified" $
          request methodPut "/tiobe_pls?name=eq.Javascript"
                  [("Range", "0-5")]
                  [str| [ { "name": "Javascript", "rank": 1 } ]|]
            `shouldRespondWith`
              [json|{"message":"Range header and limit/offset querystring parameters are not allowed for PUT"}|]
              { matchStatus = 400
              , matchHeaders = [matchContentTypeJson]
              }

        it "fails if limit is specified" $
          put "/tiobe_pls?name=eq.Javascript&limit=1"
              [str| [ { "name": "Javascript", "rank": 1 } ]|]
            `shouldRespondWith`
              [json|{"message":"Range header and limit/offset querystring parameters are not allowed for PUT"}|]
              { matchStatus = 400
              , matchHeaders = [matchContentTypeJson]
              }

        it "fails if offset is specified" $
          put "/tiobe_pls?name=eq.Javascript&offset=1"
              [str| [ { "name": "Javascript", "rank": 1 } ]|]
            `shouldRespondWith`
              [json|{"message":"Range header and limit/offset querystring parameters are not allowed for PUT"}|]
              { matchStatus = 400
              , matchHeaders = [matchContentTypeJson]
              }

        it "fails if the payload has more than one row" $
          put "/tiobe_pls?name=eq.Go"
              [str| [ { "name": "Go", "rank": 19 }, { "name": "Swift", "rank": 12 } ]|]
            `shouldRespondWith`
              [json|{"message":"PUT payload must contain a single row"}|]
              { matchStatus = 400
              , matchHeaders = [matchContentTypeJson]
              }

        it "fails if not all columns are specified" $ do
          put "/tiobe_pls?name=eq.Go"
              [str| [ { "name": "Go" } ]|]
            `shouldRespondWith`
              [json|{"message":"You must specify all columns in the payload when using PUT"}|]
              { matchStatus = 400
              , matchHeaders = [matchContentTypeJson]
              }
          put "/employees?first_name=eq.Susan&last_name=eq.Heidt"
              [str| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "48000" } ]|]
            `shouldRespondWith`
              [json|{"message":"You must specify all columns in the payload when using PUT"}|]
              { matchStatus = 400
              , matchHeaders = [matchContentTypeJson]
              }

        it "rejects every other filter than pk cols eq's" $ do
          put "/tiobe_pls?rank=eq.19"
              [str| [ { "name": "Go", "rank": 19 } ]|]
            `shouldRespondWith`
              [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|]
              { matchStatus = 405
              , matchHeaders = [matchContentTypeJson]
              }
          put "/tiobe_pls?id=not.eq.Java"
              [str| [ { "name": "Go", "rank": 19 } ]|]
            `shouldRespondWith`
              [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|]
              { matchStatus = 405
              , matchHeaders = [matchContentTypeJson]
              }
          put "/tiobe_pls?id=in.(Go)"
              [str| [ { "name": "Go", "rank": 19 } ]|]
            `shouldRespondWith`
              [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|]
              { matchStatus = 405
              , matchHeaders = [matchContentTypeJson]
              }
          put "/tiobe_pls?and=(id.eq.Go)"
              [str| [ { "name": "Go", "rank": 19 } ]|]
            `shouldRespondWith`
              [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|]
              { matchStatus = 405
              , matchHeaders = [matchContentTypeJson]
              }

        it "fails if not all composite key cols are specified as eq filters" $ do
          put "/employees?first_name=eq.Susan"
              [str| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "48000", "company": "GEX", "occupation": "Railroad engineer" } ]|]
            `shouldRespondWith`
              [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|]
              { matchStatus = 405
              , matchHeaders = [matchContentTypeJson]
              }
          put "/employees?last_name=eq.Heidt"
              [str| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "48000", "company": "GEX", "occupation": "Railroad engineer" } ]|]
            `shouldRespondWith`
              [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|]
              { matchStatus = 405
              , matchHeaders = [matchContentTypeJson]
              }

        it "fails if the uri primary key doesn't match the payload primary key" $ do
          put "/tiobe_pls?name=eq.MATLAB"
              [str| [ { "name": "Perl", "rank": 17 } ]|]
            `shouldRespondWith`
              [json|{"message":"Payload values do not match URL in primary key column(s)"}|]
              { matchStatus = 400
              , matchHeaders = [matchContentTypeJson]
              }
          put "/employees?first_name=eq.Wendy&last_name=eq.Anderson"
              [str| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "48000", "company": "GEX", "occupation": "Railroad engineer" } ]|]
            `shouldRespondWith`
              [json|{"message":"Payload values do not match URL in primary key column(s)"}|]
              { matchStatus = 400
              , matchHeaders = [matchContentTypeJson]
              }

        it "fails if the table has no PK" $
          put "/no_pk?a=eq.one&b=eq.two"
              [str| [ { "a": "one", "b": "two" } ]|]
            `shouldRespondWith`
              [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|]
              { matchStatus = 405
              , matchHeaders = [matchContentTypeJson]
              }

      context "Inserting row" $ do
        it "succeeds on table with single pk col" $ do
          get "/tiobe_pls?name=eq.Go" `shouldRespondWith` "[]"
          put "/tiobe_pls?name=eq.Go"
              [str| [ { "name": "Go", "rank": 19 } ]|]
            `shouldRespondWith` 204
          get "/tiobe_pls?name=eq.Go"
            `shouldRespondWith`
              [json| [ { "name": "Go", "rank": 19 } ]|]
              { matchHeaders = [matchContentTypeJson] }

        it "succeeds on table with composite pk" $ do
          get "/employees?first_name=eq.Susan&last_name=eq.Heidt" `shouldRespondWith` "[]"
          put "/employees?first_name=eq.Susan&last_name=eq.Heidt"
              [str| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "48000", "company": "GEX", "occupation": "Railroad engineer" } ]|]
            `shouldRespondWith` 204
          get "/employees?first_name=eq.Susan&last_name=eq.Heidt"
            `shouldRespondWith`
              [json| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "$48,000.00", "company": "GEX", "occupation": "Railroad engineer" } ]|]
              { matchHeaders = [matchContentTypeJson] }

        it "succeeds if the table has only PK cols and no other cols" $ do
          get "/only_pk?id=eq.10" `shouldRespondWith` "[]"
          put "/only_pk?id=eq.10" [str|[ { "id": 10 } ]|] `shouldRespondWith` 204
          get "/only_pk?id=eq.10"
            `shouldRespondWith`
              [json|[ { "id": 10 } ]|]
              { matchHeaders = [matchContentTypeJson] }

      context "Updating row" $ do
        it "succeeds on table with single pk col" $ do
          get "/tiobe_pls?name=eq.Go"
            `shouldRespondWith`
              [json|[ { "name": "Go", "rank": 19 } ]|]
              { matchHeaders = [matchContentTypeJson] }
          put "/tiobe_pls?name=eq.Go"
              [str| [ { "name": "Go", "rank": 13 } ]|]
            `shouldRespondWith` 204
          get "/tiobe_pls?name=eq.Go"
            `shouldRespondWith`
              [json| [ { "name": "Go", "rank": 13 } ]|]
              { matchHeaders = [matchContentTypeJson] }

        it "succeeds on table with composite pk" $ do
          get "/employees?first_name=eq.Susan&last_name=eq.Heidt"
            `shouldRespondWith`
              [json| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "$48,000.00", "company": "GEX", "occupation": "Railroad engineer" } ]|]
              { matchHeaders = [matchContentTypeJson] }
          put "/employees?first_name=eq.Susan&last_name=eq.Heidt"
              [str| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "60000", "company": "Gamma Gas", "occupation": "Railroad engineer" } ]|]
            `shouldRespondWith` 204
          get "/employees?first_name=eq.Susan&last_name=eq.Heidt"
            `shouldRespondWith`
              [json| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "$60,000.00", "company": "Gamma Gas", "occupation": "Railroad engineer" } ]|]
              { matchHeaders = [matchContentTypeJson] }

        it "succeeds if the table has only PK cols and no other cols" $ do
          get "/only_pk?id=eq.10"
            `shouldRespondWith`
              [json|[ { "id": 10 } ]|]
              { matchHeaders = [matchContentTypeJson] }
          put "/only_pk?id=eq.10" [str|[ { "id": 10 } ]|] `shouldRespondWith` 204
          get "/only_pk?id=eq.10"
            `shouldRespondWith`
              [json|[ { "id": 10 } ]|]
              { matchHeaders = [matchContentTypeJson] }

        it "works with return=representation and vnd.pgrst.object+json" $
          request methodPut "/tiobe_pls?name=eq.Ruby"
                  [("Prefer", "return=representation"), ("Accept", "application/vnd.pgrst.object+json")]
                  [str| [ { "name": "Ruby", "rank": 11 } ]|]
            `shouldRespondWith`
              [json|{ "name": "Ruby", "rank": 11 }|]
              { matchHeaders = [matchContentTypeSingular] }

    context "with a camel case pk column" $ do
      it "works with POST and merge-duplicates/ignore-duplicates headers" $ do
        request methodPost "/UnitTest"
                [("Prefer", "return=representation"), ("Prefer", "resolution=merge-duplicates")]
                [json| [
                  { "idUnitTest": 1, "nameUnitTest": "name of unittest 1" },
                  { "idUnitTest": 2, "nameUnitTest": "name of unittest 2" }
                ]|]
          `shouldRespondWith`
            [json|[
              { "idUnitTest": 1, "nameUnitTest": "name of unittest 1" },
              { "idUnitTest": 2, "nameUnitTest": "name of unittest 2" }
            ]|]
            { matchStatus = 201
            , matchHeaders = ["Preference-Applied" <:> "resolution=merge-duplicates", matchContentTypeJson]
            }
        request methodPost "/UnitTest"
                [("Prefer", "return=representation"), ("Prefer", "resolution=ignore-duplicates")]
                [json| [
                  { "idUnitTest": 1, "nameUnitTest": "name of unittest 1" },
                  { "idUnitTest": 2, "nameUnitTest": "name of unittest 2" }
                ]|]
          `shouldRespondWith`
            [json|[]|]
            { matchStatus = 201
            , matchHeaders = ["Preference-Applied" <:> "resolution=ignore-duplicates", matchContentTypeJson]
            }

      it "works with PUT" $ do
        put "/UnitTest?idUnitTest=eq.1"
            [str| [ { "idUnitTest": 1, "nameUnitTest": "unit test 1" } ]|]
          `shouldRespondWith` 204
        get "/UnitTest?idUnitTest=eq.1"
          `shouldRespondWith`
            [json| [ { "idUnitTest": 1, "nameUnitTest": "unit test 1" } ]|]
            { matchHeaders = [matchContentTypeJson] }
16,168
-- | Hspec integration tests for UPSERT support:
--
--   * POST with a @Prefer: resolution=merge-duplicates@ or
--     @resolution=ignore-duplicates@ header, and
--   * PUT upserting a single full row addressed by primary-key @eq@ filters.
spec :: SpecWith Application
spec =
  describe "UPSERT" $ do
    context "with POST" $ do

      context "when Prefer: resolution=merge-duplicates is specified" $ do
        it "INSERTs and UPDATEs rows on pk conflict" $
          request methodPost "/tiobe_pls"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=merge-duplicates")]
                  [json| [
                    { "name": "Javascript", "rank": 6 },
                    { "name": "Java", "rank": 2 },
                    { "name": "C", "rank": 1 }
                  ]|]
            `shouldRespondWith`
              [json| [
                { "name": "Javascript", "rank": 6 },
                { "name": "Java", "rank": 2 },
                { "name": "C", "rank": 1 }
              ]|]
              { matchStatus = 201
              , matchHeaders = ["Preference-Applied" <:> "resolution=merge-duplicates", matchContentTypeJson]
              }

        it "INSERTs and UPDATEs row on composite pk conflict" $
          request methodPost "/employees"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=merge-duplicates")]
                  [json| [
                    { "first_name": "Frances M.", "last_name": "Roe", "salary": "30000" },
                    { "first_name": "Peter S.", "last_name": "Yang", "salary": 42000 }
                  ]|]
            `shouldRespondWith`
              [json| [
                { "first_name": "Frances M.", "last_name": "Roe", "salary": "$30,000.00", "company": "One-Up Realty", "occupation": "Author" },
                { "first_name": "Peter S.", "last_name": "Yang", "salary": "$42,000.00", "company": null, "occupation": null }
              ]|]
              { matchStatus = 201
              , matchHeaders = ["Preference-Applied" <:> "resolution=merge-duplicates", matchContentTypeJson]
              }

        it "succeeds when the payload has no elements" $
          request methodPost "/articles"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=merge-duplicates")]
                  [json|[]|]
            `shouldRespondWith`
              [json|[]|]
              { matchStatus = 201
              , matchHeaders = [matchContentTypeJson]
              }

      context "when Prefer: resolution=ignore-duplicates is specified" $ do
        it "INSERTs and ignores rows on pk conflict" $
          request methodPost "/tiobe_pls"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=ignore-duplicates")]
                  [json|[
                    { "name": "PHP", "rank": 9 },
                    { "name": "Python", "rank": 10 }
                  ]|]
            `shouldRespondWith`
              [json|[ { "name": "PHP", "rank": 9 } ]|]
              { matchStatus = 201
              , matchHeaders = ["Preference-Applied" <:> "resolution=ignore-duplicates", matchContentTypeJson]
              }

        it "INSERTs and ignores rows on composite pk conflict" $
          request methodPost "/employees"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=ignore-duplicates")]
                  [json|[
                    { "first_name": "Daniel B.", "last_name": "Lyon", "salary": "72000", "company": null, "occupation": null },
                    { "first_name": "Sara M.", "last_name": "Torpey", "salary": 60000, "company": "Burstein-Applebee", "occupation": "Soil scientist" }
                  ]|]
            `shouldRespondWith`
              [json|[ { "first_name": "Sara M.", "last_name": "Torpey", "salary": "$60,000.00", "company": "Burstein-Applebee", "occupation": "Soil scientist" } ]|]
              { matchStatus = 201
              , matchHeaders = ["Preference-Applied" <:> "resolution=ignore-duplicates", matchContentTypeJson]
              }

        it "succeeds if the table has only PK cols and no other cols" $ do
          request methodPost "/only_pk"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=ignore-duplicates")]
                  [json|[ { "id": 1 }, { "id": 2 }, { "id": 3} ]|]
            `shouldRespondWith`
              [json|[ { "id": 3} ]|]
              { matchStatus = 201
              , matchHeaders = ["Preference-Applied" <:> "resolution=ignore-duplicates", matchContentTypeJson]
              }
          request methodPost "/only_pk"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=merge-duplicates")]
                  [json|[ { "id": 1 }, { "id": 2 }, { "id": 4} ]|]
            `shouldRespondWith`
              [json|[ { "id": 1 }, { "id": 2 }, { "id": 4} ]|]
              { matchStatus = 201
              , matchHeaders = ["Preference-Applied" <:> "resolution=merge-duplicates", matchContentTypeJson]
              }

        it "succeeds and ignores the Prefer: resolution header(no Preference-Applied present) if the table has no PK" $
          request methodPost "/no_pk"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=merge-duplicates")]
                  [json|[ { "a": "1", "b": "0" } ]|]
            `shouldRespondWith`
              [json|[ { "a": "1", "b": "0" } ]|]
              { matchStatus = 201
              , matchHeaders = [matchContentTypeJson]
              }

        it "succeeds if not a single resource is created" $ do
          request methodPost "/tiobe_pls"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=ignore-duplicates")]
                  [json|[ { "name": "Java", "rank": 1 } ]|]
            `shouldRespondWith`
              [json|[]|]
              { matchStatus = 201
              , matchHeaders = [matchContentTypeJson]
              }
          request methodPost "/tiobe_pls"
                  [("Prefer", "return=representation"), ("Prefer", "resolution=ignore-duplicates")]
                  [json|[ { "name": "Java", "rank": 1 }, { "name": "C", "rank": 2 } ]|]
            `shouldRespondWith`
              [json|[]|]
              { matchStatus = 201
              , matchHeaders = [matchContentTypeJson]
              }

    context "with PUT" $ do
      context "Restrictions" $ do
        it "fails if Range is specified" $
          request methodPut "/tiobe_pls?name=eq.Javascript"
                  [("Range", "0-5")]
                  [str| [ { "name": "Javascript", "rank": 1 } ]|]
            `shouldRespondWith`
              [json|{"message":"Range header and limit/offset querystring parameters are not allowed for PUT"}|]
              { matchStatus = 400
              , matchHeaders = [matchContentTypeJson]
              }

        it "fails if limit is specified" $
          put "/tiobe_pls?name=eq.Javascript&limit=1"
              [str| [ { "name": "Javascript", "rank": 1 } ]|]
            `shouldRespondWith`
              [json|{"message":"Range header and limit/offset querystring parameters are not allowed for PUT"}|]
              { matchStatus = 400
              , matchHeaders = [matchContentTypeJson]
              }

        it "fails if offset is specified" $
          put "/tiobe_pls?name=eq.Javascript&offset=1"
              [str| [ { "name": "Javascript", "rank": 1 } ]|]
            `shouldRespondWith`
              [json|{"message":"Range header and limit/offset querystring parameters are not allowed for PUT"}|]
              { matchStatus = 400
              , matchHeaders = [matchContentTypeJson]
              }

        it "fails if the payload has more than one row" $
          put "/tiobe_pls?name=eq.Go"
              [str| [ { "name": "Go", "rank": 19 }, { "name": "Swift", "rank": 12 } ]|]
            `shouldRespondWith`
              [json|{"message":"PUT payload must contain a single row"}|]
              { matchStatus = 400
              , matchHeaders = [matchContentTypeJson]
              }

        it "fails if not all columns are specified" $ do
          put "/tiobe_pls?name=eq.Go"
              [str| [ { "name": "Go" } ]|]
            `shouldRespondWith`
              [json|{"message":"You must specify all columns in the payload when using PUT"}|]
              { matchStatus = 400
              , matchHeaders = [matchContentTypeJson]
              }
          put "/employees?first_name=eq.Susan&last_name=eq.Heidt"
              [str| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "48000" } ]|]
            `shouldRespondWith`
              [json|{"message":"You must specify all columns in the payload when using PUT"}|]
              { matchStatus = 400
              , matchHeaders = [matchContentTypeJson]
              }

        it "rejects every other filter than pk cols eq's" $ do
          put "/tiobe_pls?rank=eq.19"
              [str| [ { "name": "Go", "rank": 19 } ]|]
            `shouldRespondWith`
              [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|]
              { matchStatus = 405
              , matchHeaders = [matchContentTypeJson]
              }
          put "/tiobe_pls?id=not.eq.Java"
              [str| [ { "name": "Go", "rank": 19 } ]|]
            `shouldRespondWith`
              [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|]
              { matchStatus = 405
              , matchHeaders = [matchContentTypeJson]
              }
          put "/tiobe_pls?id=in.(Go)"
              [str| [ { "name": "Go", "rank": 19 } ]|]
            `shouldRespondWith`
              [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|]
              { matchStatus = 405
              , matchHeaders = [matchContentTypeJson]
              }
          put "/tiobe_pls?and=(id.eq.Go)"
              [str| [ { "name": "Go", "rank": 19 } ]|]
            `shouldRespondWith`
              [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|]
              { matchStatus = 405
              , matchHeaders = [matchContentTypeJson]
              }

        it "fails if not all composite key cols are specified as eq filters" $ do
          put "/employees?first_name=eq.Susan"
              [str| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "48000", "company": "GEX", "occupation": "Railroad engineer" } ]|]
            `shouldRespondWith`
              [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|]
              { matchStatus = 405
              , matchHeaders = [matchContentTypeJson]
              }
          put "/employees?last_name=eq.Heidt"
              [str| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "48000", "company": "GEX", "occupation": "Railroad engineer" } ]|]
            `shouldRespondWith`
              [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|]
              { matchStatus = 405
              , matchHeaders = [matchContentTypeJson]
              }

        it "fails if the uri primary key doesn't match the payload primary key" $ do
          put "/tiobe_pls?name=eq.MATLAB"
              [str| [ { "name": "Perl", "rank": 17 } ]|]
            `shouldRespondWith`
              [json|{"message":"Payload values do not match URL in primary key column(s)"}|]
              { matchStatus = 400
              , matchHeaders = [matchContentTypeJson]
              }
          put "/employees?first_name=eq.Wendy&last_name=eq.Anderson"
              [str| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "48000", "company": "GEX", "occupation": "Railroad engineer" } ]|]
            `shouldRespondWith`
              [json|{"message":"Payload values do not match URL in primary key column(s)"}|]
              { matchStatus = 400
              , matchHeaders = [matchContentTypeJson]
              }

        it "fails if the table has no PK" $
          put "/no_pk?a=eq.one&b=eq.two"
              [str| [ { "a": "one", "b": "two" } ]|]
            `shouldRespondWith`
              [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|]
              { matchStatus = 405
              , matchHeaders = [matchContentTypeJson]
              }

      context "Inserting row" $ do
        it "succeeds on table with single pk col" $ do
          get "/tiobe_pls?name=eq.Go" `shouldRespondWith` "[]"
          put "/tiobe_pls?name=eq.Go"
              [str| [ { "name": "Go", "rank": 19 } ]|]
            `shouldRespondWith` 204
          get "/tiobe_pls?name=eq.Go"
            `shouldRespondWith`
              [json| [ { "name": "Go", "rank": 19 } ]|]
              { matchHeaders = [matchContentTypeJson] }

        it "succeeds on table with composite pk" $ do
          get "/employees?first_name=eq.Susan&last_name=eq.Heidt" `shouldRespondWith` "[]"
          put "/employees?first_name=eq.Susan&last_name=eq.Heidt"
              [str| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "48000", "company": "GEX", "occupation": "Railroad engineer" } ]|]
            `shouldRespondWith` 204
          get "/employees?first_name=eq.Susan&last_name=eq.Heidt"
            `shouldRespondWith`
              [json| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "$48,000.00", "company": "GEX", "occupation": "Railroad engineer" } ]|]
              { matchHeaders = [matchContentTypeJson] }

        it "succeeds if the table has only PK cols and no other cols" $ do
          get "/only_pk?id=eq.10" `shouldRespondWith` "[]"
          put "/only_pk?id=eq.10" [str|[ { "id": 10 } ]|] `shouldRespondWith` 204
          get "/only_pk?id=eq.10"
            `shouldRespondWith`
              [json|[ { "id": 10 } ]|]
              { matchHeaders = [matchContentTypeJson] }

      context "Updating row" $ do
        it "succeeds on table with single pk col" $ do
          get "/tiobe_pls?name=eq.Go"
            `shouldRespondWith`
              [json|[ { "name": "Go", "rank": 19 } ]|]
              { matchHeaders = [matchContentTypeJson] }
          put "/tiobe_pls?name=eq.Go"
              [str| [ { "name": "Go", "rank": 13 } ]|]
            `shouldRespondWith` 204
          get "/tiobe_pls?name=eq.Go"
            `shouldRespondWith`
              [json| [ { "name": "Go", "rank": 13 } ]|]
              { matchHeaders = [matchContentTypeJson] }

        it "succeeds on table with composite pk" $ do
          get "/employees?first_name=eq.Susan&last_name=eq.Heidt"
            `shouldRespondWith`
              [json| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "$48,000.00", "company": "GEX", "occupation": "Railroad engineer" } ]|]
              { matchHeaders = [matchContentTypeJson] }
          put "/employees?first_name=eq.Susan&last_name=eq.Heidt"
              [str| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "60000", "company": "Gamma Gas", "occupation": "Railroad engineer" } ]|]
            `shouldRespondWith` 204
          get "/employees?first_name=eq.Susan&last_name=eq.Heidt"
            `shouldRespondWith`
              [json| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "$60,000.00", "company": "Gamma Gas", "occupation": "Railroad engineer" } ]|]
              { matchHeaders = [matchContentTypeJson] }

        it "succeeds if the table has only PK cols and no other cols" $ do
          get "/only_pk?id=eq.10"
            `shouldRespondWith`
              [json|[ { "id": 10 } ]|]
              { matchHeaders = [matchContentTypeJson] }
          put "/only_pk?id=eq.10" [str|[ { "id": 10 } ]|] `shouldRespondWith` 204
          get "/only_pk?id=eq.10"
            `shouldRespondWith`
              [json|[ { "id": 10 } ]|]
              { matchHeaders = [matchContentTypeJson] }

        it "works with return=representation and vnd.pgrst.object+json" $
          request methodPut "/tiobe_pls?name=eq.Ruby"
                  [("Prefer", "return=representation"), ("Accept", "application/vnd.pgrst.object+json")]
                  [str| [ { "name": "Ruby", "rank": 11 } ]|]
            `shouldRespondWith`
              [json|{ "name": "Ruby", "rank": 11 }|]
              { matchHeaders = [matchContentTypeSingular] }

    context "with a camel case pk column" $ do
      it "works with POST and merge-duplicates/ignore-duplicates headers" $ do
        request methodPost "/UnitTest"
                [("Prefer", "return=representation"), ("Prefer", "resolution=merge-duplicates")]
                [json| [
                  { "idUnitTest": 1, "nameUnitTest": "name of unittest 1" },
                  { "idUnitTest": 2, "nameUnitTest": "name of unittest 2" }
                ]|]
          `shouldRespondWith`
            [json|[
              { "idUnitTest": 1, "nameUnitTest": "name of unittest 1" },
              { "idUnitTest": 2, "nameUnitTest": "name of unittest 2" }
            ]|]
            { matchStatus = 201
            , matchHeaders = ["Preference-Applied" <:> "resolution=merge-duplicates", matchContentTypeJson]
            }
        request methodPost "/UnitTest"
                [("Prefer", "return=representation"), ("Prefer", "resolution=ignore-duplicates")]
                [json| [
                  { "idUnitTest": 1, "nameUnitTest": "name of unittest 1" },
                  { "idUnitTest": 2, "nameUnitTest": "name of unittest 2" }
                ]|]
          `shouldRespondWith`
            [json|[]|]
            { matchStatus = 201
            , matchHeaders = ["Preference-Applied" <:> "resolution=ignore-duplicates", matchContentTypeJson]
            }

      it "works with PUT" $ do
        put "/UnitTest?idUnitTest=eq.1"
            [str| [ { "idUnitTest": 1, "nameUnitTest": "unit test 1" } ]|]
          `shouldRespondWith` 204
        get "/UnitTest?idUnitTest=eq.1"
          `shouldRespondWith`
            [json| [ { "idUnitTest": 1, "nameUnitTest": "unit test 1" } ]|]
            { matchHeaders = [matchContentTypeJson] }
16,168
spec = describe "UPSERT" $ do context "with POST" $ do context "when Prefer: resolution=merge-duplicates is specified" $ do it "INSERTs and UPDATEs rows on pk conflict" $ request methodPost "/tiobe_pls" [("Prefer", "return=representation"), ("Prefer", "resolution=merge-duplicates")] [json| [ { "name": "Javascript", "rank": 6 }, { "name": "Java", "rank": 2 }, { "name": "C", "rank": 1 } ]|] `shouldRespondWith` [json| [ { "name": "Javascript", "rank": 6 }, { "name": "Java", "rank": 2 }, { "name": "C", "rank": 1 } ]|] { matchStatus = 201 , matchHeaders = ["Preference-Applied" <:> "resolution=merge-duplicates", matchContentTypeJson] } it "INSERTs and UPDATEs row on composite pk conflict" $ request methodPost "/employees" [("Prefer", "return=representation"), ("Prefer", "resolution=merge-duplicates")] [json| [ { "first_name": "Frances M.", "last_name": "Roe", "salary": "30000" }, { "first_name": "Peter S.", "last_name": "Yang", "salary": 42000 } ]|] `shouldRespondWith` [json| [ { "first_name": "Frances M.", "last_name": "Roe", "salary": "$30,000.00", "company": "One-Up Realty", "occupation": "Author" }, { "first_name": "Peter S.", "last_name": "Yang", "salary": "$42,000.00", "company": null, "occupation": null } ]|] { matchStatus = 201 , matchHeaders = ["Preference-Applied" <:> "resolution=merge-duplicates", matchContentTypeJson] } it "succeeds when the payload has no elements" $ request methodPost "/articles" [("Prefer", "return=representation"), ("Prefer", "resolution=merge-duplicates")] [json|[]|] `shouldRespondWith` [json|[]|] { matchStatus = 201 , matchHeaders = [matchContentTypeJson] } context "when Prefer: resolution=ignore-duplicates is specified" $ do it "INSERTs and ignores rows on pk conflict" $ request methodPost "/tiobe_pls" [("Prefer", "return=representation"), ("Prefer", "resolution=ignore-duplicates")] [json|[ { "name": "PHP", "rank": 9 }, { "name": "Python", "rank": 10 } ]|] `shouldRespondWith` [json|[ { "name": "PHP", "rank": 9 } ]|] { matchStatus = 201 
, matchHeaders = ["Preference-Applied" <:> "resolution=ignore-duplicates", matchContentTypeJson] } it "INSERTs and ignores rows on composite pk conflict" $ request methodPost "/employees" [("Prefer", "return=representation"), ("Prefer", "resolution=ignore-duplicates")] [json|[ { "first_name": "Daniel B.", "last_name": "Lyon", "salary": "72000", "company": null, "occupation": null }, { "first_name": "Sara M.", "last_name": "Torpey", "salary": 60000, "company": "Burstein-Applebee", "occupation": "Soil scientist" } ]|] `shouldRespondWith` [json|[ { "first_name": "Sara M.", "last_name": "Torpey", "salary": "$60,000.00", "company": "Burstein-Applebee", "occupation": "Soil scientist" } ]|] { matchStatus = 201 , matchHeaders = ["Preference-Applied" <:> "resolution=ignore-duplicates", matchContentTypeJson] } it "succeeds if the table has only PK cols and no other cols" $ do request methodPost "/only_pk" [("Prefer", "return=representation"), ("Prefer", "resolution=ignore-duplicates")] [json|[ { "id": 1 }, { "id": 2 }, { "id": 3} ]|] `shouldRespondWith` [json|[ { "id": 3} ]|] { matchStatus = 201 , matchHeaders = ["Preference-Applied" <:> "resolution=ignore-duplicates", matchContentTypeJson] } request methodPost "/only_pk" [("Prefer", "return=representation"), ("Prefer", "resolution=merge-duplicates")] [json|[ { "id": 1 }, { "id": 2 }, { "id": 4} ]|] `shouldRespondWith` [json|[ { "id": 1 }, { "id": 2 }, { "id": 4} ]|] { matchStatus = 201 , matchHeaders = ["Preference-Applied" <:> "resolution=merge-duplicates", matchContentTypeJson] } it "succeeds and ignores the Prefer: resolution header(no Preference-Applied present) if the table has no PK" $ request methodPost "/no_pk" [("Prefer", "return=representation"), ("Prefer", "resolution=merge-duplicates")] [json|[ { "a": "1", "b": "0" } ]|] `shouldRespondWith` [json|[ { "a": "1", "b": "0" } ]|] { matchStatus = 201 , matchHeaders = [matchContentTypeJson] } it "succeeds if not a single resource is created" $ do request methodPost 
"/tiobe_pls" [("Prefer", "return=representation"), ("Prefer", "resolution=ignore-duplicates")] [json|[ { "name": "Java", "rank": 1 } ]|] `shouldRespondWith` [json|[]|] { matchStatus = 201 , matchHeaders = [matchContentTypeJson] } request methodPost "/tiobe_pls" [("Prefer", "return=representation"), ("Prefer", "resolution=ignore-duplicates")] [json|[ { "name": "Java", "rank": 1 }, { "name": "C", "rank": 2 } ]|] `shouldRespondWith` [json|[]|] { matchStatus = 201 , matchHeaders = [matchContentTypeJson] } context "with PUT" $ do context "Restrictions" $ do it "fails if Range is specified" $ request methodPut "/tiobe_pls?name=eq.Javascript" [("Range", "0-5")] [str| [ { "name": "Javascript", "rank": 1 } ]|] `shouldRespondWith` [json|{"message":"Range header and limit/offset querystring parameters are not allowed for PUT"}|] { matchStatus = 400 , matchHeaders = [matchContentTypeJson] } it "fails if limit is specified" $ put "/tiobe_pls?name=eq.Javascript&limit=1" [str| [ { "name": "Javascript", "rank": 1 } ]|] `shouldRespondWith` [json|{"message":"Range header and limit/offset querystring parameters are not allowed for PUT"}|] { matchStatus = 400 , matchHeaders = [matchContentTypeJson] } it "fails if offset is specified" $ put "/tiobe_pls?name=eq.Javascript&offset=1" [str| [ { "name": "Javascript", "rank": 1 } ]|] `shouldRespondWith` [json|{"message":"Range header and limit/offset querystring parameters are not allowed for PUT"}|] { matchStatus = 400 , matchHeaders = [matchContentTypeJson] } it "fails if the payload has more than one row" $ put "/tiobe_pls?name=eq.Go" [str| [ { "name": "Go", "rank": 19 }, { "name": "Swift", "rank": 12 } ]|] `shouldRespondWith` [json|{"message":"PUT payload must contain a single row"}|] { matchStatus = 400 , matchHeaders = [matchContentTypeJson] } it "fails if not all columns are specified" $ do put "/tiobe_pls?name=eq.Go" [str| [ { "name": "Go" } ]|] `shouldRespondWith` [json|{"message":"You must specify all columns in the payload when 
using PUT"}|] { matchStatus = 400 , matchHeaders = [matchContentTypeJson] } put "/employees?first_name=eq.Susan&last_name=eq.Heidt" [str| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "48000" } ]|] `shouldRespondWith` [json|{"message":"You must specify all columns in the payload when using PUT"}|] { matchStatus = 400 , matchHeaders = [matchContentTypeJson] } it "rejects every other filter than pk cols eq's" $ do put "/tiobe_pls?rank=eq.19" [str| [ { "name": "Go", "rank": 19 } ]|] `shouldRespondWith` [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|] { matchStatus = 405 , matchHeaders = [matchContentTypeJson] } put "/tiobe_pls?id=not.eq.Java" [str| [ { "name": "Go", "rank": 19 } ]|] `shouldRespondWith` [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|] { matchStatus = 405 , matchHeaders = [matchContentTypeJson] } put "/tiobe_pls?id=in.(Go)" [str| [ { "name": "Go", "rank": 19 } ]|] `shouldRespondWith` [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|] { matchStatus = 405 , matchHeaders = [matchContentTypeJson] } put "/tiobe_pls?and=(id.eq.Go)" [str| [ { "name": "Go", "rank": 19 } ]|] `shouldRespondWith` [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|] { matchStatus = 405 , matchHeaders = [matchContentTypeJson] } it "fails if not all composite key cols are specified as eq filters" $ do put "/employees?first_name=eq.Susan" [str| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "48000", "company": "GEX", "occupation": "Railroad engineer" } ]|] `shouldRespondWith` [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|] { matchStatus = 405 , matchHeaders = [matchContentTypeJson] } put "/employees?last_name=eq.Heidt" [str| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "48000", "company": "GEX", "occupation": "Railroad 
engineer" } ]|] `shouldRespondWith` [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|] { matchStatus = 405 , matchHeaders = [matchContentTypeJson] } it "fails if the uri primary key doesn't match the payload primary key" $ do put "/tiobe_pls?name=eq.MATLAB" [str| [ { "name": "Perl", "rank": 17 } ]|] `shouldRespondWith` [json|{"message":"Payload values do not match URL in primary key column(s)"}|] { matchStatus = 400 , matchHeaders = [matchContentTypeJson] } put "/employees?first_name=eq.Wendy&last_name=eq.Anderson" [str| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "48000", "company": "GEX", "occupation": "Railroad engineer" } ]|] `shouldRespondWith` [json|{"message":"Payload values do not match URL in primary key column(s)"}|] { matchStatus = 400 , matchHeaders = [matchContentTypeJson] } it "fails if the table has no PK" $ put "/no_pk?a=eq.one&b=eq.two" [str| [ { "a": "one", "b": "two" } ]|] `shouldRespondWith` [json|{"message":"Filters must include all and only primary key columns with 'eq' operators"}|] { matchStatus = 405 , matchHeaders = [matchContentTypeJson] } context "Inserting row" $ do it "succeeds on table with single pk col" $ do get "/tiobe_pls?name=eq.Go" `shouldRespondWith` "[]" put "/tiobe_pls?name=eq.Go" [str| [ { "name": "Go", "rank": 19 } ]|] `shouldRespondWith` 204 get "/tiobe_pls?name=eq.Go" `shouldRespondWith` [json| [ { "name": "Go", "rank": 19 } ]|] { matchHeaders = [matchContentTypeJson] } it "succeeds on table with composite pk" $ do get "/employees?first_name=eq.Susan&last_name=eq.Heidt" `shouldRespondWith` "[]" put "/employees?first_name=eq.Susan&last_name=eq.Heidt" [str| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "48000", "company": "GEX", "occupation": "Railroad engineer" } ]|] `shouldRespondWith` 204 get "/employees?first_name=eq.Susan&last_name=eq.Heidt" `shouldRespondWith` [json| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "$48,000.00", 
"company": "GEX", "occupation": "Railroad engineer" } ]|] { matchHeaders = [matchContentTypeJson] } it "succeeds if the table has only PK cols and no other cols" $ do get "/only_pk?id=eq.10" `shouldRespondWith` "[]" put "/only_pk?id=eq.10" [str|[ { "id": 10 } ]|] `shouldRespondWith` 204 get "/only_pk?id=eq.10" `shouldRespondWith` [json|[ { "id": 10 } ]|] { matchHeaders = [matchContentTypeJson] } context "Updating row" $ do it "succeeds on table with single pk col" $ do get "/tiobe_pls?name=eq.Go" `shouldRespondWith` [json|[ { "name": "Go", "rank": 19 } ]|] { matchHeaders = [matchContentTypeJson] } put "/tiobe_pls?name=eq.Go" [str| [ { "name": "Go", "rank": 13 } ]|] `shouldRespondWith` 204 get "/tiobe_pls?name=eq.Go" `shouldRespondWith` [json| [ { "name": "Go", "rank": 13 } ]|] { matchHeaders = [matchContentTypeJson] } it "succeeds on table with composite pk" $ do get "/employees?first_name=eq.Susan&last_name=eq.Heidt" `shouldRespondWith` [json| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "$48,000.00", "company": "GEX", "occupation": "Railroad engineer" } ]|] { matchHeaders = [matchContentTypeJson] } put "/employees?first_name=eq.Susan&last_name=eq.Heidt" [str| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "60000", "company": "Gamma Gas", "occupation": "Railroad engineer" } ]|] `shouldRespondWith` 204 get "/employees?first_name=eq.Susan&last_name=eq.Heidt" `shouldRespondWith` [json| [ { "first_name": "Susan", "last_name": "Heidt", "salary": "$60,000.00", "company": "Gamma Gas", "occupation": "Railroad engineer" } ]|] { matchHeaders = [matchContentTypeJson] } it "succeeds if the table has only PK cols and no other cols" $ do get "/only_pk?id=eq.10" `shouldRespondWith` [json|[ { "id": 10 } ]|] { matchHeaders = [matchContentTypeJson] } put "/only_pk?id=eq.10" [str|[ { "id": 10 } ]|] `shouldRespondWith` 204 get "/only_pk?id=eq.10" `shouldRespondWith` [json|[ { "id": 10 } ]|] { matchHeaders = [matchContentTypeJson] } it "works with 
return=representation and vnd.pgrst.object+json" $ request methodPut "/tiobe_pls?name=eq.Ruby" [("Prefer", "return=representation"), ("Accept", "application/vnd.pgrst.object+json")] [str| [ { "name": "Ruby", "rank": 11 } ]|] `shouldRespondWith` [json|{ "name": "Ruby", "rank": 11 }|] { matchHeaders = [matchContentTypeSingular] } context "with a camel case pk column" $ do it "works with POST and merge-duplicates/ignore-duplicates headers" $ do request methodPost "/UnitTest" [("Prefer", "return=representation"), ("Prefer", "resolution=merge-duplicates")] [json| [ { "idUnitTest": 1, "nameUnitTest": "name of unittest 1" }, { "idUnitTest": 2, "nameUnitTest": "name of unittest 2" } ]|] `shouldRespondWith` [json|[ { "idUnitTest": 1, "nameUnitTest": "name of unittest 1" }, { "idUnitTest": 2, "nameUnitTest": "name of unittest 2" } ]|] { matchStatus = 201 , matchHeaders = ["Preference-Applied" <:> "resolution=merge-duplicates", matchContentTypeJson] } request methodPost "/UnitTest" [("Prefer", "return=representation"), ("Prefer", "resolution=ignore-duplicates")] [json| [ { "idUnitTest": 1, "nameUnitTest": "name of unittest 1" }, { "idUnitTest": 2, "nameUnitTest": "name of unittest 2" } ]|] `shouldRespondWith` [json|[]|] { matchStatus = 201 , matchHeaders = ["Preference-Applied" <:> "resolution=ignore-duplicates", matchContentTypeJson] } it "works with PUT" $ do put "/UnitTest?idUnitTest=eq.1" [str| [ { "idUnitTest": 1, "nameUnitTest": "unit test 1" } ]|] `shouldRespondWith` 204 get "/UnitTest?idUnitTest=eq.1" `shouldRespondWith` [json| [ { "idUnitTest": 1, "nameUnitTest": "unit test 1" } ]|] { matchHeaders = [matchContentTypeJson] }
16,139
false
true
0
21
4,223
2,199
1,345
854
null
null
pparkkin/eta
compiler/ETA/Prelude/PrelNames.hs
bsd-3-clause
coercibleDataConKey = mkPreludeDataConUnique 32
67
coercibleDataConKey = mkPreludeDataConUnique 32
67
coercibleDataConKey = mkPreludeDataConUnique 32
67
false
false
0
5
23
9
4
5
null
null
thalerjonathan/phd
coding/prototyping/haskell/declarativeABM/haskell/SpatialGameSimple/src/SGModel.hs
gpl-3.0
payoffWith :: SGCell -> SGCell -> Double payoffWith cRef cOther = payoff (sgCurrState cRef) (sgCurrState cOther)
112
payoffWith :: SGCell -> SGCell -> Double payoffWith cRef cOther = payoff (sgCurrState cRef) (sgCurrState cOther)
112
payoffWith cRef cOther = payoff (sgCurrState cRef) (sgCurrState cOther)
71
false
true
0
8
15
46
21
25
null
null
josefs/autosar
oldARSim/AR.hs
bsd-3-clause
hasType (TStruct ts) (VStruct fs) = unique (dom fs) && dom ts `equal` dom fs && and [ hasTypeIn ts f v | (f,v) <- Map.assocs fs ]
176
hasType (TStruct ts) (VStruct fs) = unique (dom fs) && dom ts `equal` dom fs && and [ hasTypeIn ts f v | (f,v) <- Map.assocs fs ]
176
hasType (TStruct ts) (VStruct fs) = unique (dom fs) && dom ts `equal` dom fs && and [ hasTypeIn ts f v | (f,v) <- Map.assocs fs ]
176
false
false
0
11
74
84
41
43
null
null
rueshyna/gogol
gogol-dataproc/gen/Network/Google/Dataproc/Types/Product.hs
mpl-2.0
-- | [Required] The zone where the Google Compute Engine cluster will be -- located. Example: -- \`https:\/\/www.googleapis.com\/compute\/v1\/projects\/[project_id]\/zones\/[zone]\`. gccZoneURI :: Lens' GceClusterConfig (Maybe Text) gccZoneURI = lens _gccZoneURI (\ s a -> s{_gccZoneURI = a})
294
gccZoneURI :: Lens' GceClusterConfig (Maybe Text) gccZoneURI = lens _gccZoneURI (\ s a -> s{_gccZoneURI = a})
111
gccZoneURI = lens _gccZoneURI (\ s a -> s{_gccZoneURI = a})
61
true
true
0
9
36
50
27
23
null
null
nurpax/aeson
Data/Aeson/Types/Instances.hs
bsd-3-clause
tuple :: B.Builder -> Encoding tuple b = Encoding (B.char7 '[' <> b <> B.char7 ']')
83
tuple :: B.Builder -> Encoding tuple b = Encoding (B.char7 '[' <> b <> B.char7 ']')
83
tuple b = Encoding (B.char7 '[' <> b <> B.char7 ']')
52
false
true
0
10
15
44
21
23
null
null
CarstenKoenig/AdventOfCode2016
Day20/Main.hs
mit
interParser :: Parser Interval interParser = do l <- parseNumber parseChar (== '-') u <- parseNumber return $ Inter l u
127
interParser :: Parser Interval interParser = do l <- parseNumber parseChar (== '-') u <- parseNumber return $ Inter l u
127
interParser = do l <- parseNumber parseChar (== '-') u <- parseNumber return $ Inter l u
96
false
true
0
8
28
50
23
27
null
null
PackAssembler/PaCLI
PaCLI/Current.hs
gpl-3.0
findNewOrUpdated :: [Version] -> [BP.Mod] -> [UpdateAction] findNewOrUpdated vs = catMaybes . foldl (\acc x -> newOrUpdatedEntry vs x : acc) []
143
findNewOrUpdated :: [Version] -> [BP.Mod] -> [UpdateAction] findNewOrUpdated vs = catMaybes . foldl (\acc x -> newOrUpdatedEntry vs x : acc) []
143
findNewOrUpdated vs = catMaybes . foldl (\acc x -> newOrUpdatedEntry vs x : acc) []
83
false
true
0
10
21
63
33
30
null
null
paf31/Adventure
src/Game/Adventure/State.hs
bsd-3-clause
setLocation :: Location -> GameState item -> GameState item setLocation loc st = st { location = loc }
102
setLocation :: Location -> GameState item -> GameState item setLocation loc st = st { location = loc }
102
setLocation loc st = st { location = loc }
42
false
true
0
7
18
38
19
19
null
null
pauloborba/plc
src/DetalhesSobreListasERecursao.hs
cc0-1.0
creditar n v [] = []
20
creditar n v [] = []
20
creditar n v [] = []
20
false
false
0
6
5
17
8
9
null
null
cdxr/terminal
System/Console/Terminal/Term.hs
bsd-3-clause
runHaskeline :: (MonadIO m, MonadCatch m) => H.Settings IO -> (H.InputState -> m a) -> m a runHaskeline hs k = bracketOnError mkInputState cancelInputState $ \i -> do a <- k i liftIO $ H.closeInput i return a where mkInputState = liftIO $ H.initializeInput hs cancelInputState = liftIO . H.cancelInput
362
runHaskeline :: (MonadIO m, MonadCatch m) => H.Settings IO -> (H.InputState -> m a) -> m a runHaskeline hs k = bracketOnError mkInputState cancelInputState $ \i -> do a <- k i liftIO $ H.closeInput i return a where mkInputState = liftIO $ H.initializeInput hs cancelInputState = liftIO . H.cancelInput
362
runHaskeline hs k = bracketOnError mkInputState cancelInputState $ \i -> do a <- k i liftIO $ H.closeInput i return a where mkInputState = liftIO $ H.initializeInput hs cancelInputState = liftIO . H.cancelInput
232
false
true
2
11
110
140
62
78
null
null
alphalambda/k12math
contrib/MHills/GeometryLessons/code/student/lesson3e.hs
mit
myPicture points = drawPoints [a,b,c] & drawLabels [a,b,c] ["A","B","C"] & drawPoint a' & drawLabel a' "A'" & drawSegment (a,b) & drawSegment (b,c) & drawSegment (c,a) & messages [ "length BC = "++ show (dist b c) ,"length BA' = "++ show (dist b a') ,"length CA' = "++ show (dist c a') ] where [a,b,c] = take 3 points a' = midpoint b c
424
myPicture points = drawPoints [a,b,c] & drawLabels [a,b,c] ["A","B","C"] & drawPoint a' & drawLabel a' "A'" & drawSegment (a,b) & drawSegment (b,c) & drawSegment (c,a) & messages [ "length BC = "++ show (dist b c) ,"length BA' = "++ show (dist b a') ,"length CA' = "++ show (dist c a') ] where [a,b,c] = take 3 points a' = midpoint b c
424
myPicture points = drawPoints [a,b,c] & drawLabels [a,b,c] ["A","B","C"] & drawPoint a' & drawLabel a' "A'" & drawSegment (a,b) & drawSegment (b,c) & drawSegment (c,a) & messages [ "length BC = "++ show (dist b c) ,"length BA' = "++ show (dist b a') ,"length CA' = "++ show (dist c a') ] where [a,b,c] = take 3 points a' = midpoint b c
424
false
false
0
13
152
196
102
94
null
null
YelaSeamless/cgrep
src/CGrep/Semantic/Cpp/Token.hs
gpl-2.0
tokenFilter filt (TokenHeaderName{}) = filtHeader filt
59
tokenFilter filt (TokenHeaderName{}) = filtHeader filt
59
tokenFilter filt (TokenHeaderName{}) = filtHeader filt
59
false
false
0
7
10
21
10
11
null
null
nomeata/ghc
compiler/main/GhcMake.hs
bsd-3-clause
upsweep :: GhcMonad m => HomePackageTable -- ^ HPT from last time round (pruned) -> ([ModuleName],[ModuleName]) -- ^ stable modules (see checkStability) -> (HscEnv -> IO ()) -- ^ How to clean up unwanted tmp files -> [SCC ModSummary] -- ^ Mods to do (the worklist) -> m (SuccessFlag, [ModSummary]) -- ^ Returns: -- -- 1. A flag whether the complete upsweep was successful. -- 2. The 'HscEnv' in the monad has an updated HPT -- 3. A list of modules which succeeded loading. upsweep old_hpt stable_mods cleanup sccs = do (res, done) <- upsweep' old_hpt [] sccs 1 (length sccs) return (res, reverse done) where upsweep' _old_hpt done [] _ _ = return (Succeeded, done) upsweep' _old_hpt done (CyclicSCC ms:_) _ _ = do dflags <- getSessionDynFlags liftIO $ fatalErrorMsg dflags (cyclicModuleErr ms) return (Failed, done) upsweep' old_hpt done (AcyclicSCC mod:mods) mod_index nmods = do -- putStrLn ("UPSWEEP_MOD: hpt = " ++ -- show (map (moduleUserString.moduleName.mi_module.hm_iface) -- (moduleEnvElts (hsc_HPT hsc_env))) let logger _mod = defaultWarnErrLogger hsc_env <- getSession -- Remove unwanted tmp files between compilations liftIO (cleanup hsc_env) mb_mod_info <- handleSourceError (\err -> do logger mod (Just err); return Nothing) $ do mod_info <- liftIO $ upsweep_mod hsc_env old_hpt stable_mods mod mod_index nmods logger mod Nothing -- log warnings return (Just mod_info) case mb_mod_info of Nothing -> return (Failed, done) Just mod_info -> do let this_mod = ms_mod_name mod -- Add new info to hsc_env hpt1 = addToUFM (hsc_HPT hsc_env) this_mod mod_info hsc_env1 = hsc_env { hsc_HPT = hpt1 } -- Space-saving: delete the old HPT entry -- for mod BUT if mod is a hs-boot -- node, don't delete it. For the -- interface, the HPT entry is probaby for the -- main Haskell source file. Deleting it -- would force the real module to be recompiled -- every time. 
old_hpt1 | isBootSummary mod = old_hpt | otherwise = delFromUFM old_hpt this_mod done' = mod:done -- fixup our HomePackageTable after we've finished compiling -- a mutually-recursive loop. See reTypecheckLoop, below. hsc_env2 <- liftIO $ reTypecheckLoop hsc_env1 mod done' setSession hsc_env2 upsweep' old_hpt1 done' mods (mod_index+1) nmods -- | Compile a single module. Always produce a Linkable for it if -- successful. If no compilation happened, return the old Linkable.
3,182
upsweep :: GhcMonad m => HomePackageTable -- ^ HPT from last time round (pruned) -> ([ModuleName],[ModuleName]) -- ^ stable modules (see checkStability) -> (HscEnv -> IO ()) -- ^ How to clean up unwanted tmp files -> [SCC ModSummary] -- ^ Mods to do (the worklist) -> m (SuccessFlag, [ModSummary]) upsweep old_hpt stable_mods cleanup sccs = do (res, done) <- upsweep' old_hpt [] sccs 1 (length sccs) return (res, reverse done) where upsweep' _old_hpt done [] _ _ = return (Succeeded, done) upsweep' _old_hpt done (CyclicSCC ms:_) _ _ = do dflags <- getSessionDynFlags liftIO $ fatalErrorMsg dflags (cyclicModuleErr ms) return (Failed, done) upsweep' old_hpt done (AcyclicSCC mod:mods) mod_index nmods = do -- putStrLn ("UPSWEEP_MOD: hpt = " ++ -- show (map (moduleUserString.moduleName.mi_module.hm_iface) -- (moduleEnvElts (hsc_HPT hsc_env))) let logger _mod = defaultWarnErrLogger hsc_env <- getSession -- Remove unwanted tmp files between compilations liftIO (cleanup hsc_env) mb_mod_info <- handleSourceError (\err -> do logger mod (Just err); return Nothing) $ do mod_info <- liftIO $ upsweep_mod hsc_env old_hpt stable_mods mod mod_index nmods logger mod Nothing -- log warnings return (Just mod_info) case mb_mod_info of Nothing -> return (Failed, done) Just mod_info -> do let this_mod = ms_mod_name mod -- Add new info to hsc_env hpt1 = addToUFM (hsc_HPT hsc_env) this_mod mod_info hsc_env1 = hsc_env { hsc_HPT = hpt1 } -- Space-saving: delete the old HPT entry -- for mod BUT if mod is a hs-boot -- node, don't delete it. For the -- interface, the HPT entry is probaby for the -- main Haskell source file. Deleting it -- would force the real module to be recompiled -- every time. old_hpt1 | isBootSummary mod = old_hpt | otherwise = delFromUFM old_hpt this_mod done' = mod:done -- fixup our HomePackageTable after we've finished compiling -- a mutually-recursive loop. See reTypecheckLoop, below. 
hsc_env2 <- liftIO $ reTypecheckLoop hsc_env1 mod done' setSession hsc_env2 upsweep' old_hpt1 done' mods (mod_index+1) nmods -- | Compile a single module. Always produce a Linkable for it if -- successful. If no compilation happened, return the old Linkable.
2,968
upsweep old_hpt stable_mods cleanup sccs = do (res, done) <- upsweep' old_hpt [] sccs 1 (length sccs) return (res, reverse done) where upsweep' _old_hpt done [] _ _ = return (Succeeded, done) upsweep' _old_hpt done (CyclicSCC ms:_) _ _ = do dflags <- getSessionDynFlags liftIO $ fatalErrorMsg dflags (cyclicModuleErr ms) return (Failed, done) upsweep' old_hpt done (AcyclicSCC mod:mods) mod_index nmods = do -- putStrLn ("UPSWEEP_MOD: hpt = " ++ -- show (map (moduleUserString.moduleName.mi_module.hm_iface) -- (moduleEnvElts (hsc_HPT hsc_env))) let logger _mod = defaultWarnErrLogger hsc_env <- getSession -- Remove unwanted tmp files between compilations liftIO (cleanup hsc_env) mb_mod_info <- handleSourceError (\err -> do logger mod (Just err); return Nothing) $ do mod_info <- liftIO $ upsweep_mod hsc_env old_hpt stable_mods mod mod_index nmods logger mod Nothing -- log warnings return (Just mod_info) case mb_mod_info of Nothing -> return (Failed, done) Just mod_info -> do let this_mod = ms_mod_name mod -- Add new info to hsc_env hpt1 = addToUFM (hsc_HPT hsc_env) this_mod mod_info hsc_env1 = hsc_env { hsc_HPT = hpt1 } -- Space-saving: delete the old HPT entry -- for mod BUT if mod is a hs-boot -- node, don't delete it. For the -- interface, the HPT entry is probaby for the -- main Haskell source file. Deleting it -- would force the real module to be recompiled -- every time. old_hpt1 | isBootSummary mod = old_hpt | otherwise = delFromUFM old_hpt this_mod done' = mod:done -- fixup our HomePackageTable after we've finished compiling -- a mutually-recursive loop. See reTypecheckLoop, below. hsc_env2 <- liftIO $ reTypecheckLoop hsc_env1 mod done' setSession hsc_env2 upsweep' old_hpt1 done' mods (mod_index+1) nmods -- | Compile a single module. Always produce a Linkable for it if -- successful. If no compilation happened, return the old Linkable.
2,604
true
true
0
20
1,211
557
279
278
null
null
ejconlon/notate
src/Notate/Interpret.hs
bsd-3-clause
say = putStrLn
14
say = putStrLn
14
say = putStrLn
14
false
false
1
5
2
10
3
7
null
null
unisonweb/platform
parser-typechecker/src/Unison/Runtime/Machine.hs
mit
cacheAdd :: [(Reference, SuperGroup Symbol)] -> CCache -> IO [Reference] cacheAdd l cc = do rtm <- readTVarIO (refTm cc) rty <- readTVarIO (refTy cc) let known = M.keysSet rtm <> S.fromList (fst <$> l) f b r | not b, S.notMember r known = (S.singleton r, mempty) | b, M.notMember r rty = (mempty, S.singleton r) | otherwise = (mempty, mempty) (missing, tys) = (foldMap.foldMap) (groupLinks f) l l' = filter (\(r,_) -> M.notMember r rtm) l if S.null missing then [] <$ cacheAdd0 tys l' cc else pure $ S.toList missing
580
cacheAdd :: [(Reference, SuperGroup Symbol)] -> CCache -> IO [Reference] cacheAdd l cc = do rtm <- readTVarIO (refTm cc) rty <- readTVarIO (refTy cc) let known = M.keysSet rtm <> S.fromList (fst <$> l) f b r | not b, S.notMember r known = (S.singleton r, mempty) | b, M.notMember r rty = (mempty, S.singleton r) | otherwise = (mempty, mempty) (missing, tys) = (foldMap.foldMap) (groupLinks f) l l' = filter (\(r,_) -> M.notMember r rtm) l if S.null missing then [] <$ cacheAdd0 tys l' cc else pure $ S.toList missing
580
cacheAdd l cc = do rtm <- readTVarIO (refTm cc) rty <- readTVarIO (refTy cc) let known = M.keysSet rtm <> S.fromList (fst <$> l) f b r | not b, S.notMember r known = (S.singleton r, mempty) | b, M.notMember r rty = (mempty, S.singleton r) | otherwise = (mempty, mempty) (missing, tys) = (foldMap.foldMap) (groupLinks f) l l' = filter (\(r,_) -> M.notMember r rtm) l if S.null missing then [] <$ cacheAdd0 tys l' cc else pure $ S.toList missing
501
false
true
4
15
156
298
142
156
null
null
xmonad/xmonad-contrib
XMonad/Util/PositionStore.hs
bsd-3-clause
posStoreInsert :: PositionStore -> Window -> Rectangle -> Rectangle -> PositionStore posStoreInsert (PS posStoreMap) w (Rectangle x y wh ht) (Rectangle srX srY srWh srHt) = let offsetX = x - srX offsetY = y - srY in PS $ M.insert w (PSRectangle (fromIntegral offsetX / fromIntegral srWh) (fromIntegral offsetY / fromIntegral srHt) (fromIntegral wh / fromIntegral srWh) (fromIntegral ht / fromIntegral srHt)) posStoreMap
576
posStoreInsert :: PositionStore -> Window -> Rectangle -> Rectangle -> PositionStore posStoreInsert (PS posStoreMap) w (Rectangle x y wh ht) (Rectangle srX srY srWh srHt) = let offsetX = x - srX offsetY = y - srY in PS $ M.insert w (PSRectangle (fromIntegral offsetX / fromIntegral srWh) (fromIntegral offsetY / fromIntegral srHt) (fromIntegral wh / fromIntegral srWh) (fromIntegral ht / fromIntegral srHt)) posStoreMap
576
posStoreInsert (PS posStoreMap) w (Rectangle x y wh ht) (Rectangle srX srY srWh srHt) = let offsetX = x - srX offsetY = y - srY in PS $ M.insert w (PSRectangle (fromIntegral offsetX / fromIntegral srWh) (fromIntegral offsetY / fromIntegral srHt) (fromIntegral wh / fromIntegral srWh) (fromIntegral ht / fromIntegral srHt)) posStoreMap
491
false
true
2
12
220
173
82
91
null
null
DavidAlphaFox/ghc
libraries/bytestring/Data/ByteString/Lazy.hs
bsd-3-clause
-- | /O(n)/ 'reverse' @xs@ returns the elements of @xs@ in reverse order. reverse :: ByteString -> ByteString reverse cs0 = rev Empty cs0 where rev a Empty = a rev a (Chunk c cs) = rev (Chunk (S.reverse c) a) cs
229
reverse :: ByteString -> ByteString reverse cs0 = rev Empty cs0 where rev a Empty = a rev a (Chunk c cs) = rev (Chunk (S.reverse c) a) cs
155
reverse cs0 = rev Empty cs0 where rev a Empty = a rev a (Chunk c cs) = rev (Chunk (S.reverse c) a) cs
119
true
true
1
10
59
75
37
38
null
null
megantti/rtorrent-cli
Main.hs
mit
addPath :: String -> IO () addPath url = do path <- canonicalizePath url `catch` (\(_ :: IOException) -> return url) _ <- call $ loadStartTorrent path return ()
209
addPath :: String -> IO () addPath url = do path <- canonicalizePath url `catch` (\(_ :: IOException) -> return url) _ <- call $ loadStartTorrent path return ()
209
addPath url = do path <- canonicalizePath url `catch` (\(_ :: IOException) -> return url) _ <- call $ loadStartTorrent path return ()
182
false
true
0
13
77
84
39
45
null
null
garykl/Horg
Colors.hs
bsd-3-clause
-- | accept only a number in [0, 1] to2digitHex :: Float -> String to2digitHex n = let byte = round $ 255 * n::Int digit1 = hexDigits (byte `div` 16) digit2 = hexDigits (byte `mod` 16) in [digit1, digit2]
231
to2digitHex :: Float -> String to2digitHex n = let byte = round $ 255 * n::Int digit1 = hexDigits (byte `div` 16) digit2 = hexDigits (byte `mod` 16) in [digit1, digit2]
195
to2digitHex n = let byte = round $ 255 * n::Int digit1 = hexDigits (byte `div` 16) digit2 = hexDigits (byte `mod` 16) in [digit1, digit2]
164
true
true
0
11
66
81
45
36
null
null
gcross/habit-of-fate
sources/library/HabitOfFate/Quest.hs
agpl-3.0
random_stories_content_ ∷ Lens (Entry c1) (Entry c2) [c1] [c2] random_stories_content_ = lens random_stories_content (\x y → x{random_stories_content = y})
155
random_stories_content_ ∷ Lens (Entry c1) (Entry c2) [c1] [c2] random_stories_content_ = lens random_stories_content (\x y → x{random_stories_content = y})
155
random_stories_content_ = lens random_stories_content (\x y → x{random_stories_content = y})
92
false
true
0
9
18
61
33
28
null
null
utky/lycopene
backup/Action.hs
apache-2.0
runAction' :: ActionF a -> ActionResult a runAction' (FsAction f) = actionResult (fmap Right (FS.runFsAction f))
113
runAction' :: ActionF a -> ActionResult a runAction' (FsAction f) = actionResult (fmap Right (FS.runFsAction f))
113
runAction' (FsAction f) = actionResult (fmap Right (FS.runFsAction f))
70
false
true
0
10
16
53
24
29
null
null
Sword-Smith/hfasto
src/MipsRegAlloc.hs
mit
getSRegNames :: [] (S.Set Mips.Reg) -> [] (S.Set Mips.Reg) -> S.Set SymReg getSRegNames kls gls = S.filter filterDynamicRegs $ S.unions $ map (\(x,y) -> S.union x y) (zip kls gls)
180
getSRegNames :: [] (S.Set Mips.Reg) -> [] (S.Set Mips.Reg) -> S.Set SymReg getSRegNames kls gls = S.filter filterDynamicRegs $ S.unions $ map (\(x,y) -> S.union x y) (zip kls gls)
180
getSRegNames kls gls = S.filter filterDynamicRegs $ S.unions $ map (\(x,y) -> S.union x y) (zip kls gls)
105
false
true
0
10
30
108
53
55
null
null
glguy/advent2016
asmprog-final/Small.hs
isc
set B x = modify' $ \rs -> rs { registerB = x }
49
set B x = modify' $ \rs -> rs { registerB = x }
49
set B x = modify' $ \rs -> rs { registerB = x }
49
false
false
0
8
15
28
15
13
null
null
Kercoin/yummy
src/Store.hs
mit
replyResult :: Reply -> Either String a replyResult (SingleLine s) = Left (C.unpack s)
86
replyResult :: Reply -> Either String a replyResult (SingleLine s) = Left (C.unpack s)
86
replyResult (SingleLine s) = Left (C.unpack s)
46
false
true
0
8
13
40
19
21
null
null
violetkz/haskell_learning
fn.hs
gpl-2.0
n n = fn(n - 1) + fn(n -2)
27
fn n = fn(n - 1) + fn(n -2)
27
fn n = fn(n - 1) + fn(n -2)
27
false
false
0
8
9
33
16
17
null
null
andrewthad/yesod-bootstrap
src/Yesod/Form/Generic.hs
mit
gFormToForm :: (Monad m, HandlerSite m ~ site) => GForm w m a -> MForm m (FormResult a, w) gFormToForm (GForm gform) = do ints <- get (env, site, langs) <- ask (a, w, ints', enc) <- lift $ gform (site, langs) env ints put ints' tell enc return (a, w)
286
gFormToForm :: (Monad m, HandlerSite m ~ site) => GForm w m a -> MForm m (FormResult a, w) gFormToForm (GForm gform) = do ints <- get (env, site, langs) <- ask (a, w, ints', enc) <- lift $ gform (site, langs) env ints put ints' tell enc return (a, w)
286
gFormToForm (GForm gform) = do ints <- get (env, site, langs) <- ask (a, w, ints', enc) <- lift $ gform (site, langs) env ints put ints' tell enc return (a, w)
171
false
true
0
10
86
148
74
74
null
null
hrsrashid/nummet
src/Main.hs
bsd-3-clause
scalarMatrix = parseInput $ parseMatrix parseDecimal
52
scalarMatrix = parseInput $ parseMatrix parseDecimal
52
scalarMatrix = parseInput $ parseMatrix parseDecimal
52
false
false
0
6
5
13
6
7
null
null
nevrenato/Hets_Fork
CSL/Keywords.hs
gpl-2.0
asinhS :: String asinhS = "asinh"
33
asinhS :: String asinhS = "asinh"
33
asinhS = "asinh"
16
false
true
0
6
5
18
7
11
null
null
kawu/text-trie
Data/TextTrie.hs
bsd-2-clause
-- | Branch lookup. brLookup :: Int -> T.Text -> Trie a -> Maybe a brLookup code k (Branch brBit leftTrie rightTrie) = if testBit code brBit then brLookup code k rightTrie else brLookup code k leftTrie
221
brLookup :: Int -> T.Text -> Trie a -> Maybe a brLookup code k (Branch brBit leftTrie rightTrie) = if testBit code brBit then brLookup code k rightTrie else brLookup code k leftTrie
201
brLookup code k (Branch brBit leftTrie rightTrie) = if testBit code brBit then brLookup code k rightTrie else brLookup code k leftTrie
154
true
true
0
11
56
81
38
43
null
null
edwardwas/chatServer
src/Shared/Types.hs
mit
isMessage :: UserAction -> Bool isMessage (SendMessage _ _) = True
66
isMessage :: UserAction -> Bool isMessage (SendMessage _ _) = True
66
isMessage (SendMessage _ _) = True
34
false
true
0
7
10
26
13
13
null
null
Ornedan/dom4statusbot
src/Model/Dominions4.hs
bsd-3-clause
nationName 76 = "LA Utgård"
27
nationName 76 = "LA Utgård"
27
nationName 76 = "LA Utgård"
27
false
false
0
5
4
9
4
5
null
null
jecisc/TP_PF_L3
PF-TP5/src/PF-TP_Interprete_Fini_FerlicotDelbecque_Cyril/Main.hs
mit
estChiffre :: Char -> Bool estChiffre = flip elem ['0'..'9']
60
estChiffre :: Char -> Bool estChiffre = flip elem ['0'..'9']
60
estChiffre = flip elem ['0'..'9']
33
false
true
0
7
9
32
14
18
null
null
themoritz/cabal
cabal-install/Distribution/Client/SolverInstallPlan.hs
bsd-3-clause
valid :: IndependentGoals -> SolverPlanIndex -> Bool valid indepGoals index = null $ problems indepGoals index
126
valid :: IndependentGoals -> SolverPlanIndex -> Bool valid indepGoals index = null $ problems indepGoals index
126
valid indepGoals index = null $ problems indepGoals index
61
false
true
2
8
31
40
17
23
null
null
ssaavedra/liquidhaskell
tests/neg/Books.hs
bsd-3-clause
{-@ inline discount @-} discount :: Int -> Int discount bookCount = (bookCount - BOOK_THRESHOLD) * DISCOUNT_PERCENTAGE
118
discount :: Int -> Int discount bookCount = (bookCount - BOOK_THRESHOLD) * DISCOUNT_PERCENTAGE
94
discount bookCount = (bookCount - BOOK_THRESHOLD) * DISCOUNT_PERCENTAGE
71
true
true
0
7
16
30
16
14
null
null
silky/ImplicitCAD
Graphics/Implicit/ExtOpenScad/Primitives.hs
gpl-2.0
rotateExtrude :: ([Char], [OVal] -> ArgParser (IO [OVal])) rotateExtrude = moduleWithSuite "rotate_extrude" $ \children -> do example "rotate_extrude() translate(20) circle(10);" totalRot :: ℝ <- argument "a" `defaultTo` 360 `doc` "angle to sweep" r :: ℝ <- argument "r" `defaultTo` 0 translateArg :: Either ℝ2 (ℝ -> ℝ2) <- argument "translate" `defaultTo` Left (0,0) rotateArg :: Either ℝ (ℝ -> ℝ ) <- argument "rotate" `defaultTo` Left 0 let is360m n = 360 * fromIntegral (round $ n / 360) /= n n = fromIntegral $ round $ totalRot / 360 cap = is360m totalRot || Either.either ( /= (0,0)) (\f -> f 0 /= f totalRot) translateArg || Either.either is360m (\f -> is360m (f 0 - f totalRot)) rotateArg capM = if cap then Just r else Nothing return $ return $ obj2UpMap (Prim.rotateExtrude totalRot capM translateArg rotateArg) children {-rotateExtrudeStatement = moduleWithSuite "rotate_extrude" $ \suite -> do h <- realArgument "h" center <- boolArgumentWithDefault "center" False twist <- realArgumentWithDefault 0.0 r <- realArgumentWithDefault "r" 0.0 getAndModUpObj2s suite (\obj -> Prim.extrudeRMod r (\θ (x,y) -> (x*cos(θ)+y*sin(θ), y*cos(θ)-x*sin(θ)) ) obj h) -}
1,303
rotateExtrude :: ([Char], [OVal] -> ArgParser (IO [OVal])) rotateExtrude = moduleWithSuite "rotate_extrude" $ \children -> do example "rotate_extrude() translate(20) circle(10);" totalRot :: ℝ <- argument "a" `defaultTo` 360 `doc` "angle to sweep" r :: ℝ <- argument "r" `defaultTo` 0 translateArg :: Either ℝ2 (ℝ -> ℝ2) <- argument "translate" `defaultTo` Left (0,0) rotateArg :: Either ℝ (ℝ -> ℝ ) <- argument "rotate" `defaultTo` Left 0 let is360m n = 360 * fromIntegral (round $ n / 360) /= n n = fromIntegral $ round $ totalRot / 360 cap = is360m totalRot || Either.either ( /= (0,0)) (\f -> f 0 /= f totalRot) translateArg || Either.either is360m (\f -> is360m (f 0 - f totalRot)) rotateArg capM = if cap then Just r else Nothing return $ return $ obj2UpMap (Prim.rotateExtrude totalRot capM translateArg rotateArg) children {-rotateExtrudeStatement = moduleWithSuite "rotate_extrude" $ \suite -> do h <- realArgument "h" center <- boolArgumentWithDefault "center" False twist <- realArgumentWithDefault 0.0 r <- realArgumentWithDefault "r" 0.0 getAndModUpObj2s suite (\obj -> Prim.extrudeRMod r (\θ (x,y) -> (x*cos(θ)+y*sin(θ), y*cos(θ)-x*sin(θ)) ) obj h) -}
1,303
rotateExtrude = moduleWithSuite "rotate_extrude" $ \children -> do example "rotate_extrude() translate(20) circle(10);" totalRot :: ℝ <- argument "a" `defaultTo` 360 `doc` "angle to sweep" r :: ℝ <- argument "r" `defaultTo` 0 translateArg :: Either ℝ2 (ℝ -> ℝ2) <- argument "translate" `defaultTo` Left (0,0) rotateArg :: Either ℝ (ℝ -> ℝ ) <- argument "rotate" `defaultTo` Left 0 let is360m n = 360 * fromIntegral (round $ n / 360) /= n n = fromIntegral $ round $ totalRot / 360 cap = is360m totalRot || Either.either ( /= (0,0)) (\f -> f 0 /= f totalRot) translateArg || Either.either is360m (\f -> is360m (f 0 - f totalRot)) rotateArg capM = if cap then Just r else Nothing return $ return $ obj2UpMap (Prim.rotateExtrude totalRot capM translateArg rotateArg) children {-rotateExtrudeStatement = moduleWithSuite "rotate_extrude" $ \suite -> do h <- realArgument "h" center <- boolArgumentWithDefault "center" False twist <- realArgumentWithDefault 0.0 r <- realArgumentWithDefault "r" 0.0 getAndModUpObj2s suite (\obj -> Prim.extrudeRMod r (\θ (x,y) -> (x*cos(θ)+y*sin(θ), y*cos(θ)-x*sin(θ)) ) obj h) -}
1,244
false
true
0
19
317
363
185
178
null
null
pcrama/message-compiler
test/Suite.hs
bsd-3-clause
testGetNextCandidates :: Assertion testGetNextCandidates = assertRightTrue TestCandSel.testGetNextCandidates
108
testGetNextCandidates :: Assertion testGetNextCandidates = assertRightTrue TestCandSel.testGetNextCandidates
108
testGetNextCandidates = assertRightTrue TestCandSel.testGetNextCandidates
73
false
true
0
6
6
16
8
8
null
null
HaskellZhangSong/derive-topdown
src/Data/Derive/TopDown/Lib.hs
mit
getTypeNames :: Type -> [Name] getTypeNames (ForallT tvbs cxt t) = getTypeNames t
81
getTypeNames :: Type -> [Name] getTypeNames (ForallT tvbs cxt t) = getTypeNames t
81
getTypeNames (ForallT tvbs cxt t) = getTypeNames t
50
false
true
0
6
12
38
18
20
null
null
tuura/fantasi
src/Tuura/Fantasi/VHDL/Internal/EnvironmentWriter.hs
mit
instantiateModules :: P.Pangraph -> String instantiateModules p = delayer_enable_vertexList ns ++ network ns ++ synchroniser ns ++ genericCounter ns ++ reg_counter ns ++ genericAccumulator ns ++ reg_counter_mul ns ++ reg_counter_mul2 ns ++ reg_sum_mul ns ++ reg_sum_mul2 ns ++ shift_reg ns ++ start_delayer ++ counter_delayer ++ adder_comparator ns ++ comparator_delayer ns ++ comparator_delayer_result ns ++ done_latch ++ done_circuit ++ output_wires ns where ns = P.vertexList p
1,223
instantiateModules :: P.Pangraph -> String instantiateModules p = delayer_enable_vertexList ns ++ network ns ++ synchroniser ns ++ genericCounter ns ++ reg_counter ns ++ genericAccumulator ns ++ reg_counter_mul ns ++ reg_counter_mul2 ns ++ reg_sum_mul ns ++ reg_sum_mul2 ns ++ shift_reg ns ++ start_delayer ++ counter_delayer ++ adder_comparator ns ++ comparator_delayer ns ++ comparator_delayer_result ns ++ done_latch ++ done_circuit ++ output_wires ns where ns = P.vertexList p
1,223
instantiateModules p = delayer_enable_vertexList ns ++ network ns ++ synchroniser ns ++ genericCounter ns ++ reg_counter ns ++ genericAccumulator ns ++ reg_counter_mul ns ++ reg_counter_mul2 ns ++ reg_sum_mul ns ++ reg_sum_mul2 ns ++ shift_reg ns ++ start_delayer ++ counter_delayer ++ adder_comparator ns ++ comparator_delayer ns ++ comparator_delayer_result ns ++ done_latch ++ done_circuit ++ output_wires ns where ns = P.vertexList p
1,180
false
true
0
23
807
149
67
82
null
null
mcmaniac/ghc
distrib/compare/compare.hs
bsd-3-clause
compareTarLine :: TarLine -> TarLine -> [Change] compareTarLine tl1 tl2 = [ PermissionsChanged fn1 fn2 perms1 perms2 | perms1 /= perms2 ] ++ [ FileSizeChanged fn1 fn2 size1 size2 | sizeChanged ] where fn1 = tlFileName tl1 fn2 = tlFileName tl2 perms1 = tlPermissions tl1 perms2 = tlPermissions tl2 size1 = tlSize tl1 size2 = tlSize tl2 sizeChanged = abs (size1 - size2) > sizeAbs && (((100 * size1) `div` size2) > sizePercentage || ((100 * size2) `div` size1) > sizePercentage)
598
compareTarLine :: TarLine -> TarLine -> [Change] compareTarLine tl1 tl2 = [ PermissionsChanged fn1 fn2 perms1 perms2 | perms1 /= perms2 ] ++ [ FileSizeChanged fn1 fn2 size1 size2 | sizeChanged ] where fn1 = tlFileName tl1 fn2 = tlFileName tl2 perms1 = tlPermissions tl1 perms2 = tlPermissions tl2 size1 = tlSize tl1 size2 = tlSize tl2 sizeChanged = abs (size1 - size2) > sizeAbs && (((100 * size1) `div` size2) > sizePercentage || ((100 * size2) `div` size1) > sizePercentage)
598
compareTarLine tl1 tl2 = [ PermissionsChanged fn1 fn2 perms1 perms2 | perms1 /= perms2 ] ++ [ FileSizeChanged fn1 fn2 size1 size2 | sizeChanged ] where fn1 = tlFileName tl1 fn2 = tlFileName tl2 perms1 = tlPermissions tl1 perms2 = tlPermissions tl2 size1 = tlSize tl1 size2 = tlSize tl2 sizeChanged = abs (size1 - size2) > sizeAbs && (((100 * size1) `div` size2) > sizePercentage || ((100 * size2) `div` size1) > sizePercentage)
549
false
true
8
14
202
190
99
91
null
null
bringert/haskell-tar
htar/htar.hs
bsd-3-clause
die :: [String] -> IO a die errs = do mapM_ (\e -> hPutStrLn stderr $ "htar: " ++ e) $ errs hPutStrLn stderr "Try `htar --help' for more information." exitFailure
190
die :: [String] -> IO a die errs = do mapM_ (\e -> hPutStrLn stderr $ "htar: " ++ e) $ errs hPutStrLn stderr "Try `htar --help' for more information." exitFailure
190
die errs = do mapM_ (\e -> hPutStrLn stderr $ "htar: " ++ e) $ errs hPutStrLn stderr "Try `htar --help' for more information." exitFailure
166
false
true
0
13
58
69
31
38
null
null
xenog/haskoin
src/Network/Haskoin/Network/Message.hs
unlicense
msgType (MHeaders _) = "headers"
36
msgType (MHeaders _) = "headers"
36
msgType (MHeaders _) = "headers"
36
false
false
0
6
8
16
7
9
null
null
abhin4v/ringo
ringo/src/Main.hs
mit
writeFiles :: FilePath -> Config -> IO () writeFiles outputDir config = do let Settings{..} = configSettings config forM_ (makeSQLs config dimTables factTables) $ \(sqlType, table, sql) -> do let dirName = outputDir </> map toLower (show sqlType) createDirectoryIfMissing True dirName writeFile (dirName </> Text.unpack table <.> "sql") sql BS.writeFile (outputDir </> Text.unpack settingDependenciesJSONFileName) . encode . foldl (\acc -> Map.union acc . extractDependencies config) Map.empty $ facts BS.writeFile (outputDir </> Text.unpack settingDimensionsJSONFileName) . encode $ [ tableName table | (_, tabs) <- dimTables, table <- tabs , table `notElem` tables ] BS.writeFile (outputDir </> Text.unpack settingFactsJSONFileName) . encode $ [ tableName table | (_, table) <- factTables ] where facts = configFacts config tables = configTables config dimTables = [ (fact, extractDimensionTables config fact) | fact <- facts ] factTables = [ (fact, extractFactTable config fact) | fact <- facts, factTablePersistent fact ]
1,106
writeFiles :: FilePath -> Config -> IO () writeFiles outputDir config = do let Settings{..} = configSettings config forM_ (makeSQLs config dimTables factTables) $ \(sqlType, table, sql) -> do let dirName = outputDir </> map toLower (show sqlType) createDirectoryIfMissing True dirName writeFile (dirName </> Text.unpack table <.> "sql") sql BS.writeFile (outputDir </> Text.unpack settingDependenciesJSONFileName) . encode . foldl (\acc -> Map.union acc . extractDependencies config) Map.empty $ facts BS.writeFile (outputDir </> Text.unpack settingDimensionsJSONFileName) . encode $ [ tableName table | (_, tabs) <- dimTables, table <- tabs , table `notElem` tables ] BS.writeFile (outputDir </> Text.unpack settingFactsJSONFileName) . encode $ [ tableName table | (_, table) <- factTables ] where facts = configFacts config tables = configTables config dimTables = [ (fact, extractDimensionTables config fact) | fact <- facts ] factTables = [ (fact, extractFactTable config fact) | fact <- facts, factTablePersistent fact ]
1,106
writeFiles outputDir config = do let Settings{..} = configSettings config forM_ (makeSQLs config dimTables factTables) $ \(sqlType, table, sql) -> do let dirName = outputDir </> map toLower (show sqlType) createDirectoryIfMissing True dirName writeFile (dirName </> Text.unpack table <.> "sql") sql BS.writeFile (outputDir </> Text.unpack settingDependenciesJSONFileName) . encode . foldl (\acc -> Map.union acc . extractDependencies config) Map.empty $ facts BS.writeFile (outputDir </> Text.unpack settingDimensionsJSONFileName) . encode $ [ tableName table | (_, tabs) <- dimTables, table <- tabs , table `notElem` tables ] BS.writeFile (outputDir </> Text.unpack settingFactsJSONFileName) . encode $ [ tableName table | (_, table) <- factTables ] where facts = configFacts config tables = configTables config dimTables = [ (fact, extractDimensionTables config fact) | fact <- facts ] factTables = [ (fact, extractFactTable config fact) | fact <- facts, factTablePersistent fact ]
1,064
false
true
3
17
224
393
193
200
null
null
aloiscochard/bound
examples/Overkill.hs
bsd-3-clause
wildp :: P a wildp = P WildP [] (const Nothing)
47
wildp :: P a wildp = P WildP [] (const Nothing)
47
wildp = P WildP [] (const Nothing)
34
false
true
1
7
10
32
14
18
null
null
conal/shady-examples
src/RunUtils.hs
agpl-3.0
-- flatIm im = (intrinsic, view1, xyPlane , toColor . im) -- runU :: Sink [SurfB] -- runU [s] = runUI' animOut (s . pureD) -- runU _ = error "runU: only single SurfB" -- I've forgotten what this restriction to single-SurfB is about. -- Investigate. animOut :: Out (Sink R1) animOut = lambda (V.vec1 <$> i) renderOut where i = -- clockIn fakeTime
361
animOut :: Out (Sink R1) animOut = lambda (V.vec1 <$> i) renderOut where i = -- clockIn fakeTime
107
animOut = lambda (V.vec1 <$> i) renderOut where i = -- clockIn fakeTime
82
true
true
0
8
79
49
28
21
null
null
brendanhay/gogol
gogol-datastore/gen/Network/Google/Resource/Datastore/Projects/ReserveIds.hs
mpl-2.0
-- | OAuth access token. priAccessToken :: Lens' ProjectsReserveIds (Maybe Text) priAccessToken = lens _priAccessToken (\ s a -> s{_priAccessToken = a})
160
priAccessToken :: Lens' ProjectsReserveIds (Maybe Text) priAccessToken = lens _priAccessToken (\ s a -> s{_priAccessToken = a})
135
priAccessToken = lens _priAccessToken (\ s a -> s{_priAccessToken = a})
79
true
true
0
9
29
48
25
23
null
null
mettekou/ghc
compiler/backpack/DriverBkp.hs
bsd-3-clause
{- ************************************************************************ * * Module graph construction * * ************************************************************************ -} -- | This is our version of GhcMake.downsweep, but with a few modifications: -- -- 1. Every module is required to be mentioned, so we don't do any funny -- business with targets or recursively grabbing dependencies. (We -- could support this in principle). -- 2. We support inline modules, whose summary we have to synthesize ourself. -- -- We don't bother trying to support GhcMake for now, it's more trouble -- than it's worth for inline modules. hsunitModuleGraph :: DynFlags -> HsUnit HsComponentId -> BkpM ModuleGraph hsunitModuleGraph dflags unit = do let decls = hsunitBody unit pn = hsPackageName (unLoc (hsunitName unit)) -- 1. Create a HsSrcFile/HsigFile summary for every -- explicitly mentioned module/signature. let get_decl (L _ (DeclD dt lmodname mb_hsmod)) = do let hsc_src = case dt of ModuleD -> HsSrcFile SignatureD -> HsigFile Just `fmap` summariseDecl pn hsc_src lmodname mb_hsmod get_decl _ = return Nothing nodes <- catMaybes `fmap` mapM get_decl decls -- 2. For each hole which does not already have an hsig file, -- create an "empty" hsig file to induce compilation for the -- requirement. let node_map = Map.fromList [ ((ms_mod_name n, ms_hsc_src n == HsigFile), n) | n <- nodes ] req_nodes <- fmap catMaybes . forM (thisUnitIdInsts dflags) $ \(mod_name, _) -> let has_local = Map.member (mod_name, True) node_map in if has_local then return Nothing else fmap Just $ summariseRequirement pn mod_name -- 3. Return the kaboodle return (nodes ++ req_nodes)
2,058
hsunitModuleGraph :: DynFlags -> HsUnit HsComponentId -> BkpM ModuleGraph hsunitModuleGraph dflags unit = do let decls = hsunitBody unit pn = hsPackageName (unLoc (hsunitName unit)) -- 1. Create a HsSrcFile/HsigFile summary for every -- explicitly mentioned module/signature. let get_decl (L _ (DeclD dt lmodname mb_hsmod)) = do let hsc_src = case dt of ModuleD -> HsSrcFile SignatureD -> HsigFile Just `fmap` summariseDecl pn hsc_src lmodname mb_hsmod get_decl _ = return Nothing nodes <- catMaybes `fmap` mapM get_decl decls -- 2. For each hole which does not already have an hsig file, -- create an "empty" hsig file to induce compilation for the -- requirement. let node_map = Map.fromList [ ((ms_mod_name n, ms_hsc_src n == HsigFile), n) | n <- nodes ] req_nodes <- fmap catMaybes . forM (thisUnitIdInsts dflags) $ \(mod_name, _) -> let has_local = Map.member (mod_name, True) node_map in if has_local then return Nothing else fmap Just $ summariseRequirement pn mod_name -- 3. Return the kaboodle return (nodes ++ req_nodes)
1,249
hsunitModuleGraph dflags unit = do let decls = hsunitBody unit pn = hsPackageName (unLoc (hsunitName unit)) -- 1. Create a HsSrcFile/HsigFile summary for every -- explicitly mentioned module/signature. let get_decl (L _ (DeclD dt lmodname mb_hsmod)) = do let hsc_src = case dt of ModuleD -> HsSrcFile SignatureD -> HsigFile Just `fmap` summariseDecl pn hsc_src lmodname mb_hsmod get_decl _ = return Nothing nodes <- catMaybes `fmap` mapM get_decl decls -- 2. For each hole which does not already have an hsig file, -- create an "empty" hsig file to induce compilation for the -- requirement. let node_map = Map.fromList [ ((ms_mod_name n, ms_hsc_src n == HsigFile), n) | n <- nodes ] req_nodes <- fmap catMaybes . forM (thisUnitIdInsts dflags) $ \(mod_name, _) -> let has_local = Map.member (mod_name, True) node_map in if has_local then return Nothing else fmap Just $ summariseRequirement pn mod_name -- 3. Return the kaboodle return (nodes ++ req_nodes)
1,175
true
true
9
13
643
310
166
144
null
null
Lythimus/lptv
sites/all/modules/jgm-pandoc-8be6cc2/src/Text/Pandoc/Writers/MediaWiki.hs
gpl-2.0
inlineToMediaWiki _ Space = return " "
38
inlineToMediaWiki _ Space = return " "
38
inlineToMediaWiki _ Space = return " "
38
false
false
1
5
6
15
6
9
null
null
jcberentsen/haskell
tensorflow-ops/src/TensorFlow/Gradient.hs
apache-2.0
-- | Lens that defaults Nothing to mempty. nonEmpty :: (Monoid (t v), Foldable t) => Lens' (Maybe (t v)) (t v) nonEmpty = anon mempty null
138
nonEmpty :: (Monoid (t v), Foldable t) => Lens' (Maybe (t v)) (t v) nonEmpty = anon mempty null
95
nonEmpty = anon mempty null
27
true
true
0
11
26
66
32
34
null
null
snoyberg/ghc
compiler/basicTypes/Id.hs
bsd-3-clause
-- | Create a template local for a series of type, but start from a specified template local mkTemplateLocalsNum :: Int -> [Type] -> [Id] mkTemplateLocalsNum n tys = zipWith mkTemplateLocal [n..] tys
199
mkTemplateLocalsNum :: Int -> [Type] -> [Id] mkTemplateLocalsNum n tys = zipWith mkTemplateLocal [n..] tys
106
mkTemplateLocalsNum n tys = zipWith mkTemplateLocal [n..] tys
61
true
true
0
7
32
41
22
19
null
null
jacekm-git/HsPredictor
executable/Main.hs
gpl-3.0
initializeTable :: Ref Table -> IO () initializeTable table = do begin table setRows table maxRows setRowHeader table True setRowHeightAll table 20 setRowResize table False setCols table maxCols setColHeader table True setColWidthAll table 80 setColResize table True end table
296
initializeTable :: Ref Table -> IO () initializeTable table = do begin table setRows table maxRows setRowHeader table True setRowHeightAll table 20 setRowResize table False setCols table maxCols setColHeader table True setColWidthAll table 80 setColResize table True end table
296
initializeTable table = do begin table setRows table maxRows setRowHeader table True setRowHeightAll table 20 setRowResize table False setCols table maxCols setColHeader table True setColWidthAll table 80 setColResize table True end table
258
false
true
0
7
58
102
40
62
null
null
dmjio/miso
src/Miso/Svg/Attribute.hs
bsd-3-clause
-- | <https://developer.mozilla.org/en-US/docs/Web/SVG/Attribute/baseProfile> baseProfile_ :: MisoString -> Attribute action baseProfile_ = attr "baseProfile"
159
baseProfile_ :: MisoString -> Attribute action baseProfile_ = attr "baseProfile"
81
baseProfile_ = attr "baseProfile"
33
true
true
0
6
13
22
11
11
null
null
KiNaudiz/bachelor
CN/Vector.hs
gpl-3.0
-- TODO: partitionVec (!) :: Vector a -> VKey -> a v ! i = (A.!) (vec v) i
75
(!) :: Vector a -> VKey -> a v ! i = (A.!) (vec v) i
52
v ! i = (A.!) (vec v) i
23
true
true
0
9
19
50
25
25
null
null
TomMD/crypto-api
Crypto/Util.hs
bsd-3-clause
-- |zipWith xor + Pack -- -- This is written intentionally to take advantage -- of the bytestring libraries 'zipWith'' rewrite rule but at the -- extra cost of the resulting lazy bytestring being more fragmented -- than either of the two inputs. zwp :: L.ByteString -> L.ByteString -> L.ByteString zwp a b = let as = L.toChunks a bs = L.toChunks b in L.fromChunks (go as bs) where go [] _ = [] go _ [] = [] go (a:as) (b:bs) = let l = min (B.length a) (B.length b) (a',ar) = B.splitAt l a (b',br) = B.splitAt l b as' = if B.length ar == 0 then as else ar : as bs' = if B.length br == 0 then bs else br : bs in (zwp' a' b') : go as' bs'
737
zwp :: L.ByteString -> L.ByteString -> L.ByteString zwp a b = let as = L.toChunks a bs = L.toChunks b in L.fromChunks (go as bs) where go [] _ = [] go _ [] = [] go (a:as) (b:bs) = let l = min (B.length a) (B.length b) (a',ar) = B.splitAt l a (b',br) = B.splitAt l b as' = if B.length ar == 0 then as else ar : as bs' = if B.length br == 0 then bs else br : bs in (zwp' a' b') : go as' bs'
491
zwp a b = let as = L.toChunks a bs = L.toChunks b in L.fromChunks (go as bs) where go [] _ = [] go _ [] = [] go (a:as) (b:bs) = let l = min (B.length a) (B.length b) (a',ar) = B.splitAt l a (b',br) = B.splitAt l b as' = if B.length ar == 0 then as else ar : as bs' = if B.length br == 0 then bs else br : bs in (zwp' a' b') : go as' bs'
439
true
true
2
12
237
281
139
142
null
null
dicomgrid/dicom-haskell-library
src/Data/DICOM/Dictionary.hs
gpl-3.0
queryretrievelevel :: String -> Element queryretrievelevel = cs $ tag (TagGroup 0x0008) (TagElement 0x0052)
107
queryretrievelevel :: String -> Element queryretrievelevel = cs $ tag (TagGroup 0x0008) (TagElement 0x0052)
107
queryretrievelevel = cs $ tag (TagGroup 0x0008) (TagElement 0x0052)
67
false
true
2
7
13
43
19
24
null
null
scsibug/hS3
examples/createBucket.hs
bsd-3-clause
main = do argv <- getArgs let bucket = head argv mConn <- amazonS3ConnectionFromEnv let conn = fromJust mConn putStrLn ("Creating bucket with name: " ++ bucket) res <- createBucketWithPrefix conn bucket either (putStrLn . prettyReqError) (\x -> putStrLn ("Creation of " ++ x ++ " successful.")) res
392
main = do argv <- getArgs let bucket = head argv mConn <- amazonS3ConnectionFromEnv let conn = fromJust mConn putStrLn ("Creating bucket with name: " ++ bucket) res <- createBucketWithPrefix conn bucket either (putStrLn . prettyReqError) (\x -> putStrLn ("Creation of " ++ x ++ " successful.")) res
392
main = do argv <- getArgs let bucket = head argv mConn <- amazonS3ConnectionFromEnv let conn = fromJust mConn putStrLn ("Creating bucket with name: " ++ bucket) res <- createBucketWithPrefix conn bucket either (putStrLn . prettyReqError) (\x -> putStrLn ("Creation of " ++ x ++ " successful.")) res
392
false
false
0
13
141
108
50
58
null
null
zenhack/haskell-capnp
cmd/capnpc-haskell/Trans/Stage1ToFlat.hs
mit
applyBrandType :: ApplyBrandFn C.Type applyBrandType = \case C.CompositeType t -> C.CompositeType $ applyBrandCompositeType t C.VoidType -> C.VoidType C.WordType t -> C.WordType t C.PtrType t -> C.PtrType $ applyBrandPtrType t
260
applyBrandType :: ApplyBrandFn C.Type applyBrandType = \case C.CompositeType t -> C.CompositeType $ applyBrandCompositeType t C.VoidType -> C.VoidType C.WordType t -> C.WordType t C.PtrType t -> C.PtrType $ applyBrandPtrType t
260
applyBrandType = \case C.CompositeType t -> C.CompositeType $ applyBrandCompositeType t C.VoidType -> C.VoidType C.WordType t -> C.WordType t C.PtrType t -> C.PtrType $ applyBrandPtrType t
222
false
true
0
9
62
84
39
45
null
null
pgj/bead
src/Bead/Controller/Pages.hs
bsd-3-clause
viewPageCata login logout home courseOverview evaluationTable viewAssignment submissionList userSubmissions administration courseAdmin p = case p of Login a -> login a Logout a -> logout a Home a -> home a CourseOverview ck a -> courseOverview ck a EvaluationTable a -> evaluationTable a ViewAssignment ak a -> viewAssignment ak a SubmissionList a -> submissionList a UserSubmissions a -> userSubmissions a Administration a -> administration a CourseAdmin a -> courseAdmin a
536
viewPageCata login logout home courseOverview evaluationTable viewAssignment submissionList userSubmissions administration courseAdmin p = case p of Login a -> login a Logout a -> logout a Home a -> home a CourseOverview ck a -> courseOverview ck a EvaluationTable a -> evaluationTable a ViewAssignment ak a -> viewAssignment ak a SubmissionList a -> submissionList a UserSubmissions a -> userSubmissions a Administration a -> administration a CourseAdmin a -> courseAdmin a
536
viewPageCata login logout home courseOverview evaluationTable viewAssignment submissionList userSubmissions administration courseAdmin p = case p of Login a -> login a Logout a -> logout a Home a -> home a CourseOverview ck a -> courseOverview ck a EvaluationTable a -> evaluationTable a ViewAssignment ak a -> viewAssignment ak a SubmissionList a -> submissionList a UserSubmissions a -> userSubmissions a Administration a -> administration a CourseAdmin a -> courseAdmin a
536
false
false
0
8
131
171
70
101
null
null
wavewave/devadmin
lib/Application/DevAdmin/Graph.hs
bsd-3-clause
makeProjDepOrderList :: BuildConfiguration -> ProjectConfiguration -> IO (DaughterMap,[String]) makeProjDepOrderList bc pc = do let projects = pc_projects pc let idnamemap = idprojmap projects nameidmap = projidmap projects -- let (p,w) = (,) <$> bc_srcbase <*> bc_workspacebase $ bc p = bc_srcbase bc gdescs <- mapM (readPackageDescription normal . getCabalFileName p ) projects let deps = map ((,) <$> getPkgName <*> getDependency) gdescs motherlist = map ((,) <$> fst <*> (filter (nameMatch projects). snd)) deps daughtermap = convertMotherMapToDaughterMap motherlist daughterlist = M.toList daughtermap edgelist = concatMap (mkDepEdge nameidmap) daughterlist allnodes = idproj projects gr :: Gr String () gr = mkGraph allnodes edgelist linear = topsort gr strlst = map (\x->fromJust $ M.lookup x idnamemap) linear return (daughtermap,strlst) -- | get all dependent daughter packages on a given package
1,040
makeProjDepOrderList :: BuildConfiguration -> ProjectConfiguration -> IO (DaughterMap,[String]) makeProjDepOrderList bc pc = do let projects = pc_projects pc let idnamemap = idprojmap projects nameidmap = projidmap projects -- let (p,w) = (,) <$> bc_srcbase <*> bc_workspacebase $ bc p = bc_srcbase bc gdescs <- mapM (readPackageDescription normal . getCabalFileName p ) projects let deps = map ((,) <$> getPkgName <*> getDependency) gdescs motherlist = map ((,) <$> fst <*> (filter (nameMatch projects). snd)) deps daughtermap = convertMotherMapToDaughterMap motherlist daughterlist = M.toList daughtermap edgelist = concatMap (mkDepEdge nameidmap) daughterlist allnodes = idproj projects gr :: Gr String () gr = mkGraph allnodes edgelist linear = topsort gr strlst = map (\x->fromJust $ M.lookup x idnamemap) linear return (daughtermap,strlst) -- | get all dependent daughter packages on a given package
1,040
makeProjDepOrderList bc pc = do let projects = pc_projects pc let idnamemap = idprojmap projects nameidmap = projidmap projects -- let (p,w) = (,) <$> bc_srcbase <*> bc_workspacebase $ bc p = bc_srcbase bc gdescs <- mapM (readPackageDescription normal . getCabalFileName p ) projects let deps = map ((,) <$> getPkgName <*> getDependency) gdescs motherlist = map ((,) <$> fst <*> (filter (nameMatch projects). snd)) deps daughtermap = convertMotherMapToDaughterMap motherlist daughterlist = M.toList daughtermap edgelist = concatMap (mkDepEdge nameidmap) daughterlist allnodes = idproj projects gr :: Gr String () gr = mkGraph allnodes edgelist linear = topsort gr strlst = map (\x->fromJust $ M.lookup x idnamemap) linear return (daughtermap,strlst) -- | get all dependent daughter packages on a given package
901
false
true
0
17
262
284
142
142
null
null
jml/haverer
lib/Haverer/Action.hs
apache-2.0
_validatePlay player Knight play@(Attack target) | player == target = throwError SelfTarget | otherwise = return play
121
_validatePlay player Knight play@(Attack target) | player == target = throwError SelfTarget | otherwise = return play
121
_validatePlay player Knight play@(Attack target) | player == target = throwError SelfTarget | otherwise = return play
121
false
false
0
8
20
57
23
34
null
null
smaccm/capDL-tool
CapDL/Model.hs
bsd-2-clause
tcbIPCBufferSlot :: Word tcbIPCBufferSlot = 4
45
tcbIPCBufferSlot :: Word tcbIPCBufferSlot = 4
45
tcbIPCBufferSlot = 4
20
false
true
0
6
5
18
7
11
null
null
akhileshs/stack
src/Path/IO.hs
bsd-3-clause
-- | Move a dir. Optimistically assumes it exists. If it doesn't, -- doesn't complain. moveDirIfExists :: MonadIO m => Path b1 Dir -> Path b2 Dir -> m () moveDirIfExists from to = ignoreDoesNotExist (moveDir from to)
216
moveDirIfExists :: MonadIO m => Path b1 Dir -> Path b2 Dir -> m () moveDirIfExists from to = ignoreDoesNotExist (moveDir from to)
129
moveDirIfExists from to = ignoreDoesNotExist (moveDir from to)
62
true
true
0
9
37
59
28
31
null
null
mmx1/tokenGen
src/Generator.hs
bsd-3-clause
getRandom :: IO Int getRandom = do g <- newGenIO :: IO CtrDRBG case (crandom g :: Either GenError (Int, CtrDRBG))of Left err -> (error $ show err) >> exitFailure Right (result, g2 ) -> return result
210
getRandom :: IO Int getRandom = do g <- newGenIO :: IO CtrDRBG case (crandom g :: Either GenError (Int, CtrDRBG))of Left err -> (error $ show err) >> exitFailure Right (result, g2 ) -> return result
210
getRandom = do g <- newGenIO :: IO CtrDRBG case (crandom g :: Either GenError (Int, CtrDRBG))of Left err -> (error $ show err) >> exitFailure Right (result, g2 ) -> return result
190
false
true
0
14
48
101
48
53
null
null
iljakuklic/eel-proto
src/Parser/Core.hs
bsd-3-clause
-- parse input between two string tokens psbet a b = between (pstok a) (pstok b)
80
psbet a b = between (pstok a) (pstok b)
39
psbet a b = between (pstok a) (pstok b)
39
true
false
1
7
15
33
14
19
null
null
mariefarrell/Hets
Framework/Analysis.hs
gpl-2.0
lookupMorph :: Logic lid sublogics basic_spec sentence symb_items symb_map_items sign morphism symbol raw_symbol proof_tree => lid -> MORPH_NAME -> DGraph -> Result morphism lookupMorph l n dg = do let extView = case lookupGlobalEnvDG n dg of Just (ViewOrStructEntry _ ev) -> ev _ -> error $ "The morphism " ++ show n ++ " could not be found." case extView of ExtViewSig _ (GMorphism c _ _ morph _) _ -> do let l' = targetLogic c if Logic l /= Logic l' then error $ "The morphism " ++ show n ++ " is not in the logic " ++ show l ++ "." else coerceMorphism l' l "" morph
732
lookupMorph :: Logic lid sublogics basic_spec sentence symb_items symb_map_items sign morphism symbol raw_symbol proof_tree => lid -> MORPH_NAME -> DGraph -> Result morphism lookupMorph l n dg = do let extView = case lookupGlobalEnvDG n dg of Just (ViewOrStructEntry _ ev) -> ev _ -> error $ "The morphism " ++ show n ++ " could not be found." case extView of ExtViewSig _ (GMorphism c _ _ morph _) _ -> do let l' = targetLogic c if Logic l /= Logic l' then error $ "The morphism " ++ show n ++ " is not in the logic " ++ show l ++ "." else coerceMorphism l' l "" morph
732
lookupMorph l n dg = do let extView = case lookupGlobalEnvDG n dg of Just (ViewOrStructEntry _ ev) -> ev _ -> error $ "The morphism " ++ show n ++ " could not be found." case extView of ExtViewSig _ (GMorphism c _ _ morph _) _ -> do let l' = targetLogic c if Logic l /= Logic l' then error $ "The morphism " ++ show n ++ " is not in the logic " ++ show l ++ "." else coerceMorphism l' l "" morph
522
false
true
0
17
274
218
102
116
null
null
diminishedprime/.org
programmey_stuff/write_yourself_a_scheme/ch_05/hello.hs
mit
eval val@(Ratio _ ) = return val
32
eval val@(Ratio _ ) = return val
32
eval val@(Ratio _ ) = return val
32
false
false
0
7
6
22
10
12
null
null
DominikDitoIvosevic/Uni
IRG/src/Irg/Lab3/Utility.hs
mit
-- import Irg.Lab3.Geometry.Shapes getWindowSize :: Num a => IO (a, a) getWindowSize = do GLUT.Size width height <- GLUT.get GLUT.windowSize return (fromIntegral width,fromIntegral height)
193
getWindowSize :: Num a => IO (a, a) getWindowSize = do GLUT.Size width height <- GLUT.get GLUT.windowSize return (fromIntegral width,fromIntegral height)
157
getWindowSize = do GLUT.Size width height <- GLUT.get GLUT.windowSize return (fromIntegral width,fromIntegral height)
121
true
true
0
10
28
73
33
40
null
null
ComputationWithBoundedResources/ara-inference
doc/tpdb_trs/Haskell/basic_haskell/show_4.hs
mit
primModInt vx vy = error
24
primModInt vx vy = error
24
primModInt vx vy = error
24
false
false
1
5
4
16
5
11
null
null
chrisbloecker/Hive
src/Hive/Data/Graph.hs
gpl-3.0
------------------------------------------------------------------------------- (<+>) :: (Applicative a, Num n) => a n -> a n -> a n m <+> n = (+) <$> m <*> n
159
(<+>) :: (Applicative a, Num n) => a n -> a n -> a n m <+> n = (+) <$> m <*> n
78
m <+> n = (+) <$> m <*> n
25
true
true
0
10
25
62
32
30
null
null
joelburget/haste-compiler
libraries/base-ghc-7.6/GHC/Float/RealFracMethods.hs
bsd-3-clause
float2Int :: Float -> Int float2Int (F# x) = I# (float2Int# x)
62
float2Int :: Float -> Int float2Int (F# x) = I# (float2Int# x)
62
float2Int (F# x) = I# (float2Int# x)
36
false
true
0
7
11
33
16
17
null
null
beni55/ghcjs
src/Compiler/JMacro/Util.hs
mit
jstr :: Text -> JExpr jstr = ValExpr P.. JStr
45
jstr :: Text -> JExpr jstr = ValExpr P.. JStr
45
jstr = ValExpr P.. JStr
23
false
true
1
7
9
29
12
17
null
null
cirquit/quizlearner
quizlearner/Handler/Quizcreator.hs
mit
toExamAttributes :: Text -> ExamAttributes toExamAttributes ((splitOn "($)") -> [a, b, c]) = let (Just passPercentage) = maybeDouble $ unpack b (Just questCount) = maybeInt $ unpack c title = (unwords . words) a in ExamAttributes title passPercentage questCount
423
toExamAttributes :: Text -> ExamAttributes toExamAttributes ((splitOn "($)") -> [a, b, c]) = let (Just passPercentage) = maybeDouble $ unpack b (Just questCount) = maybeInt $ unpack c title = (unwords . words) a in ExamAttributes title passPercentage questCount
423
toExamAttributes ((splitOn "($)") -> [a, b, c]) = let (Just passPercentage) = maybeDouble $ unpack b (Just questCount) = maybeInt $ unpack c title = (unwords . words) a in ExamAttributes title passPercentage questCount
380
false
true
0
11
200
111
54
57
null
null
sitewisely/zellige
src/Data/Geometry/Clip/Internal/LineNichollLeeNicholl.hs
apache-2.0
clipLine ::TypesGeography.BoundingBox -> TypesGeography.GeoStorableLine -> Maybe TypesGeography.GeoStorableLine clipLine r@(TypesGeography.BoundingBox left _ right _) l@(TypesGeography.GeoStorableLine (Geospatial.PointXY p1x _) _) | p1x < left = _p1Left r l | p1x > right = rotateLine180c <$> _p1Left (rotateRect180c r) (rotateLine180c l) | otherwise = _p1Centre r l
375
clipLine ::TypesGeography.BoundingBox -> TypesGeography.GeoStorableLine -> Maybe TypesGeography.GeoStorableLine clipLine r@(TypesGeography.BoundingBox left _ right _) l@(TypesGeography.GeoStorableLine (Geospatial.PointXY p1x _) _) | p1x < left = _p1Left r l | p1x > right = rotateLine180c <$> _p1Left (rotateRect180c r) (rotateLine180c l) | otherwise = _p1Centre r l
375
clipLine r@(TypesGeography.BoundingBox left _ right _) l@(TypesGeography.GeoStorableLine (Geospatial.PointXY p1x _) _) | p1x < left = _p1Left r l | p1x > right = rotateLine180c <$> _p1Left (rotateRect180c r) (rotateLine180c l) | otherwise = _p1Centre r l
263
false
true
15
10
52
133
66
67
null
null
danchoi/jsonconvhtml
Main.hs
mit
decodeWith :: (FromJSON a) => Parser Value -> BL.ByteString -> (Maybe a, BL.ByteString) decodeWith p s = case Atto.parse p s of Atto.Done r v -> f v r Atto.Fail _ _ _ -> (Nothing, mempty) where f v' r = (\x -> case x of Success a -> (Just a, r) _ -> (Nothing, r)) $ fromJSON v'
340
decodeWith :: (FromJSON a) => Parser Value -> BL.ByteString -> (Maybe a, BL.ByteString) decodeWith p s = case Atto.parse p s of Atto.Done r v -> f v r Atto.Fail _ _ _ -> (Nothing, mempty) where f v' r = (\x -> case x of Success a -> (Just a, r) _ -> (Nothing, r)) $ fromJSON v'
340
decodeWith p s = case Atto.parse p s of Atto.Done r v -> f v r Atto.Fail _ _ _ -> (Nothing, mempty) where f v' r = (\x -> case x of Success a -> (Just a, r) _ -> (Nothing, r)) $ fromJSON v'
252
false
true
0
12
121
160
81
79
null
null
rvion/lamdu
bottlelib/Graphics/DrawingCombinators/Utils.hs
gpl-3.0
textHeight :: Draw.R textHeight = 2
35
textHeight :: Draw.R textHeight = 2
35
textHeight = 2
14
false
true
0
7
5
20
8
12
null
null
SamirTalwar/advent-of-code
2021/AOC_09_1.hs
mit
main :: IO () main = do heightMap <- Grid.fromDigits <$> getContents let lowestPoints = findLowestPoints heightMap print $ sum $ map succ lowestPoints
156
main :: IO () main = do heightMap <- Grid.fromDigits <$> getContents let lowestPoints = findLowestPoints heightMap print $ sum $ map succ lowestPoints
156
main = do heightMap <- Grid.fromDigits <$> getContents let lowestPoints = findLowestPoints heightMap print $ sum $ map succ lowestPoints
142
false
true
0
10
29
57
26
31
null
null
bairyn/bitmaps
src/Data/Bitmap/Class.hs
bsd-3-clause
tryIBF_RGB32 :: (S.Stringy s, Bitmap bmp) => bmp -> s -> Either String bmp tryIBF_RGB32 metaBitmap s | S.length s < minLength = Left $ printf "Data.Bitmap.Class.tryIBF_RGB32: string is too small to contain the pixels of a bitmap with the dimensions of the passed bitmap, which are (%d, %d); the string is %d bytes long, but needs to be at least %d bytes long" (fromIntegral width :: Integer) (fromIntegral height :: Integer) (S.length s) minLength | otherwise = Right $ constructPixels pixelf dms where (width, height) = dimensions metaBitmap dms = (fromIntegral width, fromIntegral height) bytesPerPixel = 4 bytesPerRow = bytesPerPixel * width minLength = fromIntegral $ bytesPerRow * height pixelf (row, column) = let offset = fromIntegral $ bytesPerRow * (fromIntegral row) + bytesPerPixel * (fromIntegral column) in (red =: (S.toWord8 $ s `S.index` (offset + 1))) . (green =: (S.toWord8 $ s `S.index` (offset + 2))) . (blue =: (S.toWord8 $ s `S.index` (offset + 3))) $ leastIntensity
1,224
tryIBF_RGB32 :: (S.Stringy s, Bitmap bmp) => bmp -> s -> Either String bmp tryIBF_RGB32 metaBitmap s | S.length s < minLength = Left $ printf "Data.Bitmap.Class.tryIBF_RGB32: string is too small to contain the pixels of a bitmap with the dimensions of the passed bitmap, which are (%d, %d); the string is %d bytes long, but needs to be at least %d bytes long" (fromIntegral width :: Integer) (fromIntegral height :: Integer) (S.length s) minLength | otherwise = Right $ constructPixels pixelf dms where (width, height) = dimensions metaBitmap dms = (fromIntegral width, fromIntegral height) bytesPerPixel = 4 bytesPerRow = bytesPerPixel * width minLength = fromIntegral $ bytesPerRow * height pixelf (row, column) = let offset = fromIntegral $ bytesPerRow * (fromIntegral row) + bytesPerPixel * (fromIntegral column) in (red =: (S.toWord8 $ s `S.index` (offset + 1))) . (green =: (S.toWord8 $ s `S.index` (offset + 2))) . (blue =: (S.toWord8 $ s `S.index` (offset + 3))) $ leastIntensity
1,224
tryIBF_RGB32 metaBitmap s | S.length s < minLength = Left $ printf "Data.Bitmap.Class.tryIBF_RGB32: string is too small to contain the pixels of a bitmap with the dimensions of the passed bitmap, which are (%d, %d); the string is %d bytes long, but needs to be at least %d bytes long" (fromIntegral width :: Integer) (fromIntegral height :: Integer) (S.length s) minLength | otherwise = Right $ constructPixels pixelf dms where (width, height) = dimensions metaBitmap dms = (fromIntegral width, fromIntegral height) bytesPerPixel = 4 bytesPerRow = bytesPerPixel * width minLength = fromIntegral $ bytesPerRow * height pixelf (row, column) = let offset = fromIntegral $ bytesPerRow * (fromIntegral row) + bytesPerPixel * (fromIntegral column) in (red =: (S.toWord8 $ s `S.index` (offset + 1))) . (green =: (S.toWord8 $ s `S.index` (offset + 2))) . (blue =: (S.toWord8 $ s `S.index` (offset + 3))) $ leastIntensity
1,149
false
true
2
15
396
347
179
168
null
null
SAdams601/HaRe
old/testing/simplifyExpr/EitherIn1_TokOut.hs
bsd-3-clause
p x@(y:ys) = case x of [] -> 42 (z:zs) -> zs
70
p x@(y:ys) = case x of [] -> 42 (z:zs) -> zs
70
p x@(y:ys) = case x of [] -> 42 (z:zs) -> zs
70
false
false
1
10
37
50
23
27
null
null
patperry/lapack
tests/Orthogonal.hs
bsd-3-clause
prop_perm_doSSolveMatrix alpha (Nat2 (m,n)) = forAll (testPerm m) $ \p -> forAll (Test.matrix (m,n)) $ \(b :: M) -> forAll (Test.matrix (m,n)) $ \c -> monadicST $ do b' <- run $ unsafeThawMatrix b run $ doSSolveMatrix alpha p c b' assert $ b ~== p <\\> (alpha *> c)
322
prop_perm_doSSolveMatrix alpha (Nat2 (m,n)) = forAll (testPerm m) $ \p -> forAll (Test.matrix (m,n)) $ \(b :: M) -> forAll (Test.matrix (m,n)) $ \c -> monadicST $ do b' <- run $ unsafeThawMatrix b run $ doSSolveMatrix alpha p c b' assert $ b ~== p <\\> (alpha *> c)
322
prop_perm_doSSolveMatrix alpha (Nat2 (m,n)) = forAll (testPerm m) $ \p -> forAll (Test.matrix (m,n)) $ \(b :: M) -> forAll (Test.matrix (m,n)) $ \c -> monadicST $ do b' <- run $ unsafeThawMatrix b run $ doSSolveMatrix alpha p c b' assert $ b ~== p <\\> (alpha *> c)
322
false
false
0
16
106
156
79
77
null
null
rzetterberg/alven
src/lib/Plugin/Auth/Email.hs
mit
postLoginR :: YesodAuthEmail master => HandlerT Auth (HandlerT master IO) TypedContent postLoginR = do (identifier, pass) <- lift $ runInputPost $ (,) <$> ireq textField "email" <*> ireq textField "password" mecreds <- lift $ getEmailCreds identifier maid <- case ( mecreds >>= emailCredsAuthId , emailCredsEmail <$> mecreds , emailCredsStatus <$> mecreds ) of (Just aid, Just email, Just True) -> do mrealpass <- lift $ getPassword aid case mrealpass of Nothing -> return Nothing Just realpass -> return $ if isValidPass pass realpass then Just email else Nothing _ -> return Nothing let isEmail = Text.Email.Validate.isValid $ encodeUtf8 identifier case maid of Just email -> lift $ setCredsRedirect $ Creds (if isEmail then "email" else "username") email [("verifiedEmail", email)] Nothing -> loginErrorMessageI LoginR $ if isEmail then Msg.InvalidEmailPass else Msg.InvalidUsernamePass
1,292
postLoginR :: YesodAuthEmail master => HandlerT Auth (HandlerT master IO) TypedContent postLoginR = do (identifier, pass) <- lift $ runInputPost $ (,) <$> ireq textField "email" <*> ireq textField "password" mecreds <- lift $ getEmailCreds identifier maid <- case ( mecreds >>= emailCredsAuthId , emailCredsEmail <$> mecreds , emailCredsStatus <$> mecreds ) of (Just aid, Just email, Just True) -> do mrealpass <- lift $ getPassword aid case mrealpass of Nothing -> return Nothing Just realpass -> return $ if isValidPass pass realpass then Just email else Nothing _ -> return Nothing let isEmail = Text.Email.Validate.isValid $ encodeUtf8 identifier case maid of Just email -> lift $ setCredsRedirect $ Creds (if isEmail then "email" else "username") email [("verifiedEmail", email)] Nothing -> loginErrorMessageI LoginR $ if isEmail then Msg.InvalidEmailPass else Msg.InvalidUsernamePass
1,292
postLoginR = do (identifier, pass) <- lift $ runInputPost $ (,) <$> ireq textField "email" <*> ireq textField "password" mecreds <- lift $ getEmailCreds identifier maid <- case ( mecreds >>= emailCredsAuthId , emailCredsEmail <$> mecreds , emailCredsStatus <$> mecreds ) of (Just aid, Just email, Just True) -> do mrealpass <- lift $ getPassword aid case mrealpass of Nothing -> return Nothing Just realpass -> return $ if isValidPass pass realpass then Just email else Nothing _ -> return Nothing let isEmail = Text.Email.Validate.isValid $ encodeUtf8 identifier case maid of Just email -> lift $ setCredsRedirect $ Creds (if isEmail then "email" else "username") email [("verifiedEmail", email)] Nothing -> loginErrorMessageI LoginR $ if isEmail then Msg.InvalidEmailPass else Msg.InvalidUsernamePass
1,205
false
true
0
18
532
309
153
156
null
null
f1u77y/xmonad-contrib
XMonad/Layout/BinarySpacePartition.hs
bsd-3-clause
modifyParentVal :: (a -> a) -> Crumb a -> Crumb a modifyParentVal f (LeftCrumb s t) = LeftCrumb (f s) t
103
modifyParentVal :: (a -> a) -> Crumb a -> Crumb a modifyParentVal f (LeftCrumb s t) = LeftCrumb (f s) t
103
modifyParentVal f (LeftCrumb s t) = LeftCrumb (f s) t
53
false
true
0
7
20
60
28
32
null
null
plclub/cis670-16fa
projects/DynamicLang/src/Compiler.hs
mit
arrowUnify :: TypeRep a -> Maybe (ArrowRep a) arrowUnify tra = do (TR.App tra' tra2) <- splitApp tra (TR.App tr tra1) <- splitApp tra' Refl <- eqT tr (typeRep :: TypeRep (->)) return $ ArrowRep tra1 tra2 -- ======================================================================================= -- | Take a TypeRep 'tr' and a StaticExp Dynamic 'he' and check whether 'tr' is an -- | ArrorRep of the -- | Updated compiler. We use information about the current context to infer the types -- | and we percolate this information down though an additional TypeRep parameter. -- | TODO: We can define an ADT which holds the different types of errors we expect to -- | have. Then we can change the return value to Either (Static t) (MyErrors) -- | Then we can return an error and unit test to ensure things that shouldn't -- | compile, don't.
847
arrowUnify :: TypeRep a -> Maybe (ArrowRep a) arrowUnify tra = do (TR.App tra' tra2) <- splitApp tra (TR.App tr tra1) <- splitApp tra' Refl <- eqT tr (typeRep :: TypeRep (->)) return $ ArrowRep tra1 tra2 -- ======================================================================================= -- | Take a TypeRep 'tr' and a StaticExp Dynamic 'he' and check whether 'tr' is an -- | ArrorRep of the -- | Updated compiler. We use information about the current context to infer the types -- | and we percolate this information down though an additional TypeRep parameter. -- | TODO: We can define an ADT which holds the different types of errors we expect to -- | have. Then we can change the return value to Either (Static t) (MyErrors) -- | Then we can return an error and unit test to ensure things that shouldn't -- | compile, don't.
847
arrowUnify tra = do (TR.App tra' tra2) <- splitApp tra (TR.App tr tra1) <- splitApp tra' Refl <- eqT tr (typeRep :: TypeRep (->)) return $ ArrowRep tra1 tra2 -- ======================================================================================= -- | Take a TypeRep 'tr' and a StaticExp Dynamic 'he' and check whether 'tr' is an -- | ArrorRep of the -- | Updated compiler. We use information about the current context to infer the types -- | and we percolate this information down though an additional TypeRep parameter. -- | TODO: We can define an ADT which holds the different types of errors we expect to -- | have. Then we can change the return value to Either (Static t) (MyErrors) -- | Then we can return an error and unit test to ensure things that shouldn't -- | compile, don't.
801
false
true
0
11
153
115
58
57
null
null
CBMM/CBaaS
cbaas-frontend/src/Frontend/Canvas.hs
bsd-3-clause
touchRelCoord x0 y0 tch = relativizedCoord x0 y0 <$> touchCoord tch
67
touchRelCoord x0 y0 tch = relativizedCoord x0 y0 <$> touchCoord tch
67
touchRelCoord x0 y0 tch = relativizedCoord x0 y0 <$> touchCoord tch
67
false
false
0
6
10
25
11
14
null
null
davidsundelius/JLC
src/TypeChecker.hs
mit
inferArray :: Ident -> Env -> Err Type inferArray i env = do t <- lookupVar i env (case t of TArray t' -> return t' _ -> fail $ "[inferArray] type of input expression is not array " ++ printTree i)
219
inferArray :: Ident -> Env -> Err Type inferArray i env = do t <- lookupVar i env (case t of TArray t' -> return t' _ -> fail $ "[inferArray] type of input expression is not array " ++ printTree i)
219
inferArray i env = do t <- lookupVar i env (case t of TArray t' -> return t' _ -> fail $ "[inferArray] type of input expression is not array " ++ printTree i)
180
false
true
0
12
63
78
36
42
null
null
DanielWaterworth/Idris-dev
src/Idris/IdeSlave.hs
bsd-3-clause
constTy _ = "Type"
18
constTy _ = "Type"
18
constTy _ = "Type"
18
false
false
0
5
3
9
4
5
null
null
emwap/feldspar-language
src/Feldspar/Core/Types.hs
bsd-3-clause
typeEq DoubleType DoubleType = Just TypeEq
42
typeEq DoubleType DoubleType = Just TypeEq
42
typeEq DoubleType DoubleType = Just TypeEq
42
false
false
0
5
5
14
6
8
null
null
nevrenato/Hets_Fork
Comorphisms/Hs2HOLCFaux.hs
gpl-2.0
mthFractional = ["/","recip","fromRational"]
44
mthFractional = ["/","recip","fromRational"]
44
mthFractional = ["/","recip","fromRational"]
44
false
false
0
5
2
15
9
6
null
null
gridaphobe/cabal
cabal-install/Distribution/Client/PackageIndex.hs
bsd-3-clause
invariant :: Package pkg => PackageIndex pkg -> Bool invariant (PackageIndex m) = all (uncurry goodBucket) (Map.toList m) where goodBucket _ [] = False goodBucket name (pkg0:pkgs0) = check (packageId pkg0) pkgs0 where check pkgid [] = packageName pkgid == name check pkgid (pkg':pkgs) = packageName pkgid == name && pkgid < pkgid' && check pkgid' pkgs where pkgid' = packageId pkg' -- -- * Internal helpers --
526
invariant :: Package pkg => PackageIndex pkg -> Bool invariant (PackageIndex m) = all (uncurry goodBucket) (Map.toList m) where goodBucket _ [] = False goodBucket name (pkg0:pkgs0) = check (packageId pkg0) pkgs0 where check pkgid [] = packageName pkgid == name check pkgid (pkg':pkgs) = packageName pkgid == name && pkgid < pkgid' && check pkgid' pkgs where pkgid' = packageId pkg' -- -- * Internal helpers --
526
invariant (PackageIndex m) = all (uncurry goodBucket) (Map.toList m) where goodBucket _ [] = False goodBucket name (pkg0:pkgs0) = check (packageId pkg0) pkgs0 where check pkgid [] = packageName pkgid == name check pkgid (pkg':pkgs) = packageName pkgid == name && pkgid < pkgid' && check pkgid' pkgs where pkgid' = packageId pkg' -- -- * Internal helpers --
473
false
true
1
10
184
186
85
101
null
null
dcreager/cabal
Distribution/Simple/Setup.hs
bsd-3-clause
optionDistPref :: (flags -> Flag FilePath) -> (Flag FilePath -> flags -> flags) -> ShowOrParseArgs -> OptionField flags optionDistPref get set = \showOrParseArgs -> option "" (distPrefFlagName showOrParseArgs) ( "The directory where Cabal puts generated build files " ++ "(default " ++ defaultDistPref ++ ")") get set (reqArgFlag "DIR") where distPrefFlagName ShowArgs = ["builddir"] distPrefFlagName ParseArgs = ["builddir", "distdir", "distpref"]
525
optionDistPref :: (flags -> Flag FilePath) -> (Flag FilePath -> flags -> flags) -> ShowOrParseArgs -> OptionField flags optionDistPref get set = \showOrParseArgs -> option "" (distPrefFlagName showOrParseArgs) ( "The directory where Cabal puts generated build files " ++ "(default " ++ defaultDistPref ++ ")") get set (reqArgFlag "DIR") where distPrefFlagName ShowArgs = ["builddir"] distPrefFlagName ParseArgs = ["builddir", "distdir", "distpref"]
525
optionDistPref get set = \showOrParseArgs -> option "" (distPrefFlagName showOrParseArgs) ( "The directory where Cabal puts generated build files " ++ "(default " ++ defaultDistPref ++ ")") get set (reqArgFlag "DIR") where distPrefFlagName ShowArgs = ["builddir"] distPrefFlagName ParseArgs = ["builddir", "distdir", "distpref"]
360
false
true
0
9
136
135
67
68
null
null
afcady/servant-reflex
src/Servant/Common/Req.hs
bsd-3-clause
reqFailure (RequestFailure _ s) = Just s
43
reqFailure (RequestFailure _ s) = Just s
43
reqFailure (RequestFailure _ s) = Just s
43
false
false
0
7
9
20
9
11
null
null
agocorona/ghcjs-perch
src/Internal/Perch.hs
mit
param = nelem "param"
24
param = nelem "param"
24
param = nelem "param"
24
false
false
1
5
6
13
4
9
null
null
GaloisInc/galua
lib/macho/src/Data/Macho.hs
mit
-- | Throws 'MachoFailure' macho_magic :: Word32 -> MH_MAGIC macho_magic 0xfeedface = MH_MAGIC32
96
macho_magic :: Word32 -> MH_MAGIC macho_magic 0xfeedface = MH_MAGIC32
69
macho_magic 0xfeedface = MH_MAGIC32
35
true
true
0
7
12
25
11
14
null
null
hazel-el/hazel
Hazel/Parser/OWL/RFC3987.hs
gpl-3.0
iRegName :: Parser Text iRegName = T.concat <$> many' (singleton <$> iUnreserved <|> pctEncoded <|> singleton <$> subDelims)
186
iRegName :: Parser Text iRegName = T.concat <$> many' (singleton <$> iUnreserved <|> pctEncoded <|> singleton <$> subDelims)
186
iRegName = T.concat <$> many' (singleton <$> iUnreserved <|> pctEncoded <|> singleton <$> subDelims)
162
false
true
2
10
79
48
22
26
null
null
janschulz/pandoc
src/Text/Pandoc/UTF8.hs
gpl-2.0
filterCRs (x:xs) = x : filterCRs xs
35
filterCRs (x:xs) = x : filterCRs xs
35
filterCRs (x:xs) = x : filterCRs xs
35
false
false
2
6
6
25
11
14
null
null
johanjoensson/QuantumHaskell
FockState.hs
agpl-3.0
(0 :+ 0) *| _ = KetZero
25
(0 :+ 0) *| _ = KetZero
25
(0 :+ 0) *| _ = KetZero
25
false
false
0
7
8
19
9
10
null
null
wavewave/hoodle-core
src/Hoodle/Coroutine/Default.hs
gpl-3.0
menuEventProcess MenuFirstPage = changePage (const 0)
53
menuEventProcess MenuFirstPage = changePage (const 0)
53
menuEventProcess MenuFirstPage = changePage (const 0)
53
false
false
0
7
5
18
8
10
null
null
VictorCMiraldo/mmm
MMM/Core/Distributive.hs
mit
(.+) :: Float -> Float -> Lift Dist FreeMaybe Float (.+) m n = additionFaulty m n
81
(.+) :: Float -> Float -> Lift Dist FreeMaybe Float (.+) m n = additionFaulty m n
81
(.+) m n = additionFaulty m n
29
false
true
0
7
16
40
21
19
null
null
gatlin/psilo
src/Lib/Parser.hs
gpl-3.0
tau :: Parser Type tau = ty_sym <|> (parens compound)
53
tau :: Parser Type tau = ty_sym <|> (parens compound)
53
tau = ty_sym <|> (parens compound)
34
false
true
1
8
9
28
12
16
null
null
styx/gtc
Gt/Core.hs
gpl-3.0
-- Conversion of inner list with pos-term data pt :: (String, JSValue) -> Dict -> Dict pt (tclass, jval) s = case tclass of "pos" -> s { pos = pos_to_str jval } "terms" -> s { terms = nub_sort $ jlist_to_slist jval} _ -> s
269
pt :: (String, JSValue) -> Dict -> Dict pt (tclass, jval) s = case tclass of "pos" -> s { pos = pos_to_str jval } "terms" -> s { terms = nub_sort $ jlist_to_slist jval} _ -> s
222
pt (tclass, jval) s = case tclass of "pos" -> s { pos = pos_to_str jval } "terms" -> s { terms = nub_sort $ jlist_to_slist jval} _ -> s
182
true
true
0
11
92
91
47
44
null
null
sukhmel/algorithms.intro
part_1/chapter_2/BubbleSort.hs
mit
main :: IO () main = mapM_ (perform ((<),(>))) [ bubbleSortBy'' , bubbleSortBy' , bubbleSortBy ]
162
main :: IO () main = mapM_ (perform ((<),(>))) [ bubbleSortBy'' , bubbleSortBy' , bubbleSortBy ]
162
main = mapM_ (perform ((<),(>))) [ bubbleSortBy'' , bubbleSortBy' , bubbleSortBy ]
148
false
true
0
8
81
52
28
24
null
null