hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1cc210f63a7b34ac0016a5d41aef20cfb04c3d76 | 5,502 | ex | Elixir | lib/typelixir/pattern_builder.ex | Typelixir/typelixir | 1f13d5a75eb337e67eb3881fb2844462237fd338 | [
"MIT"
] | 25 | 2020-08-31T13:25:03.000Z | 2022-01-19T20:14:20.000Z | lib/typelixir/pattern_builder.ex | Typelixir/typelixir | 1f13d5a75eb337e67eb3881fb2844462237fd338 | [
"MIT"
] | null | null | null | lib/typelixir/pattern_builder.ex | Typelixir/typelixir | 1f13d5a75eb337e67eb3881fb2844462237fd338 | [
"MIT"
] | 2 | 2021-06-23T21:33:07.000Z | 2021-08-31T14:18:35.000Z | defmodule Typelixir.PatternBuilder do
alias Typelixir.{TypeComparator}
# ---------------------------------------------------------------------------------------------------
# type -> returns the type of types defined on @spec
# -> returns the type of any pattern
def type({:list, _, [type]}, env), do: {:list, type(type, env)}
def type({:tuple, _, [types_list]}, env), do: {:tuple, Enum.map(types_list, fn type -> type(type, env) end)}
def type({:map, _, [key_type, value_type]}, env), do: {:map, {type(key_type, env), type(value_type, env)}}
def type({:_, _, _}, _env), do: :any
# @spec
def type({type, _, _}, _env) when (type in [:string, :boolean, :integer, :float, :atom, :any, :none]), do: type
# tuple more 2 elems
def type({:{}, _, list}, env), do: {:tuple, Enum.map(list, fn t -> type(t, env) end)}
# map
def type({:%{}, _, []}, _env), do: {:map, {:any, :any}}
def type({:%{}, _, list}, env) do
keys_values = Enum.map(list, fn {key, elem} -> {type(key, env), type(elem, env)} end)
{:map, {
elem(Enum.reduce(keys_values, fn {k_acc, _}, {k_e, _} -> {TypeComparator.supremum(k_acc, k_e), :_} end), 0),
Enum.map(keys_values, fn {_, v} -> v end)
}}
end
def type({:|, _, [operand1, operand2]}, env),
do: {:list, TypeComparator.supremum(type(operand1, env), type(operand2, env))}
# binding
def type({:=, _, [operand1, operand2]}, env), do: TypeComparator.supremum(type(operand1, env), type(operand2, env))
# variables
def type({value, _, _}, env) do
case env[:vars][value] do
nil -> :any
type -> type
end
end
# list
def type([], _env), do: {:list, :any}
def type(value, env) when is_list(value),
do: {:list, TypeComparator.supremum(Enum.map(value, fn t -> type(t, env) end))}
# tuple 2 elems
def type(value, env) when is_tuple(value),
do: {:tuple, Enum.map(Tuple.to_list(value), fn t -> type(t, env) end)}
# literals
def type(value, _env) do
cond do
value === nil -> :atom
is_boolean(value) -> :boolean
is_bitstring(value) -> :string
is_integer(value) -> :integer
is_float(value) -> :float
is_atom(value) -> :atom
true -> :any
end
end
# ---------------------------------------------------------------------------------------------------
# vars -> returns a map with the vars of params and the corresponding types of
# param_type_list, or {:error, "message"}
def vars(params, param_type_list) do
new_vars =
Enum.zip(params, param_type_list)
|> Enum.map(fn {var, type} -> get_vars(var, type) end)
|> List.flatten()
case new_vars[:error] do
nil ->
Enum.reduce_while(new_vars, %{}, fn {var, type}, acc ->
t = Map.get(acc, var)
cond do
t === nil or t === type -> {:cont, Map.put(acc, var, type)}
true -> {:halt, {:error, "Variable #{var} is already defined with type #{t}"}}
end
end)
message -> {:error, message}
end
end
defp get_vars(_, :any), do: []
defp get_vars({op, _, _}, type) when (op not in [:{}, :%{}, :=, :_, :|]), do: {op, type}
defp get_vars({:_, _, _}, _type), do: []
defp get_vars({:=, _, [operand1, operand2]}, type),
do: [get_vars(operand1, type), get_vars(operand2, type)]
defp get_vars([], {:list, _type}), do: []
defp get_vars(op, {:list, type}) when is_list(op), do: Enum.map(op, fn x -> get_vars(x, type) end)
defp get_vars({:|, _, [operand1, operand2]}, {:list, type}),
do: [get_vars(operand1, type), get_vars(operand2, {:list, type})]
defp get_vars(_, {:list, _}), do: {:error, "Parameters does not match type specification"}
defp get_vars([], _), do: {:error, "Parameters does not match type specification"}
defp get_vars({:|, _, _}, _), do: {:error, "Parameters does not match type specification"}
defp get_vars({:%{}, _, op}, {:map, {_, value_types}}), do: Enum.zip(op, value_types) |> Enum.map(fn {{_, value}, value_type} -> get_vars(value, value_type) end)
defp get_vars({:%{}, _, _}, _), do: {:error, "Parameters does not match type specification"}
defp get_vars(_, {:map, {_, _}}), do: {:error, "Parameters does not match type specification"}
defp get_vars({:{}, _, ops}, {:tuple, type_list}), do: get_vars_tuple(ops, type_list)
defp get_vars(ops, {:tuple, type_list}) when is_tuple(ops), do: get_vars_tuple(Tuple.to_list(ops), type_list)
defp get_vars({:{}, _, _}, _), do: {:error, "Parameters does not match type specification"}
defp get_vars(_, {:tuple, _}), do: {:error, "Parameters does not match type specification"}
defp get_vars(value, type) when (type in [:string, :boolean, :integer, :float, :atom, :any]) do
cond do
type === :any or
(is_boolean(value) and type === :boolean) or
(is_bitstring(value) and type === :string) or
(is_integer(value) and (type === :integer or type === :float)) or
(is_float(value) and type === :float) or
(is_atom(value) and type === :atom)
-> []
true -> {:error, "Parameters does not match type specification"}
end
end
defp get_vars(_, _), do: {:error, "Parameters does not match type specification"}
defp get_vars_tuple(ops, type_list) do
if length(ops) === length(type_list),
do: Enum.zip(ops, type_list) |> Enum.map(fn {var, type} -> get_vars(var, type) end),
else: {:error, "The number of parameters in tuple does not match the number of types"}
end
end | 36.68 | 163 | 0.577608 |
1cc21bd25c023ac3d684348be6d2dd964678fb30 | 1,727 | ex | Elixir | clients/api_gateway/lib/google_api/api_gateway/v1/model/apigateway_test_iam_permissions_request.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/api_gateway/lib/google_api/api_gateway/v1/model/apigateway_test_iam_permissions_request.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/api_gateway/lib/google_api/api_gateway/v1/model/apigateway_test_iam_permissions_request.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.APIGateway.V1.Model.ApigatewayTestIamPermissionsRequest do
@moduledoc """
Request message for `TestIamPermissions` method.
## Attributes
* `permissions` (*type:* `list(String.t)`, *default:* `nil`) - The set of permissions to check for the `resource`. Permissions with wildcards (such as '*' or 'storage.*') are not allowed. For more information see [IAM Overview](https://cloud.google.com/iam/docs/overview#permissions).
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:permissions => list(String.t()) | nil
}
field(:permissions, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.APIGateway.V1.Model.ApigatewayTestIamPermissionsRequest do
def decode(value, options) do
GoogleApi.APIGateway.V1.Model.ApigatewayTestIamPermissionsRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.APIGateway.V1.Model.ApigatewayTestIamPermissionsRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.744681 | 288 | 0.751013 |
1cc27a4f228ce39f4ebd47d53f60936de93c3f07 | 27,054 | exs | Elixir | lib/elixir/test/elixir/enum_test.exs | QuinnWilton/elixir | e42e3e55ca1561fe56b58d6f51c7b0faae6a7a1e | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/enum_test.exs | QuinnWilton/elixir | e42e3e55ca1561fe56b58d6f51c7b0faae6a7a1e | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/enum_test.exs | QuinnWilton/elixir | e42e3e55ca1561fe56b58d6f51c7b0faae6a7a1e | [
"Apache-2.0"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
defmodule EnumTest.List do
use ExUnit.Case, async: true
test :empty? do
assert Enum.empty?([])
refute Enum.empty?([1, 2, 3])
refute Enum.empty?(1..3)
end
test :member? do
assert Enum.member?([1, 2, 3], 2)
refute Enum.member?([], 0)
refute Enum.member?([1, 2, 3], 0)
assert Enum.member?(1..3, 2)
refute Enum.member?(1..3, 0)
end
test :count do
assert Enum.count([1, 2, 3]) == 3
assert Enum.count([]) == 0
end
test :count_fun do
assert Enum.count([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == 1
assert Enum.count([], fn(x) -> rem(x, 2) == 0 end) == 0
end
test :all? do
assert Enum.all?([2, 4, 6], fn(x) -> rem(x, 2) == 0 end)
refute Enum.all?([2, 3, 4], fn(x) -> rem(x, 2) == 0 end)
assert Enum.all?([2, 4, 6])
refute Enum.all?([2, nil, 4])
assert Enum.all?([])
end
test :any? do
refute Enum.any?([2, 4, 6], fn(x) -> rem(x, 2) == 1 end)
assert Enum.any?([2, 3, 4], fn(x) -> rem(x, 2) == 1 end)
refute Enum.any?([false, false, false])
assert Enum.any?([false, true, false])
refute Enum.any?([])
end
test :at do
assert Enum.at([2, 4, 6], 0) == 2
assert Enum.at([2, 4, 6], 2) == 6
assert Enum.at([2, 4, 6], 4) == nil
assert Enum.at([2, 4, 6], 4, :none) == :none
assert Enum.at([2, 4, 6], -2) == 4
assert Enum.at([2, 4, 6], -4) == nil
end
test :concat_1 do
assert Enum.concat([[1, [2], 3], [4], [5, 6]]) == [1, [2], 3, 4, 5, 6]
assert Enum.concat(1..3, []) == [1,2,3]
assert Enum.concat([[], []]) == []
assert Enum.concat([[]]) == []
assert Enum.concat([]) == []
assert Enum.concat([1..5, fn acc, _ -> acc end, [1]]) == [1,2,3,4,5,1]
end
test :concat_2 do
assert Enum.concat([], [1]) == [1]
assert Enum.concat([1, [2], 3], [4, 5]) == [1, [2], 3, 4, 5]
assert Enum.concat(1..3, []) == [1,2,3]
assert Enum.concat([], []) == []
assert Enum.concat(fn acc, _ -> acc end, [1]) == [1]
end
test :fetch! do
assert Enum.fetch!([2, 4, 6], 0) == 2
assert Enum.fetch!([2, 4, 6], 2) == 6
assert Enum.fetch!([2, 4, 6], -2) == 4
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!([2, 4, 6], 4)
end
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!([2, 4, 6], -4)
end
end
test :drop do
assert Enum.drop([1, 2, 3], 0) == [1, 2, 3]
assert Enum.drop([1, 2, 3], 1) == [2, 3]
assert Enum.drop([1, 2, 3], 2) == [3]
assert Enum.drop([1, 2, 3], 3) == []
assert Enum.drop([1, 2, 3], 4) == []
assert Enum.drop([1, 2, 3], -1) == [1, 2]
assert Enum.drop([1, 2, 3], -2) == [1]
assert Enum.drop([1, 2, 3], -4) == []
assert Enum.drop([], 3) == []
end
test :drop_while do
assert Enum.drop_while([1, 2, 3, 4, 3, 2, 1], fn(x) -> x <= 3 end) == [4, 3, 2, 1]
assert Enum.drop_while([1, 2, 3], fn(_) -> false end) == [1, 2, 3]
assert Enum.drop_while([1, 2, 3], fn(x) -> x <= 3 end) == []
assert Enum.drop_while([], fn(_) -> false end) == []
end
test :find do
assert Enum.find([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) == nil
assert Enum.find([2, 4, 6], 0, fn(x) -> rem(x, 2) == 1 end) == 0
assert Enum.find([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) == 3
end
test :find_value do
assert Enum.find_value([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) == nil
assert Enum.find_value([2, 4, 6], 0, fn(x) -> rem(x, 2) == 1 end) == 0
assert Enum.find_value([2, 3, 4], fn(x) -> rem(x, 2) == 1 end)
end
test :find_index do
assert Enum.find_index([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) == nil
assert Enum.find_index([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) == 1
end
test :each do
assert Enum.each([], fn(x) -> x end) == :ok
assert Enum.each([1, 2, 3], fn(x) -> Process.put(:enum_test_each, x * 2) end) == :ok
assert Process.get(:enum_test_each) == 6
after
Process.delete(:enum_test_each)
end
test :fetch do
assert Enum.fetch([2, 4, 6], 0) == { :ok, 2 }
assert Enum.fetch([2, 4, 6], 2) == { :ok, 6 }
assert Enum.fetch([2, 4, 6], 4) == :error
assert Enum.fetch([2, 4, 6], -2) == { :ok, 4}
assert Enum.fetch([2, 4, 6], -4) == :error
end
test :filter do
assert Enum.filter([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == [2]
assert Enum.filter([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) == [2, 4, 6]
end
test :filter_with_match do
assert Enum.filter([1, 2, 3], &match?(1, &1)) == [1]
assert Enum.filter([1, 2, 3], &match?(x when x < 3, &1)) == [1, 2]
assert Enum.filter([1, 2, 3], &match?(_, &1)) == [1, 2, 3]
end
test :filter_map do
assert Enum.filter_map([1, 2, 3], fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4]
assert Enum.filter_map([2, 4, 6], fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4, 8, 12]
end
test :flat_map do
assert Enum.flat_map([], fn(x) -> [x, x] end) == []
assert Enum.flat_map([1, 2, 3], fn(x) -> [x, x] end) == [1, 1, 2, 2, 3, 3]
assert Enum.flat_map([1, 2, 3], fn(x) -> x..x+1 end) == [1, 2, 2, 3, 3, 4]
end
test :flat_map_reduce do
assert Enum.flat_map_reduce([1, 2, 3], 0, &{ [&1, &2], &1 + &2 }) ==
{ [1, 0, 2, 1, 3, 3], 6 }
assert Enum.flat_map_reduce(1..100, 0, fn i, acc ->
if acc < 3, do: { [i], acc + 1 }, else: { :halt, acc }
end) == { [1,2,3], 3 }
end
test :reduce do
assert Enum.reduce([], 1, fn(x, acc) -> x + acc end) == 1
assert Enum.reduce([1, 2, 3], 1, fn(x, acc) -> x + acc end) == 7
assert Enum.reduce([1, 2, 3], fn(x, acc) -> x + acc end) == 6
assert_raise Enum.EmptyError, fn ->
Enum.reduce([], fn(x, acc) -> x + acc end)
end
end
test :intersperse do
assert Enum.intersperse([], true) == []
assert Enum.intersperse([1], true) == [1]
assert Enum.intersperse([1,2,3], true) == [1, true, 2, true, 3]
end
test :join do
assert Enum.join([], " = ") == ""
assert Enum.join([1, 2, 3], " = ") == "1 = 2 = 3"
assert Enum.join([1, "2", 3], " = ") == "1 = 2 = 3"
assert Enum.join([1, 2, 3]) == "123"
end
test :map_join do
assert Enum.map_join([], " = ", &(&1 * 2)) == ""
assert Enum.map_join([1, 2, 3], " = ", &(&1 * 2)) == "2 = 4 = 6"
assert Enum.map_join([1, 2, 3], &(&1 * 2)) == "246"
end
test :join_empty do
fun = fn (acc, _) -> acc end
assert Enum.join(fun, ".") == ""
assert Enum.map_join(fun, ".", &(&1 + 0)) == ""
end
test :map do
assert Enum.map([], fn x -> x * 2 end) == []
assert Enum.map([1, 2, 3], fn x -> x * 2 end) == [2, 4, 6]
end
test :map_reduce do
assert Enum.map_reduce([], 1, fn(x, acc) -> { x * 2, x + acc } end) == { [], 1 }
assert Enum.map_reduce([1, 2, 3], 1, fn(x, acc) -> { x * 2, x + acc } end) == { [2, 4, 6], 7 }
end
test :partition do
assert Enum.partition([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == { [2], [1, 3] }
assert Enum.partition([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) == { [2, 4, 6], [] }
end
test :reject do
assert Enum.reject([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == [1, 3]
assert Enum.reject([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) == []
end
test :reverse do
assert Enum.reverse([]) == []
assert Enum.reverse([1, 2, 3]) == [3, 2, 1]
assert Enum.reverse([1, 2, 3], [4, 5, 6]) == [3, 2, 1, 4, 5, 6]
end
test :scan do
assert Enum.scan([1,2,3,4,5], &(&1 + &2)) == [1,3,6,10,15]
assert Enum.scan([], &(&1 + &2)) == []
assert Enum.scan([1,2,3,4,5], 0, &(&1 + &2)) == [1,3,6,10,15]
assert Enum.scan([], 0, &(&1 + &2)) == []
end
test :shuffle do
# set a fixed seed so the test can be deterministic
:random.seed(1374, 347975, 449264)
assert Enum.shuffle([1, 2, 3, 4, 5]) == [2, 4, 1, 5, 3]
end
test :sort do
assert Enum.sort([5, 3, 2, 4, 1]) == [1, 2, 3, 4, 5]
assert Enum.sort([5, 3, 2, 4, 1], &(&1 > &2)) == [5, 4, 3, 2, 1]
end
test :split do
assert Enum.split([1, 2, 3], 0) == { [], [1, 2, 3] }
assert Enum.split([1, 2, 3], 1) == { [1], [2, 3] }
assert Enum.split([1, 2, 3], 2) == { [1, 2], [3] }
assert Enum.split([1, 2, 3], 3) == { [1, 2, 3], [] }
assert Enum.split([1, 2, 3], 4) == { [1, 2, 3], [] }
assert Enum.split([], 3) == { [], [] }
assert Enum.split([1, 2, 3], -1) == { [1, 2], [3] }
assert Enum.split([1, 2, 3], -2) == { [1], [2, 3] }
assert Enum.split([1, 2, 3], -3) == { [], [1, 2, 3] }
assert Enum.split([1, 2, 3], -10) == { [], [1, 2, 3] }
end
test :split_while do
assert Enum.split_while([1, 2, 3], fn(_) -> false end) == { [], [1, 2, 3] }
assert Enum.split_while([1, 2, 3], fn(_) -> true end) == { [1, 2, 3], [] }
assert Enum.split_while([1, 2, 3], fn(x) -> x > 2 end) == { [], [1, 2, 3] }
assert Enum.split_while([1, 2, 3], fn(x) -> x > 3 end) == { [], [1, 2, 3] }
assert Enum.split_while([1, 2, 3], fn(x) -> x < 3 end) == { [1, 2], [3] }
assert Enum.split_while([], fn(_) -> true end) == { [], [] }
end
test :take do
assert Enum.take([1, 2, 3], 0) == []
assert Enum.take([1, 2, 3], 1) == [1]
assert Enum.take([1, 2, 3], 2) == [1, 2]
assert Enum.take([1, 2, 3], 3) == [1, 2, 3]
assert Enum.take([1, 2, 3], 4) == [1, 2, 3]
assert Enum.take([1, 2, 3], -1) == [3]
assert Enum.take([1, 2, 3], -2) == [2, 3]
assert Enum.take([1, 2, 3], -4) == [1, 2, 3]
assert Enum.take([], 3) == []
end
test :take_every do
assert Enum.take_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 2) == [1, 3, 5, 7, 9]
assert Enum.take_every([], 2) == []
assert Enum.take_every([1, 2], 2) == [1]
assert Enum.take_every([1, 2, 3], 0) == []
end
test :take_while do
assert Enum.take_while([1, 2, 3], fn(x) -> x > 3 end) == []
assert Enum.take_while([1, 2, 3], fn(x) -> x <= 1 end) == [1]
assert Enum.take_while([1, 2, 3], fn(x) -> x <= 3 end) == [1, 2, 3]
assert Enum.take_while([], fn(_) -> true end) == []
end
test :to_list do
assert Enum.to_list([]) == []
assert Enum.to_list(1 .. 3) == [1, 2, 3]
end
test :uniq do
assert Enum.uniq([1, 2, 3, 2, 1]) == [1, 2, 3]
assert Enum.uniq([1, 2, 3, 2, 1], fn x -> x end) == [1, 2, 3]
end
test :zip do
assert Enum.zip([:a, :b], [1, 2]) == [{:a, 1}, {:b, 2}]
assert Enum.zip([:a, :b], [1, 2, 3, 4]) == [{:a, 1}, {:b, 2}]
assert Enum.zip([:a, :b, :c, :d], [1, 2]) == [{:a, 1}, {:b, 2}]
assert Enum.zip([], [1]) == []
assert Enum.zip([1], []) == []
assert Enum.zip([], []) == []
end
test :with_index do
assert Enum.with_index([]) == []
assert Enum.with_index([1,2,3]) == [{1,0},{2,1},{3,2}]
end
test :max do
assert Enum.max([1]) == 1
assert Enum.max([1, 2, 3]) == 3
assert Enum.max([1, [], :a, {}]) == []
assert_raise Enum.EmptyError, fn ->
Enum.max([])
end
end
test :max_by do
assert Enum.max_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end) == "aaa"
assert_raise Enum.EmptyError, fn ->
Enum.max_by([], fn(x) -> String.length(x) end)
end
end
test :min do
assert Enum.min([1]) == 1
assert Enum.min([1, 2, 3]) == 1
assert Enum.min([[], :a, {}]) == :a
assert_raise Enum.EmptyError, fn ->
Enum.min([])
end
end
test :min_by do
assert Enum.min_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end) == "a"
assert_raise Enum.EmptyError, fn ->
Enum.min_by([], fn(x) -> String.length(x) end)
end
end
test :chunk do
assert Enum.chunk([1, 2, 3, 4, 5], 2) == [[1, 2], [3, 4]]
assert Enum.chunk([1, 2, 3, 4, 5], 2, 2, [6]) == [[1, 2], [3, 4], [5, 6]]
assert Enum.chunk([1, 2, 3, 4, 5, 6], 3, 2) == [[1, 2, 3], [3, 4, 5]]
assert Enum.chunk([1, 2, 3, 4, 5, 6], 2, 3) == [[1, 2], [4, 5]]
assert Enum.chunk([1, 2, 3, 4, 5, 6], 3, 2, []) == [[1, 2, 3], [3, 4, 5], [5, 6]]
assert Enum.chunk([1, 2, 3, 4, 5, 6], 3, 3, []) == [[1, 2, 3], [4, 5, 6]]
assert Enum.chunk([1, 2, 3, 4, 5], 4, 4, 6..10) == [[1, 2, 3, 4], [5, 6, 7, 8]]
end
test :chunk_by do
assert Enum.chunk_by([1, 2, 2, 3, 4, 4, 6, 7, 7], &(rem(&1, 2) == 1)) == [[1], [2, 2], [3], [4, 4, 6], [7, 7]]
assert Enum.chunk_by([1, 2, 3, 4], fn _ -> true end) == [[1, 2, 3, 4]]
assert Enum.chunk_by([], fn _ -> true end) == []
assert Enum.chunk_by([1], fn _ -> true end) == [[1]]
end
test :slice do
assert Enum.slice([1,2,3,4,5], 0, 0) == []
assert Enum.slice([1,2,3,4,5], 0, 1) == [1]
assert Enum.slice([1,2,3,4,5], 0, 2) == [1, 2]
assert Enum.slice([1,2,3,4,5], 1, 2) == [2, 3]
assert Enum.slice([1,2,3,4,5], 1, 0) == []
assert Enum.slice([1,2,3,4,5], 2, 5) == [3, 4, 5]
assert Enum.slice([1,2,3,4,5], 5, 5) == []
assert Enum.slice([1,2,3,4,5], 6, 5) == nil
assert Enum.slice([1,2,3,4,5], 6, 0) == nil
assert Enum.slice([1,2,3,4,5], -6, 0) == nil
assert Enum.slice([1,2,3,4,5], -6, 5) == nil
assert Enum.slice([1,2,3,4,5], -2, 5) == [4, 5]
assert Enum.slice([1,2,3,4,5], -3, 1) == [3]
end
test :slice_range do
assert Enum.slice([1,2,3,4,5], 0..0) == [1]
assert Enum.slice([1,2,3,4,5], 0..1) == [1, 2]
assert Enum.slice([1,2,3,4,5], 0..2) == [1, 2, 3]
assert Enum.slice([1,2,3,4,5], 1..2) == [2, 3]
assert Enum.slice([1,2,3,4,5], 1..0) == nil
assert Enum.slice([1,2,3,4,5], 2..5) == [3, 4, 5]
assert Enum.slice([1,2,3,4,5], 4..4) == [5]
assert Enum.slice([1,2,3,4,5], 5..5) == []
assert Enum.slice([1,2,3,4,5], 6..5) == nil
assert Enum.slice([1,2,3,4,5], 6..0) == nil
assert Enum.slice([1,2,3,4,5], -6..0) == nil
assert Enum.slice([1,2,3,4,5], -6..5) == nil
assert Enum.slice([1,2,3,4,5], -5..-1) == [1, 2, 3, 4, 5]
assert Enum.slice([1,2,3,4,5], -5..-3) == [1, 2, 3]
assert Enum.slice([1,2,3,4,5], -6..-1) == nil
assert Enum.slice([1,2,3,4,5], -6..-3) == nil
end
end
defmodule EnumTest.Range do
use ExUnit.Case, async: true
test :all? do
range = 0..5
refute Enum.all?(range, fn(x) -> rem(x, 2) == 0 end)
range = 0..1
assert Enum.all?(range, fn(x) -> x < 2 end)
assert Enum.all?(range)
range = 1..0
assert Enum.all?(range)
end
test :any? do
range = 0..5
refute Enum.any?(range, &(&1 > 10))
range = 0..5
assert Enum.any?(range, &(&1 > 3))
range = 1..0
assert Enum.any?(range)
end
test :fetch! do
assert Enum.fetch!(2..6, 0) == 2
assert Enum.fetch!(2..6, 4) == 6
assert Enum.fetch!(2..6, -1) == 6
assert Enum.fetch!(2..6, -2) == 5
assert Enum.fetch!(-2..-6, 0) == -2
assert Enum.fetch!(-2..-6, 4) == -6
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!(2..6, 8)
end
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!(-2..-6, 8)
end
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!(2..6, -8)
end
end
test :count do
range = 1..5
assert Enum.count(range) == 5
range = 1..1
assert Enum.count(range) == 1
end
test :count_fun do
range = 1..5
assert Enum.count(range, fn(x) -> rem(x, 2) == 0 end) == 2
range = 1..1
assert Enum.count(range, fn(x) -> rem(x, 2) == 0 end) == 0
end
test :drop do
range = 1..3
assert Enum.drop(range, 0) == [1, 2, 3]
assert Enum.drop(range, 1) == [2, 3]
assert Enum.drop(range, 2) == [3]
assert Enum.drop(range, 3) == []
assert Enum.drop(range, 4) == []
assert Enum.drop(range, -1) == [1, 2]
assert Enum.drop(range, -2) == [1]
assert Enum.drop(range, -4) == []
range = 1..0
assert Enum.drop(range, 3) == []
end
test :drop_while do
range = 0..6
assert Enum.drop_while(range, fn(x) -> x <= 3 end) == [4, 5, 6]
assert Enum.drop_while(range, fn(_) -> false end) == [0, 1, 2, 3, 4, 5, 6]
range = 0..3
assert Enum.drop_while(range, fn(x) -> x <= 3 end) == []
range = 1..0
assert Enum.drop_while(range, fn(_) -> false end) == [1, 0]
end
test :find do
range = 2..6
assert Enum.find(range, fn(x) -> rem(x, 2) == 0 end) == 2
assert Enum.find(range, fn(x) -> rem(x, 2) == 1 end) == 3
assert Enum.find(range, fn _ -> false end) == nil
assert Enum.find(range, 0, fn _ -> false end) == 0
end
test :find_value do
range = 2..6
assert Enum.find_value(range, fn(x) -> rem(x, 2) == 1 end)
end
test :find_index do
range = 2..6
assert Enum.find_index(range, fn(x) -> rem(x, 2) == 1 end) == 1
end
test :empty? do
range = 1..0
refute Enum.empty?(range)
range = 1..2
refute Enum.empty?(range)
end
test :each do
try do
range = 1..0
assert Enum.each(range, fn(x) -> x end) == :ok
range = 1..3
assert Enum.each(range, fn(x) -> Process.put(:enum_test_each, x * 2) end) == :ok
assert Process.get(:enum_test_each) == 6
after
Process.delete(:enum_test_each)
end
try do
range = -1..-3
assert Enum.each(range, fn(x) -> Process.put(:enum_test_each, x * 2) end) == :ok
assert Process.get(:enum_test_each) == -6
after
Process.delete(:enum_test_each)
end
end
test :filter do
range = 1..3
assert Enum.filter(range, fn(x) -> rem(x, 2) == 0 end) == [2]
range = 1..6
assert Enum.filter(range, fn(x) -> rem(x, 2) == 0 end) == [2, 4, 6]
end
test :filter_with_match do
range = 1..3
assert Enum.filter(range, &match?(1, &1)) == [1]
assert Enum.filter(range, &match?(x when x < 3, &1)) == [1, 2]
assert Enum.filter(range, &match?(_, &1)) == [1, 2, 3]
end
test :filter_map do
range = 1..3
assert Enum.filter_map(range, fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4]
range = 2..6
assert Enum.filter_map(range, fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4, 8, 12]
end
test :flat_map do
range = 1..3
assert Enum.flat_map(range, fn(x) -> [x, x] end) == [1, 1, 2, 2, 3, 3]
end
test :reduce do
range = 1..0
assert Enum.reduce(range, 1, fn(x, acc) -> x + acc end) == 2
range = 1..3
assert Enum.reduce(range, 1, fn(x, acc) -> x + acc end) == 7
range = 1..3
assert Enum.reduce(range, fn(x, acc) -> x + acc end) == 6
end
test :reject do
range = 1..3
assert Enum.reject(range, fn(x) -> rem(x, 2) == 0 end) == [1, 3]
range = 1..6
assert Enum.reject(range, fn(x) -> rem(x, 2) == 0 end) == [1, 3, 5]
end
test :intersperse do
range = 1..0
assert Enum.intersperse(range, true) == [1, true, 0]
range = 1..3
assert Enum.intersperse(range, false) == [1, false, 2, false, 3]
end
test :join do
range = 1..0
assert Enum.join(range, " = ") == "1 = 0"
range = 1..3
assert Enum.join(range, " = ") == "1 = 2 = 3"
assert Enum.join(range) == "123"
end
test :map_join do
range = 1..0
assert Enum.map_join(range, " = ", &(&1 * 2)) == "2 = 0"
range = 1..3
assert Enum.map_join(range, " = ", &(&1 * 2)) == "2 = 4 = 6"
assert Enum.map_join(range, &(&1 * 2)) == "246"
end
test :map do
range = 1..3
assert Enum.map(range, fn x -> x * 2 end) == [2, 4, 6]
range = -1..-3
assert Enum.map(range, fn x -> x * 2 end) == [-2, -4, -6]
end
test :map_reduce do
range = 1..0
assert Enum.map_reduce(range, 1, fn(x, acc) -> { x * 2, x + acc } end) == { [2, 0], 2 }
range = 1..3
assert Enum.map_reduce(range, 1, fn(x, acc) -> { x * 2, x + acc } end) == { [2, 4, 6], 7 }
end
test :partition do
range = 1..3
assert Enum.partition(range, fn(x) -> rem(x, 2) == 0 end) == { [2], [1, 3] }
end
test :scan do
assert Enum.scan(1..5, &(&1 + &2)) == [1,3,6,10,15]
assert Enum.scan(1..5, 0, &(&1 + &2)) == [1,3,6,10,15]
end
test :shuffle do
# set a fixed seed so the test can be deterministic
:random.seed(1374, 347975, 449264)
assert Enum.shuffle(1..5) == [2, 4, 1, 5, 3]
end
test :sort do
assert Enum.sort(3..1) == [1, 2, 3]
assert Enum.sort(2..1) == [1, 2]
assert Enum.sort(1..1) == [1]
assert Enum.sort(3..1, &(&1 > &2)) == [3, 2, 1]
assert Enum.sort(2..1, &(&1 > &2)) == [2, 1]
assert Enum.sort(1..1, &(&1 > &2)) == [1]
end
test :split do
range = 1..3
assert Enum.split(range, 0) == { [], [1, 2, 3] }
assert Enum.split(range, 1) == { [1], [2, 3] }
assert Enum.split(range, 2) == { [1, 2], [3] }
assert Enum.split(range, 3) == { [1, 2, 3], [] }
assert Enum.split(range, 4) == { [1, 2, 3], [] }
assert Enum.split(range, -1) == { [1, 2], [3] }
assert Enum.split(range, -2) == { [1], [2, 3] }
assert Enum.split(range, -3) == { [], [1, 2, 3] }
assert Enum.split(range, -10) == { [], [1, 2, 3] }
range = 1..0
assert Enum.split(range, 3) == { [1, 0], [] }
end
test :split_while do
range = 1..3
assert Enum.split_while(range, fn(_) -> false end) == { [], [1, 2, 3] }
assert Enum.split_while(range, fn(_) -> true end) == { [1, 2, 3], [] }
assert Enum.split_while(range, fn(x) -> x > 2 end) == { [], [1, 2, 3] }
assert Enum.split_while(range, fn(x) -> x > 3 end) == { [], [1, 2, 3] }
assert Enum.split_while(range, fn(x) -> x < 3 end) == { [1, 2], [3] }
range = 1..0
assert Enum.split_while(range, fn(_) -> true end) == { [1, 0], [] }
end
test :take do
range = 1..3
assert Enum.take(range, 0) == []
assert Enum.take(range, 1) == [1]
assert Enum.take(range, 2) == [1, 2]
assert Enum.take(range, 3) == [1, 2, 3]
assert Enum.take(range, 4) == [1, 2, 3]
assert Enum.take(range, -1) == [3]
assert Enum.take(range, -2) == [2, 3]
assert Enum.take(range, -4) == [1, 2, 3]
range = 1..0
assert Enum.take(range, 3) == [1, 0]
end
test :take_every do
assert Enum.take_every(1..10, 2) == [1, 3, 5, 7, 9]
assert Enum.take_every(1..2, 2) == [1]
assert Enum.take_every(1..3, 0) == []
end
test :take_while do
range = 1..3
assert Enum.take_while(range, fn(x) -> x > 3 end) == []
assert Enum.take_while(range, fn(x) -> x <= 1 end) == [1]
assert Enum.take_while(range, fn(x) -> x <= 3 end) == [1, 2, 3]
assert Enum.take_while([], fn(_) -> true end) == []
end
test :uniq do
assert Enum.uniq(1..3) == [1, 2, 3]
assert Enum.uniq(1..3, fn x -> x end) == [1, 2, 3]
end
test :zip do
assert Enum.zip([:a, :b], 1..2) == [{:a, 1}, {:b, 2}]
assert Enum.zip([:a, :b], 1..4) == [{:a, 1}, {:b, 2}]
assert Enum.zip([:a, :b, :c, :d], 1..2) == [{:a, 1}, {:b, 2}]
assert Enum.zip(1..2, [:a, :b]) == [{1, :a}, {2, :b}]
assert Enum.zip(1..4, [:a, :b]) == [{1, :a}, {2, :b}]
assert Enum.zip(1..2, [:a, :b, :c, :d]) == [{1, :a}, {2, :b}]
assert Enum.zip(1..2, 1..2) == [{1, 1}, {2, 2}]
assert Enum.zip(1..4, 1..2) == [{1, 1}, {2, 2}]
assert Enum.zip(1..2, 1..4) == [{1, 1}, {2, 2}]
end
test :with_index do
assert Enum.with_index(1..3) == [{1,0},{2,1},{3,2}]
end
test :max do
assert Enum.max(1..1) == 1
assert Enum.max(1..3) == 3
assert Enum.max(3..1) == 3
end
test :max_by do
assert Enum.max_by(1..1, fn(x) -> :math.pow(-2, x) end) == 1
assert Enum.max_by(1..3, fn(x) -> :math.pow(-2, x) end) == 2
end
test :min do
assert Enum.min([1]) == 1
assert Enum.min([1, 2, 3]) == 1
assert Enum.min([[], :a, {}]) == :a
end
test :min_by do
assert Enum.min_by(1..1, fn(x) -> :math.pow(-2, x) end) == 1
assert Enum.min_by(1..3, fn(x) -> :math.pow(-2, x) end) == 3
end
test :chunk do
assert Enum.chunk(1..5, 2) == [[1, 2], [3, 4]]
assert Enum.chunk(1..5, 2, 2, [6]) == [[1, 2], [3, 4], [5, 6]]
assert Enum.chunk(1..6, 3, 2) == [[1, 2, 3], [3, 4, 5]]
assert Enum.chunk(1..6, 2, 3) == [[1, 2], [4, 5]]
assert Enum.chunk(1..6, 3, 2, []) == [[1, 2, 3], [3, 4, 5], [5, 6]]
assert Enum.chunk(1..5, 4, 4, 6..10) == [[1, 2, 3, 4], [5, 6, 7, 8]]
end
test :chunk_by do
assert Enum.chunk_by(1..4, fn _ -> true end) == [[1, 2, 3, 4]]
assert Enum.chunk_by(1..4, &(rem(&1, 2) == 1)) == [[1], [2], [3], [4]]
end
test :slice do
assert Enum.slice(1..5, 0, 0) == []
assert Enum.slice(1..5, 0, 1) == [1]
assert Enum.slice(1..5, 0, 2) == [1, 2]
assert Enum.slice(1..5, 1, 2) == [2, 3]
assert Enum.slice(1..5, 1, 0) == []
assert Enum.slice(1..5, 2, 5) == [3, 4, 5]
assert Enum.slice(1..5, 5, 5) == []
assert Enum.slice(1..5, 6, 5) == nil
assert Enum.slice(1..5, 6, 0) == nil
assert Enum.slice(1..5, -6, 0) == nil
assert Enum.slice(1..5, -6, 5) == nil
assert Enum.slice(1..5, -2, 5) == [4, 5]
assert Enum.slice(1..5, -3, 1) == [3]
end
test :slice_range do
assert Enum.slice(1..5, 0..0) == [1]
assert Enum.slice(1..5, 0..1) == [1, 2]
assert Enum.slice(1..5, 0..2) == [1, 2, 3]
assert Enum.slice(1..5, 1..2) == [2, 3]
assert Enum.slice(1..5, 1..0) == nil
assert Enum.slice(1..5, 2..5) == [3, 4, 5]
assert Enum.slice(1..5, 4..4) == [5]
assert Enum.slice(1..5, 5..5) == []
assert Enum.slice(1..5, 6..5) == nil
assert Enum.slice(1..5, 6..0) == nil
assert Enum.slice(1..5, -6..0) == nil
assert Enum.slice(1..5, -6..5) == nil
assert Enum.slice(1..5, -5..-1) == [1, 2, 3, 4, 5]
assert Enum.slice(1..5, -5..-3) == [1, 2, 3]
assert Enum.slice(1..5, -6..-1) == nil
assert Enum.slice(1..5, -6..-3) == nil
end
test :reverse_range do
assert Enum.reverse([]) == []
assert Enum.reverse([1, 2, 3]) == [3, 2, 1]
assert Enum.reverse([1, 2, 3], [4, 5, 6]) == [3, 2, 1, 4, 5, 6]
assert Enum.reverse(0..0) == [0]
assert Enum.reverse(1..3) == [3, 2, 1]
assert Enum.reverse(1..3, 4..6) == [3, 2, 1, 4, 5, 6]
assert Enum.reverse([1, 2, 3], 4..6) == [3, 2, 1, 4, 5, 6]
assert Enum.reverse(1..3, [4, 5, 6]) == [3, 2, 1, 4, 5, 6]
end
end
# Tests for Enum against enumerables other than lists/ranges: custom
# Enumerable implementations (URI.query_decoder, HashDict) and IO streams,
# plus laziness guarantees of Enum.take/2.
defmodule EnumTest.Others do
use ExUnit.Case, async: true
import ExUnit.CaptureIO
test "reverse custom enumerable" do
assert Enum.reverse(URI.query_decoder("foo=bar&baz=bat")) ==
[{ "baz", "bat" }, { "foo", "bar" }]
end
# NOTE(review): HashDict is deprecated in modern Elixir — this test targets
# an older release where it was the standard dict implementation.
test "sort with custom enumerable" do
d = HashDict.new [a: 7, another: 4, some: 2, multi_word: 3,
this: 9, punctuation: 1, is: 8, sentence: 6, with: 5]
assert Enum.sort(d, fn({_, v1}, {_, v2}) -> v1 > v2 end) ==
[this: 9, is: 8, a: 7, sentence: 6, with: 5,
another: 4, multi_word: 3, some: 2, punctuation: 1]
end
# take/2 must be lazy: only the first element of the infinite stream may be
# forced, so exactly one "1\n" is printed.
test "take with side effects" do
stream = Stream.unfold(1, fn x -> IO.puts x; { x, x + 1 } end)
assert capture_io(fn ->
Enum.take(stream, 1)
end) == "1\n"
end
# Reads a one-line fixture: the first take consumes the only line, the second
# take on the same (stateful) IO stream finds nothing left.
test "takes does not consume next without a need" do
import PathHelpers
File.open!(fixture_path("one-liner.txt"), [], fn file ->
iterator = IO.stream(file, :line)
assert Enum.take(iterator, 1) == ["ONE"]
assert Enum.take(iterator, 5) == []
end)
end
# take(_, 0) must never touch the underlying file — the fixture path does not
# exist, so any eager read would raise.
test "take with no item works as no-op" do
import PathHelpers
iterator = File.stream!(fixture_path("unknown.txt"))
assert Enum.take(iterator, 0) == []
assert Enum.take(iterator, 0) == []
assert Enum.take(iterator, 0) == []
assert Enum.take(iterator, 0) == []
end
end
| 31.025229 | 114 | 0.499335 |
1cc2bafba9b2b47f94bdb5db7497e383f0cb36d5 | 36,609 | exs | Elixir | lib/elixir/test/elixir/enum_test.exs | pap/elixir | c803afe90c766663823c74397fb23ed40ec52c5b | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/enum_test.exs | pap/elixir | c803afe90c766663823c74397fb23ed40ec52c5b | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/enum_test.exs | pap/elixir | c803afe90c766663823c74397fb23ed40ec52c5b | [
"Apache-2.0"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
# Exhaustive tests for the Enum module over lists (with a few range cases
# mixed in). Each test pins exact return values, including empty-input,
# negative-index, and error-raising behavior.
defmodule EnumTest.List do
use ExUnit.Case, async: true
test "empty?" do
assert Enum.empty?([])
refute Enum.empty?([1, 2, 3])
refute Enum.empty?(1..3)
end
test "member?" do
assert Enum.member?([1, 2, 3], 2)
refute Enum.member?([], 0)
refute Enum.member?([1, 2, 3], 0)
assert Enum.member?(1..3, 2)
refute Enum.member?(1..3, 0)
end
test "count" do
assert Enum.count([1, 2, 3]) == 3
assert Enum.count([]) == 0
end
test "count fun" do
assert Enum.count([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == 1
assert Enum.count([], fn(x) -> rem(x, 2) == 0 end) == 0
end
test "all?" do
assert Enum.all?([2, 4, 6], fn(x) -> rem(x, 2) == 0 end)
refute Enum.all?([2, 3, 4], fn(x) -> rem(x, 2) == 0 end)
assert Enum.all?([2, 4, 6])
refute Enum.all?([2, nil, 4])
assert Enum.all?([])
end
test "any?" do
refute Enum.any?([2, 4, 6], fn(x) -> rem(x, 2) == 1 end)
assert Enum.any?([2, 3, 4], fn(x) -> rem(x, 2) == 1 end)
refute Enum.any?([false, false, false])
assert Enum.any?([false, true, false])
assert Enum.any?([:foo, false, false])
refute Enum.any?([false, nil, false])
refute Enum.any?([])
end
# at/2 returns nil (or the given default) out of bounds; negative indexes
# count from the end.
test "at" do
assert Enum.at([2, 4, 6], 0) == 2
assert Enum.at([2, 4, 6], 2) == 6
assert Enum.at([2, 4, 6], 4) == nil
assert Enum.at([2, 4, 6], 4, :none) == :none
assert Enum.at([2, 4, 6], -2) == 4
assert Enum.at([2, 4, 6], -4) == nil
end
test "concat/1" do
assert Enum.concat([[1, [2], 3], [4], [5, 6]]) == [1, [2], 3, 4, 5, 6]
assert Enum.concat(1..3, []) == [1, 2, 3]
assert Enum.concat([[], []]) == []
assert Enum.concat([[]]) == []
assert Enum.concat([]) == []
assert Enum.concat([1..5, fn acc, _ -> acc end, [1]]) == [1, 2, 3, 4, 5, 1]
end
test "concat/2" do
assert Enum.concat([], [1]) == [1]
assert Enum.concat([1, [2], 3], [4, 5]) == [1, [2], 3, 4, 5]
assert Enum.concat(1..3, []) == [1, 2, 3]
assert Enum.concat([], []) == []
assert Enum.concat(fn acc, _ -> acc end, [1]) == [1]
end
# fetch!/2 raises Enum.OutOfBoundsError instead of returning nil.
test "fetch!" do
assert Enum.fetch!([2, 4, 6], 0) == 2
assert Enum.fetch!([2, 4, 6], 2) == 6
assert Enum.fetch!([2, 4, 6], -2) == 4
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!([2, 4, 6], 4)
end
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!([2, 4, 6], -4)
end
end
# dedup collapses only *consecutive* duplicates and uses strict (===)
# comparison, so 1 and 1.0 are kept distinct.
test "dedup" do
assert Enum.dedup([1, 1, 2, 1, 1, 2, 1]) == [1, 2, 1, 2, 1]
assert Enum.dedup([2, 1, 1, 2, 1]) == [2, 1, 2, 1]
assert Enum.dedup([1, 2, 3, 4]) == [1, 2, 3, 4]
assert Enum.dedup([1, 1.0, 2.0, 2]) == [1, 1.0, 2.0, 2]
assert Enum.dedup([]) == []
assert Enum.dedup([nil, nil, true, {:value, true}]) == [nil, true, {:value, true}]
assert Enum.dedup([nil]) == [nil]
end
test "dedup by" do
assert Enum.dedup_by([{1, :x}, {2, :y}, {2, :z}, {1, :x}], fn {x, _} -> x end)
== [{1, :x}, {2, :y}, {1, :x}]
assert Enum.dedup_by([5, 1, 2, 3, 2, 1], fn x -> x > 2 end) == [5, 1, 3, 2]
end
# Negative drop counts drop from the end of the list.
test "drop" do
assert Enum.drop([1, 2, 3], 0) == [1, 2, 3]
assert Enum.drop([1, 2, 3], 1) == [2, 3]
assert Enum.drop([1, 2, 3], 2) == [3]
assert Enum.drop([1, 2, 3], 3) == []
assert Enum.drop([1, 2, 3], 4) == []
assert Enum.drop([1, 2, 3], -1) == [1, 2]
assert Enum.drop([1, 2, 3], -2) == [1]
assert Enum.drop([1, 2, 3], -4) == []
assert Enum.drop([], 3) == []
end
test "drop while" do
assert Enum.drop_while([1, 2, 3, 4, 3, 2, 1], fn(x) -> x <= 3 end) == [4, 3, 2, 1]
assert Enum.drop_while([1, 2, 3], fn(_) -> false end) == [1, 2, 3]
assert Enum.drop_while([1, 2, 3], fn(x) -> x <= 3 end) == []
assert Enum.drop_while([], fn(_) -> false end) == []
end
test "find" do
assert Enum.find([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) == nil
assert Enum.find([2, 4, 6], 0, fn(x) -> rem(x, 2) == 1 end) == 0
assert Enum.find([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) == 3
end
test "find value" do
assert Enum.find_value([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) == nil
assert Enum.find_value([2, 4, 6], 0, fn(x) -> rem(x, 2) == 1 end) == 0
assert Enum.find_value([2, 3, 4], fn(x) -> rem(x, 2) == 1 end)
end
test "find index" do
assert Enum.find_index([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) == nil
assert Enum.find_index([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) == 1
end
# Uses the process dictionary to observe the side effect; the `after` clause
# cleans it up regardless of assertion outcome.
test "each" do
assert Enum.each([], fn(x) -> x end) == :ok
assert Enum.each([1, 2, 3], fn(x) -> Process.put(:enum_test_each, x * 2) end) == :ok
assert Process.get(:enum_test_each) == 6
after
Process.delete(:enum_test_each)
end
test "fetch" do
assert Enum.fetch([2, 4, 6], 0) == {:ok, 2}
assert Enum.fetch([2, 4, 6], 2) == {:ok, 6}
assert Enum.fetch([2, 4, 6], 4) == :error
assert Enum.fetch([2, 4, 6], -2) == {:ok, 4}
assert Enum.fetch([2, 4, 6], -4) == :error
end
test "filter" do
assert Enum.filter([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == [2]
assert Enum.filter([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) == [2, 4, 6]
end
test "filter with match" do
assert Enum.filter([1, 2, 3], &match?(1, &1)) == [1]
assert Enum.filter([1, 2, 3], &match?(x when x < 3, &1)) == [1, 2]
assert Enum.filter([1, 2, 3], &match?(_, &1)) == [1, 2, 3]
end
test "filter map" do
assert Enum.filter_map([1, 2, 3], fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4]
assert Enum.filter_map([2, 4, 6], fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4, 8, 12]
end
test "flat map" do
assert Enum.flat_map([], fn(x) -> [x, x] end) == []
assert Enum.flat_map([1, 2, 3], fn(x) -> [x, x] end) == [1, 1, 2, 2, 3, 3]
assert Enum.flat_map([1, 2, 3], fn(x) -> x..x+1 end) == [1, 2, 2, 3, 3, 4]
end
# flat_map_reduce halts early when the mapper returns {:halt, acc}.
test "flat map reduce" do
assert Enum.flat_map_reduce([1, 2, 3], 0, &{[&1, &2], &1 + &2}) ==
{[1, 0, 2, 1, 3, 3], 6}
assert Enum.flat_map_reduce(1..100, 0, fn i, acc ->
if acc < 3, do: {[i], acc + 1}, else: {:halt, acc}
end) == {[1, 2, 3], 3}
end
test "group by" do
assert Enum.group_by([], fn -> nil end) == %{}
assert Enum.group_by(1..6, &rem(&1, 3)) ==
%{0 => [6, 3], 1 => [4, 1], 2 => [5, 2]}
result = Enum.group_by(1..6, %{3 => :default}, &rem(&1, 3))
assert result[0] == [6, 3]
assert result[3] == :default
end
test "into" do
assert Enum.into([a: 1, b: 2], %{}) == %{a: 1, b: 2}
assert Enum.into([a: 1, b: 2], %{c: 3}) == %{a: 1, b: 2, c: 3}
assert Enum.into(%{a: 1, b: 2}, []) == [a: 1, b: 2]
assert Enum.into([1, 2, 3], "numbers: ", &to_string/1) == "numbers: 123"
end
test "intersperse" do
assert Enum.intersperse([], true) == []
assert Enum.intersperse([1], true) == [1]
assert Enum.intersperse([1, 2, 3], true) == [1, true, 2, true, 3]
end
test "join" do
assert Enum.join([], " = ") == ""
assert Enum.join([1, 2, 3], " = ") == "1 = 2 = 3"
assert Enum.join([1, "2", 3], " = ") == "1 = 2 = 3"
assert Enum.join([1, 2, 3]) == "123"
assert Enum.join(["", "", 1, 2, "", 3, "", "\n"], ";") == ";;1;2;;3;;\n"
assert Enum.join([""]) == ""
end
test "map join" do
assert Enum.map_join([], " = ", &(&1 * 2)) == ""
assert Enum.map_join([1, 2, 3], " = ", &(&1 * 2)) == "2 = 4 = 6"
assert Enum.map_join([1, 2, 3], &(&1 * 2)) == "246"
assert Enum.map_join(["", "", 1, 2, "", 3, "", "\n"], ";", &(&1)) == ";;1;2;;3;;\n"
assert Enum.map_join([""], "", &(&1)) == ""
end
# A bare two-arity function is a valid (empty) reducer-style enumerable here.
test "join empty" do
fun = fn (acc, _) -> acc end
assert Enum.join(fun, ".") == ""
assert Enum.map_join(fun, ".", &(&1 + 0)) == ""
end
test "map" do
assert Enum.map([], fn x -> x * 2 end) == []
assert Enum.map([1, 2, 3], fn x -> x * 2 end) == [2, 4, 6]
end
test "map reduce" do
assert Enum.map_reduce([], 1, fn(x, acc) -> {x * 2, x + acc} end) == {[], 1}
assert Enum.map_reduce([1, 2, 3], 1, fn(x, acc) -> {x * 2, x + acc} end) == {[2, 4, 6], 7}
end
test "partition" do
assert Enum.partition([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == {[2], [1, 3]}
assert Enum.partition([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) == {[2, 4, 6], []}
end
test "reduce" do
assert Enum.reduce([], 1, fn(x, acc) -> x + acc end) == 1
assert Enum.reduce([1, 2, 3], 1, fn(x, acc) -> x + acc end) == 7
assert Enum.reduce([1, 2, 3], fn(x, acc) -> x + acc end) == 6
assert_raise Enum.EmptyError, fn ->
Enum.reduce([], fn(x, acc) -> x + acc end)
end
end
test "reduce while" do
assert Enum.reduce_while(1..100, 0, fn i, acc ->
if i <= 3, do: {:cont, acc + i}, else: {:halt, acc}
end) == 6
assert Enum.reduce_while([1, 2, 3], 1, fn i, acc -> {:cont, acc + i} end) == 7
assert Enum.reduce_while([1, 2, 3], 1, fn _i, acc -> {:halt, acc} end) == 1
assert Enum.reduce_while([], 0, fn _i, acc -> {:cont, acc} end) == 0
end
test "reject" do
assert Enum.reject([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == [1, 3]
assert Enum.reject([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) == []
end
test "reverse" do
assert Enum.reverse([]) == []
assert Enum.reverse([1, 2, 3]) == [3, 2, 1]
assert Enum.reverse([1, 2, 3], [4, 5, 6]) == [3, 2, 1, 4, 5, 6]
end
# Out-of-range start or oversized count leave the list unchanged / clamp.
test "reverse slice" do
assert Enum.reverse_slice([], 1, 2) == []
assert Enum.reverse_slice([1, 2, 3], 0, 0) == [1, 2, 3]
assert Enum.reverse_slice([1, 2, 3], 0, 1) == [1, 2, 3]
assert Enum.reverse_slice([1, 2, 3], 0, 2) == [2, 1, 3]
assert Enum.reverse_slice([1, 2, 3], 0, 20000000) == [3, 2, 1]
assert Enum.reverse_slice([1, 2, 3], 100, 2) == [1, 2, 3]
assert Enum.reverse_slice([1, 2, 3], 10, 10) == [1, 2, 3]
end
# RNG-dependent: seeds :rand explicitly, so the expected picks are fixed.
# The assertion order matters because each call advances the RNG state.
test "random" do
# corner cases, independent of the seed
assert_raise Enum.EmptyError, fn -> Enum.random([]) end
assert Enum.random([1]) == 1
# set a fixed seed so the test can be deterministic
# please note the order of following assertions is important
seed1 = {1406, 407414, 139258}
seed2 = {1306, 421106, 567597}
:rand.seed(:exsplus, seed1)
assert Enum.random([1, 2]) == 1
assert Enum.random([1, 2, 3]) == 1
assert Enum.random([1, 2, 3, 4]) == 2
assert Enum.random([1, 2, 3, 4, 5]) == 4
:rand.seed(:exsplus, seed2)
assert Enum.random([1, 2]) == 2
assert Enum.random([1, 2, 3]) == 2
assert Enum.random([1, 2, 3, 4]) == 3
assert Enum.random([1, 2, 3, 4, 5]) == 5
end
# RNG-dependent like "random" above; also checks the sample is a subset of
# the input and that oversized requests return a permutation of everything.
test "take random" do
# corner cases, independent of the seed
assert_raise FunctionClauseError, fn -> Enum.take_random([1, 2], -1) end
assert Enum.take_random([], 0) == []
assert Enum.take_random([], 3) == []
assert Enum.take_random([1], 0) == []
assert Enum.take_random([1], 2) == [1]
assert Enum.take_random([1, 2], 0) == []
# set a fixed seed so the test can be deterministic
# please note the order of following assertions is important
seed1 = {1406, 407414, 139258}
seed2 = {1406, 421106, 567597}
:rand.seed(:exsplus, seed1)
assert Enum.take_random([1, 2, 3], 1) == [1]
assert Enum.take_random([1, 2, 3], 2) == [1, 3]
assert Enum.take_random([1, 2, 3], 3) == [2, 1, 3]
assert Enum.take_random([1, 2, 3], 4) == [3, 1, 2]
:rand.seed(:exsplus, seed2)
assert Enum.take_random([1, 2, 3], 1) == [3]
assert Enum.take_random([1, 2, 3], 2) == [1, 2]
assert Enum.take_random([1, 2, 3], 3) == [1, 2, 3]
assert Enum.take_random([1, 2, 3], 4) == [1, 2, 3]
# assert that every item in the sample comes from the input list
list = for _<-1..100, do: make_ref
for x <- Enum.take_random(list, 50) do
assert x in list
end
end
test "scan" do
assert Enum.scan([1, 2, 3, 4, 5], &(&1 + &2)) == [1, 3, 6, 10, 15]
assert Enum.scan([], &(&1 + &2)) == []
assert Enum.scan([1, 2, 3, 4, 5], 0, &(&1 + &2)) == [1, 3, 6, 10, 15]
assert Enum.scan([], 0, &(&1 + &2)) == []
end
test "shuffle" do
# set a fixed seed so the test can be deterministic
:rand.seed(:exsplus, {1374, 347975, 449264})
assert Enum.shuffle([1, 2, 3, 4, 5]) == [2, 1, 3, 5, 4]
end
test "sort" do
assert Enum.sort([5, 3, 2, 4, 1]) == [1, 2, 3, 4, 5]
assert Enum.sort([5, 3, 2, 4, 1], &(&1 > &2)) == [5, 4, 3, 2, 1]
end
test "sort by" do
collection = [
[other_data: 1, sorted_data: 5],
[other_data: 3, sorted_data: 4],
[other_data: 4, sorted_data: 3],
[other_data: 2, sorted_data: 2],
[other_data: 5, sorted_data: 1]
]
assert Enum.sort_by(
collection,
&(&1[:sorted_data])
) == [
[other_data: 5, sorted_data: 1],
[other_data: 2, sorted_data: 2],
[other_data: 4, sorted_data: 3],
[other_data: 3, sorted_data: 4],
[other_data: 1, sorted_data: 5]
]
assert Enum.sort_by(collection, &(&1[:sorted_data]), &>=/2) == collection
end
test "split" do
assert Enum.split([1, 2, 3], 0) == {[], [1, 2, 3]}
assert Enum.split([1, 2, 3], 1) == {[1], [2, 3]}
assert Enum.split([1, 2, 3], 2) == {[1, 2], [3]}
assert Enum.split([1, 2, 3], 3) == {[1, 2, 3], []}
assert Enum.split([1, 2, 3], 4) == {[1, 2, 3], []}
assert Enum.split([], 3) == {[], []}
assert Enum.split([1, 2, 3], -1) == {[1, 2], [3]}
assert Enum.split([1, 2, 3], -2) == {[1], [2, 3]}
assert Enum.split([1, 2, 3], -3) == {[], [1, 2, 3]}
assert Enum.split([1, 2, 3], -10) == {[], [1, 2, 3]}
end
test "split while" do
assert Enum.split_while([1, 2, 3], fn(_) -> false end) == {[], [1, 2, 3]}
assert Enum.split_while([1, 2, 3], fn(_) -> true end) == {[1, 2, 3], []}
assert Enum.split_while([1, 2, 3], fn(x) -> x > 2 end) == {[], [1, 2, 3]}
assert Enum.split_while([1, 2, 3], fn(x) -> x > 3 end) == {[], [1, 2, 3]}
assert Enum.split_while([1, 2, 3], fn(x) -> x < 3 end) == {[1, 2], [3]}
assert Enum.split_while([], fn(_) -> true end) == {[], []}
end
# sum raises ArithmeticError for non-numeric elements.
test "sum" do
assert Enum.sum([]) == 0
assert Enum.sum([1]) == 1
assert Enum.sum([1, 2, 3]) == 6
assert Enum.sum([1.1, 2.2, 3.3]) == 6.6
assert_raise ArithmeticError, fn ->
Enum.sum([{}])
end
assert_raise ArithmeticError, fn ->
Enum.sum([1, {}])
end
end
test "take" do
assert Enum.take([1, 2, 3], 0) == []
assert Enum.take([1, 2, 3], 1) == [1]
assert Enum.take([1, 2, 3], 2) == [1, 2]
assert Enum.take([1, 2, 3], 3) == [1, 2, 3]
assert Enum.take([1, 2, 3], 4) == [1, 2, 3]
assert Enum.take([1, 2, 3], -1) == [3]
assert Enum.take([1, 2, 3], -2) == [2, 3]
assert Enum.take([1, 2, 3], -4) == [1, 2, 3]
assert Enum.take([], 3) == []
end
test "take every" do
assert Enum.take_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 2) == [1, 3, 5, 7, 9]
assert Enum.take_every([], 2) == []
assert Enum.take_every([1, 2], 2) == [1]
assert Enum.take_every([1, 2, 3], 0) == []
assert Enum.take_every(1..3, 1) == [1, 2, 3]
assert_raise FunctionClauseError, fn ->
Enum.take_every([1, 2, 3], -1)
end
assert_raise FunctionClauseError, fn ->
Enum.take_every(1..10, 3.33)
end
end
test "take while" do
assert Enum.take_while([1, 2, 3], fn(x) -> x > 3 end) == []
assert Enum.take_while([1, 2, 3], fn(x) -> x <= 1 end) == [1]
assert Enum.take_while([1, 2, 3], fn(x) -> x <= 3 end) == [1, 2, 3]
assert Enum.take_while([], fn(_) -> true end) == []
end
test "to list" do
assert Enum.to_list([]) == []
assert Enum.to_list(1 .. 3) == [1, 2, 3]
end
test "uniq by" do
assert Enum.uniq_by([1, 2, 3, 2, 1], fn x -> x end) == [1, 2, 3]
end
test "uniq" do
assert Enum.uniq([5, 1, 2, 3, 2, 1]) == [5, 1, 2, 3]
assert Enum.uniq([1, 2, 3, 2, 1], fn x -> x end) == [1, 2, 3]
end
# zip stops at the shorter of the two enumerables.
test "zip" do
assert Enum.zip([:a, :b], [1, 2]) == [{:a, 1}, {:b, 2}]
assert Enum.zip([:a, :b], [1, 2, 3, 4]) == [{:a, 1}, {:b, 2}]
assert Enum.zip([:a, :b, :c, :d], [1, 2]) == [{:a, 1}, {:b, 2}]
assert Enum.zip([], [1]) == []
assert Enum.zip([1], []) == []
assert Enum.zip([], []) == []
end
test "unzip" do
assert Enum.unzip([{:a, 1}, {:b, 2}, {:c, 3}]) == {[:a, :b, :c], [1, 2, 3]}
assert Enum.unzip([]) == {[], []}
assert Enum.unzip(%{a: 1, b: 2}) == {[:a, :b], [1, 2]}
assert Enum.unzip([foo: "a", bar: "b"]) == {[:foo, :bar], ["a", "b"]}
assert_raise FunctionClauseError, fn -> Enum.unzip([{:a, 1}, {:b, 2, "foo"}]) end
assert_raise FunctionClauseError, fn -> Enum.unzip([{1, 2, {3, {4, 5}}}]) end
assert_raise FunctionClauseError, fn -> Enum.unzip([1, 2, 3]) end
end
test "with index" do
assert Enum.with_index([]) == []
assert Enum.with_index([1, 2, 3]) == [{1, 0}, {2, 1}, {3, 2}]
end
# max/min use Erlang term ordering, so e.g. a list sorts above atoms/tuples.
test "max" do
assert Enum.max([1]) == 1
assert Enum.max([1, 2, 3]) == 3
assert Enum.max([1, [], :a, {}]) == []
assert_raise Enum.EmptyError, fn ->
Enum.max([])
end
end
test "max by" do
assert Enum.max_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end) == "aaa"
assert_raise Enum.EmptyError, fn ->
Enum.max_by([], fn(x) -> String.length(x) end)
end
end
test "min" do
assert Enum.min([1]) == 1
assert Enum.min([1, 2, 3]) == 1
assert Enum.min([[], :a, {}]) == :a
assert_raise Enum.EmptyError, fn ->
Enum.min([])
end
end
test "min by" do
assert Enum.min_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end) == "a"
assert_raise Enum.EmptyError, fn ->
Enum.min_by([], fn(x) -> String.length(x) end)
end
end
test "min max" do
assert Enum.min_max([1]) == {1, 1}
assert Enum.min_max([2, 3, 1]) == {1, 3}
assert Enum.min_max([[], :a, {}]) == {:a, []}
assert_raise Enum.EmptyError, fn ->
Enum.min_max([])
end
end
test "min max by" do
assert Enum.min_max_by(["aaa", "a", "aa"], fn(x) -> String.length(x) end) == {"a", "aaa"}
assert_raise Enum.EmptyError, fn ->
Enum.min_max_by([], fn(x) -> String.length(x) end)
end
end
# chunk/2,3,4: count, step, and an optional pad for the trailing partial chunk.
test "chunk" do
assert Enum.chunk([1, 2, 3, 4, 5], 2) == [[1, 2], [3, 4]]
assert Enum.chunk([1, 2, 3, 4, 5], 2, 2, [6]) == [[1, 2], [3, 4], [5, 6]]
assert Enum.chunk([1, 2, 3, 4, 5, 6], 3, 2) == [[1, 2, 3], [3, 4, 5]]
assert Enum.chunk([1, 2, 3, 4, 5, 6], 2, 3) == [[1, 2], [4, 5]]
assert Enum.chunk([1, 2, 3, 4, 5, 6], 3, 2, []) == [[1, 2, 3], [3, 4, 5], [5, 6]]
assert Enum.chunk([1, 2, 3, 4, 5, 6], 3, 3, []) == [[1, 2, 3], [4, 5, 6]]
assert Enum.chunk([1, 2, 3, 4, 5], 4, 4, 6..10) == [[1, 2, 3, 4], [5, 6, 7, 8]]
end
test "chunk by" do
assert Enum.chunk_by([1, 2, 2, 3, 4, 4, 6, 7, 7], &(rem(&1, 2) == 1)) == [[1], [2, 2], [3], [4, 4, 6], [7, 7]]
assert Enum.chunk_by([1, 2, 3, 4], fn _ -> true end) == [[1, 2, 3, 4]]
assert Enum.chunk_by([], fn _ -> true end) == []
assert Enum.chunk_by([1], fn _ -> true end) == [[1]]
end
# In this Elixir version out-of-bounds slices return [] (contrast with the
# older module at the top of this file, which expected nil).
test "slice" do
list = [1, 2, 3, 4, 5]
assert Enum.slice(list, 0, 0) == []
assert Enum.slice(list, 0, 1) == [1]
assert Enum.slice(list, 0, 2) == [1, 2]
assert Enum.slice(list, 1, 2) == [2, 3]
assert Enum.slice(list, 1, 0) == []
assert Enum.slice(list, 2, 5) == [3, 4, 5]
assert Enum.slice(list, 2, 6) == [3, 4, 5]
assert Enum.slice(list, 5, 5) == []
assert Enum.slice(list, 6, 5) == []
assert Enum.slice(list, 6, 0) == []
assert Enum.slice(list, -6, 0) == []
assert Enum.slice(list, -6, 5) == []
assert Enum.slice(list, -2, 5) == [4, 5]
assert Enum.slice(list, -3, 1) == [3]
assert_raise FunctionClauseError, fn ->
Enum.slice(list, 0, -1)
end
assert_raise FunctionClauseError, fn ->
Enum.slice(list, 0.99, 0)
end
assert_raise FunctionClauseError, fn ->
Enum.slice(list, 0, 0.99)
end
end
test "slice range" do
list = [1, 2, 3, 4, 5]
assert Enum.slice(list, 0..0) == [1]
assert Enum.slice(list, 0..1) == [1, 2]
assert Enum.slice(list, 0..2) == [1, 2, 3]
assert Enum.slice(list, 1..2) == [2, 3]
assert Enum.slice(list, 1..0) == []
assert Enum.slice(list, 2..5) == [3, 4, 5]
assert Enum.slice(list, 2..6) == [3, 4, 5]
assert Enum.slice(list, 4..4) == [5]
assert Enum.slice(list, 5..5) == []
assert Enum.slice(list, 6..5) == []
assert Enum.slice(list, 6..0) == []
assert Enum.slice(list, -6..0) == []
assert Enum.slice(list, -6..5) == []
assert Enum.slice(list, -5..-1) == [1, 2, 3, 4, 5]
assert Enum.slice(list, -5..-3) == [1, 2, 3]
assert Enum.slice(list, -6..-1) == []
assert Enum.slice(list, -6..-3) == []
assert_raise ArgumentError, fn ->
x = 1.1
Enum.slice(list, x..2)
end
assert_raise ArgumentError, fn ->
x = 1.9
Enum.slice(list, 1..x)
end
end
end
# Tests for the Enum module over ranges. Note that ranges like 1..0 are
# *descending* two-element enumerables ([1, 0]) in this Elixir version, which
# several tests below rely on.
defmodule EnumTest.Range do
use ExUnit.Case, async: true
test "all?" do
range = 0..5
refute Enum.all?(range, fn(x) -> rem(x, 2) == 0 end)
range = 0..1
assert Enum.all?(range, fn(x) -> x < 2 end)
assert Enum.all?(range)
range = 1..0
assert Enum.all?(range)
end
test "any?" do
range = 0..5
refute Enum.any?(range, &(&1 > 10))
range = 0..5
assert Enum.any?(range, &(&1 > 3))
range = 1..0
assert Enum.any?(range)
end
test "fetch!" do
assert Enum.fetch!(2..6, 0) == 2
assert Enum.fetch!(2..6, 4) == 6
assert Enum.fetch!(2..6, -1) == 6
assert Enum.fetch!(2..6, -2) == 5
assert Enum.fetch!(-2..-6, 0) == -2
assert Enum.fetch!(-2..-6, 4) == -6
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!(2..6, 8)
end
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!(-2..-6, 8)
end
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!(2..6, -8)
end
end
test "count" do
range = 1..5
assert Enum.count(range) == 5
range = 1..1
assert Enum.count(range) == 1
assert Enum.count([1, true, false, nil]) == 4
end
test "count fun" do
range = 1..5
assert Enum.count(range, fn(x) -> rem(x, 2) == 0 end) == 2
range = 1..1
assert Enum.count(range, fn(x) -> rem(x, 2) == 0 end) == 0
assert Enum.count([1, true, false, nil], & &1) == 2
end
test "chunk" do
assert Enum.chunk(1..5, 2) == [[1, 2], [3, 4]]
assert Enum.chunk(1..5, 2, 2, [6]) == [[1, 2], [3, 4], [5, 6]]
assert Enum.chunk(1..6, 3, 2) == [[1, 2, 3], [3, 4, 5]]
assert Enum.chunk(1..6, 2, 3) == [[1, 2], [4, 5]]
assert Enum.chunk(1..6, 3, 2, []) == [[1, 2, 3], [3, 4, 5], [5, 6]]
assert Enum.chunk(1..5, 4, 4, 6..10) == [[1, 2, 3, 4], [5, 6, 7, 8]]
end
test "chunk by" do
assert Enum.chunk_by(1..4, fn _ -> true end) == [[1, 2, 3, 4]]
assert Enum.chunk_by(1..4, &(rem(&1, 2) == 1)) == [[1], [2], [3], [4]]
end
test "dedup" do
assert Enum.dedup(1..3) == [1, 2, 3]
end
test "dedup by" do
assert Enum.dedup_by(1..3, fn _ -> 1 end) == [1]
end
test "drop" do
range = 1..3
assert Enum.drop(range, 0) == [1, 2, 3]
assert Enum.drop(range, 1) == [2, 3]
assert Enum.drop(range, 2) == [3]
assert Enum.drop(range, 3) == []
assert Enum.drop(range, 4) == []
assert Enum.drop(range, -1) == [1, 2]
assert Enum.drop(range, -2) == [1]
assert Enum.drop(range, -4) == []
range = 1..0
assert Enum.drop(range, 3) == []
end
test "drop while" do
range = 0..6
assert Enum.drop_while(range, fn(x) -> x <= 3 end) == [4, 5, 6]
assert Enum.drop_while(range, fn(_) -> false end) == [0, 1, 2, 3, 4, 5, 6]
range = 0..3
assert Enum.drop_while(range, fn(x) -> x <= 3 end) == []
range = 1..0
assert Enum.drop_while(range, fn(_) -> nil end) == [1, 0]
end
test "find" do
range = 2..6
assert Enum.find(range, fn(x) -> rem(x, 2) == 0 end) == 2
assert Enum.find(range, fn(x) -> rem(x, 2) == 1 end) == 3
assert Enum.find(range, fn _ -> false end) == nil
assert Enum.find(range, 0, fn _ -> false end) == 0
end
test "find value" do
range = 2..6
assert Enum.find_value(range, fn(x) -> rem(x, 2) == 1 end)
end
test "find index" do
range = 2..6
assert Enum.find_index(range, fn(x) -> rem(x, 2) == 1 end) == 1
end
# A range is never empty: 1..0 still contains two elements.
test "empty?" do
range = 1..0
refute Enum.empty?(range)
range = 1..2
refute Enum.empty?(range)
end
# Uses the process dictionary to observe side effects; cleaned up in `after`.
test "each" do
try do
range = 1..0
assert Enum.each(range, fn(x) -> x end) == :ok
range = 1..3
assert Enum.each(range, fn(x) -> Process.put(:enum_test_each, x * 2) end) == :ok
assert Process.get(:enum_test_each) == 6
after
Process.delete(:enum_test_each)
end
try do
range = -1..-3
assert Enum.each(range, fn(x) -> Process.put(:enum_test_each, x * 2) end) == :ok
assert Process.get(:enum_test_each) == -6
after
Process.delete(:enum_test_each)
end
end
test "filter" do
range = 1..3
assert Enum.filter(range, fn(x) -> rem(x, 2) == 0 end) == [2]
range = 1..6
assert Enum.filter(range, fn(x) -> rem(x, 2) == 0 end) == [2, 4, 6]
assert Enum.filter([1, 2, false, 3, nil], & &1) == [1, 2, 3]
end
test "filter with match" do
range = 1..3
assert Enum.filter(range, &match?(1, &1)) == [1]
assert Enum.filter(range, &match?(x when x < 3, &1)) == [1, 2]
assert Enum.filter(range, &match?(_, &1)) == [1, 2, 3]
end
test "filter map" do
range = 1..3
assert Enum.filter_map(range, fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4]
range = 2..6
assert Enum.filter_map(range, fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4, 8, 12]
end
test "flat map" do
range = 1..3
assert Enum.flat_map(range, fn(x) -> [x, x] end) == [1, 1, 2, 2, 3, 3]
end
test "intersperse" do
range = 1..0
assert Enum.intersperse(range, true) == [1, true, 0]
range = 1..3
assert Enum.intersperse(range, false) == [1, false, 2, false, 3]
end
test "into" do
assert Enum.into([a: 1, b: 2], %{}) == %{a: 1, b: 2}
assert Enum.into(%{a: 1, b: 2}, []) == [a: 1, b: 2]
assert Enum.into(3..5, [1, 2]) == [1, 2, 3, 4, 5]
assert Enum.into(1..5, []) == [1, 2, 3, 4, 5]
assert Enum.into(1..5, [], fn x -> x * 2 end) == [2, 4, 6, 8, 10]
assert Enum.into(1..3, "numbers: ", &to_string/1) == "numbers: 123"
end
test "join" do
range = 1..0
assert Enum.join(range, " = ") == "1 = 0"
range = 1..3
assert Enum.join(range, " = ") == "1 = 2 = 3"
assert Enum.join(range) == "123"
end
test "map join" do
range = 1..0
assert Enum.map_join(range, " = ", &(&1 * 2)) == "2 = 0"
range = 1..3
assert Enum.map_join(range, " = ", &(&1 * 2)) == "2 = 4 = 6"
assert Enum.map_join(range, &(&1 * 2)) == "246"
end
test "map" do
range = 1..3
assert Enum.map(range, fn x -> x * 2 end) == [2, 4, 6]
range = -1..-3
assert Enum.map(range, fn x -> x * 2 end) == [-2, -4, -6]
end
test "map reduce" do
range = 1..0
assert Enum.map_reduce(range, 1, fn(x, acc) -> {x * 2, x + acc} end) == {[2, 0], 2}
range = 1..3
assert Enum.map_reduce(range, 1, fn(x, acc) -> {x * 2, x + acc} end) == {[2, 4, 6], 7}
end
test "max" do
assert Enum.max(1..1) == 1
assert Enum.max(1..3) == 3
assert Enum.max(3..1) == 3
end
test "max by" do
assert Enum.max_by(1..1, fn(x) -> :math.pow(-2, x) end) == 1
assert Enum.max_by(1..3, fn(x) -> :math.pow(-2, x) end) == 2
end
test "min" do
assert Enum.min(1..1) == 1
assert Enum.min(1..3) == 1
end
test "min by" do
assert Enum.min_by(1..1, fn(x) -> :math.pow(-2, x) end) == 1
assert Enum.min_by(1..3, fn(x) -> :math.pow(-2, x) end) == 3
end
test "partition" do
range = 1..3
assert Enum.partition(range, fn(x) -> rem(x, 2) == 0 end) == {[2], [1, 3]}
end
test "reduce" do
range = 1..0
assert Enum.reduce(range, 1, fn(x, acc) -> x + acc end) == 2
range = 1..3
assert Enum.reduce(range, 1, fn(x, acc) -> x + acc end) == 7
range = 1..3
assert Enum.reduce(range, fn(x, acc) -> x + acc end) == 6
end
test "reject" do
range = 1..3
assert Enum.reject(range, fn(x) -> rem(x, 2) == 0 end) == [1, 3]
range = 1..6
assert Enum.reject(range, fn(x) -> rem(x, 2) == 0 end) == [1, 3, 5]
assert Enum.reject([1, true, nil, false, 2], & &1) == [nil, false]
end
test "reverse" do
assert Enum.reverse(0..0) == [0]
assert Enum.reverse(1..3) == [3, 2, 1]
assert Enum.reverse(1..3, 4..6) == [3, 2, 1, 4, 5, 6]
assert Enum.reverse([1, 2, 3], 4..6) == [3, 2, 1, 4, 5, 6]
assert Enum.reverse(1..3, [4, 5, 6]) == [3, 2, 1, 4, 5, 6]
end
test "reverse slice" do
assert Enum.reverse_slice(1..6, 2, 0) == [1, 2, 3, 4, 5, 6]
assert Enum.reverse_slice(1..6, 2, 2) == [1, 2, 4, 3, 5, 6]
assert Enum.reverse_slice(1..6, 2, 4) == [1, 2, 6, 5, 4, 3]
assert Enum.reverse_slice(1..6, 2, 10000000) == [1, 2, 6, 5, 4, 3]
assert Enum.reverse_slice(1..6, 10000000, 4) == [1, 2, 3, 4, 5, 6]
assert Enum.reverse_slice(1..6, 50, 50) == [1, 2, 3, 4, 5, 6]
end
# RNG-dependent: seeded explicitly, so the expected values are fixed and the
# assertion order matters (each call advances the RNG state).
test "random" do
# corner cases, independent of the seed
assert Enum.random(1..1) == 1
# set a fixed seed so the test can be deterministic
# please note the order of following assertions is important
seed1 = {1406, 407414, 139258}
seed2 = {1306, 421106, 567597}
:rand.seed(:exsplus, seed1)
assert Enum.random(1..2) == 1
assert Enum.random(1..3) == 1
:rand.seed(:exsplus, seed2)
assert Enum.random(1..2) == 2
assert Enum.random(1..3) == 2
end
test "take random" do
# corner cases, independent of the seed
assert_raise FunctionClauseError, fn -> Enum.take_random(1..2, -1) end
assert Enum.take_random(1..1, 0) == []
assert Enum.take_random(1..1, 2) == [1]
assert Enum.take_random(1..2, 0) == []
# set a fixed seed so the test can be deterministic
# please note the order of following assertions is important
seed1 = {1406, 407414, 139258}
seed2 = {1406, 421106, 567597}
:rand.seed(:exsplus, seed1)
assert Enum.take_random(1..3, 1) == [1]
assert Enum.take_random(1..3, 2) == [1, 3]
assert Enum.take_random(1..3, 3) == [2, 1, 3]
assert Enum.take_random(1..3, 4) == [3, 1, 2]
:rand.seed(:exsplus, seed2)
assert Enum.take_random(1..3, 1) == [3]
assert Enum.take_random(1..3, 2) == [1, 2]
assert Enum.take_random(1..3, 3) == [1, 2, 3]
assert Enum.take_random(1..3, 4) == [1, 2, 3]
end
test "scan" do
assert Enum.scan(1..5, &(&1 + &2)) == [1, 3, 6, 10, 15]
assert Enum.scan(1..5, 0, &(&1 + &2)) == [1, 3, 6, 10, 15]
end
test "shuffle" do
# set a fixed seed so the test can be deterministic
:rand.seed(:exsplus, {1374, 347975, 449264})
assert Enum.shuffle(1..5) == [2, 1, 3, 5, 4]
end
# Out-of-bounds slices return [] here (newer semantics than the module at the
# top of this file, which expected nil).
test "slice" do
assert Enum.slice(1..5, 0, 0) == []
assert Enum.slice(1..5, 0, 1) == [1]
assert Enum.slice(1..5, 0, 2) == [1, 2]
assert Enum.slice(1..5, 1, 2) == [2, 3]
assert Enum.slice(1..5, 1, 0) == []
assert Enum.slice(1..5, 2, 5) == [3, 4, 5]
assert Enum.slice(1..5, 2, 6) == [3, 4, 5]
assert Enum.slice(1..5, 5, 5) == []
assert Enum.slice(1..5, 6, 5) == []
assert Enum.slice(1..5, 6, 0) == []
assert Enum.slice(1..5, -6, 0) == []
assert Enum.slice(1..5, -6, 5) == []
assert Enum.slice(1..5, -2, 5) == [4, 5]
assert Enum.slice(1..5, -3, 1) == [3]
end
test "slice range" do
assert Enum.slice(1..5, 0..0) == [1]
assert Enum.slice(1..5, 0..1) == [1, 2]
assert Enum.slice(1..5, 0..2) == [1, 2, 3]
assert Enum.slice(1..5, 1..2) == [2, 3]
assert Enum.slice(1..5, 1..0) == []
assert Enum.slice(1..5, 2..5) == [3, 4, 5]
assert Enum.slice(1..5, 2..6) == [3, 4, 5]
assert Enum.slice(1..5, 4..4) == [5]
assert Enum.slice(1..5, 5..5) == []
assert Enum.slice(1..5, 6..5) == []
assert Enum.slice(1..5, 6..0) == []
assert Enum.slice(1..5, -6..0) == []
assert Enum.slice(1..5, -6..5) == []
assert Enum.slice(1..5, -5..-1) == [1, 2, 3, 4, 5]
assert Enum.slice(1..5, -5..-3) == [1, 2, 3]
assert Enum.slice(1..5, -6..-1) == []
assert Enum.slice(1..5, -6..-3) == []
end
test "sort" do
assert Enum.sort(3..1) == [1, 2, 3]
assert Enum.sort(2..1) == [1, 2]
assert Enum.sort(1..1) == [1]
assert Enum.sort(3..1, &(&1 > &2)) == [3, 2, 1]
assert Enum.sort(2..1, &(&1 > &2)) == [2, 1]
assert Enum.sort(1..1, &(&1 > &2)) == [1]
end
test "split" do
range = 1..3
assert Enum.split(range, 0) == {[], [1, 2, 3]}
assert Enum.split(range, 1) == {[1], [2, 3]}
assert Enum.split(range, 2) == {[1, 2], [3]}
assert Enum.split(range, 3) == {[1, 2, 3], []}
assert Enum.split(range, 4) == {[1, 2, 3], []}
assert Enum.split(range, -1) == {[1, 2], [3]}
assert Enum.split(range, -2) == {[1], [2, 3]}
assert Enum.split(range, -3) == {[], [1, 2, 3]}
assert Enum.split(range, -10) == {[], [1, 2, 3]}
range = 1..0
assert Enum.split(range, 3) == {[1, 0], []}
end
test "split while" do
range = 1..3
assert Enum.split_while(range, fn(_) -> false end) == {[], [1, 2, 3]}
assert Enum.split_while(range, fn(_) -> nil end) == {[], [1, 2, 3]}
assert Enum.split_while(range, fn(_) -> true end) == {[1, 2, 3], []}
assert Enum.split_while(range, fn(x) -> x > 2 end) == {[], [1, 2, 3]}
assert Enum.split_while(range, fn(x) -> x > 3 end) == {[], [1, 2, 3]}
assert Enum.split_while(range, fn(x) -> x < 3 end) == {[1, 2], [3]}
assert Enum.split_while(range, fn(x) -> x end) == {[1, 2, 3], []}
range = 1..0
assert Enum.split_while(range, fn(_) -> true end) == {[1, 0], []}
end
test "sum" do
assert Enum.sum(1..1) == 1
assert Enum.sum(1..3) == 6
end
test "take" do
range = 1..3
assert Enum.take(range, 0) == []
assert Enum.take(range, 1) == [1]
assert Enum.take(range, 2) == [1, 2]
assert Enum.take(range, 3) == [1, 2, 3]
assert Enum.take(range, 4) == [1, 2, 3]
assert Enum.take(range, -1) == [3]
assert Enum.take(range, -2) == [2, 3]
assert Enum.take(range, -4) == [1, 2, 3]
range = 1..0
assert Enum.take(range, 3) == [1, 0]
end
test "take every" do
assert Enum.take_every(1..10, 2) == [1, 3, 5, 7, 9]
assert Enum.take_every(1..2, 2) == [1]
assert Enum.take_every(1..3, 0) == []
assert_raise FunctionClauseError, fn ->
Enum.take_every(1..3, -1)
end
end
test "take while" do
range = 1..3
assert Enum.take_while(range, fn(x) -> x > 3 end) == []
assert Enum.take_while(range, fn(x) -> x <= 1 end) == [1]
assert Enum.take_while(range, fn(x) -> x <= 3 end) == [1, 2, 3]
assert Enum.take_while(range, fn(x) -> x end) == [1, 2, 3]
assert Enum.take_while(range, fn(_) -> nil end) == []
assert Enum.take_while([], fn(_) -> true end) == []
end
test "uniq" do
assert Enum.uniq(1..3) == [1, 2, 3]
assert Enum.uniq(1..3, fn x -> x end) == [1, 2, 3]
end
# zip stops at the shorter enumerable, in every list/range combination.
test "zip" do
assert Enum.zip([:a, :b], 1..2) == [{:a, 1}, {:b, 2}]
assert Enum.zip([:a, :b], 1..4) == [{:a, 1}, {:b, 2}]
assert Enum.zip([:a, :b, :c, :d], 1..2) == [{:a, 1}, {:b, 2}]
assert Enum.zip(1..2, [:a, :b]) == [{1, :a}, {2, :b}]
assert Enum.zip(1..4, [:a, :b]) == [{1, :a}, {2, :b}]
assert Enum.zip(1..2, [:a, :b, :c, :d]) == [{1, :a}, {2, :b}]
assert Enum.zip(1..2, 1..2) == [{1, 1}, {2, 2}]
assert Enum.zip(1..4, 1..2) == [{1, 1}, {2, 2}]
assert Enum.zip(1..2, 1..4) == [{1, 1}, {2, 2}]
end
test "with index" do
assert Enum.with_index(1..3) == [{1, 0}, {2, 1}, {3, 2}]
end
end
# Laziness guarantees of Enum.take/2 when the enumerable has side effects
# (printing, reading files): only what is needed may be consumed.
defmodule EnumTest.SideEffects do
use ExUnit.Case, async: true
import ExUnit.CaptureIO
import PathHelpers
# Only the first element of the infinite stream may be forced, so exactly
# one "1\n" is printed.
test "take with side effects" do
stream = Stream.unfold(1, fn x -> IO.puts x; {x, x + 1} end)
assert capture_io(fn ->
Enum.take(stream, 1)
end) == "1\n"
end
# Writes a one-line temp file: the first take consumes the only line, the
# second take on the same (stateful) IO stream finds nothing. The temp file
# is removed in `after` regardless of the outcome.
test "take does not consume next without a need" do
path = tmp_path("oneliner.txt")
File.mkdir(Path.dirname(path))
try do
File.write!(path, "ONE")
File.open!(path, [], fn file ->
iterator = IO.stream(file, :line)
assert Enum.take(iterator, 1) == ["ONE"]
assert Enum.take(iterator, 5) == []
end)
after
File.rm(path)
end
end
# take(_, 0) must never touch the file — the fixture does not exist, so any
# eager read would raise.
test "take with no item works as no-op" do
iterator = File.stream!(fixture_path("unknown.txt"))
assert Enum.take(iterator, 0) == []
assert Enum.take(iterator, 0) == []
assert Enum.take(iterator, 0) == []
assert Enum.take(iterator, 0) == []
end
end
| 31.668685 | 114 | 0.50982 |
1cc2c6e118a8b367d60e167e7a0c924c10f921ee | 53 | exs | Elixir | .formatter.exs | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | 1 | 2021-08-23T13:36:14.000Z | 2021-08-23T13:36:14.000Z | .formatter.exs | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | null | null | null | .formatter.exs | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | null | null | null | [
inputs: ["*.{ex,exs}", "{test}/**/*.{ex,exs}"]
]
| 13.25 | 48 | 0.377358 |
1cc2c98381ce618c6ade2a81d8e4c42b919d05c7 | 271 | ex | Elixir | notes.ex | ghitchens/stopwatch | c0570d984b926b68d7a3d24a98b390e23b19ccbf | [
"MIT"
] | 1 | 2017-05-01T21:00:39.000Z | 2017-05-01T21:00:39.000Z | notes.ex | ghitchens/stopwatch | c0570d984b926b68d7a3d24a98b390e23b19ccbf | [
"MIT"
] | null | null | null | notes.ex | ghitchens/stopwatch | c0570d984b926b68d7a3d24a98b390e23b19ccbf | [
"MIT"
] | null | null | null | # subscriber api
subscribe()
handle_notification(...)
# source API (not including state)
register_source(registry, source, initial_event_data) # sends registration info
deregister_source(registry, source, final_notification_data)
inform(notification_data)
| 22.583333 | 82 | 0.782288 |
1cc2ed4351121b5d6c017bdabe68e4eaf817d450 | 1,098 | ex | Elixir | lib/whistle/http_server.ex | andreasronge/whistle | 8dc5b4246e9f0fea8bc5ffe8c89737eceb082a32 | [
"MIT"
] | 59 | 2018-12-18T15:24:23.000Z | 2020-11-19T18:40:25.000Z | lib/whistle/http_server.ex | andreasronge/whistle | 8dc5b4246e9f0fea8bc5ffe8c89737eceb082a32 | [
"MIT"
] | 8 | 2019-02-05T22:36:17.000Z | 2019-03-20T21:13:42.000Z | lib/whistle/http_server.ex | andreasronge/whistle | 8dc5b4246e9f0fea8bc5ffe8c89737eceb082a32 | [
"MIT"
] | 3 | 2019-01-30T19:13:53.000Z | 2020-11-19T18:40:28.000Z | defmodule Whistle.HttpServer do
def child_spec(opts) do
dispatch_rules = dispatch(Keyword.get(opts, :routers, []), Keyword.get(opts, :plug, nil))
children =
Enum.flat_map([:http, :https], fn scheme ->
opts
|> Keyword.get(scheme, false)
|> case do
false ->
[]
opts ->
cowboy_opts = Keyword.put_new(opts, :dispatch, dispatch_rules)
[{Plug.Cowboy, [scheme: scheme, plug: nil, options: cowboy_opts]}]
end
end)
default = %{
id: __MODULE__,
start: {Supervisor, :start_link, [children, [strategy: :one_for_one]]},
type: :supervisor
}
Supervisor.child_spec(default, [])
end
def build_handlers(routers) do
routers
|> Enum.map(fn router ->
{router.__path(), Whistle.SocketHandler, {router, []}}
end)
end
defp dispatch(routers, plug) do
plug_handler =
if is_nil(plug) do
[]
else
[{:_, Plug.Cowboy.Handler, {plug, []}}]
end
[
{:_, build_handlers(routers) ++ plug_handler}
]
end
end
| 22.408163 | 93 | 0.563752 |
1cc2f85767fb0a91829db9f6acfaf433506ebf71 | 18,360 | ex | Elixir | lib/cli/help.ex | sndnv/track | 3d3727f752877f51a69700c0efff13d3eed978fa | [
"Apache-2.0"
] | null | null | null | lib/cli/help.ex | sndnv/track | 3d3727f752877f51a69700c0efff13d3eed978fa | [
"Apache-2.0"
] | null | null | null | lib/cli/help.ex | sndnv/track | 3d3727f752877f51a69700c0efff13d3eed978fa | [
"Apache-2.0"
] | null | null | null | defmodule Cli.Help do
@moduledoc """
Module used for generating application documentation and help messages.
"""
# Application name shown in all help/usage text.
@app "track"
# One-line summary shown under "Name"; the app name is rendered bold.
@brief "#{IO.ANSI.format([:bright, @app], true)} - Simple time/task tracking terminal utility"
# Long description shown under "Description", one list entry per output line.
@description [
"#{IO.ANSI.format([:bright, @app], true)} is a basic time/task tracking terminal-based application.",
"It provides functionality for managing tasks and reporting on their duration and distribution."
]
@supported_commands %{
add: %{
arguments: [],
options: %{
required: [
[{"task", "Task name", ["Working on project", "dev", "bookkeeping"]}],
[{"start-date", "Task start date", ["today", "today+2d", "today-1d", "1999-12-21"]}],
[
{"start-time", "Task start time",
["now", "now+10m", "now-90m", "now+3h", "now-1h", "23:45"]}
],
[
{"end-time", "Task end time",
["now", "now+10m", "now-90m", "now+3h", "now-1h", "23:45"]},
{"duration", "Task duration", ["45m", "5h"]}
]
],
optional: []
},
description: ["Adds a new task"]
},
remove: %{
arguments: [
{"<id>", "Task UUID", []}
],
options: %{},
description: ["Removes an existing task"]
},
update: %{
arguments: [
{"<id>", "Task UUID", []}
],
options: %{
required: [],
optional: [
[{"task", "Task name", ["Working on project", "dev", "bookkeeping"]}],
[{"start-date", "Task start date", ["today", "today+2d", "today-1d", "1999-12-21"]}],
[
{"start-time", "Task start time",
["now", "now+10m", "now-90m", "now+3h", "now-1h", "23:45"]}
],
[
{"duration", "Task duration", ["45m", "5h"]}
]
]
},
description: [
"Updates an existing task",
"All parameters are optional but at least one is required"
]
},
start: %{
arguments: [
{"<task>", "Task name", ["Working on project", "dev", "bookkeeping"]}
],
options: %{},
description: [
"Starts a new active task",
"Only one active tasks is allowed; the currently active task can be stopped with '#{@app} stop'"
]
},
stop: %{
arguments: [],
options: %{},
description: [
"Stops an active task",
"If the task's duration is under one minute, it is discarded."
]
},
list: %{
arguments: [],
options: %{
required: [],
optional: [
[{"from", "Query start date", ["today", "today+2d", "today-1d", "1999-12-21"]}],
[{"to", "Query end date", ["today", "today+2d", "today-1d", "1999-12-21"]}],
[{"sort-by", "Field name to sort by", ["task", "start", "duration"]}],
[{"order", "Sorting order", ["desc", "asc"]}]
]
},
description: [
"Retrieves a list of all tasks based on the specified query parameters",
"If no query parameters are supplied, today's tasks are retrieved, sorted by start time"
]
},
report: %{
arguments: [
{"duration", "Shows the total duration of each task for the queried period", []},
{"day", "Shows daily distribution of tasks", []},
{"week", "Shows weekly distribution of tasks", []},
{"month", "Shows monthly distribution of tasks", []},
{"task", "Shows total duration of the task(s) per day", []},
{"overlap",
"Shows all tasks that are overlapping and the day on which the overlap occurs", []}
],
options: %{
required: [],
optional: [
[{"from", "Query start date", ["today", "today+2d", "today-1d", "1999-12-21"]}],
[{"to", "Query end date", ["today", "today+2d", "today-1d", "1999-12-21"]}],
[{"sort-by", "Field name to sort by", ["task", "start", "duration"]}],
[{"order", "Sorting order", ["desc", "asc"]}]
]
},
description: [
"Generates reports",
"If no query parameters are supplied, today's tasks are retrieved and processed"
]
},
service: %{
arguments: [
{"store clear", "Removes all stored tasks", []}
],
options: %{},
description: ["Executes management commands"]
},
legend: %{
arguments: [],
options: %{},
description: [
"Shows a colour legend with a brief description of what the various chart/table colours mean"
]
},
help: %{
arguments: [],
options: %{},
description: ["Shows this help message"]
},
generate: %{
arguments: [],
options: %{},
description: ["Generates a bash_completion script"]
}
}
@additional_options %{
"--verbose": %{
arguments: [],
description: ["Enables extra logging"]
},
"--config": %{
arguments: [{"file-path", "Path to custom config file", ["~/track/tasks.log"]}],
description: [
"Sets a custom config file",
"The file should contain parameters in 'config_key=value' format; the only config key currently supported is 'log_file_path'"
]
}
}
# Usage examples rendered in the "Examples" help section, keyed by command.
# Each command lists equivalent positional / --flag / key=value invocations.
# NOTE: the report examples use the `day` argument, matching the arguments
# declared for `report:` in @supported_commands (duration/day/week/month/task/overlap).
@examples %{
  add: %{
    description: "Adds a new task called 'dev', starting now with a duration of 30 minutes",
    examples: [
      "dev today now now+30m",
      "dev today now 30m",
      "--task dev --start-date today --start-time now --end-time now+30m",
      "--task dev --start-date today --start-time now --duration 30m",
      "task=dev start-date=today start-time=now end-time=now+30m",
      "task=dev start-date=today start-time=now duration=30m"
    ]
  },
  remove: %{
    description: "Removes an existing task with ID '56f3db20-...'",
    examples: [
      "56f3db20-88c9-44ba-a0f1-da78dc990b84"
    ]
  },
  update: %{
    description:
      "Updates an existing task with ID '56f3db20-...' to be called 'bookkeeping', starting yesterday with a duration of 45 minutes",
    examples: [
      "56f3db20-88c9-44ba-a0f1-da78dc990b84 bookkeeping today-1d 45m",
      "56f3db20-88c9-44ba-a0f1-da78dc990b84 --task bookkeeping --start-date today-1d --start-time now --duration 45m",
      "56f3db20-88c9-44ba-a0f1-da78dc990b84 task=bookkeeping start-date=today-1d start-time=now duration=45m"
    ]
  },
  start: %{
    description: "Starts a new active task called 'dev'",
    examples: [
      "dev"
    ]
  },
  stop: %{
    description: "Stops the currently active task",
    examples: [
      ""
    ]
  },
  list: %{
    description: "Lists all tasks in the last 30 days and sorts them by ascending duration",
    examples: [
      "today-30d today duration asc",
      "--from today-30d --to today --sort-by duration --order asc",
      "from=today-30d to=today sort-by=duration order=asc"
    ]
  },
  report: %{
    description:
      "Generates a report of the daily distribution of tasks, for all tasks in the last 10 and the next 5 days, with default sorting",
    examples: [
      # fixed: `report` accepts `day`, not `daily` (see @supported_commands)
      "day today-10d today+5d",
      "day --from today-10d --to today+5d",
      "day from=today-10d to=today+5d"
    ]
  },
  service: %{
    description: "Clears all tasks",
    examples: [
      "store clear"
    ]
  },
  legend: %{
    description: "Shows the colour legend",
    examples: [
      ""
    ]
  }
}
# Column width used to align command/option/argument names in help output.
@default_padding 12
# Left gutter ("            |") printed before continuation lines.
@prefix "#{String.pad_leading("", @default_padding)} |"
@doc """
Generates the application's usage message as a single newline-joined string.
"""
def generate_usage_message() do
  @app
  |> generate_usage_from_attributes(@supported_commands)
  |> Enum.join("\n")
end
@doc """
Generates the application's help message.

`for_command` is either `:all` or a command name as a string; returns
`{:ok, message}` with the sections joined by blank lines, or an
`{:error, reason}` tuple when the command is unknown.
"""
def generate_help_message(for_command) do
  result =
    generate_help_message_from_attributes(
      @app,
      @brief,
      @description,
      @supported_commands,
      @additional_options,
      @examples,
      @prefix,
      for_command
    )

  # A non-matching result (the error tuple) falls through unchanged.
  with {:ok, sections} <- result do
    {:ok, Enum.join(sections, "\n\n")}
  end
end
@doc """
Generates a bash_completion script covering all commands and extra options.
"""
def generate_bash_completion_script() do
  generate_bash_completion_script_from_attributes(@app, @supported_commands, @additional_options)
end
@doc """
Builds the usage lines for `app` from its supported-commands collection.

Returns a two-element list: the usage synopsis and the sorted,
comma-separated command list.
"""
def generate_usage_from_attributes(app, supported_commands) do
  command_names =
    supported_commands
    |> Enum.map(&elem(&1, 0))
    |> Enum.sort()

  [
    "Usage: #{app} <command> [arguments] [parameters]",
    "Commands: #{Enum.join(command_names, ", ")}"
  ]
end
@doc """
Generates a help message based on the supplied data.

Returns `{:ok, sections}` where `sections` is a list of pre-formatted
strings (name, description, commands, additional options, examples), or
`{:error, reason}` when `for_command` names a command that does not exist.
"""
def generate_help_message_from_attributes(
app,
brief,
description,
supported_commands,
additional_options,
examples,
prefix,
for_command \\ :all
) do
# When a single command is requested, narrow both the command map and the
# examples map down to that command (compared by string name).
{supported_commands, examples} =
case for_command do
:all ->
{supported_commands, examples}
command ->
supported_commands =
supported_commands |> Enum.filter(fn {k, _} -> k |> Atom.to_string() == command end)
examples = examples |> Enum.filter(fn {k, _} -> k |> Atom.to_string() == command end)
{supported_commands, examples}
end
brief = "\t#{brief}"
description =
description
|> Enum.map(fn description_line -> "\t#{description_line}" end)
|> Enum.join("\n")
# One multi-line entry per command: overview line, usage synopsis, then
# detailed argument and option listings.
commands =
supported_commands
|> Enum.map(fn {command, data} ->
command = command |> Atom.to_string()
{simple_args, detailed_args} = arguments_to_string(data[:arguments], prefix)
{simple_required_opts, detailed_required_opts} =
options_to_string(data[:options][:required], "required", prefix)
{simple_optional_opts, detailed_optional_opts} =
options_to_string(data[:options][:optional], "optional", prefix)
command_parameters = "#{simple_args}#{simple_required_opts}#{simple_optional_opts}"
command_description = description_to_string(data[:description], prefix)
command_overview = [
"#{command |> String.pad_leading(@default_padding) |> add_style(:bright)} | #{
command_description
}",
prefix,
"#{prefix} $ #{app} #{command |> add_style(:bright)}#{command_parameters}"
]
(command_overview ++ detailed_args ++ detailed_required_opts ++ detailed_optional_opts)
|> Enum.join("\n")
end)
# Global options (e.g. --verbose, --config) rendered the same way, but
# with a generic "<command> [arguments] [parameters]" synopsis.
additional_options =
additional_options
|> Enum.map(fn {option, data} ->
option = option |> Atom.to_string()
{simple_args, detailed_args} = arguments_to_string(data[:arguments], prefix)
option_description = description_to_string(data[:description], prefix)
option_overview = [
"#{option |> String.pad_leading(@default_padding) |> add_style(:bright)} | #{
option_description
}",
prefix,
"#{prefix} $ #{app} <command> [arguments] [parameters] #{option |> add_style(:bright)}#{
simple_args
}"
]
(option_overview ++ detailed_args)
|> Enum.join("\n")
end)
# Each example entry: an italicised description followed by one shell
# invocation per alternative syntax.
examples =
examples
|> Enum.map(fn {command, example_data} ->
command = command |> Atom.to_string()
alternatives =
example_data[:examples]
|> Enum.map(fn alternative ->
"\t $ #{app} #{command |> add_style(:bright)} #{alternative}"
end)
|> Enum.join("\n")
"\t#{example_data[:description] |> add_style(:italic)}\n#{alternatives}"
end)
# Prepend a bold section header to each non-empty section; empty sections
# are dropped entirely.
commands =
case commands do
[_ | _] -> ["Commands" |> add_style(:bright) | commands]
[] -> []
end
additional_options =
case additional_options do
[_ | _] -> ["Additional Options" |> add_style(:bright) | additional_options]
[] -> []
end
examples =
case examples do
[_ | _] -> ["Examples" |> add_style(:bright) | examples]
[] -> []
end
# No matching commands means the requested command does not exist.
case commands do
[_ | _] ->
{
:ok,
["Name" |> add_style(:bright), brief] ++
["Description" |> add_style(:bright), description] ++
commands ++ additional_options ++ examples
}
[] ->
{:error, "No help found for command [#{for_command}]"}
end
end
@doc """
Generates a bash_completion script based on the supplied data.

The script completes command names at the first position and, per
command, either positional arguments or the command's `--options`
(filtering out options already present on the command line).
"""
def generate_bash_completion_script_from_attributes(app, supported_commands, additional_options) do
commands = supported_commands |> Enum.map(fn {command, _} -> command |> Atom.to_string() end)
options = additional_options |> Enum.map(fn {command, _} -> command |> Atom.to_string() end)
# One bash `case` branch per command: complete its argument names right
# after the command, otherwise complete its not-yet-used --options.
commands_options =
supported_commands
|> Enum.map(fn {command, data} ->
command = command |> Atom.to_string()
args = data[:arguments] |> Enum.map(fn {name, _, _} -> name end)
required =
(data[:options][:required] || [])
|> Enum.flat_map(fn alt -> alt |> Enum.map(fn {name, _, _} -> name end) end)
optional =
(data[:options][:optional] || [])
|> Enum.flat_map(fn alt -> alt |> Enum.map(fn {name, _, _} -> name end) end)
opts = required ++ optional
opts = opts |> Enum.map(fn opt -> "--#{opt}" end)
# Commands with positional arguments complete those first; pure
# option-driven commands complete their options immediately.
args_or_opts =
case args do
[_ | _] -> args
[] -> opts
end
"""
| #{command})
| if [[ "${prev}" == "#{command}" ]] ; then
| COMPREPLY=( $(compgen -W "#{args_or_opts |> Enum.join(" ")}" -- ${cur}) )
| else
| declare -a opts=(#{opts |> Enum.join(" ")})
| declare -a filtered_opts=()
| for word in "${opts[@]}"; do [[ ! " ${COMP_WORDS[@]} " =~ " ${word} " ]] && filtered_opts+=(${word}); done
| opts="${filtered_opts[@]}"
| COMPREPLY=($(compgen -W "${opts}" -- ${cur}))
| fi
| return 0
| ;;
"""
end)
# The `|` margin is stripped by strip_margin/1 below.
"""
|# Automatically generated by `#{app}`
|_#{app}()
|{
| local cur prev opts base
| COMPREPLY=()
| command="${COMP_WORDS[1]}"
| cur="${COMP_WORDS[COMP_CWORD]}"
| prev="${COMP_WORDS[COMP_CWORD-1]}"
|
| opts="#{commands |> Enum.join(" ")} #{options |> Enum.join(" ")}"
|
| case "${command}" in
#{commands_options |> Enum.join("\n")}
| *)
| ;;
| esac
|
| COMPREPLY=($(compgen -W "${opts}" -- ${cur}))
| return 0
|}
|complete -F _#{app} #{app}
"""
|> strip_margin()
end
@doc """
Removes leading whitespace and the margin character (the first remaining
character on each line, e.g. `|`) from a multi-line string.
"""
def strip_margin(string) do
  string
  |> String.split("\n")
  |> Enum.map_join("\n", fn line ->
    line
    |> String.trim_leading()
    |> String.slice(1..-1)
  end)
end
@doc """
Converts the supplied command description data to a string to be presented
to the user.

The first line is italicised; any further lines are placed on their own
gutter-prefixed rows, separated from the headline by an empty gutter line.
"""
def description_to_string(description, prefix) do
  case description do
    [] ->
      ""

    [headline | continuation] ->
      styled_headline = add_style(headline, :italic)
      continuation_lines = Enum.map(continuation, &"#{prefix} #{&1}")

      case continuation_lines do
        [] -> styled_headline
        _ -> "#{styled_headline}\n#{prefix}\n#{Enum.join(continuation_lines, "\n")}"
      end
  end
end
@doc """
Converts the supplied arguments data to a string to be presented to the user.

A tuple containing simple and detailed arguments is returned - `{simple, detailed}`:
`simple` is an inline `" a|b|c"` synopsis fragment (or `""`), `detailed`
is a list of gutter-prefixed lines (or `[]` when there are no arguments).
"""
def arguments_to_string(arguments, prefix) do
simple_args = arguments |> Enum.map(fn {name, _, _} -> name end)
# Inline synopsis: alternatives separated by "|", with a leading space.
simple_args =
case simple_args do
[_ | _] -> " #{simple_args |> Enum.join("|")}"
[] -> ""
end
# One aligned "name - description (e.g. ...)" line per argument.
detailed_args =
arguments
|> Enum.map(fn {name, arg_description, examples} ->
"#{prefix} #{name |> String.pad_trailing(@default_padding)} - #{arg_description}#{
examples |> examples_to_string
}"
end)
# Prepend a section header only when there is at least one argument.
detailed_args =
case detailed_args do
[_ | _] -> [prefix, "#{prefix} Arguments:" | detailed_args]
[] -> []
end
{simple_args, detailed_args}
end
@doc """
Converts the supplied options data to a string to be presented to the user.

A tuple containing simple and detailed options is returned - `{simple, detailed}`.
`options` is a list of alternative-groups; `type` is `"required"` or
`"optional"` and controls both the synopsis brackets and the section header.
"""
def options_to_string(options, type, prefix) do
# Inline synopsis: "<a|b>" for required groups, "[<a|b>]" for optional.
simple_opts =
(options || [])
|> Enum.map(fn alternatives ->
alternatives = alternatives |> Enum.map(fn {name, _, _} -> name end) |> Enum.join("|")
case type do
"required" -> " <#{alternatives}>"
"optional" -> " [<#{alternatives}>]"
end
end)
|> Enum.join("")
# One aligned "--name - description (e.g. ...)" line per option.
detailed_opts =
(options || [])
|> Enum.flat_map(fn alternatives ->
alternatives
|> Enum.map(fn {name, alt_description, examples} ->
"#{prefix} --#{name |> String.pad_trailing(@default_padding)} - #{alt_description}#{
examples |> examples_to_string
}"
end)
end)
# Prepend a section header only when there is at least one option.
detailed_opts =
case detailed_opts do
[_ | _] -> [prefix, "#{prefix} Options (#{type}):" | detailed_opts]
[] -> []
end
{simple_opts, detailed_opts}
end
@doc """
Converts the supplied examples list to a string to be presented to the user.

Each example is quoted and italicised; the result is an `" (e.g. ...)"`
suffix, or `""` when there are no examples.
"""
def examples_to_string(examples) do
  case examples do
    [] ->
      ""

    _ ->
      rendered = Enum.map_join(examples, ", ", fn example -> add_style("\"#{example}\"", :italic) end)
      " (e.g. #{rendered})"
  end
end
@doc """
Adds the specified ANSI style to the supplied string (escape codes are
emitted even when stdout is not a terminal).
"""
def add_style(string, style), do: IO.ANSI.format([style, string], true)
end
| 29.096672 | 136 | 0.545098 |
1cc31a0764e9aeaecc4e9afd973cc18e490d4184 | 8,601 | ex | Elixir | samples/client/petstore/elixir/lib/swagger_petstore/api/fake.ex | swavans/swagger-codegen | 8f4d7d7ba3155b8ad8af0ebb196b1f87014f9c84 | [
"Apache-2.0"
] | null | null | null | samples/client/petstore/elixir/lib/swagger_petstore/api/fake.ex | swavans/swagger-codegen | 8f4d7d7ba3155b8ad8af0ebb196b1f87014f9c84 | [
"Apache-2.0"
] | null | null | null | samples/client/petstore/elixir/lib/swagger_petstore/api/fake.ex | swavans/swagger-codegen | 8f4d7d7ba3155b8ad8af0ebb196b1f87014f9c84 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule SwaggerPetstore.Api.Fake do
@moduledoc """
API calls for all endpoints tagged `Fake`.
"""
alias SwaggerPetstore.Connection
import SwaggerPetstore.RequestBuilder
@doc """
Test serialization of outer boolean types

## Parameters

- connection (SwaggerPetstore.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
  - :body (OuterBoolean): Input boolean as post body

## Returns

{:ok, %SwaggerPetstore.Model.OuterBoolean{}} on success
{:error, info} on failure
"""
@spec fake_outer_boolean_serialize(Tesla.Env.client, keyword()) :: {:ok, SwaggerPetstore.Model.OuterBoolean.t} | {:error, Tesla.Env.t}
def fake_outer_boolean_serialize(connection, opts \\ []) do
  # The only optional parameter is the request body itself.
  request =
    %{}
    |> method(:post)
    |> url("/fake/outer/boolean")
    |> add_optional_params(%{:"body" => :body}, opts)
    |> Enum.into([])

  connection
  |> Connection.request(request)
  |> decode(%SwaggerPetstore.Model.OuterBoolean{})
end
@doc """
Test serialization of object with outer number type

## Parameters

- connection (SwaggerPetstore.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
  - :body (OuterComposite): Input composite as post body

## Returns

{:ok, %SwaggerPetstore.Model.OuterComposite{}} on success
{:error, info} on failure
"""
@spec fake_outer_composite_serialize(Tesla.Env.client, keyword()) :: {:ok, SwaggerPetstore.Model.OuterComposite.t} | {:error, Tesla.Env.t}
def fake_outer_composite_serialize(connection, opts \\ []) do
  # The only optional parameter is the request body itself.
  request =
    %{}
    |> method(:post)
    |> url("/fake/outer/composite")
    |> add_optional_params(%{:"body" => :body}, opts)
    |> Enum.into([])

  connection
  |> Connection.request(request)
  |> decode(%SwaggerPetstore.Model.OuterComposite{})
end
@doc """
Test serialization of outer number types

## Parameters

- connection (SwaggerPetstore.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
  - :body (OuterNumber): Input number as post body

## Returns

{:ok, %SwaggerPetstore.Model.OuterNumber{}} on success
{:error, info} on failure
"""
@spec fake_outer_number_serialize(Tesla.Env.client, keyword()) :: {:ok, SwaggerPetstore.Model.OuterNumber.t} | {:error, Tesla.Env.t}
def fake_outer_number_serialize(connection, opts \\ []) do
  # The only optional parameter is the request body itself.
  request =
    %{}
    |> method(:post)
    |> url("/fake/outer/number")
    |> add_optional_params(%{:"body" => :body}, opts)
    |> Enum.into([])

  connection
  |> Connection.request(request)
  |> decode(%SwaggerPetstore.Model.OuterNumber{})
end
@doc """
Test serialization of outer string types

## Parameters

- connection (SwaggerPetstore.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
  - :body (OuterString): Input string as post body

## Returns

{:ok, %SwaggerPetstore.Model.OuterString{}} on success
{:error, info} on failure
"""
@spec fake_outer_string_serialize(Tesla.Env.client, keyword()) :: {:ok, SwaggerPetstore.Model.OuterString.t} | {:error, Tesla.Env.t}
def fake_outer_string_serialize(connection, opts \\ []) do
  # The only optional parameter is the request body itself.
  request =
    %{}
    |> method(:post)
    |> url("/fake/outer/string")
    |> add_optional_params(%{:"body" => :body}, opts)
    |> Enum.into([])

  connection
  |> Connection.request(request)
  |> decode(%SwaggerPetstore.Model.OuterString{})
end
@doc """
To test \"client\" model

To test \"client\" model

## Parameters

- connection (SwaggerPetstore.Connection): Connection to server
- body (Client): client model
- opts (KeywordList): [optional] Optional parameters

## Returns

{:ok, %SwaggerPetstore.Model.Client{}} on success
{:error, info} on failure
"""
@spec test_client_model(Tesla.Env.client, SwaggerPetstore.Model.Client.t, keyword()) :: {:ok, SwaggerPetstore.Model.Client.t} | {:error, Tesla.Env.t}
def test_client_model(connection, body, _opts \\ []) do
  request =
    %{}
    |> method(:patch)
    |> url("/fake")
    |> add_param(:body, :"body", body)
    |> Enum.into([])

  connection
  |> Connection.request(request)
  |> decode(%SwaggerPetstore.Model.Client{})
end
@doc """
Fake endpoint for testing various parameters 假端點 偽のエンドポイント 가짜 엔드 포인트
Fake endpoint for testing various parameters 假端點 偽のエンドポイント 가짜 엔드 포인트

## Parameters

- connection (SwaggerPetstore.Connection): Connection to server
- number (Float): None
- double (Float): None
- pattern_without_delimiter (String): None
- byte (String): None
- opts (KeywordList): [optional] Optional parameters
- :integer (Integer): None
- :int32 (Integer): None
- :int64 (Integer): None
- :float (Float): None
- :string (String): None
- :binary (String): None
- :date (DateTime): None
- :date_time (DateTime): None
- :password (String): None
- :callback (String): None

## Returns

{:ok, %{}} on success
{:error, info} on failure
"""
@spec test_endpoint_parameters(Tesla.Env.client, float(), float(), String.t, String.t, keyword()) :: {:ok, nil} | {:error, Tesla.Env.t}
def test_endpoint_parameters(connection, number, double, pattern_without_delimiter, byte, opts \\ []) do
# All optional parameters for this endpoint are sent as form fields.
optional_params = %{
:"integer" => :form,
:"int32" => :form,
:"int64" => :form,
:"float" => :form,
:"string" => :form,
:"binary" => :form,
:"date" => :form,
:"dateTime" => :form,
:"password" => :form,
:"callback" => :form
}
# Required parameters are also form fields on the POST body.
%{}
|> method(:post)
|> url("/fake")
|> add_param(:form, :"number", number)
|> add_param(:form, :"double", double)
|> add_param(:form, :"pattern_without_delimiter", pattern_without_delimiter)
|> add_param(:form, :"byte", byte)
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> decode(false)
end
@doc """
To test enum parameters
To test enum parameters

## Parameters

- connection (SwaggerPetstore.Connection): Connection to server
- opts (KeywordList): [optional] Optional parameters
- :enum_form_string_array (List[String]): Form parameter enum test (string array)
- :enum_form_string (String): Form parameter enum test (string)
- :enum_header_string_array (List[String]): Header parameter enum test (string array)
- :enum_header_string (String): Header parameter enum test (string)
- :enum_query_string_array (List[String]): Query parameter enum test (string array)
- :enum_query_string (String): Query parameter enum test (string)
- :enum_query_integer (Integer): Query parameter enum test (double)
- :enum_query_double (Float): Query parameter enum test (double)

## Returns

{:ok, %{}} on success
{:error, info} on failure
"""
@spec test_enum_parameters(Tesla.Env.client, keyword()) :: {:ok, nil} | {:error, Tesla.Env.t}
def test_enum_parameters(connection, opts \\ []) do
# Maps each optional parameter to where it is placed in the request.
# NOTE(review): `enum_query_double` is sent as :form despite the "Query
# parameter" description above — some petstore test specs deliberately
# declare it as formData; confirm against the generating spec before
# changing it to :query.
optional_params = %{
:"enum_form_string_array" => :form,
:"enum_form_string" => :form,
:"enum_header_string_array" => :headers,
:"enum_header_string" => :headers,
:"enum_query_string_array" => :query,
:"enum_query_string" => :query,
:"enum_query_integer" => :query,
:"enum_query_double" => :form
}
%{}
|> method(:get)
|> url("/fake")
|> add_optional_params(optional_params, opts)
|> Enum.into([])
|> (&Connection.request(connection, &1)).()
|> decode(false)
end
@doc """
test json serialization of form data

## Parameters

- connection (SwaggerPetstore.Connection): Connection to server
- param (String): field1
- param2 (String): field2
- opts (KeywordList): [optional] Optional parameters

## Returns

{:ok, %{}} on success
{:error, info} on failure
"""
@spec test_json_form_data(Tesla.Env.client, String.t, String.t, keyword()) :: {:ok, nil} | {:error, Tesla.Env.t}
def test_json_form_data(connection, param, param2, _opts \\ []) do
  # Both fields are sent as form parameters on a GET request.
  request =
    %{}
    |> method(:get)
    |> url("/fake/jsonFormData")
    |> add_param(:form, :"param", param)
    |> add_param(:form, :"param2", param2)
    |> Enum.into([])

  connection
  |> Connection.request(request)
  |> decode(false)
end
end
| 30.938849 | 151 | 0.650389 |
1cc341ac7d1bb4a3f75de08a37c3ba06fd2dde11 | 8,611 | exs | Elixir | apps/omg_watcher/test/integration/block_getter_test.exs | Pongch/elixir-omg | 8a33c246898b49cba62b847e0989d9b6c89f5106 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher/test/integration/block_getter_test.exs | Pongch/elixir-omg | 8a33c246898b49cba62b847e0989d9b6c89f5106 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher/test/integration/block_getter_test.exs | Pongch/elixir-omg | 8a33c246898b49cba62b847e0989d9b6c89f5106 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.Watcher.Integration.BlockGetterTest do
use ExUnitFixtures
use ExUnit.Case, async: false
use OMG.API.Fixtures
use OMG.API.Integration.Fixtures
use Plug.Test
use Phoenix.ChannelTest
alias OMG.API
alias OMG.API.Crypto
alias OMG.API.Utxo
require Utxo
alias OMG.Eth
alias OMG.JSONRPC.Client
alias OMG.Watcher.Eventer.Event
alias OMG.Watcher.Integration
alias OMG.Watcher.TestHelper
alias OMG.Watcher.Web.Channel
import ExUnit.CaptureLog
@moduletag :integration
@timeout 40_000
@eth Crypto.zero_address()
@eth_hex String.duplicate("00", 20)
@endpoint OMG.Watcher.Web.Endpoint
@tag fixtures: [:watcher_sandbox, :child_chain, :alice, :bob, :alice_deposits]
# End-to-end happy path: submit a transaction, wait for the watcher to
# fetch the block, check resulting UTXOs and emitted events, start an exit
# for alice's output, and verify the exited UTXO can no longer be spent.
test "get the blocks from child chain after sending a transaction and start exit", %{
alice: alice,
bob: bob,
alice_deposits: {deposit_blknum, _}
} do
{:ok, alice_address} = Crypto.encode_address(alice.addr)
# subscribe to alice's transfer topic to later assert pushed events
{:ok, _, _socket} =
subscribe_and_join(socket(), Channel.Transfer, TestHelper.create_topic("transfer", alice_address))
# spend the deposit: 7 back to alice, 3 to bob
tx = API.TestHelper.create_encoded([{deposit_blknum, 0, 0, alice}], @eth, [{alice, 7}, {bob, 3}])
{:ok, %{blknum: block_nr}} = Client.call(:submit, %{transaction: tx})
Integration.TestHelper.wait_until_block_getter_fetches_block(block_nr, @timeout)
encode_tx = Client.encode(tx)
# bob received output index 1 (amount 3)
assert [
%{
"amount" => 3,
"blknum" => ^block_nr,
"txindex" => 0,
"oindex" => 1,
"currency" => @eth_hex,
"txbytes" => ^encode_tx
}
] = get_utxos(bob)
# alice received output index 0 (amount 7)
assert [
%{
"amount" => 7,
"blknum" => ^block_nr,
"txindex" => 0,
"oindex" => 0,
"currency" => @eth_hex,
"txbytes" => ^encode_tx
}
] = get_utxos(alice)
{:ok, recovered_tx} = API.Core.recover_tx(tx)
{:ok, {block_hash, _}} = Eth.RootChain.get_child_chain(block_nr)
event_eth_height = get_block_submitted_event_height(block_nr)
# the watcher should have pushed both received and spent events for alice
address_received_event =
Client.encode(%Event.AddressReceived{
tx: recovered_tx,
child_blknum: block_nr,
child_block_hash: block_hash,
submited_at_ethheight: event_eth_height
})
address_spent_event =
Client.encode(%Event.AddressSpent{
tx: recovered_tx,
child_blknum: block_nr,
child_block_hash: block_hash,
submited_at_ethheight: event_eth_height
})
assert_push("address_received", ^address_received_event)
assert_push("address_spent", ^address_spent_event)
# start a root-chain exit for alice's output
%{
"utxo_pos" => utxo_pos,
"txbytes" => txbytes,
"proof" => proof,
"sigs" => sigs
} = Integration.TestHelper.get_exit_data(block_nr, 0, 0)
{:ok, txhash} =
Eth.RootChain.start_exit(
utxo_pos,
txbytes,
proof,
sigs,
alice.addr
)
{:ok, %{"status" => "0x1"}} = Eth.WaitFor.eth_receipt(txhash, @timeout)
{:ok, height} = Eth.get_ethereum_height()
utxo_pos = Utxo.position(block_nr, 0, 0) |> Utxo.Position.encode()
assert {:ok, [%{amount: 7, utxo_pos: utxo_pos, owner: alice.addr, currency: @eth}]} ==
Eth.RootChain.get_exits(0, height)
# exiting spends UTXO on child chain
# wait until the exit is recognized and attempt to spend the exited utxo
Process.sleep(4_000)
tx2 = API.TestHelper.create_encoded([{block_nr, 0, 0, alice}], @eth, [{alice, 7}])
{:error, {-32_603, "Internal error", "utxo_not_found"}} = Client.call(:submit, %{transaction: tx2})
end
# Finds the Ethereum height at which child-chain block `blknum` was
# submitted, by scanning root-chain BlockSubmitted events from height 1.
defp get_block_submitted_event_height(blknum) do
  {:ok, current_eth_height} = Eth.get_ethereum_height()
  {:ok, submissions} = Eth.RootChain.get_block_submitted_events({1, current_eth_height})

  # Exactly one submission is expected for the block number.
  [%{eth_height: submitted_at}] = Enum.filter(submissions, &(&1.blknum == blknum))
  submitted_at
end
  @tag fixtures: [:watcher_sandbox, :token, :child_chain, :alice, :alice_deposits]
  # End-to-end: spend an ERC-20 deposit on the child chain, then start a
  # standard exit for the resulting UTXO on the root chain.
  test "exit erc20, without challenging an invalid exit", %{
    token: token,
    alice: alice,
    alice_deposits: {_, token_deposit_blknum}
  } do
    # Alice sends her whole 10-token deposit back to herself.
    token_tx = API.TestHelper.create_encoded([{token_deposit_blknum, 0, 0, alice}], token, [{alice, 10}])
    # spend the token deposit
    {:ok, %{blknum: spend_token_child_block}} = Client.call(:submit, %{transaction: token_tx})
    Integration.TestHelper.wait_until_block_getter_fetches_block(spend_token_child_block, @timeout)
    # Exit data (inclusion proof, signatures, position) for UTXO (blknum, 0, 0).
    %{
      "txbytes" => txbytes,
      "proof" => proof,
      "sigs" => sigs,
      "utxo_pos" => utxo_pos
    } = Integration.TestHelper.get_exit_data(spend_token_child_block, 0, 0)
    {:ok, txhash} =
      Eth.RootChain.start_exit(
        utxo_pos,
        txbytes,
        proof,
        sigs,
        alice.addr
      )
    # The exit transaction must be mined successfully (receipt status "0x1").
    {:ok, %{"status" => "0x1"}} = Eth.WaitFor.eth_receipt(txhash, @timeout)
  end
@tag fixtures: [:watcher_sandbox, :alice]
test "diffrent hash send by child chain", %{alice: alice} do
defmodule BadChildChainHash do
use JSONRPC2.Server.Handler
def empty_block, do: [] |> API.Block.hashed_txs_at(1000)
def different_hash, do: <<0::256>>
def handle_request(_, _) do
Client.encode(%API.Block{empty_block() | hash: different_hash()})
end
end
{:ok, _, _socket} = subscribe_and_join(socket(), Channel.Byzantine, "byzantine")
JSONRPC2.Servers.HTTP.http(BadChildChainHash, port: Application.get_env(:omg_jsonrpc, :omg_api_rpc_port))
assert capture_log(fn ->
{:ok, _txhash} = Eth.RootChain.submit_block(BadChildChainHash.different_hash(), 1, 20_000_000_000)
assert_block_getter_down()
end) =~ inspect(:incorrect_hash)
invalid_block_event =
Client.encode(%Event.InvalidBlock{
error_type: :incorrect_hash,
hash: BadChildChainHash.different_hash(),
number: 1000
})
assert_push("invalid_block", ^invalid_block_event)
JSONRPC2.Servers.HTTP.shutdown(BadChildChainHash)
end
  @tag fixtures: [:watcher_sandbox]
  # A fake child-chain server publishes a block containing a transaction that
  # spends a UTXO which was never created; the watcher's State rejects it and
  # an `invalid_block` byzantine event with :tx_execution is emitted.
  test "bad transaction with not existing utxo, detected by interactions with State" do
    defmodule BadChildChainTransaction do
      use JSONRPC2.Server.Handler
      # using module attribute to have a stable alice (we can't use fixtures, because modules don't see the parent
      # test's bindings), so the same entity is used for every request.
      @alice API.TestHelper.generate_entity()
      def block_with_incorrect_transaction do
        alice = @alice
        # References UTXO {1, 0, 0}, which does not exist on the child chain.
        recovered = API.TestHelper.create_recovered([{1, 0, 0, alice}], Crypto.zero_address(), [{alice, 10}])
        API.Block.hashed_txs_at([recovered], 1000)
      end
      def handle_request(_, _) do
        Client.encode(block_with_incorrect_transaction())
      end
    end
    {:ok, _, _socket} = subscribe_and_join(socket(), Channel.Byzantine, "byzantine")
    JSONRPC2.Servers.HTTP.http(
      BadChildChainTransaction,
      port: Application.get_env(:omg_jsonrpc, :omg_api_rpc_port)
    )
    %API.Block{hash: hash} = BadChildChainTransaction.block_with_incorrect_transaction()
    assert capture_log(fn ->
             {:ok, _txhash} = Eth.RootChain.submit_block(hash, 1, 20_000_000_000)
             assert_block_getter_down()
           end) =~ inspect(:tx_execution)
    invalid_block_event =
      Client.encode(%Event.InvalidBlock{
        error_type: :tx_execution,
        hash: hash,
        number: 1000
      })
    assert_push("invalid_block", ^invalid_block_event)
    JSONRPC2.Servers.HTTP.shutdown(BadChildChainTransaction)
  end
  # Blocks until the BlockGetter process exits — the expected reaction after a
  # byzantine condition is detected.
  defp assert_block_getter_down do
    :ok = TestHelper.wait_for_process(Process.whereis(OMG.Watcher.BlockGetter))
  end
defp get_utxos(%{addr: address}) do
{:ok, address_encode} = Crypto.encode_address(address)
assert %{
"result" => "success",
"data" => utxos
} = TestHelper.rest_call(:get, "utxos?address=#{address_encode}")
utxos
end
end
| 30.753571 | 120 | 0.656021 |
1cc365911ad25d5535b609d67ff09af426cbc904 | 461 | ex | Elixir | example/web/router.ex | poplarhq/ua_parser | 9246082a67c11fbcbeb7e409ffdc7ae4aef86612 | [
"Apache-2.0"
] | null | null | null | example/web/router.ex | poplarhq/ua_parser | 9246082a67c11fbcbeb7e409ffdc7ae4aef86612 | [
"Apache-2.0"
] | null | null | null | example/web/router.ex | poplarhq/ua_parser | 9246082a67c11fbcbeb7e409ffdc7ae4aef86612 | [
"Apache-2.0"
] | null | null | null | defmodule Example.Router do
use Example.Web, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", Example do
pipe_through :browser # Use the default browser stack
get "/user_agent", FooController, :user_agent
get "/", PageController, :index
end
end
| 20.043478 | 57 | 0.687636 |
1cc36b831d0c119f470a4b08d2bb55112e7d4f9f | 999 | exs | Elixir | mix.exs | allenan/coinbase_commerce | cbd97484fe559b47cc62d6bf068094942d8e0d86 | [
"MIT"
] | 4 | 2018-08-13T20:41:37.000Z | 2021-11-11T04:58:58.000Z | mix.exs | allenan/coinbase_commerce | cbd97484fe559b47cc62d6bf068094942d8e0d86 | [
"MIT"
] | null | null | null | mix.exs | allenan/coinbase_commerce | cbd97484fe559b47cc62d6bf068094942d8e0d86 | [
"MIT"
] | null | null | null | defmodule CoinbaseCommerce.MixProject do
use Mix.Project
def project do
[
app: :coinbase_commerce,
name: "coinbase_commerce",
description: "Simple Elixir wrapper for the Coinbase Commerce API",
source_url: "https://github.com/allenan/coinbase_commerce",
version: "0.3.0",
elixir: "~> 1.7",
start_permanent: Mix.env() == :prod,
deps: deps(),
package: package()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:httpoison, "~> 1.0"},
{:poison, "~> 3.1"},
{:ex_doc, "~> 0.19", only: :dev, runtime: false}
]
end
defp package do
[
licenses: ["MIT"],
links: %{
"GitHub Repo" => "https://github.com/allenan/coinbase_commerce",
"Official Docs" => "https://commerce.coinbase.com/docs/"
},
]
end
end
| 22.704545 | 73 | 0.582583 |
1cc37729eab1c535f0cd79e56c63e24f67622194 | 7,721 | ex | Elixir | lib/vintage_net/power_manager.ex | axelson/vintage_net | 68866ff223fa40e7ad6fd4d9fb2f7960cf51b1ca | [
"Apache-2.0"
] | 85 | 2019-05-09T14:54:38.000Z | 2022-02-08T16:52:04.000Z | lib/vintage_net/power_manager.ex | axelson/vintage_net | 68866ff223fa40e7ad6fd4d9fb2f7960cf51b1ca | [
"Apache-2.0"
] | 132 | 2019-05-09T15:57:59.000Z | 2022-02-28T16:31:22.000Z | lib/vintage_net/power_manager.ex | axelson/vintage_net | 68866ff223fa40e7ad6fd4d9fb2f7960cf51b1ca | [
"Apache-2.0"
] | 14 | 2019-07-08T19:18:23.000Z | 2022-02-08T16:52:05.000Z | defmodule VintageNet.PowerManager do
@moduledoc """
This is a behaviour for implementing platform-specific power management.
From VintageNet's point of view, network devices have the following
lifecycle:
```
off ---> on ---> powering-off ---> off
```
Power management does not necessarily mean controlling the power. The end
effect should be similar, since VintageNet will try to toggle the power off
and on if the network interface doesn't seem to be working. For example,
unloading the kernel module for the network device on "power off" and loading
it on "power on" may have the desired effect of getting a network interface
unstuck.
When a device is "on", VintageNet expects to be regularly told that the
device is working ok. Working ok is device dependent, but could be something
like the device has transmitted and received data. If VintageNet is not told
that the device is working for a long enough time, it will reset the device
by powering it off and then back on again.
VintageNet calls functions here based on how it wants to transition a device.
VintageNet maintains the device's power status internally, so implementations
can blindly do what VintageNet tells them too in most cases. Powering on and
off can be asynchronous to these function calls. VintageNet uses the presence
of the networking interface (like "wlan0") to determine when the device is
really available for networking.
The following timeouts are important to consider:
1. `time_to_power_off`
2. `power_on_hold_time`
3. `min_power_off_time`
4. `watchdog_timeout`
The `time_to_power_off` specifies the time in the `powering-off` state. This
is the maximum time to allow for a graceful shutdown. VintageNet won't bother
the device until that time has expired. That means that if there's a request
to use the device, it will wait the `powering-off` time before calling
`finish_power_off` and then it will power the device back on. Device app
notes may have recommendations for this time.
The `power_on_hold_time` specifies how much time a device should be in the
`powered-on` state before it is ok to power off again. This allows devices
some time to initialize and recover on their own.
The `min_power_off_time` specifies how long the device should remain powered
off before it is powered back on.
Finally, `watchdog_timeout` specifies how long to wait between notifications
that the device is ok. Code reports that a device is ok by calling
`VintageNet.PowerManager.PMControl.pet_watchdog/1`.
While normal Erlang supervision expects that it can restart processes
immediately and without regard to how long they have been running, bad things
can happen to hardware if too aggressively restarted. Devices also initialize
asynchronously so it's hard to know when they're fully available and some
flakiness may be naturally due to VintageNet not knowing how to wait for a
component to finish initialization. Please review your network device's power
management guidelines before too aggressively reducing hold times. Cellular
devices, in particular, want to signal their disconnection from the network
to the tower and flush any unsaved configuration changes to Flash before
power removal.
Here's an example for a cellular device with a reset line connected to it:
* `power_on` - De-assert the reset line. Return a `power_on_hold_time` of 10
minutes
* `start_powering_off` - Open the UART and send the power down command to the
modem. Return a `time_to_power_off` of 1 minute.
* `power_off` - Assert the reset line and return that power shouldn't be turned
back on for another 10 seconds.
PowerManager implementation lifetimes are the same as VintageNet's. In other
words, they start and end with VintageNet. This is unlike a network interface
which runs only as its existence and configuration allow. As such, VintageNet
needs to know about all PowerManager implementations in its application
environment. For example, add something like this to your `config.exs`:
```elixir
config :vintage_net,
power_managers: [{MyCellularPM, [ifname: "ppp0", watchdog_timeout: 60_000, reset_gpio: 123]}]
```
Each tuple is the implementation's module name and init arguments. VintageNet
requires `:ifname` to be set. If you're managing the power for an interface
with a dynamic name, enable predictable interface naming with `VintageNet`
and use that name. The `watchdog_timeout` parameter is optional and defaults
to one minute.
"""
@doc """
Initialize state for managing the power to the specified interface
This is called on start and if the power management GenServer restarts. It
should not assume that hardware is powered down.
IMPORTANT: VintageNet assumes that `init/1` runs quickly and succeeds. Errors
and exceptions from calling `init/1` are handled by disabling the PowerManager.
The reason is that VintageNet has no knowledge on how to recover and disabling
a power manager was deemed less bad that having supervision tree failures
propagate upwards to terminate VintageNet. Messages are logged if this does
happen.
"""
@callback init(args :: keyword()) :: {:ok, state :: any()}
@doc """
Power on the hardware for a network interface
The function should turn on power rails, deassert reset lines, load kernel
modules or do whatever else is necessary to make the interface show up in
Linux.
Failure handling is not supported by VintageNet yet, so if power up can fail
and the right handling for that is to try again later, then this function
should do that.
It is ok for this function to return immediately. When the network interface
appears, VintageNet will start trying to use it.
The return tuple should include the number of seconds VintageNet should wait
before trying to power down the module again. This value should be
sufficiently large to avoid getting into loops where VintageNet gives up on a
network interface before it has initialized. 10 minutes (600 seconds), for
example, is a reasonable setting.
"""
@callback power_on(state :: any()) ::
{:ok, next_state :: any(), hold_time :: non_neg_integer()}
@doc """
Start powering off the hardware for a network interface
This function should start a graceful shutdown of the network interface
hardware. It may return immediately. The return value specifies how long in
seconds VintageNet should wait before calling `power_off/2`. The idea is that
a graceful power off should be allowed some time to complete, but not
forever.
"""
@callback start_powering_off(state :: any()) ::
{:ok, next_state :: any(), time_to_power_off :: non_neg_integer()}
@doc """
Power off the hardware
This function should finish powering off the network interface hardware. Since
this is called after the graceful power down should have completed, it should
forcefully turn off the power to the hardware.
The implementation also returns a time that power must remain off. `power_on/1`
won't be called until that time expires.
"""
@callback power_off(state :: any()) ::
{:ok, next_state :: any(), min_off_time :: non_neg_integer()}
@doc """
Handle other messages
All unknown messages sent to the power management `GenServer` come here. This
callback is similar to `c:GenServer.handle_info/2`.
To receive your own messages here, send them to `self()` in code run in any
of the other callbacks. Another option is to call
`VintageNet.PowerManager.PMControl.send_message/2`
"""
@callback handle_info(msg :: any(), state :: any()) :: {:noreply, new_state :: any()}
end
| 45.417647 | 97 | 0.756249 |
1cc37ac3aeaf3831fc24d040cb30a5b414a489ab | 3,488 | exs | Elixir | test/mock_test.exs | azhi/mock | 77b5b6758991b348ecedb195cd884bf66a019bdb | [
"MIT"
] | null | null | null | test/mock_test.exs | azhi/mock | 77b5b6758991b348ecedb195cd884bf66a019bdb | [
"MIT"
] | null | null | null | test/mock_test.exs | azhi/mock | 77b5b6758991b348ecedb195cd884bf66a019bdb | [
"MIT"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
defmodule MockTest do
use ExUnit.Case, async: false
import Mock
setup_all do
foo = "bar"
{:ok, foo: foo}
end
test "simple mock" do
with_mock String,
[reverse: fn(x) -> 2*x end] do
assert String.reverse(3) == 6
end
end
test "mock functions with multiple returns" do
with_mock(Map, [
get: fn
(%{}, "http://example.com") -> "<html>Hello from example.com</html>"
(%{}, "http://example.org") -> "<html>example.org says hi</html>"
end
]) do
assert Map.get(%{}, "http://example.com") == "<html>Hello from example.com</html>"
assert Map.get(%{}, "http://example.org") == "<html>example.org says hi</html>"
end
end
test "multiple mocks" do
with_mocks([
{Map,
[],
[get: fn(%{}, "http://example.com") -> "<html></html>" end]},
{String,
[],
[reverse: fn(x) -> 2*x end,
length: fn(_x) -> :ok end]}
]) do
assert Map.get(%{}, "http://example.com") == "<html></html>"
assert String.reverse(3) == 6
assert String.length(3) == :ok
end
end
test "mock fuctions with different arity" do
with_mock String,
[slice: fn(string, _range) -> string end,
slice: fn(string, _range, _len) -> string end]
do
assert String.slice("test", 1..3) == "test"
assert String.slice("test", 1, 3) == "test"
end
end
test "mock returns the result" do
result = with_mock String,
[reverse: fn(x) -> 2*x end] do
assert String.reverse(3) == 6
String.reverse(3)
end
assert result == 6
end
test "called" do
with_mock String,
[reverse: fn(x) -> 2*x end,
length: fn(_x) -> :ok end] do
String.reverse 3
assert :meck.called String, :reverse, [3]
assert called String.reverse(3)
refute called String.reverse(2)
refute called String.length(3)
end
end
test "assert_called" do
with_mock String,
[reverse: fn(x) -> 2*x end,
length: fn(_x) -> :ok end] do
String.reverse(3)
assert_called(String.reverse(3))
try do
"This should never be tested" = assert_called(String.reverse(2))
rescue
error in [ExUnit.AssertionError] ->
"""
Expected call but did not receive it. Calls which were received:
0. Elixir.String.reverse(3) (returned 6)\
""" = error.message
end
end
end
test_with_mock "test_with_mock",
String,
[reverse: fn(_x) -> :ok end] do
assert String.reverse 3
assert called String.reverse(3)
refute called String.reverse(4)
end
test_with_mock "test_with_mock with context", %{foo: foo}, String, [],
[reverse: fn(_x) -> :ok end] do
assert String.reverse 3
assert foo == "bar"
assert called String.reverse(3)
refute called String.reverse(4)
end
test_with_mock "passthrough", Map, [:passthrough],
[] do
hd = Map.put(Map.new(), :a, 1)
assert Map.get(hd, :a) == 1
assert called Map.new()
assert called Map.get(hd, :a)
refute called Map.get(hd, :b)
end
test "restore after exception" do
assert String.downcase("A") == "a"
try do
with_mock String,
[downcase: fn(x) -> x end] do
assert String.downcase("A") == "A"
raise "some error"
end
rescue
RuntimeError -> :ok
end
assert String.downcase("A") == "a"
end
end
| 25.093525 | 88 | 0.571101 |
1cc3e03e73215875f6e45e2f4fcdd82188a66998 | 568 | exs | Elixir | mix.exs | pcranaway/metro | 99c17e3f59b54affaec615c9f044b294be7064a6 | [
"Apache-2.0"
] | 1 | 2022-01-23T15:49:13.000Z | 2022-01-23T15:49:13.000Z | mix.exs | pcranaway/metro | 99c17e3f59b54affaec615c9f044b294be7064a6 | [
"Apache-2.0"
] | null | null | null | mix.exs | pcranaway/metro | 99c17e3f59b54affaec615c9f044b294be7064a6 | [
"Apache-2.0"
] | null | null | null | defmodule Metro.MixProject do
use Mix.Project
def project do
[
app: :metro,
version: "0.1.0",
elixir: "~> 1.10",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
]
end
end
| 19.586207 | 87 | 0.573944 |
1cc421ad8cf97b609cbeeced18d28e1ffbef6dd3 | 1,046 | exs | Elixir | machine_translation/MorpHIN/Learned/Resources/Set4/TrainingInstances/35.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | machine_translation/MorpHIN/Learned/Resources/Set4/TrainingInstances/35.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | machine_translation/MorpHIN/Learned/Resources/Set4/TrainingInstances/35.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | **EXAMPLE FILE**
cm cm noun cm adjective;
conj noun SYM noun adjective;
pn cm pnoun cm adjective;
pnoun conj noun adjective adjective;
SYM adjective noun cm adjective;
verb conj adverb particle adjective;
cm adjective noun cm adjective;
cardinal adjective noun cm adjective;
cm adjective noun cm adjective;
verb_aux SYM noun cm adjective;
demonstrative cardinal adjective noun adjective;
pnoun cm noun cm adjective;
verb_aux conj pnoun pnoun adjective;
conj pn noun cm adjective;
pnoun cm noun cm adjective;
pnoun cm noun cm adjective;
pnoun cm adjective noun adjective;
verb_aux SYM noun pnoun adjective;
pnoun SYM noun pnoun adjective;
conj SYM noun cm adjective;
verb_aux SYM noun noun adjective;
verb pn noun cm adjective;
SYM adjective noun cm adjective;
pnoun cm noun cm adjective;
SYM SYM adjective noun adjective;
verb_aux conj noun cm adjective;
verb_aux conj noun cm adjective;
cardinal adjective noun cm adjective;
cm verb noun cm adjective;
noun conj noun cm adjective;
pn adjective noun cm adjective;
| 30.764706 | 49 | 0.785851 |
1cc45300992900fb6282ffbb3eac8675230d5658 | 2,662 | exs | Elixir | test/swotex_test.exs | maartenvanvliet/swotex | 960ae3a1214c832af400b2b517ee5846e234cace | [
"MIT"
] | 7 | 2018-08-18T09:16:27.000Z | 2021-01-16T23:12:33.000Z | test/swotex_test.exs | maartenvanvliet/swotex | 960ae3a1214c832af400b2b517ee5846e234cace | [
"MIT"
] | 15 | 2020-03-30T04:13:05.000Z | 2022-02-28T04:02:57.000Z | test/swotex_test.exs | maartenvanvliet/swotex | 960ae3a1214c832af400b2b517ee5846e234cace | [
"MIT"
] | null | null | null | defmodule SwotExTest do
use ExUnit.Case, async: true
test "recognizes academic email addresses and domains" do
assert_academic("lreilly@stanford.edu")
assert_academic("LREILLY@STANFORD.EDU")
assert_academic("Lreilly@Stanford.Edu")
assert_academic("lreilly@slac.stanford.edu")
assert_academic("lreilly@strath.ac.uk")
assert_academic("lreilly@soft-eng.strath.ac.uk")
assert_academic("lee@ugr.es")
assert_academic("lee@uottawa.ca")
assert_academic("lee@mother.edu.ru")
assert_academic("lee@ucy.ac.cy")
refute_academic("lee@leerilly.net")
refute_academic("lee@gmail.com")
refute_academic("lee@stanford.edu.com")
refute_academic("lee@strath.ac.uk.com")
assert_academic("stanford.edu")
assert_academic("slac.stanford.edu")
assert_academic("www.stanford.edu")
assert_academic("http://www.stanford.edu")
assert_academic("https://www.stanford.edu")
assert_academic("//www.stanford.edu")
assert_academic("http://www.stanford.edu:9393")
assert_academic("strath.ac.uk")
assert_academic("soft-eng.strath.ac.uk")
assert_academic("ugr.es")
assert_academic("uottawa.ca")
assert_academic("mother.edu.ru")
assert_academic("ucy.ac.cy")
refute_academic("leerilly.net")
refute_academic("gmail.com")
refute_academic("stanford.edu.com")
refute_academic("strath.ac.uk.com")
refute_academic(nil)
refute_academic("")
refute_academic("the")
assert_academic(" stanford.edu")
assert_academic("lee@strath.ac.uk ")
refute_academic(" gmail.com ")
assert_academic("lee@stud.uni-corvinus.hu")
# overkill
assert_academic("lee@harvard.edu")
assert_academic("lee@mail.harvard.edu")
end
test "not err on tld-only domains" do
refute_academic(".com")
end
test "does not err on invalid domains" do
refute_academic("foo@bar.invalid")
end
test "fail blacklisted domains" do
["si.edu", " si.edu ", "imposter@si.edu", "foo.si.edu", "america.edu", "folger.edu"]
|> Enum.each(fn domain ->
refute_academic(domain)
end)
end
test "returns name of valid institution" do
assert "University of Strathclyde", SwotEx.institution_name("lreilly@cs.strath.ac.uk")
assert "BRG Fadingerstraße Linz, Austria", SwotEx.institution_name("lreilly@fadi.at")
end
test "returns nil when institution invalid" do
refute SwotEx.institution_name("foo@shop.com")
end
defp assert_academic(domain) do
assert SwotEx.is_academic?(domain), "#{domain} should be confirmed"
end
defp refute_academic(domain) do
refute SwotEx.is_academic?(domain), "#{domain} should be denied"
end
end
| 30.597701 | 90 | 0.706236 |
1cc4647fafc628ca0e1e080842f7b9fb488384f3 | 144 | ex | Elixir | lib/wong_bejo_web/views/layout_view.ex | fossabot/web.bejoistic | edd672a8f125180397b89e1f5ee469b1f2344e7f | [
"MIT"
] | null | null | null | lib/wong_bejo_web/views/layout_view.ex | fossabot/web.bejoistic | edd672a8f125180397b89e1f5ee469b1f2344e7f | [
"MIT"
] | null | null | null | lib/wong_bejo_web/views/layout_view.ex | fossabot/web.bejoistic | edd672a8f125180397b89e1f5ee469b1f2344e7f | [
"MIT"
] | null | null | null | defmodule WongBejoWeb.LayoutView do
use WongBejoWeb, :view
def production? do
Application.get_env(:wong_bejo, :env) == :prod
end
end
| 18 | 50 | 0.729167 |
1cc46da055f810daf4306183636f8d1cad0028a4 | 807 | exs | Elixir | test/channels/game_channel_test.exs | deerob4/words-with-enemies | f9de13d599bdecc71007db57d3c21651b9be7463 | [
"MIT"
] | 1 | 2015-02-15T23:50:29.000Z | 2015-02-15T23:50:29.000Z | test/channels/game_channel_test.exs | deerob4/words-with-enemies | f9de13d599bdecc71007db57d3c21651b9be7463 | [
"MIT"
] | null | null | null | test/channels/game_channel_test.exs | deerob4/words-with-enemies | f9de13d599bdecc71007db57d3c21651b9be7463 | [
"MIT"
] | null | null | null | defmodule WordsWithEnemies.GameChannelTest do
use WordsWithEnemies.ChannelCase
alias WordsWithEnemies.GameChannel
setup do
{:ok, _, socket} =
socket("user_id", %{some: :assign})
|> subscribe_and_join(GameChannel, "games:lobby")
{:ok, socket: socket}
end
test "ping replies with status ok", %{socket: socket} do
ref = push socket, "ping", %{"hello" => "there"}
assert_reply ref, :ok, %{"hello" => "there"}
end
test "shout broadcasts to games:lobby", %{socket: socket} do
push socket, "shout", %{"hello" => "all"}
assert_broadcast "shout", %{"hello" => "all"}
end
test "broadcasts are pushed to the client", %{socket: socket} do
broadcast_from! socket, "broadcast", %{"some" => "data"}
assert_push "broadcast", %{"some" => "data"}
end
end
| 27.827586 | 66 | 0.639405 |
1cc4707954c9f7aab4b1bd343f53280081f581a6 | 393 | exs | Elixir | config/environment/test.exs | Fire-Dragon-DoL/esp_ex | 0cd95de570ed7963744b298ad403fe4e1947dd2b | [
"MIT"
] | null | null | null | config/environment/test.exs | Fire-Dragon-DoL/esp_ex | 0cd95de570ed7963744b298ad403fe4e1947dd2b | [
"MIT"
] | null | null | null | config/environment/test.exs | Fire-Dragon-DoL/esp_ex | 0cd95de570ed7963744b298ad403fe4e1947dd2b | [
"MIT"
] | null | null | null | use Mix.Config
config :logger,
backends: [],
utc_log: true,
compile_time_purge_level: :debug
config :delugex, Delugex.MessageStore.Postgres, notify: false
config :delugex, Delugex.MessageStore.Postgres.Repo,
pool: Ecto.Adapters.SQL.Sandbox,
url:
System.get_env("TEST_DELUGEX_DATABASE_URL") ||
"postgres://message_store:message_store@localhost/delugex_test?pool_size=15"
| 26.2 | 82 | 0.763359 |
1cc4b4d4844b6aaf60c076089291df6e137e8681 | 1,111 | exs | Elixir | config/config.exs | samacs/xslt | 503f4ca7de86eb63643c8e6288dd961e2f4c167e | [
"MIT"
] | 10 | 2017-03-08T16:21:58.000Z | 2021-05-30T12:32:25.000Z | config/config.exs | samacs/xslt | 503f4ca7de86eb63643c8e6288dd961e2f4c167e | [
"MIT"
] | 4 | 2017-11-06T12:02:29.000Z | 2021-05-22T13:38:11.000Z | config/config.exs | samacs/xslt | 503f4ca7de86eb63643c8e6288dd961e2f4c167e | [
"MIT"
] | 7 | 2017-04-09T01:37:37.000Z | 2021-05-21T11:01:22.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :xslt, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:xslt, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 35.83871 | 73 | 0.749775 |
1cc4ef0ba34f842c91180f426f1833d2825728ed | 1,713 | ex | Elixir | lib/remote_points_web/telemetry.ex | DianaOlympos/remote_test | e222d4e937789871baab3a7b4fd8428b714c1af4 | [
"MIT"
] | 1 | 2020-09-18T03:32:45.000Z | 2020-09-18T03:32:45.000Z | lib/remote_points_web/telemetry.ex | DianaOlympos/remote_test | e222d4e937789871baab3a7b4fd8428b714c1af4 | [
"MIT"
] | null | null | null | lib/remote_points_web/telemetry.ex | DianaOlympos/remote_test | e222d4e937789871baab3a7b4fd8428b714c1af4 | [
"MIT"
] | null | null | null | defmodule RemotePointsWeb.Telemetry do
use Supervisor
import Telemetry.Metrics
def start_link(arg) do
Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
end
@impl true
def init(_arg) do
children = [
{:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
# Add reporters as children of your supervision tree.
# {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
]
Supervisor.init(children, strategy: :one_for_one)
end
def metrics do
[
# Phoenix Metrics
summary("phoenix.endpoint.stop.duration",
unit: {:native, :millisecond}
),
summary("phoenix.router_dispatch.stop.duration",
tags: [:route],
unit: {:native, :millisecond}
),
# Database Metrics
summary("remote_points.repo.query.total_time", unit: {:native, :millisecond}),
summary("remote_points.repo.query.decode_time", unit: {:native, :millisecond}),
summary("remote_points.repo.query.query_time", unit: {:native, :millisecond}),
summary("remote_points.repo.query.queue_time", unit: {:native, :millisecond}),
summary("remote_points.repo.query.idle_time", unit: {:native, :millisecond}),
# VM Metrics
summary("vm.memory.total", unit: {:byte, :kilobyte}),
summary("vm.total_run_queue_lengths.total"),
summary("vm.total_run_queue_lengths.cpu"),
summary("vm.total_run_queue_lengths.io")
]
end
defp periodic_measurements do
[
# A module, function and arguments to be invoked periodically.
# This function must call :telemetry.execute/3 and a metric must be added above.
# {RemotePointsWeb, :count_users, []}
]
end
end
| 31.722222 | 86 | 0.673088 |
1cc4f1d14c95d4fde7f1bc185a4d881faddc8533 | 999 | ex | Elixir | lib/schemas/server/container.ex | Kintull/elidactyl | 9a051ed511ed92fa7578038784baa73288f1312b | [
"MIT"
] | 6 | 2020-04-28T21:38:40.000Z | 2022-02-13T01:04:10.000Z | lib/schemas/server/container.ex | Kintull/elidactyl | 9a051ed511ed92fa7578038784baa73288f1312b | [
"MIT"
] | 1 | 2021-03-16T10:39:32.000Z | 2021-03-16T10:39:32.000Z | lib/schemas/server/container.ex | Kintull/elidactyl | 9a051ed511ed92fa7578038784baa73288f1312b | [
"MIT"
] | null | null | null | defmodule Elidactyl.Schemas.Server.Container do
@moduledoc false
alias Ecto.Changeset
alias Elidactyl.Utils
alias Elidactyl.Response.Parser
use Ecto.Schema
@behaviour Parser
@type t :: %__MODULE__{
startup_command: binary | nil,
image: binary | nil,
environment: Parser.json_map | nil,
installed: boolean | nil,
}
@derive {Jason.Encoder, only: [:startup_command, :image, :environment, :installed]}
@primary_key false
embedded_schema do
field :startup_command, :string
field :image, :string
field :environment, :map
field :installed, :boolean
end
@spec changeset(t(), map) :: Changeset.t()
def changeset(struct, params) do
struct
|> Changeset.cast(params, [:startup_command, :image, :environment, :installed])
|> Changeset.validate_required([:startup_command, :image, :environment])
end
@impl Parser
def parse(%{} = attributes) do
struct(__MODULE__, Utils.keys_to_atoms(attributes, ~w[environment]))
end
end
| 24.365854 | 85 | 0.6997 |
1cc50a5982d747fd1df03694ea3fd85dcf823809 | 4,401 | ex | Elixir | lib/ex338_web/controllers/trade_controller.ex | axelclark/ex338 | 3fb3c260d93bda61f7636ee1a677770d2dc1b89a | [
"MIT"
] | 17 | 2016-12-22T06:39:26.000Z | 2021-01-20T13:51:13.000Z | lib/ex338_web/controllers/trade_controller.ex | axelclark/ex338 | 3fb3c260d93bda61f7636ee1a677770d2dc1b89a | [
"MIT"
] | 608 | 2016-08-06T18:57:58.000Z | 2022-03-01T02:48:17.000Z | lib/ex338_web/controllers/trade_controller.ex | axelclark/ex338 | 3fb3c260d93bda61f7636ee1a677770d2dc1b89a | [
"MIT"
] | 6 | 2017-11-21T22:35:45.000Z | 2022-01-11T21:37:40.000Z | defmodule Ex338Web.TradeController do
use Ex338Web, :controller
alias Ex338.{DraftPicks, FantasyLeagues, Trades, FantasyTeams, Accounts}
alias Ex338Web.{Authorization, TradeEmail, Mailer}
import Canary.Plugs
plug(
:load_and_authorize_resource,
model: FantasyTeams.FantasyTeam,
only: [:create, :new, :update],
preload: [:owners, :fantasy_league],
persisted: true,
id_name: "fantasy_team_id",
unauthorized_handler: {Authorization, :handle_unauthorized}
)
plug(:scrub_params, "trade" when action in [:create, :update])
plug(:authorize_status_update when action in [:update])
def index(conn, %{"fantasy_league_id" => league_id}) do
league = FantasyLeagues.get(league_id)
render(
conn,
"index.html",
fantasy_league: league,
trades: Trades.all_for_league(league.id)
)
end
def new(conn, %{"fantasy_team_id" => _id}) do
team = %{fantasy_league_id: league_id} = conn.assigns.fantasy_team
changeset = Trades.build_new_changeset()
league_teams = FantasyTeams.list_teams_for_league(league_id)
league_players = FantasyTeams.owned_players_for_league(league_id)
league_future_picks = DraftPicks.list_future_picks_by_league(league_id)
render(
conn,
"new.html",
changeset: changeset,
fantasy_league: team.fantasy_league,
fantasy_team: team,
league_future_picks: league_future_picks,
league_teams: league_teams,
league_players: league_players
)
end
  # Creates a trade proposal on behalf of the signed-in owner's team.
  # On success, emails every involved team plus the admins and redirects
  # to the team page; on a validation error, re-renders the form with
  # the league's teams/players/picks preloaded.
  def create(conn, %{"fantasy_team_id" => _id, "trade" => trade_params}) do
    team = %{fantasy_league: league} = conn.assigns.fantasy_team

    # Submitter identity comes from the session, never from the form.
    trade_params =
      trade_params
      |> Map.put("submitted_by_user_id", conn.assigns.current_user.id)
      |> Map.put("submitted_by_team_id", team.id)

    case Trades.create_trade(trade_params) do
      {:ok, trade} ->
        # Reload so the associations needed for recipients are present.
        trade = Trades.find!(trade.id)
        admin_emails = Accounts.get_admin_emails()
        recipients = (Trades.Trade.get_teams_emails(trade) ++ admin_emails) |> Enum.uniq()

        conn
        |> TradeEmail.propose(league, trade, recipients)
        |> Mailer.deliver()
        |> Mailer.handle_delivery()

        conn
        |> put_flash(:info, "Trade submitted for approval.")
        |> redirect(to: Routes.fantasy_team_path(conn, :show, team))

      {:error, %Ecto.Changeset{} = changeset} ->
        league_teams = FantasyTeams.list_teams_for_league(league.id)
        league_players = FantasyTeams.owned_players_for_league(league.id)
        league_future_picks = DraftPicks.list_future_picks_by_league(league.id)

        render(
          conn,
          "new.html",
          changeset: changeset,
          fantasy_team: team,
          league_future_picks: league_future_picks,
          league_teams: league_teams,
          league_players: league_players
        )
    end
  end
  # Updates a trade's status (the authorize_status_update plug below has
  # already vetted the transition). When the trade was canceled, the
  # involved teams and admins are emailed. Either way the user is sent
  # back to the league's trade index.
  def update(conn, %{
        "fantasy_team_id" => _team_id,
        "id" => trade_id,
        "trade" => trade_params
      }) do
    %{fantasy_league: league} = team = conn.assigns.fantasy_team

    case Trades.update_trade(trade_id, trade_params) do
      {:ok, %{trade: trade}} ->
        if(trade.status == "Canceled") do
          # Reload so the associations needed for recipients are present.
          trade = Trades.find!(trade.id)
          admin_emails = Accounts.get_admin_emails()
          recipients = (Trades.Trade.get_teams_emails(trade) ++ admin_emails) |> Enum.uniq()

          conn
          |> TradeEmail.cancel(league, trade, recipients, team)
          |> Mailer.deliver()
          |> Mailer.handle_delivery()
        end

        conn
        |> put_flash(:info, "Trade successfully processed")
        |> redirect(to: Routes.fantasy_league_trade_path(conn, :index, league.id))

      {:error, error} ->
        conn
        |> put_flash(:error, inspect(error))
        |> redirect(to: Routes.fantasy_league_trade_path(conn, :index, league.id))
    end
  end
# Helpers
defp authorize_status_update(
%{params: %{"id" => trade_id, "trade" => %{"status" => "Canceled"}}} = conn,
_opts
) do
trade = Trades.find!(trade_id)
case trade.status == "Proposed" do
true ->
conn
false ->
conn
|> put_flash(:error, "Can only update a proposed trade")
|> redirect(to: "/")
|> halt
end
end
defp authorize_status_update(conn, _opts), do: Authorization.authorize_admin(conn, [])
end
| 30.143836 | 92 | 0.643263 |
1cc50b0a8f7747ff7f758dec87ff33fb6e7cee2f | 145 | ex | Elixir | lib/philopets_web/controllers/page_controller.ex | audacioustux/philopets | 9380416937d635d4b1f5e13fa6f8b52ee603addf | [
"blessing"
] | null | null | null | lib/philopets_web/controllers/page_controller.ex | audacioustux/philopets | 9380416937d635d4b1f5e13fa6f8b52ee603addf | [
"blessing"
] | null | null | null | lib/philopets_web/controllers/page_controller.ex | audacioustux/philopets | 9380416937d635d4b1f5e13fa6f8b52ee603addf | [
"blessing"
] | null | null | null | defmodule PhilopetsWeb.PageController do
use PhilopetsWeb, :controller
  # Renders the static landing page; request params are ignored.
  def index(conn, _params) do
    render(conn, "index.html")
  end
end
| 18.125 | 40 | 0.744828 |
1cc50e48b1f94c1dddec5408295772364c92e016 | 705 | ex | Elixir | src/myapp/apps/services/web/gettext.ex | ecamacho/elixir_at_work | c8e4c604c925455b4794931c2b9c15ef3774005f | [
"MIT"
] | null | null | null | src/myapp/apps/services/web/gettext.ex | ecamacho/elixir_at_work | c8e4c604c925455b4794931c2b9c15ef3774005f | [
"MIT"
] | null | null | null | src/myapp/apps/services/web/gettext.ex | ecamacho/elixir_at_work | c8e4c604c925455b4794931c2b9c15ef3774005f | [
"MIT"
] | null | null | null | defmodule Services.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import Services.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :services
end
| 28.2 | 72 | 0.679433 |
1cc51c16be56cc7b350c65925693c5653dc0c390 | 7,212 | exs | Elixir | lib/mix/test/mix/tasks/app.start_test.exs | namjae/elixir | 6d1561a5939d68fb61f422b83271fbc824847395 | [
"Apache-2.0"
] | 1 | 2021-05-20T13:08:37.000Z | 2021-05-20T13:08:37.000Z | lib/mix/test/mix/tasks/app.start_test.exs | namjae/elixir | 6d1561a5939d68fb61f422b83271fbc824847395 | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/tasks/app.start_test.exs | namjae/elixir | 6d1561a5939d68fb61f422b83271fbc824847395 | [
"Apache-2.0"
] | null | null | null | Code.require_file("../../test_helper.exs", __DIR__)
defmodule Mix.Tasks.App.StartTest do
  # Integration tests for `mix app.start`: compilation artifacts,
  # application start-up, Elixir-version checks, configuration validation,
  # and the error reports produced when an application fails to start.
  use MixTest.Case

  # Minimal in-memory project depending on :logger, used to observe
  # compilation output and application start order.
  defmodule AppStartSample do
    def project do
      [app: :app_start_sample, version: "0.1.0"]
    end

    def application do
      [applications: [:logger]]
    end
  end

  # Project with an intentionally unsatisfiable Elixir requirement.
  defmodule WrongElixirProject do
    def project do
      [app: :error, version: "0.1.0", elixir: "~> 0.8.1"]
    end
  end

  @tag apps: [:app_start_sample]
  test "compiles and starts the project" do
    Mix.Project.push(AppStartSample)

    in_fixture "no_mixfile", fn ->
      # --no-compile with nothing compiled yet must fail.
      assert_raise Mix.Error, fn ->
        Mix.Tasks.App.Start.run(["--no-compile"])
      end

      refute List.keyfind(Application.started_applications(), :logger, 0)
      Application.start(:logger)

      # --no-start compiles but does not start the app itself.
      Mix.Tasks.App.Start.run(["--no-start"])
      assert File.regular?("_build/dev/lib/app_start_sample/ebin/Elixir.A.beam")
      assert File.regular?("_build/dev/lib/app_start_sample/ebin/app_start_sample.app")
      refute List.keyfind(Application.started_applications(), :app_start_sample, 0)
      assert List.keyfind(Application.started_applications(), :logger, 0)

      Mix.Tasks.App.Start.run([])
      assert List.keyfind(Application.started_applications(), :app_start_sample, 0)
      assert List.keyfind(Application.started_applications(), :logger, 0)
    end
  end

  @tag apps: [:app_start_sample, :app_loaded_sample]
  test "start checks for invalid configuration", context do
    Mix.Project.push(AppStartSample)

    in_tmp context.test, fn ->
      :ok = :application.load({:application, :app_loaded_sample, [vsn: '1.0.0', env: []]})

      Mix.ProjectStack.configured_applications([
        :app_start_sample,
        :app_unknown_sample,
        :app_loaded_sample
      ])

      Mix.Tasks.Compile.run([])
      Mix.Tasks.App.Start.run([])

      # Unknown apps are warned about; loaded-but-unstarted ones are not.
      assert_received {:mix_shell, :error, [
        "You have configured application :app_unknown_sample" <> _
      ]}

      refute_received {:mix_shell, :error, [
        "You have configured application :app_loaded_sample" <> _
      ]}
    end
  end

  @tag apps: [:error]
  test "validates Elixir version requirement", context do
    # "~> ~> 0.8.1" is syntactically invalid as a version requirement.
    Mix.ProjectStack.post_config(elixir: "~> ~> 0.8.1")
    Mix.Project.push(WrongElixirProject)

    in_tmp context.test, fn ->
      assert_raise Mix.Error, ~r"Invalid Elixir version requirement", fn ->
        Mix.Tasks.App.Start.run(["--no-start"])
      end
    end
  end

  @tag apps: [:error]
  test "validates the Elixir version with requirement", context do
    Mix.Project.push(WrongElixirProject)

    in_tmp context.test, fn ->
      assert_raise Mix.ElixirVersionError, ~r/You're trying to run :error on Elixir/, fn ->
        Mix.Tasks.App.Start.run(["--no-start"])
      end
    end
  end

  @tag apps: [:error]
  test "does not validate the Elixir version with requirement when disabled", context do
    Mix.Project.push(WrongElixirProject)

    in_tmp context.test, fn ->
      Mix.Tasks.App.Start.run(["--no-start", "--no-elixir-version-check"])
    end
  end

  test "start does nothing if app is nil" do
    assert Mix.Tasks.App.Start.start([app: nil], []) == :ok
  end

  test "allows type to be configured" do
    assert Mix.Tasks.App.Start.type([], permanent: true) == :permanent
    assert Mix.Tasks.App.Start.type([], temporary: true) == :temporary
    assert Mix.Tasks.App.Start.type([start_permanent: true], []) == :permanent
    assert Mix.Tasks.App.Start.type([], []) == :temporary
  end

  # Project whose application/0 callback is injected per-test via the
  # process dictionary, so each test controls the :mod entry.
  defmodule ReturnSample do
    def project do
      [app: :return_sample, version: "0.1.0"]
    end

    def application do
      Process.get(:application_definition)
    end
  end

  # Application callback that returns whatever start argument it was given,
  # letting tests simulate bad start/2 return values.
  defmodule ReturnApp do
    use Application

    def start(_type, return), do: return
  end

  @tag apps: [:return_sample]
  test "start points to report on error", context do
    Mix.Project.push(ReturnSample)

    in_tmp context.test, fn ->
      Process.put(:application_definition, mod: {ReturnApp, {:error, :bye}})
      Mix.Tasks.Compile.run([])

      message =
        "Could not start application return_sample: " <>
        "Mix.Tasks.App.StartTest.ReturnApp.start(:normal, {:error, :bye}) " <>
        "returned an error: :bye"

      assert_raise Mix.Error, message, fn ->
        Mix.Tasks.App.Start.start([app: :return_sample], [])
      end
    end
  end

  @tag apps: [:return_sample]
  test "start points to report on exception error", context do
    Mix.Project.push(ReturnSample)

    in_tmp context.test, fn ->
      mod = {ReturnApp, {:error, {:badarg, [{ReturnApp, :start, 2, []}]}}}
      Process.put(:application_definition, mod: mod)
      Mix.Tasks.Compile.run([])

      message =
        "Could not start application return_sample: " <>
        "Mix.Tasks.App.StartTest.ReturnApp.start(:normal, {:error, {:badarg, [{Mix.Tasks.App.StartTest.ReturnApp, :start, 2, []}]}}) " <>
        "returned an error: an exception was raised:\n" <>
        " ** (ArgumentError) argument error\n" <>
        " Mix.Tasks.App.StartTest.ReturnApp.start/2"

      assert_raise Mix.Error, message, fn ->
        Mix.Tasks.App.Start.start([app: :return_sample], [])
      end
    end
  end

  @tag apps: [:return_sample]
  test "start points to report on bad return", context do
    Mix.Project.push(ReturnSample)

    in_tmp context.test, fn ->
      Process.put(:application_definition, mod: {ReturnApp, :bad})
      Mix.Tasks.Compile.run([])

      message =
        "Could not start application return_sample: " <>
        "Mix.Tasks.App.StartTest.ReturnApp.start(:normal, :bad) " <>
        "returned a bad value: :bad"

      assert_raise Mix.Error, message, fn ->
        Mix.Tasks.App.Start.start([app: :return_sample], [])
      end
    end
  end

  # Same process-dictionary trick as ReturnSample, but for exits.
  defmodule ExitSample do
    def project do
      [app: :exit_sample, version: "0.1.0"]
    end

    def application do
      Process.get(:application_definition)
    end
  end

  # Application callback that exits with the given reason.
  defmodule ExitApp do
    use Application

    def start(_type, reason), do: exit(reason)
  end

  @tag apps: [:exit_sample]
  test "start points to report on exit", context do
    Mix.Project.push(ExitSample)

    in_tmp context.test, fn ->
      Process.put(:application_definition, mod: {ExitApp, :bye})
      Mix.Tasks.Compile.run([])

      message =
        "Could not start application exit_sample: exited in: " <>
        "Mix.Tasks.App.StartTest.ExitApp.start(:normal, :bye)\n" <> " ** (EXIT) :bye"

      assert_raise Mix.Error, message, fn ->
        Mix.Tasks.App.Start.start([app: :exit_sample], [])
      end
    end
  end

  @tag apps: [:exit_sample]
  test "start points to report on normal exit", context do
    Mix.Project.push(ExitSample)

    in_tmp context.test, fn ->
      Process.put(:application_definition, mod: {ExitApp, :normal})
      Mix.Tasks.Compile.run([])

      message =
        "Could not start application exit_sample: exited in: " <>
        "Mix.Tasks.App.StartTest.ExitApp.start(:normal, :normal)\n" <> " ** (EXIT) normal"

      assert_raise Mix.Error, message, fn ->
        Mix.Tasks.App.Start.start([app: :exit_sample], [])
      end
    end
  end
end
| 29.557377 | 139 | 0.635885 |
1cc54dcf006edc63431905034eb1c46dc837e57b | 18,247 | exs | Elixir | lib/elixir/test/elixir/kernel/expansion_test.exs | diogovk/elixir | 7b8213affaad38b50afaa3dfc3a43717f35ba4e7 | [
"Apache-2.0"
] | 4 | 2015-12-22T02:46:39.000Z | 2016-04-26T06:11:09.000Z | lib/elixir/test/elixir/kernel/expansion_test.exs | diogovk/elixir | 7b8213affaad38b50afaa3dfc3a43717f35ba4e7 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/kernel/expansion_test.exs | diogovk/elixir | 7b8213affaad38b50afaa3dfc3a43717f35ba4e7 | [
"Apache-2.0"
] | null | null | null | Code.require_file "../test_helper.exs", __DIR__
defmodule Kernel.ExpansionTarget do
  # Fixture macro for the expansion tests below: always expands to the
  # literal integer 17.
  defmacro seventeen do
    17
  end
end
defmodule Kernel.ExpansionTest do
  # Tests for the compiler's expansion pass (:elixir_exp.expand/2): each
  # test feeds a quoted expression through expand/expand_env and asserts
  # on the expanded AST, the tracked variables, or the raised CompileError.
  use ExUnit.Case, async: true

  ## __block__

  test "__block__: expands to nil when empty" do
    assert expand(quote do: __block__()) == nil
  end

  test "__block__: expands to argument when arity is 1" do
    assert expand(quote do: __block__(1)) == 1
  end

  test "__block__: is recursive to argument when arity is 1" do
    assert expand(quote do: __block__(_ = 1, __block__(2))) == quote do: __block__(_ = 1, 2)
  end

  test "__block__: accumulates vars" do
    assert expand(quote(do: (a = 1; a))) == quote do: (a = 1; a)
  end

  ## alias

  test "alias: expand args, defines alias and returns itself" do
    alias true, as: True

    input = quote do: (alias :hello, as: World, warn: True)
    {output, env} = expand_env(input, __ENV__)

    assert output == quote do: (alias :hello, as: :"Elixir.World", warn: true)
    assert env.aliases == [{:"Elixir.True", true}, {:"Elixir.World", :hello}]
  end

  ## __aliases__

  test "__aliases__: expands even if no alias" do
    assert expand(quote do: World) == :"Elixir.World"
    assert expand(quote do: Elixir.World) == :"Elixir.World"
  end

  test "__aliases__: expands with alias" do
    alias Hello, as: World
    assert expand_env(quote(do: World), __ENV__) |> elem(0) == :"Elixir.Hello"
  end

  test "__aliases__: expands with alias is recursive" do
    alias Source, as: Hello
    alias Hello, as: World
    assert expand_env(quote(do: World), __ENV__) |> elem(0) == :"Elixir.Source"
  end

  ## =

  test "=: sets context to match" do
    assert expand(quote do: __ENV__.context = :match) == quote do: :match = :match
  end

  test "=: defines vars" do
    {output, env} = expand_env(quote(do: a = 1), __ENV__)
    assert output == quote(do: a = 1)
    assert {:a, __MODULE__} in env.vars
  end

  test "=: does not carry rhs imports" do
    assert expand(quote do: (flatten([1, 2, 3]) = import List)) ==
           quote do: (flatten([1, 2, 3]) = import :"Elixir.List", [])
  end

  test "=: does not define _" do
    {output, env} = expand_env(quote(do: _ = 1), __ENV__)
    assert output == quote(do: _ = 1)
    assert env.vars == []
  end

  ## Pseudo vars

  test "__MODULE__" do
    assert expand(quote do: __MODULE__) == __MODULE__
  end

  test "__DIR__" do
    assert expand(quote do: __DIR__) == __DIR__
  end

  test "__CALLER__" do
    assert expand(quote do: __CALLER__) == quote do: __CALLER__
  end

  test "__ENV__" do
    env = %{__ENV__ | line: 0}
    assert expand_env(quote(do: __ENV__), env) ==
           {{:%{}, [], Map.to_list(env)}, env}
  end

  test "__ENV__.accessor" do
    env = %{__ENV__ | line: 0}
    assert expand_env(quote(do: __ENV__.file), env) == {__ENV__.file, env}
    assert expand_env(quote(do: __ENV__.unknown), env) ==
           {quote(do: unquote({:%{}, [], Map.to_list(env)}).unknown), env}
  end

  ## Super

  test "super: expand args" do
    assert expand(quote do: super(a, b)) == quote do: super(a(), b())
  end

  ## Vars

  test "vars: expand to local call" do
    {output, env} = expand_env(quote(do: a), __ENV__)
    assert output == quote(do: a())
    assert env.vars == []
  end

  test "vars: forces variable to exist" do
    assert expand(quote do: (var!(a) = 1; var!(a)))

    message = ~r"expected var \"a\" to expand to an existing variable or be part of a match"
    assert_raise CompileError, message, fn -> expand(quote do: var!(a)) end

    message = ~r"expected var \"a\" \(context Unknown\) to expand to an existing variable or be part of a match"
    assert_raise CompileError, message, fn -> expand(quote do: var!(a, Unknown)) end
  end

  test "^: expands args" do
    assert expand(quote do: ^a = 1) == quote do: ^a = 1
  end

  test "^: raises outside match" do
    assert_raise CompileError, ~r"cannot use \^a outside of match clauses", fn ->
      expand(quote do: ^a)
    end
  end

  test "^: raises without var" do
    assert_raise CompileError, ~r"invalid argument for unary operator \^, expected an existing variable, got: \^1", fn ->
      expand(quote do: ^1 = 1)
    end
  end

  ## Locals

  test "locals: expands to remote calls" do
    assert {{:., _, [Kernel, :=~]}, _, [{:a, _, []}, {:b, _, []}]} =
           expand(quote do: a =~ b)
  end

  test "locals: in guards" do
    assert expand(quote(do: fn pid when :erlang.==(pid, self) -> pid end)) ==
           quote(do: fn pid when :erlang.==(pid, :erlang.self()) -> pid end)
  end

  test "locals: custom imports" do
    assert expand(quote do: (import Kernel.ExpansionTarget; seventeen)) ==
           quote do: (import :"Elixir.Kernel.ExpansionTarget", []; 17)
  end

  ## Tuples

  test "tuples: expanded as arguments" do
    assert expand(quote(do: {a = 1, a})) == quote do: {a = 1, a()}
    assert expand(quote(do: {b, a = 1, a})) == quote do: {b(), a = 1, a()}
  end

  ## Maps & structs

  test "maps: expanded as arguments" do
    assert expand(quote(do: %{a: a = 1, b: a})) == quote do: %{a: a = 1, b: a()}
  end

  test "structs: expanded as arguments" do
    assert expand(quote(do: %:elixir{a: a = 1, b: a})) ==
           quote do: %:elixir{a: a = 1, b: a()}

    assert expand(quote(do: %:"Elixir.Kernel"{a: a = 1, b: a})) ==
           quote do: %:"Elixir.Kernel"{a: a = 1, b: a()}
  end

  test "structs: expects atoms" do
    expand(quote do: %unknown{a: 1} = x)

    assert_raise CompileError, ~r"expected struct name to be a compile time atom or alias", fn ->
      expand(quote do: %unknown{a: 1})
    end

    assert_raise CompileError, ~r"expected struct name to be a compile time atom or alias", fn ->
      expand(quote do: %unquote(1){a: 1})
    end

    assert_raise CompileError, ~r"expected struct name in a match to be a compile time atom, alias or a variable", fn ->
      expand(quote do: %unquote(1){a: 1} = x)
    end
  end

  ## quote

  test "quote: expanded to raw forms" do
    assert expand(quote do: (quote do: hello)) == {:{}, [], [:hello, [], __MODULE__]}
  end

  ## Anonymous calls

  test "anonymous calls: expands base and args" do
    assert expand(quote do: a.(b)) == quote do: a().(b())
  end

  test "anonymous calls: raises on atom base" do
    assert_raise CompileError, ~r"invalid function call :foo.()", fn ->
      expand(quote do: :foo.(a))
    end
  end

  ## Remote calls

  test "remote calls: expands to erlang" do
    assert expand(quote do: Kernel.is_atom(a)) == quote do: :erlang.is_atom(a())
  end

  test "remote calls: expands macros" do
    assert expand(quote do: Kernel.ExpansionTest.thirteen) == 13
  end

  test "remote calls: expands receiver and args" do
    assert expand(quote do: a.is_atom(b)) == quote do: a().is_atom(b())
    assert expand(quote do: (a = :foo).is_atom(a)) == quote do: (a = :foo).is_atom(a())
  end

  test "remote calls: modules must be required for macros" do
    assert expand(quote do: (require Kernel.ExpansionTarget; Kernel.ExpansionTarget.seventeen)) ==
           quote do: (require :"Elixir.Kernel.ExpansionTarget", []; 17)
  end

  test "remote calls: raises when not required" do
    msg = ~r"you must require Kernel\.ExpansionTarget before invoking the macro Kernel\.ExpansionTarget\.seventeen/0"

    assert_raise CompileError, msg, fn ->
      expand(quote do: Kernel.ExpansionTarget.seventeen)
    end
  end

  ## Comprehensions

  test "variables inside comprehensions do not leak with enums" do
    assert expand(quote do: (for(a <- b, do: c = 1); c)) ==
           quote do: (for(a <- b(), do: c = 1); c())
  end

  test "variables inside comprehensions do not leak with binaries" do
    assert expand(quote do: (for(<<a <- b>>, do: c = 1); c)) ==
           quote do: (for(<< <<a>> <- b() >>, do: c = 1); c())
  end

  test "variables inside filters are available in blocks" do
    assert expand(quote do: for(a <- b, c = a, do: c)) ==
           quote do: (for(a <- b(), c = a, do: c))
  end

  test "variables inside comprehensions options do not leak" do
    assert expand(quote do: (for(a <- c = b, into: [], do: 1); c)) ==
           quote do: (for(a <- c = b(), do: 1, into: []); c())

    assert expand(quote do: (for(a <- b, into: c = [], do: 1); c)) ==
           quote do: (for(a <- b(), do: 1, into: c = []); c())
  end

  ## With

  test "variables inside with do not leak" do
    assert expand(quote do: (with(a <- b, do: c = 1); c)) ==
           quote do: (with(a <- b(), do: c = 1); c())

    assert expand(quote do: (with(a = b, do: a); a)) ==
           quote do: (with(a = b(), do: a); a())
  end

  test "variables inside with are available in blocks" do
    assert expand(quote do: with(a <- b, c = a, do: c)) ==
           quote do: (with(a <- b(), c = a, do: c))
  end

  test "with: variables inside else do not leak" do
    assert expand(quote do: (with(a <- b, do: 1, else: (a -> a)); a)) ==
           quote do: (with(a <- b(), do: 1, else: (a -> a)); a())
  end

  ## Capture

  test "&: keeps locals" do
    assert expand(quote do: &unknown/2) ==
           {:&, [], [{:/, [], [{:unknown, [], nil}, 2]}]}

    assert expand(quote do: &unknown(&1, &2)) ==
           {:&, [], [{:/, [], [{:unknown, [], nil}, 2]}]}
  end

  test "&: expands remotes" do
    assert expand(quote do: &List.flatten/2) ==
           quote do: :erlang.make_fun(:"Elixir.List", :flatten, 2)

    assert expand(quote do: &Kernel.is_atom/1) ==
           quote do: :erlang.make_fun(:erlang, :is_atom, 1)
  end

  test "&: expands macros" do
    assert expand(quote do: (require Kernel.ExpansionTarget; &Kernel.ExpansionTarget.seventeen/0)) ==
           quote do: (require :"Elixir.Kernel.ExpansionTarget", []; fn -> 17 end)
  end

  ## fn

  test "fn: expands each clause" do
    assert expand(quote do: fn x -> x; _ -> x end) ==
           quote do: fn x -> x; _ -> x() end
  end

  test "fn: does not share lexical scope between clauses" do
    assert expand(quote do: fn 1 -> import List; 2 -> flatten([1, 2, 3]) end) ==
           quote do: fn 1 -> import :"Elixir.List", []; 2 -> flatten([1, 2, 3]) end
  end

  test "fn: expands guards" do
    assert expand(quote do: fn x when x when __ENV__.context -> true end) ==
           quote do: fn x when x when :guard -> true end
  end

  test "fn: does not leak vars" do
    assert expand(quote do: (fn x -> x end; x)) ==
           quote do: (fn x -> x end; x())
  end

  ## Cond

  test "cond: expands each clause" do
    assert expand_and_clean(quote do: (cond do x = 1 -> x; _ -> x end)) ==
           quote do: (cond do x = 1 -> x; _ -> x() end)
  end

  test "cond: does not share lexical scope between clauses" do
    assert expand_and_clean(quote do: (cond do 1 -> import List; 2 -> flatten([1, 2, 3]) end)) ==
           quote do: (cond do 1 -> import :"Elixir.List", []; 2 -> flatten([1, 2, 3]) end)
  end

  test "cond: does not leaks vars on head" do
    assert expand_and_clean(quote do: (cond do x = 1 -> x; y = 2 -> y end; :erlang.+(x, y))) ==
           quote do: (cond do x = 1 -> x; y = 2 -> y end; :erlang.+(x(), y()))
  end

  test "cond: leaks vars" do
    assert expand_and_clean(quote do: (cond do 1 -> x = 1; 2 -> y = 2 end; :erlang.+(x, y))) ==
           quote do: (cond do 1 -> x = 1; 2 -> y = 2 end; :erlang.+(x, y))
  end

  test "cond: expects at most one do" do
    assert_raise CompileError, ~r"duplicated do clauses given for cond", fn ->
      expand(quote(do: (cond do: (x -> x), do: (y -> y))))
    end
  end

  ## Case

  test "case: expands each clause" do
    assert expand_and_clean(quote do: (case w do x -> x; _ -> x end)) ==
           quote do: (case w() do x -> x; _ -> x() end)
  end

  test "case: does not share lexical scope between clauses" do
    assert expand_and_clean(quote do: (case w do 1 -> import List; 2 -> flatten([1, 2, 3]) end)) ==
           quote do: (case w() do 1 -> import :"Elixir.List", []; 2 -> flatten([1, 2, 3]) end)
  end

  test "case: expands guards" do
    assert expand_and_clean(quote do: (case w do x when x when __ENV__.context -> true end)) ==
           quote do: (case w() do x when x when :guard -> true end)
  end

  test "case: does not leaks vars on head" do
    assert expand_and_clean(quote do: (case w do x -> x; y -> y end; :erlang.+(x, y))) ==
           quote do: (case w() do x -> x; y -> y end; :erlang.+(x(), y()))
  end

  test "case: leaks vars" do
    assert expand_and_clean(quote do: (case w do x -> x = x; y -> y = y end; :erlang.+(x, y))) ==
           quote do: (case w() do x -> x = x; y -> y = y end; :erlang.+(x, y))
  end

  test "case: expects at most one do" do
    assert_raise CompileError, ~r"duplicated do clauses given for case", fn ->
      expand(quote(do: (case e, do: (x -> x), do: (y -> y))))
    end
  end

  ## Receive

  test "receive: expands each clause" do
    assert expand_and_clean(quote do: (receive do x -> x; _ -> x end)) ==
           quote do: (receive do x -> x; _ -> x() end)
  end

  test "receive: does not share lexical scope between clauses" do
    assert expand_and_clean(quote do: (receive do 1 -> import List; 2 -> flatten([1, 2, 3]) end)) ==
           quote do: (receive do 1 -> import :"Elixir.List", []; 2 -> flatten([1, 2, 3]) end)
  end

  test "receive: expands guards" do
    assert expand_and_clean(quote do: (receive do x when x when __ENV__.context -> true end)) ==
           quote do: (receive do x when x when :guard -> true end)
  end

  test "receive: does not leaks clause vars" do
    assert expand_and_clean(quote do: (receive do x -> x; y -> y end; :erlang.+(x, y))) ==
           quote do: (receive do x -> x; y -> y end; :erlang.+(x(), y()))
  end

  test "receive: leaks vars" do
    assert expand_and_clean(quote do: (receive do x -> x = x; y -> y = y end; :erlang.+(x, y))) ==
           quote do: (receive do x -> x = x; y -> y = y end; :erlang.+(x, y))
  end

  test "receive: leaks vars on after" do
    assert expand_and_clean(quote do: (receive do x -> x = x after y -> y; w = y end; :erlang.+(x, w))) ==
           quote do: (receive do x -> x = x after y() -> y(); w = y() end; :erlang.+(x, w))
  end

  test "receive: expects at most one clause" do
    assert_raise CompileError, ~r"duplicated do clauses given for receive", fn ->
      expand(quote(do: (receive do: (x -> x), do: (y -> y))))
    end

    assert_raise CompileError, ~r"duplicated after clauses given for receive", fn ->
      expand(quote(do: (receive do x -> x after y -> y after z -> z end)))
    end
  end

  ## Try

  test "try: expands catch" do
    assert expand(quote do: (try do x catch x, y -> z = :erlang.+(x, y) end; z)) ==
           quote do: (try do x() catch x, y -> z = :erlang.+(x, y) end; z())
  end

  test "try: expands after" do
    assert expand(quote do: (try do x after z = y end; z)) ==
           quote do: (try do x() after z = y() end; z())
  end

  test "try: expands else" do
    assert expand(quote do: (try do x else z -> z end; z)) ==
           quote do: (try do x() else z -> z end; z())
  end

  test "try: expands rescue" do
    assert expand(quote do: (try do x rescue x -> x; Error -> x end; x)) ==
           quote do: (try do x() rescue unquote(:in)(x, _) -> x; unquote(:in)(_, [:"Elixir.Error"]) -> x() end; x())
  end

  test "try: expects more than do" do
    assert_raise CompileError, ~r"missing catch/rescue/after/else keyword in try", fn ->
      expand(quote do: (try do x = y end; x))
    end
  end

  test "try: expects at most one clause" do
    assert_raise CompileError, ~r"duplicated do clauses given for try", fn ->
      expand(quote(do: (try do: e, do: f)))
    end

    assert_raise CompileError, ~r"duplicated rescue clauses given for try", fn ->
      expand(quote(do: (try do e rescue x -> x rescue y -> y end)))
    end

    assert_raise CompileError, ~r"duplicated after clauses given for try", fn ->
      expand(quote(do: (try do e after x = y after x = y end)))
    end

    assert_raise CompileError, ~r"duplicated else clauses given for try", fn ->
      expand(quote(do: (try do e else x -> x else y -> y end)))
    end

    assert_raise CompileError, ~r"duplicated catch clauses given for try", fn ->
      expand(quote(do: (try do e catch x -> x catch y -> y end)))
    end
  end

  ## Binaries

  test "bitstrings: size * unit" do
    import Kernel, except: [-: 2]

    assert expand(quote do: <<x::13>>) ==
           quote do: <<x()::size(13)>>

    assert expand(quote do: <<x::13 * 6>>) ==
           quote do: <<x()::unit(6)-size(13)>>

    assert expand(quote do: <<x::_ * 6>>) ==
           quote do: <<x()::unit(6)>>

    assert expand(quote do: <<x::13 * 6-binary>>) ==
           quote do: <<x()::unit(6)-binary()-size(13) >>

    assert expand(quote do: <<x::binary-13 * 6>>) ==
           quote do: <<x()::binary()-unit(6)-size(13)>>
  end

  test "bitstrings: expands modifiers" do
    assert expand(quote do: (import Kernel.ExpansionTarget; <<x::seventeen>>)) ==
           quote do: (import :"Elixir.Kernel.ExpansionTarget", []; <<x()::size(17)>>)

    assert expand(quote do: (import Kernel.ExpansionTarget; <<seventeen::seventeen, x::size(seventeen)>> = 1)) ==
           quote do: (import :"Elixir.Kernel.ExpansionTarget", [];
                      <<seventeen::size(17), x::size(seventeen)>> = 1)
  end

  test "bitstrings: expands modifiers args" do
    assert expand(quote do: (require Kernel.ExpansionTarget; <<x::size(Kernel.ExpansionTarget.seventeen)>>)) ==
           quote do: (require :"Elixir.Kernel.ExpansionTarget", []; <<x()::size(17)>>)
  end

  ## Invalid

  test "handles invalid expressions" do
    assert_raise CompileError, ~r"invalid quoted expression: {1, 2, 3}", fn ->
      expand(quote do: unquote({1, 2, 3}))
    end

    assert_raise CompileError, ~r"invalid quoted expression: #Function<", fn ->
      expand(quote do: unquote({:sample, fn -> nil end}))
    end
  end

  ## Helpers

  # Fixture macro exercised by the "remote calls: expands macros" test.
  defmacro thirteen do
    13
  end

  # Expands and strips :export metadata so ASTs compare cleanly.
  defp expand_and_clean(expr) do
    cleaner = &Keyword.drop(&1, [:export])

    expr
    |> expand_env(__ENV__)
    |> elem(0)
    |> Macro.prewalk(&Macro.update_meta(&1, cleaner))
  end

  # Expands in this module's environment and discards the returned env.
  defp expand(expr) do
    expand_env(expr, __ENV__) |> elem(0)
  end

  # Thin wrapper over the compiler's expansion entry point.
  defp expand_env(expr, env) do
    :elixir_exp.expand(expr, env)
  end
end
| 32.642218 | 121 | 0.590672 |
1cc55135b2e1a20841510a26c19c7f1d5dad81ee | 397 | exs | Elixir | test/test_helper.exs | andyl/phx-cmdd | f72319d81b264b4deba4831ee1d0eb725d2b5adb | [
"MIT"
] | 5 | 2019-05-23T23:25:25.000Z | 2019-05-31T12:10:59.000Z | test/test_helper.exs | andyl/comgen | f72319d81b264b4deba4831ee1d0eb725d2b5adb | [
"MIT"
] | 3 | 2019-05-31T04:42:02.000Z | 2019-06-01T06:42:14.000Z | test/test_helper.exs | andyl/comgen | f72319d81b264b4deba4831ee1d0eb725d2b5adb | [
"MIT"
] | 1 | 2019-06-01T05:04:14.000Z | 2019-06-01T05:04:14.000Z | ExUnit.start()
defmodule TestHelper do
  @doc """
  Use this function to reset the base_dir before running tests that generate
  files. Note that when MIX_ENV == test, basedir == "tmp/test/"
  """
  def reset_base_dir(_args) do
    basedir = Comspec.basedir()

    # Deliberate single-clause case: if basedir is not "tmp/" or the env is
    # not :test, this raises CaseClauseError instead of wiping a real
    # directory — a safety guard, not an oversight.
    # NOTE(review): the @doc above says basedir == "tmp/test/" in the test
    # env, but the match expects "tmp/" — confirm which value is correct.
    case [basedir, Mix.env()] do
      ["tmp/", :test] ->
        File.rm_rf!(basedir)
        File.mkdir_p(basedir)
    end
  end
end
| 23.352941 | 76 | 0.639798 |
1cc5575c32ceee0e75e5a8a15ba03eb4bea54de6 | 1,783 | exs | Elixir | apps/asf_bo_web/test/asf_web/controllers/user_registration_controller_test.exs | LazarRistic/asf | 2c557f06839a129b35174142c91f60696be2fa89 | [
"MIT"
] | null | null | null | apps/asf_bo_web/test/asf_web/controllers/user_registration_controller_test.exs | LazarRistic/asf | 2c557f06839a129b35174142c91f60696be2fa89 | [
"MIT"
] | null | null | null | apps/asf_bo_web/test/asf_web/controllers/user_registration_controller_test.exs | LazarRistic/asf | 2c557f06839a129b35174142c91f60696be2fa89 | [
"MIT"
] | null | null | null | defmodule AsfBOWeb.UserRegistrationControllerTest do
use AsfBOWeb.ConnCase, async: true
import Asf.AccountsFixtures
describe "GET /users/register" do
test "renders registration page", %{conn: conn} do
conn = get(conn, Routes.user_registration_path(conn, :new))
response = html_response(conn, 200)
assert response =~ "<h1>Register</h1>"
assert response =~ "Log in</a>"
assert response =~ "Register</a>"
end
test "redirects if already logged in", %{conn: conn} do
conn = conn |> log_in_user(user_fixture()) |> get(Routes.user_registration_path(conn, :new))
assert redirected_to(conn) == "/"
end
end
describe "POST /users/register" do
@tag :capture_log
test "creates account and logs the user in", %{conn: conn} do
email = unique_user_email()
conn =
post(conn, Routes.user_registration_path(conn, :create), %{
"user" => %{"email" => email, "password" => valid_user_password()}
})
assert get_session(conn, :user_token)
assert redirected_to(conn) =~ "/"
# Now do a logged in request and assert on the menu
conn = get(conn, "/")
response = html_response(conn, 200)
assert response =~ email
assert response =~ "Settings</a>"
assert response =~ "Log out</a>"
end
test "render errors for invalid data", %{conn: conn} do
conn =
post(conn, Routes.user_registration_path(conn, :create), %{
"user" => %{"email" => "with spaces", "password" => "too short"}
})
response = html_response(conn, 200)
assert response =~ "<h1>Register</h1>"
assert response =~ "must have the @ sign and no spaces"
assert response =~ "should be at least 12 character"
end
end
end
| 32.418182 | 98 | 0.624229 |
1cc569ec8fd93b08a0bed71400f0c9c1918d86f9 | 459 | ex | Elixir | system3/lib/beb.ex | mihaivanea/broadcast_algorithms | 124a93791c0589e4f56088fe98b0a3469b3d90c7 | [
"MIT"
] | null | null | null | system3/lib/beb.ex | mihaivanea/broadcast_algorithms | 124a93791c0589e4f56088fe98b0a3469b3d90c7 | [
"MIT"
] | null | null | null | system3/lib/beb.ex | mihaivanea/broadcast_algorithms | 124a93791c0589e4f56088fe98b0a3469b3d90c7 | [
"MIT"
] | null | null | null | # Mihail Vanea (mv1315)
defmodule BEB do
  @moduledoc """
  Best-effort broadcast process.

  After being bound (via a `:bind` message) to a perfect-link process `pl`,
  an application process `app` and a list of neighbours, it relays every
  `:broadcast` request to all neighbours through `pl` and delivers every
  incoming `:beb_broadcast` message to `app`.
  """

  @doc """
  Entry point: blocks until a `{:bind, pl, app, neighbours}` message arrives,
  then enters the main loop with that configuration.
  """
  def start() do
    receive do
      {:bind, pl, app, neighbours} -> loop(pl, app, neighbours)
    end
  end

  # Main loop: handles one message, then recurses with unchanged state.
  defp loop(pl, app, neighbours) do
    receive do
      {:broadcast, message, from} ->
        # Fan the message out to every neighbour over the perfect link.
        Enum.each(neighbours, fn neighbour ->
          send(pl, {:beb_broadcast, neighbour, message, from})
        end)

      {:beb_broadcast, message, source} ->
        # Deliver a received broadcast to the application layer.
        send(app, {message, source})
    end

    loop(pl, app, neighbours)
  end
end
| 20.863636 | 63 | 0.586057 |
1cc57f005f35b2a712a4fb23b3a7ee92be7b991a | 1,181 | ex | Elixir | lib/camino_challenge/contratos/entities/partes_contrato.ex | kadmohardy/camino_challenge | 53117f763c0a51b0825cac18b799b7d772781671 | [
"MIT"
] | null | null | null | lib/camino_challenge/contratos/entities/partes_contrato.ex | kadmohardy/camino_challenge | 53117f763c0a51b0825cac18b799b7d772781671 | [
"MIT"
] | null | null | null | lib/camino_challenge/contratos/entities/partes_contrato.ex | kadmohardy/camino_challenge | 53117f763c0a51b0825cac18b799b7d772781671 | [
"MIT"
] | null | null | null | defmodule CaminoChallenge.Contratos.Entities.PartesContrato do
  @moduledoc """
  Entity module for PartesContrato, representing the join between a
  contract (`Contrato`) and a person (`Pessoa`).
  """
  use Ecto.Schema
  import Ecto.Changeset
  alias CaminoChallenge.Contratos.Entities.Contrato
  alias CaminoChallenge.Pessoas.Entities.Pessoa
  # UUID primary key; `Phoenix.Param` uses it when building URLs.
  @primary_key {:id, :binary_id, autogenerate: true}
  @derive {Phoenix.Param, key: :id}
  schema "contratos_partes" do
    # Raw FK columns are declared as fields so they can be cast directly;
    # the belongs_to associations below reuse them (define_field: false).
    field :pessoa_id, :binary_id, foreign_key: :pessoa_id, references: :id
    field :contrato_id, :binary_id, foreign_key: :contrato_id, references: :id
    belongs_to :pessoa, Pessoa, define_field: false
    belongs_to :contrato, Contrato, define_field: false
  end
  @doc """
  Casts and validates attributes for a contract-party join row.

  ## Parameters

    - pessoa_id: UUID of the person
    - contrato_id: UUID of the contract

  Note: only foreign-key constraints are enforced here; presence of the ids
  is not validated (assumed to be guaranteed by the caller — TODO confirm).
  """
  def changeset(partes_contrato, attrs) do
    partes_contrato
    |> cast(attrs, [:pessoa_id, :contrato_id])
    |> foreign_key_constraint(:pessoa_id)
    |> foreign_key_constraint(:contrato_id)
  end
end
| 31.078947 | 81 | 0.740898 |
1cc5838f766a99561dea294fbaf97eca13453778 | 1,434 | exs | Elixir | apps/ewallet/mix.exs | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | apps/ewallet/mix.exs | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | apps/ewallet/mix.exs | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | defmodule EWallet.Mixfile do
use Mix.Project
def project do
[
app: :ewallet,
version: "0.1.0-beta",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.5",
elixirc_paths: elixirc_paths(Mix.env),
elixirc_options: [warnings_as_errors: true],
compilers: [:phoenix] ++ Mix.compilers,
start_permanent: Mix.env == :prod,
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [
"coveralls": :test,
"coveralls.detail": :test,
"coveralls.post": :test,
"coveralls.html": :test
],
deps: deps()
]
end
  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      # OTP application callback module started with the supervision tree.
      mod: {EWallet.Application, []},
      extra_applications: [:logger]
    ]
  end
  # Specifies which paths to compile per environment.
  # Test support helpers are only compiled in the :test environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]
  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:phoenix, "~> 1.3.0"},
      {:phoenix_html, "~> 2.11.0"},
      {:quantum, "~> 2.2.6"},
      {:timex, "~> 3.0"},
      {:bodyguard, "~> 2.2"},
      {:decimal, "~> 1.0"},
      # Sibling apps inside this umbrella project.
      {:ewallet_db, in_umbrella: true},
      {:local_ledger, in_umbrella: true},
      {:local_ledger_db, in_umbrella: true}
    ]
  end
end
| 26.072727 | 59 | 0.561367 |
1cc5aef7f816090e09695b1c40431c47b99ebd76 | 151 | ex | Elixir | lib/metro_cdmx_api_web/controllers/page_controller.ex | erickbarcenas/metro_cdmx_api | 6a598fe0b0e8a2bb999ea6c517053f902f16662b | [
"MIT"
] | null | null | null | lib/metro_cdmx_api_web/controllers/page_controller.ex | erickbarcenas/metro_cdmx_api | 6a598fe0b0e8a2bb999ea6c517053f902f16662b | [
"MIT"
] | null | null | null | lib/metro_cdmx_api_web/controllers/page_controller.ex | erickbarcenas/metro_cdmx_api | 6a598fe0b0e8a2bb999ea6c517053f902f16662b | [
"MIT"
] | null | null | null | defmodule MetroCdmxApiWeb.PageController do
use MetroCdmxApiWeb, :controller
def index(conn, _params) do
render(conn, "index.html")
end
end
| 18.875 | 43 | 0.754967 |
1cc5cec56b4d22b1bb8ec0aef5030aedb2bf4735 | 236 | exs | Elixir | test/cforum_web/controllers/forum_controller_test.exs | campingrider/cforum_ex | cf27684c47d6dc26c9c37a946f1c729a79d27c70 | [
"MIT"
] | 16 | 2019-04-04T06:33:33.000Z | 2021-08-16T19:34:31.000Z | test/cforum_web/controllers/forum_controller_test.exs | campingrider/cforum_ex | cf27684c47d6dc26c9c37a946f1c729a79d27c70 | [
"MIT"
] | 294 | 2019-02-10T11:10:27.000Z | 2022-03-30T04:52:53.000Z | test/cforum_web/controllers/forum_controller_test.exs | campingrider/cforum_ex | cf27684c47d6dc26c9c37a946f1c729a79d27c70 | [
"MIT"
] | 10 | 2019-02-10T10:39:24.000Z | 2021-07-06T11:46:05.000Z | defmodule CforumWeb.ForumControllerTest do
  use CforumWeb.ConnCase
  # Smoke test: the root page renders and contains the forums heading.
  test "lists forums on index", %{conn: conn} do
    conn = get(conn, Path.root_path(conn, :index))
    assert html_response(conn, 200) =~ gettext("Forums")
  end
end
| 26.222222 | 56 | 0.716102 |
1cc5dc7066f3c158117df81c1a276b132fd22d91 | 2,466 | ex | Elixir | lib/db_connection/backoff.ex | khattori/db_connection | fb82de035522762e4df804e48750ee1f977aec0d | [
"Apache-2.0"
] | 227 | 2016-06-16T13:56:02.000Z | 2022-03-09T23:03:58.000Z | deps/db_connection/lib/db_connection/backoff.ex | rwtrecs/rocketseat-nlw5-inmana | 8ce8bc32e0bdd005c423394bb163945747b557e2 | [
"MIT"
] | 198 | 2016-06-20T08:08:15.000Z | 2022-03-06T17:54:37.000Z | deps/db_connection/lib/db_connection/backoff.ex | adrianomota/blog | ef3b2d2ed54f038368ead8234d76c18983caa75b | [
"MIT"
] | 110 | 2016-06-20T03:50:39.000Z | 2022-03-03T20:53:01.000Z | defmodule DBConnection.Backoff do
  @moduledoc false
  @compile :nowarn_deprecated_function
  alias DBConnection.Backoff
  # Default backoff strategy and bounds; the values are used as the lower and
  # upper delay limits (presumably milliseconds — TODO confirm with callers).
  @default_type :rand_exp
  @min 1_000
  @max 30_000
  # type: :rand | :exp | :rand_exp; state: per-type progression state.
  defstruct [:type, :min, :max, :state]
def new(opts) do
case Keyword.get(opts, :backoff_type, @default_type) do
:stop ->
nil
type ->
{min, max} = min_max(opts)
new(type, min, max)
end
end
def backoff(%Backoff{type: :rand, min: min, max: max} = s) do
{rand(min, max), s}
end
def backoff(%Backoff{type: :exp, min: min, state: nil} = s) do
{min, %Backoff{s | state: min}}
end
def backoff(%Backoff{type: :exp, max: max, state: prev} = s) do
require Bitwise
next = min(Bitwise.<<<(prev, 1), max)
{next, %Backoff{s | state: next}}
end
def backoff(%Backoff{type: :rand_exp, max: max, state: state} = s) do
{prev, lower} = state
next_min = min(prev, lower)
next_max = min(prev * 3, max)
next = rand(next_min, next_max)
{next, %Backoff{s | state: {next, lower}}}
end
def reset(%Backoff{type: :rand} = s), do: s
def reset(%Backoff{type: :exp} = s), do: %Backoff{s | state: nil}
def reset(%Backoff{type: :rand_exp, min: min, state: {_, lower}} = s) do
%Backoff{s | state: {min, lower}}
end
## Internal
defp min_max(opts) do
case {opts[:backoff_min], opts[:backoff_max]} do
{nil, nil} -> {@min, @max}
{nil, max} -> {min(@min, max), max}
{min, nil} -> {min, max(min, @max)}
{min, max} -> {min, max}
end
end
  # Validation clauses: reject non-integer/negative bounds or an inverted
  # range before building the per-type struct. Clause order matters — the
  # guards must run before the constructor clauses below.
  defp new(_, min, _) when not (is_integer(min) and min >= 0) do
    raise ArgumentError, "minimum #{inspect(min)} not 0 or a positive integer"
  end
  defp new(_, _, max) when not (is_integer(max) and max >= 0) do
    raise ArgumentError, "maximum #{inspect(max)} not 0 or a positive integer"
  end
  defp new(_, min, max) when min > max do
    raise ArgumentError, "minimum #{min} is greater than maximum #{max}"
  end
  defp new(:rand, min, max) do
    %Backoff{type: :rand, min: min, max: max, state: nil}
  end
  defp new(:exp, min, max) do
    %Backoff{type: :exp, min: min, max: max, state: nil}
  end
  defp new(:rand_exp, min, max) do
    # `lower` floors the randomized range at one third of the maximum so the
    # sequence cannot collapse back to very small delays; state is
    # {previous_delay, lower_bound}.
    lower = max(min, div(max, 3))
    %Backoff{type: :rand_exp, min: min, max: max, state: {min, lower}}
  end
  # Any other type atom is a configuration error.
  defp new(type, _, _) do
    raise ArgumentError, "unknown type #{inspect(type)}"
  end
defp rand(min, max) do
:rand.uniform(max - min + 1) + min - 1
end
end
| 25.42268 | 78 | 0.603406 |
1cc5ea55d7069a1d514366be90627ead476907f7 | 2,753 | ex | Elixir | clients/container/lib/google_api/container/v1/model/set_network_policy_request.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/container/lib/google_api/container/v1/model/set_network_policy_request.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/container/lib/google_api/container/v1/model/set_network_policy_request.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Container.V1.Model.SetNetworkPolicyRequest do
  @moduledoc """
  SetNetworkPolicyRequest enables/disables network policy for a cluster.

  ## Attributes

  * `clusterId` (*type:* `String.t`, *default:* `nil`) - Deprecated. The name of the cluster.
  This field has been deprecated and replaced by the name field.
  * `name` (*type:* `String.t`, *default:* `nil`) - The name (project, location, cluster id) of the cluster to set networking
  policy. Specified in the format `projects/*/locations/*/clusters/*`.
  * `networkPolicy` (*type:* `GoogleApi.Container.V1.Model.NetworkPolicy.t`, *default:* `nil`) - Required. Configuration options for the NetworkPolicy feature.
  * `projectId` (*type:* `String.t`, *default:* `nil`) - Deprecated. The Google Developers Console [project ID or project
  number](https://developers.google.com/console/help/new/#projectnumber).
  This field has been deprecated and replaced by the name field.
  * `zone` (*type:* `String.t`, *default:* `nil`) - Deprecated. The name of the Google Compute Engine
  [zone](https://cloud.google.com/compute/docs/zones#available) in which the
  cluster resides. This field has been deprecated and replaced by the name
  field.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :clusterId => String.t(),
          :name => String.t(),
          :networkPolicy => GoogleApi.Container.V1.Model.NetworkPolicy.t(),
          :projectId => String.t(),
          :zone => String.t()
        }

  # Field declarations mirror the JSON payload of the REST resource; the
  # `field/2` macro comes from GoogleApi.Gax.ModelBase.
  field(:clusterId)
  field(:name)
  field(:networkPolicy, as: GoogleApi.Container.V1.Model.NetworkPolicy)
  field(:projectId)
  field(:zone)
end
# JSON decoding delegates to the generated model's decode/2.
defimpl Poison.Decoder, for: GoogleApi.Container.V1.Model.SetNetworkPolicyRequest do
  def decode(value, options) do
    GoogleApi.Container.V1.Model.SetNetworkPolicyRequest.decode(value, options)
  end
end

# JSON encoding delegates to the shared Gax model-base encoder.
defimpl Poison.Encoder, for: GoogleApi.Container.V1.Model.SetNetworkPolicyRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 41.712121 | 161 | 0.714493 |
1cc608b722c9ae6bafb69ff4ec8f1f3877e63e93 | 2,731 | exs | Elixir | test/wallaby/selenium_test.exs | bobwaycott/wallaby | b25e6937f007f6e56be1b7b6a62591db5ac5a240 | [
"MIT"
] | 926 | 2016-03-28T17:01:54.000Z | 2019-11-05T11:59:58.000Z | test/wallaby/selenium_test.exs | marcandre/wallaby | d351c46f1a272bbcf8333c8327d154be3bd12c5c | [
"MIT"
] | 377 | 2016-03-17T00:35:56.000Z | 2019-11-03T07:15:24.000Z | test/wallaby/selenium_test.exs | marcandre/wallaby | d351c46f1a272bbcf8333c8327d154be3bd12c5c | [
"MIT"
] | 144 | 2016-03-29T15:28:28.000Z | 2019-10-31T00:48:42.000Z | defmodule Wallaby.SeleniumTest do
use Wallaby.HttpClientCase, async: true
alias Wallaby.Selenium
alias Wallaby.Session
alias Wallaby.TestSupport.JSONWireProtocolResponses
  describe "start_session/1" do
    test "starts a selenium session with remote_url", %{bypass: bypass} do
      remote_url = bypass_url(bypass, "/")
      session_id = "abc123"
      # Stub the WebDriver "new session" endpoint with a canned JSONWire reply.
      Bypass.expect(bypass, "POST", "/session", fn conn ->
        response = JSONWireProtocolResponses.start_session_response(session_id: session_id)
        send_json_resp(conn, 200, response)
      end)
      assert {:ok, session} = Selenium.start_session(remote_url: remote_url)
      # The returned session must point at <remote_url>/session/<id>.
      assert session == %Wallaby.Session{
               session_url: remote_url |> URI.merge("session/#{session_id}") |> to_string(),
               url: remote_url |> URI.merge("session/#{session_id}") |> to_string(),
               id: session_id,
               server: :none,
               capabilities: Wallaby.Selenium.default_capabilities(),
               driver: Wallaby.Selenium
             }
    end
    test "raises a RuntimeError on unknown domain" do
      # DNS resolution failure surfaces as a RuntimeError mentioning :nxdomain.
      remote_url = "http://does.not.exist-asdf/"
      assert_raise RuntimeError, ~r/:nxdomain/, fn ->
        Selenium.start_session(remote_url: remote_url)
      end
    end
    test "raises a RuntimeError when unable to connect", %{bypass: bypass} do
      remote_url = bypass_url(bypass, "/")
      # Take the stub server down so the TCP connection is refused.
      Bypass.down(bypass)
      assert_raise RuntimeError, ~r/:econnrefused/, fn ->
        Selenium.start_session(remote_url: remote_url)
      end
    end
  end
  describe "end_session/1" do
    test "returns :ok on success", %{bypass: bypass} do
      %Session{id: session_id} =
        session =
        bypass
        |> bypass_url("/")
        |> build_session()
      # The driver issues DELETE /session/<id>; reply with a success envelope.
      Bypass.expect_once(bypass, "DELETE", "/session/#{session_id}", fn conn ->
        response = %{"sessionId" => session_id, "value" => nil, "status" => 0}
        send_json_resp(conn, 200, response)
      end)
      assert :ok = Selenium.end_session(session)
    end
    test "returns :ok when unable to connect", %{bypass: bypass} do
      session =
        bypass
        |> bypass_url("/")
        |> build_session()
      # Ending a session is best-effort: a dead server still yields :ok.
      Bypass.down(bypass)
      assert :ok = Selenium.end_session(session)
    end
  end
defp build_session(remote_url) do
session_id = random_string(24)
session_url = remote_url |> URI.merge("session/#{session_id}") |> to_string()
%Wallaby.Session{
session_url: session_url,
url: session_url,
id: session_id,
driver: Wallaby.Selenium
}
end
defp random_string(length) do
:crypto.strong_rand_bytes(length) |> Base.url_encode64() |> binary_part(0, length)
end
end
| 29.365591 | 92 | 0.6342 |
1cc65d13c716e8708a40a03c66d2c9125183991f | 2,155 | ex | Elixir | clients/language/lib/google_api/language/v1/model/analyze_sentiment_response.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/language/lib/google_api/language/v1/model/analyze_sentiment_response.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/language/lib/google_api/language/v1/model/analyze_sentiment_response.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Language.V1.Model.AnalyzeSentimentResponse do
  @moduledoc """
  The sentiment analysis response message.

  ## Attributes

  - documentSentiment (Sentiment): The overall sentiment of the input document. Defaults to: `null`.
  - language (String.t): The language of the text, which will be the same as the language specified in the request or, if not specified, the automatically-detected language. See Document.language field for more details. Defaults to: `null`.
  - sentences ([Sentence]): The sentiment for all the sentences in the document. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :documentSentiment => GoogleApi.Language.V1.Model.Sentiment.t(),
          :language => any(),
          :sentences => list(GoogleApi.Language.V1.Model.Sentence.t())
        }

  # Field declarations mirror the JSON payload; `field/2,3` comes from
  # GoogleApi.Gax.ModelBase.
  field(:documentSentiment, as: GoogleApi.Language.V1.Model.Sentiment)
  field(:language)
  field(:sentences, as: GoogleApi.Language.V1.Model.Sentence, type: :list)
end
# JSON decoding delegates to the generated model's decode/2.
defimpl Poison.Decoder, for: GoogleApi.Language.V1.Model.AnalyzeSentimentResponse do
  def decode(value, options) do
    GoogleApi.Language.V1.Model.AnalyzeSentimentResponse.decode(value, options)
  end
end

# JSON encoding delegates to the shared Gax model-base encoder.
defimpl Poison.Encoder, for: GoogleApi.Language.V1.Model.AnalyzeSentimentResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 39.907407 | 240 | 0.749884 |
1cc675aa3ded67417124148bc510795176f927e0 | 606 | ex | Elixir | lib/level_web/views/form_helpers.ex | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 928 | 2018-04-03T16:18:11.000Z | 2019-09-09T17:59:55.000Z | lib/level_web/views/form_helpers.ex | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 74 | 2018-04-03T00:46:50.000Z | 2019-03-10T18:57:27.000Z | lib/level_web/views/form_helpers.ex | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 89 | 2018-04-03T17:33:20.000Z | 2019-08-19T03:40:20.000Z | defmodule LevelWeb.FormHelpers do
@moduledoc """
Helper functions for generating form markup.
"""
@doc """
Generates the list of classes for a form input.
If there are errors for the given field, the input will be outlined in red.
"""
def input_classes(form, field) do
if form.errors[field] do
"input-field input-field-error"
else
"input-field"
end
end
  @doc """
  Returns the "shake" class if the given changeset contains errors.
  """
  # A changeset whose action is nil has not been submitted yet, so no shake
  # animation is shown. Clause order matters: the nil-action check must come
  # first. Note there is deliberately no clause for a valid, acted-on
  # changeset — such a call would raise FunctionClauseError.
  def error_shake(%Ecto.Changeset{action: nil}), do: ""
  def error_shake(%Ecto.Changeset{valid?: false}), do: "shake"
end
| 24.24 | 77 | 0.674917 |
1cc68cf90bed25b7a04e3efb8026f8b7b187130e | 4,043 | ex | Elixir | lib/mongo/protocol/utils.ex | activeprospect/mongodb | 6bfd814480c3b4eaff505e1c8791d219b8071acc | [
"Apache-2.0"
] | 1 | 2019-01-24T09:02:56.000Z | 2019-01-24T09:02:56.000Z | lib/mongo/protocol/utils.ex | yatender-oktalk/mongodb | 3dd14b9eeff769aee315ad07e050f79501fbad1c | [
"Apache-2.0"
] | 1 | 2021-09-23T18:21:31.000Z | 2021-09-28T14:39:42.000Z | lib/mongo/protocol/utils.ex | activeprospect/mongodb | 6bfd814480c3b4eaff505e1c8791d219b8071acc | [
"Apache-2.0"
] | null | null | null | defmodule Mongo.Protocol.Utils do
@moduledoc false
import Kernel, except: [send: 2]
import Mongo.Messages
def message(id, ops, s) when is_list(ops) do
with :ok <- send(ops, s),
{:ok, ^id, reply} <- recv(s),
do: {:ok, reply}
end
def message(id, op, s) do
with :ok <- send(id, op, s),
{:ok, ^id, reply} <- recv(s),
do: {:ok, reply}
end
  # Runs a database command against the "$cmd" pseudo-collection and decodes
  # the single reply document. X.509 authentication commands must target the
  # "$external" database instead of the connection's default one.
  def command(id, command, s) do
    ns =
      if Keyword.get(command, :mechanism) == "MONGODB-X509" && Keyword.get(command, :authenticate) == 1 do
        namespace("$cmd", nil, "$external")
      else
        namespace("$cmd", s, nil)
      end
    op = op_query(coll: ns, query: BSON.Encoder.document(command),
                  select: "", num_skip: 0, num_return: 1, flags: [])
    case message(id, op, s) do
      {:ok, op_reply(docs: docs)} ->
        # Commands return at most one document; no docs means an empty reply.
        case BSON.Decoder.documents(docs) do
          [] -> {:ok, nil}
          [doc] -> {:ok, doc}
        end
      {:disconnect, _, _} = error ->
        error
    end
  end
def send(id, op, %{socket: {mod, sock}} = s) do
case mod.send(sock, encode(id, op)) do
:ok -> :ok
{:error, reason} -> send_error(reason, s)
end
end
  # Performance regressions of a factor of 1000x have been observed on
  # linux systems for write operations that do not include the getLastError
  # command in the same call to :gen_tcp.send/2 so we hide the workaround
  # for mongosniff behind a flag
  #
  # NOTE: the branch below is chosen at COMPILE time — only one send/2
  # definition ends up in the compiled module.
  if Mix.env in [:dev, :test] && System.get_env("MONGO_NO_BATCH_SEND") do
    def send(ops, %{socket: {mod, sock}} = s) do
      # Do a separate :gen_tcp.send/2 for each message because mongosniff
      # cannot handle more than one message per packet. TCP is a stream
      # protocol, but no.
      # https://jira.mongodb.org/browse/TOOLS-821
      Enum.find_value(List.wrap(ops), fn {id, op} ->
        data = encode(id, op)
        case mod.send(sock, data) do
          :ok -> nil
          {:error, reason} -> send_error(reason, s)
        end
      end)
      || :ok
    end
  else
    def send(ops, %{socket: {mod, sock}} = s) do
      # Batch all encoded messages into one iolist and send them in a single
      # syscall (see the performance note above).
      data =
        Enum.reduce(List.wrap(ops), "", fn {id, op}, acc ->
          [acc|encode(id, op)]
        end)
      case mod.send(sock, data) do
        :ok -> :ok
        {:error, reason} -> send_error(reason, s)
      end
    end
  end
  # Receives one complete wire-protocol message from the socket, buffering
  # partial data until the header (and then the body) can be decoded.
  def recv(s) do
    recv(nil, "", s)
  end
  # TODO: Optimize to reduce :gen_tcp.recv and decode_message calls
  # based on message size in header.
  # :gen.tcp.recv(socket, min(size, max_packet))
  # where max_packet = 64mb
  # First phase: accumulate bytes until a full header can be decoded.
  defp recv(nil, data, %{socket: {mod, sock}} = s) do
    case decode_header(data) do
      {:ok, header, rest} ->
        recv(header, rest, s)
      :error ->
        case mod.recv(sock, 0, s.timeout) do
          {:ok, tail} -> recv(nil, [data|tail], s)
          {:error, reason} -> recv_error(reason, s)
        end
    end
  end
  # Second phase: with the header known, accumulate bytes until the full
  # message body decodes; returns {:ok, request_id, reply}.
  defp recv(header, data, %{socket: {mod, sock}} = s) do
    case decode_message(header, data) do
      {:ok, id, reply, ""} ->
        {:ok, id, reply}
      :error ->
        case mod.recv(sock, 0, s.timeout) do
          {:ok, tail} -> recv(header, [data|tail], s)
          {:error, reason} -> recv_error(reason, s)
        end
    end
  end
defp send_error(reason, s) do
error = Mongo.Error.exception(tag: :tcp, action: "send", reason: reason)
{:disconnect, error, s}
end
defp recv_error(reason, s) do
error = Mongo.Error.exception(tag: :tcp, action: "recv", reason: reason)
{:disconnect, error, s}
end
def namespace(coll, s, nil),
do: [s.database, ?. | coll]
def namespace(coll, _, database),
do: [database, ?. | coll]
def digest(nonce, username, password) do
:crypto.hash(:md5, [nonce, username, digest_password(username, password)])
|> Base.encode16(case: :lower)
end
def digest_password(username, password) do
:crypto.hash(:md5, [username, ":mongo:", password])
|> Base.encode16(case: :lower)
end
end
| 30.398496 | 106 | 0.564432 |
1cc6bb63b8624e1fdcda92d7d0484c5f530a5cde | 1,817 | ex | Elixir | clients/app_engine/lib/google_api/app_engine/v1/model/network_utilization.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/app_engine/lib/google_api/app_engine/v1/model/network_utilization.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/app_engine/lib/google_api/app_engine/v1/model/network_utilization.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AppEngine.V1.Model.NetworkUtilization do
  @moduledoc """
  Target scaling by network usage. Only applicable for VM runtimes.

  ## Attributes

  - targetReceivedBytesPerSecond (Integer): Target bytes received per second. Defaults to: `null`.
  - targetReceivedPacketsPerSecond (Integer): Target packets received per second. Defaults to: `null`.
  - targetSentBytesPerSecond (Integer): Target bytes sent per second. Defaults to: `null`.
  - targetSentPacketsPerSecond (Integer): Target packets sent per second. Defaults to: `null`.
  """

  # Plain struct (no ModelBase here); field names mirror the JSON payload.
  defstruct [
    :"targetReceivedBytesPerSecond",
    :"targetReceivedPacketsPerSecond",
    :"targetSentBytesPerSecond",
    :"targetSentPacketsPerSecond"
  ]
end
# Decoding is the identity: the struct is used as-is after Poison's own pass.
defimpl Poison.Decoder, for: GoogleApi.AppEngine.V1.Model.NetworkUtilization do
  def decode(value, _options) do
    value
  end
end

# Encoding strips nil fields via the generated serializer helper.
defimpl Poison.Encoder, for: GoogleApi.AppEngine.V1.Model.NetworkUtilization do
  def encode(value, options) do
    GoogleApi.AppEngine.V1.Deserializer.serialize_non_nil(value, options)
  end
end
| 34.942308 | 102 | 0.763897 |
1cc6bf044657762e1c9ffcad7ccff54d5cccd407 | 1,504 | ex | Elixir | apps/gitgud/lib/gitgud/repo_supervisor.ex | EdmondFrank/gitgud | 1952c16130564357aa6f23e35f48f19e3a50d4dd | [
"MIT"
] | 449 | 2018-03-06T01:05:55.000Z | 2022-03-23T21:03:56.000Z | apps/gitgud/lib/gitgud/repo_supervisor.ex | EdmondFrank/gitgud | 1952c16130564357aa6f23e35f48f19e3a50d4dd | [
"MIT"
] | 69 | 2018-03-06T09:26:41.000Z | 2022-03-21T22:43:09.000Z | apps/gitgud/lib/gitgud/repo_supervisor.ex | EdmondFrank/gitgud | 1952c16130564357aa6f23e35f48f19e3a50d4dd | [
"MIT"
] | 41 | 2018-03-06T01:06:07.000Z | 2021-11-21T17:55:04.000Z | defmodule GitGud.RepoSupervisor do
@moduledoc """
Supervisor for dealing with repository processes.
"""
use Supervisor
alias GitGud.RepoPool
alias GitGud.RepoStorage
@doc """
Starts the supervisor as part of a supervision tree.
"""
@spec start_link(Supervisor.option | Supervisor.init_option) :: Supervisor.on_start()
def start_link(opts \\ []) do
opts = Keyword.put(opts, :name, __MODULE__)
Supervisor.start_link(__MODULE__, [], opts)
end
@doc """
Returns a process name for a given module and volume.
"""
@spec volume_name(module, binary | nil) :: term
def volume_name(mod, nil), do: mod
def volume_name(mod, volume), do: {:global, {mod, volume}}
@doc """
Registers the current process and volume with an unique name.
"""
@spec register_volume(module, binary | nil) :: :ok | {:error, {:already_started, pid}}
def register_volume(_mod, nil), do: :ok
def register_volume(mod, volume) do
global_name = {mod, volume}
case :global.register_name(global_name, self()) do
:yes ->
:ok
:no ->
{:error, {:already_started, :global.whereis_name(global_name)}}
end
end
#
# Callbacks
#
@impl true
def init([]) do
case RepoStorage.ensure_volume_tagged() do
{:ok, volume} ->
children = [
{RepoStorage, volume},
{RepoPool, volume},
]
Supervisor.init(children, strategy: :one_for_one)
{:error, reason} ->
{:stop, reason}
end
end
end
| 25.491525 | 88 | 0.637633 |
1cc6d0620352b9da85dfb765ea91af2be8178304 | 600 | exs | Elixir | test/hergetto_web/live/page_live_test.exs | dusthijsvdh/hergetto | 87598d8023a68fdb23b0eeb7659f5c61113997c9 | [
"MIT"
] | 8 | 2021-03-07T16:24:05.000Z | 2022-02-28T02:36:35.000Z | test/hergetto_web/live/page_live_test.exs | dusthijsvdh/hergetto | 87598d8023a68fdb23b0eeb7659f5c61113997c9 | [
"MIT"
] | 126 | 2021-02-15T10:51:07.000Z | 2022-03-28T02:25:01.000Z | test/hergetto_web/live/page_live_test.exs | dusthijsvdh/hergetto | 87598d8023a68fdb23b0eeb7659f5c61113997c9 | [
"MIT"
] | null | null | null | defmodule HergettoWeb.PageLiveTest do
  use HergettoWeb.ConnCase
  import Phoenix.LiveViewTest
  test "disconnected and connected render", %{conn: conn} do
    # Both the static (disconnected) HTML and the live-rendered view must
    # contain the page content.
    {:ok, page_live, disconnected_html} = live(conn, "/")
    assert disconnected_html =~ "Resources"
    assert render(page_live) =~ "Resources"
  end
  test "rendered meta tags", %{conn: conn} do
    # SEO meta tags are rendered into the page head.
    {:ok, _view, html} = live(conn, "/")
    assert html =~ ~s|<meta content="Hergetto · Together in a safe way!" name="title"/>|
    assert html =~ ~s|<meta content="phoenix watch youtube videos together hergetto" name="keywords"/>|
  end
end
| 35.294118 | 103 | 0.688333 |
1cc6f77822a982d915e1dac53980882e92c4f7d1 | 1,910 | ex | Elixir | example/lib/wizard_example/button.ex | paulanthonywilson/vintage_net_wizard | 338877f79b96e9ba1c2482260b95a1a44d7c66d8 | [
"Apache-2.0"
] | 50 | 2019-08-02T18:56:53.000Z | 2022-02-19T20:00:52.000Z | example/lib/wizard_example/button.ex | paulanthonywilson/vintage_net_wizard | 338877f79b96e9ba1c2482260b95a1a44d7c66d8 | [
"Apache-2.0"
] | 93 | 2019-06-21T02:36:09.000Z | 2022-03-21T09:47:27.000Z | example/lib/wizard_example/button.ex | paulanthonywilson/vintage_net_wizard | 338877f79b96e9ba1c2482260b95a1a44d7c66d8 | [
"Apache-2.0"
] | 14 | 2019-09-16T23:50:11.000Z | 2022-03-18T16:36:21.000Z | defmodule WizardExample.Button do
  use GenServer
  @moduledoc """
  This GenServer starts the wizard if a button is depressed for long enough.
  """
  alias Circuits.GPIO
  @doc """
  Start the button monitor
  Pass an index to the GPIO that's connected to the button.
  """
  @spec start_link(non_neg_integer()) :: GenServer.on_start()
  def start_link(gpio_pin) do
    # No name registration: multiple monitors for different pins may coexist.
    GenServer.start_link(__MODULE__, gpio_pin)
  end
  @impl GenServer
  def init(gpio_pin) do
    # Open the pin for input and subscribe to both rising and falling edges
    # so press (1) and release (0) arrive as :circuits_gpio messages.
    {:ok, gpio} = GPIO.open(gpio_pin, :input)
    :ok = GPIO.set_interrupts(gpio, :both)
    # Keep the GPIO handle in state so it is not garbage collected.
    {:ok, %{pin: gpio_pin, gpio: gpio}}
  end
  @impl GenServer
  def handle_info({:circuits_gpio, gpio_pin, _timestamp, 1}, %{pin: gpio_pin} = state) do
    # Button pressed. Start a timer to launch the wizard when it's long enough
    # (5 seconds — the GenServer timeout delivers :timeout below).
    {:noreply, state, 5_000}
  end
  def handle_info({:circuits_gpio, gpio_pin, _timestamp, 0}, %{pin: gpio_pin} = state) do
    # Button released. The GenServer timer is implicitly cancelled by receiving this message.
    {:noreply, state}
  end
  def handle_info(:timeout, state) do
    # Held long enough: launch the configuration wizard with device metadata.
    :ok = VintageNetWizard.run_wizard(device_info: get_device_info())
    {:noreply, state}
  end
defp get_device_info() do
kv =
Nerves.Runtime.KV.get_all_active()
|> kv_to_map
mac_addr = VintageNet.get(["interface", "wlan0", "mac_address"])
[
{"WiFi Address", mac_addr},
{"Serial number", serial_number()},
{"Firmware", kv["nerves_fw_product"]},
{"Firmware version", kv["nerves_fw_version"]},
{"Firmware UUID", kv["nerves_fw_uuid"]}
]
end
defp kv_to_map(key_values) do
for kv <- key_values, into: %{}, do: kv
end
defp serial_number() do
with boardid_path when not is_nil(boardid_path) <- System.find_executable("boardid"),
{id, 0} <- System.cmd(boardid_path, []) do
String.trim(id)
else
_other -> "Unknown"
end
end
end
| 26.527778 | 93 | 0.663351 |
1cc701efc5d9032c794dcf9f248a0ac758dfb269 | 9,186 | exs | Elixir | apps/snitch_core/test/domain/shipping_calculator_test.exs | VeryBigThings/avia | 7ce5d5b244ae0dfddc30c09c17efe27f1718a4c9 | [
"MIT"
] | 1 | 2021-04-08T22:29:19.000Z | 2021-04-08T22:29:19.000Z | apps/snitch_core/test/domain/shipping_calculator_test.exs | VeryBigThings/avia | 7ce5d5b244ae0dfddc30c09c17efe27f1718a4c9 | [
"MIT"
] | null | null | null | apps/snitch_core/test/domain/shipping_calculator_test.exs | VeryBigThings/avia | 7ce5d5b244ae0dfddc30c09c17efe27f1718a4c9 | [
"MIT"
] | null | null | null | defmodule Snitch.Domain.ShippingCalculatorTest do
  use ExUnit.Case, async: true
  use Snitch.DataCase

  import Snitch.Factory

  alias Snitch.Data.Schema.Package
  alias Snitch.Domain.ShippingCalculator

  # Rule codes used throughout:
  #   :fso  - free shipping for all orders
  #   :fsoa - free shipping for orders above a threshold amount
  #   :ofr  - fixed rate per order
  #   :fsrp - fixed rate per product
  setup do
    Application.put_env(:snitch_core, :defaults, currency: :USD)
  end

  setup :zones
  setup :shipping_methods
  setup :embedded_shipping_methods

  describe "fixed rate per order :ofr" do
    test "returns only cost for :ofr", context do
      rule_active_manifest = %{fso: false, fsoa: false, ofr: true, fsrp: false}

      preference_manifest = %{
        fso: %{},
        fsoa: %{amount: 100},
        ofr: %{cost: 20},
        fsrp: %{cost_per_item: 5}
      }

      item_info = %{unit_price: Money.new!(currency(), 10), quantity: 5}
      %{package: package, category: category} = setup_package_with_sc(context, item_info)
      setup_shipping_rules(rule_active_manifest, preference_manifest, category)

      shipping_cost = ShippingCalculator.calculate(package)

      assert shipping_cost ==
               currency()
               |> Money.new!(preference_manifest.ofr.cost)
               |> Money.round()
    end

    test "with free for order above amount, returns 0 as :fsoa applies", context do
      rule_active_manifest = %{fso: false, fsoa: true, ofr: true, fsrp: false}

      preference_manifest = %{
        fso: %{},
        fsoa: %{amount: 50},
        ofr: %{cost: 20},
        fsrp: %{cost_per_item: 5}
      }

      item_info = %{unit_price: Money.new!(currency(), 10), quantity: 7}
      %{package: package, category: category} = setup_package_with_sc(context, item_info)
      setup_shipping_rules(rule_active_manifest, preference_manifest, category)

      shipping_cost = ShippingCalculator.calculate(package)

      # as order cost is above 50 USD shipping cost is 0
      # item_info.unit_price * item_info.quantity > preference_manifest.fsoa.amount
      assert shipping_cost ==
               currency()
               |> Money.new!(0)
               |> Money.round()
    end

    test "with free for order above amount :fsoa does not apply", context do
      rule_active_manifest = %{fso: false, fsoa: true, ofr: true, fsrp: false}

      preference_manifest = %{
        fso: %{},
        fsoa: %{amount: 200},
        ofr: %{cost: 20},
        fsrp: %{cost_per_item: 5}
      }

      item_info = %{unit_price: Money.new!(currency(), 10), quantity: 1}
      %{package: package, category: category} = setup_package_with_sc(context, item_info)
      setup_shipping_rules(rule_active_manifest, preference_manifest, category)

      shipping_cost = ShippingCalculator.calculate(package)

      # as free shipping is available over 200, fixed rate cost is applied
      # item_info.unit_price * item_info.quantity < preference_manifest.fsoa.amount
      assert shipping_cost ==
               currency()
               |> Money.new!(preference_manifest.ofr.cost)
               |> Money.round()
    end
  end

  describe "fixed rate per product" do
    test "returns only cost for :fsrp", context do
      rule_active_manifest = %{fso: false, fsoa: false, ofr: false, fsrp: true}

      preference_manifest = %{
        fso: %{},
        fsoa: %{amount: 100},
        ofr: %{cost: 20},
        fsrp: %{cost_per_item: 5}
      }

      item_info = %{unit_price: Money.new!(currency(), 10), quantity: 5}
      %{package: package, category: category} = setup_package_with_sc(context, item_info)
      setup_shipping_rules(rule_active_manifest, preference_manifest, category)

      shipping_cost = ShippingCalculator.calculate(package)

      # shipping cost at the rate of 5 per product is applied for 5 products
      # as no other rule is active.
      assert shipping_cost ==
               currency()
               |> Money.new!(preference_manifest.fsrp.cost_per_item)
               |> Money.mult!(item_info.quantity)
               |> Money.round()
    end

    test "with free for order above amount, returns 0 as :fsoa applies", context do
      rule_active_manifest = %{fso: false, fsoa: true, ofr: true, fsrp: true}

      preference_manifest = %{
        fso: %{},
        fsoa: %{amount: 50},
        ofr: %{cost: 20},
        fsrp: %{cost_per_item: 5}
      }

      item_info = %{unit_price: Money.new!(currency(), 10), quantity: 7}
      %{package: package, category: category} = setup_package_with_sc(context, item_info)
      setup_shipping_rules(rule_active_manifest, preference_manifest, category)

      shipping_cost = ShippingCalculator.calculate(package)

      # as order cost is above 50 USD shipping cost is 0
      assert shipping_cost == currency() |> Money.new!(0) |> Money.round()
    end

    test "with free for order above amount :fsoa does not apply", context do
      rule_active_manifest = %{fso: false, fsoa: true, ofr: false, fsrp: true}

      preference_manifest = %{
        fso: %{},
        fsoa: %{amount: 200},
        ofr: %{cost: 20},
        fsrp: %{cost_per_item: 5}
      }

      item_info = %{unit_price: Money.new!(currency(), 10), quantity: 1}
      %{package: package, category: category} = setup_package_with_sc(context, item_info)
      setup_shipping_rules(rule_active_manifest, preference_manifest, category)

      shipping_cost = ShippingCalculator.calculate(package)

      # as free shipping is available over 200, fixed rate per product is applied
      assert shipping_cost ==
               currency()
               |> Money.new!(preference_manifest.fsrp.cost_per_item)
               |> Money.mult!(item_info.quantity)
               |> Money.round()
    end
  end

  test "for free shipping for all orders", context do
    rule_active_manifest = %{fso: true, fsoa: false, ofr: false, fsrp: false}

    preference_manifest = %{
      fso: %{},
      fsoa: %{amount: 200},
      ofr: %{cost: 20},
      fsrp: %{cost_per_item: 5}
    }

    item_info = %{unit_price: Money.new!(currency(), 10), quantity: 1}
    %{package: package, category: category} = setup_package_with_sc(context, item_info)
    setup_shipping_rules(rule_active_manifest, preference_manifest, category)

    shipping_cost = ShippingCalculator.calculate(package)

    # free shipping rule is active, so cost is 0 regardless of the other prefs
    assert shipping_cost == Money.new!(currency(), 0) |> Money.round()
  end

  test "check for no rules set", context do
    item_info = %{unit_price: Money.new!(currency(), 10), quantity: 1}
    # `category` from the returned map is not needed here; match only the package
    # (previously an unused `category` binding triggered a compiler warning).
    %{package: package} = setup_package_with_sc(context, item_info)

    shipping_cost = ShippingCalculator.calculate(package)

    # since no rules set cost is 0
    assert shipping_cost == Money.new!(currency(), 0) |> Money.round()
  end

  ############################# private functions #########################

  # Inserts one shipping rule per code (:ofr, :fso, :fsrp, :fsoa) for the given
  # category, with the supplied preferences and active flags.
  defp setup_shipping_rules(rule_active_manifest, preference_manifest, category) do
    %{
      ofr:
        shipping_rule(
          insert(:shipping_identifier, code: :ofr, description: "order fixed rate"),
          category,
          preference_manifest.ofr,
          rule_active_manifest.ofr
        ),
      fso:
        shipping_rule(
          insert(:shipping_identifier, code: :fso, description: "free shipping"),
          category,
          preference_manifest.fso,
          rule_active_manifest.fso
        ),
      fsrp:
        shipping_rule(
          insert(:shipping_identifier, code: :fsrp, description: "fixed shipping per product"),
          category,
          preference_manifest.fsrp,
          rule_active_manifest.fsrp
        ),
      fsoa:
        shipping_rule(
          insert(:shipping_identifier, code: :fsoa, description: "fixed shipping above amount"),
          category,
          preference_manifest.fsoa,
          rule_active_manifest.fsoa
        )
    }
  end

  # Inserts a single shipping rule row. The insert result is returned directly
  # (previously it was bound to an unused `sr` variable, causing a warning).
  defp shipping_rule(identifier, category, preferences, active_status) do
    insert(:shipping_rule,
      active?: active_status,
      preferences: preferences,
      shipping_rule_identifier: identifier,
      shipping_category: category
    )
  end

  # Builds an order with a single line item and a package (with one package
  # item) in the given shipping category; returns the reloaded package and the
  # category so tests can attach rules to it.
  defp setup_package_with_sc(context, item_info) do
    %{embedded_shipping_methods: embedded_shipping_methods} = context
    %{quantity: quantity, unit_price: unit_price} = item_info

    # setup stock for product
    stock_item = insert(:stock_item, count_on_hand: 100)
    shipping_category = insert(:shipping_category)

    # make order and its packages
    product = stock_item.product
    order = insert(:order, state: "delivery")

    line_item =
      insert(:line_item,
        order: order,
        product: product,
        quantity: quantity,
        unit_price: unit_price
      )

    package =
      insert(:package,
        shipping_methods: embedded_shipping_methods,
        order: order,
        items: [],
        origin: stock_item.stock_location,
        shipping_category: shipping_category
      )

    # Inserted for its side effect only; the package is reloaded with items
    # below (previously bound to an unused `package_item` variable).
    insert(:package_item,
      quantity: quantity,
      product: product,
      line_item: line_item,
      package: package
    )

    package = Package |> Repo.get(package.id) |> Repo.preload(:items)
    %{package: package, category: shipping_category}
  end
end
| 31.675862 | 96 | 0.63673 |
1cc71ae1b93298825aff57a24cf554e9291f4826 | 3,458 | ex | Elixir | lib/plaid/transactions.ex | degzhaus/plaid-elixir | b10557303276efc6d66063af9ed1b2db2444e06b | [
"MIT"
] | null | null | null | lib/plaid/transactions.ex | degzhaus/plaid-elixir | b10557303276efc6d66063af9ed1b2db2444e06b | [
"MIT"
] | null | null | null | lib/plaid/transactions.ex | degzhaus/plaid-elixir | b10557303276efc6d66063af9ed1b2db2444e06b | [
"MIT"
] | null | null | null | defmodule Plaid.Transactions do
  @moduledoc """
  Functions for Plaid `transactions` endpoint.
  """

  import Plaid, only: [make_request_with_cred: 4, get_cred: 0]

  alias Plaid.Utils

  # Top-level response body for POST /transactions/get.
  defstruct accounts: [], item: nil, total_transactions: nil, transactions: [], request_id: nil

  @type t :: %__MODULE__{
          accounts: [Plaid.Accounts.Account.t()],
          item: Plaid.Item.t(),
          total_transactions: integer,
          transactions: [Plaid.Transactions.Transaction.t()],
          request_id: String.t()
        }
  # Request parameters: access_token, date range, and an optional options map.
  @type params :: %{required(atom) => String.t() | map}
  # API credentials passed with each request.
  @type cred :: %{required(atom) => String.t()}

  # Base path segment for this endpoint group.
  @endpoint "transactions"

  defmodule Transaction do
    @moduledoc """
    Plaid Transaction data structure.
    """

    defstruct account_id: nil,
              account_owner: nil,
              amount: nil,
              category: nil,
              category_id: nil,
              date: nil,
              location: nil,
              name: nil,
              payment_meta: nil,
              pending: false,
              pending_transaction_id: nil,
              transaction_id: nil,
              transaction_type: nil

    @type t :: %__MODULE__{
            account_id: String.t(),
            account_owner: String.t(),
            amount: float,
            category: [String.t()],
            category_id: String.t(),
            date: String.t(),
            location: Plaid.Transactions.Transaction.Location.t(),
            name: String.t(),
            payment_meta: Plaid.Transactions.Transaction.PaymentMeta.t(),
            pending: true | false,
            pending_transaction_id: String.t(),
            transaction_id: String.t(),
            transaction_type: String.t()
          }

    defmodule Location do
      @moduledoc """
      Plaid Transaction Location data structure.
      """

      defstruct address: nil, city: nil, state: nil, zip: nil, lat: nil, lon: nil

      @type t :: %__MODULE__{
              address: String.t(),
              city: String.t(),
              state: String.t(),
              zip: String.t(),
              lat: float,
              lon: float
            }
    end

    defmodule PaymentMeta do
      @moduledoc """
      Plaid Transaction Payment Metadata data structure.
      """

      defstruct by_order_of: nil,
                payee: nil,
                payer: nil,
                payment_method: nil,
                payment_processor: nil,
                ppd_id: nil,
                reason: nil,
                reference_number: nil

      @type t :: %__MODULE__{
              by_order_of: String.t(),
              payee: String.t(),
              payer: String.t(),
              payment_method: String.t(),
              payment_processor: String.t(),
              ppd_id: String.t(),
              reason: String.t(),
              reference_number: String.t()
            }
    end
  end

  @doc """
  Gets transactions data associated with an Item.

  Parameters
  ```
  %{
    access_token: "access-env-identifier",
    start_date: "2017-01-01",
    end_date: "2017-03-31",
    options: %{
      count: 20,
      offset: 0
    }
  }
  ```
  """
  @spec get(params, cred | nil) :: {:ok, Plaid.Transactions.t()} | {:error, Plaid.Error.t()}
  def get(params, cred \\ get_cred()) do
    endpoint = "#{@endpoint}/get"

    # POST to transactions/get and decode the response into %Plaid.Transactions{}.
    make_request_with_cred(:post, endpoint, cred, params)
    |> Utils.handle_resp(:transactions)
  end
end
| 27.228346 | 95 | 0.530943 |
1cc72f10c01334d1202a5a827d68ccda0a348e19 | 893 | exs | Elixir | .check.exs | qworks-io/rabbitex | 3359fc573b8e8e60cdd2cc03b6ebc2bdcc8a3d70 | [
"MIT"
] | 29 | 2020-05-02T18:25:31.000Z | 2021-02-16T19:43:39.000Z | .check.exs | qworks-io/rabbit_ex_mq | 3359fc573b8e8e60cdd2cc03b6ebc2bdcc8a3d70 | [
"MIT"
] | 43 | 2020-04-30T16:34:46.000Z | 2021-07-26T06:22:43.000Z | .check.exs | qworks-io/rabbit_ex_mq | 3359fc573b8e8e60cdd2cc03b6ebc2bdcc8a3d70 | [
"MIT"
] | 3 | 2020-05-03T15:47:16.000Z | 2021-04-05T05:05:47.000Z | [
## all available options with default values (see `mix check` docs for description)
# parallel: true,
# skipped: true,

## list of tools (see `mix check` docs for defaults)
tools: [
  ## curated tools may be disabled (e.g. the check for compilation warnings)
  # {:compiler, false},
  # the sobelow security scan is disabled here
  {:sobelow, false},

  ## ...or adjusted (e.g. use one-line formatter for more compact credo output)
  # {:credo, "mix credo --format oneline"},
  {:dialyzer, "mix dialyzer"},

  ## ...or reordered (e.g. to see output from ex_unit before others)
  # {:ex_unit, order: -1},

  ## custom new tools may be added (mix tasks or arbitrary commands)
  # {:my_mix_task, command: "mix release", env: %{"MIX_ENV" => "prod"}},
  # {:my_arbitrary_tool, command: "npm test", cd: "assets"},
  # {:my_arbitrary_script, command: ["my_script", "argument with spaces"], cd: "scripts"}
]
]
| 35.72 | 91 | 0.634938 |
1cc76b3e2652f1d068138e8528f20dddbfb4c64e | 236 | exs | Elixir | priv/repo/migrations/20200812040021_add_parent_field_in_person.exs | calvin-kargo/fictitious | a37cb7db2370d136ff59c8072108456da9c757c4 | [
"MIT"
] | 18 | 2020-05-26T12:22:51.000Z | 2021-12-18T15:34:48.000Z | priv/repo/migrations/20200812040021_add_parent_field_in_person.exs | calvin-kargo/fictitious | a37cb7db2370d136ff59c8072108456da9c757c4 | [
"MIT"
] | 4 | 2020-09-05T07:41:08.000Z | 2021-07-28T03:22:36.000Z | priv/repo/migrations/20200812040021_add_parent_field_in_person.exs | calvin-kargo/fictitious | a37cb7db2370d136ff59c8072108456da9c757c4 | [
"MIT"
] | 4 | 2020-08-12T03:19:28.000Z | 2021-06-28T04:53:20.000Z | defmodule Fictitious.Repo.Migrations.AddParentFieldInPerson do
use Ecto.Migration
# Adds a self-referential parent_id foreign key to persons, letting a person
# row reference another person as its parent. on_delete: :nothing leaves
# child rows untouched when the referenced parent row is deleted.
def change do
  alter table(:persons) do
    add :parent_id, references(:persons, column: :id, type: :id, on_delete: :nothing)
  end
end
end
| 23.6 | 87 | 0.724576 |
1cc7754cfa07bf0c14e4566f9bfd917adbedc49f | 856 | ex | Elixir | lib/brewing_stand/dummy_client.ex | Ovyerus/brewing-stand | a631d5e90485b959525e3a79b941f7aaa86b3fdd | [
"MIT"
] | 14 | 2021-03-28T10:27:50.000Z | 2021-04-27T10:14:07.000Z | lib/brewing_stand/dummy_client.ex | Ovyerus/brewing-stand | a631d5e90485b959525e3a79b941f7aaa86b3fdd | [
"MIT"
] | null | null | null | lib/brewing_stand/dummy_client.ex | Ovyerus/brewing-stand | a631d5e90485b959525e3a79b941f7aaa86b3fdd | [
"MIT"
] | null | null | null | defmodule BrewingStand.DummyClient do
require Logger
alias BrewingStand.DummyClient.{Level, PacketReader}
import BrewingStand.Util
# Initializes the dummy level, connects to the local server at
# 127.0.0.1:25565 (raw packets, passive mode, list payloads) and enters the
# client loop. Stops the VM with status 1 on any connection failure.
def start do
  Level.init()

  case :gen_tcp.connect({127, 0, 0, 1}, 25565, [:list, :inet, packet: :raw, active: false]) do
    {:ok, socket} ->
      Logger.info("Connected to 127.0.0.1:25565")
      loop(socket)

    {:error, :econnrefused} ->
      Logger.error("Unable to connect to server ECONNREFUSED")
      System.stop(1)

    e ->
      IO.inspect(e)
      System.stop(1)
  end
end
# Sends the identify packet — opcode 0x00, protocol byte 0x07, padded
# username and key strings, and a trailing 0x00 — then hands the socket to
# the packet reader.
def loop(socket) do
  # Identify to kick things off
  username = pad_string('Testy')
  key = pad_string('(none)')
  packet = [0x00, 0x07, username, key, 0x00] |> List.flatten()

  Logger.debug("Sent identify")
  :gen_tcp.send(socket, packet)
  PacketReader.read(socket)
end
end
| 23.135135 | 96 | 0.626168 |
1cc77a9082a8458556a963105dcbad289d7f6194 | 1,922 | exs | Elixir | mix.exs | rayrrr/fameliphotos | cc928abdc6d761d76113067432e9d6d0fcb2507b | [
"MIT"
] | null | null | null | mix.exs | rayrrr/fameliphotos | cc928abdc6d761d76113067432e9d6d0fcb2507b | [
"MIT"
] | null | null | null | mix.exs | rayrrr/fameliphotos | cc928abdc6d761d76113067432e9d6d0fcb2507b | [
"MIT"
] | null | null | null | defmodule PhotoGallery.MixProject do
use Mix.Project
# Mix project definition: app name/version, Elixir requirement, per-env
# compile paths, extra Phoenix/gettext compilers, and deps/aliases wiring.
def project do
  [
    app: :photo_gallery,
    version: "0.1.0",
    elixir: "~> 1.5",
    elixirc_paths: elixirc_paths(Mix.env()),
    compilers: [:phoenix, :gettext] ++ Mix.compilers(),
    start_permanent: Mix.env() == :prod,
    aliases: aliases(),
    deps: deps()
  ]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
# Declares the application callback module and the extra OTP apps started
# alongside it.
def application do
  [
    mod: {PhotoGallery.Application, []},
    extra_applications: [:logger, :runtime_tools]
  ]
end
# Specifies which paths to compile per environment: test helpers under
# `test/support` are compiled only in the :test environment.
defp elixirc_paths(env) do
  case env do
    :test -> ["lib", "test/support"]
    _other -> ["lib"]
  end
end
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
  [
    # Phoenix web stack + Ecto/Postgres persistence
    {:phoenix, "~> 1.4.0"},
    {:phoenix_pubsub, "~> 1.1"},
    {:phoenix_ecto, "~> 4.0"},
    {:ecto_sql, "~> 3.0"},
    {:postgrex, ">= 0.0.0"},
    {:phoenix_html, "~> 2.11"},
    {:phoenix_live_reload, "~> 1.2", only: :dev},
    {:gettext, "~> 0.11"},
    {:jason, "~> 1.0"},
    {:plug_cowboy, "~> 2.0"},
    # Auth (pow) and authorization (bodyguard)
    {:pow, "~> 1.0.4"},
    {:bodyguard, "~> 2.2"},
    # File uploads (arc) backed by AWS S3
    {:arc, "~> 0.11.0"},
    {:arc_ecto, "~> 0.11.1"},
    {:ex_aws, "~> 2.0"},
    {:ex_aws_s3, "~> 2.0"},
    {:hackney, "~> 1.6"},
    {:poison, "~> 3.1"},
    {:sweet_xml, "~> 0.6"}
  ]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
#     $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
  [
    "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
    "ecto.reset": ["ecto.drop", "ecto.setup"],
    test: ["ecto.create --quiet", "ecto.migrate", "test"]
  ]
end
end
| 26.694444 | 79 | 0.546306 |
1cc782de2cdbddecdf0e3fb573bf2aac708bdd69 | 283 | ex | Elixir | lib/flea/Variable.ex | davidgrupp/Flea | a15bb7d4b121b08155862b390dfe1626bbdc5840 | [
"Apache-2.0"
] | null | null | null | lib/flea/Variable.ex | davidgrupp/Flea | a15bb7d4b121b08155862b390dfe1626bbdc5840 | [
"Apache-2.0"
] | null | null | null | lib/flea/Variable.ex | davidgrupp/Flea | a15bb7d4b121b08155862b390dfe1626bbdc5840 | [
"Apache-2.0"
] | null | null | null | defmodule Flea.Variable do
  # A named variable holding a dictionary of member functions keyed by name.
  # NOTE(review): HashDict and the Dict module are deprecated since Elixir 1.3;
  # a plain map with Map.put/3 would be the modern equivalent — confirm before
  # migrating, as existing structs may already carry HashDict values.
  defstruct name: nil, memfuncs: HashDict.new

  # Builds a new variable with the given name and an empty memfuncs dict.
  def init(name) do
    %Flea.Variable { name: name }
  end

  # Registers `memfunc` under `name` on `var`, returning the updated struct.
  def add_func(var, name, memfunc) do
    memfuncs = Dict.put(var.memfuncs, name, memfunc)
    Map.put(var, :memfuncs, memfuncs )
  end
end | 21.769231 | 53 | 0.667845 |
1cc7883e608aa1b7cda3af15261c3976b24e6cbc | 1,082 | exs | Elixir | test/type/inspect/maps_test.exs | kianmeng/mavis | 6ba154efdfadcce1aca92ac735dadb209380c25b | [
"MIT"
] | null | null | null | test/type/inspect/maps_test.exs | kianmeng/mavis | 6ba154efdfadcce1aca92ac735dadb209380c25b | [
"MIT"
] | null | null | null | test/type/inspect/maps_test.exs | kianmeng/mavis | 6ba154efdfadcce1aca92ac735dadb209380c25b | [
"MIT"
] | null | null | null | defmodule TypeTest.Type.Inspect.MapsTest do
  # Verifies Inspect protocol output for map types, both for named examples
  # pulled from the @source module and for directly-built Type.Map structs.
  use ExUnit.Case, async: true

  import TypeTest.InspectCase

  @moduletag :inspect

  import Type, only: :macros

  alias Type.Map

  # Example module providing the named map types inspected below.
  @source TypeTest.TypeExample.Maps

  test "empty map literal" do
    assert "%{}" == inspect_type(@source, :empty_map_type)
  end

  test "the any map type" do
    assert "map()" == inspect_type(@source, :any_map_type)
  end

  test "atom key map literal" do
    assert "%{atom: integer()}" == inspect_type(@source, :atom_key_type)
  end

  test "required integer literal type" do
    assert "%{required(0) => integer()}" == inspect %Map{required: %{0 => builtin(:integer)}}
  end

  test "optional literal type" do
    assert "%{optional(:foo) => integer()}" == inspect_type(@source, :optional_literal_type)
  end

  test "struct literal type" do
    assert "%#{inspect @source}{}" ==
             inspect_type(@source, :struct_literal_type)
  end

  test "struct defined literal type" do
    assert "%#{inspect @source}{foo: integer()}" ==
             inspect_type(@source, :struct_defined_literal_type)
  end
end
| 25.162791 | 93 | 0.676525 |
1cc7951781a5fbe04630c208b6be0b64fbbd6a72 | 586 | ex | Elixir | exercises/practice/go-counting/lib/go_counting.ex | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | exercises/practice/go-counting/lib/go_counting.ex | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 583 | 2017-06-19T10:48:40.000Z | 2022-03-28T21:43:12.000Z | exercises/practice/go-counting/lib/go_counting.ex | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 228 | 2017-07-05T07:09:32.000Z | 2022-03-27T08:59:08.000Z | defmodule GoCounting do
@type position :: {integer, integer}
@type owner :: %{owner: atom, territory: [position]}
@type territories :: %{white: [position], black: [position], none: [position]}
@doc """
Return the owner and territory around a position
"""
@spec territory(board :: String.t(), position :: position) ::
{:ok, owner} | {:error, String.t()}
def territory(board, {x, y} = pos) do
end
@doc """
Return all white, black and neutral territories
"""
@spec territories(board :: String.t()) :: territories
def territories(board) do
end
end
| 27.904762 | 80 | 0.638225 |
1cc79a065c586349f94e0f60a835f7465bef7058 | 9,028 | ex | Elixir | lib/oli/delivery/attempts/page_lifecycle/hierarchy.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 1 | 2022-03-17T20:35:47.000Z | 2022-03-17T20:35:47.000Z | lib/oli/delivery/attempts/page_lifecycle/hierarchy.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 9 | 2021-11-02T16:52:09.000Z | 2022-03-25T15:14:01.000Z | lib/oli/delivery/attempts/page_lifecycle/hierarchy.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | null | null | null | defmodule Oli.Delivery.Attempts.PageLifecycle.Hierarchy do
import Ecto.Query, warn: false
require Logger
alias Oli.Repo
alias Oli.Delivery.Attempts.Core.{
PartAttempt,
ActivityAttempt
}
import Oli.Delivery.Attempts.Core
alias Oli.Activities.Realizer.Query.Source
alias Oli.Resources.Revision
alias Oli.Activities.Transformers
alias Oli.Delivery.ActivityProvider.Result
alias Oli.Delivery.Attempts.PageLifecycle.{VisitContext}
@doc """
Creates an attempt hierarchy for a given resource visit context, optimized to
use a constant number of queries relative to the number of activities and parts.

Returns {:ok, %ResourceAttempt{}}
"""
def create(%VisitContext{} = context) do
  # First-ever visit: look up the resource access row and start at attempt 1.
  # Otherwise reuse the previous attempt's access id and bump the number.
  {resource_access_id, next_attempt_number} =
    case context.latest_resource_attempt do
      nil ->
        {get_resource_access(
           context.page_revision.resource_id,
           context.section_slug,
           context.user_id
         ).id, 1}

      attempt ->
        {attempt.resource_access_id, attempt.attempt_number + 1}
    end

  # Realize the page's activities via the pluggable activity provider.
  %Result{
    errors: errors,
    revisions: activity_revisions,
    transformed_content: transformed_content,
    unscored: unscored
  } =
    context.activity_provider.(
      context.page_revision,
      %Source{
        blacklisted_activity_ids: [],
        section_slug: context.section_slug,
        publication_id: context.publication_id
      },
      Oli.Publishing.DeliveryResolver
    )

  case create_resource_attempt(%{
         content: transformed_content,
         errors: errors,
         attempt_guid: UUID.uuid4(),
         resource_access_id: resource_access_id,
         attempt_number: next_attempt_number,
         revision_id: context.page_revision.id
       }) do
    {:ok, resource_attempt} ->
      # Bulk-create the activity/part attempt tree beneath this resource attempt.
      bulk_create_attempts(resource_attempt, activity_revisions, unscored)
      {:ok, resource_attempt}

    error ->
      error
  end
end
# Instead of one insertion query for every part attempt and one insertion query for
# every activity attempt, this implementation does the same with exactly three queries:
#
#   1. Bulk activity attempt creation (regardless of the number of attempts)
#   2. A query to fetch the newly created IDs and their corresponding resource_ids
#   3. A final bulk insert query to create the part attempts
#
defp bulk_create_attempts(resource_attempt, activity_revisions, unscored) do
  # Use a common timestamp for all insertions
  right_now =
    DateTime.utc_now()
    |> DateTime.truncate(:second)

  # Create the activity attempts, in bulk. Revisions absent from the
  # `unscored` set are marked scoreable.
  Enum.filter(activity_revisions, fn r -> !is_nil(r) end)
  |> Enum.map(fn r ->
    scoreable = !MapSet.member?(unscored, r.resource_id)
    create_raw_activity_attempt(r, scoreable)
  end)
  |> optimize_transformed_model()
  |> bulk_create_activity_attempts(right_now, resource_attempt.id)

  # Part attempts are derived entirely inside the database from the rows
  # inserted above.
  query_driven_part_attempt_creation(resource_attempt.id)
end
# Inserts every activity attempt row in one insert_all call, using
# placeholders for the values shared by all rows (timestamp, attempt number,
# parent resource attempt id).
defp bulk_create_activity_attempts(raw_attempts, now, resource_attempt_id) do
  placeholders = %{
    now: now,
    attempt_number: 1,
    resource_attempt_id: resource_attempt_id
  }

  Repo.insert_all(ActivityAttempt, raw_attempts, placeholders: placeholders)
end
# This is the optimal way to bulk create part attempts: passing a query driven 'insert'
# to the database, instead of passing the raw payload of each record to create.
# The INSERT..SELECT joins the freshly inserted activity attempts against the
# part_mapping table and defaults a NULL grading_approach to 'automatic'.
defp query_driven_part_attempt_creation(resource_attempt_id) do
  query = """
  INSERT INTO part_attempts(part_id, activity_attempt_id, attempt_guid, inserted_at, updated_at, hints, attempt_number, lifecycle_state, grading_approach)
  SELECT pm.part_id, a.id, gen_random_uuid(), now(), now(), '{}'::varchar[], 1, 'active', (CASE WHEN pm.grading_approach IS NULL THEN
    'automatic'
  ELSE
    pm.grading_approach
  END)
  FROM activity_attempts as a
  LEFT JOIN part_mapping as pm on a.revision_id = pm.revision_id
  WHERE a.resource_attempt_id = $1;
  """

  Repo.query!(query, [resource_attempt_id])
end
# When every raw attempt carries a nil :transformed_model, the key can be
# omitted from the bulk insert entirely — the column then falls back to its
# NULL default. Otherwise the attempts are returned untouched.
defp optimize_transformed_model(raw_attempts) do
  if Enum.all?(raw_attempts, &is_nil(&1.transformed_model)) do
    Enum.map(raw_attempts, &Map.delete(&1, :transformed_model))
  else
    raw_attempts
  end
end
# Builds the raw attrs map for one activity attempt row, applying the
# activity's model transforms up front. A transform failure leaves
# transformed_model nil. Shared values use insert_all placeholders.
defp create_raw_activity_attempt(
       %Revision{resource_id: resource_id, id: id, content: model},
       scoreable
     ) do
  transformed_model =
    case Transformers.apply_transforms(model) do
      {:ok, t} -> t
      _ -> nil
    end

  %{
    resource_attempt_id: {:placeholder, :resource_attempt_id},
    attempt_guid: UUID.uuid4(),
    attempt_number: {:placeholder, :attempt_number},
    revision_id: id,
    resource_id: resource_id,
    transformed_model: transformed_model,
    scoreable: scoreable,
    lifecycle_state: :active,
    inserted_at: {:placeholder, :now},
    updated_at: {:placeholder, :now}
  }
end
@doc """
Retrieves the state of the latest attempts for a given resource attempt id.

Return value is a map of activity ids to a two element tuple. The first
element is the latest activity attempt and the second is a map of part ids
to their part attempts. As an example:

%{
  232 => {%ActivityAttempt{}, %{ "1" => %PartAttempt{}, "2" => %PartAttempt{}}}
  233 => {%ActivityAttempt{}, %{ "1" => %PartAttempt{}, "2" => %PartAttempt{}}}
}
"""
def get_latest_attempts(resource_attempt_id) do
  # Self-join anti-join pattern: aa2/pa2 are later rows for the same
  # resource/part; requiring them to be nil keeps only the newest activity
  # attempt and newest part attempt per part.
  Repo.all(
    from(aa1 in ActivityAttempt,
      join: r in assoc(aa1, :revision),
      left_join: aa2 in ActivityAttempt,
      on:
        aa1.resource_id == aa2.resource_id and aa1.id < aa2.id and
          aa1.resource_attempt_id == aa2.resource_attempt_id,
      join: pa1 in PartAttempt,
      on: aa1.id == pa1.activity_attempt_id,
      left_join: pa2 in PartAttempt,
      on:
        aa1.id == pa2.activity_attempt_id and pa1.part_id == pa2.part_id and pa1.id < pa2.id and
          pa1.activity_attempt_id == pa2.activity_attempt_id,
      where:
        aa1.resource_attempt_id == ^resource_attempt_id and is_nil(aa2.id) and is_nil(pa2.id),
      preload: [revision: r],
      select: {pa1, aa1}
    )
  )
  |> results_to_activity_map
end
# Same latest-attempt query as get_latest_attempts/1, additionally restricted
# to the given list of activity (resource) ids.
def get_latest_attempts(resource_attempt_id, activity_ids) do
  Repo.all(
    from(aa1 in ActivityAttempt,
      join: r in assoc(aa1, :revision),
      left_join: aa2 in ActivityAttempt,
      on:
        aa1.resource_id == aa2.resource_id and aa1.id < aa2.id and
          aa1.resource_attempt_id == aa2.resource_attempt_id,
      join: pa1 in PartAttempt,
      on: aa1.id == pa1.activity_attempt_id,
      left_join: pa2 in PartAttempt,
      on:
        aa1.id == pa2.activity_attempt_id and pa1.part_id == pa2.part_id and pa1.id < pa2.id and
          pa1.activity_attempt_id == pa2.activity_attempt_id,
      where:
        aa1.resource_id in ^activity_ids and
          aa1.resource_attempt_id == ^resource_attempt_id and is_nil(aa2.id) and is_nil(pa2.id),
      preload: [revision: r],
      select: {pa1, aa1}
    )
  )
  |> results_to_activity_map
end
# The full hierarchy for a resource attempt is simply the map of latest
# activity/part attempts.
def full_hierarchy(resource_attempt) do
  get_latest_attempts(resource_attempt.id)
end
# Builds a lightweight view of the attempt hierarchy: a map from activity id
# to %{id, attemptGuid, deliveryElement}, where the delivery element comes
# from the activity type's registration.
def thin_hierarchy(resource_attempt) do
  registrations_by_id =
    Map.new(Oli.Activities.list_activity_registrations(), &{&1.id, &1})

  for {id, guid, type_id} <- get_thin_activity_context(resource_attempt.id), into: %{} do
    registration = Map.get(registrations_by_id, type_id)

    {id,
     %{
       id: id,
       attemptGuid: guid,
       deliveryElement: registration.delivery_element
     }}
  end
end
# Folds {part attempt, activity attempt} tuples into a map keyed by activity
# (resource) id. The first activity attempt seen for an id is kept; its part
# attempts accumulate in an inner map keyed by part id. For example:
#
#   %{
#     232 => {%ActivityAttempt{}, %{ "1" => %PartAttempt{}, "2" => %PartAttempt{}}}
#     233 => {%ActivityAttempt{}, %{ "1" => %PartAttempt{}, "2" => %PartAttempt{}}}
#   }
defp results_to_activity_map(results) do
  Enum.reduce(results, %{}, fn {part_attempt, activity_attempt}, acc ->
    Map.update(
      acc,
      activity_attempt.resource_id,
      {activity_attempt, %{part_attempt.part_id => part_attempt}},
      fn {existing_attempt, part_map} ->
        {existing_attempt, Map.put(part_map, part_attempt.part_id, part_attempt)}
      end
    )
  end)
end
end
| 33.686567 | 158 | 0.661498 |
1cc7eb2d73f6f262ebec355b60a2954e74f44166 | 6,802 | exs | Elixir | .credo.exs | hippware/loner | 7f7831b65bb9a2e5e63540f877bbb0b1243b3885 | [
"MIT"
] | 1 | 2020-08-18T15:00:51.000Z | 2020-08-18T15:00:51.000Z | .credo.exs | hippware/loner | 7f7831b65bb9a2e5e63540f877bbb0b1243b3885 | [
"MIT"
] | null | null | null | .credo.exs | hippware/loner | 7f7831b65bb9a2e5e63540f877bbb0b1243b3885 | [
"MIT"
] | null | null | null | # This file contains the configuration for Credo and you are probably reading
# this after creating it with `mix credo.gen.config`.
#
# If you find anything wrong or unclear in this file, please report an
# issue on GitHub: https://github.com/rrrene/credo/issues
#
%{
  #
  # You can have as many configs as you like in the `configs:` field.
  configs: [
    %{
      #
      # Run any exec using `mix credo -C <name>`. If no exec name is given
      # "default" is used.
      #
      name: "default",
      #
      # These are the files included in the analysis:
      files: %{
        #
        # You can give explicit globs or simply directories.
        # In the latter case `**/*.{ex,exs}` will be used.
        #
        included: ["lib/", "src/", "test/", "web/", "apps/"],
        excluded: [~r"/_build/", ~r"/deps/", ~r"/node_modules/"]
      },
      #
      # Load and configure plugins here:
      #
      plugins: [],
      #
      # If you create your own checks, you must specify the source files for
      # them here, so they can be loaded by Credo before running the analysis.
      #
      requires: [],
      #
      # If you want to enforce a style guide and need a more traditional linting
      # experience, you can change `strict` to `true` below:
      #
      strict: true,
      #
      # If you want to use uncolored output by default, you can change `color`
      # to `false` below:
      #
      color: true,
      #
      # You can customize the parameters of any check by adding a second element
      # to the tuple.
      #
      # To disable a check put `false` as second element:
      #
      # {Credo.Check.Design.DuplicatedCode, false}
      #
      checks: [
        #
        ## Consistency Checks
        #
        {Credo.Check.Consistency.ExceptionNames, []},
        {Credo.Check.Consistency.LineEndings, []},
        {Credo.Check.Consistency.ParameterPatternMatching, []},
        {Credo.Check.Consistency.SpaceAroundOperators, []},
        {Credo.Check.Consistency.SpaceInParentheses, []},
        {Credo.Check.Consistency.TabsOrSpaces, []},
        #
        ## Design Checks
        #
        # You can customize the priority of any check
        # Priority values are: `low, normal, high, higher`
        #
        {Credo.Check.Design.AliasUsage,
         [
           priority: :low,
           if_called_more_often_than: 2,
           if_nested_deeper_than: 1
         ]},
        # You can also customize the exit_status of each check.
        # If you don't want TODO comments to cause `mix credo` to fail, just
        # set this value to 0 (zero).
        #
        {Credo.Check.Design.TagTODO, false},
        {Credo.Check.Design.TagFIXME, false},
        #
        ## Readability Checks
        #
        {Credo.Check.Readability.AliasOrder, []},
        {Credo.Check.Readability.FunctionNames, []},
        {Credo.Check.Readability.LargeNumbers, []},
        {Credo.Check.Readability.MaxLineLength,
         [priority: :low, max_length: 120]},
        {Credo.Check.Readability.ModuleAttributeNames, []},
        {Credo.Check.Readability.ModuleDoc, []},
        {Credo.Check.Readability.ModuleNames, []},
        {Credo.Check.Readability.ParenthesesInCondition, []},
        {Credo.Check.Readability.ParenthesesOnZeroArityDefs, []},
        {Credo.Check.Readability.PredicateFunctionNames, []},
        {Credo.Check.Readability.PreferImplicitTry, []},
        {Credo.Check.Readability.RedundantBlankLines, [max_blank_lines: 2]},
        {Credo.Check.Readability.Semicolons, []},
        {Credo.Check.Readability.SpaceAfterCommas, []},
        {Credo.Check.Readability.StringSigils, []},
        {Credo.Check.Readability.TrailingBlankLine, []},
        {Credo.Check.Readability.TrailingWhiteSpace, []},
        # TODO: enable by default in Credo 1.1
        {Credo.Check.Readability.UnnecessaryAliasExpansion, false},
        {Credo.Check.Readability.VariableNames, []},
        #
        ## Refactoring Opportunities
        #
        {Credo.Check.Refactor.CondStatements, []},
        {Credo.Check.Refactor.CyclomaticComplexity, []},
        {Credo.Check.Refactor.FunctionArity, []},
        {Credo.Check.Refactor.LongQuoteBlocks, []},
        # Not an issue with Elixir 1.8.1
        {Credo.Check.Refactor.MapInto, false},
        {Credo.Check.Refactor.MatchInCondition, []},
        {Credo.Check.Refactor.NegatedConditionsInUnless, []},
        {Credo.Check.Refactor.NegatedConditionsWithElse, []},
        {Credo.Check.Refactor.Nesting, []},
        {Credo.Check.Refactor.UnlessWithElse, []},
        {Credo.Check.Refactor.WithClauses, []},
        #
        ## Warnings
        #
        {Credo.Check.Warning.BoolOperationOnSameValues, []},
        {Credo.Check.Warning.ExpensiveEmptyEnumCheck, []},
        {Credo.Check.Warning.IExPry, []},
        {Credo.Check.Warning.IoInspect, []},
        # Not an issue with Elixir 1.8.1
        {Credo.Check.Warning.LazyLogging, false},
        {Credo.Check.Warning.OperationOnSameValues, []},
        {Credo.Check.Warning.OperationWithConstantResult, []},
        {Credo.Check.Warning.RaiseInsideRescue, []},
        {Credo.Check.Warning.UnusedEnumOperation, []},
        {Credo.Check.Warning.UnusedFileOperation, []},
        {Credo.Check.Warning.UnusedKeywordOperation, []},
        {Credo.Check.Warning.UnusedListOperation, []},
        {Credo.Check.Warning.UnusedPathOperation, []},
        {Credo.Check.Warning.UnusedRegexOperation, []},
        {Credo.Check.Warning.UnusedStringOperation, []},
        {Credo.Check.Warning.UnusedTupleOperation, []},
        #
        # Controversial and experimental checks (opt-in, just replace `false` with `[]`)
        #
        {Credo.Check.Consistency.MultiAliasImportRequireUse, []},
        {Credo.Check.Consistency.UnusedVariableNames, false},
        {Credo.Check.Design.DuplicatedCode, []},
        {Credo.Check.Readability.AliasAs, false},
        {Credo.Check.Readability.MultiAlias, []},
        {Credo.Check.Readability.Specs, false},
        {Credo.Check.Readability.SinglePipe, false},
        {Credo.Check.Refactor.ABCSize, false},
        {Credo.Check.Refactor.AppendSingleItem, []},
        {Credo.Check.Refactor.DoubleBooleanNegation, []},
        {Credo.Check.Refactor.ModuleDependencies, false},
        {Credo.Check.Refactor.PipeChainStart,
         [
           excluded_argument_types: [:atom, :binary, :fn, :keyword, :number],
           excluded_functions: ["from"]
         ]},
        {Credo.Check.Refactor.VariableRebinding, [allow_bang: true]},
        {Credo.Check.Warning.MapGetUnsafePass, []},
        {Credo.Check.Warning.UnsafeToAtom, []},
        #
        # Custom checks can be created using `mix credo.gen.check`.
        #
        {CredoNaming.Check.Consistency.ModuleFilename, []}
      ]
    }
  ]
}
| 38.429379 | 88 | 0.608792 |
1cc804a993a684eb2e3cf1e363707325481ef5d0 | 5,772 | ex | Elixir | lib/phoenix/router/route.ex | misfo/phoenix | 04464429d9b958e331b2ffe0f0f5926690ab3b56 | [
"MIT"
] | 1 | 2021-03-14T17:50:24.000Z | 2021-03-14T17:50:24.000Z | lib/phoenix/router/route.ex | misfo/phoenix | 04464429d9b958e331b2ffe0f0f5926690ab3b56 | [
"MIT"
] | null | null | null | lib/phoenix/router/route.ex | misfo/phoenix | 04464429d9b958e331b2ffe0f0f5926690ab3b56 | [
"MIT"
] | 2 | 2020-08-02T04:00:17.000Z | 2020-10-07T16:07:37.000Z | defmodule Phoenix.Router.Route do
# This module defines the Route struct that is used
# throughout Phoenix's router. This struct is private
# as it contains internal routing information.
@moduledoc false
alias Phoenix.Router.Route
@doc """
The `Phoenix.Router.Route` struct. It stores:
* :verb - the HTTP verb as an upcased string
* :kind - the kind of route, one of `:match`, `:forward`
* :path - the normalized path as string
* :host - the request host or host prefix
* :plug - the plug module
* :opts - the plug options
* :helper - the name of the helper as a string (may be nil)
* :private - the private route info
* :assigns - the route info
* :pipe_through - the pipeline names as a list of atoms
"""
defstruct [:verb, :kind, :path, :host, :plug, :opts,
:helper, :private, :pipe_through, :assigns]
@type t :: %Route{}
@doc """
Receives the verb, path, plug, options and helper
and returns a `Phoenix.Router.Route` struct.
"""
@spec build(:match | :forward, String.t, String.t, String.t | nil, atom, atom, atom | nil, atom, %{}, %{}) :: t
def build(kind, verb, path, host, plug, opts, helper, pipe_through, private, assigns)
when is_atom(verb) and (is_binary(host) or is_nil(host)) and
is_atom(plug) and (is_binary(helper) or is_nil(helper)) and
is_list(pipe_through) and is_map(private and is_map(assigns))
and kind in [:match, :forward] do
%Route{kind: kind, verb: verb, path: path, host: host, private: private,
plug: plug, opts: opts, helper: helper,
pipe_through: pipe_through, assigns: assigns}
end
@doc """
Builds the expressions used by the route.
"""
def exprs(route) do
{path, binding} = build_path_and_binding(route)
%{path: path,
host: build_host(route.host),
verb_match: verb_match(route.verb),
binding: binding,
dispatch: build_dispatch(route, binding)}
end
defp verb_match(:*), do: Macro.var(:_verb, nil)
defp verb_match(verb), do: verb |> to_string() |> String.upcase()
  # Expands the route path into Plug's path-match AST plus the list of
  # `{"param_name", var}` bindings used to populate `conn.params`.
  # Forward routes get a trailing glob so the remaining path segments are
  # captured (and later stripped by `forward/4`) as `_forward_path_info`.
  defp build_path_and_binding(%Route{path: path} = route) do
    {params, segments} = case route.kind do
      :forward -> Plug.Router.Utils.build_path_match(path <> "/*_forward_path_info")
      :match -> Plug.Router.Utils.build_path_match(path)
    end
    # The glob variable is internal bookkeeping, not a user-facing param.
    binding = for var <- params, var != :_forward_path_info do
      {Atom.to_string(var), Macro.var(var, nil)}
    end
    {segments, binding}
  end
defp build_host(host) do
cond do
is_nil(host) -> quote do: _
String.last(host) == "." -> quote do: unquote(host) <> _
true -> host
end
end
defp build_dispatch(route, binding) do
exprs =
[maybe_binding(binding),
maybe_merge(:private, route.private),
maybe_merge(:assigns, route.assigns),
build_pipes(route)]
{:__block__, [], Enum.filter(exprs, & &1 != nil)}
end
  # Returns AST that merges `data` into `conn.private`/`conn.assigns` (per
  # `key`), or `nil` — filtered out by build_dispatch/2 — when there is
  # nothing to merge.
  defp maybe_merge(key, data) do
    if map_size(data) > 0 do
      quote do
        var!(conn) =
          update_in var!(conn).unquote(key), &Map.merge(&1, unquote(Macro.escape(data)))
      end
    end
  end
  # Returns AST that merges the path params captured by the route match into
  # `conn.params`; `nil` (dropped by build_dispatch/2) when the route binds
  # no params.
  defp maybe_binding([]), do: nil
  defp maybe_binding(binding) do
    quote do
      var!(conn) =
        update_in var!(conn).params, &Map.merge(&1, unquote({:%{}, [], binding}))
    end
  end
  # Forward routes: the target plug's options are initialized at compile
  # time, and the stored dispatch fun routes through
  # `Phoenix.Router.Route.forward/4`, which strips the matched prefix
  # before invoking the target plug.
  defp build_pipes(%Route{kind: :forward} = route) do
    {_params, fwd_segments} = Plug.Router.Utils.build_path_match(route.path)
    opts = route.plug.init(route.opts)
    quote do
      var!(conn)
      |> Plug.Conn.put_private(:phoenix_pipelines, unquote(route.pipe_through))
      |> Plug.Conn.put_private(:phoenix_route, fn conn ->
        Phoenix.Router.Route.forward(conn, unquote(fwd_segments),
                                     unquote(route.plug), unquote(opts))
      end)
    end |> pipe_through(route)
  end
  # Regular routes: plug options are initialized at dispatch time (inside
  # the stored fun) rather than at compile time.
  defp build_pipes(route) do
    quote do
      var!(conn)
      |> Plug.Conn.put_private(:phoenix_pipelines, unquote(route.pipe_through))
      |> Plug.Conn.put_private(:phoenix_route, fn conn ->
        # We need to store this in a variable so the compiler
        # does not see a call and then suddenly start tracking
        # changes in the controller.
        plug = unquote(route.plug)
        opts = plug.init(unquote(route.opts))
        plug.call(conn, opts)
      end)
    end |> pipe_through(route)
  end
  # Wraps the route body AST with the route's pipelines, compiled as plugs
  # via Plug.Builder. The pipeline list is reversed to match the ordering
  # Plug.Builder.compile/3 expects; `initial` becomes the compiled body's
  # entry conn.
  defp pipe_through(initial, route) do
    plugs = route.pipe_through |> Enum.reverse |> Enum.map(&{&1, [], true})
    {conn, body} = Plug.Builder.compile(__ENV__, plugs, [])
    quote do
      unquote(conn) = unquote(initial)
      unquote(body)
    end
  end
  @doc """
  Forwards requests to another Plug at a new path.
  """
  def forward(%Plug.Conn{path_info: path, script_name: script} = conn, fwd_segments, target, opts) do
    # Strip the matched prefix from path_info; the match assertion below
    # guarantees the remaining segments really are a suffix of the path.
    new_path = path -- fwd_segments
    {base, ^new_path} = Enum.split(path, length(path) - length(new_path))
    # Call the target with the adjusted path, then restore the original
    # path_info/script_name so downstream code sees the outer view.
    conn = %{conn | path_info: new_path, script_name: script ++ base} |> target.call(opts)
    %{conn | path_info: path, script_name: script}
  end
  @doc """
  Validates and returns the list of forward path segments.
  Raises `ArgumentError` if the plug is already forwarded or the path contains
  a dynamic segment.
  """
  def forward_path_segments(path, plug, phoenix_forwards) do
    case Plug.Router.Utils.build_path_match(path) do
      # An empty params list means the path is fully static.
      {[], path_segments} ->
        if phoenix_forwards[plug] do
          raise ArgumentError, "`#{inspect plug}` has already been forwarded to. A module can only be forwarded a single time."
        end
        path_segments
      _ ->
        raise ArgumentError, "Dynamic segment `\"#{path}\"` not allowed when forwarding. Use a static path instead."
    end
  end
end
| 32.795455 | 127 | 0.637561 |
1cc814a249c750e4dfc96242f870d74d8c39753c | 3,593 | ex | Elixir | lib/coxir/gateway.ex | jano017/coxir | 81e89e0113a9219685be2b61af7d17c5fcd9e5fb | [
"Apache-2.0"
] | null | null | null | lib/coxir/gateway.ex | jano017/coxir | 81e89e0113a9219685be2b61af7d17c5fcd9e5fb | [
"Apache-2.0"
] | null | null | null | lib/coxir/gateway.ex | jano017/coxir | 81e89e0113a9219685be2b61af7d17c5fcd9e5fb | [
"Apache-2.0"
] | null | null | null | defmodule Coxir.Gateway do
defmodule Worker do
require Logger
use WebSockex
defmodule State do
defstruct [
:token,
heartbeat_interval: 5000,
sequence: 0,
id: 0,
shards: 1
]
end
def start_link(url, state) do
WebSockex.start_link(url, __MODULE__, state)
end
def handle_frame({:binary, msg}, state) do
packet = :erlang.binary_to_term(msg)
next_state = case packet.op do
10 ->
Logger.debug("Coxir.Gateway: Connected to #{inspect packet.d._trace}")
Process.send(self, {:send,
:binary,
payload(%{
"token" => state.token,
"properties" => %{
"$os" => "linux",
"$device" => "coxir",
"$browser" => "coxir"
},
"compress" => false,
"large_threshold" => 250,
"shard" => [state.id, state.shards]
}, 2)
}, [])
Process.send(self, :heartbeat, [])
{:ok, %{state | heartbeat_interval: packet.d.heartbeat_interval, sequence: packet.s}}
9 ->
Process.send(self, {:send,
:binary,
payload(%{
"token" => state.token,
"properties" => %{
"$os" => "linux",
"$device" => "coxir",
"$browser" => "coxir"
},
"compress" => false,
"large_threshold" => 250,
"shard" => [state.id, state.shards]
}, 2)
}, [])
{:ok, %{state | sequence: packet.s}}
0 ->
Swarm.publish(packet.t, {packet.t, packet.d |> Enum.reduce(%{}, fn({key, val}, acc)
-> Map.put(acc, if is_binary(key) do String.to_atom(key) else key end, val) end)})
{:ok, %{state | sequence: packet.s}}
1 ->
Process.send(self, {:send, :binary, payload(packet.s, 1)}, [])
{:ok, %{state | sequence: packet.s}}
11 ->
{:ok, state}
_ ->
Logger.debug("Coxir.Gateway<#{state.id}>: Opcode fallthrough: #{inspect packet}")
{:ok, %{state | sequence: packet.s}}
end
end
def handle_frame({type, msg}, state) do
Logger.debug("Coixr.Gateway<#{state.id}>: #{inspect type}, #{inspect msg}")
{:ok, state}
end
def handle_info({:"$gen_call", pid, :shard}, state) do
GenServer.reply(pid, String.to_atom "gateway_#{inspect state.id}")
{:ok, state}
end
def handle_info({:send, type, msg}, state) do
{:reply, {type, msg}, state}
end
def handle_info(:heartbeat, %State{heartbeat_interval: heartbeat, sequence: seq} = state) do
Logger.debug("Coxir.Gateway<#{state.id}>: heartbeat #{inspect seq}")
Process.send_after(self, :heartbeat, heartbeat)
{:reply, {:binary, payload(seq, 1)}, state}
end
def payload(data, op) do
%{"op" => op, "d" => data}
|> :erlang.term_to_binary
end
end
  # Registers one gateway worker per shard (0..nprocs-1) under Swarm and
  # joins each to the :gateway group.
  def start(nprocs) do
    for n <- 0..nprocs-1 do
      name = :"gateway_#{inspect n}"
      {:ok, pid} = Swarm.register_name(name, __MODULE__, :register, [nprocs, n])
      Swarm.join(:gateway, pid)
    end
  end
  # Swarm registration callback: starts a websocket worker for `shard`.
  # Requires the :coxir, :token application env to be set.
  def register(shards, shard) do
    token = Application.get_env(:coxir, :token)
    if !token, do: raise "Please provide a token."
    Worker.start_link("wss://gateway.discord.gg/?v=6&encoding=etf", %Worker.State{
      token: token,
      shards: shards,
      id: shard
    })
  end
def get(shard) when is_integer(shard) do
String.to_atom("gateway_#{inspect shard}")
end
end | 30.193277 | 96 | 0.527971 |
1cc82292005c7bef6a17862468c5d3ed0664e29a | 540 | exs | Elixir | priv/repo/migrations/20170406153416_tags_associations.exs | sahilpaudel/AfterGlow | 0859ec14b47c8c5704cc8e5cba86d39aa258fff5 | [
"MIT"
] | null | null | null | priv/repo/migrations/20170406153416_tags_associations.exs | sahilpaudel/AfterGlow | 0859ec14b47c8c5704cc8e5cba86d39aa258fff5 | [
"MIT"
] | null | null | null | priv/repo/migrations/20170406153416_tags_associations.exs | sahilpaudel/AfterGlow | 0859ec14b47c8c5704cc8e5cba86d39aa258fff5 | [
"MIT"
] | null | null | null | defmodule AfterGlow.Repo.Migrations.TagsAssociations do
use Ecto.Migration
  # Creates the tag<->question and tag<->dashboard join tables; the unique
  # indexes guarantee at most one link per (tag, record) pair.
  def change do
    create table(:tag_questions) do
      add :tag_id, references(:tags)
      add :question_id, references(:questions)
      timestamps()
    end
    create table(:tag_dashboards) do
      add :tag_id, references(:tags)
      add :dashboard_id, references(:dashboards)
      timestamps()
    end
    create unique_index(:tag_dashboards, [:tag_id, :dashboard_id])
    create unique_index(:tag_questions, [:tag_id, :question_id])
  end
end
| 24.545455 | 66 | 0.692593 |
1cc8389c2dbdfa27db6ede9860b49cdd8f01f79b | 94 | ex | Elixir | lib/chirper_web/views/relationship_view.ex | PranavPuranik/project_twitter | 0b660e8749488a632d6f64212205757254caaec3 | [
"MIT"
] | null | null | null | lib/chirper_web/views/relationship_view.ex | PranavPuranik/project_twitter | 0b660e8749488a632d6f64212205757254caaec3 | [
"MIT"
] | 1 | 2021-03-10T07:27:11.000Z | 2021-03-10T07:27:11.000Z | lib/chirper_web/views/relationship_view.ex | PranavPuranik/project_twitter | 0b660e8749488a632d6f64212205757254caaec3 | [
"MIT"
] | null | null | null | defmodule ChirperWeb.RelationshipView do
use ChirperWeb, :view
alias Chirper.Accounts
end | 18.8 | 40 | 0.819149 |
1cc883e06a1b88411a6183c790c20e9f2ee456da | 57 | ex | Elixir | web/views/layout_view.ex | leifg/byzal | 64b688de4c597808ee82a8222e7c3fd7ce4c295b | [
"MIT"
] | null | null | null | web/views/layout_view.ex | leifg/byzal | 64b688de4c597808ee82a8222e7c3fd7ce4c295b | [
"MIT"
] | null | null | null | web/views/layout_view.ex | leifg/byzal | 64b688de4c597808ee82a8222e7c3fd7ce4c295b | [
"MIT"
] | null | null | null | defmodule Byzal.LayoutView do
use Byzal.Web, :view
end
| 14.25 | 29 | 0.77193 |
1cc88621d357174a941d91559d16865c0b0917be | 1,664 | exs | Elixir | test/run_workflows/simple_workflow_test.exs | nipierre/ex_step_flow | 4345ee57bd4e5eb79138df68d10579ba1b9ec6a1 | [
"MIT"
] | null | null | null | test/run_workflows/simple_workflow_test.exs | nipierre/ex_step_flow | 4345ee57bd4e5eb79138df68d10579ba1b9ec6a1 | [
"MIT"
] | null | null | null | test/run_workflows/simple_workflow_test.exs | nipierre/ex_step_flow | 4345ee57bd4e5eb79138df68d10579ba1b9ec6a1 | [
"MIT"
] | null | null | null | defmodule StepFlow.RunWorkflows.SimpleWorkflowTest do
use ExUnit.Case
use Plug.Test
alias Ecto.Adapters.SQL.Sandbox
alias StepFlow.Step
doctest StepFlow
  setup do
    # Explicitly get a connection before each test
    :ok = Sandbox.checkout(StepFlow.Repo)
    {_conn, channel} = StepFlow.HelpersTest.get_amqp_connection()
    # Drain the one message this test pushes to "job_test" so it does not
    # leak into the next test run.
    on_exit(fn ->
      StepFlow.HelpersTest.consume_messages(channel, "job_test", 1)
    end)
    :ok
  end
  describe "workflows" do
    # Minimal one-step workflow fixture used by the test below.
    @workflow_definition %{
      schema_version: "1.8",
      identifier: "id",
      version_major: 6,
      version_minor: 5,
      version_micro: 4,
      reference: "some id",
      icon: "custom_icon",
      label: "Simple workflow test",
      tags: ["test"],
      steps: [
        %{
          id: 0,
          name: "job_test",
          icon: "step_icon",
          label: "My first step",
          parameters: [
            %{
              id: "source_paths",
              type: "array_of_strings",
              value: ["my_file.mov"]
            }
          ]
        }
      ],
      parameters: [],
      rights: [
        %{
          action: "create",
          # NOTE(review): "adminitstrator" looks like a typo for
          # "administrator" — confirm against the rights fixtures before
          # changing, as tests may rely on the literal value.
          groups: ["adminitstrator"]
        }
      ]
    }
    test "run simple workflow with 1 step" do
      workflow = StepFlow.HelpersTest.workflow_fixture(@workflow_definition)
      # First start launches the single job; completing it lets the second
      # start_next report the workflow as completed.
      {:ok, "started"} = Step.start_next(workflow)
      StepFlow.HelpersTest.check(workflow.id, 1)
      StepFlow.HelpersTest.check(workflow.id, 0, 1)
      StepFlow.HelpersTest.complete_jobs(workflow.id, 0)
      {:ok, "completed"} = Step.start_next(workflow)
      StepFlow.HelpersTest.check(workflow.id, 1)
    end
  end
end
| 23.43662 | 76 | 0.579928 |
1cc8a2f2569a8854775430903163c401ec9b1b6a | 1,510 | ex | Elixir | clients/content/lib/google_api/content/v2/model/liasettings_request_gmb_access_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/content/lib/google_api/content/v2/model/liasettings_request_gmb_access_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/content/lib/google_api/content/v2/model/liasettings_request_gmb_access_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# Auto-generated API model (see the generator notice in the file header);
# prefer regenerating over hand-editing.
defmodule GoogleApi.Content.V2.Model.LiasettingsRequestGmbAccessResponse do
  @moduledoc """
  ## Attributes
  * `kind` (*type:* `String.t`, *default:* `nil`) - Identifies what kind of resource this is. Value: the fixed string "content#liasettingsRequestGmbAccessResponse".
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :kind => String.t() | nil
        }
  field(:kind)
end
# Generated JSON decoding support, delegating to the model's decode/2.
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.LiasettingsRequestGmbAccessResponse do
  def decode(value, options) do
    GoogleApi.Content.V2.Model.LiasettingsRequestGmbAccessResponse.decode(value, options)
  end
end
# Generated JSON encoding support, delegating to the shared Gax base.
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.LiasettingsRequestGmbAccessResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 32.12766 | 166 | 0.750331 |
1cc921ab3e9c889dafe3d3c9a3798b4ba9271216 | 182 | exs | Elixir | priv/repo/migrations/20170213140200_add_alert_emails_field_to_cameras.exs | mekongit/evercam_models | 7c36d52f2b47ffc89a3eaca65c2e353716fafacc | [
"MIT"
] | null | null | null | priv/repo/migrations/20170213140200_add_alert_emails_field_to_cameras.exs | mekongit/evercam_models | 7c36d52f2b47ffc89a3eaca65c2e353716fafacc | [
"MIT"
] | null | null | null | priv/repo/migrations/20170213140200_add_alert_emails_field_to_cameras.exs | mekongit/evercam_models | 7c36d52f2b47ffc89a3eaca65c2e353716fafacc | [
"MIT"
] | null | null | null | defmodule Evercam.Repo.Migrations.AddAlertEmailsFieldToCameras do
use Ecto.Migration
def change do
alter table(:cameras) do
add :alert_emails, :text
end
end
end
| 18.2 | 65 | 0.736264 |
1cc9247776e4eee90841e1c29cd970f5585fa837 | 199 | ex | Elixir | apps/rayyan_site/lib/rayyan_site_web/views/page_view.ex | boqolo/boqolo | ba9a2ebcf379f5056b3836756179b49163f91d72 | [
"BSD-3-Clause"
] | null | null | null | apps/rayyan_site/lib/rayyan_site_web/views/page_view.ex | boqolo/boqolo | ba9a2ebcf379f5056b3836756179b49163f91d72 | [
"BSD-3-Clause"
] | null | null | null | apps/rayyan_site/lib/rayyan_site_web/views/page_view.ex | boqolo/boqolo | ba9a2ebcf379f5056b3836756179b49163f91d72 | [
"BSD-3-Clause"
] | null | null | null | defmodule RayyanSiteWeb.PageView do
use RayyanSiteWeb, :view
def recent_posts() do
Writings.list_entries
|> Enum.take(5)
end
def all_posts() do
Writings.list_entries
end
end
| 14.214286 | 35 | 0.708543 |
1cc93961191bfaa9489a9574ea80845ed6768ed5 | 1,324 | ex | Elixir | lib/virgo_web/controllers/judgement_type_controller.ex | GinShio/AstraeaVirgo | 92804cbae01f67e21b8f421009fa37fddc9054e1 | [
"BSD-2-Clause"
] | null | null | null | lib/virgo_web/controllers/judgement_type_controller.ex | GinShio/AstraeaVirgo | 92804cbae01f67e21b8f421009fa37fddc9054e1 | [
"BSD-2-Clause"
] | null | null | null | lib/virgo_web/controllers/judgement_type_controller.ex | GinShio/AstraeaVirgo | 92804cbae01f67e21b8f421009fa37fddc9054e1 | [
"BSD-2-Clause"
] | null | null | null | defmodule AstraeaVirgoWeb.JudgementTypeController do
use AstraeaVirgoWeb, :controller
@moduledoc """
Impl Judgement Type API:
- `GET /api/judgement-types`: get judgement types info
- `GET /api/judgement-types/<judgement_type_id>` get the specified judgement type info
"""
@doc """
Get all judgement type info API
API: GET /api/judgement-types
Response: return 200, response `AstraeaVirgoWeb.JudgementTypeView.render/2` index.json
"""
def index(conn, _params) do
conn |>
put_status(:ok) |>
render(AstraeaVirgoWeb.JudgementTypeView, "index.json")
end
@doc """
Get the specified judgement type info API
API: GET /api/judgement-types/<judgement_type_id>
Response:
1. return 200, response `AstraeaVirgoWeb.JudgementTypeView.render/2` show.json
2. return 400 when id is invalid, response `AstraeaVirgoWeb.ErrorView.render/2` validation.json
"""
def show(conn, %{"id" => id}) do
cond do
id in ["CE", "AC", "TLE", "RTE", "WA"] ->
conn |>
put_status(:ok) |>
render(AstraeaVirgoWeb.JudgementTypeView, "show.json", type: id)
true ->
conn |>
put_status(:bad_request) |>
render(AstraeaVirgoWeb.ErrorView, "validation.json", fields: %{"id" => ["is invalid. got:\"#{id}\""]})
end
end
end
| 29.422222 | 112 | 0.655589 |
1cc93ee732f3e85e75dbdc4c1624cda01676a8fb | 62 | ex | Elixir | test/support/cast.ex | depressed-pho/xema | 56de4a5d3b3f37827c70f6052f895c59feb0bb51 | [
"MIT"
] | 49 | 2018-06-05T09:42:19.000Z | 2022-02-15T12:50:51.000Z | test/support/cast.ex | depressed-pho/xema | 56de4a5d3b3f37827c70f6052f895c59feb0bb51 | [
"MIT"
] | 152 | 2017-06-11T13:43:06.000Z | 2022-01-09T17:13:45.000Z | test/support/cast.ex | depressed-pho/xema | 56de4a5d3b3f37827c70f6052f895c59feb0bb51 | [
"MIT"
] | 6 | 2019-05-31T05:41:47.000Z | 2021-12-14T08:09:36.000Z | defmodule Cast do
@moduledoc File.read!("docs/cast.md")
end
| 15.5 | 39 | 0.725806 |
1cc9459eb62661a47c4dc1ad27407a15dc36e889 | 1,722 | ex | Elixir | lib/slack/lookups.ex | nmohoric/Elixir-Slack | da6df7612cc0a8c56bc29498a8c636fa6f095d88 | [
"MIT"
] | null | null | null | lib/slack/lookups.ex | nmohoric/Elixir-Slack | da6df7612cc0a8c56bc29498a8c636fa6f095d88 | [
"MIT"
] | null | null | null | lib/slack/lookups.ex | nmohoric/Elixir-Slack | da6df7612cc0a8c56bc29498a8c636fa6f095d88 | [
"MIT"
] | 3 | 2017-08-16T23:02:52.000Z | 2020-09-23T13:31:35.000Z | defmodule Slack.Lookups do
@doc ~S"""
Turns a string like `"@USER_NAME"` into the ID that Slack understands (`"U…"`).
"""
def lookup_user_id("@" <> user_name, slack) do
slack.users
|> Map.values
|> Enum.find(%{ }, fn user -> user.name == user_name end)
|> Map.get(:id)
end
@doc ~S"""
Turns a string like `"@USER_NAME"` or a user ID (`"U…"`) into the ID for the
direct message channel of that user (`"D…"`). `nil` is returned if a direct
message channel has not yet been opened.
"""
def lookup_direct_message_id(user = "@" <> _user_name, slack) do
user
|> lookup_user_id(slack)
|> lookup_direct_message_id(slack)
end
def lookup_direct_message_id(user_id, slack) do
slack.ims
|> Map.values
|> Enum.find(%{ }, fn direct_message -> direct_message.user == user_id end)
|> Map.get(:id)
end
@doc ~S"""
Turns a string like `"@CHANNEL_NAME"` into the ID that Slack understands
(`"C…"`).
"""
def lookup_channel_id("#" <> channel_name, slack) do
slack.channels
|> Map.values
|> Enum.find(fn channel -> channel.name == channel_name end)
|> Map.get(:id)
end
@doc ~S"""
Turns a Slack user ID (`"U…"`) or direct message ID (`"D…"`) into a string in
the format "@USER_NAME".
"""
def lookup_user_name(direct_message_id = "D" <> _id, slack) do
lookup_user_name(slack.ims[direct_message_id].user, slack)
end
def lookup_user_name(user_id = "U" <> _id, slack) do
"@" <> slack.users[user_id].name
end
@doc ~S"""
Turns a Slack channel ID (`"C…"`) into a string in the format "#CHANNEL_NAME".
"""
def lookup_channel_name(channel_id = "C" <> _id, slack) do
"#" <> slack.channels[channel_id].name
end
end
| 29.689655 | 81 | 0.631243 |
1cc94857c2bdbdde23221f71a4c620a3ae3b1c16 | 1,276 | ex | Elixir | lib/mix/exenv.master_key.ex | nsweeting/exenv | 88a9863fd055aa4d99ab9bf416c0b35bc86eadac | [
"MIT"
] | 35 | 2019-03-10T05:16:16.000Z | 2021-12-05T00:12:55.000Z | lib/mix/exenv.master_key.ex | nsweeting/exenv | 88a9863fd055aa4d99ab9bf416c0b35bc86eadac | [
"MIT"
] | 2 | 2019-03-08T17:01:50.000Z | 2019-03-14T09:20:22.000Z | lib/mix/exenv.master_key.ex | nsweeting/exenv | 88a9863fd055aa4d99ab9bf416c0b35bc86eadac | [
"MIT"
] | null | null | null | defmodule Mix.Tasks.Exenv.MasterKey do
@shortdoc "Generates a master.key file within the given path"
@moduledoc """
Generates a master key file at the given path.
The generated file will be added to your `.gitignore` file.
mix exenv.master_key /config/master.key
"""
use Mix.Task
alias Exenv.Encryption
  @impl Mix.Task
  # With no argument, fall back to the documented default location.
  def run([]) do
    run(["/config/master.key"])
  end
  def run([path]) do
    path = sanitize_path(path)
    full_path = File.cwd!() <> path
    key = Encryption.create_master_key!(full_path)
    add_gitignore(path)
    # NOTE(review): `key` is passed where print_info/1 expects a path —
    # confirm create_master_key!/1 returns the path and not key material
    # (printing key material would leak the secret).
    print_info(key)
  end
defp sanitize_path(path) do
if String.first(path) == "/", do: path, else: "/#{path}"
end
  # Appends `path` to .gitignore unless an identical line is already present.
  # Opened read+append so one handle both scans and writes; closed either way.
  defp add_gitignore(path) do
    device = File.open!(".gitignore", [:read, :append])
    gitignore = device |> IO.binread(:all) |> String.split("\n")
    unless Enum.member?(gitignore, path) do
      # Two newlines separate the new section from existing content.
      IO.binwrite(device, "\n")
      IO.binwrite(device, "\n")
      IO.binwrite(device, "# Ignore the master key generated for encrypted secrets.\n")
      IO.binwrite(device, path)
    end
    File.close(device)
  end
  # Prints a success message for the user after key generation.
  # NOTE(review): the caller passes the return of create_master_key!/1 —
  # confirm it is the file path, as the message implies.
  defp print_info(path) do
    Mix.Shell.IO.info("""
    Master key generated at #{path}.
    File has been added to your projects .gitignore.
    """)
  end
end
| 23.2 | 87 | 0.647335 |
1cc9789dab39276b24a97613bd82aecc09352d67 | 444 | exs | Elixir | elixir/sum-of-multiples/sum_of_multiples.exs | macborowy/exercism | c5d45e074e81b946a82a340b2730e0d2732b7e0a | [
"MIT"
] | null | null | null | elixir/sum-of-multiples/sum_of_multiples.exs | macborowy/exercism | c5d45e074e81b946a82a340b2730e0d2732b7e0a | [
"MIT"
] | null | null | null | elixir/sum-of-multiples/sum_of_multiples.exs | macborowy/exercism | c5d45e074e81b946a82a340b2730e0d2732b7e0a | [
"MIT"
] | null | null | null | defmodule SumOfMultiples do
@doc """
Adds up all numbers from 1 to a given end number that are multiples of the factors provided.
"""
@spec to(non_neg_integer, [non_neg_integer]) :: non_neg_integer
def to(limit, factors) do
multiples(limit, factors)
|> List.flatten
|> Enum.uniq
|> Enum.sum
end
defp multiples(limit, factors) do
for f <- factors, do: Enum.filter(1..(limit - 1), &(rem(&1, f) == 0))
end
end
| 26.117647 | 94 | 0.655405 |
1cc980833818f18919aa37f20f4b06928f16d922 | 237 | exs | Elixir | .formatter.exs | am-kantox/lazy_for | 2b6882170bf9f2c3ef346aec595cb1cbe7850a36 | [
"MIT"
] | null | null | null | .formatter.exs | am-kantox/lazy_for | 2b6882170bf9f2c3ef346aec595cb1cbe7850a36 | [
"MIT"
] | 6 | 2019-09-17T15:45:43.000Z | 2019-09-18T12:34:09.000Z | .formatter.exs | am-kantox/lazy_for | 2b6882170bf9f2c3ef346aec595cb1cbe7850a36 | [
"MIT"
] | null | null | null | locals_without_parens = Enum.map(2..43, &{:stream, &1})
[
inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"],
locals_without_parens: locals_without_parens,
export: [locals_without_parens: locals_without_parens]
]
| 29.625 | 70 | 0.704641 |
1cc9a0373dff71455706c3ac4b8991f5c083beaa | 2,420 | ex | Elixir | lib/chat_api/users/user.ex | xprazak2/papercups | 925e7c20ab868648e078a129e832856026c50424 | [
"MIT"
] | 1 | 2021-01-18T09:57:23.000Z | 2021-01-18T09:57:23.000Z | lib/chat_api/users/user.ex | xprazak2/papercups | 925e7c20ab868648e078a129e832856026c50424 | [
"MIT"
] | null | null | null | lib/chat_api/users/user.ex | xprazak2/papercups | 925e7c20ab868648e078a129e832856026c50424 | [
"MIT"
] | null | null | null | defmodule ChatApi.Users.User do
use Ecto.Schema
use Pow.Ecto.Schema
import Ecto.Changeset
alias ChatApi.Conversations.Conversation
alias ChatApi.Messages.Message
alias ChatApi.Accounts.Account
alias ChatApi.Users.{UserProfile, UserSettings}
  # Users schema; Pow contributes the authentication fields (email/password
  # hash) via pow_user_fields/0 below.
  schema "users" do
    field(:email_confirmation_token, :string)
    field(:password_reset_token, :string)
    field(:email_confirmed_at, :utc_datetime)
    field(:disabled_at, :utc_datetime)
    field(:archived_at, :utc_datetime)
    field(:role, :string, default: "user")
    field(:has_valid_email, :boolean)
    # Conversations this user is assigned to (not ones they authored).
    has_many(:conversations, Conversation, foreign_key: :assignee_id)
    has_many(:messages, Message, foreign_key: :user_id)
    belongs_to(:account, Account, type: :binary_id)
    has_one(:profile, UserProfile)
    has_one(:settings, UserSettings)
    pow_user_fields()
    timestamps()
  end
  # Default changeset: Pow's base changeset plus account/role assignment.
  # Every user must belong to an account.
  def changeset(user_or_changeset, attrs) do
    user_or_changeset
    |> pow_changeset(attrs)
    |> cast(attrs, [:account_id, :role])
    |> validate_required([:account_id])
  end
  # Restricts role updates to the known roles ("user" and "admin").
  @spec role_changeset(Ecto.Schema.t() | Ecto.Changeset.t(), map()) :: Ecto.Changeset.t()
  def role_changeset(user_or_changeset, attrs) do
    user_or_changeset
    |> cast(attrs, [:role])
    |> validate_inclusion(:role, ~w(user admin))
  end
  # Casts the disable/archive timestamps (both optional).
  # NOTE(review): `validate_required([])` is a no-op — confirm whether a
  # field list was intended; the same pattern repeats in the changesets
  # below.
  @spec disabled_at_changeset(Ecto.Schema.t() | Ecto.Changeset.t(), map()) :: Ecto.Changeset.t()
  def disabled_at_changeset(user_or_changeset, attrs) do
    user_or_changeset
    |> cast(attrs, [:disabled_at, :archived_at])
    |> validate_required([])
  end
  # Casts the email-verification bookkeeping fields (token, confirmation
  # timestamp, validity flag).
  @spec email_verification_changeset(Ecto.Schema.t() | Ecto.Changeset.t(), map()) ::
          Ecto.Changeset.t()
  def email_verification_changeset(user_or_changeset, attrs) do
    user_or_changeset
    |> cast(attrs, [:email_confirmation_token, :email_confirmed_at, :has_valid_email])
    |> validate_required([])
  end
  # Casts (sets or clears) the password-reset token.
  @spec password_reset_changeset(Ecto.Schema.t() | Ecto.Changeset.t(), map()) ::
          Ecto.Changeset.t()
  def password_reset_changeset(user_or_changeset, attrs) do
    user_or_changeset
    |> cast(attrs, [:password_reset_token])
    |> validate_required([])
  end
  # Updates the password via Pow and, in the same changeset, casts the
  # reset token (so the token can be cleared when the password changes).
  @spec password_changeset(Ecto.Schema.t() | Ecto.Changeset.t(), map()) ::
          Ecto.Changeset.t()
  def password_changeset(user_or_changeset, attrs) do
    user_or_changeset
    |> pow_password_changeset(attrs)
    |> password_reset_changeset(attrs)
  end
end
| 31.842105 | 96 | 0.71157 |
1cc9d075ef10bb7378c7bcaeaec3f18aa49860df | 1,226 | ex | Elixir | lib/satellite_web/router.ex | joshnuss/satellite-prototype | 9cf813d8b75efab9a8118f9fecb9ac6f70bd841a | [
"MIT"
] | null | null | null | lib/satellite_web/router.ex | joshnuss/satellite-prototype | 9cf813d8b75efab9a8118f9fecb9ac6f70bd841a | [
"MIT"
] | null | null | null | lib/satellite_web/router.ex | joshnuss/satellite-prototype | 9cf813d8b75efab9a8118f9fecb9ac6f70bd841a | [
"MIT"
] | null | null | null | defmodule SatelliteWeb.Router do
use SatelliteWeb, :router
  # Standard browser stack: session, flash, CSRF and security headers.
  pipeline :browser do
    plug :accepts, ["html"]
    plug :fetch_session
    plug :fetch_flash
    plug :protect_from_forgery
    plug :put_secure_browser_headers
  end
  pipeline :api do
    plug :accepts, ["json"]
  end
  scope "/", SatelliteWeb do
    pipe_through :browser
    get "/", PageController, :index
  end
  # JSON ingestion endpoints for collected telemetry payloads.
  scope "/api", SatelliteWeb.API do
    pipe_through :api
    post "/requests", RequestController, :create
    post "/logs", LogController, :create
    post "/errors", ErrorController, :create
    post "/metrics", MetricController, :create
  end
  # Enables LiveDashboard only for development
  #
  # If you want to use the LiveDashboard in production, you should put
  # it behind authentication and allow only admins to access it.
  # If your application does not have an admins-only section yet,
  # you can use Plug.BasicAuth to set up some basic authentication
  # as long as you are also using SSL (which you should anyway).
  if Mix.env() in [:dev, :test] do
    import Phoenix.LiveDashboard.Router
    scope "/" do
      pipe_through :browser
      live_dashboard "/dashboard", metrics: SatelliteWeb.Telemetry
    end
  end
end
| 26.085106 | 70 | 0.702284 |
1cc9f77f2b49d40f95350e5ae6cc3383ed4c265b | 9,301 | ex | Elixir | lib/ecto_mnesia/storage/migrator.ex | rudebono/ecto_mnesia | a2672ee25692d24570cefa6025ca73c8929566cb | [
"MIT"
] | 245 | 2016-10-22T14:43:36.000Z | 2022-02-28T03:46:27.000Z | lib/ecto_mnesia/storage/migrator.ex | KushanChamindu/ecto_mnesia | bc0fb46d210eb1be63bcc8f0383212f0633b8c52 | [
"MIT"
] | 80 | 2016-10-22T16:09:19.000Z | 2021-03-03T10:45:01.000Z | lib/ecto_mnesia/storage/migrator.ex | KushanChamindu/ecto_mnesia | bc0fb46d210eb1be63bcc8f0383212f0633b8c52 | [
"MIT"
] | 53 | 2016-12-03T14:23:05.000Z | 2021-07-01T01:55:22.000Z | defmodule EctoMnesia.Storage.Migrator do
@moduledoc """
This module implements `Ecto.Storage` behavior that is used by `Ecto.Migrations`.
"""
alias EctoMnesia.Table
alias :mnesia, as: Mnesia
@pk_table_name :id_seq
  @doc false
  # Tables
  # `{:create, table, instructions}`: builds the attribute list from the
  # migration instructions and creates the table; raises if it exists.
  def execute(repo, {:create, %Ecto.Migration.Table{name: table, engine: type}, instructions}, _opts) do
    ensure_pk_table!(repo)
    table_attrs =
      instructions
      |> Enum.reduce([], &reduce_fields(&1, &2, [], :skip))
      |> Enum.uniq()
    case do_create_table(repo, table, type, table_attrs) do
      :ok -> :ok
      :already_exists -> raise "Table #{table} already exists"
    end
  end
  # `{:create_if_not_exists, ...}`: like :create but tolerant of an existing
  # table. Existing attributes are read first (the catch treats a missing
  # table as an empty attribute list) so instructions extend them.
  def execute(repo, {:create_if_not_exists, %Ecto.Migration.Table{name: table, engine: type}, instructions}, _opts) do
    ensure_pk_table!(repo)
    table_attrs =
      try do
        table
        |> Table.get_name()
        |> Mnesia.table_info(:attributes)
      catch
        :exit, {:aborted, _reason} -> []
      end
    new_table_attrs =
      instructions
      |> Enum.reduce(table_attrs, &reduce_fields(&1, &2, [], :skip))
      |> Enum.uniq()
    case do_create_table(repo, table, type, new_table_attrs) do
      :ok -> :ok
      :already_exists -> :ok
    end
  end
  # `{:alter, ...}`: computes the new attribute list from the instructions
  # (:raise mode — unknown fields are an error) and rewrites every record
  # via Mnesia.transform_table/3 with alter_fn/3.
  def execute(repo, {:alter, %Ecto.Migration.Table{name: table}, instructions}, _opts) do
    ensure_pk_table!(repo)
    table = if String.valid?(table), do: String.to_atom(table), else: table
    table_attrs =
      try do
        table
        |> Table.get_name()
        |> Mnesia.table_info(:attributes)
      catch
        :exit, {:aborted, _reason} -> []
      end
    new_table_attrs =
      instructions
      |> Enum.reduce(table_attrs, &reduce_fields(&1, &2, table_attrs, :raise))
      |> Enum.uniq()
    try do
      case Mnesia.transform_table(table, &alter_fn(&1, table_attrs, new_table_attrs), new_table_attrs) do
        {:atomic, :ok} -> :ok
        error -> error
      end
    catch
      # transform_table exits when the table is absent; surface a clear error.
      :exit, {:aborted, {:no_exists, {_, :record_name}}} -> raise "Table #{table} does not exists"
    end
  end
  # `{:rename, table, old, new}`: implemented as an alter whose single
  # instruction renames the field; `renames` tells alter_fn/4 how to carry
  # each record's value from the old column to the new one.
  def execute(repo, {:rename, %Ecto.Migration.Table{name: table}, old_field, new_field}, _opts) do
    ensure_pk_table!(repo)
    table = if String.valid?(table), do: String.to_atom(table), else: table
    table_attrs =
      try do
        table
        |> Table.get_name()
        |> Mnesia.table_info(:attributes)
      catch
        :exit, {:aborted, _reason} -> []
      end
    new_table_attrs =
      [{:rename, old_field, new_field}]
      |> Enum.reduce(table_attrs, &reduce_fields(&1, &2, table_attrs, :raise))
      |> Enum.uniq()
    renames = [{old_field, new_field}]
    try do
      case Mnesia.transform_table(table, &alter_fn(&1, table_attrs, new_table_attrs, renames), new_table_attrs) do
        {:atomic, :ok} -> :ok
        error -> error
      end
    catch
      :exit, {:aborted, {:no_exists, {_, :record_name}}} -> raise "Table #{table} does not exists"
    end
  end
  # `{:drop, table}`: deletes the table, raising when it does not exist.
  def execute(_repo, {:drop, %Ecto.Migration.Table{name: table}}, _opts) do
    table = if String.valid?(table), do: String.to_atom(table), else: table
    case Mnesia.delete_table(table) do
      {:atomic, :ok} -> :ok
      {:aborted, {:no_exists, _}} -> raise "Table #{table} does not exists"
    end
  end
  # `{:drop_if_exists, table}`: same, but a missing table is fine.
  def execute(_repo, {:drop_if_exists, %Ecto.Migration.Table{name: table}}, _opts) do
    table = if String.valid?(table), do: String.to_atom(table), else: table
    case Mnesia.delete_table(table) do
      {:atomic, :ok} -> :ok
      {:aborted, {:no_exists, _}} -> :ok
    end
  end
  # Indexes
  # Index commands map each column to a Mnesia secondary index. The four
  # clauses differ only in how they treat already-existing / missing
  # indexes (raise vs. tolerate).
  def execute(_repo, {:create, %Ecto.Migration.Index{table: table, columns: columns}}, _opts) do
    table = if String.valid?(table), do: String.to_atom(table), else: table
    columns
    |> Enum.uniq()
    |> Enum.map(fn index ->
      case Mnesia.add_table_index(table, index) do
        {:atomic, :ok} -> :ok
        {:aborted, {:node_not_running, not_found_node}} -> raise "Node #{inspect(not_found_node)} is not started"
        {:aborted, {:already_exists, ^table, _}} -> raise "Index for field #{index} in table #{table} already exists"
      end
    end)
  end
  # Same as :create, but an existing index is not an error.
  def execute(_repo, {:create_if_not_exists, %Ecto.Migration.Index{table: table, columns: columns}}, _opts) do
    table = if String.valid?(table), do: String.to_atom(table), else: table
    columns
    |> Enum.uniq()
    |> Enum.map(fn index ->
      case Mnesia.add_table_index(table, index) do
        {:atomic, :ok} -> :ok
        {:aborted, {:node_not_running, not_found_node}} -> raise "Node #{inspect(not_found_node)} is not started"
        {:aborted, {:already_exists, ^table, _}} -> :ok
      end
    end)
  end
  # Drops the index for each column, raising when one is missing.
  def execute(_repo, {:drop, %Ecto.Migration.Index{table: table, columns: columns}}, _opts) do
    table = if String.valid?(table), do: String.to_atom(table), else: table
    columns
    |> Enum.uniq()
    |> Enum.map(fn index ->
      case Mnesia.del_table_index(table, index) do
        {:atomic, :ok} -> :ok
        {:aborted, {:node_not_running, not_found_node}} -> raise "Node #{inspect(not_found_node)} is not started"
        {:aborted, {:no_exists, ^table, _}} -> raise "Index for field #{index} in table #{table} does not exists"
      end
    end)
  end
  # Same as :drop, but a missing index is not an error.
  def execute(_repo, {:drop_if_exists, %Ecto.Migration.Index{table: table, columns: columns}}, _opts) do
    table = if String.valid?(table), do: String.to_atom(table), else: table
    columns
    |> Enum.uniq()
    |> Enum.map(fn index ->
      case Mnesia.del_table_index(table, index) do
        {:atomic, :ok} -> :ok
        {:aborted, {:node_not_running, not_found_node}} -> raise "Node #{inspect(not_found_node)} is not started"
        {:aborted, {:no_exists, ^table, _}} -> :ok
      end
    end)
  end
# Helpers
  # Creates a Mnesia table with attributes plus storage/engine settings
  # taken from the repo config. Returns :ok, or :already_exists when the
  # table was created previously.
  defp do_create_table(repo, table, type, attributes) do
    config = conf(repo)
    # Pad single-attribute tables with a dummy :__hidden attribute —
    # presumably because Mnesia rejects records with fewer than two
    # attributes (TODO confirm against mnesia:create_table/2 docs).
    attributes =
      if length(attributes) == 1 do
        attributes ++ [:__hidden]
      else
        attributes
      end
    tab_def = [{:attributes, attributes}, {config[:storage_type], [config[:host]]}, {:type, get_engine(type)}]
    table = if String.valid?(table), do: String.to_atom(table), else: table
    case Mnesia.create_table(table, tab_def) do
      {:atomic, :ok} ->
        # Give the new table up to 1s to become available before returning.
        Mnesia.wait_for_tables([table], 1_000)
        :ok
      {:aborted, {:already_exists, ^table}} ->
        :already_exists
    end
  end
  # Maps the migration's engine option to a Mnesia table type, defaulting
  # to :ordered_set.
  # NOTE: the `nil` clause must stay first — `nil` is itself an atom, so
  # the `is_atom/1` clause would otherwise match it and return nil.
  defp get_engine(nil), do: :ordered_set
  defp get_engine(type) when is_atom(type), do: type
defp reduce_fields({:remove, field}, fields, table_fields, on_not_found) do
if on_not_found == :raise and !field_exists?(table_fields, field) do
raise "Field #{field} not found"
end
Enum.filter(fields, &(&1 != field))
end
defp reduce_fields({:rename, old_field, new_field}, fields, table_fields, on_not_found) do
if on_not_found == :raise and !field_exists?(table_fields, old_field) do
raise "Field #{old_field} not found"
end
case Enum.find_index(fields, &(&1 == old_field)) do
nil ->
if on_not_found == :raise,
do: raise("Field #{old_field} not found"),
else: fields
index when is_number(index) ->
List.replace_at(fields, index, new_field)
end
end
defp reduce_fields({:add, field, _type, _opts}, fields, _table_fields, on_duplicate) do
if on_duplicate == :raise and field_exists?(fields, field) do
raise "Duplicate field #{field}"
end
fields ++ [field]
end
defp reduce_fields({:modify, field, _type, _opts}, fields, table_fields, on_not_found) do
if on_not_found == :raise and !field_exists?(table_fields ++ fields, field) do
raise "Field #{field} not found"
end
fields
end
defp field_exists?(table_fields, field), do: field in table_fields
# Altering function traverses Mnesia table on schema migrations and moves field values to persist them
  # Rebuilds one Mnesia record tuple after a schema change.
  #
  # `fields_before`/`fields_after` are the attribute lists before and after
  # the migration; `data_migrations` is a list of `{old_name, new_name}`
  # renames so a renamed column keeps its stored value. Values of dropped
  # fields are discarded; newly added fields start as nil.
  defp alter_fn(record, fields_before, fields_after, data_migrations \\ []) do
    # Element 0 of a Mnesia record tuple is the record (table) name;
    # attribute values start at element 1 — hence `index + 1` below.
    record_name = elem(record, 0)
    # Start with a nil placeholder per post-migration attribute.
    acc = Enum.map(1..length(fields_after), fn _ -> nil end)
    fields_after
    |> Enum.reduce(acc, fn field, acc ->
      # Where the value lived before (following renames), if anywhere.
      old_index = find_field_index(fields_before, field, data_migrations)
      new_index = find_field_index(fields_after, field)
      value =
        case old_index do
          nil -> nil
          index -> elem(record, index + 1)
        end
      List.replace_at(acc, new_index, value)
    end)
    |> List.insert_at(0, record_name)
    |> List.to_tuple()
  end
def find_field_index(fields, field), do: Enum.find_index(fields, &(&1 == field))
def find_field_index(fields, field, data_migrations) do
case Enum.find(data_migrations, fn {_old_name, new_name} -> new_name == field end) do
{old_field, _new_field} ->
find_field_index(fields, old_field)
nil ->
find_field_index(fields, field)
end
end
defp ensure_pk_table!(repo) do
res =
try do
Mnesia.table_info(:size, @pk_table_name)
catch
:exit, {:aborted, {:no_exists, :size, _}} -> :no_exists
end
case res do
:no_exists ->
do_create_table(repo, @pk_table_name, :set, [:thing, :id])
_ ->
Mnesia.wait_for_tables([@pk_table_name], 1_000)
:ok
end
end
defp conf(repo), do: EctoMnesia.Storage.conf(repo.config)
end
| 30.495082 | 118 | 0.628212 |
1cca073d6248547145cf73a7216cb719f2d1995c | 2,922 | exs | Elixir | mix.exs | lexa62/hb | c6b2169ad3e5af546da3f7ad0d499f5d5d29d044 | [
"MIT"
] | null | null | null | mix.exs | lexa62/hb | c6b2169ad3e5af546da3f7ad0d499f5d5d29d044 | [
"MIT"
] | null | null | null | mix.exs | lexa62/hb | c6b2169ad3e5af546da3f7ad0d499f5d5d29d044 | [
"MIT"
] | null | null | null | defmodule Hb.Mixfile do
use Mix.Project
def project do
[app: :hb,
version: "1.0.0",
elixir: "~> 1.2",
description: "Hb release deb",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix, :gettext] ++ Mix.compilers,
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
aliases: aliases(),
deps: deps(),
package: package()]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[mod: {Hb, []},
applications: [:phoenix, :phoenix_pubsub, :phoenix_html, :cowboy, :logger, :gettext,
:phoenix_ecto, :postgrex, :comeonin, :httpoison, :cachex, :arbor,
:nimble_csv, :filterable, :money, :ecto_enum, :guardian, :exrm_deb, :elixir_make]]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[{:phoenix, "~> 1.2.1"},
{:phoenix_pubsub, "~> 1.0"},
{:phoenix_ecto, "~> 3.0"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 2.6"},
{:phoenix_live_reload, "~> 1.0", only: :dev},
{:gettext, "~> 0.11"},
{:cowboy, "~> 1.0"},
{:comeonin, "~> 3.0"},
{:guardian, "~> 0.14"},
{:ecto_enum, "~> 1.0"},
{:httpoison, "~> 0.11.1"},
{:cachex, "~> 2.1"},
{:money, "~> 1.2.1"},
{:currencies, "~> 0.5.1"},
{:arbor, "~> 1.0.3"},
{:nimble_csv, "~> 0.1.0"},
{:filterable, "~> 0.5.2"},
{:exrm, "~> 1.0"},
{:distillery, "~> 1.4"},
{:exrm_deb, github: "johnhamelink/exrm_deb", branch: "feature/distillery-support"}]
end
def package do
[
maintainer_scripts: [],
external_dependencies: [],
license_file: "LICENSE",
files: [ "lib", "priv", "mix.exs", "README*", "LICENSE"],
maintainers: ["Aleksey Alekhine <lexa62@tut.by>"],
licenses: ["MIT"],
vendor: "Aleksey Alekhine",
config_files: [],
links: %{
"GitHub" => "https://gitlab.com/lexa62/hb",
"Docs" => "https://hexdocs.pm/exrm_deb",
"Homepage" => "https://gitlab.com/lexa62/hb"
}
]
end
def lsb_release do
{release, _} = System.cmd("lsb_release", ["-c", "-s"])
String.replace(release, "\n", "")
"jessie"
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
["ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
"test": ["ecto.create --quiet", "ecto.migrate", "test"]]
end
end
| 30.757895 | 102 | 0.561944 |
1cca2b5372059a9ac452583c8c64412c1ce12815 | 3,866 | ex | Elixir | projects/api/lib/margaret_web/resolvers/publication_invitations.ex | strattadb/margaret | dde5d7b42f6d9b4d320069a0117136dae03b13b5 | [
"MIT"
] | 82 | 2017-11-06T01:00:55.000Z | 2020-12-09T10:35:29.000Z | projects/api/lib/margaret_web/resolvers/publication_invitations.ex | dbstratta/margaret | dde5d7b42f6d9b4d320069a0117136dae03b13b5 | [
"MIT"
] | 98 | 2017-11-06T22:57:32.000Z | 2020-07-03T04:46:39.000Z | projects/api/lib/margaret_web/resolvers/publication_invitations.ex | strattadb/margaret | dde5d7b42f6d9b4d320069a0117136dae03b13b5 | [
"MIT"
] | 10 | 2017-11-16T05:31:58.000Z | 2020-10-29T18:02:35.000Z | defmodule MargaretWeb.Resolvers.PublicationInvitations do
@moduledoc """
The Publication Invitation GraphQL resolvers.
"""
import Margaret.Helpers, only: [ok: 1]
alias MargaretWeb.Helpers
alias Margaret.{Accounts, Publications}
alias Accounts.User
alias Publications.PublicationInvitation
@doc """
Resolves the sending of a publication invitation.
"""
def resolve_send_publication_invitation(args, %{context: %{viewer: viewer}}) do
%{publication_id: publication_id, invitee_id: invitee_id, role: role} = args
publication_id
|> Publications.get_publication()
|> do_resolve_send_publication_invitation(invitee_id, viewer, role)
end
defp do_resolve_send_publication_invitation(nil = _publication, _invitee_id, _inviter, _role),
do: Helpers.GraphQLErrors.publication_not_found()
defp do_resolve_send_publication_invitation(publication, invitee_id, inviter, role) do
with %User{} = invitee <- Accounts.get_user(invitee_id),
true <- Publications.can_invite?(publication, inviter, invitee, role),
false <- Publications.member?(publication, invitee),
{:ok, %{invitation: invitation}} <-
Publications.invite_user(publication, inviter, invitee, role) do
{:ok, %{invitation: invitation}}
else
nil ->
Helpers.GraphQLErrors.user_not_found()
false ->
Helpers.GraphQLErrors.unauthorized()
true ->
{:error, "User is already a member of the publication"}
{:error, _, reason, _} ->
{:error, reason}
end
end
@doc """
Resolves the publication of a publication invitation.
"""
def resolve_publication(%PublicationInvitation{publication_id: publication_id}, _, _) do
publication_id
|> Publications.get_publication()
|> ok()
end
@doc """
Resolves the invitee of a publication invitation.
"""
def resolve_invitee(%PublicationInvitation{invitee_id: invitee_id}, _, _) do
invitee_id
|> Accounts.get_user()
|> ok()
end
@doc """
Resolves the inviter of a publication invitation.
"""
def resolve_inviter(%PublicationInvitation{inviter_id: inviter_id}, _, _) do
inviter_id
|> Accounts.get_user()
|> ok()
end
@doc """
Accepts a publication invitation.
"""
def resolve_accept_publication_invitation(%{invitation_id: invitation_id}, %{
context: %{viewer: %{id: viewer_id}}
}) do
invitation_id
|> Publications.get_invitation()
|> do_resolve_accept_publication_invitation(viewer_id)
end
defp do_resolve_accept_publication_invitation(
%PublicationInvitation{invitee_id: invitee_id} = invitation,
viewer_id
)
when invitee_id === viewer_id do
case Publications.accept_invitation(invitation) do
{:ok, %{invitation: invitation}} -> {:ok, %{invitation: invitation}}
{:error, _, _, _} -> Helpers.GraphQLErrors.something_went_wrong()
end
end
defp do_resolve_accept_publication_invitation(nil, _) do
Helpers.GraphQLErrors.invitation_not_found()
end
defp do_resolve_accept_publication_invitation(
%PublicationInvitation{invitee_id: invitee_id},
viewer_id
)
when invitee_id !== viewer_id do
Helpers.GraphQLErrors.unauthorized()
end
@doc """
Rejects a publication invitation.
"""
def resolve_reject_publication_invitation(%{invitation_id: invitation_id}, %{
context: %{viewer: viewer}
}) do
with %PublicationInvitation{} = invitation <- Publications.get_invitation(invitation_id),
true <- Publications.can_reject_invitation?(invitation, viewer),
invitation <- Publications.reject_invitation!(invitation) do
{:ok, %{invitation: invitation}}
else
nil -> Helpers.GraphQLErrors.invitation_not_found()
false -> Helpers.GraphQLErrors.unauthorized()
end
end
end
| 30.928 | 96 | 0.696586 |
1cca5e169354b138f69890aacbabcf87ccde5144 | 2,886 | ex | Elixir | clients/access_approval/lib/google_api/access_approval/v1/model/enrolled_service.ex | Contractbook/elixir-google-api | 342751041aaf8c2e7f76f9922cf24b9c5895802b | [
"Apache-2.0"
] | 1 | 2021-10-01T09:20:41.000Z | 2021-10-01T09:20:41.000Z | clients/access_approval/lib/google_api/access_approval/v1/model/enrolled_service.ex | Contractbook/elixir-google-api | 342751041aaf8c2e7f76f9922cf24b9c5895802b | [
"Apache-2.0"
] | null | null | null | clients/access_approval/lib/google_api/access_approval/v1/model/enrolled_service.ex | Contractbook/elixir-google-api | 342751041aaf8c2e7f76f9922cf24b9c5895802b | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AccessApproval.V1.Model.EnrolledService do
  @moduledoc """
  Represents the enrollment of a cloud resource into a specific service.
  ## Attributes
  * `cloudProduct` (*type:* `String.t`, *default:* `nil`) - The product for which Access Approval will be enrolled. Allowed values are listed below (case-sensitive): * all * GA * App Engine * BigQuery * Cloud Bigtable * Cloud Key Management Service * Compute Engine * Cloud Dataflow * Cloud DLP * Cloud EKM * Cloud HSM * Cloud Identity and Access Management * Cloud Logging * Cloud Pub/Sub * Cloud Spanner * Cloud SQL * Cloud Storage * Google Kubernetes Engine * Persistent Disk Note: These values are supported as input for legacy purposes, but will not be returned from the API. * all * ga-only * appengine.googleapis.com * bigquery.googleapis.com * bigtable.googleapis.com * container.googleapis.com * cloudkms.googleapis.com * cloudsql.googleapis.com * compute.googleapis.com * dataflow.googleapis.com * dlp.googleapis.com * iam.googleapis.com * logging.googleapis.com * pubsub.googleapis.com * spanner.googleapis.com * storage.googleapis.com Calls to UpdateAccessApprovalSettings using 'all' or any of the XXX.googleapis.com will be translated to the associated product name ('all', 'App Engine', etc.). Note: 'all' will enroll the resource in all products supported at both 'GA' and 'Preview' levels. More information about levels of support is available at https://cloud.google.com/access-approval/docs/supported-services
  * `enrollmentLevel` (*type:* `String.t`, *default:* `nil`) - The enrollment level of the service.
  """
  # NOTE: generated by the Google API code generator — regenerate rather
  # than editing the fields below by hand.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :cloudProduct => String.t() | nil,
          :enrollmentLevel => String.t() | nil
        }
  field(:cloudProduct)
  field(:enrollmentLevel)
end
# Delegates Poison decoding to the model's generated decode/2.
defimpl Poison.Decoder, for: GoogleApi.AccessApproval.V1.Model.EnrolledService do
  def decode(value, options) do
    GoogleApi.AccessApproval.V1.Model.EnrolledService.decode(value, options)
  end
end
# Delegates Poison encoding to the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.AccessApproval.V1.Model.EnrolledService do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 57.72 | 1,330 | 0.752252 |
1cca614e7d9d86dde86c03d5c7002c45b3e00908 | 1,652 | ex | Elixir | lib/flickrex/parsers/auth.ex | christopheradams/flickrex | 67e3a507e128c55969aeda3802f6dacd94bb03c5 | [
"MIT"
] | 13 | 2017-04-02T10:55:10.000Z | 2022-03-02T02:55:40.000Z | lib/flickrex/parsers/auth.ex | christopheradams/flickrex | 67e3a507e128c55969aeda3802f6dacd94bb03c5 | [
"MIT"
] | null | null | null | lib/flickrex/parsers/auth.ex | christopheradams/flickrex | 67e3a507e128c55969aeda3802f6dacd94bb03c5 | [
"MIT"
] | 1 | 2021-01-31T17:47:27.000Z | 2021-01-31T17:47:27.000Z | defmodule Flickrex.Parsers.Auth do
@moduledoc false
alias Flickrex.Response
@type response :: {:ok, Response.t()} | {:error, term}
@spec parse_request_token(response) :: response
def parse_request_token({:ok, %{status_code: 200, body: body} = resp}) do
oauth_token = URI.decode_query(body, %{})
token = oauth_token["oauth_token"]
secret = oauth_token["oauth_token_secret"]
callback_confirmed =
case oauth_token["oauth_callback_confirmed"] do
"true" -> true
"false" -> false
_ -> nil
end
parsed_body = %{
oauth_token: token,
oauth_token_secret: secret,
oauth_callback_confirmed: callback_confirmed
}
{:ok, %{resp | body: parsed_body}}
end
def parse_request_token(val) do
parse_token(val)
end
@spec parse_access_token(response) :: response
def parse_access_token({:ok, %{status_code: 200, body: body} = resp}) do
access_token = URI.decode_query(body, %{})
parsed_body = %{
fullname: access_token["fullname"],
oauth_token: access_token["oauth_token"],
oauth_token_secret: access_token["oauth_token_secret"],
user_nsid: access_token["user_nsid"],
username: access_token["username"]
}
{:ok, %{resp | body: parsed_body}}
end
def parse_access_token(val) do
parse_token(val)
end
defp parse_token({:ok, %{status_code: code, body: body} = resp}) when code in [400, 401] do
{:error, %{resp | body: URI.decode_query(body)}}
end
defp parse_token({:ok, %{status_code: status_code} = resp}) when status_code >= 400 do
{:error, resp}
end
defp parse_token(val), do: val
end
| 25.8125 | 93 | 0.659201 |
1ccaa81d83577af1882933738e8507a67067ef3f | 3,761 | ex | Elixir | clients/double_click_bid_manager/lib/google_api/double_click_bid_manager/v1/model/query_metadata.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | clients/double_click_bid_manager/lib/google_api/double_click_bid_manager/v1/model/query_metadata.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | clients/double_click_bid_manager/lib/google_api/double_click_bid_manager/v1/model/query_metadata.ex | jechol/elixir-google-api | 0290b683dfc6491ca2ef755a80bc329378738d03 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DoubleClickBidManager.V1.Model.QueryMetadata do
  @moduledoc """
  Query metadata.
  ## Attributes
  * `dataRange` (*type:* `String.t`, *default:* `nil`) - Range of report data.
  * `format` (*type:* `String.t`, *default:* `nil`) - Format of the generated report.
  * `googleCloudStoragePathForLatestReport` (*type:* `String.t`, *default:* `nil`) - The path to the location in Google Cloud Storage where the latest report is stored.
  * `googleDrivePathForLatestReport` (*type:* `String.t`, *default:* `nil`) - The path in Google Drive for the latest report.
  * `latestReportRunTimeMs` (*type:* `String.t`, *default:* `nil`) - The time when the latest report started to run.
  * `locale` (*type:* `String.t`, *default:* `nil`) - Locale of the generated reports. Valid values are cs CZECH de GERMAN en ENGLISH es SPANISH fr FRENCH it ITALIAN ja JAPANESE ko KOREAN pl POLISH pt-BR BRAZILIAN_PORTUGUESE ru RUSSIAN tr TURKISH uk UKRAINIAN zh-CN CHINA_CHINESE zh-TW TAIWAN_CHINESE An locale string not in the list above will generate reports in English.
  * `reportCount` (*type:* `integer()`, *default:* `nil`) - Number of reports that have been generated for the query.
  * `running` (*type:* `boolean()`, *default:* `nil`) - Whether the latest report is currently running.
  * `sendNotification` (*type:* `boolean()`, *default:* `nil`) - Whether to send an email notification when a report is ready. Default to false.
  * `shareEmailAddress` (*type:* `list(String.t)`, *default:* `nil`) - List of email addresses which are sent email notifications when the report is finished. Separate from sendNotification.
  * `title` (*type:* `String.t`, *default:* `nil`) - Query title. It is used to name the reports generated from this query.
  """
  # NOTE: generated by the Google API code generator — regenerate rather
  # than editing the fields below by hand.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :dataRange => String.t() | nil,
          :format => String.t() | nil,
          :googleCloudStoragePathForLatestReport => String.t() | nil,
          :googleDrivePathForLatestReport => String.t() | nil,
          :latestReportRunTimeMs => String.t() | nil,
          :locale => String.t() | nil,
          :reportCount => integer() | nil,
          :running => boolean() | nil,
          :sendNotification => boolean() | nil,
          :shareEmailAddress => list(String.t()) | nil,
          :title => String.t() | nil
        }
  field(:dataRange)
  field(:format)
  field(:googleCloudStoragePathForLatestReport)
  field(:googleDrivePathForLatestReport)
  field(:latestReportRunTimeMs)
  field(:locale)
  field(:reportCount)
  field(:running)
  field(:sendNotification)
  field(:shareEmailAddress, type: :list)
  field(:title)
end
# Delegates Poison decoding to the model's generated decode/2.
defimpl Poison.Decoder, for: GoogleApi.DoubleClickBidManager.V1.Model.QueryMetadata do
  def decode(value, options) do
    GoogleApi.DoubleClickBidManager.V1.Model.QueryMetadata.decode(value, options)
  end
end
# Delegates Poison encoding to the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.DoubleClickBidManager.V1.Model.QueryMetadata do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 48.844156 | 375 | 0.701143 |
1ccaa897ab472938457027a6a459fdcca5d04a8b | 1,248 | exs | Elixir | mix.exs | twix14/elastic | 1e2c7a3fe7b28bfa6cc574b10fd7bc6dca3dadfa | [
"MIT"
] | null | null | null | mix.exs | twix14/elastic | 1e2c7a3fe7b28bfa6cc574b10fd7bc6dca3dadfa | [
"MIT"
] | null | null | null | mix.exs | twix14/elastic | 1e2c7a3fe7b28bfa6cc574b10fd7bc6dca3dadfa | [
"MIT"
] | null | null | null | defmodule Elastic.Mixfile do
use Mix.Project
@version "3.5.2"
def project do
[
app: :elastic,
version: @version,
elixir: "~> 1.5",
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
elixirc_paths: elixirc_paths(Mix.env()),
package: package(),
# Docs
name: "Elastic",
docs: [
source_ref: "v#{@version}",
main: "Elastic",
canonical: "http://hexdocs.pm/elastic",
source_url: "https://github.com/radar/elastic"
],
deps: deps()
]
end
def application do
[applications: [:logger, :httpotion, :aws_auth, :jason]]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
defp deps do
[
{:httpotion, "~> 3.1"},
{:jason, "~> 1.1.2"},
{:aws_auth, "~> 0.7.1"},
{:credo, "~> 1.0", only: [:dev, :test]},
{:ex_doc, ">= 0.0.0", only: :dev}
]
end
defp package do
[
name: :elastic,
description: "You Know, for (Elastic) Search",
files: ["lib", "README*", "mix.exs"],
maintainers: ["Ryan Bigg"],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/radar/elastic"}
]
end
end
| 23.111111 | 62 | 0.527244 |
1ccae2cd9a997eddab057e09d22bbc15a24c0788 | 920 | ex | Elixir | lib/griffin/lib/http.ex | craigspaeth/griffin | 44a59e08b119b4725369e90dc1398b9ec51173b7 | [
"MIT"
] | null | null | null | lib/griffin/lib/http.ex | craigspaeth/griffin | 44a59e08b119b4725369e90dc1398b9ec51173b7 | [
"MIT"
] | 2 | 2017-12-11T06:40:30.000Z | 2018-03-06T06:04:05.000Z | lib/griffin/lib/http.ex | craigspaeth/griffin | 44a59e08b119b4725369e90dc1398b9ec51173b7 | [
"MIT"
] | null | null | null | defmodule Griffin.HTTP do
@moduledoc """
Elixir(script) wrapper for a universal HTTP API
"""
import ExScript.Universal
def gql!(url, query) do
res = await(post!(url, body: query, headers: ["Content-Type": "application/graphql"]))
res.data
end
def post!(url, options) do
json =
if env?(:server) do
res =
HTTPotion.post(
url,
body: options[:body],
headers: options[:headers]
)
Griffin.JSON.parse!(res.body)
else
opts = to_map(options)
headers = to_map(opts["headers"])
res = JS.embed("fetch(url, { body: opts.body, headers: headers, method: 'POST' })")
res.then(fn r -> r.json(nil) end)
end
await(json)
end
defp to_map(keywords) do
Enum.reduce(keywords, %{}, fn {k, v}, acc ->
k = Atom.to_string(k)
Map.merge(acc, %{k => v})
end)
end
end
| 22.439024 | 91 | 0.554348 |
1ccaf67a42e3f4f946657cb4dc2c8d6235f00955 | 1,927 | ex | Elixir | clients/health_care/lib/google_api/health_care/v1beta1/model/list_dicom_stores_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/health_care/lib/google_api/health_care/v1beta1/model/list_dicom_stores_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/health_care/lib/google_api/health_care/v1beta1/model/list_dicom_stores_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.HealthCare.V1beta1.Model.ListDicomStoresResponse do
  @moduledoc """
  Lists the DICOM stores in the given dataset.
  ## Attributes
  * `dicomStores` (*type:* `list(GoogleApi.HealthCare.V1beta1.Model.DicomStore.t)`, *default:* `nil`) - The returned DICOM stores. Won't be more DICOM stores than the value of page_size in the request.
  * `nextPageToken` (*type:* `String.t`, *default:* `nil`) - Token to retrieve the next page of results or empty if there are no more results in the list.
  """
  # NOTE: generated by the Google API code generator — regenerate rather
  # than editing the fields below by hand.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :dicomStores => list(GoogleApi.HealthCare.V1beta1.Model.DicomStore.t()) | nil,
          :nextPageToken => String.t() | nil
        }
  field(:dicomStores, as: GoogleApi.HealthCare.V1beta1.Model.DicomStore, type: :list)
  field(:nextPageToken)
end
# Delegates Poison decoding to the model's generated decode/2.
defimpl Poison.Decoder, for: GoogleApi.HealthCare.V1beta1.Model.ListDicomStoresResponse do
  def decode(value, options) do
    GoogleApi.HealthCare.V1beta1.Model.ListDicomStoresResponse.decode(value, options)
  end
end
# Delegates Poison encoding to the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.HealthCare.V1beta1.Model.ListDicomStoresResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 38.54 | 203 | 0.744681 |
1ccb1d9bbc8e7b575af3bcb8bcad7ea7c7d1471f | 411 | ex | Elixir | lib/events/adhoc.ex | feihong/event-announcer | b73d6f0c286b1101accb0898b16e658b68f9bb01 | [
"Apache-2.0"
] | null | null | null | lib/events/adhoc.ex | feihong/event-announcer | b73d6f0c286b1101accb0898b16e658b68f9bb01 | [
"Apache-2.0"
] | null | null | null | lib/events/adhoc.ex | feihong/event-announcer | b73d6f0c286b1101accb0898b16e658b68f9bb01 | [
"Apache-2.0"
] | null | null | null | defmodule Events.Adhoc do
@events_file "adhoc_events.json"
@keywords Application.fetch_env!(:events, :keywords)
@doc """
Fetch adhoc events from adhoc_events.json file.
"""
def fetch_all() do
if File.exists?(@events_file) do
@events_file
|> Events.Util.from_json_file()
|> Enum.map(fn evt -> Events.Util.match_keywords(evt, @keywords) end)
else
[]
end
end
end
| 22.833333 | 75 | 0.659367 |
1ccb3a50ba572d26c1cae31fde360662e0dc0277 | 1,233 | ex | Elixir | apps/admin_api/lib/admin_api/v1/channels/user_channel.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/admin_api/lib/admin_api/v1/channels/user_channel.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/admin_api/lib/admin_api/v1/channels/user_channel.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# credo:disable-for-this-file
defmodule AdminAPI.V1.UserChannel do
@moduledoc """
Represents the user channel.
"""
use Phoenix.Channel, async: false
alias EWalletDB.User
alias EWallet.EndUserPolicy
def join(
"user:" <> user_id,
_params,
%{
assigns: %{auth: auth}
} = socket
) do
with %User{} = user <- User.get(user_id) || User.get_by_provider_user_id(user_id),
{:ok, _} <- EndUserPolicy.authorize(:listen, auth, user) do
{:ok, socket}
else
_ ->
{:error, :forbidden_channel}
end
end
def join(_, _, _), do: {:error, :invalid_parameter}
end
| 29.357143 | 86 | 0.673966 |
1ccb3ed39927062388c14566f0900ceda144c0ee | 388 | ex | Elixir | web/models/location.ex | colbydehart/MartaWhistle | 852d1aaecb1fe5705fdcaab30283870613f6a66f | [
"MIT"
] | null | null | null | web/models/location.ex | colbydehart/MartaWhistle | 852d1aaecb1fe5705fdcaab30283870613f6a66f | [
"MIT"
] | null | null | null | web/models/location.ex | colbydehart/MartaWhistle | 852d1aaecb1fe5705fdcaab30283870613f6a66f | [
"MIT"
] | null | null | null | defmodule TrainWhistle.Location do
use TrainWhistle.Web, :model
schema "locations" do
field :name, :string
field :lat, :float
field :long, :float
timestamps()
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:name])
|> validate_required([:name])
end
end
| 18.47619 | 56 | 0.626289 |
1ccb5df116c5f9a50f237a3b7caa23b362743e2a | 3,689 | ex | Elixir | lib/livebook_web/endpoint.ex | aleDsz/livebook | 3ad817ac69b8459b684ff8d00c879ae7787b6dcc | [
"Apache-2.0"
] | null | null | null | lib/livebook_web/endpoint.ex | aleDsz/livebook | 3ad817ac69b8459b684ff8d00c879ae7787b6dcc | [
"Apache-2.0"
] | null | null | null | lib/livebook_web/endpoint.ex | aleDsz/livebook | 3ad817ac69b8459b684ff8d00c879ae7787b6dcc | [
"Apache-2.0"
] | null | null | null | defmodule LivebookWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :livebook
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
@session_options [
store: :cookie,
key: "_livebook_key",
signing_salt: "deadbook",
same_site: "Lax"
]
# Don't check the origin as we don't know how the web app is gonna be accessed.
# It runs locally, but may be exposed via IP or domain name. The WebSocket
# connection is already protected from CSWSH by using CSRF token.
@websocket_options [
check_origin: false,
connect_info: [:user_agent, :uri, session: @session_options]
]
socket "/live", Phoenix.LiveView.Socket, websocket: @websocket_options
socket "/socket", LivebookWeb.Socket, websocket: @websocket_options
# We use Escript for distributing Livebook, so we don't have access to the static
# files at runtime in the prod environment. To overcome this we load contents of
# those files at compilation time, so that they become a part of the executable
# and can be served from memory.
defmodule AssetsMemoryProvider do
use LivebookWeb.MemoryProvider,
from: Path.expand("../../static", __DIR__),
gzip: true
end
defmodule AssetsFileSystemProvider do
use LivebookWeb.FileSystemProvider,
from: "tmp/static_dev"
end
# Serve static files at "/"
if code_reloading? do
# In development we use assets from tmp/static_dev (rebuilt dynamically on every change).
# Note that this directory doesn't contain predefined files (e.g. images), so we also
# use `AssetsMemoryProvider` to serve those from static/.
plug LivebookWeb.StaticPlug,
at: "/",
file_provider: AssetsFileSystemProvider,
gzip: false
end
plug LivebookWeb.StaticPlug,
at: "/",
file_provider: AssetsMemoryProvider,
gzip: true
plug :force_ssl
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
end
plug Phoenix.LiveDashboard.RequestLogger,
param_key: "request_logger",
cookie_key: "request_logger"
plug Plug.RequestId
plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
plug Plug.MethodOverride
plug Plug.Head
plug Plug.Session, @session_options
# Run custom plugs from the app configuration
plug LivebookWeb.ConfiguredPlug
plug LivebookWeb.Router
@plug_ssl Plug.SSL.init(host: {Application, :get_env, [:livebook, :force_ssl_host, nil]})
def force_ssl(conn, _opts) do
force_ssl_host = Application.get_env(:livebook, :force_ssl_host, nil)
if force_ssl_host do
Plug.SSL.call(conn, @plug_ssl)
else
conn
end
end
def access_struct_url() do
base =
case struct_url() do
%URI{scheme: "https", port: 0} = uri ->
%{uri | port: Livebook.Utils.get_port(__MODULE__.HTTPS, 433)}
%URI{scheme: "http", port: 0} = uri ->
%{uri | port: Livebook.Utils.get_port(__MODULE__.HTTP, 80)}
%URI{} = uri ->
uri
end
base = update_in(base.path, &(&1 || "/"))
if Livebook.Config.auth_mode() == :token do
token = Application.fetch_env!(:livebook, :token)
%{base | query: "token=" <> token}
else
base
end
end
def access_url do
URI.to_string(access_struct_url())
end
end
| 29.277778 | 93 | 0.692057 |
1ccb67cd08312d7fb7757856db52bfb49180d508 | 404 | ex | Elixir | lib/matchmaker/matchmaker_supervisor.ex | spawnphile/garuda | f394e38fbbbcf89380cac4b331f21e3be0fd8518 | [
"MIT"
] | 4 | 2021-02-01T12:20:24.000Z | 2021-07-29T08:37:50.000Z | lib/matchmaker/matchmaker_supervisor.ex | spawnphile/garuda | f394e38fbbbcf89380cac4b331f21e3be0fd8518 | [
"MIT"
] | 2 | 2021-06-10T11:22:48.000Z | 2021-07-30T06:09:10.000Z | lib/matchmaker/matchmaker_supervisor.ex | spawnphile/garuda | f394e38fbbbcf89380cac4b331f21e3be0fd8518 | [
"MIT"
] | null | null | null | defmodule Garuda.Matchmaker.MatchmakerSupervisor do
@moduledoc false
# Supervises the core matchmaking process
use Supervisor
alias Garuda.MatchMaker.Matcher
def start_link(opts \\ []) do
Supervisor.start_link(__MODULE__, opts, name: __MODULE__)
end
@impl true
def init(_opts) do
children = [
Matcher
]
Supervisor.init(children, strategy: :one_for_one)
end
end
| 20.2 | 61 | 0.720297 |
1ccbe1471e079c73d17596e3bbb167c9c4d701b9 | 1,487 | ex | Elixir | lib/pluggy/models/group.ex | TE4-David-Sundqvist/Pro1-Docsify | b6857195508c1d6135f9084cc383e853468bf012 | [
"MIT"
] | null | null | null | lib/pluggy/models/group.ex | TE4-David-Sundqvist/Pro1-Docsify | b6857195508c1d6135f9084cc383e853468bf012 | [
"MIT"
] | null | null | null | lib/pluggy/models/group.ex | TE4-David-Sundqvist/Pro1-Docsify | b6857195508c1d6135f9084cc383e853468bf012 | [
"MIT"
] | null | null | null | defmodule Pluggy.Group do
defstruct(id: nil, name: "")
alias Pluggy.Group
alias Pluggy.Student
def get(id) do
Postgrex.query!(DB, "SELECT id, name FROM groups WHERE id = $1 LIMIT 1", [id],
pool: DBConnection.ConnectionPool
).rows
|> to_struct
end
  # Looks up group ids by name.
  # NOTE(review): unlike get/1 this returns the raw %Postgrex.Result{} rather
  # than the id itself — callers must read `.rows`; confirm this is intended.
  def get_id(name) do
    Postgrex.query!(DB, "SELECT id FROM groups WHERE name = $1", [name], pool: DBConnection.ConnectionPool)
  end
def get_all() do
Postgrex.query!(DB, "SELECT id FROM groups", [], pool: DBConnection.ConnectionPool).rows
|> get_all()
end
defp get_all([]), do: []
defp get_all([[head]|tail]), do: [Group.get(head)|get_all(tail)]
def get_students(group_id) do
Postgrex.query!(DB, "SELECT id FROM students WHERE group_id = $1", [group_id], pool: DBConnection.ConnectionPool).rows
|> _get_students()
end
defp _get_students([]), do: []
defp _get_students([[head]|tail]), do: [Student.get(head)|_get_students(tail)]
  # Inserts a new group; the parameterized query keeps `name` safely escaped.
  def create(name, school_id) do
    Postgrex.query!(DB, "INSERT INTO groups(name, school_id) VALUES($1, $2)", [name, school_id], pool: DBConnection.ConnectionPool)
  end
def delete(id) do
Postgrex.query!(DB, "SELECT id FROM students WHERE group_id = $1", [id], pool: DBConnection.ConnectionPool).rows
|> Enum.each(fn [id] -> Student.delete(id) end)
Postgrex.query!(DB, "DELETE FROM groups WHERE id = $1", [id], pool: DBConnection.ConnectionPool)
end
def to_struct([[id, name]]) do
%Group{id: id, name: name}
end
end
| 32.326087 | 131 | 0.672495 |
1ccbe2a04d4ec7b48c945634abe011eec5565d80 | 760 | ex | Elixir | test/support/live_case.ex | paulgoetze/adoptoposs | 1a143917ac5a192f12054fff4410a1ee18935353 | [
"MIT"
] | 120 | 2020-03-30T13:58:40.000Z | 2022-03-30T10:17:50.000Z | test/support/live_case.ex | paulgoetze/adoptoposs | 1a143917ac5a192f12054fff4410a1ee18935353 | [
"MIT"
] | 34 | 2020-04-01T23:11:59.000Z | 2022-03-13T08:14:37.000Z | test/support/live_case.ex | paulgoetze/adoptoposs | 1a143917ac5a192f12054fff4410a1ee18935353 | [
"MIT"
] | 16 | 2020-04-24T17:14:23.000Z | 2022-03-27T22:32:48.000Z | defmodule AdoptopossWeb.LiveCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a live view.
Such tests rely on `AdoptopossWeb.ConnCase`,
`Phoenix.LiveViewTest` and also import other
functionality to make it easier to build common
data structures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
use AdoptopossWeb.ConnCase
import Phoenix.LiveViewTest
end
end
end
| 28.148148 | 56 | 0.744737 |
1ccbe6c26dd3e92f2cc761ffd44b7b7bfa25d60a | 1,511 | ex | Elixir | apps/zones/lib/zone.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 42 | 2019-05-29T16:05:30.000Z | 2021-08-09T16:03:37.000Z | apps/zones/lib/zone.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 872 | 2019-05-29T17:55:50.000Z | 2022-03-30T09:28:43.000Z | apps/zones/lib/zone.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 12 | 2019-07-01T18:33:21.000Z | 2022-03-10T02:13:57.000Z | defmodule Zones.Zone do
@moduledoc """
Represents a commuter rail zone
"""
@type t :: String.t()
@doc """
Returns whether or not this is a "combo" zone.
A combo zone uses one fare between the terminus stop, and another between all other stops.
iex> Zones.Zone.combo_zone?("1A-1")
true
iex> Zones.Zone.combo_zone?("1A-2")
true
iex> Zones.Zone.combo_zone?("1A")
false
iex> Zones.Zone.combo_zone?("2")
false
"""
@spec combo_zone?(t()) :: boolean()
def combo_zone?(zone), do: String.contains?(zone, "-")
@doc """
Returns the zone designation between the stop and the terminus.
Intended to help with combo zones, but safe if given a non-combo zone.
iex> Zones.Zone.terminus_zone("1A-1")
"1A"
iex> Zones.Zone.terminus_zone("1A-2")
"1A"
iex> Zones.Zone.terminus_zone("1A")
"1A"
"""
@spec terminus_zone(t()) :: t()
def terminus_zone(zone) do
if combo_zone?(zone) do
zone
|> String.split("-")
|> List.first()
else
zone
end
end
@doc """
Returns the zone designation between the stop and other non-terminus stops.
Intended to help with combo zones, but safe if given a non-combo zone.
iex> Zones.Zone.general_zone("1A-1")
"1"
iex> Zones.Zone.general_zone("1A-2")
"2"
iex> Zones.Zone.general_zone("1A")
"1A"
"""
@spec general_zone(t()) :: t()
def general_zone(zone) do
if combo_zone?(zone) do
zone
|> String.split("-")
|> List.last()
else
zone
end
end
end
| 22.220588 | 92 | 0.630046 |
1ccbea11e2145d2837e241f93875f6ce8291602f | 1,320 | ex | Elixir | lib/brando_admin/live/pages/page_list_live.ex | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | 4 | 2020-10-30T08:40:38.000Z | 2022-01-07T22:21:37.000Z | lib/brando_admin/live/pages/page_list_live.ex | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | 1,162 | 2020-07-05T11:20:15.000Z | 2022-03-31T06:01:49.000Z | lib/brando_admin/live/pages/page_list_live.ex | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | null | null | null | defmodule BrandoAdmin.Pages.PageListLive do
use BrandoAdmin.LiveView.Listing, schema: Brando.Pages.Page
alias BrandoAdmin.Components.Content
alias Surface.Components.LivePatch
import Brando.Gettext
  # Renders the listing page: a header with a "create" link plus the default
  # content listing for the Page schema. The assigns used here (@schema,
  # @blueprint, @current_user, @uri, @params) are provided by
  # BrandoAdmin.LiveView.Listing (see the `use` at the top of the module).
  def render(assigns) do
    ~F"""
    <Content.Header
      title={gettext("Pages & sections")}
      subtitle={gettext("Overview")}>
      <LivePatch to="/admin/pages/create" class="primary">
        Create new
      </LivePatch>
    </Content.Header>

    <Content.List
      id={"content_listing_#{@schema}_default"}
      blueprint={@blueprint}
      current_user={@current_user}
      uri={@uri}
      params={@params}
      listing={:default} />
    """
  end
def handle_event("create_subpage", %{"id" => parent_id, "language" => language}, socket) do
{:noreply,
push_redirect(socket,
to:
Brando.routes().admin_live_path(socket, BrandoAdmin.Pages.PageCreateLive,
parent_id: parent_id,
language: language
)
)}
end
def handle_event("create_fragment", %{"id" => page_id, "language" => language}, socket) do
{:noreply,
push_redirect(socket,
to:
Brando.routes().admin_live_path(socket, BrandoAdmin.Pages.PageFragmentCreateLive,
page_id: page_id,
language: language
)
)}
end
end
| 26.4 | 93 | 0.630303 |
1ccbeeb02e2169703510222895da2de652fcfd26 | 7,284 | ex | Elixir | lib/ex_oauth2_provider/oauth2/authorization/strategy/code.ex | gozego/ex_oauth2_provider | d3a7658d28233dda2dfdef7ed397b5b440a2f737 | [
"Unlicense",
"MIT"
] | 2 | 2021-04-25T20:59:53.000Z | 2021-07-13T22:49:20.000Z | lib/ex_oauth2_provider/oauth2/authorization/strategy/code.ex | gozego/ex_oauth2_provider | d3a7658d28233dda2dfdef7ed397b5b440a2f737 | [
"Unlicense",
"MIT"
] | null | null | null | lib/ex_oauth2_provider/oauth2/authorization/strategy/code.ex | gozego/ex_oauth2_provider | d3a7658d28233dda2dfdef7ed397b5b440a2f737 | [
"Unlicense",
"MIT"
] | null | null | null | defmodule ExOauth2Provider.Authorization.Code do
@moduledoc """
Methods for authorization code flow.
The flow consists of three method calls:
1. `preauthorize(resource_owner, request)`
This validates the request. If a resource owner already have been
authenticated previously it'll respond with a redirect tuple.
2. `authorize(resource_owner, request)`
This confirms a resource owner authorization, and will generate an access
token.
3. `deny(resource_owner, request)`
This rejects a resource owner authorization.
---
In a controller it could look like this:
```elixir
alias ExOauth2Provider.Authorization
def new(conn, params) do
case Authorization.preauthorize(current_resource_owner(conn), params) do
{:ok, client, scopes} ->
render(conn, "new.html", params: params, client: client, scopes: scopes)
{:native_redirect, %{code: code}} ->
redirect(conn, to: oauth_authorization_path(conn, :show, code))
{:redirect, redirect_uri} ->
redirect(conn, external: redirect_uri)
{:error, error, status} ->
conn
|> put_status(status)
|> render("error.html", error: error)
end
end
def create(conn, params) do
conn
|> current_resource_owner
|> Authorization.authorize(params)
|> redirect_or_render(conn)
end
def delete(conn, params) do
conn
|> current_resource_owner
|> Authorization.deny(params)
|> redirect_or_render(conn)
end
```
"""
alias ExOauth2Provider.OauthAccessTokens
alias ExOauth2Provider.OauthAccessGrants
alias ExOauth2Provider.RedirectURI
alias ExOauth2Provider.Authorization.Utils.Response
alias ExOauth2Provider.Utils.Error
alias ExOauth2Provider.Authorization.Utils
alias ExOauth2Provider.Authorization.Utils.Response
alias ExOauth2Provider.Scopes
@doc """
Validates an authorization code flow request.
Will check if there's already an existing access token with same scope and client
for the resource owner.
## Example
resource_owner
|> ExOauth2Provider.Authorization.preauthorize(%{
"client_id" => "Jf5rM8hQBc",
"response_type" => "code"
})
## Response
{:ok, client, scopes} # Show request page with client and scopes
{:error, %{error: error, error_description: _}, http_status} # Show error page with error and http status
{:redirect, redirect_uri} # Redirect
{:native_redirect, %{code: code}} # Redirect to :show page
"""
  def preauthorize(resource_owner, %{} = request) do
    resource_owner
    # Load the client and normalize the raw params into a pipeline map.
    |> Utils.prehandle_request(request)
    |> validate_request
    # If a token with the same client+scopes already exists, record it so the
    # consent screen can be skipped.
    |> check_previous_authorization
    |> reissue_grant
    |> Response.preauthorize_response
  end
defp check_previous_authorization(%{error: _} = params), do: params
defp check_previous_authorization(%{resource_owner: resource_owner, client: client, request: %{"scope" => scopes}} = params) do
case OauthAccessTokens.get_matching_token_for(resource_owner, client, scopes) do
nil -> params
token -> Map.merge(params, %{access_token: token})
end
end
defp reissue_grant(%{error: _} = params), do: params
defp reissue_grant(%{access_token: _} = params) do
params
|> issue_grant
end
defp reissue_grant(params), do: params
@doc """
Authorizes an authorization code flow request.
This is used when a resource owner has authorized access. If successful,
this will generate an access token grant.
## Example
resource_owner
|> ExOauth2Provider.Authorization.authorize(%{
"client_id" => "Jf5rM8hQBc",
"response_type" => "code",
"scope" => "read write", # Optional
"state" => "46012", # Optional
"redirect_uri" => "https://example.com/" # Optional
})
## Response
{:ok, code} # A grant was generated
{:error, %{error: error, error_description: _}, http_status} # Error occurred
{:redirect, redirect_uri} # Redirect
{:native_redirect, %{code: code}} # Redirect to :show page
"""
def authorize(resource_owner, %{} = request) do
resource_owner
|> Utils.prehandle_request(request)
|> validate_request
|> issue_grant
|> Response.authorize_response
end
  defp issue_grant(%{error: _} = params), do: params

  # Creates an access grant for the (already validated) request. On success
  # the grant is stored under :grant; on failure the changeset error is
  # recorded so the response step can report it.
  defp issue_grant(%{resource_owner: resource_owner, client: application, request: request} = params) do
    # Keep only "redirect_uri"/"scope" from the raw request (absent keys stay
    # absent — important for downstream changeset defaults), rename "scope" to
    # :scopes, atomize the remaining keys (safe: the key set is fixed by
    # Map.take/2 above), and attach the configured grant TTL.
    grant_params = request
    |> Map.take(["redirect_uri", "scope"])
    |> Map.new(fn {k, v} ->
      case k do
        "scope" -> {:scopes, v}
        _ -> {String.to_atom(k), v}
      end
    end)
    |> Map.merge(%{expires_in: ExOauth2Provider.Config.authorization_code_expires_in})

    case OauthAccessGrants.create_grant(resource_owner, application, grant_params) do
      {:ok, grant} -> Map.merge(params, %{grant: grant})
      {:error, error} -> Error.add_error(params, error)
    end
  end
@doc """
Rejects an authorization code flow request.
This is used when a resource owner has rejected access.
## Example
resource_owner
|> ExOauth2Provider.Authorization.deny(%{
"client_id" => "Jf5rM8hQBc",
"response_type" => "code"
})
## Response type
{:error, %{error: error, error_description: _}, http_status} # Error occurred
{:redirect, redirect_uri} # Redirect
"""
def deny(resource_owner, %{} = request) do
resource_owner
|> Utils.prehandle_request(request)
|> validate_request
|> Error.add_error(Error.access_denied())
|> Response.deny_response
end
  defp validate_request(%{error: _} = params), do: params

  # Runs the validations shared by all three flows (preauthorize / authorize /
  # deny). Each step becomes a no-op once an :error has been recorded.
  defp validate_request(%{request: _, client: _} = params) do
    params
    |> validate_resource_owner
    |> validate_redirect_uri
    |> validate_scopes
  end
defp validate_resource_owner(%{error: _} = params), do: params
defp validate_resource_owner(%{resource_owner: resource_owner} = params) do
case resource_owner do
%{id: _} -> params
_ -> Error.add_error(params, Error.invalid_request())
end
end
defp validate_scopes(%{error: _} = params), do: params
defp validate_scopes(%{request: %{"scope" => scopes}, client: client} = params) do
scopes = scopes |> Scopes.to_list
server_scopes = client.scopes |> Scopes.to_list |> Scopes.default_to_server_scopes
case Scopes.all?(server_scopes, scopes) do
true -> params
false -> Error.add_error(params, Error.invalid_scopes())
end
end
defp validate_redirect_uri(%{error: _} = params), do: params
defp validate_redirect_uri(%{request: %{"redirect_uri" => redirect_uri}, client: client} = params) do
cond do
RedirectURI.native_redirect_uri?(redirect_uri) -> params
RedirectURI.valid_for_authorization?(redirect_uri, client.redirect_uri) -> params
true -> Error.add_error(params, Error.invalid_redirect_uri())
end
end
defp validate_redirect_uri(params), do: Error.add_error(params, Error.invalid_request())
end
| 33.56682 | 129 | 0.653213 |
1ccc14f11c235150627ffa1babc4247d7f74ba11 | 1,058 | ex | Elixir | lib/autocomplete.ex | ventuno/elixir-fundamentals | eda1388f9815f8f9238b236edea9ca55447e3d27 | [
"BSD-3-Clause"
] | 14 | 2018-04-18T12:58:54.000Z | 2020-09-14T16:40:07.000Z | lib/autocomplete.ex | ventuno/elixir-fundamentals | eda1388f9815f8f9238b236edea9ca55447e3d27 | [
"BSD-3-Clause"
] | 6 | 2018-02-27T18:25:57.000Z | 2021-10-19T02:40:08.000Z | lib/autocomplete.ex | ventuno/elixir-fundamentals | eda1388f9815f8f9238b236edea9ca55447e3d27 | [
"BSD-3-Clause"
] | 5 | 2018-02-28T16:10:53.000Z | 2019-04-18T11:18:05.000Z | defmodule Autocomplete do
require Logger
  # Public entry point: runs the lookup, timing it and logging the duration.
  def get_completions(hint) do
    # Switch this to use get_completions_good
    {time, result} = measure(fn -> get_completions_bad(hint) end)
    Logger.info "Completed search for \"#{hint}\" in #{time}s"
    result
  end
'''
This is the enum (non-stream) version of the autocomplete function.
'''
defp get_completions_bad(hint) when is_bitstring(hint) and byte_size(hint) > 2 do
lower_hint = String.downcase(hint)
"words.txt"
|> File.read!
|> String.split("\n")
|> Enum.filter(fn w -> String.starts_with?(w, lower_hint) end)
end
'''
Implement something similar here, using streams
'''
defp get_completions_good(hint) when is_bitstring(hint) and byte_size(hint) > 2 do
lower_hint = String.downcase(hint)
# PUT EXERCISE 10 SOLUTION HERE
end
# a function used to benchmark the autocomplete
defp measure(function) do
parts = function
|> :timer.tc
time = elem(parts, 0)
|> Kernel./(1_000_000)
{time, elem(parts, 1)}
end
end | 25.804878 | 84 | 0.665406 |
1ccc22d655f036634a58b799ae5194829b4eb798 | 1,257 | ex | Elixir | lib/mipha/topics/node.ex | ZPVIP/mipha | a7df054f72eec7de88b60d94c501488375bdff6a | [
"MIT"
] | 156 | 2018-06-01T19:52:32.000Z | 2022-02-03T10:58:10.000Z | lib/mipha/topics/node.ex | ZPVIP/mipha | a7df054f72eec7de88b60d94c501488375bdff6a | [
"MIT"
] | 139 | 2018-07-10T01:57:23.000Z | 2021-08-02T21:29:24.000Z | lib/mipha/topics/node.ex | ZPVIP/mipha | a7df054f72eec7de88b60d94c501488375bdff6a | [
"MIT"
] | 29 | 2018-07-17T08:43:45.000Z | 2021-12-14T13:45:30.000Z | defmodule Mipha.Topics.Node do
@moduledoc false
use Ecto.Schema
import Ecto.{Changeset, Query}
alias Mipha.Repo
alias Mipha.Topics.{Topic, Node}
@type t :: %Node{}
schema "nodes" do
field :name, :string
field :position, :integer
field :summary, :string
belongs_to :parent, Node, foreign_key: :parent_id
has_many :topics, Topic
has_many :children, Node, foreign_key: :parent_id
timestamps()
end
@doc """
Returns the children node.
"""
@spec is_child(Ecto.Queryable.t()) :: Ecto.Query.t()
def is_child(query \\ __MODULE__), do: from(q in query, where: not is_nil(q.parent_id))
@doc """
Returns the parent of node.
"""
@spec is_parent(Ecto.Queryable.t()) :: Ecto.Query.t()
def is_parent(query \\ __MODULE__), do: from(q in query, where: is_nil(q.parent_id))
@doc """
Preloads the children of a node.
"""
@spec preload_children(t()) :: t()
def preload_children(node), do: Repo.preload(node, :children)
@doc false
def changeset(node, attrs) do
permitted_attrs = ~w(
name
summary
position
parent_id
)a
required_attrs = ~w(
name
)a
node
|> cast(attrs, permitted_attrs)
|> validate_required(required_attrs)
end
end
| 20.95 | 89 | 0.645982 |
1ccc2cd5e4d66bbdf7a0391cf4903922f94add09 | 670 | exs | Elixir | lib/elixir/test/elixir/bitwise_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 19,291 | 2015-01-01T02:42:49.000Z | 2022-03-31T21:01:40.000Z | lib/elixir/test/elixir/bitwise_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 8,082 | 2015-01-01T04:16:23.000Z | 2022-03-31T22:08:02.000Z | lib/elixir/test/elixir/bitwise_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 3,472 | 2015-01-03T04:11:56.000Z | 2022-03-29T02:07:30.000Z | Code.require_file("test_helper.exs", __DIR__)
defmodule BitwiseTest do
  use ExUnit.Case, async: true

  import Bitwise

  # Also runs the doctests embedded in the Bitwise module itself.
  doctest Bitwise

  # Named-function forms.
  test "bnot/1" do
    assert bnot(1) == -2
  end

  test "band/2" do
    assert band(1, 1) == 1
  end

  test "bor/2" do
    assert bor(0, 1) == 1
  end

  test "bxor/2" do
    assert bxor(1, 1) == 0
  end

  test "bsl/2" do
    assert bsl(1, 1) == 2
  end

  test "bsr/2" do
    assert bsr(1, 1) == 0
  end

  # Operator forms (same functions, infix syntax).
  test "band (&&&)" do
    assert (1 &&& 1) == 1
  end

  test "bor (|||)" do
    assert (0 ||| 1) == 1
  end

  test "bsl (<<<)" do
    assert 1 <<< 1 == 2
  end

  test "bsr (>>>)" do
    assert 1 >>> 1 == 0
  end
end
| 13.673469 | 45 | 0.523881 |
1ccc404af88939b7250e9a5db06172f0d190de75 | 566 | exs | Elixir | test/views/error_view_test.exs | zcdunn/elixirstatus-web | 9df758dce01c676403effdeb3231db0529285e6c | [
"MIT"
] | 299 | 2015-06-24T09:14:27.000Z | 2022-03-03T13:31:59.000Z | test/views/error_view_test.exs | zcdunn/elixirstatus-web | 9df758dce01c676403effdeb3231db0529285e6c | [
"MIT"
] | 63 | 2015-07-04T19:42:12.000Z | 2021-12-10T14:27:28.000Z | test/views/error_view_test.exs | zcdunn/elixirstatus-web | 9df758dce01c676403effdeb3231db0529285e6c | [
"MIT"
] | 49 | 2015-07-06T13:42:43.000Z | 2021-12-22T21:38:04.000Z | defmodule ElixirStatus.ErrorViewTest do
use ElixirStatus.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(ElixirStatus.ErrorView, "404.html", []) == "404 - Page not found"
end
test "render 500.html" do
assert render_to_string(ElixirStatus.ErrorView, "500.html", []) == "Server internal error"
end
test "render any other" do
assert render_to_string(ElixirStatus.ErrorView, "505.html", []) == "Server internal error"
end
end
| 29.789474 | 94 | 0.727915 |
1ccc572aead4cb3e6d8f1e2b11022d56311a148d | 766 | exs | Elixir | apps/day1/mix.exs | DFilipeS/advent-of-code-2020 | 89acbc1dcc2352e91463b0faebda868417d7e5d7 | [
"Unlicense"
] | null | null | null | apps/day1/mix.exs | DFilipeS/advent-of-code-2020 | 89acbc1dcc2352e91463b0faebda868417d7e5d7 | [
"Unlicense"
] | null | null | null | apps/day1/mix.exs | DFilipeS/advent-of-code-2020 | 89acbc1dcc2352e91463b0faebda868417d7e5d7 | [
"Unlicense"
] | null | null | null | defmodule Day1.MixProject do
use Mix.Project
def project do
[
app: :day1,
version: "0.1.0",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.11",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"},
# {:sibling_app_in_umbrella, in_umbrella: true}
]
end
end
| 22.529412 | 88 | 0.563969 |
1cccca7658cdddc195054ac33dea30c0df9bb9e2 | 1,125 | exs | Elixir | config/config.exs | mpahrens/Nehe_Elixir | ba40e0390b2d524643ec29b7de046cf13b325daa | [
"MIT"
] | null | null | null | config/config.exs | mpahrens/Nehe_Elixir | ba40e0390b2d524643ec29b7de046cf13b325daa | [
"MIT"
] | null | null | null | config/config.exs | mpahrens/Nehe_Elixir | ba40e0390b2d524643ec29b7de046cf13b325daa | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :nehe_elixir, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:nehe_elixir, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.290323 | 73 | 0.752889 |
1cccebbe2d618035d59093d0561e1943fe33d234 | 2,256 | exs | Elixir | test/membrane_file/sink_multi_test.exs | nathanjohnson320/membrane_file_plugin | 1011a0a20b79962a84fc6129f96dd598b58c4abb | [
"Apache-2.0"
] | null | null | null | test/membrane_file/sink_multi_test.exs | nathanjohnson320/membrane_file_plugin | 1011a0a20b79962a84fc6129f96dd598b58c4abb | [
"Apache-2.0"
] | null | null | null | test/membrane_file/sink_multi_test.exs | nathanjohnson320/membrane_file_plugin | 1011a0a20b79962a84fc6129f96dd598b58c4abb | [
"Apache-2.0"
] | null | null | null | defmodule Membrane.File.Sink.MultiTest do
use ExUnit.Case
use Mockery
alias Membrane.File
alias File.{CommonFile, SplitEvent}
alias Membrane.Buffer
@module File.Sink.Multi
use File.TestSupport.Common
def state(_ctx) do
%{
state: %{
location: "",
fd: nil,
naming_fun: fn _ -> "" end,
split_on: SplitEvent,
index: 0
}
}
end
setup_all :state
describe "handle_write" do
setup :file
test "should write received chunk and request demand", %{state: state} do
%{fd: file} = state
mock(CommonFile, [binwrite: 2], :ok)
assert {{:ok, demand: :input}, state} ==
@module.handle_write(:input, %Buffer{payload: <<1, 2, 3>>}, nil, state)
assert_called(CommonFile, :binwrite, [^file, <<1, 2, 3>>], 1)
end
end
describe "handle_event" do
setup :file
setup %{state: state} do
%{state: %{state | naming_fun: fn x -> "#{x}" end}}
end
test "should close current file and open new one if event type is state.split_on", %{
state: state
} do
mock(CommonFile, [close: 1], fn state -> {:ok, %{state | fd: nil}} end)
mock(CommonFile, [open: 3], fn "1", _mode, state -> {:ok, %{state | fd: :new_file}} end)
assert {:ok, %{state | index: 1, fd: :new_file}} ==
@module.handle_event(:input, %SplitEvent{}, nil, state)
assert_called(CommonFile, :close, [^state], 1)
assert_called(CommonFile, :open, ["1", _mode, _state], 1)
end
test "should not close current file and open new one if event type is not state.split_on", %{
state: state
} do
mock(CommonFile, [close: 1], fn state -> {:ok, %{state | fd: nil}} end)
mock(CommonFile, [open: 3], fn "1", _mode, state -> {:ok, %{state | fd: :new_file}} end)
assert {:ok, %{state | index: 0, fd: :file}} ==
@module.handle_event(:input, :whatever, nil, state)
end
end
describe "handle_prepared_to_stopped" do
setup :file
test "should increment file index", %{state: state} do
mock(CommonFile, [close: 1], fn state -> {:ok, %{state | fd: nil}} end)
assert {:ok, %{index: 1}} = @module.handle_prepared_to_stopped(%{}, state)
end
end
end
| 28.923077 | 97 | 0.587323 |
1ccd04c184dbe704134f362951e91c7eb8a16c33 | 1,454 | ex | Elixir | lib/magic_number.ex | themitigater/elixir-magic-number | 6b7b82bd5333835995ffea11bf5e98a3254c827a | [
"MIT"
] | null | null | null | lib/magic_number.ex | themitigater/elixir-magic-number | 6b7b82bd5333835995ffea11bf5e98a3254c827a | [
"MIT"
] | null | null | null | lib/magic_number.ex | themitigater/elixir-magic-number | 6b7b82bd5333835995ffea11bf5e98a3254c827a | [
"MIT"
] | null | null | null | defmodule MagicNumber do
@moduledoc """
The module to determine a file's type from its
[magic number](https://en.wikipedia.org/wiki/File_format#Magic_number).
"""
@typedoc """
A media type is a two-part identifier for file format. For example:
```elixir
{:application, :zip} # application/zip
{:image, :png} # image/png
```
See [IANA list of official media types](https://www.iana.org/assignments/media-types/media-types.xhtml).
"""
@type media_type :: {atom, atom}
  # Rules :: [{media_type, [binary]}]
  # Each entry maps a media type to the list of magic-number prefixes that
  # identify it; a format may have several valid prefixes (e.g. GIF87a/GIF89a).
  @rules [
    # image
    {{:image, :gif}, ["GIF87a", "GIF89a"]},
    {{:image, :jpeg}, [<<0xff, 0xd8, 0xff>>]},
    {{:image, :png}, [<<0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a>>]},
    {{:image, :tiff}, ["II*\0", "MM\0*"]},

    # application
    {{:application, :pdf}, ["%PDF"]},
    {{:application, :zip}, [
      <<0x50, 0x4b, 0x03, 0x04>>,
      <<0x50, 0x4b, 0x05, 0x06>>,
      <<0x50, 0x4b, 0x07, 0x08>>,
    ]},
    {{:application, :gzip}, [<<0x1f, 0x8b>>]},
  ]

  @doc """
  Determine media type from its contents.

  ## Examples

      iex> MagicNumber.detect("GIF89a...")
      {:ok, {:image, :gif}}

      iex> MagicNumber.detect(<<>>)
      :error
  """
  @spec detect(binary) :: {:ok, media_type} | :error
  # Compile-time codegen: one detect/1 clause is generated per magic prefix,
  # so the lookup compiles down to plain binary prefix matches.
  for {media_type, headers} <- @rules, magic <- headers do
    def detect(unquote(magic) <> _), do: {:ok, unquote(media_type)}
  end

  # error
  # Fallback: content too short or no known prefix matched.
  def detect(_), do: :error
| 25.068966 | 106 | 0.568776 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.