hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c65852df350193c0e6aa1821ed3b9e1022322da | 2,461 | exs | Elixir | test/credo/check/warning/raise_inside_rescue_test.exs | hrzndhrn/credo | 71a7b24a5ca8e7a48416e0cdfb42cf8a0fef9593 | [
"MIT"
] | 4,590 | 2015-09-28T06:01:43.000Z | 2022-03-29T08:48:57.000Z | test/credo/check/warning/raise_inside_rescue_test.exs | hrzndhrn/credo | 71a7b24a5ca8e7a48416e0cdfb42cf8a0fef9593 | [
"MIT"
] | 890 | 2015-11-16T21:07:07.000Z | 2022-03-29T08:52:07.000Z | test/credo/check/warning/raise_inside_rescue_test.exs | hrzndhrn/credo | 71a7b24a5ca8e7a48416e0cdfb42cf8a0fef9593 | [
"MIT"
] | 479 | 2015-11-17T19:42:40.000Z | 2022-03-29T00:09:21.000Z | defmodule Credo.Check.Warning.RaiseInsideRescueTest do
use Credo.Test.Case
@described_check Credo.Check.Warning.RaiseInsideRescue
test "it should NOT report expected code" do
"""
defmodule CredoSampleModule do
use ExUnit.Case
def catcher do
try do
raise "oops"
rescue
e in RuntimeError ->
Logger.warn("Something bad happened")
e ->
reraise e, System.stacktrace
end
end
end
"""
|> to_source_file
|> run_check(@described_check)
|> refute_issues()
end
test "it should NOT report expected code /2" do
"""
defmodule CredoSampleModule do
use ExUnit.Case
def catcher do
try do
raise "oops"
end
end
end
"""
|> to_source_file
|> run_check(@described_check)
|> refute_issues()
end
test "it should report a violation when raise appears inside of a rescue block" do
"""
defmodule CredoSampleModule do
use ExUnit.Case
def catcher do
try do
raise "oops"
rescue
e in RuntimeError ->
Logger.warn("Something bad happened")
raise e
end
end
end
"""
|> to_source_file
|> run_check(@described_check)
|> assert_issue(fn issue ->
assert "raise" == issue.trigger
assert 10 == issue.line_no
end)
end
test "it should report a violation when raise appears inside of a rescue block for an implicit try" do
"""
defmodule CredoSampleModule do
use ExUnit.Case
def catcher do
raise "oops"
rescue
e in RuntimeError ->
Logger.warn("Something bad happened")
raise e
end
end
"""
|> to_source_file
|> run_check(@described_check)
|> assert_issue(fn issue ->
assert "raise" == issue.trigger
assert 9 == issue.line_no
end)
end
test "it should report a violation when raise appears inside of an expression in rescue" do
"""
defmodule CredoSampleModule do
use ExUnit.Case
def catcher do
try do
raise "oops"
rescue
e -> Logger.warn("Something bad happened") && raise e
end
end
end
"""
|> to_source_file
|> run_check(@described_check)
|> assert_issue(fn issue ->
assert "raise" == issue.trigger
assert 8 == issue.line_no
end)
end
end
| 21.778761 | 104 | 0.584722 |
1c65920cd479b338503a41da1566aee19debf0e2 | 69 | ex | Elixir | testData/org/elixir_lang/parser_definition/matched_or_operation_parsing_test_case/List.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/matched_or_operation_parsing_test_case/List.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/matched_or_operation_parsing_test_case/List.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | [one: ()] || [two: ()]
[one: ()] ||| [two: ()]
[one: ()] or [two: ()] | 23 | 23 | 0.289855 |
1c65952aa20e9a584a018c0d06145156181b9dff | 164 | exs | Elixir | test/cain_test.exs | pfitz/cain | c1038a9d80e51213ee665149c585231b16c11b3f | [
"Apache-2.0"
] | 6 | 2019-12-17T12:40:19.000Z | 2022-01-18T07:25:20.000Z | test/cain_test.exs | pfitz/cain | c1038a9d80e51213ee665149c585231b16c11b3f | [
"Apache-2.0"
] | 9 | 2019-12-18T13:01:11.000Z | 2021-02-12T14:24:43.000Z | test/cain_test.exs | pfitz/cain | c1038a9d80e51213ee665149c585231b16c11b3f | [
"Apache-2.0"
] | 1 | 2020-09-16T09:19:08.000Z | 2020-09-16T09:19:08.000Z | defmodule CainTest do
use ExUnit.Case
# doctest Cain
# test "Get all process definitions" do
# assert Cain.Rest.call(Cain.Rest.Task.GetList)
# end
end
| 18.222222 | 51 | 0.707317 |
1c6596d44cff6c556441482f0037406dacf437a3 | 1,190 | ex | Elixir | lib/transformation.ex | joedevivo/open_scad | c8981396b088e795a4269d1ac8a66ef7a1ecc744 | [
"MIT"
] | 16 | 2018-02-12T22:06:00.000Z | 2022-01-04T05:07:35.000Z | lib/transformation.ex | joedevivo/open_scad | c8981396b088e795a4269d1ac8a66ef7a1ecc744 | [
"MIT"
] | 2 | 2019-12-07T15:54:10.000Z | 2019-12-07T19:21:14.000Z | lib/transformation.ex | joedevivo/open_scad | c8981396b088e795a4269d1ac8a66ef7a1ecc744 | [
"MIT"
] | null | null | null | defmodule OpenSCAD.Transformation do
@moduledoc """
Transformations are only different from Objects in that they can contain child
Actions, and this module defines the generic functionallity for traversing the
children on render.
"""
alias OpenSCAD.Action
defmacro __using__(_opts) do
quote do
use OpenSCAD.Action, has_children: true
defimpl OpenSCAD.Renderable do
def to_scad(me, opts), do: OpenSCAD.Transformation.to_scad(me, opts)
def type(_me), do: :transformation
end
end
end
def to_scad(transformation, opts) do
fname = Action.scad_name(transformation)
params = Action.scad_params(transformation)
child_opts = Keyword.update!(opts, :indent, &(&1 + 2))
[
OpenSCAD.Renderable.to_scad("#{fname}(#{params}){", opts),
cond do
is_list(transformation.children) ->
transformation.children
is_nil(transformation.children) ->
[]
true ->
[transformation.children]
end
|> Enum.map(&OpenSCAD.Renderable.to_scad(&1, child_opts)),
OpenSCAD.Renderable.to_scad("}", opts)
]
|> List.flatten()
|> Enum.join("\n")
end
end
| 25.869565 | 80 | 0.653782 |
1c65a331acd701313508070f7728cdae6bcd4cca | 123 | ex | Elixir | lib/teslamate/settings/units/temperature.ex | douglasevaristo/teslamate | d239023942b319e9446d2da316d0afbddf701b9d | [
"MIT"
] | 1 | 2020-08-31T10:21:12.000Z | 2020-08-31T10:21:12.000Z | lib/teslamate/settings/units/temperature.ex | douglasevaristo/teslamate | d239023942b319e9446d2da316d0afbddf701b9d | [
"MIT"
] | 94 | 2020-10-26T06:28:30.000Z | 2022-03-30T15:03:41.000Z | lib/teslamate/settings/units/temperature.ex | douglasevaristo/teslamate | d239023942b319e9446d2da316d0afbddf701b9d | [
"MIT"
] | 1 | 2021-03-26T15:46:37.000Z | 2021-03-26T15:46:37.000Z | defmodule TeslaMate.Settings.Units.Temperature do
use EctoEnum.Postgres, type: :unit_of_temperature, enums: [:C, :F]
end
| 30.75 | 68 | 0.780488 |
1c65a7d8efd6acbd6ddfbf17955c3dac72e11868 | 3,588 | ex | Elixir | kousa/lib/kousa/room_chat.ex | lazarospsa/dogehouse | 4400518f5b6bce929e40eada615356e8814a8d28 | [
"MIT"
] | 1 | 2021-04-19T19:32:51.000Z | 2021-04-19T19:32:51.000Z | kousa/lib/kousa/room_chat.ex | lazarospsa/dogehouse | 4400518f5b6bce929e40eada615356e8814a8d28 | [
"MIT"
] | 1 | 2022-03-25T19:20:22.000Z | 2022-03-25T19:20:22.000Z | kousa/lib/kousa/room_chat.ex | lazarospsa/dogehouse | 4400518f5b6bce929e40eada615356e8814a8d28 | [
"MIT"
] | 1 | 2021-11-24T04:01:49.000Z | 2021-11-24T04:01:49.000Z | defmodule Kousa.RoomChat do
alias Kousa.Utils.RegUtils
alias Beef.Rooms
@message_character_limit 512
@spec send_msg(String.t(), list(map), list(String.t())) :: any
def send_msg(user_id, tokens, whispered_to) do
tokens = validate_tokens(tokens)
# NB: length(list) is O(N) so use a match for stuff like this
if length(tokens) > 0 do
case Beef.Users.get_current_room_id(user_id) do
nil ->
nil
current_room_id ->
with {avatar_url, display_name, username} <-
Onion.UserSession.send_call!(user_id, {:get_info_for_msg}) do
RegUtils.lookup_and_cast(
Onion.RoomChat,
current_room_id,
{:new_msg, user_id,
%{
id: Ecto.UUID.generate(),
avatarUrl: avatar_url,
displayName: display_name,
username: username,
userId: user_id,
tokens: tokens,
sentAt: DateTime.utc_now(),
isWhisper: whispered_to != []
}, whispered_to}
)
end
end
end
end
defp validate_tokens(tokens) when is_list(tokens) do
if Enum.reduce_while(tokens, 0, &count_message_characters/2) <= @message_character_limit do
tokens
|> Enum.reduce([], &validate_tokens/2)
|> Enum.reverse()
else
[]
end
end
defp validate_tokens(_), do: []
defp validate_tokens(token, acc) do
case validate_token(token) do
{:ok, token} -> [token | acc]
_ -> acc
end
end
defp count_message_characters(%{"v" => v}, acc) do
if acc <= @message_character_limit, do: {:cont, String.length(v) + acc}, else: {:halt, acc}
end
defp validate_token(token = %{"t" => type, "v" => _})
when type in ["text", "mention", "block", "emote"],
do: {:ok, token}
defp validate_token(token = %{"t" => "link", "v" => link}) do
link
|> URI.parse()
|> valid_url?()
|> case do
true -> {:ok, token}
_ -> :invalid
end
end
defp validate_token(_), do: :invalid
defp valid_url?(%URI{host: host, scheme: scheme}) when is_binary(host) and is_binary(scheme),
do: true
defp valid_url?(_), do: false
def ban_user(user_id, user_id_to_ban) do
case Rooms.get_room_status(user_id) do
{:creator, room} ->
if room.creatorId != user_id_to_ban do
RegUtils.lookup_and_cast(Onion.RoomChat, room.id, {:ban_user, user_id_to_ban})
end
{:mod, room} ->
if room.creatorId != user_id_to_ban do
RegUtils.lookup_and_cast(Onion.RoomChat, room.id, {:ban_user, user_id_to_ban})
end
_ ->
nil
end
:ok
end
# Delete room chat messages
def delete_message(deleter_id, message_id, user_id) do
case Rooms.get_room_status(deleter_id) do
{:creator, room} ->
RegUtils.lookup_and_cast(
Onion.RoomChat,
room.id,
{:message_deleted, deleter_id, message_id}
)
# Mods can delete other mod' messages
{:mod, room} ->
if user_id != room.creatorId do
RegUtils.lookup_and_cast(
Onion.RoomChat,
room.id,
{:message_deleted, deleter_id, message_id}
)
end
{:listener, room} ->
if user_id == deleter_id do
RegUtils.lookup_and_cast(
Onion.RoomChat,
room.id,
{:message_deleted, deleter_id, message_id}
)
end
_ ->
nil
end
end
end
| 26.189781 | 95 | 0.567447 |
1c65accb65709c112e8c9c8ae08524b8a9abfe0a | 13,732 | ex | Elixir | lib/phoenix/router/helpers.ex | jesseshieh/phoenix | 1776e9df0a71de67374ed488b3f00ccb434045b3 | [
"MIT"
] | 1 | 2020-04-14T09:49:46.000Z | 2020-04-14T09:49:46.000Z | lib/phoenix/router/helpers.ex | jesseshieh/phoenix | 1776e9df0a71de67374ed488b3f00ccb434045b3 | [
"MIT"
] | 1 | 2020-05-26T19:38:18.000Z | 2020-05-26T19:38:18.000Z | lib/phoenix/router/helpers.ex | jesseshieh/phoenix | 1776e9df0a71de67374ed488b3f00ccb434045b3 | [
"MIT"
] | null | null | null | defmodule Phoenix.Router.Helpers do
# Module that generates the routing helpers.
@moduledoc false
alias Phoenix.Router.Route
alias Plug.Conn
@anno (if :erlang.system_info(:otp_release) >= '19' do
[generated: true, unquote: false]
else
[line: -1, unquote: false]
end)
@doc """
Callback invoked by the url generated in each helper module.
"""
def url(_router, %Conn{private: private}) do
case private do
%{phoenix_router_url: %URI{} = uri} -> URI.to_string(uri)
%{phoenix_router_url: url} when is_binary(url) -> url
%{phoenix_endpoint: endpoint} -> endpoint.url()
end
end
def url(_router, %_{endpoint: endpoint}) do
endpoint.url()
end
def url(_router, %URI{} = uri) do
URI.to_string(%{uri | path: nil})
end
def url(_router, endpoint) when is_atom(endpoint) do
endpoint.url()
end
def url(router, other) do
raise ArgumentError,
"expected a %Plug.Conn{}, a %Phoenix.Socket{}, a %URI{}, a struct with an :endpoint key, " <>
"or a Phoenix.Endpoint when building url for #{inspect(router)}, got: #{inspect(other)}"
end
@doc """
Callback invoked by path generated in each helper module.
"""
def path(router, %Conn{} = conn, path) do
conn
|> build_own_forward_path(router, path)
|> Kernel.||(build_conn_forward_path(conn, router, path))
|> Kernel.||(path_with_script(path, conn.script_name))
end
def path(_router, %URI{} = uri, path) do
(uri.path || "") <> path
end
def path(_router, %_{endpoint: endpoint}, path) do
endpoint.path(path)
end
def path(_router, endpoint, path) when is_atom(endpoint) do
endpoint.path(path)
end
def path(router, other, _path) do
raise ArgumentError,
"expected a %Plug.Conn{}, a %Phoenix.Socket{}, a %URI{}, a struct with an :endpoint key, " <>
"or a Phoenix.Endpoint when building path for #{inspect(router)}, got: #{inspect(other)}"
end
## Helpers
defp build_own_forward_path(conn, router, path) do
case Map.fetch(conn.private, router) do
{:ok, {local_script, _}} ->
path_with_script(path, local_script)
:error -> nil
end
end
defp build_conn_forward_path(%Conn{private: %{phoenix_router: phx_router}} = conn, router, path) do
case Map.fetch(conn.private, phx_router) do
{:ok, {script_name, forwards}} ->
case Map.fetch(forwards, router) do
{:ok, local_script} ->
path_with_script(path, script_name ++ local_script)
:error -> nil
end
:error -> nil
end
end
defp build_conn_forward_path(_conn, _router, _path), do: nil
defp path_with_script(path, []) do
path
end
defp path_with_script(path, script) do
"/" <> Enum.join(script, "/") <> path
end
@doc """
Generates the helper module for the given environment and routes.
"""
def define(env, routes, opts \\ []) do
# Ignore any route without helper or forwards.
routes =
Enum.reject(routes, fn {route, _exprs} ->
is_nil(route.helper) or route.kind == :forward
end)
trailing_slash? = Enum.any?(routes, fn {route, _} -> route.trailing_slash? end)
groups = Enum.group_by(routes, fn {route, _exprs} -> route.helper end)
impls =
for {_helper, group} <- groups,
{route, exprs} <- Enum.sort_by(group, fn {_, exprs} -> length(exprs.binding) end),
do: defhelper(route, exprs)
catch_all = Enum.map(groups, &defhelper_catch_all/1)
defhelper = quote @anno do
defhelper = fn helper, vars, opts, bins, segs, trailing_slash? ->
def unquote(:"#{helper}_path")(conn_or_endpoint, unquote(Macro.escape(opts)), unquote_splicing(vars)) do
unquote(:"#{helper}_path")(conn_or_endpoint, unquote(Macro.escape(opts)), unquote_splicing(vars), [])
end
def unquote(:"#{helper}_path")(conn_or_endpoint, unquote(Macro.escape(opts)), unquote_splicing(vars), params)
when is_list(params) or is_map(params) do
path(conn_or_endpoint, segments(unquote(segs), params, unquote(bins), unquote(trailing_slash?),
{unquote(helper), unquote(Macro.escape(opts)), unquote(Enum.map(vars, &Macro.to_string/1))}))
end
def unquote(:"#{helper}_url")(conn_or_endpoint, unquote(Macro.escape(opts)), unquote_splicing(vars)) do
unquote(:"#{helper}_url")(conn_or_endpoint, unquote(Macro.escape(opts)), unquote_splicing(vars), [])
end
def unquote(:"#{helper}_url")(conn_or_endpoint, unquote(Macro.escape(opts)), unquote_splicing(vars), params)
when is_list(params) or is_map(params) do
url(conn_or_endpoint) <> unquote(:"#{helper}_path")(conn_or_endpoint, unquote(Macro.escape(opts)), unquote_splicing(vars), params)
end
end
end
defcatch_all = quote @anno do
defcatch_all = fn helper, lengths, routes ->
for length <- lengths do
binding = List.duplicate({:_, [], nil}, length)
arity = length + 2
def unquote(:"#{helper}_path")(conn_or_endpoint, action, unquote_splicing(binding)) do
path(conn_or_endpoint, "/")
raise_route_error(unquote(helper), :path, unquote(arity), action, [])
end
def unquote(:"#{helper}_path")(conn_or_endpoint, action, unquote_splicing(binding), params) do
path(conn_or_endpoint, "/")
raise_route_error(unquote(helper), :path, unquote(arity + 1), action, params)
end
def unquote(:"#{helper}_url")(conn_or_endpoint, action, unquote_splicing(binding)) do
url(conn_or_endpoint)
raise_route_error(unquote(helper), :url, unquote(arity), action, [])
end
def unquote(:"#{helper}_url")(conn_or_endpoint, action, unquote_splicing(binding), params) do
url(conn_or_endpoint)
raise_route_error(unquote(helper), :url, unquote(arity + 1), action, params)
end
end
defp raise_route_error(unquote(helper), suffix, arity, action, params) do
Phoenix.Router.Helpers.raise_route_error(
__MODULE__,
"#{unquote(helper)}_#{suffix}",
arity,
action,
unquote(Macro.escape(routes)),
params
)
end
end
end
docs = Keyword.get(opts, :docs, true)
# It is in general bad practice to generate large chunks of code
# inside quoted expressions. However, we can get away with this
# here for two reasons:
#
# * Helper modules are quite uncommon, typically one per project.
#
# * We inline most of the code for performance, so it is specific
# per helper module anyway.
#
code = quote do
@moduledoc unquote(docs) && """
Module with named helpers generated from #{inspect unquote(env.module)}.
"""
unquote(defhelper)
unquote(defcatch_all)
unquote_splicing(impls)
unquote_splicing(catch_all)
@doc """
Generates the path information including any necessary prefix.
"""
def path(data, path) do
Phoenix.Router.Helpers.path(unquote(env.module), data, path)
end
@doc """
Generates the connection/endpoint base URL without any path information.
"""
def url(data) do
Phoenix.Router.Helpers.url(unquote(env.module), data)
end
@doc """
Generates path to a static asset given its file path.
"""
def static_path(%Conn{private: private} = conn, path) do
private.phoenix_endpoint.static_path(path)
end
def static_path(%_{endpoint: endpoint} = conn, path) do
endpoint.static_path(path)
end
def static_path(endpoint, path) when is_atom(endpoint) do
endpoint.static_path(path)
end
@doc """
Generates url to a static asset given its file path.
"""
def static_url(%Conn{private: private}, path) do
case private do
%{phoenix_static_url: %URI{} = uri} -> URI.to_string(uri) <> path
%{phoenix_static_url: url} when is_binary(url) -> url <> path
%{phoenix_endpoint: endpoint} -> static_url(endpoint, path)
end
end
def static_url(%_{endpoint: endpoint} = conn, path) do
static_url(endpoint, path)
end
def static_url(endpoint, path) when is_atom(endpoint) do
endpoint.static_url <> endpoint.static_path(path)
end
@doc """
Generates an integrity hash to a static asset given its file path.
"""
def static_integrity(%Conn{private: %{phoenix_endpoint: endpoint}}, path) do
static_integrity(endpoint, path)
end
def static_integrity(%_{endpoint: endpoint}, path) do
static_integrity(endpoint, path)
end
def static_integrity(endpoint, path) when is_atom(endpoint) do
endpoint.static_integrity(path)
end
# Functions used by generated helpers
# Those are inlined here for performance
defp to_param(int) when is_integer(int), do: Integer.to_string(int)
defp to_param(bin) when is_binary(bin), do: bin
defp to_param(false), do: "false"
defp to_param(true), do: "true"
defp to_param(data), do: Phoenix.Param.to_param(data)
defp segments(segments, [], _reserved, trailing_slash?, _opts) do
maybe_append_slash(segments, trailing_slash?)
end
defp segments(segments, query, reserved, trailing_slash?, _opts) when is_list(query) or is_map(query) do
dict = for {k, v} <- query,
not ((k = to_string(k)) in reserved),
do: {k, v}
case Conn.Query.encode dict, &to_param/1 do
"" -> maybe_append_slash(segments, trailing_slash?)
o -> maybe_append_slash(segments, trailing_slash?) <> "?" <> o
end
end
if unquote(trailing_slash?) do
defp maybe_append_slash("/", _), do: "/"
defp maybe_append_slash(path, true), do: path <> "/"
end
defp maybe_append_slash(path, _), do: path
end
Module.create(Module.concat(env.module, Helpers), code, line: env.line, file: env.file)
end
@doc """
Receives a route and returns the quoted definition for its helper function.
In case a helper name was not given, or route is forwarded, returns nil.
"""
def defhelper(%Route{} = route, exprs) do
helper = route.helper
opts = route.plug_opts
trailing_slash? = route.trailing_slash?
{bins, vars} = :lists.unzip(exprs.binding)
segs = expand_segments(exprs.path)
quote do
defhelper.(
unquote(helper),
unquote(Macro.escape(vars)),
unquote(Macro.escape(opts)),
unquote(Macro.escape(bins)),
unquote(Macro.escape(segs)),
unquote(Macro.escape(trailing_slash?))
)
end
end
def defhelper_catch_all({helper, routes_and_exprs}) do
routes =
routes_and_exprs
|> Enum.map(fn {routes, exprs} -> {routes.plug_opts, Enum.map(exprs.binding, &elem(&1, 0))} end)
|> Enum.sort()
lengths =
routes
|> Enum.map(fn {_, bindings} -> length(bindings) end)
|> Enum.uniq()
quote do
defcatch_all.(
unquote(helper),
unquote(lengths),
unquote(Macro.escape(routes))
)
end
end
@doc """
Callback for generate router catch alls.
"""
def raise_route_error(mod, fun, arity, action, routes, params) do
cond do
not Keyword.has_key?(routes, action) ->
"no action #{inspect action} for #{inspect mod}.#{fun}/#{arity}"
|> invalid_route_error(fun, routes)
is_list(params) or is_map(params) ->
"no function clause for #{inspect mod}.#{fun}/#{arity} and action #{inspect action}"
|> invalid_route_error(fun, routes)
true ->
invalid_param_error(mod, fun, arity, action, routes)
end
end
defp invalid_route_error(prelude, fun, routes) do
suggestions =
for {action, bindings} <- routes do
bindings = Enum.join([inspect(action) | bindings], ", ")
"\n #{fun}(conn_or_endpoint, #{bindings}, params \\\\ [])"
end
raise ArgumentError, "#{prelude}. The following actions/clauses are supported:\n#{suggestions}"
end
defp invalid_param_error(mod, fun, arity, action, routes) do
call_vars = Keyword.fetch!(routes, action)
raise ArgumentError, """
#{inspect(mod)}.#{fun}/#{arity} called with invalid params.
The last argument to this function should be a keyword list or a map.
For example:
#{fun}(#{Enum.join(["conn", ":#{action}" | call_vars], ", ")}, page: 5, per_page: 10)
It is possible you have called this function without defining the proper
number of path segments in your router.
"""
end
@doc """
Callback for properly encoding parameters in routes.
"""
def encode_param(str), do: URI.encode(str, &URI.char_unreserved?/1)
defp expand_segments([]), do: "/"
defp expand_segments(segments) when is_list(segments) do
expand_segments(segments, "")
end
defp expand_segments(segments) do
quote(do: "/" <> Enum.map_join(unquote(segments), "/", &unquote(__MODULE__).encode_param/1))
end
defp expand_segments([{:|, _, [h, t]}], acc),
do: quote(do: unquote(expand_segments([h], acc)) <> "/" <> Enum.map_join(unquote(t), "/", fn(s) -> URI.encode(s, &URI.char_unreserved?/1) end))
defp expand_segments([h|t], acc) when is_binary(h),
do: expand_segments(t, quote(do: unquote(acc) <> unquote("/" <> h)))
defp expand_segments([h|t], acc),
do: expand_segments(t, quote(do: unquote(acc) <> "/" <> URI.encode(to_param(unquote(h)), &URI.char_unreserved?/1)))
defp expand_segments([], acc),
do: acc
end
| 32.77327 | 147 | 0.631518 |
1c65b37c5da71fea62618a17981e09cba90778e8 | 6,139 | ex | Elixir | lib/client.ex | asonix/http-client-elixir | 5610dc572d0d0b45d0b1ce7c1dba62e9d4c17c7e | [
"MIT"
] | 4 | 2016-08-16T11:47:58.000Z | 2018-06-08T05:47:09.000Z | lib/client.ex | asonix/http-client-elixir | 5610dc572d0d0b45d0b1ce7c1dba62e9d4c17c7e | [
"MIT"
] | null | null | null | lib/client.ex | asonix/http-client-elixir | 5610dc572d0d0b45d0b1ce7c1dba62e9d4c17c7e | [
"MIT"
] | null | null | null | defmodule Client do
@moduledoc """
The client module contains two functions of note, `do_request/6` and
`do_request!/6` which perform HTTP actions as well as encoding and decoding
data and setting headers involved in the request.
## Actions
Actions are the part that actually make the HTTP Request, if that is what you
choose to do with this library. It is failry generic. Some actions are provided.
### Provided Actions
- `Client.get/3` / `Client.get!/3`
- `Client.post/3` / `Client.post!/3`
- `Client.patch/3` / `Client.patch!/3`
- `Client.put/3` / `Client.put!/3`
- `Client.delete/3` / `Client.delete!/3`
The provided actions are all simple wrappers around HTTPoison to make the
arguments resemble what the callback requires in `do_request/6` and
`do_request!/6`
"""
alias HTTPoison.Response
defmodule ResponseNot200Error do
defexception [:message]
def exception(%Response{status_code: status}) do
msg = "status code: #{status}"
{:error, %__MODULE__{message: msg}}
end
def exception(value) do
msg = "expected %HTTPoison.Response{}, got: #{inspect value}"
{:error, %__MODULE__{message: msg}}
end
end
def get(href, payload, headers),
do: HTTPoison.get(href, headers, payload)
def post(href, payload, headers),
do: HTTPoison.post(href, payload, headers)
def patch(href, payload, headers),
do: HTTPoison.patch(href, payload, headers)
def put(href, payload, headers),
do: HTTPoison.put(href, payload, headers)
def delete(href, _payload, headers),
do: HTTPoison.delete(href, headers)
def get!(href, payload, headers),
do: HTTPoison.get!(href, headers, payload)
def post!(href, payload, headers),
do: HTTPoison.post!(href, payload, headers)
def patch!(href, payload, headers),
do: HTTPoison.patch!(href, payload, headers)
def put!(href, payload, headers),
do: HTTPoison.put!(href, payload, headers)
def delete!(href, _payload, headers),
do: HTTPoison.delete!(href, headers)
@doc """
Sequences calls to encoder, action, and decoder to perform HTTPoison requests.
It is important to understand how this client works in order to properly use it.
It provides two implementations of a single function `do_request/6`, which takes
the arguments explained below:
| Argument | description |
| ---------- | ----------- |
| `href` | The URL of the resource to be queried |
| `payload` | A Map, Struct, or List to be sent to the server |
| `headers` | The headers to be sent with the query |
| `encoder` | This is an encoder from the Client package, a list of encoders is provided below |
| `decoder` | This is a decoder from the Client package, a list of decoders is proved below |
| `action` | This is an HTTPoison verb. Usage defined below |
## Notes
When using `do_request/6`, your actions all need to return a tuple of the format
`{:ok, data}` or `{:error, reason}`, any other formats will not be properly
handled by `do_request/6`.
## Examples
data = Client.do_request(
"https://httpbin.org/post",
%{"key" => "value", "key2" => ["value1", "value2"]},
%{"Header" => "Header/Value"},
Client.Encoders.JSON,
Client.Decoders.JSON,
&Client.post(&1, &2, &3)
)
assert data == {
:ok,
%{
"args" => %{},
"data" => "{\\"key2\\":[\\"value1\\",\\"value2\\"],\\"key\\":\\"value\\"}",
"files" => %{},
"form" => %{},
"headers" => %{
"Accept" => "application/json",
"Content-Length" => "42",
"Content-Type" => "application/json",
"Header" => "Header/Value",
"Host" => "httpbin.org",
"User-Agent" => "hackney/1.6.1"
},
"json" => %{
"key" => "value",
"key2" => ["value1", "value2"]
},
"origin" => "127.0.0.1",
"url" => "https://httpbin.org/post"
}
}
iex> Client.do_request("a.com", %{"key" => "value"}, %{}, Client.Encoders.JSON, Client.Decoders.JSON, fn _href, payload, _headers -> {:ok, %HTTPoison.Response{status_code: 200, body: payload}} end)
{:ok, %{"key" => "value"}}
"""
def do_request(href, payload, headers, encoder, decoder, action) do
with {:ok, payload} <- encoder.encode(payload) do
headers = encoder.headers
|> Map.merge(decoder.headers)
|> Map.merge(headers)
with {:ok, response} <- action.(href, payload, headers),
do: handle_response(response, decoder)
end
end
@doc """
Aggressive version of `do_request/6`. Aggressive means raising errors rather
than returning error structs.
## Notes
When using `do_request!/6`, your actions must all return `data` directly,
outside of the tuple used in the safer version. The reason for this is we expect
errors in this case to be raised rather than returned.
## Examples
iex> Client.do_request!("a.com", %{"key" => "value"}, %{}, Client.Encoders.JSON, Client.Decoders.JSON, fn _href, payload, _headers -> %HTTPoison.Response{status_code: 200, body: payload} end)
%{"key" => "value"}
"""
def do_request!(href, payload, headers, encoder, decoder, action) do
payload = encoder.encode!(payload)
headers = encoder.headers
|> Map.merge(decoder.headers)
|> Map.merge(headers)
href
|> action.(payload, headers)
|> handle_response!(decoder)
end
defp handle_response(%Response{status_code: status, body: body}=response, decoder) do
cond do
status in [200, 201] ->
decoder.decode(body)
status == 204 ->
{:ok, :no_content}
true ->
ResponseNot200Error.exception(response)
end
end
defp handle_response!(%Response{status_code: status, body: body}=response, decoder) do
cond do
status in [200, 201] ->
decoder.decode!(body)
status == 204 ->
:no_content
true ->
with {:error, error} <- ResponseNot200Error.exception(response),
do: raise error
end
end
end
| 31.80829 | 203 | 0.61655 |
1c65ce58ab95a6612034588b6d00d539b871654c | 2,027 | ex | Elixir | apps/kv_server/lib/kv_server/command.ex | jeremy-miller/mix-otp-guide | f949e3221f2ccbb604496c169a16715b73e3676b | [
"MIT"
] | null | null | null | apps/kv_server/lib/kv_server/command.ex | jeremy-miller/mix-otp-guide | f949e3221f2ccbb604496c169a16715b73e3676b | [
"MIT"
] | null | null | null | apps/kv_server/lib/kv_server/command.ex | jeremy-miller/mix-otp-guide | f949e3221f2ccbb604496c169a16715b73e3676b | [
"MIT"
] | null | null | null | defmodule KVServer.Command do
@doc """
Runs the given command.
"""
def run(command)
def run({:create, bucket}) do
KV.Registry.create(KV.Registry, bucket)
{:ok, "OK\r\n"}
end
def run({:get, bucket, key}) do
lookup(bucket, fn pid ->
value = KV.Bucket.get(pid, key)
{:ok, "#{value}\r\nOK\r\n"}
end)
end
def run({:put, bucket, key, value}) do
lookup(bucket, fn pid ->
KV.Bucket.put(pid, key, value)
{:ok, "OK\r\n"}
end)
end
def run({:delete, bucket, key}) do
lookup(bucket, fn pid ->
KV.Bucket.delete(pid, key)
{:ok, "OK\r\n"}
end)
end
defp lookup(bucket, callback) do
case KV.Router.route(bucket, KV.Registry, :lookup, [KV.Registry, bucket]) do
{:ok, pid} -> callback.(pid)
:error -> {:error, :not_found}
end
end
@doc ~S"""
Parses the given `line` into a command.
## Examples
iex> KVServer.Command.parse "CREATE shopping\r\n"
{:ok, {:create, "shopping"}}
iex> KVServer.Command.parse "CREATE shopping \r\n"
{:ok, {:create, "shopping"}}
iex> KVServer.Command.parse "PUT shopping milk 1\r\n"
{:ok, {:put, "shopping", "milk", "1"}}
iex> KVServer.Command.parse "GET shopping milk\r\n"
{:ok, {:get, "shopping", "milk"}}
iex> KVServer.Command.parse "DELETE shopping eggs\r\n"
{:ok, {:delete, "shopping", "eggs"}}
Unknown commands or commands with the wrong number of
arguments return an error:
iex> KVServer.Command.parse "UNKNOWN shopping eggs\r\n"
{:error, :unknown_command}
iex> KVServer.Command.parse "GET shopping\r\n"
{:error, :unknown_command}
"""
def parse(line) do
case String.split(line) do
["CREATE", bucket] -> {:ok, {:create, bucket}}
["GET", bucket, key] -> {:ok, {:get, bucket, key}}
["PUT", bucket, key, value] -> {:ok, {:put, bucket, key, value}}
["DELETE", bucket, key] -> {:ok, {:delete, bucket, key}}
_ -> {:error, :unknown_command}
end
end
end
| 25.3375 | 80 | 0.579674 |
1c65d95b4540187bcf27837890ee6cca99a3a8bb | 2,016 | exs | Elixir | mix.exs | nelsonmfinda/discuss | 219144cd82240a6acd1335710a8f0a8d1b6b4a88 | [
"MIT"
] | null | null | null | mix.exs | nelsonmfinda/discuss | 219144cd82240a6acd1335710a8f0a8d1b6b4a88 | [
"MIT"
] | null | null | null | mix.exs | nelsonmfinda/discuss | 219144cd82240a6acd1335710a8f0a8d1b6b4a88 | [
"MIT"
] | null | null | null | defmodule Discuss.MixProject do
use Mix.Project
  # Mix project definition: application name/version, Elixir requirement,
  # compiler settings and the dependency list below.
  def project do
    [
      app: :discuss,
      version: "0.1.0",
      elixir: "~> 1.12",
      # `elixirc_paths/1` adds test/support only for the :test environment.
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: [:gettext] ++ Mix.compilers(),
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      deps: deps()
    ]
  end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
  def application do
    [
      # Discuss.Application is the entry point that starts the supervision tree.
      mod: {Discuss.Application, []},
      extra_applications: [:logger, :runtime_tools]
    ]
  end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
  # Project dependencies; `only:`/`runtime:` options restrict when a
  # dependency is compiled or started.
  defp deps do
    [
      {:phoenix, "~> 1.6.2"},
      {:phoenix_ecto, "~> 4.4"},
      {:ecto_sql, "~> 3.6"},
      {:postgrex, ">= 0.0.0"},
      {:phoenix_html, "~> 3.0"},
      {:phoenix_live_reload, "~> 1.2", only: :dev},
      {:phoenix_live_view, "~> 0.16.0"},
      {:floki, ">= 0.30.0", only: :test},
      {:phoenix_live_dashboard, "~> 0.5"},
      # esbuild is only needed at dev time to bundle assets.
      {:esbuild, "~> 0.2", runtime: Mix.env() == :dev},
      {:swoosh, "~> 1.3"},
      {:telemetry_metrics, "~> 0.6"},
      {:telemetry_poller, "~> 1.0"},
      {:gettext, "~> 0.18"},
      {:jason, "~> 1.2"},
      {:plug_cowboy, "~> 2.5"}
    ]
  end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to install project dependencies and perform other setup tasks, run:
#
# $ mix setup
#
# See the documentation for `Mix` for more info on aliases.
  # Convenience Mix aliases (e.g. `mix setup`) bundling common task chains.
  defp aliases do
    [
      setup: ["deps.get", "ecto.setup"],
      "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      # Tests run against a freshly created and migrated database.
      test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"],
      "assets.deploy": ["esbuild default --minify", "phx.digest"]
    ]
  end
end
| 28.394366 | 84 | 0.565476 |
1c65e463b469d7b403e301fd6c0b6cb339644d83 | 1,882 | ex | Elixir | lib/accent/auth/user_remote/persister.ex | leonardocouy/accent | 29fb324395ff998cc5cdc6947c60070ffabe647c | [
"BSD-3-Clause"
] | null | null | null | lib/accent/auth/user_remote/persister.ex | leonardocouy/accent | 29fb324395ff998cc5cdc6947c60070ffabe647c | [
"BSD-3-Clause"
] | null | null | null | lib/accent/auth/user_remote/persister.ex | leonardocouy/accent | 29fb324395ff998cc5cdc6947c60070ffabe647c | [
"BSD-3-Clause"
] | null | null | null | defmodule Accent.UserRemote.Persister do
@moduledoc """
Manage user creation and provider creation.
This module makes sure that a user, returned from the Accent.UserRemote.Fetcher,
is persisted in the database with its provider infos and email.
3 cases can happen when a user is fetched.
- New user with new provider. (First time logging in)
- Existing user with same provider. (Same login as the first time)
- Existing user but with a different provider. (Login with a different provider)
"""
alias Accent.Repo
alias Accent.AuthProvider
alias Accent.User, as: RepoUser
alias Accent.UserRemote.Adapter.User, as: FetchedUser
alias Ecto.Changeset
@spec persist(FetchedUser.t()) :: {:ok, RepoUser.t(), AuthProvider.t()}
def persist(user = %FetchedUser{provider: provider, uid: uid}) do
user = find_user(user)
provider = find_provider(user, provider, uid)
{:ok, user, provider}
end
defp find_user(fetched_user) do
case Repo.get_by(RepoUser, email: fetched_user.email) do
user = %RepoUser{} -> update_user(user, fetched_user)
_ -> create_user(fetched_user)
end
end
defp find_provider(user, provider_name, uid) do
case Repo.get_by(AuthProvider, name: provider_name, uid: uid) do
provider = %AuthProvider{} -> provider
_ -> create_provider(user, provider_name, uid)
end
end
defp create_provider(user, name, uid), do: Repo.insert!(%AuthProvider{name: name, uid: uid, user_id: user.id})
defp create_user(fetched_user), do: Repo.insert!(%RepoUser{email: fetched_user.email, fullname: fetched_user.fullname, picture_url: fetched_user.picture_url})
defp update_user(user, fetched_user) do
user
|> Changeset.change(%{
fullname: fetched_user.fullname || user.fullname,
picture_url: fetched_user.picture_url || user.picture_url
})
|> Repo.update!()
end
end
| 34.218182 | 160 | 0.717853 |
1c661c348f9f88c2436aa1121835cc428e35d2d5 | 1,608 | exs | Elixir | mix.exs | saleyn/sorted_set | 2f0dc27c39637fa97413e1a8740b1ec12ef1653f | [
"MIT"
] | 534 | 2020-03-25T06:24:07.000Z | 2022-03-31T21:55:15.000Z | mix.exs | saleyn/sorted_set | 2f0dc27c39637fa97413e1a8740b1ec12ef1653f | [
"MIT"
] | 6 | 2020-09-08T11:21:02.000Z | 2021-07-23T23:29:27.000Z | mix.exs | saleyn/sorted_set | 2f0dc27c39637fa97413e1a8740b1ec12ef1653f | [
"MIT"
] | 50 | 2020-04-06T19:05:37.000Z | 2022-03-16T18:17:50.000Z | defmodule SortedSet.MixProject do
use Mix.Project
  # Mix project definition for the SortedSet NIF package.
  def project do
    [app: :sorted_set_nif,
     version: @version,
     elixir: "~> 1.5",
     build_embedded: Mix.env == :prod,
     start_permanent: Mix.env == :prod,
     description: description(),
     package: package(),
     deps: deps(),
     # Shown as the project home in generated docs and on Hex.
     source_url: "https://github.com/GoogleCloudPlatform/elixir-google-api/tree/master/clients/ad_sense"
    ]
  end
def application do
[
extra_applications: [:logger]
]
end
defp deps do
[
{:rustler, "~> 0.22.0"},
{:jemalloc_info, "~> 0.3", app: false},
{:ex_doc, "~> 0.19", only: [:dev], runtime: false},
{:benchee, "~> 1.0", only: [:dev]},
{:benchee_html, "~> 1.0", only: [:dev]},
{:stream_data, "~> 0.4", only: [:test]},
{:dialyxir, "~> 1.0.0", only: [:dev], runtime: false}
]
end
  # ExDoc configuration: README is rendered as the docs landing page.
  defp docs do
    [
      name: "SortedSet",
      extras: ["README.md"],
      main: "readme",
      source_url: "https://github.com/discord/sorted_set"
    ]
  end
defp elixirc_paths(:test) do
elixirc_paths(:default) ++ ["test/support"]
end
defp elixirc_paths(_) do
["lib"]
end
  # Hex package metadata, including the Rust crate files shipped with the NIF.
  defp package do
    [
      name: :sorted_set_nif,
      description: "SortedSet is a fast and efficient Rust backed sorted set.",
      files: ["lib", "native/sorted_set_nif/Cargo.toml", "native/sorted_set_nif/README.md", "native/sorted_set_nif/src", ".formatter.exs", "README*", "LICENSE*", "mix.exs"],
      maintainers: ["Discord Core Infrastructure"],
      licenses: ["MIT"],
      links: %{
        "GitHub" => "https://github.com/discord/sorted_set_nif"
      }
    ]
  end
end
| 24 | 173 | 0.560323 |
1c663f9cac6adea147bad012ece3b269c0980ccb | 1,720 | exs | Elixir | config/dev.exs | francois2metz/asitext | 169b6488c4214012d5fc6ecafd4bcb5233008b20 | [
"MIT"
] | 2 | 2018-01-15T08:37:32.000Z | 2018-01-16T18:02:24.000Z | config/dev.exs | francois2metz/asitext | 169b6488c4214012d5fc6ecafd4bcb5233008b20 | [
"MIT"
] | 5 | 2018-07-24T15:42:04.000Z | 2021-09-10T10:07:38.000Z | config/dev.exs | francois2metz/asitext | 169b6488c4214012d5fc6ecafd4bcb5233008b20 | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :asitext, AsitextWeb.Endpoint,
  http: [port: 4000],
  # Detailed error pages and per-request code reloading (development only).
  debug_errors: true,
  code_reloader: true,
  check_origin: false,
  # brunch watches and rebuilds JS/CSS assets as they change.
  watchers: [node: ["node_modules/brunch/bin/brunch", "watch", "--stdin",
                    cd: Path.expand("../assets", __DIR__)]]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# command from your terminal:
#
# openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" -keyout priv/server.key -out priv/server.pem
#
# The `http:` config above can be replaced with:
#
# https: [port: 4000, keyfile: "priv/server.key", certfile: "priv/server.pem"],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :asitext, AsitextWeb.Endpoint,
  live_reload: [
    # Files matching these patterns trigger a browser reload when changed.
    patterns: [
      ~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
      ~r{priv/gettext/.*(po)$},
      ~r{lib/asitext_web/views/.*(ex)$},
      ~r{lib/asitext_web/templates/.*(eex)$}
    ]
  ]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
| 34.4 | 170 | 0.700581 |
1c669a9dadf0f39334342b3ce94fd1d5ecfd893c | 5,512 | ex | Elixir | lib/event_store/storage/appender.ex | MichalDolata/eventstore | 2a6725fcb4c398d81a74455b6077901189b75386 | [
"MIT"
] | null | null | null | lib/event_store/storage/appender.ex | MichalDolata/eventstore | 2a6725fcb4c398d81a74455b6077901189b75386 | [
"MIT"
] | 1 | 2018-08-23T16:47:17.000Z | 2018-08-23T19:56:42.000Z | lib/event_store/storage/appender.ex | MichalDolata/eventstore | 2a6725fcb4c398d81a74455b6077901189b75386 | [
"MIT"
] | 1 | 2018-07-30T18:25:06.000Z | 2018-07-30T18:25:06.000Z | defmodule EventStore.Storage.Appender do
@moduledoc false
require Logger
alias EventStore.RecordedEvent
alias EventStore.Sql.Statements
@all_stream_id 0
  @doc """
  Append the given list of events to storage.
  Events are inserted atomically in batches of 1,000 within a single
  transaction. This is due to PostgreSQL's limit of 65,535 parameters in a
  single statement.
  Returns `:ok` on success, `{:error, reason}` on failure.
  """
  def append(conn, stream_id, events, opts \\ [])
  def append(conn, stream_id, events, opts) do
    stream_uuid = stream_uuid(events)
    Postgrex.transaction(
      conn,
      fn transaction ->
        events
        # String UUID fields are converted to binary form before insertion.
        |> Stream.map(&encode_uuids/1)
        |> Stream.chunk_every(1_000)
        |> Enum.map(fn batch ->
          case insert_event_batch(transaction, batch, opts) do
            :ok -> Enum.map(batch, & &1.event_id)
            # Rolling back aborts the whole append: no partial batch persists.
            {:error, reason} -> Postgrex.rollback(transaction, reason)
          end
        end)
        |> Enum.each(fn event_ids ->
          event_count = length(event_ids)
          # Parameters are (stream_version_index, event_id) pairs, 1-based.
          parameters =
            event_ids
            |> Stream.with_index(1)
            |> Enum.flat_map(fn {event_id, index} -> [index, event_id] end)
          # Each event is recorded in its own stream and also linked into the
          # global stream (@all_stream_id).
          with :ok <- insert_stream_events(transaction, parameters, stream_id, event_count, opts),
               :ok <- insert_link_events(transaction, parameters, @all_stream_id, event_count, opts) do
            :ok
          else
            {:error, reason} -> Postgrex.rollback(transaction, reason)
          end
        end)
      end,
      opts
    )
    |> case do
      {:ok, :ok} ->
        Logger.debug(fn ->
          "Appended #{length(events)} event(s) to stream #{inspect(stream_uuid)}"
        end)
        :ok
      {:error, reason} = reply ->
        Logger.warn(fn ->
          "Failed to append events to stream #{inspect(stream_uuid)} due to: #{inspect(reason)}"
        end)
        reply
    end
  end
  @doc """
  Link the given list of existing event ids to another stream in storage.
  Returns `:ok` on success, `{:error, reason}` on failure.
  """
  def link(conn, stream_id, event_ids, opts \\ [])
  def link(conn, stream_id, event_ids, opts) do
    Postgrex.transaction(
      conn,
      fn transaction ->
        event_ids
        |> Stream.map(&encode_uuid/1)
        # Batched like `append/4` to stay under PostgreSQL's parameter limit.
        |> Stream.chunk_every(1_000)
        |> Enum.each(fn batch ->
          count = length(batch)
          # Parameters are (stream_version_index, event_id) pairs, 1-based.
          parameters =
            batch
            |> Stream.with_index(1)
            |> Enum.flat_map(fn {event_id, index} -> [index, event_id] end)
          with :ok <- insert_link_events(transaction, parameters, stream_id, count, opts) do
            :ok
          else
            # Any failed batch rolls back the whole link operation.
            {:error, reason} -> Postgrex.rollback(transaction, reason)
          end
        end)
      end,
      opts
    )
    |> case do
      {:ok, :ok} ->
        Logger.debug(fn ->
          "Linked #{length(event_ids)} event(s) to stream"
        end)
        :ok
      {:error, reason} = reply ->
        Logger.warn(fn ->
          "Failed to link events to stream due to: #{inspect(reason)}"
        end)
        reply
    end
  end
defp encode_uuids(%RecordedEvent{} = event) do
%RecordedEvent{
event
| event_id: event.event_id |> uuid(),
causation_id: event.causation_id |> uuid(),
correlation_id: event.correlation_id |> uuid()
}
end
defp encode_uuid(event_id) when is_bitstring(event_id) do
event_id |> uuid()
end
defp insert_event_batch(conn, events, opts) do
event_count = length(events)
statement = Statements.create_events(event_count)
parameters = build_insert_parameters(events)
conn
|> Postgrex.query(statement, parameters, opts)
|> handle_response()
end
defp build_insert_parameters(events) do
events
|> Enum.flat_map(fn event ->
[
event.event_id,
event.event_type,
event.causation_id,
event.correlation_id,
event.data,
event.metadata,
event.created_at
]
end)
end
defp insert_stream_events(conn, parameters, stream_id, event_count, opts) do
statement = Statements.create_stream_events(event_count)
params = [stream_id | [event_count | parameters]]
conn
|> Postgrex.query(statement, params, opts)
|> handle_response()
end
defp insert_link_events(conn, parameters, stream_id, event_count, opts) do
statement = Statements.create_link_events(event_count)
params = [stream_id | [event_count | parameters]]
conn
|> Postgrex.query(statement, params, opts)
|> handle_response()
end
  # nil passes through untouched so optional causation/correlation ids survive.
  defp uuid(nil), do: nil
  defp uuid(uuid), do: UUID.string_to_binary!(uuid)
defp handle_response({:ok, %Postgrex.Result{num_rows: rows}}) do
case rows do
0 -> {:error, :not_found}
_ -> :ok
end
end
  # Translates PostgreSQL error codes/constraints into domain error atoms.
  defp handle_response({:error, %Postgrex.Error{} = error}) do
    %Postgrex.Error{
      postgres: %{
        code: error_code,
        constraint: constraint,
        message: message
      }
    } = error
    Logger.warn(fn ->
      "Failed to append events to stream due to: #{inspect(message)}"
    end)
    case {error_code, constraint} do
      # FK violation: the target stream row is missing.
      {:foreign_key_violation, _} -> {:error, :not_found}
      # Same event already written to the stream.
      {:unique_violation, "stream_events_pkey"} -> {:error, :duplicate_event}
      # Any other unique violation indicates a version conflict.
      {:unique_violation, _} -> {:error, :wrong_expected_version}
      {reason, _} -> {:error, reason}
    end
  end
defp stream_uuid([event | _]), do: event.stream_uuid
end
| 26.373206 | 103 | 0.604499 |
1c669e68b8103c6964b2c66a37f9fd68bfa779b9 | 2,164 | exs | Elixir | config/prod.exs | jakewilkins/gexbot | b92cd5bae794becaedbee9c8884e4b66942198b7 | [
"WTFPL"
] | null | null | null | config/prod.exs | jakewilkins/gexbot | b92cd5bae794becaedbee9c8884e4b66942198b7 | [
"WTFPL"
] | null | null | null | config/prod.exs | jakewilkins/gexbot | b92cd5bae794becaedbee9c8884e4b66942198b7 | [
"WTFPL"
] | null | null | null | use Mix.Config
# For production, we often load configuration from external
# sources, such as your system environment. For this reason,
# you won't find the :http configuration below, but set inside
# GexbotWeb.Endpoint.init/2 when load_from_system_env is
# true. Any dynamic configuration should be done there.
#
# Don't forget to configure the url host to something meaningful,
# Phoenix uses this information when generating URLs.
#
# Finally, we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the mix phx.digest task
# which you typically run after static files are built.
config :gexbot, GexbotWeb.Endpoint,
load_from_system_env: true,
url: [host: "example.com", port: 80],
cache_static_manifest: "priv/static/cache_manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :gexbot, GexbotWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [:inet6,
# port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
# config :gexbot, GexbotWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :gexbot, GexbotWeb.Endpoint, server: true
#
# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"
| 33.292308 | 67 | 0.720425 |
1c66a5ea4d1fc93fab41cf598342fb1b43ab9c45 | 12,455 | exs | Elixir | test/clickhouse_ecto_test.exs | kundi/clickhouse_ecto | 3251221bd63a159c0e8af8336663b0bef855bbca | [
"Apache-2.0"
] | 1 | 2019-06-14T16:00:30.000Z | 2019-06-14T16:00:30.000Z | test/clickhouse_ecto_test.exs | kundi/clickhouse_ecto | 3251221bd63a159c0e8af8336663b0bef855bbca | [
"Apache-2.0"
] | null | null | null | test/clickhouse_ecto_test.exs | kundi/clickhouse_ecto | 3251221bd63a159c0e8af8336663b0bef855bbca | [
"Apache-2.0"
] | 2 | 2019-07-08T12:28:35.000Z | 2019-09-25T14:18:10.000Z | defmodule ClickhouseEctoTest do
use ExUnit.Case
doctest ClickhouseEcto
import Ecto.Query
alias ClickhouseEcto.Connection, as: SQL
  # Minimal Ecto schema used as the query source throughout these tests.
  defmodule Schema do
    use Ecto.Schema
    schema "schema" do
      field :x, :integer
      field :y, :integer
      field :z, :integer
    end
  end
  # Runs a query through Ecto's planner and normalizer, mirroring the
  # preparation a repo performs before handing the query to the adapter.
  defp normalize(query, operation \\ :all, counter \\ 0) do
    {query, _params, _key} = Ecto.Query.Planner.plan(query, operation, ClickhouseEcto)
    {query, _} = Ecto.Query.Planner.normalize(query, operation, ClickhouseEcto, counter)
    query
  end
defp all(query), do: query |> SQL.all |> IO.iodata_to_binary()
defp insert(prefx, table, header, rows, on_conflict, returning) do
IO.iodata_to_binary SQL.insert(prefx, table, header, rows, on_conflict, returning)
end
  # FROM-clause generation: schema sources, schemaless (string) sources and
  # subquery sources.
  test "from" do
    query = Schema |> select([r], r.x) |> normalize
    assert all(query) == ~s{SELECT s0."x" FROM "schema" AS s0}
  end
  test "from without schema" do
    query = "posts" |> select([r], r.x) |> normalize
    assert all(query) == ~s{SELECT p0."x" FROM "posts" AS p0}
    query = "Posts" |> select([:x]) |> normalize
    assert all(query) == ~s{SELECT P0."x" FROM "Posts" AS P0}
    # FIXME
    # query = "0posts" |> select([:x]) |> normalize
    # assert all(query) == ~s{SELECT t0."x" FROM "0posts" AS t0}
    # assert_raise Ecto.QueryError, ~r/MySQL does not support selecting all fields from "posts" without a schema/, fn ->
    #   all from(p in "posts", select: p) |> normalize()
    # end
  end
  test "from with subquery" do
    query = subquery("posts" |> select([r], %{x: r.x, y: r.y})) |> select([r], r.x) |> normalize
    assert all(query) == ~s{SELECT s0."x" FROM (SELECT p0."x" AS "x", p0."y" AS "y" FROM "posts" AS p0) AS s0}
    query = subquery("posts" |> select([r], %{x: r.x, z: r.y})) |> select([r], r) |> normalize
    assert all(query) == ~s{SELECT s0."x", s0."z" FROM (SELECT p0."x" AS "x", p0."y" AS "z" FROM "posts" AS p0) AS s0}
  end
  # SELECT-list generation for tuples, lists and struct/2 projections, plus
  # DISTINCT handling (DISTINCT ON is rejected by this adapter).
  test "select" do
    query = Schema |> select([r], {r.x, r.y}) |> normalize
    assert all(query) == ~s{SELECT s0."x", s0."y" FROM "schema" AS s0}
    query = Schema |> select([r], [r.x, r.y]) |> normalize
    assert all(query) == ~s{SELECT s0."x", s0."y" FROM "schema" AS s0}
    query = Schema |> select([r], struct(r, [:x, :y])) |> normalize
    assert all(query) == ~s{SELECT s0."x", s0."y" FROM "schema" AS s0}
  end
  test "distinct" do
    query = Schema |> distinct([r], true) |> select([r], {r.x, r.y}) |> normalize
    assert all(query) == ~s{SELECT DISTINCT s0."x", s0."y" FROM "schema" AS s0}
    query = Schema |> distinct([r], false) |> select([r], {r.x, r.y}) |> normalize
    assert all(query) == ~s{SELECT s0."x", s0."y" FROM "schema" AS s0}
    query = Schema |> distinct(true) |> select([r], {r.x, r.y}) |> normalize
    assert all(query) == ~s{SELECT DISTINCT s0."x", s0."y" FROM "schema" AS s0}
    query = Schema |> distinct(false) |> select([r], {r.x, r.y}) |> normalize
    assert all(query) == ~s{SELECT s0."x", s0."y" FROM "schema" AS s0}
    assert_raise Ecto.QueryError, ~r"DISTINCT ON is not supported", fn ->
      query = Schema |> distinct([r], [r.x, r.y]) |> select([r], {r.x, r.y}) |> normalize
      all(query)
    end
  end
  # WHERE/OR WHERE combination, ORDER BY variants, and ClickHouse's
  # `LIMIT offset, count` form.
  test "where" do
    query = Schema |> where([r], r.x == 42) |> where([r], r.y != 43) |> select([r], r.x) |> normalize
    assert all(query) == ~s{SELECT s0."x" FROM "schema" AS s0 WHERE (s0."x" = 42) AND (s0."y" != 43)}
  end
  test "or_where" do
    query = Schema |> or_where([r], r.x == 42) |> or_where([r], r.y != 43) |> select([r], r.x) |> normalize
    assert all(query) == ~s{SELECT s0."x" FROM "schema" AS s0 WHERE (s0."x" = 42) OR (s0."y" != 43)}
    query = Schema |> or_where([r], r.x == 42) |> or_where([r], r.y != 43) |> where([r], r.z == 44) |> select([r], r.x) |> normalize
    assert all(query) == ~s{SELECT s0."x" FROM "schema" AS s0 WHERE ((s0."x" = 42) OR (s0."y" != 43)) AND (s0."z" = 44)}
  end
  test "order by" do
    query = Schema |> order_by([r], r.x) |> select([r], r.x) |> normalize
    assert all(query) == ~s{SELECT s0."x" FROM "schema" AS s0 ORDER BY s0."x"}
    query = Schema |> order_by([r], [r.x, r.y]) |> select([r], r.x) |> normalize
    assert all(query) == ~s{SELECT s0."x" FROM "schema" AS s0 ORDER BY s0."x", s0."y"}
    query = Schema |> order_by([r], [asc: r.x, desc: r.y]) |> select([r], r.x) |> normalize
    assert all(query) == ~s{SELECT s0."x" FROM "schema" AS s0 ORDER BY s0."x", s0."y" DESC}
    query = Schema |> order_by([r], []) |> select([r], r.x) |> normalize
    assert all(query) == ~s{SELECT s0."x" FROM "schema" AS s0}
  end
  test "limit and offset" do
    query = Schema |> limit([r], 3) |> select([], true) |> normalize
    assert all(query) == ~s{SELECT 'TRUE' FROM "schema" AS s0 LIMIT 3}
    query = Schema |> offset([r], 5) |> limit([r], 3) |> select([], true) |> normalize
    assert all(query) == ~s{SELECT 'TRUE' FROM "schema" AS s0 LIMIT 5, 3}
  end
  # Quote escaping in literals, comparison operator rendering, and NULL checks.
  test "string escape" do
    # FIXME
    # query = "schema" |> where(foo: "'\\ ") |> select([], true) |> normalize
    # assert all(query) == ~s{SELECT 'TRUE' FROM "schema" AS s0 WHERE (s0."foo" = '''\\\\ ')}
    query = "schema" |> where(foo: "'") |> select([], true) |> normalize
    assert all(query) == ~s{SELECT 'TRUE' FROM "schema" AS s0 WHERE (s0."foo" = '''')}
  end
  test "binary ops" do
    query = Schema |> select([r], r.x == 2) |> normalize
    assert all(query) == ~s{SELECT s0."x" = 2 FROM "schema" AS s0}
    query = Schema |> select([r], r.x != 2) |> normalize
    assert all(query) == ~s{SELECT s0."x" != 2 FROM "schema" AS s0}
    query = Schema |> select([r], r.x <= 2) |> normalize
    assert all(query) == ~s{SELECT s0."x" <= 2 FROM "schema" AS s0}
    query = Schema |> select([r], r.x >= 2) |> normalize
    assert all(query) == ~s{SELECT s0."x" >= 2 FROM "schema" AS s0}
    query = Schema |> select([r], r.x < 2) |> normalize
    assert all(query) == ~s{SELECT s0."x" < 2 FROM "schema" AS s0}
    query = Schema |> select([r], r.x > 2) |> normalize
    assert all(query) == ~s{SELECT s0."x" > 2 FROM "schema" AS s0}
  end
  test "is_nil" do
    query = Schema |> select([r], is_nil(r.x)) |> normalize
    assert all(query) == ~s{SELECT s0."x" IS NULL FROM "schema" AS s0}
    query = Schema |> select([r], not is_nil(r.x)) |> normalize
    assert all(query) == ~s{SELECT NOT (s0."x" IS NULL) FROM "schema" AS s0}
  end
  # fragment/keyword-fragment rendering, literal encoding (booleans become
  # 1/0), and type casts to ClickHouse types.
  test "fragments" do
    query = Schema |> select([r], fragment("lcase(?)", r.x)) |> normalize
    assert all(query) == ~s{SELECT lcase(s0."x") FROM "schema" AS s0}
    query = Schema |> select([r], r.x) |> where([], fragment("? = \"query\\?\"", ^10)) |> normalize
    assert all(query) == ~s{SELECT s0."x" FROM "schema" AS s0 WHERE (? = \"query?\")}
    value = 13
    query = Schema |> select([r], fragment("lcase(?, ?)", r.x, ^value)) |> normalize
    assert all(query) == ~s{SELECT lcase(s0."x", ?) FROM "schema" AS s0}
    # Keyword-list fragments are not supported and must raise.
    query = Schema |> select([], fragment(title: 2)) |> normalize
    assert_raise Ecto.QueryError, fn ->
      all(query)
    end
  end
  test "literals" do
    query = "schema" |> where(foo: true) |> select([], true) |> normalize
    assert all(query) == ~s{SELECT 'TRUE' FROM "schema" AS s0 WHERE (s0."foo" = 1)}
    query = "schema" |> where(foo: false) |> select([], true) |> normalize
    assert all(query) == ~s{SELECT 'TRUE' FROM "schema" AS s0 WHERE (s0."foo" = 0)}
    query = "schema" |> where(foo: "abc") |> select([], true) |> normalize
    assert all(query) == ~s{SELECT 'TRUE' FROM "schema" AS s0 WHERE (s0."foo" = 'abc')}
    query = "schema" |> where(foo: 123) |> select([], true) |> normalize
    assert all(query) == ~s{SELECT 'TRUE' FROM "schema" AS s0 WHERE (s0."foo" = 123)}
    query = "schema" |> where(foo: 123.0) |> select([], true) |> normalize
    assert all(query) == ~s{SELECT 'TRUE' FROM "schema" AS s0 WHERE (s0."foo" = 123.0)}
  end
  test "tagged type" do
    query = Schema |> select([], type(^"601d74e4-a8d3-4b6e-8365-eddb4c893327", Ecto.UUID)) |> normalize
    assert all(query) == ~s{SELECT CAST(? AS FixedString(36)) FROM "schema" AS s0}
  end
  test "string type" do
    query = Schema |> select([], type(^"test", :string)) |> normalize
    assert all(query) == ~s{SELECT CAST(? AS String) FROM "schema" AS s0}
  end
  test "nested expressions" do
    z = 123
    query = from(r in Schema, []) |> select([r], r.x > 0 and (r.y > ^(-z)) or true) |> normalize
    assert all(query) == ~s{SELECT ((s0."x" > 0) AND (s0."y" > ?)) OR 1 FROM "schema" AS s0}
  end
  # IN-expression rendering (empty lists collapse to the always-false 0=1),
  # HAVING/OR HAVING, and GROUP BY.
  test "in expression" do
    query = Schema |> select([e], 1 in []) |> normalize
    assert all(query) == ~s{SELECT 0=1 FROM "schema" AS s0}
    query = Schema |> select([e], 1 in [1,e.x,3]) |> normalize
    assert all(query) == ~s{SELECT 1 IN (1,s0."x",3) FROM "schema" AS s0}
    query = Schema |> select([e], 1 in ^[]) |> normalize
    assert all(query) == ~s{SELECT 0=1 FROM "schema" AS s0}
    query = Schema |> select([e], 1 in ^[1, 2, 3]) |> normalize
    assert all(query) == ~s{SELECT 1 IN (?,?,?) FROM "schema" AS s0}
    query = Schema |> select([e], 1 in [1, ^2, 3]) |> normalize
    assert all(query) == ~s{SELECT 1 IN (1,?,3) FROM "schema" AS s0}
    query = Schema |> select([e], 1 in fragment("foo")) |> normalize
    assert all(query) == ~s{SELECT 1 = ANY(foo) FROM "schema" AS s0}
    query = Schema |> select([e], e.x == ^0 or e.x in ^[1, 2, 3] or e.x == ^4) |> normalize
    assert all(query) == ~s{SELECT ((s0."x" = ?) OR (s0."x" IN (?,?,?))) OR (s0."x" = ?) FROM "schema" AS s0}
  end
  test "having" do
    query = Schema |> having([p], p.x == p.x) |> select([p], p.x) |> normalize
    assert all(query) == ~s{SELECT s0."x" FROM "schema" AS s0 HAVING (s0."x" = s0."x")}
    query = Schema |> having([p], p.x == p.x) |> having([p], p.y == p.y) |> select([p], [p.y, p.x]) |> normalize
    assert all(query) == ~s{SELECT s0."y", s0."x" FROM "schema" AS s0 HAVING (s0."x" = s0."x") AND (s0."y" = s0."y")}
  end
  test "or_having" do
    query = Schema |> or_having([p], p.x == p.x) |> select([p], p.x) |> normalize
    assert all(query) == ~s{SELECT s0."x" FROM "schema" AS s0 HAVING (s0."x" = s0."x")}
    query = Schema |> or_having([p], p.x == p.x) |> or_having([p], p.y == p.y) |> select([p], [p.y, p.x]) |> normalize
    assert all(query) == ~s{SELECT s0."y", s0."x" FROM "schema" AS s0 HAVING (s0."x" = s0."x") OR (s0."y" = s0."y")}
  end
  test "group by" do
    query = Schema |> group_by([r], r.x) |> select([r], r.x) |> normalize
    assert all(query) == ~s{SELECT s0."x" FROM "schema" AS s0 GROUP BY s0."x"}
    query = Schema |> group_by([r], 2) |> select([r], r.x) |> normalize
    assert all(query) == ~s{SELECT s0."x" FROM "schema" AS s0 GROUP BY 2}
    query = Schema |> group_by([r], [r.x, r.y]) |> select([r], r.x) |> normalize
    assert all(query) == ~s{SELECT s0."x" FROM "schema" AS s0 GROUP BY s0."x", s0."y"}
    query = Schema |> group_by([r], []) |> select([r], r.x) |> normalize
    assert all(query) == ~s{SELECT s0."x" FROM "schema" AS s0}
  end
  # Placeholder (?) ordering across every clause, and INSERT statement
  # generation (missing values render as DEFAULT).
  test "interpolated values" do
    query = Schema
    |> select([m], {m.id, ^0})
    |> where([], fragment("?", ^true))
    |> where([], fragment("?", ^false))
    |> having([], fragment("?", ^true))
    |> having([], fragment("?", ^false))
    |> group_by([], fragment("?", ^1))
    |> group_by([], fragment("?", ^2))
    |> order_by([], fragment("?", ^3))
    |> order_by([], ^:x)
    |> limit([], ^4)
    |> offset([], ^5)
    |> normalize
    result =
      ~s{SELECT s0."id", ? FROM "schema" AS s0 } <>
      ~s{WHERE (?) AND (?) GROUP BY ?, ? HAVING (?) AND (?) } <>
      ~s{ORDER BY ?, s0."x" LIMIT ?, ?}
    assert all(query) == String.trim(result)
  end
  # Schema based
  test "insert" do
    query = insert(nil, "schema", [:x, :y], [[:x, :y]], {:raise, [], []}, [])
    assert query == ~s{INSERT INTO "schema" ("x","y") VALUES (?,?)}
    query = insert(nil, "schema", [:x, :y], [[:x, :y], [nil, :y]], {:raise, [], []}, [])
    assert query == ~s{INSERT INTO "schema" ("x","y") VALUES (?,?),(DEFAULT,?)}
    query = insert(nil, "schema", [], [[]], {:raise, [], []}, [])
    assert query == ~s{INSERT INTO "schema" () VALUES ()}
    query = insert("prefix", "schema", [], [[]], {:raise, [], []}, [])
    assert query == ~s{INSERT INTO "prefix"."schema" () VALUES ()}
  end
end
| 40.836066 | 132 | 0.553673 |
1c66b3a986e52a7713b60c35cbe2624724797f42 | 1,078 | exs | Elixir | clients/policy_analyzer/test/test_helper.exs | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/policy_analyzer/test/test_helper.exs | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/policy_analyzer/test/test_helper.exs | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
ExUnit.start()
# Shared helpers for PolicyAnalyzer tests.
defmodule GoogleApi.PolicyAnalyzer.TestHelper do
  # `use`-ing this module pulls in ExUnit.Case (forwarding options such as
  # `async: true`) and imports the helpers defined below.
  defmacro __using__(opts) do
    quote do
      use ExUnit.Case, unquote(opts)
      import GoogleApi.PolicyAnalyzer.TestHelper
    end
  end
  # Fetches an OAuth2 access token for the given scope(s) via Goth;
  # a list of scopes is joined into a single space-separated string.
  def for_scope(scopes) when is_list(scopes), do: for_scope(Enum.join(scopes, " "))
  def for_scope(scope) do
    {:ok, token} = Goth.Token.for_scope(scope)
    token.token
  end
end
| 29.944444 | 83 | 0.742115 |
1c66b93c4678984125df10665d8ee2acb3338382 | 1,126 | exs | Elixir | clients/ad_sense/mix.exs | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/ad_sense/mix.exs | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/ad_sense/mix.exs | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | defmodule GoogleApi.AdSense.V14.Mixfile do
use Mix.Project
@version "0.2.0"
  # Mix project definition for the generated AdSense API client.
  def project do
    [app: :google_api_ad_sense,
     version: @version,
     elixir: "~> 1.4",
     build_embedded: Mix.env == :prod,
     start_permanent: Mix.env == :prod,
     description: description(),
     package: package(),
     deps: deps(),
     source_url: "https://github.com/GoogleCloudPlatform/elixir-google-api/tree/master/clients/ad_sense"
    ]
  end
def application() do
[extra_applications: [:logger]]
end
  # Runtime dependency is google_gax; ex_doc is dev-only for docs generation.
  defp deps() do
    [
      {:google_gax, "~> 0.1.0"},
      {:ex_doc, "~> 0.16", only: :dev}
    ]
  end
  # One-line Hex package description (heredoc keeps a trailing newline).
  defp description() do
    """
    Accesses AdSense publishers' inventory and generates performance reports.
    """
  end
  # Hex package metadata: shipped files, maintainers, license and links.
  defp package() do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE"],
      maintainers: ["Jeff Ching"],
      licenses: ["Apache 2.0"],
      links: %{
        "GitHub" => "https://github.com/GoogleCloudPlatform/elixir-google-api/tree/master/clients/ad_sense",
        "Homepage" => "https://developers.google.com/adsense/management/"
      }
    ]
  end
end
| 23.458333 | 108 | 0.604796 |
1c66bf974a807bfb1ac0c4fd48f52b7ca09f35be | 4,529 | ex | Elixir | apps/ewallet/lib/ewallet/web/v1/event_handlers/transaction_consumption_event_handler.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/ewallet/lib/ewallet/web/v1/event_handlers/transaction_consumption_event_handler.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/ewallet/lib/ewallet/web/v1/event_handlers/transaction_consumption_event_handler.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWallet.Web.V1.TransactionConsumptionEventHandler do
  @moduledoc """
  Builds and broadcasts transaction consumption websocket events:
  `"transaction_consumption_request"` and `"transaction_consumption_finalized"`.
  """
  alias EWallet.Web.Orchestrator
  alias EWallet.Web.V1.{Event, TransactionConsumptionSerializer, TransactionConsumptionOverlay}
  alias Utils.Helpers.Assoc
  alias EWalletDB.{Helpers.Preloader, TransactionConsumption}
  @spec broadcast(atom(), %{:consumption => %TransactionConsumption{}}) ::
          :ok | {:error, :unhandled_event}
  # Broadcasts a "transaction_consumption_request" event to the address,
  # transaction-request, user and account topics derived from the
  # consumption's transaction request.
  def broadcast(:transaction_consumption_request, %{consumption: consumption}) do
    # Preload/serialize associations according to the overlay definition.
    {:ok, consumption} = Orchestrator.one(consumption, TransactionConsumptionOverlay)
    topics =
      []
      |> Event.address_topic(Assoc.get(consumption, [:transaction_request, :wallet_address]))
      |> Event.transaction_request_topic(Assoc.get(consumption, [:transaction_request, :id]))
      |> Event.user_topic(Assoc.get(consumption, [:transaction_request, :user, :id]))
      |> Event.account_topic(Assoc.get(consumption, [:transaction_request, :account, :id]))
    Event.broadcast(
      event: "transaction_consumption_request",
      topics: topics,
      payload: %{
        status: :ok,
        data: TransactionConsumptionSerializer.serialize(consumption)
      }
    )
  end
  # Broadcasts the finalization outcome (success or error payload).
  def broadcast(:transaction_consumption_finalized, %{consumption: consumption}) do
    broadcast_change("transaction_consumption_finalized", consumption)
  end
  # Any other event name is rejected.
  def broadcast(_, _), do: {:error, :unhandled_event}
  # Broadcasts `event` for `consumption` on every relevant topic. Topics are
  # deduplicated via add_topic_if_different/4 so that a topic shared by the
  # request and the consumption (same wallet/user/account) is only added once.
  defp broadcast_change(event, consumption) do
    {:ok, consumption} = Orchestrator.one(consumption, TransactionConsumptionOverlay)
    transaction_request = consumption.transaction_request
    request_user_id = Assoc.get(transaction_request, [:user, :id])
    consumption_user_id = Assoc.get(consumption, [:user, :id])
    request_account_id = Assoc.get(transaction_request, [:account, :id])
    consumption_account_id = Assoc.get(consumption, [:account, :id])
    topics =
      []
      |> Event.transaction_request_topic(Assoc.get(consumption, [:transaction_request, :id]))
      |> Event.transaction_consumption_topic(consumption.id)
      |> add_topic_if_different(
        transaction_request.wallet_address,
        consumption.wallet_address,
        &Event.address_topic/2
      )
      |> add_topic_if_different(request_user_id, consumption_user_id, &Event.user_topic/2)
      |> add_topic_if_different(
        request_account_id,
        consumption_account_id,
        &Event.account_topic/2
      )
    Event.broadcast(
      event: event,
      topics: topics,
      payload: payload(consumption)
    )
  end
  # Equal values -> a single topic; different values -> one topic for each.
  defp add_topic_if_different(topics, value_1, value_2, topic_fun) when value_1 == value_2 do
    topic_fun.(topics, value_1)
  end
  defp add_topic_if_different(topics, value_1, value_2, topic_fun) do
    topics
    |> topic_fun.(value_1)
    |> topic_fun.(value_2)
  end
  # Builds the event payload: `:ok` with serialized data on success, otherwise
  # an `:error` payload that also carries the failure reason from error_code/1.
  defp payload(consumption) do
    case TransactionConsumption.success?(consumption) do
      true ->
        %{
          status: :ok,
          data: TransactionConsumptionSerializer.serialize(consumption)
        }
      false ->
        %{
          status: :error,
          error: error_code(consumption),
          data: TransactionConsumptionSerializer.serialize(consumption)
        }
    end
  end
  # Maps a non-successful consumption to an error reason. For "failed", the
  # error details come from the consumption itself, falling back to its
  # transaction. NOTE(review): statuses other than "failed"/"expired"/"pending"
  # would raise CaseClauseError here — presumably unreachable for
  # non-successful consumptions; confirm against TransactionConsumption.
  defp error_code(consumption) do
    consumption = Preloader.preload(consumption, :transaction)
    case consumption.status do
      "failed" ->
        %{
          code: consumption.error_code || Assoc.get(consumption, [:transaction, :error_code]),
          description:
            consumption.error_description ||
              Assoc.get(consumption, [:transaction, :error_description]) ||
              Assoc.get(consumption, [:transaction, :error_data])
        }
      "expired" ->
        :expired_transaction_consumption
      "pending" ->
        :unfinalized_transaction_consumption
    end
  end
end
| 34.572519 | 95 | 0.700155 |
1c66de03d106f37d5d55dbdd7c39ea8bca148f2a | 9,400 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/api/budget.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/api/budget.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/api/budget.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdExchangeBuyer.V14.Api.Budget do
  @moduledoc """
  API calls for all endpoints tagged `Budget`.
  """
  alias GoogleApi.AdExchangeBuyer.V14.Connection
  alias GoogleApi.Gax.{Request, Response}
  @library_version Mix.Project.config() |> Keyword.get(:version, "")

  # Query-string options accepted by every endpoint in this module. Endpoints
  # with a request body (patch/update) add `:body => :body` on top of these.
  @query_optional_params %{
    :alt => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :userIp => :query
  }

  @doc """
  Returns the budget information for the adgroup specified by the accountId and billingId.
  ## Parameters
  * `connection` (*type:* `GoogleApi.AdExchangeBuyer.V14.Connection.t`) - Connection to server
  * `account_id` (*type:* `String.t`) - The account id to get the budget information for.
  * `billing_id` (*type:* `String.t`) - The billing id to get the budget information for.
  * `optional_params` (*type:* `keyword()`) - Optional parameters
      * `:alt` (*type:* `String.t`) - Data format for the response.
      * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      * `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      * `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
  * `opts` (*type:* `keyword()`) - Call options
  ## Returns
  * `{:ok, %GoogleApi.AdExchangeBuyer.V14.Model.Budget{}}` on success
  * `{:error, info}` on failure
  """
  @spec adexchangebuyer_budget_get(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.AdExchangeBuyer.V14.Model.Budget.t()}
          | {:ok, Tesla.Env.t()}
          | {:ok, list()}
          | {:error, any()}
  def adexchangebuyer_budget_get(
        connection,
        account_id,
        billing_id,
        optional_params \\ [],
        opts \\ []
      ) do
    budget_request(
      connection,
      :get,
      account_id,
      billing_id,
      @query_optional_params,
      optional_params,
      opts
    )
  end

  @doc """
  Updates the budget amount for the budget of the adgroup specified by the accountId and billingId, with the budget amount in the request. This method supports patch semantics.
  ## Parameters
  * `connection` (*type:* `GoogleApi.AdExchangeBuyer.V14.Connection.t`) - Connection to server
  * `account_id` (*type:* `String.t`) - The account id associated with the budget being updated.
  * `billing_id` (*type:* `String.t`) - The billing id associated with the budget being updated.
  * `optional_params` (*type:* `keyword()`) - Optional parameters
      * `:alt` (*type:* `String.t`) - Data format for the response.
      * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      * `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      * `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      * `:body` (*type:* `GoogleApi.AdExchangeBuyer.V14.Model.Budget.t`) -
  * `opts` (*type:* `keyword()`) - Call options
  ## Returns
  * `{:ok, %GoogleApi.AdExchangeBuyer.V14.Model.Budget{}}` on success
  * `{:error, info}` on failure
  """
  @spec adexchangebuyer_budget_patch(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.AdExchangeBuyer.V14.Model.Budget.t()}
          | {:ok, Tesla.Env.t()}
          | {:ok, list()}
          | {:error, any()}
  def adexchangebuyer_budget_patch(
        connection,
        account_id,
        billing_id,
        optional_params \\ [],
        opts \\ []
      ) do
    budget_request(
      connection,
      :patch,
      account_id,
      billing_id,
      Map.put(@query_optional_params, :body, :body),
      optional_params,
      opts
    )
  end

  @doc """
  Updates the budget amount for the budget of the adgroup specified by the accountId and billingId, with the budget amount in the request.
  ## Parameters
  * `connection` (*type:* `GoogleApi.AdExchangeBuyer.V14.Connection.t`) - Connection to server
  * `account_id` (*type:* `String.t`) - The account id associated with the budget being updated.
  * `billing_id` (*type:* `String.t`) - The billing id associated with the budget being updated.
  * `optional_params` (*type:* `keyword()`) - Optional parameters
      * `:alt` (*type:* `String.t`) - Data format for the response.
      * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      * `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      * `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      * `:body` (*type:* `GoogleApi.AdExchangeBuyer.V14.Model.Budget.t`) -
  * `opts` (*type:* `keyword()`) - Call options
  ## Returns
  * `{:ok, %GoogleApi.AdExchangeBuyer.V14.Model.Budget{}}` on success
  * `{:error, info}` on failure
  """
  @spec adexchangebuyer_budget_update(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.AdExchangeBuyer.V14.Model.Budget.t()}
          | {:ok, Tesla.Env.t()}
          | {:ok, list()}
          | {:error, any()}
  def adexchangebuyer_budget_update(
        connection,
        account_id,
        billing_id,
        optional_params \\ [],
        opts \\ []
      ) do
    budget_request(
      connection,
      :put,
      account_id,
      billing_id,
      Map.put(@query_optional_params, :body, :body),
      optional_params,
      opts
    )
  end

  # Builds and executes a request against /billinginfo/{accountId}/{billingId}
  # and decodes the response into a Budget model. All three public endpoints
  # share this path and decoding logic; only the HTTP method and the accepted
  # optional-parameter set differ.
  defp budget_request(
         connection,
         method,
         account_id,
         billing_id,
         optional_params_config,
         optional_params,
         opts
       ) do
    request =
      Request.new()
      |> Request.method(method)
      |> Request.url("/billinginfo/{accountId}/{billingId}", %{
        "accountId" => URI.encode(account_id, &URI.char_unreserved?/1),
        "billingId" => URI.encode(billing_id, &(URI.char_unreserved?(&1) || &1 == ?/))
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.AdExchangeBuyer.V14.Model.Budget{}])
  end
end
| 40.869565 | 187 | 0.62234 |
1c66f8f97c240841251731f8451cdfef7fa39fd8 | 1,744 | ex | Elixir | lib/limit_formatter.ex | LostKobrakai/extrace | cb569b491daa3ac2307b73b1b4b28943b49f65f7 | [
"Apache-2.0",
"BSD-3-Clause"
] | 33 | 2019-03-01T11:46:27.000Z | 2022-01-27T01:44:08.000Z | lib/limit_formatter.ex | LostKobrakai/extrace | cb569b491daa3ac2307b73b1b4b28943b49f65f7 | [
"Apache-2.0",
"BSD-3-Clause"
] | 17 | 2019-04-03T21:15:04.000Z | 2022-03-23T22:06:46.000Z | lib/limit_formatter.ex | LostKobrakai/extrace | cb569b491daa3ac2307b73b1b4b28943b49f65f7 | [
"Apache-2.0",
"BSD-3-Clause"
] | 4 | 2020-07-14T04:34:05.000Z | 2021-11-02T12:48:15.000Z | defmodule Extrace.LimitFormatter do
@moduledoc """
This module handles formatting `Map` & `Struct`.
more details can be found `:recon_map`.
"""
import Inspect.Algebra
alias Code.Identifier
@doc """
Formatting & Trimming output to selected fields.
"""
def limit_inspect(term, opts) when is_map(term) do
case term do
%module{} ->
# struct data
case process_map(term) do
{_, term} ->
Inspect.Any.inspect(term, Identifier.inspect_as_atom(module), opts)
_ ->
Inspect.inspect(term, opts)
end
_ ->
# map data
term
|> process_map()
|> inspect_limited_map(opts)
end
end
def limit_inspect(term, opts) do
Inspect.inspect(term, opts)
end
defp process_map(old_term) do
with true <- :recon_map.is_active(),
{label, term} <- :recon_map.process_map(old_term),
true <- Map.keys(old_term) != Map.keys(term) do
{label, term}
else
_ ->
old_term
end
end
defp inspect_limited_map({_label, map}, opts) do
opts = %{opts | limit: min(opts.limit, map_size(map))}
map = Map.to_list(map)
open = color("%{", :map, opts)
sep = color(",", :map, opts)
close = color("}", :map, opts)
traverse_fun =
if Inspect.List.keyword?(map) do
&Inspect.List.keyword/2
else
sep = color(" => ", :map, opts)
fn {key, value}, opts ->
concat(concat(to_doc(key, opts), sep), to_doc(value, opts))
end
end
container_doc(open, map ++ ["..."], close, opts, traverse_fun, separator: sep, break: :strict)
end
defp inspect_limited_map(map, opts) do
Inspect.Map.inspect(map, opts)
end
end
| 24.222222 | 98 | 0.584289 |
1c674b022523b6b4e066ec4b6c6afeb252e58a8b | 335 | ex | Elixir | backend/lib/windshield/mailer.ex | EOS-CSX/windshield-docker | 77769dfd16a0b2da09fef2a62b5d393647f0aa6b | [
"MIT"
] | 19 | 2018-06-07T23:28:34.000Z | 2019-04-23T18:57:43.000Z | backend/lib/windshield/mailer.ex | EOS-CSX/windshield-docker | 77769dfd16a0b2da09fef2a62b5d393647f0aa6b | [
"MIT"
] | 27 | 2018-06-10T14:38:55.000Z | 2018-10-10T23:28:43.000Z | backend/lib/windshield/mailer.ex | EOS-CSX/windshield-docker | 77769dfd16a0b2da09fef2a62b5d393647f0aa6b | [
"MIT"
] | 7 | 2018-06-08T01:38:46.000Z | 2022-01-12T18:25:34.000Z | defmodule Windshield.Mailer do
@moduledoc "Mailing Helpers"
use Bamboo.Mailer, otp_app: :windshield
def sender do
:windshield
|> Application.get_env(__MODULE__)
|> Keyword.get(:sender_email)
end
def recipients do
:windshield
|> Application.get_env(__MODULE__)
|> Keyword.get(:recipients)
end
end
| 18.611111 | 41 | 0.701493 |
1c6798a361d79ad9145b5399c967cc1225d6aead | 90 | ex | Elixir | lib/structs/paged_pins.ex | MitjaBezensek/pinterex | 2c278212f17227d64c7402e65f0dad6563dea49f | [
"MIT"
] | 1 | 2018-01-29T17:52:47.000Z | 2018-01-29T17:52:47.000Z | lib/structs/paged_pins.ex | MitjaBezensek/pinterex | 2c278212f17227d64c7402e65f0dad6563dea49f | [
"MIT"
] | null | null | null | lib/structs/paged_pins.ex | MitjaBezensek/pinterex | 2c278212f17227d64c7402e65f0dad6563dea49f | [
"MIT"
] | null | null | null | defmodule Pinterex.Structs.PagedPins do
defstruct pins: nil, cursor: nil, next: nil
end
| 22.5 | 45 | 0.777778 |
1c67c182c3623ade6c8d06d35d9a8630dfe2fdd0 | 9,306 | ex | Elixir | lib/stripe/api.ex | roguesherlock/stripity_stripe | cc665f1b7751c71b5519acb46d8a6baf68f9e235 | [
"BSD-3-Clause"
] | null | null | null | lib/stripe/api.ex | roguesherlock/stripity_stripe | cc665f1b7751c71b5519acb46d8a6baf68f9e235 | [
"BSD-3-Clause"
] | null | null | null | lib/stripe/api.ex | roguesherlock/stripity_stripe | cc665f1b7751c71b5519acb46d8a6baf68f9e235 | [
"BSD-3-Clause"
] | null | null | null | defmodule Stripe.API do
@moduledoc """
Low-level utilities for interacting with the Stripe API.
Usually the utilities in `Stripe.Request` are a better way to write custom interactions with
the API.
"""
alias Stripe.{Config, Error}
@callback oauth_request(method, String.t(), map) :: {:ok, map}
@type method :: :get | :post | :put | :delete | :patch
@type headers :: %{String.t() => String.t()} | %{}
@type body :: iodata() | {:multipart, list()}
@typep http_success :: {:ok, integer, [{String.t(), String.t()}], String.t()}
@typep http_failure :: {:error, term}
@pool_name __MODULE__
@api_version "2018-11-08"
@doc """
In config.exs your implicit or expicit configuration is:
config :stripity_stripe,
json_library: Jason # defaults to Poison but can be configured to Jason
"""
@spec json_library() :: module
def json_library() do
Config.resolve(:json_library, Poison)
end
def supervisor_children do
if use_pool?() do
[:hackney_pool.child_spec(@pool_name, get_pool_options())]
else
[]
end
end
@spec get_pool_options() :: Keyword.t()
defp get_pool_options() do
Config.resolve(:pool_options)
end
@spec get_base_url() :: String.t()
defp get_base_url() do
Config.resolve(:api_base_url)
end
@spec get_upload_url() :: String.t()
defp get_upload_url() do
Config.resolve(:api_upload_url)
end
@spec get_default_api_key() :: String.t()
defp get_default_api_key() do
# if no API key is set default to `""` which will raise a Stripe API error
Config.resolve(:api_key, "")
end
@spec use_pool?() :: boolean
defp use_pool?() do
Config.resolve(:use_connection_pool)
end
@spec http_module() :: module
defp http_module() do
Config.resolve(:http_module, :hackney)
end
@spec add_common_headers(headers) :: headers
defp add_common_headers(existing_headers) do
Map.merge(existing_headers, %{
"Accept" => "application/json; charset=utf8",
"Accept-Encoding" => "gzip",
"Connection" => "keep-alive",
"User-Agent" => "Stripe/v1 stripity-stripe/#{@api_version}",
"Stripe-Version" => @api_version
})
end
@spec add_default_headers(headers) :: headers
defp add_default_headers(existing_headers) do
existing_headers = add_common_headers(existing_headers)
case Map.has_key?(existing_headers, "Content-Type") do
false -> existing_headers |> Map.put("Content-Type", "application/x-www-form-urlencoded")
true -> existing_headers
end
end
@spec add_multipart_form_headers(headers) :: headers
defp add_multipart_form_headers(existing_headers) do
existing_headers
|> Map.put("Content-Type", "multipart/form-data")
end
@spec add_auth_header(headers, String.t() | nil) :: headers
defp add_auth_header(existing_headers, api_key) do
api_key = fetch_api_key(api_key)
Map.put(existing_headers, "Authorization", "Bearer #{api_key}")
end
@spec fetch_api_key(String.t() | nil) :: String.t()
defp fetch_api_key(api_key) do
case api_key do
key when is_binary(key) -> key
_ -> get_default_api_key()
end
end
@spec add_connect_header(headers, String.t() | nil) :: headers
defp add_connect_header(existing_headers, nil), do: existing_headers
defp add_connect_header(existing_headers, account_id) do
Map.put(existing_headers, "Stripe-Account", account_id)
end
@spec add_default_options(list) :: list
defp add_default_options(opts) do
[:with_body | opts]
end
@spec add_pool_option(list) :: list
defp add_pool_option(opts) do
if use_pool?() do
[{:pool, @pool_name} | opts]
else
opts
end
end
@doc """
A low level utility function to make a direct request to the Stripe API
## Connect Accounts
If you'd like to make a request on behalf of another Stripe account
utilizing the Connect program, you can pass the other Stripe account's
ID to the request function as follows:
request(%{}, :get, "/customers", %{}, connect_account: "acc_134151")
"""
@spec request(body, method, String.t(), headers, list) ::
{:ok, map} | {:error, Stripe.Error.t()}
def request(body, :get, endpoint, headers, opts) do
{expansion, opts} = Keyword.pop(opts, :expand)
base_url = get_base_url()
req_url =
body
|> Stripe.Util.map_keys_to_atoms()
|> add_object_expansion(expansion)
|> Stripe.URI.encode_query()
|> prepend_url("#{base_url}#{endpoint}")
perform_request(req_url, :get, "", headers, opts)
end
def request(body, method, endpoint, headers, opts) do
{expansion, opts} = Keyword.pop(opts, :expand)
{idempotency_key, opts} = Keyword.pop(opts, :idempotency_key)
base_url = get_base_url()
req_url = add_object_expansion("#{base_url}#{endpoint}", expansion)
headers = add_idempotency_header(idempotency_key, headers, method)
req_body =
body
|> Stripe.Util.map_keys_to_atoms()
|> Stripe.URI.encode_query()
perform_request(req_url, method, req_body, headers, opts)
end
@doc """
A low level utility function to make a direct request to the files Stripe API
"""
@spec request_file_upload(body, method, String.t(), headers, list) ::
{:ok, map} | {:error, Stripe.Error.t()}
def request_file_upload(body, :post, endpoint, headers, opts) do
base_url = get_upload_url()
req_url = base_url <> endpoint
req_headers =
headers
|> add_multipart_form_headers()
parts =
body
|> Enum.map(fn {key, value} ->
{Stripe.Util.multipart_key(key), value}
end)
perform_request(req_url, :post, {:multipart, parts}, req_headers, opts)
end
def request_file_upload(body, method, endpoint, headers, opts) do
base_url = get_upload_url()
req_url = base_url <> endpoint
req_body =
body
|> Stripe.Util.map_keys_to_atoms()
|> Stripe.URI.encode_query()
perform_request(req_url, method, req_body, headers, opts)
end
@doc """
A low level utility function to make an OAuth request to the Stripe API
"""
@spec oauth_request(method, String.t(), map) :: {:ok, map} | {:error, Stripe.Error.t()}
def oauth_request(method, endpoint, body) do
base_url = "https://connect.stripe.com/oauth/"
req_url = base_url <> endpoint
req_body = Stripe.URI.encode_query(body)
req_headers =
%{}
|> add_default_headers()
|> Map.to_list()
req_opts =
[]
|> add_default_options()
|> add_pool_option()
http_module().request(method, req_url, req_headers, req_body, req_opts)
|> handle_response()
end
@spec perform_request(String.t(), method, body, headers, list) ::
{:ok, map} | {:error, Stripe.Error.t()}
defp perform_request(req_url, method, body, headers, opts) do
{connect_account_id, opts} = Keyword.pop(opts, :connect_account)
{api_key, opts} = Keyword.pop(opts, :api_key)
req_headers =
headers
|> add_default_headers()
|> add_auth_header(api_key)
|> add_connect_header(connect_account_id)
|> Map.to_list()
req_opts =
opts
|> add_default_options()
|> add_pool_option()
http_module().request(method, req_url, req_headers, body, req_opts)
|> handle_response()
end
@spec handle_response(http_success | http_failure) :: {:ok, map} | {:error, Stripe.Error.t()}
defp handle_response({:ok, status, headers, body}) when status >= 200 and status <= 299 do
decoded_body =
body
|> decompress_body(headers)
|> json_library().decode!()
{:ok, decoded_body}
end
defp handle_response({:ok, status, headers, body}) when status >= 300 and status <= 599 do
request_id = headers |> List.keyfind("Request-Id", 0)
error =
case json_library().decode(body) do
{:ok, %{"error_description" => _} = api_error} ->
Error.from_stripe_error(status, api_error, request_id)
{:ok, %{"error" => api_error}} ->
Error.from_stripe_error(status, api_error, request_id)
{:error, _} ->
# e.g. if the body is empty
Error.from_stripe_error(status, nil, request_id)
end
{:error, error}
end
defp handle_response({:error, reason}) do
error = Error.from_hackney_error(reason)
{:error, error}
end
defp decompress_body(body, headers) do
headers_dict = :hackney_headers.new(headers)
case :hackney_headers.get_value("Content-Encoding", headers_dict) do
"gzip" -> :zlib.gunzip(body)
"deflate" -> :zlib.unzip(body)
_ -> body
end
end
defp prepend_url("", url), do: url
defp prepend_url(query, url), do: "#{url}?#{query}"
defp add_object_expansion(query, expansion) when is_map(query) and is_list(expansion) do
query |> Map.put(:expand, expansion)
end
defp add_object_expansion(url, expansion) when is_list(expansion) do
expansion
|> Enum.map(&"expand[]=#{&1}")
|> Enum.join("&")
|> prepend_url(url)
end
defp add_object_expansion(url, _), do: url
defp add_idempotency_header(nil, headers, _), do: headers
defp add_idempotency_header(idempotency_key, headers, :post) do
Map.put(headers, "Idempotency-Key", idempotency_key)
end
defp add_idempotency_header(_, headers, _), do: headers
end
| 28.811146 | 95 | 0.663335 |
1c6804a1136216f0b390d96b3fe57719674548b1 | 1,265 | ex | Elixir | lib/phoenix_sample_web/router.ex | TehSnappy/phoenix_sample | e43b32c754bb64f94e3f8a3348395241c3bf7317 | [
"Apache-2.0"
] | 1 | 2021-03-07T06:56:35.000Z | 2021-03-07T06:56:35.000Z | lib/phoenix_sample_web/router.ex | TehSnappy/phoenix_sample | e43b32c754bb64f94e3f8a3348395241c3bf7317 | [
"Apache-2.0"
] | null | null | null | lib/phoenix_sample_web/router.ex | TehSnappy/phoenix_sample | e43b32c754bb64f94e3f8a3348395241c3bf7317 | [
"Apache-2.0"
] | 2 | 2020-11-29T05:56:44.000Z | 2021-07-23T10:22:52.000Z | defmodule PhoenixSampleWeb.Router do
use PhoenixSampleWeb, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_live_flash
plug :put_root_layout, {PhoenixSampleWeb.LayoutView, :root}
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", PhoenixSampleWeb do
pipe_through :browser
live "/", PageLive, :index
end
# Other scopes may use custom stacks.
# scope "/api", PhoenixSampleWeb do
# pipe_through :api
# end
# Enables LiveDashboard only for development
#
# If you want to use the LiveDashboard in production, you should put
# it behind authentication and allow only admins to access it.
# If your application does not have an admins-only section yet,
# you can use Plug.BasicAuth to set up some basic authentication
# as long as you are also using SSL (which you should anyway).
# enabling this for prod without auth is a bad idea. but this is just a flashy sample
if Mix.env() in [:dev, :test, :prod] do
import Phoenix.LiveDashboard.Router
scope "/" do
pipe_through :browser
live_dashboard "/dashboard", metrics: PhoenixSampleWeb.Telemetry
end
end
end
| 27.5 | 87 | 0.711462 |
1c680f9f253597923aba25da78865f5e4d3dd0f0 | 1,515 | ex | Elixir | lib/ex_permissions.ex | mtwilliams/ex_permissions | 1842f27f41b5d404d2e2409ad3c421de08970843 | [
"Unlicense"
] | null | null | null | lib/ex_permissions.ex | mtwilliams/ex_permissions | 1842f27f41b5d404d2e2409ad3c421de08970843 | [
"Unlicense"
] | 3 | 2015-05-03T05:15:56.000Z | 2015-05-03T05:20:02.000Z | lib/ex_permissions.ex | mtwilliams/ex_permissions | 1842f27f41b5d404d2e2409ad3c421de08970843 | [
"Unlicense"
] | null | null | null | # ===----------------------------------------------------------------------=== #
#
# ExPermissions
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# For more information, please refer to <http://unlicense.org/>
#
# ===----------------------------------------------------------------------=== #
defmodule ExPermissions do
  @moduledoc """
  Top-level module for the ExPermissions library.

  Currently an empty placeholder — no functions or types are defined yet.
  """
end
| 42.083333 | 80 | 0.673927 |
1c681b68d7cfedfc098370b74de57cd05883e030 | 851 | ex | Elixir | test/support/mock_socket.ex | adamvaughan/memcachir | dc7bede81384d936c1209c3b8e7729b4f78c1e22 | [
"MIT"
] | 6 | 2017-08-08T10:53:50.000Z | 2021-06-03T13:31:16.000Z | test/support/mock_socket.ex | adamvaughan/memcachir | dc7bede81384d936c1209c3b8e7729b4f78c1e22 | [
"MIT"
] | 13 | 2017-08-03T19:44:49.000Z | 2021-08-23T15:41:35.000Z | test/support/mock_socket.ex | adamvaughan/memcachir | dc7bede81384d936c1209c3b8e7729b4f78c1e22 | [
"MIT"
] | 10 | 2017-08-03T19:26:14.000Z | 2021-09-30T15:58:27.000Z | defmodule MockSocketModule do
def start_link() do
Agent.start_link(fn -> [] end, name: __MODULE__)
end
def connect(_host, _port, _timeout) do
case get() do
[] -> {:error, :econnrefused}
_ -> {:ok, :socket}
end
end
def close(_), do: :ok
def send_and_recv(_socket, command, _timeout) do
case command do
"version\n" ->
{:ok, "VERSION 1.4.14\r\n"}
"config get cluster\n" ->
servers = get() |> Enum.join(" ")
{:ok,
"CONFIG cluster 0 #{String.length(servers)}\r\n1\n#{servers}\n\r\nEND\r\n"}
end
end
def get() do
Agent.get(__MODULE__, fn servers -> servers end)
end
def update(servers) do
Agent.update(__MODULE__, fn _ -> servers end)
send(Memcachir.Cluster, :health_check)
# wait for it to be picked up
Process.sleep(500)
end
end
| 21.820513 | 84 | 0.596945 |
1c6838573b35ee11adf3cc5bd44cd8e4e39ffd98 | 13,220 | ex | Elixir | lib/websockex/frame.ex | valiot/websockex | 569392dc1262395463ed97135e34ea519955ba93 | [
"MIT"
] | null | null | null | lib/websockex/frame.ex | valiot/websockex | 569392dc1262395463ed97135e34ea519955ba93 | [
"MIT"
] | null | null | null | lib/websockex/frame.ex | valiot/websockex | 569392dc1262395463ed97135e34ea519955ba93 | [
"MIT"
] | null | null | null | defmodule WebSockex.Frame do
@moduledoc """
Functions for parsing and encoding frames.
"""
@otp_vesion System.otp_release() |> Integer.parse() |> elem(0)
@type opcode :: :text | :binary | :close | :ping | :pong
@type close_code :: 1000..4999
@typedoc "The incomplete or unhandled remainder of a binary"
@type buffer :: bitstring
@typedoc "This is required to be valid UTF-8"
@type utf8 :: binary
@type frame ::
:ping
| :pong
| :close
| {:ping, binary}
| {:pong, binary}
| {:close, close_code, utf8}
| {:text, utf8}
| {:binary, binary}
| {:fragment, :text | :binary, binary}
| {:continuation, binary}
| {:finish, binary}
@opcodes %{continuation: 0, text: 1, binary: 2, close: 8, ping: 9, pong: 10}
@doc """
Parses a bitstring and returns a frame.
"""
@spec parse_frame(bitstring) ::
:incomplete | {:ok, frame, buffer} | {:error, %WebSockex.FrameError{}}
def parse_frame(data) when bit_size(data) < 16 do
:incomplete
end
  # Compile-time generation: one clause set per control opcode
  # (:close, :ping, :pong).
  for {key, opcode} <- Map.take(@opcodes, [:close, :ping, :pong]) do
    # Control Codes can have 0 length payloads
    def parse_frame(<<1::1, 0::3, unquote(opcode)::4, 0::1, 0::7, buffer::bitstring>>) do
      {:ok, unquote(key), buffer}
    end
    # Large Control Frames
    # A 7-bit length of 126 or 127 signals an extended payload length, which
    # control frames are not allowed to carry.
    def parse_frame(<<1::1, 0::3, unquote(opcode)::4, 0::1, 126::7, _::bitstring>> = buffer) do
      {:error,
       %WebSockex.FrameError{
         reason: :control_frame_too_large,
         opcode: unquote(key),
         buffer: buffer
       }}
    end
    def parse_frame(<<1::1, 0::3, unquote(opcode)::4, 0::1, 127::7, _::bitstring>> = buffer) do
      {:error,
       %WebSockex.FrameError{
         reason: :control_frame_too_large,
         opcode: unquote(key),
         buffer: buffer
       }}
    end
    # Nonfin Control Frames
    # Control frames must not be fragmented, i.e. the FIN bit must be set.
    def parse_frame(<<0::1, 0::3, unquote(opcode)::4, 0::1, _::7, _::bitstring>> = buffer) do
      {:error,
       %WebSockex.FrameError{reason: :nonfin_control_frame, opcode: unquote(key), buffer: buffer}}
    end
  end
  # Incomplete Frames
  # Generic check for any opcode: the advertised 7-bit payload length exceeds
  # what we have buffered so far, so wait for more data.
  def parse_frame(<<_::9, len::7, remaining::bitstring>>) when byte_size(remaining) < len do
    :incomplete
  end
  # Same check for the 16-bit (len marker 126) and 64-bit (len marker 127)
  # extended payload lengths of data frames.
  for {_key, opcode} <- Map.take(@opcodes, [:continuation, :text, :binary]) do
    def parse_frame(
          <<_::1, 0::3, unquote(opcode)::4, 0::1, 126::7, len::16, remaining::bitstring>>
        )
        when byte_size(remaining) < len do
      :incomplete
    end
    def parse_frame(
          <<_::1, 0::3, unquote(opcode)::4, 0::1, 127::7, len::64, remaining::bitstring>>
        )
        when byte_size(remaining) < len do
      :incomplete
    end
  end
  # Close Frame with Single Byte
  # A close payload is either empty or starts with a 2-byte status code,
  # so a 1-byte payload can never be valid.
  def parse_frame(<<1::1, 0::3, 8::4, 0::1, 1::7, _::bitstring>> = buffer) do
    {:error,
     %WebSockex.FrameError{
       reason: :close_with_single_byte_payload,
       opcode: :close,
       buffer: buffer
     }}
  end
  # Parse Close Frames with Payloads
  # The first two payload bytes are the status code; the remainder must be
  # valid UTF-8 reason text.
  def parse_frame(
        <<1::1, 0::3, 8::4, 0::1, len::7, close_code::integer-size(16), remaining::bitstring>> =
          buffer
      )
      when close_code in 1000..4999 do
    # Payload bytes left after the 2-byte status code.
    size = len - 2
    <<payload::bytes-size(size), rest::bitstring>> = remaining
    if String.valid?(payload) do
      {:ok, {:close, close_code, payload}, rest}
    else
      {:error, %WebSockex.FrameError{reason: :invalid_utf8, opcode: :close, buffer: buffer}}
    end
  end
  # A close frame that failed the 1000..4999 guard above has an invalid code.
  def parse_frame(<<1::1, 0::3, 8::4, _::bitstring>> = buffer) do
    {:error, %WebSockex.FrameError{reason: :invalid_close_code, opcode: :close, buffer: buffer}}
  end
  # Ping and Pong with Payloads
  # Oversized/fragmented control frames were rejected earlier, so a plain
  # 7-bit length is all that can reach these clauses.
  for {key, opcode} <- Map.take(@opcodes, [:ping, :pong]) do
    def parse_frame(<<1::1, 0::3, unquote(opcode)::4, 0::1, len::7, remaining::bitstring>>) do
      <<payload::bytes-size(len), rest::bitstring>> = remaining
      {:ok, {unquote(key), payload}, rest}
    end
  end
  # Text Frames (Check Valid UTF-8 Payloads)
  # One clause per payload-length encoding: 16-bit (marker 126),
  # 64-bit (marker 127), and plain 7-bit.
  def parse_frame(<<1::1, 0::3, 1::4, 0::1, 126::7, len::16, remaining::bitstring>> = buffer) do
    parse_text_payload(len, remaining, buffer)
  end
  def parse_frame(<<1::1, 0::3, 1::4, 0::1, 127::7, len::64, remaining::bitstring>> = buffer) do
    parse_text_payload(len, remaining, buffer)
  end
  def parse_frame(<<1::1, 0::3, 1::4, 0::1, len::7, remaining::bitstring>> = buffer) do
    parse_text_payload(len, remaining, buffer)
  end
  # Binary Frames
  # Binary payloads are passed through without validation; same three
  # length encodings as text above.
  def parse_frame(<<1::1, 0::3, 2::4, 0::1, 126::7, len::16, remaining::bitstring>>) do
    <<payload::bytes-size(len), rest::bitstring>> = remaining
    {:ok, {:binary, payload}, rest}
  end
  def parse_frame(<<1::1, 0::3, 2::4, 0::1, 127::7, len::64, remaining::bitstring>>) do
    <<payload::bytes-size(len), rest::bitstring>> = remaining
    {:ok, {:binary, payload}, rest}
  end
  def parse_frame(<<1::1, 0::3, 2::4, 0::1, len::7, remaining::bitstring>>) do
    <<payload::bytes-size(len), rest::bitstring>> = remaining
    {:ok, {:binary, payload}, rest}
  end
  # Start of Fragmented Message
  # FIN bit clear with a data opcode opens a fragmented message; the opcode
  # (:text or :binary) is remembered in the :fragment tuple.
  for {key, opcode} <- Map.take(@opcodes, [:text, :binary]) do
    def parse_frame(
          <<0::1, 0::3, unquote(opcode)::4, 0::1, 126::7, len::16, remaining::bitstring>>
        ) do
      <<payload::bytes-size(len), rest::bitstring>> = remaining
      {:ok, {:fragment, unquote(key), payload}, rest}
    end
    def parse_frame(
          <<0::1, 0::3, unquote(opcode)::4, 0::1, 127::7, len::64, remaining::bitstring>>
        ) do
      <<payload::bytes-size(len), rest::bitstring>> = remaining
      {:ok, {:fragment, unquote(key), payload}, rest}
    end
    def parse_frame(<<0::1, 0::3, unquote(opcode)::4, 0::1, len::7, remaining::bitstring>>) do
      <<payload::bytes-size(len), rest::bitstring>> = remaining
      {:ok, {:fragment, unquote(key), payload}, rest}
    end
  end
  # Parse Fragmentation Continuation Frames
  # Opcode 0 with FIN clear continues an in-flight fragmented message.
  def parse_frame(<<0::1, 0::3, 0::4, 0::1, 126::7, len::16, remaining::bitstring>>) do
    <<payload::bytes-size(len), rest::bitstring>> = remaining
    {:ok, {:continuation, payload}, rest}
  end
  def parse_frame(<<0::1, 0::3, 0::4, 0::1, 127::7, len::64, remaining::bitstring>>) do
    <<payload::bytes-size(len), rest::bitstring>> = remaining
    {:ok, {:continuation, payload}, rest}
  end
  def parse_frame(<<0::1, 0::3, 0::4, 0::1, len::7, remaining::bitstring>>) do
    <<payload::bytes-size(len), rest::bitstring>> = remaining
    {:ok, {:continuation, payload}, rest}
  end
  # Parse Fragmentation Finish Frames
  # Opcode 0 with FIN set terminates the fragmented message.
  def parse_frame(<<1::1, 0::3, 0::4, 0::1, 126::7, len::16, remaining::bitstring>>) do
    <<payload::bytes-size(len), rest::bitstring>> = remaining
    {:ok, {:finish, payload}, rest}
  end
  def parse_frame(<<1::1, 0::3, 0::4, 0::1, 127::7, len::64, remaining::bitstring>>) do
    <<payload::bytes-size(len), rest::bitstring>> = remaining
    {:ok, {:finish, payload}, rest}
  end
  def parse_frame(<<1::1, 0::3, 0::4, 0::1, len::7, remaining::bitstring>>) do
    <<payload::bytes-size(len), rest::bitstring>> = remaining
    {:ok, {:finish, payload}, rest}
  end
@doc """
Parses and combines two frames in a fragmented segment.
"""
@spec parse_fragment({:fragment, :text | :binary, binary}, {:continuation | :finish, binary}) ::
{:fragment, :text | :binary, binary}
| {:text | :binary, binary}
| {:error, %WebSockex.FragmentParseError{}}
def parse_fragment(fragmented_parts, continuation_frame)
def parse_fragment({:fragment, _, _} = frame0, {:fragment, _, _} = frame1) do
{:error,
%WebSockex.FragmentParseError{
reason: :two_start_frames,
fragment: frame0,
continuation: frame1
}}
end
def parse_fragment({:fragment, type, fragment}, {:continuation, continuation}) do
{:ok, {:fragment, type, <<fragment::binary, continuation::binary>>}}
end
def parse_fragment({:fragment, :binary, fragment}, {:finish, continuation}) do
{:ok, {:binary, <<fragment::binary, continuation::binary>>}}
end
# Make sure text is valid UTF-8
def parse_fragment({:fragment, :text, fragment}, {:finish, continuation}) do
text = <<fragment::binary, continuation::binary>>
if String.valid?(text) do
{:ok, {:text, text}}
else
{:error, %WebSockex.FrameError{reason: :invalid_utf8, opcode: :text, buffer: text}}
end
end
  @doc """
  Encodes a frame into a binary for sending.
  """
  @spec encode_frame(frame) :: {:ok, binary} | {:error, %WebSockex.FrameEncodeError{}}
  def encode_frame(frame)
  # Encode Ping and Pong Frames
  # Every encoded frame sets the mask bit and carries a fresh 4-byte masking
  # key, as required for client-to-server frames.
  for {key, opcode} <- Map.take(@opcodes, [:ping, :pong]) do
    # Control frame payloads are capped at 125 bytes.
    def encode_frame({unquote(key), <<payload::binary>>}) when byte_size(payload) > 125 do
      {:error,
       %WebSockex.FrameEncodeError{
         reason: :control_frame_too_large,
         frame_type: unquote(key),
         frame_payload: payload
       }}
    end
    # Bare :ping / :pong atoms encode as zero-length payloads.
    def encode_frame(unquote(key)) do
      mask = create_mask_key()
      {:ok, <<1::1, 0::3, unquote(opcode)::4, 1::1, 0::7, mask::bytes-size(4)>>}
    end
    def encode_frame({unquote(key), <<payload::binary>>}) do
      mask = create_mask_key()
      len = byte_size(payload)
      masked_payload = mask(mask, payload)
      {:ok,
       <<1::1, 0::3, unquote(opcode)::4, 1::1, len::7, mask::bytes-size(4),
         masked_payload::binary-size(len)>>}
    end
  end
  # Encode Close Frames
  # Close codes outside the 1000..4999 range cannot be sent.
  def encode_frame({:close, close_code, <<payload::binary>>})
      when not (close_code in 1000..4999) do
    {:error,
     %WebSockex.FrameEncodeError{
       reason: :close_code_out_of_range,
       frame_type: :close,
       frame_payload: payload,
       close_code: close_code
     }}
  end
  # 123 bytes of reason text, because the 2-byte status code brings the
  # total payload to the 125-byte control-frame limit.
  def encode_frame({:close, close_code, <<payload::binary>>})
      when byte_size(payload) > 123 do
    {:error,
     %WebSockex.FrameEncodeError{
       reason: :control_frame_too_large,
       frame_type: :close,
       frame_payload: payload,
       close_code: close_code
     }}
  end
  # Bare :close encodes with an empty payload (no status code).
  def encode_frame(:close) do
    mask = create_mask_key()
    {:ok, <<1::1, 0::3, 8::4, 1::1, 0::7, mask::bytes-size(4)>>}
  end
  # Status code and reason are masked together as one payload.
  def encode_frame({:close, close_code, <<payload::binary>>}) do
    mask = create_mask_key()
    payload = <<close_code::16, payload::binary>>
    len = byte_size(payload)
    masked_payload = mask(mask, payload)
    {:ok, <<1::1, 0::3, 8::4, 1::1, len::7, mask::bytes-size(4), masked_payload::binary>>}
  end
  # Encode Text and Binary frames
  # Data frames use the variable-length payload encoding produced by
  # get_payload_length_bin/1 (7, 7+16 or 7+64 bits).
  for {key, opcode} <- Map.take(@opcodes, [:text, :binary]) do
    def encode_frame({unquote(key), payload}) do
      mask = create_mask_key()
      {payload_len_bin, payload_len_size} = get_payload_length_bin(payload)
      masked_payload = mask(mask, payload)
      {:ok,
       <<1::1, 0::3, unquote(opcode)::4, 1::1, payload_len_bin::bits-size(payload_len_size),
         mask::bytes-size(4), masked_payload::binary>>}
    end
    # Start Fragments!
    # Same as above but with the FIN bit clear to open a fragmented message.
    def encode_frame({:fragment, unquote(key), payload}) do
      mask = create_mask_key()
      {payload_len_bin, payload_len_size} = get_payload_length_bin(payload)
      masked_payload = mask(mask, payload)
      {:ok,
       <<0::1, 0::3, unquote(opcode)::4, 1::1, payload_len_bin::bits-size(payload_len_size),
         mask::bytes-size(4), masked_payload::binary>>}
    end
  end
  # Handle other Fragments
  # Continuation frames keep FIN clear; finish frames set it. Both use
  # opcode 0.
  for {key, fin_bit} <- [{:continuation, 0}, {:finish, 1}] do
    def encode_frame({unquote(key), payload}) do
      mask = create_mask_key()
      {payload_len_bin, payload_len_size} = get_payload_length_bin(payload)
      masked_payload = mask(mask, payload)
      {:ok,
       <<unquote(fin_bit)::1, 0::3, 0::4, 1::1, payload_len_bin::bits-size(payload_len_size),
         mask::bytes-size(4), masked_payload::binary>>}
    end
  end
  # Anything that matched none of the clauses above is not a known frame.
  def encode_frame(frame), do: {:error, %WebSockex.InvalidFrameError{frame: frame}}
defp parse_text_payload(len, remaining, buffer) do
<<payload::bytes-size(len), rest::bitstring>> = remaining
if String.valid?(payload) do
{:ok, {:text, payload}, rest}
else
{:error, %WebSockex.FrameError{reason: :invalid_utf8, opcode: :text, buffer: buffer}}
end
end
  # Generates the 4-byte random masking key applied to every outgoing
  # payload; a fresh key is created per frame.
  defp create_mask_key do
    :crypto.strong_rand_bytes(4)
  end
defp get_payload_length_bin(payload) do
case byte_size(payload) do
size when size <= 125 ->
{<<size::7>>, 7}
size when size <= 0xFFFF ->
{<<126::7, size::16>>, 16 + 7}
size when size <= 0x7FFFFFFFFFFFFFFF ->
{<<127::7, 0::1, size::63>>, 64 + 7}
_ ->
raise "WTF, Seriously? You're trying to send a payload larger than #{0x7FFFFFFFFFFFFFFF} bytes?"
end
end
  # XOR-masks `payload` with the 4-byte `key`, 4 bytes at a time, with
  # 1/2/3-byte clauses (generated below) handling the tail.
  defp mask(key, payload, acc \\ <<>>)
  defp mask(_, <<>>, acc), do: acc
  # Generated tail clauses: mask a final 1-, 2- or 3-byte chunk against the
  # correspondingly-truncated prefix of the key.
  for x <- 1..3 do
    defp mask(<<key::8*unquote(x), _::binary>>, <<part::8*unquote(x)>>, acc) do
      masked = xor(part, key)
      <<acc::binary, masked::8*unquote(x)>>
    end
  end
  # Main loop: XOR full 4-byte words against the whole key and recurse.
  defp mask(<<key::32>> = key_bin, <<part::8*4, rest::binary>>, acc) do
    masked = xor(part, key)
    mask(key_bin, rest, <<acc::binary, masked::8*4>>)
  end
  # OTP 24 removed/deprecated the `Bitwise.^^^/2` operator in favor of
  # `Bitwise.bxor/2`, so pick the right call at compile time. (The attribute
  # name matches its misspelled definition at the top of the module.)
  if @otp_vesion >= 24 do
    defp xor(a, b), do: Bitwise.bxor(a, b)
  else
    defp xor(a, b), do: Bitwise.^^^(a, b)
  end
end
| 32.243902 | 104 | 0.6059 |
1c68407fe5ddc523e878ca74d0b3d9444fbac925 | 635 | exs | Elixir | elixir/app/test/lasagna_test.exs | dangvanthanh/1984 | 984e632c49d2d90784576cfd31aa77375846827d | [
"MIT"
] | null | null | null | elixir/app/test/lasagna_test.exs | dangvanthanh/1984 | 984e632c49d2d90784576cfd31aa77375846827d | [
"MIT"
] | null | null | null | elixir/app/test/lasagna_test.exs | dangvanthanh/1984 | 984e632c49d2d90784576cfd31aa77375846827d | [
"MIT"
] | null | null | null | defmodule LasagnaTest do
use ExUnit.Case
doctest Lasagna
test "Define the expected oven time in minutes" do
assert Lasagna.expected_minutes_in_oven() == 40
end
test "Calculate the remaining oven time in minutes" do
assert Lasagna.remaining_minutes_in_oven(30) == 10
end
test "Calculate the preparation time in minutes" do
assert Lasagna.preparation_time_in_minutes(2) == 4
end
test "Calculate the total working time in minutes" do
assert Lasagna.total_time_in_minutes(3, 20) == 26
end
test "Create a notification that the lasagna is ready" do
assert Lasagna.alarm() == "Ding!"
end
end
| 25.4 | 59 | 0.733858 |
1c6856be104fbc49bd846fb14c1c57616508490f | 1,358 | ex | Elixir | hello/lib/hello_web/endpoint.ex | ancaciascaiu/PhoenixApp | a7d18f76d69f445d11673144474e3e11855d298d | [
"MIT"
] | null | null | null | hello/lib/hello_web/endpoint.ex | ancaciascaiu/PhoenixApp | a7d18f76d69f445d11673144474e3e11855d298d | [
"MIT"
] | null | null | null | hello/lib/hello_web/endpoint.ex | ancaciascaiu/PhoenixApp | a7d18f76d69f445d11673144474e3e11855d298d | [
"MIT"
] | null | null | null | defmodule HelloWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :hello
socket("/live", Phoenix.LiveView.Socket)
socket("/socket", HelloWeb.UserSocket,
websocket: true,
longpoll: false
)
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug(Plug.Static,
at: "/",
from: :hello,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket("/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket)
plug(Phoenix.LiveReloader)
plug(Phoenix.CodeReloader)
end
plug(Plug.RequestId)
plug(Plug.Telemetry, event_prefix: [:phoenix, :endpoint])
plug(Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
)
plug(Plug.MethodOverride)
plug(Plug.Head)
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
plug(Plug.Session,
store: :cookie,
key: "_hello_key",
signing_salt: "6yPdr1y4"
)
plug(HelloWeb.Router)
end
| 25.622642 | 70 | 0.69514 |
1c6869bd1b028f514285232422fdfb08d01e8ab6 | 2,639 | ex | Elixir | lib/sanbase/alerts/history/price_volume_diff_history.ex | santiment/sanbase2 | 9ef6e2dd1e377744a6d2bba570ea6bd477a1db31 | [
"MIT"
] | 81 | 2017-11-20T01:20:22.000Z | 2022-03-05T12:04:25.000Z | lib/sanbase/alerts/history/price_volume_diff_history.ex | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 359 | 2017-10-15T14:40:53.000Z | 2022-01-25T13:34:20.000Z | lib/sanbase/alerts/history/price_volume_diff_history.ex | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 16 | 2017-11-19T13:57:40.000Z | 2022-02-07T08:13:02.000Z | defmodule Sanbase.Alert.History.PriceVolumeDifferenceHistory do
@moduledoc """
Implementations of historical trigger points for price_volume_difference.
The history goes 180 days back.
"""
alias Sanbase.Alert.Trigger.PriceVolumeDifferenceTriggerSettings
require Logger
@type historical_trigger_points_type :: %{
datetime: %DateTime{},
price_volume_diff: float(),
triggered?: boolean()
}
defimpl Sanbase.Alert.History, for: PriceVolumeDifferenceTriggerSettings do
@historical_days_from 180
alias Sanbase.Alert.History.PriceVolumeDifferenceHistory
@spec historical_trigger_points(%PriceVolumeDifferenceTriggerSettings{}, String.t()) ::
{:ok, list(PriceVolumeDifferenceHistory.historical_trigger_points_type())}
| {:error, String.t()}
def historical_trigger_points(
%PriceVolumeDifferenceTriggerSettings{target: %{slug: target}} = settings,
cooldown
)
when is_binary(target) do
case get_price_volume_data(settings) do
{:ok, result} ->
result = result |> add_triggered_marks(cooldown, settings)
{:ok, result}
{:error, error} ->
{:error, error}
end
end
defp get_price_volume_data(settings) do
Sanbase.TechIndicators.PriceVolumeDifference.price_volume_diff(
Sanbase.Model.Project.by_slug(settings.target.slug),
"USD",
Timex.shift(Timex.now(), days: -@historical_days_from),
Timex.now(),
settings.aggregate_interval,
settings.window_type,
settings.approximation_window,
settings.comparison_window
)
end
defp add_triggered_marks(result, cooldown, settings) do
threshold = settings.threshold
result
|> Enum.reduce({[], DateTime.from_unix!(0)}, fn
%{datetime: datetime, price_volume_diff: pvd} = elem, {acc, cooldown_until} ->
# triggered if not in cooldown and the value is above the threshold
triggered? = DateTime.compare(datetime, cooldown_until) != :lt and pvd >= threshold
case triggered? do
false ->
new_elem = elem |> Map.put(:triggered?, false)
{[new_elem | acc], cooldown_until}
true ->
new_elem = elem |> Map.put(:triggered?, true)
cooldown_until =
Timex.shift(datetime,
seconds: Sanbase.DateTimeUtils.str_to_sec(cooldown)
)
{[new_elem | acc], cooldown_until}
end
end)
|> elem(0)
|> Enum.reverse()
end
end
end
| 31.795181 | 93 | 0.632815 |
1c686e71b3072b6aa1aa82b1ca2c5ac0d3691a58 | 274 | exs | Elixir | ex_cubic_ingestion/config/test.exs | mbta/data_platform | 3fa66cb74134b2baa5234e908e147bf393c13926 | [
"MIT"
] | 1 | 2022-01-30T21:02:48.000Z | 2022-01-30T21:02:48.000Z | ex_cubic_ingestion/config/test.exs | mbta/data_platform | 3fa66cb74134b2baa5234e908e147bf393c13926 | [
"MIT"
] | 21 | 2022-01-25T16:35:50.000Z | 2022-03-31T19:42:52.000Z | ex_cubic_ingestion/config/test.exs | mbta/data_platform | 3fa66cb74134b2baa5234e908e147bf393c13926 | [
"MIT"
] | 1 | 2022-02-02T14:34:17.000Z | 2022-02-02T14:34:17.000Z | import Config
# only log warnings+ in test
config :logger, level: :warning
config :ex_cubic_ingestion,
start_app?: false
config :ex_cubic_ingestion, Oban, queues: false, plugins: false
config :ex_cubic_ingestion, ExCubicIngestion.Repo, pool: Ecto.Adapters.SQL.Sandbox
| 22.833333 | 82 | 0.788321 |
1c6870e6cc01011e53a49ead12a38f8aac7693dc | 4,700 | ex | Elixir | lib/mix/tasks/nerves_hub.org.ex | danielspofford/nerves_hub_cli | 3d12abe873e27448ba5c862a08f03d719cc40dc2 | [
"Apache-2.0"
] | null | null | null | lib/mix/tasks/nerves_hub.org.ex | danielspofford/nerves_hub_cli | 3d12abe873e27448ba5c862a08f03d719cc40dc2 | [
"Apache-2.0"
] | null | null | null | lib/mix/tasks/nerves_hub.org.ex | danielspofford/nerves_hub_cli | 3d12abe873e27448ba5c862a08f03d719cc40dc2 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.NervesHub.Org do
use Mix.Task
import Mix.NervesHubCLI.Utils
alias Mix.NervesHubCLI.Shell
@shortdoc "Manages an organization"
@moduledoc """
Manages an organization
# Managing user roles
The following functions allow the management of user roles within your organization.
Roles are a way of granting users a permission level so they may perform
actions for your org. The following is a list of valid roles in order of
highest role to lowest role:
* `admin`
* `delete`
* `write`
* `read`
NervesHub will validate all actions with your user role. If an action you are
trying to perform requires `write`, the user performing the action will be
required to have an org role of `write` or higher (`admin`, `delete`).
Managing user roles in your org will require that your user has the org role of
`admin`.
## user list
List the users and their role for the organization.
mix nerves_hub.org user list
## user add
Add an existing user to an org with a role.
mix nerves_hub.org user add USERNAME ROLE
## user update
Update an existing user in your org with a new role.
mix nerves_hub.org user update USERNAME ROLE
## user remove
Remove an existing user from having a role in your organization.
mix nerves_hub.org user remove USERNAME
"""
@switches [
org: :string
]
def run(args) do
Application.ensure_all_started(:nerves_hub_cli)
{opts, args} = OptionParser.parse!(args, strict: @switches)
show_api_endpoint()
org = org(opts)
case args do
["user", "list"] ->
user_list(org)
["user", "add", username, role] ->
user_add(org, username, role)
["user", "update", username, role] ->
user_update(org, username, role)
["user", "remove", username] ->
user_remove(org, username)
_ ->
render_help()
end
end
@spec render_help() :: no_return()
def render_help() do
Shell.raise("""
Invalid arguments to `mix nerves_hub.org`.
Usage:
mix nerves_hub.org user list
mix nerves_hub.org user add USERNAME ROLE
mix nerves_hub.org user update USERNAME ROLE
mix nerves_hub.org user remove USERNAME
Run `mix help nerves_hub.org` for more information.
""")
end
def user_list(org) do
auth = Shell.request_auth()
case NervesHubUserAPI.OrgUser.list(org, auth) do
{:ok, %{"data" => users}} ->
render_users(users)
error ->
Shell.info("Failed to list org users \nreason: #{inspect(error)}")
end
end
def user_add(org, username, role, auth \\ nil) do
Shell.info("")
Shell.info("Adding user '#{username}' to org '#{org}' with role '#{role}'...")
auth = auth || Shell.request_auth()
case NervesHubUserAPI.OrgUser.add(org, username, String.to_atom(role), auth) do
{:ok, %{"data" => %{} = _org_user}} ->
Shell.info("User '#{username}' was added.")
error ->
Shell.render_error(error)
end
end
def user_update(org, username, role) do
Shell.info("")
Shell.info("Updating user '#{username}' in org '#{org}' to role '#{role}'...")
auth = Shell.request_auth()
case NervesHubUserAPI.OrgUser.update(org, username, String.to_atom(role), auth) do
{:ok, %{"data" => %{} = _org_user}} ->
Shell.info("User '#{username}' was updated.")
{:error, %{"errors" => %{"detail" => "Not Found"}}} ->
Shell.error("""
'#{username}' is not a user in the organization '#{org}'.
""")
if Shell.yes?("Would you like to add them?") do
user_add(org, username, role, auth)
end
error ->
Shell.render_error(error)
end
end
def user_remove(org, username) do
Shell.info("")
Shell.info("Removing user '#{username}' from org '#{org}'...")
auth = Shell.request_auth()
case NervesHubUserAPI.OrgUser.remove(org, username, auth) do
{:ok, ""} ->
Shell.info("User '#{username}' was removed.")
{:error, %{"errors" => %{"detail" => "Not Found"}}} ->
Shell.error("""
'#{username}' is not a user in the organization '#{org}'
""")
error ->
IO.inspect(error)
Shell.render_error(error)
end
end
defp render_users(users) when is_list(users) do
Shell.info("\nOrganization users:")
Enum.each(users, fn params ->
Shell.info("------------")
render_user(params)
end)
Shell.info("------------")
Shell.info("")
end
defp render_user(params) do
Shell.info(" username: #{params["username"]}")
Shell.info(" role: #{params["role"]}")
end
end
| 24.479167 | 86 | 0.614894 |
1c6872dc388f622bd41206e0a871fb9ba48a14b1 | 2,476 | ex | Elixir | clients/firestore/lib/google_api/firestore/v1beta1/model/batch_get_documents_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/firestore/lib/google_api/firestore/v1beta1/model/batch_get_documents_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/firestore/lib/google_api/firestore/v1beta1/model/batch_get_documents_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Firestore.V1beta1.Model.BatchGetDocumentsResponse do
  @moduledoc """
  The streamed response for Firestore.BatchGetDocuments.
  ## Attributes
  * `found` (*type:* `GoogleApi.Firestore.V1beta1.Model.Document.t`, *default:* `nil`) - A document that was requested.
  * `missing` (*type:* `String.t`, *default:* `nil`) - A document name that was requested but does not exist. In the format: `projects/{project_id}/databases/{database_id}/documents/{document_path}`.
  * `readTime` (*type:* `DateTime.t`, *default:* `nil`) - The time at which the document was read. This may be monotically increasing, in this case the previous documents in the result stream are guaranteed not to have changed between their read_time and this one.
  * `transaction` (*type:* `String.t`, *default:* `nil`) - The transaction that was started as part of this request. Will only be set in the first response, and only if BatchGetDocumentsRequest.new_transaction was set in the request.
  """
  # ModelBase supplies the struct definition, JSON field mapping for the
  # field/2 declarations below, and the decode/2 helper.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :found => GoogleApi.Firestore.V1beta1.Model.Document.t(),
          :missing => String.t(),
          :readTime => DateTime.t(),
          :transaction => String.t()
        }
  field(:found, as: GoogleApi.Firestore.V1beta1.Model.Document)
  field(:missing)
  field(:readTime, as: DateTime)
  field(:transaction)
end
# Delegate decoding to the generated model so nested fields (e.g. `found`)
# are built into their proper structs.
defimpl Poison.Decoder, for: GoogleApi.Firestore.V1beta1.Model.BatchGetDocumentsResponse do
  def decode(value, options) do
    GoogleApi.Firestore.V1beta1.Model.BatchGetDocumentsResponse.decode(value, options)
  end
end
# Encode through the shared ModelBase encoder used by all generated models.
defimpl Poison.Encoder, for: GoogleApi.Firestore.V1beta1.Model.BatchGetDocumentsResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 44.214286 | 266 | 0.735864 |
1c687543f7627aed7dc8c72bdfc1f0e21699172d | 1,783 | ex | Elixir | apps/engine/lib/engine/db/transaction/payment_v1/validators/merge.ex | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | 4 | 2020-11-30T17:38:57.000Z | 2021-01-23T21:29:41.000Z | apps/engine/lib/engine/db/transaction/payment_v1/validators/merge.ex | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | 24 | 2020-11-30T17:32:48.000Z | 2021-02-22T06:25:22.000Z | apps/engine/lib/engine/db/transaction/payment_v1/validators/merge.ex | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | null | null | null | defmodule Engine.DB.Transaction.PaymentV1.Validator.Merge do
@moduledoc """
Decides whether transactions qualify as "merge" transactions that use a single token,
single recipient address and have fewer outputs than inputs. This decision is necessary
to know by the child chain to not require the transaction fees.
"""
alias Engine.DB.Transaction.PaymentV1.Type
@doc """
Decides whether the given input and ouput data qualify as "merge".
To be a "merge" we must:
- Have the same `output_guard` for all inputs and outputs
- Have the same `token` for all inputs and outputs
- Have less outputs than inputs
Returns `true` if the transaction is a merge, or `false` otherwise.
## Example:
iex> Engine.DB.Transaction.PaymentV1.Validator.Merge.is_merge?([
...> %{output_guard: <<1::160>>, token: <<1::160>>, amount: 1 },
...> %{output_guard: <<1::160>>, token: <<1::160>>, amount: 2}], [
...> %{output_guard: <<1::160>>, token: <<1::160>>, amount: 3}])
true
"""
@spec is_merge?(Type.output_list_t(), Type.output_list_t()) :: boolean()
def is_merge?(input_data, output_data) do
with true <- has_same?(input_data, output_data, :output_guard),
true <- has_same?(input_data, output_data, :token),
true <- has_less_outputs_than_inputs?(input_data, output_data) do
true
end
end
defp has_same?(input_data, output_data, element) do
input_elements = Enum.map(input_data, & &1[element])
output_elements = Enum.map(output_data, & &1[element])
input_elements
|> Enum.concat(output_elements)
|> single?()
end
defp has_less_outputs_than_inputs?(inputs, outputs), do: length(inputs) >= 1 and length(inputs) > length(outputs)
defp single?(list), do: list |> Enum.uniq() |> length() == 1
end
| 35.66 | 115 | 0.684801 |
1c6884ff35be83584e9f4c3afd2ff2fd5406350c | 334 | exs | Elixir | lib/mix/templates/001_create_chat_rooms.exs | karabiner-inc/materia_chat | 6670c97e2bd6e677b4ac1234f2c9f10a7f0020b2 | [
"Apache-2.0"
] | null | null | null | lib/mix/templates/001_create_chat_rooms.exs | karabiner-inc/materia_chat | 6670c97e2bd6e677b4ac1234f2c9f10a7f0020b2 | [
"Apache-2.0"
] | 4 | 2019-04-01T01:35:25.000Z | 2019-06-06T05:36:31.000Z | priv/repo/migrations/20190319040732_materia_chat_1_create_chat_rooms.exs | karabiner-inc/materia_chat | 6670c97e2bd6e677b4ac1234f2c9f10a7f0020b2 | [
"Apache-2.0"
] | null | null | null | defmodule MateriaChat.Repo.Migrations.CreateChatRooms do
use Ecto.Migration
def change do
create table(:chat_rooms) do
add(:title, :string)
add(:access_poricy, :string)
add(:status, :integer)
add(:lock_version, :bigint)
timestamps()
end
create(index(:chat_rooms, [:status]))
end
end
| 19.647059 | 56 | 0.658683 |
1c68ae76ca133429c72f1ae7d5c93c40c0170c1a | 1,178 | ex | Elixir | phoenix_app/benchmarker/web/channels/user_socket.ex | ansrivas/phoenix-and-gin | c3ee9f2bc5db3bb41179bc7b8747455ae148b55b | [
"MIT"
] | null | null | null | phoenix_app/benchmarker/web/channels/user_socket.ex | ansrivas/phoenix-and-gin | c3ee9f2bc5db3bb41179bc7b8747455ae148b55b | [
"MIT"
] | null | null | null | phoenix_app/benchmarker/web/channels/user_socket.ex | ansrivas/phoenix-and-gin | c3ee9f2bc5db3bb41179bc7b8747455ae148b55b | [
"MIT"
] | null | null | null | defmodule Benchmarker.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", Benchmarker.RoomChannel
## Transports
transport :websocket, Phoenix.Transports.WebSocket
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(_params, socket) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "users_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# Benchmarker.Endpoint.broadcast("users_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 31 | 84 | 0.704584 |
1c68af4ffa9c58cf62656a18aae012a33fba333c | 516 | ex | Elixir | apps/elixir_ls_debugger/test/fixtures/mix_project/lib/mix_project.ex | maciej-szlosarczyk/elixir-ls | f9e3a969a32212482a7625deec9e0fd0f533f991 | [
"Apache-2.0"
] | 865 | 2018-10-31T20:29:13.000Z | 2022-03-29T11:13:39.000Z | apps/elixir_ls_debugger/test/fixtures/mix_project/lib/mix_project.ex | maciej-szlosarczyk/elixir-ls | f9e3a969a32212482a7625deec9e0fd0f533f991 | [
"Apache-2.0"
] | 441 | 2019-01-05T02:33:52.000Z | 2022-03-30T20:56:50.000Z | apps/elixir_ls_debugger/test/fixtures/mix_project/lib/mix_project.ex | maciej-szlosarczyk/elixir-ls | f9e3a969a32212482a7625deec9e0fd0f533f991 | [
"Apache-2.0"
] | 126 | 2018-11-12T19:16:53.000Z | 2022-03-26T13:27:50.000Z | defmodule MixProject do
def quadruple(x) do
double(double(x))
end
def double(y) do
2 * y
end
def exit do
Task.start(fn ->
Task.start_link(fn ->
Process.sleep(1000)
raise "Fixture MixProject expected error"
end)
Process.sleep(:infinity)
end)
Process.sleep(:infinity)
end
def exit_self do
Task.start_link(fn ->
Process.sleep(1000)
raise "Fixture MixProject raise for exit_self/0"
end)
Process.sleep(:infinity)
end
end
| 16.125 | 54 | 0.620155 |
1c68cbc61bb5166a0dab68b078d00b7b1a3f6301 | 3,012 | ex | Elixir | clients/cloud_build/lib/google_api/cloud_build/v1/model/git_file_source.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/cloud_build/lib/google_api/cloud_build/v1/model/git_file_source.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/cloud_build/lib/google_api/cloud_build/v1/model/git_file_source.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudBuild.V1.Model.GitFileSource do
  @moduledoc """
  GitFileSource describes a file within a (possibly remote) code repository.
  ## Attributes
  * `bitbucketServerConfig` (*type:* `String.t`, *default:* `nil`) - The full resource name of the bitbucket server config. Format: `projects/{project}/locations/{location}/bitbucketServerConfigs/{id}`.
  * `githubEnterpriseConfig` (*type:* `String.t`, *default:* `nil`) - The full resource name of the github enterprise config. Format: `projects/{project}/locations/{location}/githubEnterpriseConfigs/{id}`. `projects/{project}/githubEnterpriseConfigs/{id}`.
  * `path` (*type:* `String.t`, *default:* `nil`) - The path of the file, with the repo root as the root of the path.
  * `repoType` (*type:* `String.t`, *default:* `nil`) - See RepoType above.
  * `revision` (*type:* `String.t`, *default:* `nil`) - The branch, tag, arbitrary ref, or SHA version of the repo to use when resolving the filename (optional). This field respects the same syntax/resolution as described here: https://git-scm.com/docs/gitrevisions If unspecified, the revision from which the trigger invocation originated is assumed to be the revision from which to read the specified path.
  * `uri` (*type:* `String.t`, *default:* `nil`) - The URI of the repo. Either uri or repository can be specified. If unspecified, the repo from which the trigger invocation originated is assumed to be the repo from which to read the specified path.
  """
  # ModelBase supplies the struct definition, JSON field mapping for the
  # field/1 declarations below, and the decode/2 helper.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :bitbucketServerConfig => String.t() | nil,
          :githubEnterpriseConfig => String.t() | nil,
          :path => String.t() | nil,
          :repoType => String.t() | nil,
          :revision => String.t() | nil,
          :uri => String.t() | nil
        }
  field(:bitbucketServerConfig)
  field(:githubEnterpriseConfig)
  field(:path)
  field(:repoType)
  field(:revision)
  field(:uri)
end
# Delegates Poison decoding to the model module's own decode/2.
defimpl Poison.Decoder, for: GoogleApi.CloudBuild.V1.Model.GitFileSource do
  def decode(value, options),
    do: GoogleApi.CloudBuild.V1.Model.GitFileSource.decode(value, options)
end
# Encodes the struct through the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.CloudBuild.V1.Model.GitFileSource do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 48.580645 | 410 | 0.715803 |
1c6900769106c0e48159a7a62c71b5c75598b427 | 796 | exs | Elixir | priv/repo/migrations/20191214212545_add_predecessor_host_to_performances.exs | richeterre/jumubase-phoenix | 7584f890af117d496971b5284bf9de798e22266f | [
"MIT"
] | 2 | 2019-01-20T07:03:30.000Z | 2019-04-11T10:20:14.000Z | priv/repo/migrations/20191214212545_add_predecessor_host_to_performances.exs | richeterre/jumubase-phoenix | 7584f890af117d496971b5284bf9de798e22266f | [
"MIT"
] | 6 | 2018-09-20T05:52:14.000Z | 2019-04-23T19:27:39.000Z | priv/repo/migrations/20191214212545_add_predecessor_host_to_performances.exs | richeterre/jumubase-phoenix | 7584f890af117d496971b5284bf9de798e22266f | [
"MIT"
] | null | null | null | defmodule Jumubase.Repo.Migrations.AddPredecessorHostToPerformances do
use Ecto.Migration
import Ecto.Query
alias Jumubase.Repo
def up do
alter table(:performances) do
add :predecessor_host_id, references(:hosts, on_delete: :nilify_all)
end
flush()
from(c in "contests",
join: h in "hosts",
on: c.host_id == h.id,
select: %{contest_id: c.id, host_id: h.id}
)
|> Repo.all()
|> Enum.each(fn row ->
Repo.update_all(
from(p in "performances", where: p.predecessor_contest_id == ^row.contest_id),
set: [predecessor_host_id: row.host_id]
)
end)
create index(:performances, :predecessor_host_id)
end
def down do
alter table(:performances) do
remove :predecessor_host_id
end
end
end
| 22.742857 | 86 | 0.650754 |
1c692ab9c7b8bbd857e4e106b76a713f1f345140 | 6,461 | ex | Elixir | apps/merkle_patricia_tree/lib/merkle_patricia_tree/trie/builder.ex | wolflee/mana | db66dac85addfaad98d40da5bd4082b3a0198bb1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 152 | 2018-10-27T04:52:03.000Z | 2022-03-26T10:34:00.000Z | apps/merkle_patricia_tree/lib/merkle_patricia_tree/trie/builder.ex | wolflee/mana | db66dac85addfaad98d40da5bd4082b3a0198bb1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 270 | 2018-04-14T07:34:57.000Z | 2018-10-25T18:10:45.000Z | apps/merkle_patricia_tree/lib/merkle_patricia_tree/trie/builder.ex | wolflee/mana | db66dac85addfaad98d40da5bd4082b3a0198bb1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 25 | 2018-10-27T12:15:13.000Z | 2022-01-25T20:31:14.000Z | defmodule MerklePatriciaTree.Trie.Builder do
@moduledoc """
Builder is responsible for adding keys to an
existing merkle trie. To add a key, we need to
make a delta to our trie that ends up as the canonical
form of the given tree as defined in http://gavwood.com/Paper.pdf.
Note: this algorithm is non-obvious, and hence why we have a good
number of functional and invariant tests. We should add more specific
unit tests to this module.
"""
import MerklePatriciaTree.ListHelper, only: [overlap: 2]
alias MerklePatriciaTree.Trie
alias MerklePatriciaTree.Trie.Node
alias MerklePatriciaTree.TrieStorage
@empty_branch <<>>
@doc """
Adds a key-value pair to a given trie.
This may radically change the structure of the trie.
"""
@spec put_key(Node.trie_node(), Trie.key(), ExRLP.t(), TrieStorage.t()) :: Node.trie_node()
def put_key(trie_node, key, value, trie) do
trie_put_key(trie_node, key, value, trie)
end
# Merge into a leaf with identical key (overwrite)
defp trie_put_key({:leaf, old_prefix, _value}, new_prefix, new_value, _trie)
when old_prefix == new_prefix do
{:leaf, new_prefix, new_value}
end
# Merge leafs that share some prefix,
# this will cause us to construct an extension followed by a branch.
defp trie_put_key(
{:leaf, [old_prefix_hd | _old_prefix_tl] = old_prefix, old_value},
[new_prefix_hd | _new_prefix_tl] = new_prefix,
new_value,
trie
)
when old_prefix_hd == new_prefix_hd do
{matching_prefix, old_tl, new_tl} = overlap(old_prefix, new_prefix)
branch =
[{old_tl, old_value}, {new_tl, new_value}]
|> build_branch(trie)
|> TrieStorage.put_node(trie)
{:ext, matching_prefix, branch}
end
# Merge into a leaf with no matches (i.e. create a branch)
defp trie_put_key({:leaf, old_prefix, old_value}, new_prefix, new_value, trie) do
build_branch([{old_prefix, old_value}, {new_prefix, new_value}], trie)
end
# Merge into a branch with empty prefix to store branch value
defp trie_put_key({:branch, nodes}, [], value, _trie) when length(nodes) == 17 do
{:branch, List.replace_at(nodes, 16, value)}
end
# Merge down a branch node (recursively)
defp trie_put_key({:branch, nodes}, [prefix_hd | prefix_tl], value, trie) do
{:branch,
List.update_at(nodes, prefix_hd, fn branch ->
node =
branch
|> TrieStorage.into(trie)
|> TrieStorage.fetch_node()
# Insert the rest
node
|> put_key(prefix_tl, value, trie)
|> TrieStorage.put_node(trie)
end)}
end
# Merge into empty to create a leaf
defp trie_put_key(:empty, prefix, value, _trie) do
{:leaf, prefix, value}
end
# Merge exts that share some prefix,
# this will cause us to construct an extension followed by a branch.
defp trie_put_key(
{:ext, [old_prefix_hd | _old_prefix_tl] = old_prefix, old_value},
[new_prefix_hd | _new_prefix_tl] = new_prefix,
new_value,
trie
)
when old_prefix_hd == new_prefix_hd do
{matching_prefix, old_tl, new_tl} = overlap(old_prefix, new_prefix)
# We know that current `old_value` is a branch node because
# extension nodes are always followed by branch nodes.
# Now, lets see which one should go first.
if old_tl == [] do
# Ok, the `new_prefix` starts with the `old_prefix`.
#
# For example this could be the case when:
# old_prefix = [1, 2]
# new_prefix = [1, 2, 3]
#
# So the old one should go first followed by the new one.
# In this case let's just merge the new value into the `old_branch`.
# This is our decoded old branch trie.
old_trie =
old_value
|> TrieStorage.into(trie)
|> TrieStorage.fetch_node()
# Recursively merge the new value into
# the old branch trie.
new_encoded_trie =
old_trie
|> put_key(new_tl, new_value, trie)
|> TrieStorage.put_node(trie)
{:ext, matching_prefix, new_encoded_trie}
else
# If we've got here then we know that
# the `new_prefix` isn't prefixed by the `old_prefix`.
#
# This may happen, for example, when
# we "insert" into the middle/beginning of the trie:
# old_tl = [3] <= overlap([1,2,3], [1,2])
# old_tl = [1,2,3] <= overlap([1,2,3], [2,3,4])
# old_tl = [1,2,3] <= overlap([1,2,3], [])
#
# So new node should come first followed by the old node,
# which (as we already know) is a branch node.
# In this case we need to construct a new "empty" branch node,
# that may itself be placed "inside" another ext node,
# (if there are 2 or more shared nibbles) and then we need to
# (recursively) merge the old value into it.
first =
case old_tl do
# No shared nibbles.
# We need at least 2 for it to be the extension node.
[h | []] ->
# Here `h` is the nibble index inside
# our new branch node where the `old_value` will be inserted.
{h, {:encoded, old_value}}
# They have some common/shared prefix nibbles.
# So we need to "insert" an extension node.
[h | t] ->
ext_encoded = TrieStorage.put_node({:ext, t, old_value}, trie)
{h, {:encoded, ext_encoded}}
end
branch =
[first, {new_tl, new_value}]
|> build_branch(trie)
|> TrieStorage.put_node(trie)
{:ext, matching_prefix, branch}
end
end
# Merge into a ext with no matches (i.e. create a branch).
defp trie_put_key({:ext, old_prefix, old_value}, new_prefix, new_value, trie) do
first =
case old_prefix do
[h | []] ->
{h, {:encoded, old_value}}
[h | t] ->
ext_encoded = TrieStorage.put_node({:ext, t, old_value}, trie)
{h, {:encoded, ext_encoded}}
end
build_branch([first, {new_prefix, new_value}], trie)
end
# Builds a branch node with starter values.
defp build_branch(options, trie) do
base = {:branch, for(_ <- 0..15, do: @empty_branch) ++ [<<>>]}
Enum.reduce(options, base, fn
{prefix, {:encoded, value}}, {:branch, nodes} ->
next_nodes = List.replace_at(nodes, prefix, value)
{:branch, next_nodes}
{prefix, value}, acc ->
put_key(acc, prefix, value, trie)
end)
end
end
| 32.964286 | 93 | 0.62854 |
1c694212822aa69efbcecc2ce677b3f821989fa8 | 376 | ex | Elixir | web/views/error_view.ex | marcsugiyama/pinafore | a9ceae09118d07c56757340c4be6690aa6235ace | [
"Apache-2.0"
] | 2 | 2016-02-15T13:19:28.000Z | 2016-02-15T18:10:12.000Z | web/views/error_view.ex | marcsugiyama/pinafore | a9ceae09118d07c56757340c4be6690aa6235ace | [
"Apache-2.0"
] | 2 | 2016-02-14T22:01:49.000Z | 2016-02-14T22:02:46.000Z | web/views/error_view.ex | marcsugiyama/pinafore | a9ceae09118d07c56757340c4be6690aa6235ace | [
"Apache-2.0"
] | null | null | null | defmodule Pinafore.ErrorView do
use Pinafore.Web, :view
def render("404.html", _assigns) do
"Page not found"
end
def render("500.html", _assigns) do
"Server internal error"
end
# In case no render clause matches or no
# template is found, let's render it as 500
def template_not_found(_template, assigns) do
render "500.html", assigns
end
end
| 20.888889 | 47 | 0.699468 |
1c694a1a74bba6a03940bfa6eb225931b005cb36 | 1,263 | ex | Elixir | examples/bloggy/lib/bloggy_web/views/error_helpers.ex | wojtekmach/resourceful | 8425140aa1e89dfababcb2faa7bc4e2f59722661 | [
"Apache-2.0"
] | 1 | 2020-01-06T00:38:43.000Z | 2020-01-06T00:38:43.000Z | examples/bloggy/lib/bloggy_web/views/error_helpers.ex | wojtekmach/resourceful | 8425140aa1e89dfababcb2faa7bc4e2f59722661 | [
"Apache-2.0"
] | null | null | null | examples/bloggy/lib/bloggy_web/views/error_helpers.ex | wojtekmach/resourceful | 8425140aa1e89dfababcb2faa7bc4e2f59722661 | [
"Apache-2.0"
] | null | null | null | defmodule BloggyWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
Enum.map(Keyword.get_values(form.errors, field), fn (error) ->
content_tag :span, translate_error(error), class: "help-block"
end)
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# Because error messages were defined within Ecto, we must
# call the Gettext module passing our Gettext backend. We
# also use the "errors" domain as translations are placed
# in the errors.po file.
# Ecto will pass the :count keyword if the error message is
# meant to be pluralized.
# On your own code and templates, depending on whether you
# need the message to be pluralized or not, this could be
# written simply as:
#
# dngettext "errors", "1 file", "%{count} files", count
# dgettext "errors", "is invalid"
#
if count = opts[:count] do
Gettext.dngettext(BloggyWeb.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(BloggyWeb.Gettext, "errors", msg, opts)
end
end
end
| 30.804878 | 75 | 0.669834 |
1c694ec101fe05c3a318bd301f44d9fd139d6cd8 | 6,759 | ex | Elixir | lib/bamboo/adapters/sendin_blue_adapter_v3.ex | maysam/bamboo_sendinblue | 882511499ce8088b46713dbae06eee1bd84c860a | [
"MIT"
] | null | null | null | lib/bamboo/adapters/sendin_blue_adapter_v3.ex | maysam/bamboo_sendinblue | 882511499ce8088b46713dbae06eee1bd84c860a | [
"MIT"
] | null | null | null | lib/bamboo/adapters/sendin_blue_adapter_v3.ex | maysam/bamboo_sendinblue | 882511499ce8088b46713dbae06eee1bd84c860a | [
"MIT"
] | null | null | null | defmodule Bamboo.SendinBlueAdapterV3 do
@moduledoc """
Sends email using SendinBlue's JSON API v3.0.
This module requires a v3 API key to work.
Based on https://github.com/biospank/bamboo_sendinblue (the SendinBlue V2 API adapter)
## Reply-To field
To set the reply-to field, use `put_header(email, "reply-to-email", "user@mail.com")` and optionally `put_header(email, "reply-to-name", "The Name")`
## Example config
# In config/config.exs, or config.prod.exs, etc.
config :my_app, MyApp.Mailer,
adapter: Bamboo.SendinBlueAdapterV3,
api_key: "my_api_key"
# Define a Mailer. Maybe in lib/my_app/mailer.ex
defmodule MyApp.Mailer do
use Bamboo.Mailer, otp_app: :my_app
end
"""
@behaviour Bamboo.Adapter
require Logger
alias Bamboo.Email
alias Bamboo.Attachment
defmodule ApiError do
defexception [:message]
def exception(%{message: message}) do
%ApiError{message: message}
end
def exception(%{params: params, response: response}) do
filtered_params = params |> Plug.Conn.Query.decode() |> Map.put("key", "[FILTERED]")
message = """
There was a problem sending the email through the SendinBlue API v3.0.
Response:
#{inspect(response, limit: :infinity)}
Parameters:
#{inspect(filtered_params, limit: :infinity)}
"""
%ApiError{message: message}
end
end
def supports_attachments?, do: true
def deliver(email, config) do
api_key = get_key(config)
body = email |> to_sendinblue_body |> Poison.encode!()
url = get_api_url()
case :hackney.post(url, headers(api_key), body, [:with_body]) do
{:ok, status, _headers, response} when status > 299 ->
raise(ApiError, %{params: body, response: response})
{:ok, status, headers, response} ->
%{status_code: status, headers: headers, body: response}
{:error, reason} ->
Logger.warn("#{inspect({url, body})}")
raise(ApiError, %{message: inspect(reason)})
end
end
def handle_config(config) do
if config[:api_key] in [nil, ""] do
raise_api_key_error(config)
else
config
end
end
defp get_key(config) do
case Map.get(config, :api_key) do
nil -> raise_api_key_error(config)
key -> key
end
end
defp raise_api_key_error(config) do
raise ArgumentError, """
There was no API key set for the SendinBlue adapter.
* Here are the config options that were passed in:
#{inspect(config)}
"""
end
defp headers(api_key) do
[{"Content-Type", "application/json"}, {"api-key", api_key}]
end
defp to_sendinblue_body(%Email{} = email) do
%{}
|> put_sender(email)
|> put_to(email)
|> put_reply_to(email)
|> put_cc(email)
|> put_bcc(email)
|> put_subject(email)
|> put_html_body(email)
|> put_text_body(email)
|> put_attachments(email)
|> put_template_params(email)
|> put_tag_params(email)
end
defp put_sender(body, %Email{from: {nil, address}}) do
body |> Map.put(:sender, %{email: address})
end
defp put_sender(body, %Email{from: {name, address}}) do
body |> Map.put(:sender, %{email: address, name: name})
end
defp put_sender(body, %Email{from: address}) do
body |> Map.put(:sender, %{email: address})
end
defp put_to(body, %Email{to: to}) do
body |> put_addresses(:to, address_map(to))
end
defp put_cc(body, %Email{cc: []}), do: body
defp put_cc(body, %Email{cc: cc}) do
body |> put_addresses(:cc, address_map(cc))
end
defp put_bcc(body, %Email{bcc: []}), do: body
defp put_bcc(body, %Email{bcc: bcc}) do
body |> put_addresses(:bcc, address_map(bcc))
end
defp put_subject(body, %Email{subject: subject}), do: Map.put(body, :subject, subject)
defp put_html_body(body, %Email{html_body: nil}), do: body
defp put_html_body(body, %Email{html_body: html_body}),
do: Map.put(body, :htmlContent, html_body)
defp put_text_body(body, %Email{text_body: nil}), do: body
defp put_text_body(body, %Email{text_body: text_body}),
do: Map.put(body, :textContent, text_body)
defp put_reply_to(body, %Email{headers: headers} = _email) do
body |> put_reply_to_email(headers) |> put_reply_to_name(headers)
end
defp put_reply_to_email(body, %{"reply-to-email" => email}) do
reply_to = body |> Map.get(:replyTo, %{}) |> Map.put(:email, email)
Map.put(body, :replyTo, reply_to)
end
defp put_reply_to_email(body, _), do: body
defp put_reply_to_name(body, %{"reply-to-name" => name}) do
reply_to = body |> Map.get(:replyTo, %{}) |> Map.put(:name, name || "")
Map.put(body, :replyTo, reply_to)
end
defp put_reply_to_name(body, _), do: body
defp put_addresses(body, field, []), do: Map.delete(body, field)
defp put_addresses(body, field, addresses), do: Map.put(body, field, addresses)
defp base_uri do
Application.get_env(:bamboo, :sendinblue_base_uri) || default_base_uri()
end
defp put_template_params(params, %{private:
%{templateId: template_name, params: template_model}}) do
params
|> Map.put(:templateId, template_name)
|> Map.put(:params, template_model)
end
defp put_template_params(params, _) do
params
end
defp put_tag_params(params, %{private: %{tags: tag}}) do
Map.put(params, :tags, tag)
end
defp put_tag_params(params, _) do
params
end
defp put_attachments(body, %Email{attachments: []}), do: body
defp put_attachments(body, %Email{attachments: atts}) do
attachments =
atts
|> Enum.map(fn attachment -> prepare_attachment(attachment) end)
Map.put(body, :attachment, attachments)
end
defp prepare_attachment(%Attachment{data: data, filename: filename})
when not is_nil(data) and not is_nil(filename) do
%{content: Base.encode64(data), name: filename}
end
defp prepare_attachment(%Attachment{path: path, filename: filename} = att)
when not is_nil(filename) do
case URI.parse(path) do
%URI{scheme: nil} ->
att |> Map.put(:data, File.read!(path)) |> prepare_attachment()
%URI{} ->
%{url: path, name: filename}
end
end
defp prepare_attachment(%Attachment{path: path} = att) when not is_nil(path) do
att |> Map.put(:filename, Path.basename(path)) |> prepare_attachment()
end
defp address_map(addresses) when is_list(addresses) do
addresses
|> Enum.map(fn
{nil, address} -> %{email: address}
{name, address} -> %{email: address, name: name || ""}
address -> %{email: address}
end)
end
defp address_map(nil) do
[]
end
defp default_base_uri, do: "https://api.sendinblue.com"
defp get_api_url, do: "#{base_uri()}/v3/smtp/email"
end
| 27.144578 | 151 | 0.654091 |
1c695b05e46f432bf931a9890a0a4d91073481e2 | 312 | ex | Elixir | lib/elixir_google_scraper_web/controllers/api/error_handler.ex | junan/elixir_google_scraper | d032f3a9d5a30e354f1e6d607434670334936630 | [
"MIT"
] | null | null | null | lib/elixir_google_scraper_web/controllers/api/error_handler.ex | junan/elixir_google_scraper | d032f3a9d5a30e354f1e6d607434670334936630 | [
"MIT"
] | 25 | 2021-05-21T02:23:37.000Z | 2021-07-09T09:22:32.000Z | lib/elixir_google_scraper_web/controllers/api/error_handler.ex | junan/elixir_google_scraper | d032f3a9d5a30e354f1e6d607434670334936630 | [
"MIT"
] | null | null | null | defmodule ElixirGoogleScraperWeb.Api.ErrorHandler do
use ElixirGoogleScraperWeb, :controller
alias ElixirGoogleScraperWeb.V1.ErrorView
def unauthenticated(conn, _) do
conn
|> put_status(:unauthorized)
|> render(ErrorView, "error.json", errors: [%{detail: "Authentication failed"}])
end
end
| 26 | 84 | 0.75 |
1c6967ee7639e0fbeaa95790329b1c13821deaa5 | 2,196 | exs | Elixir | config/dev.exs | tyjet/taskmaster_api | 4ba670731717299b07abb5d56f3e068015826963 | [
"MIT"
] | null | null | null | config/dev.exs | tyjet/taskmaster_api | 4ba670731717299b07abb5d56f3e068015826963 | [
"MIT"
] | 1 | 2021-03-10T19:38:43.000Z | 2021-03-10T19:38:43.000Z | config/dev.exs | tyjet/taskmaster_api | 4ba670731717299b07abb5d56f3e068015826963 | [
"MIT"
] | null | null | null | use Mix.Config
# Configure your database
config :taskmaster_api, TaskmasterApi.Repo,
username: "postgres",
password: "postgres",
database: "taskmaster_api_dev",
hostname: "localhost",
show_sensitive_data_on_connection_error: true,
pool_size: 10
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :taskmaster_api, TaskmasterApiWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [
node: [
"node_modules/webpack/bin/webpack.js",
"--mode",
"development",
"--watch-stdin",
cd: Path.expand("../assets", __DIR__)
]
]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :taskmaster_api, TaskmasterApiWeb.Endpoint,
live_reload: [
patterns: [
~r"priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$",
~r"priv/gettext/.*(po)$",
~r"lib/taskmaster_api_web/(live|views)/.*(ex)$",
~r"lib/taskmaster_api_web/templates/.*(eex)$"
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
| 28.519481 | 68 | 0.698998 |
1c69950538406dd1f26f680a0f95d09085d1ab4f | 110 | exs | Elixir | test/fusion/utilities/socat_test.exs | elpddev/fusion | 7106ba4922786e1d79310074e46e6996d0c2f4aa | [
"MIT"
] | 2 | 2020-02-02T20:22:28.000Z | 2020-12-10T18:25:03.000Z | test/fusion/utilities/socat_test.exs | elpddev/fusion | 7106ba4922786e1d79310074e46e6996d0c2f4aa | [
"MIT"
] | null | null | null | test/fusion/utilities/socat_test.exs | elpddev/fusion | 7106ba4922786e1d79310074e46e6996d0c2f4aa | [
"MIT"
] | null | null | null | defmodule Fusion.Utilities.SocatTest do
use ExUnit.Case
doctest Fusion.Utilities.Socat, import: true
end
| 18.333333 | 46 | 0.8 |
1c6a06143b3ce0d8bf4e9f78708d5c85f66fc2c0 | 22,427 | ex | Elixir | lib/ecto/migration.ex | cnsa/ecto | a6e0eaaa5da1032fad571308c338eca1b5f77738 | [
"Apache-2.0"
] | null | null | null | lib/ecto/migration.ex | cnsa/ecto | a6e0eaaa5da1032fad571308c338eca1b5f77738 | [
"Apache-2.0"
] | null | null | null | lib/ecto/migration.ex | cnsa/ecto | a6e0eaaa5da1032fad571308c338eca1b5f77738 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Migration do
@moduledoc """
Migrations are used to modify your database schema over time.
This module provides many helpers for migrating the database,
allowing developers to use Elixir to alter their storage in
a way that is database independent.
Here is an example:
defmodule MyRepo.Migrations.CreatePosts do
use Ecto.Migration
def up do
create table(:weather) do
add :city, :string, size: 40
add :temp_lo, :integer
add :temp_hi, :integer
add :prcp, :float
timestamps
end
end
def down do
drop table(:weather)
end
end
  Note that migrations have `up/0` and `down/0` instructions, where
`up/0` is used to update your database and `down/0` rolls back
the prompted changes.
Ecto provides some mix tasks to help developers work with migrations:
* `mix ecto.gen.migration add_weather_table` - generates a
migration that the user can fill in with particular commands
* `mix ecto.migrate` - migrates a repository
* `mix ecto.rollback` - rolls back a particular migration
Run the `mix help COMMAND` for more information.
## Change
Migrations can also be automatically reversible by implementing
`change/0` instead of `up/0` and `down/0`. For example, the
migration above can be written as:
defmodule MyRepo.Migrations.CreatePosts do
use Ecto.Migration
def change do
create table(:weather) do
add :city, :string, size: 40
add :temp_lo, :integer
add :temp_hi, :integer
add :prcp, :float
timestamps
end
end
end
Notice not all commands are reversible though. Trying to rollback
a non-reversible command will raise an `Ecto.MigrationError`.
## Field Types
The Ecto primitive types are mapped to the appropriate database
type by the various database adapters. For example, `:string` is converted to
`:varchar`, `:datetime` to the underlying `:datetime` or `:timestamp` type,
`:binary` to `:bits` or `:blob`, and so on.
Similarly, you can pass any field type supported by your database
as long as it maps to an Ecto type. For instance, you can use `:text`,
`:varchar` or `:char` in your migrations as `add :field_name, :text`.
In your Ecto schema, they will all map to the same `:string` type.
## Prefixes
Migrations support specifying a table prefix or index prefix which will target either a schema
if using Postgres, or a different database if using MySQL. If no prefix is
provided, the default schema or database is used.
Any reference declared in the table migration refers by default to the table with
the same declared prefix.
The prefix is specified in the table options:
def up do
create table(:weather, prefix: :north_america) do
add :city, :string, size: 40
add :temp_lo, :integer
add :temp_hi, :integer
add :prcp, :float
add :group_id, references(:groups)
timestamps
end
create index(:weather, [:city], prefix: :north_america)
end
Note: if using MySQL with a prefixed table, you must use the same prefix for the references since
cross database references are not supported.
For both MySQL and Postgres with a prefixed table, you must use the same prefix for the index field to ensure
you index the prefix qualified table.
## Transactions
By default, Ecto runs all migrations inside a transaction. That's not always
  ideal: for example, PostgreSQL allows creating/dropping indexes concurrently but
only outside of any transaction (see the [PostgreSQL
docs](http://www.postgresql.org/docs/9.2/static/sql-createindex.html#SQL-CREATEINDEX-CONCURRENTLY)).
Migrations can be forced to run outside a transaction by setting the
`@disable_ddl_transaction` module attribute to `true`:
defmodule MyRepo.Migrations.CreateIndexes do
use Ecto.Migration
@disable_ddl_transaction true
def change do
create index(:posts, [:slug], concurrently: true)
end
end
Since running migrations outside a transaction can be dangerous, consider
performing very few operations in such migrations.
See the `index/3` function for more information on creating/dropping indexes
concurrently.
## Schema Migrations table
Version numbers of migrations will be saved in `schema_migrations` table.
But you can configure the table via:
config :app, App.Repo, migration_source: "my_migrations"
"""
  defmodule Index do
    @moduledoc """
    Defines an index struct used in migrations.
    """
    # Built via `index/3` and consumed by `create/1`, `drop/1`, etc. in the
    # enclosing module.
    defstruct table: nil,
              prefix: nil,
              name: nil,
              columns: [],
              unique: false,
              concurrently: false,
              using: nil,
              where: nil
    @type t :: %__MODULE__{
            table: atom,
            prefix: atom,
            name: atom,
            columns: [atom | String.t],
            unique: boolean,
            concurrently: boolean,
            using: atom | String.t,
            where: atom | String.t
          }
  end
  defmodule Table do
    @moduledoc """
    Defines a table struct used in migrations.
    """
    # Built via `table/2`; when `primary_key` is true, `create` adds a
    # serial `:id` column automatically.
    defstruct name: nil, prefix: nil, primary_key: true, engine: nil, options: nil
    @type t :: %__MODULE__{name: atom, prefix: atom | nil, primary_key: boolean,
                           engine: atom, options: String.t}
  end
defmodule Reference do
@moduledoc """
Defines a reference struct used in migrations.
"""
defstruct name: nil, table: nil, column: :id, type: :serial, on_delete: :nothing, on_update: :nothing
@type t :: %__MODULE__{table: atom, column: atom, type: atom, on_delete: atom, on_update: atom}
end
  defmodule Constraint do
    @moduledoc """
    Defines a Constraint struct used in migrations.
    """
    # Built via the `constraint` helper (see the `create/1` examples);
    # either `check` or `exclude` carries the constraint expression.
    defstruct name: nil, table: nil, check: nil, exclude: nil, prefix: nil
    @type t :: %__MODULE__{name: atom, table: atom, prefix: atom | nil,
                           check: String.t | nil, exclude: String.t | nil}
  end
alias Ecto.Migration.Runner
  @doc false
  defmacro __using__(_) do
    quote location: :keep do
      import Ecto.Migration
      # Default: run this migration inside a DDL transaction. Migration
      # modules may opt out by setting `@disable_ddl_transaction true`.
      @disable_ddl_transaction false
      # Installs __migration__/0 via __before_compile__/1 below.
      @before_compile Ecto.Migration
    end
  end
  @doc false
  defmacro __before_compile__(_env) do
    # Exposes the migration options (currently just whether the DDL
    # transaction is disabled) so they can be read when the migration runs.
    quote do
      def __migration__,
        do: [disable_ddl_transaction: @disable_ddl_transaction]
    end
  end
  @doc """
  Creates a table.
  By default, the table will also include a primary_key of name `:id`
  and type `:serial`. Check `table/2` docs for more information.
  ## Examples
      create table(:posts) do
        add :title, :string, default: "Untitled"
        add :body, :text
        timestamps
      end
  """
  # Expansion is shared with create_if_not_exists/2 via do_create/3.
  defmacro create(object, do: block) do
    do_create(object, :create, block)
  end
  @doc """
  Creates a table if it does not exist.
  Works just like `create/2` but does not raise an error when table
  already exists.
  """
  # Expansion is shared with create/2 via do_create/3.
  defmacro create_if_not_exists(object, do: block) do
    do_create(object, :create_if_not_exists, block)
  end
  # Shared expansion for `create/2` and `create_if_not_exists/2`: wraps the
  # user block between runner start/end commands and, when the table asks
  # for it, prepends the default serial `:id` primary key column.
  defp do_create(object, command, block) do
    quote do
      table = %Table{} = unquote(object)
      Runner.start_command({unquote(command), Ecto.Migration.__prefix__(table)})
      if table.primary_key do
        add(:id, :serial, primary_key: true)
      end
      unquote(block)
      Runner.end_command
      # Evaluate to the table struct, mirroring create/1.
      table
    end
  end
  @doc """
  Alters a table.
  ## Examples
      alter table(:posts) do
        add :summary, :text
        modify :title, :text
        remove :views
      end
  """
  # Wraps the user block between runner start/end commands for :alter.
  defmacro alter(object, do: block) do
    quote do
      table = %Table{} = unquote(object)
      Runner.start_command({:alter, Ecto.Migration.__prefix__(table)})
      unquote(block)
      Runner.end_command
    end
  end
@doc """
Creates one of the following:
* an index
* a table with only an `:id` field
* a constraint
When reversing (in `change` running backward) indexes are only dropped if they
exist and no errors are raised. To enforce dropping an index use `drop/1`.
## Examples
create index(:posts, [:name])
create table(:version)
create constraint(:products, "price_must_be_positive", check: "price > 0")
"""
def create(%Index{} = index) do
Runner.execute {:create, __prefix__(index)}
index
end
def create(%Constraint{} = constraint) do
Runner.execute {:create, __prefix__(constraint)}
constraint
end
def create(%Table{} = table) do
do_create table, :create
table
end
@doc """
Creates an index or a table with only `:id` field if one does not yet exist.
## Examples
create_if_not_exists index(:posts, [:name])
create_if_not_exists table(:version)
"""
def create_if_not_exists(%Index{} = index) do
Runner.execute {:create_if_not_exists, __prefix__(index)}
end
def create_if_not_exists(%Table{} = table) do
do_create table, :create_if_not_exists
end
defp do_create(table, command) do
columns =
if table.primary_key do
[{:add, :id, :serial, primary_key: true}]
else
[]
end
Runner.execute {command, __prefix__(table), columns}
end
@doc """
Drops one of the following:
* an index
* a table
* a constraint
## Examples
drop index(:posts, [:name])
drop table(:posts)
drop constraint(:products, name: "price_must_be_positive")
"""
def drop(%{} = index_or_table_or_constraint) do
Runner.execute {:drop, __prefix__(index_or_table_or_constraint)}
index_or_table_or_constraint
end
@doc """
Drops a table or index if it exists.
Does not raise an error if table or index does not exist.
## Examples
drop_if_exists index(:posts, [:name])
drop_if_exists table(:posts)
"""
def drop_if_exists(%{} = index_or_table) do
Runner.execute {:drop_if_exists, __prefix__(index_or_table)}
index_or_table
end
@doc """
Returns a table struct that can be given on create, alter, etc.
## Examples
create table(:products) do
add :name, :string
add :price, :decimal
end
drop table(:products)
create table(:products, primary_key: false) do
add :name, :string
add :price, :decimal
end
## Options
* `:primary_key` - when false, does not generate primary key on table creation
* `:engine` - customizes the table storage for supported databases. For MySQL,
the default is InnoDB
* `:options` - provide custom options that will be appended after generated
statement, for example "WITH", "INHERITS" or "ON COMMIT" clauses
"""
def table(name, opts \\ []) when is_atom(name) do
struct(%Table{name: name}, opts)
end
@doc ~S"""
Returns an index struct that can be used on `create`, `drop`, etc.
Expects the table name as first argument and the index fields as
second. The field can be an atom, representing a column, or a
string representing an expression that is sent as is to the database.
Indexes are non-unique by default.
## Options
* `:name` - the name of the index. Defaults to "#{table}_#{column}_index"
* `:unique` - if the column(s) is unique or not
* `:concurrently` - if the index should be created/dropped concurrently
* `:using` - configures the index type
* `:prefix` - prefix for the index
* `:where` - the conditions for a partial index
## Adding/dropping indexes concurrently
PostgreSQL supports adding/dropping indexes concurrently (see the
[docs](http://www.postgresql.org/docs/9.4/static/sql-createindex.html)).
In order to take advantage of this, the `:concurrently` option needs to be set
to `true` when the index is created/dropped.
**Note**: in order for the `:concurrently` option to work, the migration must
not be run inside a transaction. See the `Ecto.Migration` docs for more
information on running migrations outside of a transaction.
## Index types
PostgreSQL supports several index types like B-tree, Hash or GiST. When
creating an index, the index type defaults to B-tree, but it can be specified
with the `:using` option. The `:using` option can be an atom or a string; its
value is passed to the `USING` clause as is.
More information on index types can be found in the [PostgreSQL
docs](http://www.postgresql.org/docs/9.4/static/indexes-types.html).
## Partial indexes
Databases like PostgreSQL and MSSQL supports partial indexes.
A partial index is an index built over a subset of a table. The subset
is defined by a conditional expression using the `:where` option.
The `:where` option can be an atom or a string; its value is passed
to the `WHERE` clause as is.
More information on partial indexes can be found in the [PostgreSQL
docs](http://www.postgresql.org/docs/9.4/static/indexes-partial.html).
## Examples
# Without a name, index defaults to products_category_id_sku_index
create index(:products, [:category_id, :sku], unique: true)
# Name can be given explicitly though
drop index(:products, [:category_id, :sku], name: :my_special_name)
# Indexes can be added concurrently
create index(:products, [:category_id, :sku], concurrently: true)
# The index type can be specified
create index(:products, [:name], using: :hash)
# Create an index on custom expressions
create index(:products, ["lower(name)"], name: :products_lower_name_index)
# Create a partial index
create index(:products, [:user_id], where: "price = 0", name: :free_products_index)
"""
def index(table, columns, opts \\ []) when is_atom(table) and is_list(columns) do
index = struct(%Index{table: table, columns: columns}, opts)
%{index | name: index.name || default_index_name(index)}
end
@doc """
Shortcut for creating a unique index.
See `index/3` for more information.
"""
def unique_index(table, columns, opts \\ []) when is_atom(table) and is_list(columns) do
index(table, columns, [unique: true] ++ opts)
end
# Builds the conventional index name, e.g. :products_category_id_sku_index,
# by joining the table, every column, and the "index" suffix with underscores.
# Non-word characters (from expression columns) are normalized to underscores.
defp default_index_name(index) do
  parts = List.flatten([index.table, index.columns, "index"])

  parts
  |> Enum.join("_")
  |> String.replace(~r"[^\w_]", "_")
  |> String.replace("__", "_")
  |> String.to_atom()
end
@doc """
Executes arbitrary SQL or a keyword command in NoSQL databases.
## Examples
execute "UPDATE posts SET published_at = NULL"
execute create: "posts", capped: true, size: 1024
"""
def execute(command) when is_binary(command) or is_list(command) do
Runner.execute command
end
@doc """
Gets the migrator direction.
"""
@spec direction :: :up | :down
def direction do
Runner.migrator_direction
end
@doc """
Gets the migrator prefix.
"""
def prefix do
Runner.prefix
end
@doc """
Adds a column when creating or altering a table.
This function also accepts Ecto primitive types as column types
and they are normalized by the database adapter. For example,
`:string` is converted to `:varchar`, `:datetime` to the underlying
`:datetime` or `:timestamp` type, `:binary` to `:bits` or `:blob`, and so on.
However, the column type is not always the same as the type used in your
schema. For example, a schema that has a `:string` field,
can be supported by columns of types `:char`, `:varchar`, `:text` and others.
For this reason, this function also accepts `:text` and other columns,
which are sent as is to the underlying database.
To sum up, the column type may be either an Ecto primitive type,
which is normalized in cases the database does not understand it,
like `:string` or `:binary`, or a database type which is passed as is.
Custom Ecto types, like `Ecto.Datetime`, are not supported because
they are application level concern and may not always map to the
database.
## Examples
create table(:posts) do
add :title, :string, default: "Untitled"
end
alter table(:posts) do
add :summary, :text # Database type
add :object, :json
end
## Options
* `:primary_key` - when true, marks this field as the primary key
* `:default` - the column's default value. can be a string, number
or a fragment generated by `fragment/1`
* `:null` - when `false`, the column does not allow null values
* `:size` - the size of the type (for example the numbers of characters).
Default is no size, except for `:string` that defaults to 255.
* `:precision` - the precision for a numeric type. Default is no precision
* `:scale` - the scale of a numeric type. Default is 0 scale
"""
def add(column, type, opts \\ []) when is_atom(column) do
validate_type!(type)
Runner.subcommand {:add, column, type, opts}
end
@doc """
Renames a table.
## Examples
rename table(:posts), to: table(:new_posts)
"""
def rename(%Table{} = table_current, to: %Table{} = table_new) do
Runner.execute {:rename, __prefix__(table_current), __prefix__(table_new)}
table_new
end
@doc """
Renames a column outside of the `alter` statement.
## Examples
rename table(:posts), :title, to: :summary
"""
def rename(%Table{} = table, current_column, to: new_column) when is_atom(current_column) and is_atom(new_column) do
Runner.execute {:rename, __prefix__(table), current_column, new_column}
table
end
@doc """
Generates a fragment to be used as default value.
## Examples
create table(:posts) do
add :inserted_at, :datetime, default: fragment("now()")
end
"""
def fragment(expr) when is_binary(expr) do
{:fragment, expr}
end
@doc """
Adds `:inserted_at` and `:updated_at` timestamps columns.
Those columns are of `:datetime` type and by default cannot
be null. `opts` can be given to customize the generated
fields.
## Options
* `:inserted_at` - the name of the column for insertion times
* `:updated_at` - the name of the column for update times
"""
def timestamps(opts \\ []) do
opts = Keyword.put_new(opts, :null, false)
inserted_at = opts[:inserted_at] || :inserted_at
updated_at = opts[:updated_at] || :updated_at
opts = Keyword.drop opts, [:inserted_at, :updated_at]
add(inserted_at, :datetime, opts)
add(updated_at, :datetime, opts)
end
@doc """
Modifies the type of column when altering a table.
See `add/3` for more information on supported types.
## Examples
alter table(:posts) do
modify :title, :text
end
## Options
* `:null` - sets to null or not null
* `:default` - changes the default
* `:size` - the size of the type (for example the numbers of characters). Default is no size.
* `:precision` - the precision for a numeric type. Default is no precision.
* `:scale` - the scale of a numeric type. Default is 0 scale.
"""
def modify(column, type, opts \\ []) when is_atom(column) do
Runner.subcommand {:modify, column, type, opts}
end
@doc """
Removes a column when altering a table.
## Examples
alter table(:posts) do
remove :title
end
"""
def remove(column) when is_atom(column) do
Runner.subcommand {:remove, column}
end
@doc ~S"""
Defines a foreign key.
## Examples
create table(:products) do
add :group_id, references(:groups)
end
## Options
* `:name` - The name of the underlying reference,
defaults to "#{table}_#{column}_fkey"
* `:column` - The foreign key column, default is `:id`
* `:type` - The foreign key type, default is `:serial`
* `:on_delete` - What to perform if the entry is deleted.
May be `:nothing`, `:delete_all` or `:nilify_all`.
Defaults to `:nothing`.
* `:on_update` - What to perform if the entry is updated.
May be `:nothing`, `:update_all` or `:nilify_all`.
Defaults to `:nothing`.
"""
def references(table, opts \\ []) when is_atom(table) do
reference = struct(%Reference{table: table}, opts)
unless reference.on_delete in [:nothing, :delete_all, :nilify_all] do
raise ArgumentError, "unknown :on_delete value: #{inspect reference.on_delete}"
end
unless reference.on_update in [:nothing, :update_all, :nilify_all] do
raise ArgumentError, "unknown :on_update value: #{inspect reference.on_update}"
end
reference
end
@doc ~S"""
Defines a constraint (either a check constraint or an exclusion constraint) to be evaluated by the database when a row is inserted or updated.
## Examples
create constraint(:users, :price_must_be_positive, check: "price > 0")
create constraint(:size_ranges, :no_overlap, exclude: ~s|gist (int4range("from", "to", '[]') WITH &&)|
drop constraint(:products, "price_must_be_positive")
## Options
* `:check` - The expression to evaluate on a row. Required when creating.
* `:name` - The name of the constraint - required.
"""
def constraint(table, name, opts \\ [] ) do
struct(%Constraint{table: table, name: name}, opts)
end
@doc """
Executes queue migration commands.
Reverses the order commands are executed when doing a rollback
on a change/0 function and resets commands queue.
"""
def flush do
Runner.flush
end
# Rejects Elixir module names (e.g. `Ecto.DateTime`) used as column types:
# only plain atoms such as :string or :text map to database types.
defp validate_type!(type) when is_atom(type) do
  if String.starts_with?(Atom.to_string(type), "Elixir.") do
    raise ArgumentError,
      "#{inspect type} is not a valid database type, " <>
      "please use an atom like :string, :text and so on"
  else
    :ok
  end
end

# Composite types such as {:array, :string} are valid when the subtype is.
defp validate_type!({type, subtype}) when is_atom(type) and is_atom(subtype) do
  validate_type!(subtype)
end

# References are validated when they are built in references/2, so they
# pass straight through.
defp validate_type!(%Reference{} = reference) do
  reference
end
@doc false
# Resolves the effective prefix for an index/table struct:
#   * no explicit prefix      -> inherit the migrator's runtime prefix
#   * matching/absent runner  -> keep the struct as is
#   * conflicting prefixes    -> raise Ecto.MigrationError
def __prefix__(%{prefix: prefix} = index_or_table) do
  runner_prefix = Runner.prefix()

  cond do
    is_nil(prefix) ->
      %{index_or_table | prefix: runner_prefix}
    is_nil(runner_prefix) or runner_prefix == prefix ->
      index_or_table
    true ->
      # Bug fix: the message previously read "does match the migrator prefix",
      # inverting its meaning — this branch runs exactly when they do NOT match.
      raise Ecto.MigrationError, message:
        "the :prefix option `#{inspect prefix}` does not match the migrator prefix `#{inspect runner_prefix}`"
  end
end
end
| 28.938065 | 144 | 0.66139 |
1c6a0b4aac6da36bd8caf35bf9f04e39188d5c7a | 193 | ex | Elixir | elixir/codes-from-books/little-elixir/cap11/concuerror_playground/test/concurrency/stacky_test.ex | trxeste/wrk | 3e05e50ff621866f0361cc8494ce8f6bb4d97fae | [
"BSD-3-Clause"
] | 1 | 2017-10-16T03:00:50.000Z | 2017-10-16T03:00:50.000Z | elixir/codes-from-books/little-elixir/cap11/concuerror_playground/test/concurrency/stacky_test.ex | trxeste/wrk | 3e05e50ff621866f0361cc8494ce8f6bb4d97fae | [
"BSD-3-Clause"
] | null | null | null | elixir/codes-from-books/little-elixir/cap11/concuerror_playground/test/concurrency/stacky_test.ex | trxeste/wrk | 3e05e50ff621866f0361cc8494ce8f6bb4d97fae | [
"BSD-3-Clause"
] | null | null | null | Code.require_file "../test_helper.exs", __DIR__
defmodule Stacky.ConcurrencyTest do
def test do
{:ok, _pid} = Stacky.start_link
Stacky.tag(1)
Stacky.stop
:ok
end
end
| 12.866667 | 47 | 0.663212 |
1c6a9824123c7692480cfb0f1228bbab49a21e4a | 10,962 | ex | Elixir | lib/ecto/query/api.ex | jeregrine/ecto | 98b2dd4bf7b39738ab9a5ae3fa7e48e43a4af39b | [
"Apache-2.0"
] | null | null | null | lib/ecto/query/api.ex | jeregrine/ecto | 98b2dd4bf7b39738ab9a5ae3fa7e48e43a4af39b | [
"Apache-2.0"
] | null | null | null | lib/ecto/query/api.ex | jeregrine/ecto | 98b2dd4bf7b39738ab9a5ae3fa7e48e43a4af39b | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Query.API do
@moduledoc """
This module lists all functions allowed in the query API.
* Comparison operators: `==`, `!=`, `<=`, `>=`, `<`, `>`
* Boolean operators: `and`, `or`, `not`
* Inclusion operator: `in/2`
* Search functions: `like/2` and `ilike/2`
* Null check functions: `is_nil/1`
* Aggregates: `count/1`, `avg/1`, `sum/1`, `min/1`, `max/1`
* Date/time intervals: `datetime_add/3`, `date_add/3`, `from_now/2`, `ago/2`
* Inside select: `struct/2`, `map/2` and literals (map, tuples, lists, etc)
* General: `fragment/1`, `field/2` and `type/2`
Note the functions in this module exist for documentation
purposes and one should never need to invoke them directly.
Furthermore, it is possible to define your own macros and
use them in Ecto queries (see docs for `fragment/1`).
"""
@doc """
Binary `==` operation.
"""
def left == right, do: doc! [left, right]
@doc """
Binary `!=` operation.
"""
def left != right, do: doc! [left, right]
@doc """
Binary `<=` operation.
"""
def left <= right, do: doc! [left, right]
@doc """
Binary `>=` operation.
"""
def left >= right, do: doc! [left, right]
@doc """
Binary `<` operation.
"""
def left < right, do: doc! [left, right]
@doc """
Binary `>` operation.
"""
def left > right, do: doc! [left, right]
@doc """
Binary `and` operation.
"""
def left and right, do: doc! [left, right]
@doc """
Binary `or` operation.
"""
def left or right, do: doc! [left, right]
@doc """
Unary `not` operation.
"""
def not(value), do: doc! [value]
@doc """
Checks if the left-value is included in the right one.
from p in Post, where: p.id in [1, 2, 3]
The right side may either be a list, a literal list
or even a column in the database with array type:
from p in Post, where: "elixir" in p.tags
"""
def left in right, do: doc! [left, right]
@doc """
Searches for `search` in `string`.
from p in Post, where: like(p.body, "Chapter%")
Translates to the underlying SQL LIKE query, therefore
its behaviour is dependent on the database. In particular,
PostgreSQL will do a case-sensitive operation, while the
majority of other databases will be case-insensitive. For
performing a case-insensitive `like` in PostgreSQL, see `ilike/2`.
"""
def like(string, search), do: doc! [string, search]
@doc """
Searches for `search` in `string` in a case insensitive fashion.
from p in Post, where: ilike(p.body, "Chapter%")
Translates to the underlying SQL ILIKE query. This operation is
only available on PostgreSQL.
"""
def ilike(string, search), do: doc! [string, search]
@doc """
Checks if the given value is nil.
from p in Post, where: is_nil(p.published_at)
"""
def is_nil(value), do: doc! [value]
@doc """
Counts the given entry.
from p in Post, select: count(p.id)
"""
def count(value), do: doc! [value]
@doc """
Counts the distinct values in given entry.
from p in Post, select: count(p.id, :distinct)
"""
def count(value, :distinct), do: doc! [value, :distinct]
@doc """
Calculates the average for the given entry.
from p in Payment, select: avg(p.value)
"""
def avg(value), do: doc! [value]
@doc """
Calculates the sum for the given entry.
from p in Payment, select: sum(p.value)
"""
def sum(value), do: doc! [value]
@doc """
Calculates the minimum for the given entry.
from p in Payment, select: min(p.value)
"""
def min(value), do: doc! [value]
@doc """
Calculates the maximum for the given entry.
from p in Payment, select: max(p.value)
"""
def max(value), do: doc! [value]
@doc """
Adds a given interval to a datetime.
The first argument is a `datetime`, the second one is the count
for the interval, which may be either positive or negative and
the interval value:
# Get all items published since the last month
from p in Post, where: p.published_at >
datetime_add(^Ecto.DateTime.utc, -1, "month")
In the example above, we used `datetime_add/3` to subtract one month
from the current datetime and compared it with the `p.published_at`.
If you want to perform operations on date, `date_add/3` could be used.
The following intervals are supported: year, month, week, day, hour,
minute, second, millisecond and microsecond.
"""
def datetime_add(datetime, count, interval), do: doc! [datetime, count, interval]
@doc """
Adds a given interval to a date.
See `datetime_add/3` for more information.
"""
def date_add(date, count, interval), do: doc! [date, count, interval]
@doc """
Adds the given interval to the current time in UTC.
The current time in UTC is retrieved from Elixir and
not from the database.
## Examples
from a in Account, where: p.expires_at < from_now(3, "months")
"""
def from_now(count, interval), do: doc! [count, interval]
@doc """
Substracts the given interval from the current time in UTC.
The current time in UTC is retrieved from Elixir and
not from the database.
## Examples
from p in Post, where: p.published_at > ago(3, "months")
"""
def ago(count, interval), do: doc! [count, interval]
@doc """
Send fragments directly to the database.
It is not possible to represent all possible database queries using
Ecto's query syntax. When such is required, it is possible to use
fragments to send any expression to the database:
def unpublished_by_title(title) do
from p in Post,
where: is_nil(p.published_at) and
fragment("downcase(?)", p.title) == ^title
end
In the example above, we are using the downcase procedure in the
database to downcase the title column.
It is very important to keep in mind that Ecto is unable to do any
type casting described above when fragments are used. You can
however use the `type/2` function to give Ecto some hints:
fragment("downcase(?)", p.title) == type(^title, :string)
Or even say the right side is of the same type as `p.title`:
fragment("downcase(?)", p.title) == type(^title, p.title)
It is possible to make use of PostgreSQL's JSON/JSONB data type
with fragments, as well:
fragment("?->>? ILIKE ?", p.map, "key_name", ^some_value)
## Keyword fragments
In order to support databases that do not have string-based
queries, like MongoDB, fragments also allow keywords to be given:
from p in Post,
where: fragment(title: ["$eq": ^some_value])
## Defining custom functions using macros and fragment
You can add a custom Ecto query function using macros. For example
to expose SQL's coalesce function you can define this macro:
defmodule CustomFunctions do
defmacro coalesce(left, right) do
quote do
fragment("coalesce(?, ?)", unquote(left), unquote(right))
end
end
end
To have coalesce/2 available, just import the module that defines it.
import CustomFunctions
The only downside is that it will show up as a fragment when
inspecting the Elixir query. Other than that, it should be
equivalent to a built-in Ecto query function.
"""
def fragment(fragments), do: doc! [fragments]
@doc """
Allows a field to be dynamically accessed.
def at_least_four(doors_or_tires) do
from c in Car,
where: field(c, ^doors_or_tires) >= 4
end
In the example above, both `at_least_four(:doors)` and `at_least_four(:tires)`
would be valid calls as the field is dynamically generated.
"""
def field(source, field), do: doc! [source, field]
@doc """
Used in `select` to specify which struct fields should be returned.
For example, if you don't need all fields to be returned
as part of a struct, you can filter it to include only certain
fields by using `struct/2`:
from p in Post,
select: struct(p, [:title, :body])
`struct/2` can also be used to dynamically select fields:
fields = [:title, :body]
from p in Post, select: struct(p, ^fields)
As a convenience, `select` allows developers to take fields
without an explicit call to `struct/2`:
from p in Post, select: [:title, :body]
Or even dynamically:
fields = [:title, :body]
from p in Post, select: ^fields
However, `struct/2` is still useful when you want to limit
the fields of different structs:
from(city in City, join: country in assoc(city, :country),
select: {struct(city, [:country_id, :name]), struct(country, [:id, :population])}
For preloads, the selected fields may be specified from the parent:
from(city in City, preload: :country,
select: struct(city, [:country_id, :name, country: [:id, :population]]))
**IMPORTANT**: When filtering fields for associations, you
MUST include the foreign keys used in the relationship,
otherwise Ecto will be unable to find associated records.
"""
def struct(source, fields), do: doc! [source, fields]
@doc """
Used in `select` to specify which fields should be returned as a map.
For example, if you don't need all fields to be returned or
neither need a struct, you can use `map/2` to achieve both:
from p in Post,
select: map(p, [:title, :body])
`map/2` can also be used to dynamically select fields:
fields = [:title, :body]
from p in Post, select: map(p, ^fields)
`map/2` is also useful when you want to limit the fields
of different structs:
from(city in City, join: country in assoc(city, :country),
select: {map(city, [:country_id, :name]), map(country, [:id, :population])}
For preloads, the selected fields may be specified from the parent:
from(city in City, preload: :country,
select: map(city, [:country_id, :name, country: [:id, :population]]))
**IMPORTANT**: When filtering fields for associations, you
MUST include the foreign keys used in the relationship,
otherwise Ecto will be unable to find associated records.
"""
def map(source, fields), do: doc! [source, fields]
@doc """
Casts the given value to the given type.
Most of the times, Ecto is able to proper cast interpolated
values due to its type checking mechanism. In some situations
though, in particular when using fragments with `fragment/1`,
you may want to tell Ecto you are expecting a particular type:
fragment("downcase(?)", p.title) == type(^title, :string)
It is also possible to say the type must match the same of a column:
fragment("downcase(?)", p.title) == type(^title, p.title)
"""
def type(interpolated_value, type), do: doc! [interpolated_value, type]
defp doc!(_) do
raise "the functions in Ecto.Query.API should not be invoked directly, " <>
"they serve for documentation purposes only"
end
end
| 29.54717 | 92 | 0.657544 |
1c6abb77dfa105700a59a240a544c0da1562ef57 | 3,295 | ex | Elixir | apps/bytepack/lib/bytepack/packages.ex | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | 313 | 2020-12-03T17:26:24.000Z | 2022-03-18T09:05:14.000Z | apps/bytepack/lib/bytepack/packages.ex | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | null | null | null | apps/bytepack/lib/bytepack/packages.ex | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | 57 | 2020-12-03T17:41:53.000Z | 2022-03-17T17:28:16.000Z | defmodule Bytepack.Packages do
import Ecto.Query
alias Bytepack.Repo
alias Bytepack.AuditLog
alias Bytepack.Orgs.{Org, Membership}
alias Bytepack.Packages.{Package, Release, PackageDownload}
def list_available_packages(%Org{} = org, clauses \\ []) do
from(Package, where: [org_id: ^org.id], where: ^clauses, order_by: [asc: :name])
|> Repo.all()
end
def get_available_package_by!(%Org{} = org, clauses) do
Repo.get_by!(Package, [org_id: org.id] ++ clauses)
|> preload_releases()
end
def get_release_by_version!(%Package{} = package, version) do
Repo.get_by!(Release, package_id: package.id, version: version)
end
def create_package(%Org{} = org, attrs \\ %{}) do
%Package{org_id: org.id}
|> Package.changeset(attrs)
|> Repo.insert()
end
def update_package(audit_context, %Package{} = package, attrs \\ %{}) do
Ecto.Multi.new()
|> Ecto.Multi.update(:package, Package.update_changeset(package, attrs))
|> AuditLog.multi(audit_context, "packages.update_package", fn
audit_context, %{package: package} ->
%{
audit_context
| params: %{
package_id: package.id,
description: package.description,
external_doc_url: package.external_doc_url
}
}
end)
|> Repo.transaction()
|> case do
{:ok, %{package: package}} -> {:ok, package}
{:error, :package, changeset, _} -> {:error, changeset}
end
end
def change_package(package, attrs \\ %{}) do
Package.update_changeset(package, attrs)
end
def preload_releases(%Package{} = package) do
package
|> Repo.preload(:releases)
|> Map.update!(:releases, fn releases ->
Enum.sort_by(releases, & &1.version, {:desc, Version})
end)
end
def preload_releases(packages) when is_list(packages) do
packages
|> Repo.preload(:releases)
|> Enum.map(&preload_releases/1)
end
def create_release(%Package{} = package, size_in_bytes, attrs \\ %{}) do
%Release{package: package, size_in_bytes: size_in_bytes}
|> Release.changeset(attrs)
|> Repo.insert()
end
def broadcast_published(user_id, package, release, new_package?) do
topic =
if new_package? do
"user:#{user_id}:package:new"
else
"user:#{user_id}:package:#{package.id}"
end
Phoenix.PubSub.broadcast(
Bytepack.PubSub,
topic,
{:published,
%{
package_id: package.id,
package_name: package.name,
package_type: package.type,
version: release.version
}}
)
end
@doc """
It increments the download counter of a given release.
It will track based on date, user and org.
"""
def increment_download_counter!(
%Release{} = release,
%Membership{} = membership,
date \\ Date.utc_today()
) do
query = from(m in PackageDownload, update: [inc: [counter: 1]])
Repo.insert!(
%PackageDownload{
release_id: release.id,
org_id: membership.org_id,
user_id: membership.member_id,
date: date,
size: release.size_in_bytes,
counter: 1
},
on_conflict: query,
conflict_target: [:org_id, :user_id, :release_id, :size, :date]
)
end
end
| 27.458333 | 84 | 0.621851 |
1c6ad8063b74a39f66502f4247d87c3e385876b3 | 2,077 | ex | Elixir | apps/nerves_hub_web_core/lib/nerves_hub_web_core/release/tasks.ex | sportalliance/nerves_hub_web | 97635506876a25a8eed06a0bc0412378684bf477 | [
"Apache-2.0"
] | null | null | null | apps/nerves_hub_web_core/lib/nerves_hub_web_core/release/tasks.ex | sportalliance/nerves_hub_web | 97635506876a25a8eed06a0bc0412378684bf477 | [
"Apache-2.0"
] | null | null | null | apps/nerves_hub_web_core/lib/nerves_hub_web_core/release/tasks.ex | sportalliance/nerves_hub_web | 97635506876a25a8eed06a0bc0412378684bf477 | [
"Apache-2.0"
] | null | null | null | defmodule NervesHubWebCore.Release.Tasks do
alias Ecto.Migrator
@otp_app :nerves_hub_web_core
@start_apps [:logger, :ssl, :postgrex, :ecto_sql]
def migrate_and_seed do
init(@otp_app, @start_apps)
run_migrations_for(@otp_app)
run_seed_script("#{seed_path(@otp_app)}/seeds.exs")
stop()
end
def create do
init(@otp_app, @start_apps)
run_create_for(@otp_app)
run_migrations_for(@otp_app)
run_seed_script("#{seed_path(@otp_app)}/seeds.exs")
stop()
end
def gc do
init(@otp_app, @start_apps)
NervesHubWebCore.Workers.FirmwaresGC.run()
stop()
end
defp init(app, start_apps) do
IO.puts("Loading nerves_hub_web_core app for migrations...")
Application.load(app)
IO.puts("Starting dependencies...")
Enum.each(start_apps, &Application.ensure_all_started/1)
IO.puts("Starting repos...")
app
|> Application.get_env(:ecto_repos, [])
|> Enum.each(& &1.start_link(pool_size: 10))
end
defp stop do
IO.puts("Success!")
:init.stop()
end
defp run_create_for(app) do
IO.puts("Creating Database for #{app}")
ecto_repos(app) |> Enum.each(&create_repo(&1))
end
defp create_repo(repo) do
repo.__adapter__.storage_up(repo.config)
end
defp run_migrations_for(app) do
IO.puts("Running migrations for #{app}")
ecto_repos(app)
|> Enum.each(&Migrator.run(&1, migrations_path(app), :up, all: true))
end
def run_seed_script(seed_script) do
IO.puts("Running seed script #{seed_script}...")
Code.eval_file(seed_script)
end
defp ecto_repos(app) do
Application.get_env(app, :ecto_repos, [])
end
defp migrations_path(app), do: priv_dir(app, ["repo", "migrations"])
defp seed_path(app), do: priv_dir(app, ["repo"])
defp priv_dir(app, path) when is_list(path) do
case :code.priv_dir(app) do
priv_path when is_list(priv_path) or is_binary(priv_path) ->
Path.join([priv_path] ++ path)
{:error, :bad_name} ->
raise ArgumentError, "unknown application: #{inspect(app)}"
end
end
end
| 22.333333 | 73 | 0.670679 |
1c6b1f7ea37b8a4409b874570b53d5f05196175f | 1,246 | ex | Elixir | apps/ello_notifications/lib/ello_notifications/stream.ex | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 16 | 2017-06-21T21:31:20.000Z | 2021-05-09T03:23:26.000Z | apps/ello_notifications/lib/ello_notifications/stream.ex | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 25 | 2017-06-07T12:18:28.000Z | 2018-06-08T13:27:43.000Z | apps/ello_notifications/lib/ello_notifications/stream.ex | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 3 | 2018-06-14T15:34:07.000Z | 2022-02-28T21:06:13.000Z | defmodule Ello.Notifications.Stream do
alias __MODULE__.{
Item,
Client,
Loader,
}
@moduledoc """
Public API for interacting with the Ello Notifications Streams (in app notifications) via elixir.
"""
defstruct [
current_user: nil,
allow_nsfw: false,
allow_nudity: false,
per_page: 25,
before: nil,
next: nil,
models: [],
category: :all,
__response: nil,
preload: true,
]
@doc """
Fetch a page of in app notifications as a stream.
"""
def fetch(opts) do
__MODULE__
|> struct(opts)
|> Client.fetch
|> Loader.load
|> set_next
end
@doc """
Create a single in app notification.
"""
def create(opts) do
Item
|> struct(opts)
|> Item.validate
|> Client.create
end
@doc """
Delete a collection of notifications.
Either:
* delete all notifications for a user by passing user_id
* delete all notifications for a subject by passing subject_id and subject_type
"""
def delete_many(%{} = opts) do
Client.delete_many(opts)
end
defp set_next(%{models: []} = stream), do: stream
defp set_next(%{models: models} = stream) do
%{stream | next: DateTime.to_iso8601(List.last(models).created_at)}
end
end
| 19.777778 | 99 | 0.638042 |
1c6b5567fd2b6c0e5197e2fa63fa4972cc41665c | 2,646 | ex | Elixir | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/o_auth_requirements.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/o_auth_requirements.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/o_auth_requirements.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceConsumerManagement.V1.Model.OAuthRequirements do
  @moduledoc """
  OAuth scopes are a way to define data and permissions on data. For example, there are scopes defined for "Read-only access to Google Calendar" and "Access to Cloud Platform". Users can consent to a scope for an application, giving it permission to access that data on their behalf. OAuth scope specifications should be fairly coarse grained; a user will need to see and understand the text description of what your scope means. In most cases: use one or at most two OAuth scopes for an entire family of products. If your product has multiple APIs, you should probably be sharing the OAuth scope across all of those APIs. When you need finer grained OAuth consent screens: talk with your product management about how developers will use them in practice. Please note that even though each of the canonical scopes is enough for a request to be accepted and passed to the backend, a request can still fail due to the backend requiring additional scopes or permissions.

  ## Attributes

  * `canonicalScopes` (*type:* `String.t`, *default:* `nil`) - The list of publicly documented OAuth scopes that are allowed access. An OAuth token containing any of these scopes will be accepted. Example: canonical_scopes: https://www.googleapis.com/auth/calendar, https://www.googleapis.com/auth/calendar.read
  """

  # Provides decode/2, encode/2 and the field/1 macro used below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :canonicalScopes => String.t() | nil
        }

  # field/1 comes from GoogleApi.Gax.ModelBase (pulled in by `use` above);
  # it registers the JSON attribute on this model struct.
  field(:canonicalScopes)
end
defimpl Poison.Decoder, for: GoogleApi.ServiceConsumerManagement.V1.Model.OAuthRequirements do
  # Delegate decoding to the model's generated decode/2.
  def decode(value, options),
    do: GoogleApi.ServiceConsumerManagement.V1.Model.OAuthRequirements.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.ServiceConsumerManagement.V1.Model.OAuthRequirements do
  # Encoding is shared by all generated models, so hand off to the base encoder.
  def encode(value, options),
    do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 56.297872 | 967 | 0.776266 |
1c6b5d1024cb50e8dc502d7d60c86f8bde3f83c9 | 2,573 | ex | Elixir | lib/core/distribution.ex | jamcito/telemetry_metrics_prometheus_core | 8892adde76bf424573baee5ef5873910f4765aaa | [
"Apache-2.0"
] | 29 | 2019-09-20T05:33:26.000Z | 2021-11-12T12:09:12.000Z | lib/core/distribution.ex | jamcito/telemetry_metrics_prometheus_core | 8892adde76bf424573baee5ef5873910f4765aaa | [
"Apache-2.0"
] | 33 | 2019-09-15T04:27:04.000Z | 2022-01-30T15:58:59.000Z | lib/core/distribution.ex | jamcito/telemetry_metrics_prometheus_core | 8892adde76bf424573baee5ef5873910f4765aaa | [
"Apache-2.0"
defmodule TelemetryMetricsPrometheus.Core.Distribution do
  # Backs the `Telemetry.Metrics` distribution metric type as a Prometheus
  # histogram: `register/3` attaches a telemetry handler, and
  # `handle_event/4` records raw {labels, measurement} samples into an ETS
  # table for later aggregation.
  @moduledoc false

  alias Telemetry.Metrics
  alias TelemetryMetricsPrometheus.Core.EventHandler

  @typedoc """
  Distribution metric bucket boundaries.

  Bucket boundaries are represented by a non-empty list of increasing numbers.

  ## Examples

      [0, 100, 200, 300]
      # Buckets: [-inf, 0], [0, 100], [100, 200], [200, 300], [300, +inf]

      [99.9]
      # Buckets: [-inf, 99.9], [99.9, +inf]
  """
  @type buckets :: [number(), ...]

  # Handler configuration map handed to every handle_event/4 invocation.
  @type config :: %{
          keep: Metrics.keep(),
          measurement: Metrics.measurement(),
          metric_name: String.t(),
          name: Metrics.normalized_metric_name(),
          table: atom(),
          tags: Metrics.tags(),
          tag_values_fun: Metrics.tag_values(),
          type: :histogram,
          unit: Metrics.unit()
        }

  @spec register(metric :: Metrics.Distribution.t(), table_id :: atom(), owner :: pid()) ::
          {:ok, :telemetry.handler_id()} | {:error, :already_exists}
  def register(metric, table_id, owner) do
    # The handler id is derived from the metric name and the owning pid, so
    # the same metric can only be attached once per owner.
    handler_id = EventHandler.handler_id(metric.name, owner)

    with :ok <-
           :telemetry.attach(
             handler_id,
             metric.event_name,
             &__MODULE__.handle_event/4,
             %{
               keep: metric.keep,
               measurement: metric.measurement,
               # NOTE(review): metric_name is left blank for histograms —
               # presumably unused downstream; confirm before relying on it.
               metric_name: "",
               name: metric.name,
               table: table_id,
               tags: metric.tags,
               tag_values_fun: metric.tag_values,
               type: :histogram,
               unit: metric.unit
             }
           ) do
      {:ok, handler_id}
    else
      # :telemetry.attach/4 reports a duplicate handler id this way;
      # propagate the tagged error unchanged.
      {:error, :already_exists} = error ->
        error
    end
  end

  @spec handle_event(
          :telemetry.event_name(),
          :telemetry.event_measurements(),
          :telemetry.event_metadata(),
          config()
        ) :: :ok
  def handle_event(_event, measurements, metadata, config) do
    # Record a single observation: drop the event if the keep-filter rejects
    # it, extract the configured measurement, resolve and validate the tags,
    # then insert the raw {labels, measurement} sample into the ETS table
    # keyed by the metric name.
    with true <- EventHandler.keep?(config.keep, metadata),
         {:ok, measurement} <-
           EventHandler.get_measurement(measurements, metadata, config.measurement),
         mapped_values <- config.tag_values_fun.(metadata),
         :ok <- EventHandler.validate_tags_in_tag_values(config.tags, mapped_values),
         labels <- Map.take(mapped_values, config.tags) do
      true = :ets.insert(config.table, {config.name, {labels, measurement}})
      :ok
    else
      # keep?/2 returned false: event filtered out on purpose, nothing to do.
      false -> :ok
      # Any other failure (missing measurement, invalid tags) is reported.
      error -> EventHandler.handle_event_error(error, config)
    end
  end
end
| 30.630952 | 91 | 0.579868 |
1c6b8bb810d737bbb142cb60c601d3d4262bb0f1 | 1,031 | ex | Elixir | lib/monero/request/http_client.ex | cwc/monero_ex | 6c63513ef2770c74c0bd840877732c54ad8678d8 | [
"Unlicense",
"MIT"
] | null | null | null | lib/monero/request/http_client.ex | cwc/monero_ex | 6c63513ef2770c74c0bd840877732c54ad8678d8 | [
"Unlicense",
"MIT"
] | null | null | null | lib/monero/request/http_client.ex | cwc/monero_ex | 6c63513ef2770c74c0bd840877732c54ad8678d8 | [
"Unlicense",
"MIT"
defmodule Monero.Request.HttpClient do
  @moduledoc """
  Specifies expected behaviour of an http client

  Monero allows you to use your http client of choice, provided that
  it can be coerced into complying with this module's specification.

  The default is :hackney.

  ## Example

  Here for example is the code required to make HTTPotion comply with this spec.

  In your config you would do:

  ```elixir
  config :monero,
    http_client: Monero.Request.HTTPotion
  ```

  ```elixir
  defmodule Monero.Request.HTTPotion do
    @behaviour Monero.Request.HttpClient

    def request(method, url, body, headers) do
      {:ok, HTTPotion.request(method, url, [body: body, headers: headers, ibrowse: [headers_as_is: true]])}
    end
  end
  ```
  """

  # HTTP verbs a conforming client must support.
  @type http_method :: :get | :post | :put | :delete

  # Contract: perform the request and normalize the result into either
  # `{:ok, %{status_code: _, body: _}}` or `{:error, %{reason: _}}`.
  @callback request(method :: http_method, url :: binary, req_body :: binary, headers :: [{binary, binary}, ...], http_opts :: term) ::
    {:ok, %{status_code: pos_integer, body: binary}} |
    {:error, %{reason: any}}
end
| 29.457143 | 135 | 0.684772 |
1c6bf1d8ba6e7d13d72440e962c0b355d651061c | 1,855 | exs | Elixir | config/prod.exs | MNDL-27/midarr-server | b749707a1777205cea2d93349cde2ef922e527ec | [
"MIT"
] | 1 | 2022-02-14T16:40:55.000Z | 2022-02-14T16:40:55.000Z | config/prod.exs | MNDL-27/midarr-server | b749707a1777205cea2d93349cde2ef922e527ec | [
"MIT"
] | null | null | null | config/prod.exs | MNDL-27/midarr-server | b749707a1777205cea2d93349cde2ef922e527ec | [
"MIT"
import Config

# Compile-time production configuration; runtime secrets (database URL,
# secret key base, real host) are expected to be provided elsewhere.

# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :media_server, MediaServerWeb.Endpoint,
  url: [host: "example.com", port: 80],
  cache_static_manifest: "priv/static/cache_manifest.json"

# Do not print debug messages in production
config :logger, level: :info

# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
#     config :media_server, MediaServerWeb.Endpoint,
#       ...,
#       url: [host: "example.com", port: 443],
#       https: [
#         ...,
#         port: 443,
#         cipher_suite: :strong,
#         keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
#         certfile: System.get_env("SOME_APP_SSL_CERT_PATH")
#       ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
#     config :media_server, MediaServerWeb.Endpoint,
#       force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
1c6bf32120bf3a6b2b935fe74e04e3e1a20e23d6 | 9,158 | exs | Elixir | test/strategy/auth0_test.exs | maciej-szlosarczyk/ueberauth_auth0 | 54d03b9eca0502a5e55553d4db711371df3c59ce | [
"MIT"
] | null | null | null | test/strategy/auth0_test.exs | maciej-szlosarczyk/ueberauth_auth0 | 54d03b9eca0502a5e55553d4db711371df3c59ce | [
"MIT"
] | null | null | null | test/strategy/auth0_test.exs | maciej-szlosarczyk/ueberauth_auth0 | 54d03b9eca0502a5e55553d4db711371df3c59ce | [
"MIT"
] | null | null | null | defmodule Ueberauth.Strategy.Auth0Test do
# Test resources:
use ExUnit.Case, async: true
use ExVCR.Mock, adapter: ExVCR.Adapter.Hackney
use Plug.Test
# Custom data:
import Ueberauth.Strategy.Auth0, only: [info: 1, extra: 1]
alias Ueberauth.Auth.{Extra, Info}
# Initializing utils:
doctest Ueberauth.Strategy.Auth0
@router SpecRouter.init([])
@test_email "janedoe@example.com"
@session_options Plug.Session.init(
store: Plug.Session.COOKIE,
key: "_my_key",
signing_salt: "CXlmrshG"
)
# Setups:
setup_all do
# Creating token:
token = %OAuth2.AccessToken{
access_token: "eyJz93alolk4laUWw",
expires_at: 1_592_551_369,
other_params: %{"id_token" => "eyJ0XAipop4faeEoQ"},
refresh_token: "GEbRxBNkitedjnXbL",
token_type: "Bearer"
}
# Read the fixture with the user information:
{:ok, json} =
"test/fixtures/auth0.json"
|> Path.expand()
|> File.read()
user_info = Jason.decode!(json)
{:ok,
%{
user_info: user_info,
token: token
}}
end
# Tests:
test "simple request phase" do
conn =
:get
|> conn("/auth/auth0")
|> SpecRouter.call(@router)
assert conn.resp_body =~ ~s|<html><body>You are being <a href=|
assert conn.resp_body =~ ~s|>redirected</a>.</body></html>|
assert conn.resp_body =~ ~s|href="https://example-app.auth0.com/authorize?|
assert conn.resp_body =~ ~s|client_id=clientidsomethingrandom|
assert conn.resp_body =~ ~s|redirect_uri=http%3A%2F%2Fwww.example.com%2Fauth%2Fauth0%2Fcallback|
assert conn.resp_body =~ ~s|response_type=code|
assert conn.resp_body =~ ~s|scope=openid+profile+email|
assert conn.resp_body =~ ~s|state=#{conn.private[:ueberauth_state_param]}|
end
test "advanced request phase" do
conn =
:get
|> conn(
"/auth/auth0?scope=profile%20address%20phone&audience=https%3A%2F%2Fexample-app.auth0.com%2Fmfa%2F" <>
"&connection=facebook&unknown_param=should_be_ignored" <>
"&prompt=login&screen_hint=signup&login_hint=user%40example.com"
)
|> SpecRouter.call(@router)
assert conn.resp_body =~ ~s|<html><body>You are being <a href=|
assert conn.resp_body =~ ~s|>redirected</a>.</body></html>|
assert conn.resp_body =~ ~s|href="https://example-app.auth0.com/authorize?|
assert conn.resp_body =~ ~s|client_id=clientidsomethingrandom|
assert conn.resp_body =~ ~s|connection=facebook|
assert conn.resp_body =~ ~s|login_hint=user|
assert conn.resp_body =~ ~s|screen_hint=signup|
assert conn.resp_body =~ ~s|redirect_uri=http%3A%2F%2Fwww.example.com%2Fauth%2Fauth0%2Fcallback|
assert conn.resp_body =~ ~s|response_type=code|
assert conn.resp_body =~ ~s|scope=profile+address+phone|
assert conn.resp_body =~ ~s|state=#{conn.private[:ueberauth_state_param]}|
end
test "default callback phase" do
request_conn =
:get
|> conn("/auth/auth0", id: "foo")
|> SpecRouter.call(@router)
|> Plug.Conn.fetch_cookies()
state = request_conn.private[:ueberauth_state_param]
code = "some_code"
use_cassette "auth0-responses", match_requests_on: [:query] do
conn =
:get
|> conn("/auth/auth0/callback",
id: "foo",
code: code,
state: state
)
|> Map.put(:cookies, request_conn.cookies)
|> Map.put(:req_cookies, request_conn.req_cookies)
|> Plug.Session.call(@session_options)
|> SpecRouter.call(@router)
assert conn.resp_body == "auth0 callback"
auth = conn.assigns.ueberauth_auth
assert auth.provider == :auth0
assert auth.strategy == Ueberauth.Strategy.Auth0
assert auth.uid == "auth0|lyy5v5utb6n9qfm4ihi3l7pv34po66"
assert conn.private.auth0_state == state
end
end
test "callback without code" do
request_conn =
:get
|> conn("/auth/auth0", id: "foo")
|> SpecRouter.call(@router)
|> Plug.Conn.fetch_cookies()
state = request_conn.private[:ueberauth_state_param]
use_cassette "auth0-responses", match_requests_on: [:query] do
conn =
:get
|> conn("/auth/auth0/callback",
id: "foo",
state: state
)
|> Map.put(:cookies, request_conn.cookies)
|> Map.put(:req_cookies, request_conn.req_cookies)
|> Plug.Session.call(@session_options)
|> SpecRouter.call(@router)
assert conn.resp_body == "auth0 callback"
auth = conn.assigns.ueberauth_failure
missing_code_error = %Ueberauth.Failure.Error{
message: "No code received",
message_key: "missing_code"
}
assert auth.provider == :auth0
assert auth.strategy == Ueberauth.Strategy.Auth0
assert auth.errors == [missing_code_error]
end
end
test "callback with invalid code" do
request_conn =
:get
|> conn("/auth/auth0", id: "foo")
|> SpecRouter.call(@router)
|> Plug.Conn.fetch_cookies()
state = request_conn.private[:ueberauth_state_param]
use_cassette "auth0-invalid-code", match_requests_on: [:query] do
assert_raise(OAuth2.Error, ~r/Server responded with status: 403.*/, fn ->
:get
|> conn("/auth/auth0/callback",
id: "foo",
code: "invalid_code",
state: state
)
|> Map.put(:cookies, request_conn.cookies)
|> Map.put(:req_cookies, request_conn.req_cookies)
|> Plug.Session.call(@session_options)
|> SpecRouter.call(@router)
end)
end
end
test "callback with no token in response" do
request_conn =
:get
|> conn("/auth/auth0", id: "foo")
|> SpecRouter.call(@router)
|> Plug.Conn.fetch_cookies()
state = request_conn.private[:ueberauth_state_param]
use_cassette "auth0-no-access-token", match_requests_on: [:query] do
conn =
:get
|> conn("/auth/auth0/callback",
id: "foo",
code: "some_code",
state: state
)
|> Map.put(:cookies, request_conn.cookies)
|> Map.put(:req_cookies, request_conn.req_cookies)
|> Plug.Session.call(@session_options)
|> SpecRouter.call(@router)
assert conn.resp_body == "auth0 callback"
auth = conn.assigns.ueberauth_failure
missing_code_error = %Ueberauth.Failure.Error{
message: "Something went wrong",
message_key: "something_wrong"
}
assert auth.provider == :auth0
assert auth.strategy == Ueberauth.Strategy.Auth0
assert auth.errors == [missing_code_error]
end
end
test "user information parsing", fixtures do
user_info = fixtures.user_info
token = fixtures.token
conn = %Plug.Conn{
private: %{
auth0_user: user_info,
auth0_token: token
}
}
assert info(conn) == %Info{
birthday: "1972-03-31",
description: nil,
email: @test_email,
first_name: "Jane",
image: "http://example.com/janedoe/me.jpg",
last_name: "Doe",
location: nil,
name: "Jane Josephine Doe",
nickname: "JJ",
phone: "+1 (111) 222-3434",
urls: %{
profile: "http://example.com/janedoe",
website: "http://example.com"
}
}
end
test "user extra information parsing", fixtures do
user_info = fixtures.user_info
token = fixtures.token
conn = %Plug.Conn{
private: %{
auth0_user: user_info,
auth0_token: token
}
}
assert extra(conn) == %Extra{
raw_info: %{
token: %OAuth2.AccessToken{
access_token: "eyJz93alolk4laUWw",
expires_at: 1_592_551_369,
other_params: %{"id_token" => "eyJ0XAipop4faeEoQ"},
refresh_token: "GEbRxBNkitedjnXbL",
token_type: "Bearer"
},
user: %{
"address" => %{"country" => "us"},
"birthdate" => "1972-03-31",
"email" => "janedoe@example.com",
"email_verified" => true,
"family_name" => "Doe",
"gender" => "female",
"given_name" => "Jane",
"locale" => "en-US",
"middle_name" => "Josephine",
"name" => "Jane Josephine Doe",
"nickname" => "JJ",
"phone_number" => "+1 (111) 222-3434",
"phone_number_verified" => false,
"picture" => "http://example.com/janedoe/me.jpg",
"preferred_username" => "j.doe",
"profile" => "http://example.com/janedoe",
"sub" => "auth0|lyy5v452u345tbn943qf",
"updated_at" => "1556845729",
"website" => "http://example.com",
"zoneinfo" => "America/Los_Angeles"
}
}
}
end
end
| 30.939189 | 110 | 0.58124 |
1c6bf769ee16fe4eb21ae82b81614b8f3beb9dc9 | 989 | ex | Elixir | lib/utility/poisoned_decimal.ex | tyrchen/ex_bitcoin | f399a3d2bd27fac0a4ba5e91379d5492ab94292a | [
"MIT"
] | 1 | 2018-03-22T15:24:37.000Z | 2018-03-22T15:24:37.000Z | lib/utility/poisoned_decimal.ex | tyrchen/ex_bitcoin | f399a3d2bd27fac0a4ba5e91379d5492ab94292a | [
"MIT"
] | 26 | 2018-07-22T01:10:59.000Z | 2019-10-20T08:43:10.000Z | lib/utility/poisoned_decimal.ex | tyrchen/ex_bitcoin | f399a3d2bd27fac0a4ba5e91379d5492ab94292a | [
"MIT"
defmodule PoisonedDecimal do
  @moduledoc """
  Copied from https://github.com/solatis/gold.
  Hacky wrapper for Decimal library to use different encoding than the one prepared for Decimal in Ecto (or different conflicting libraries)
  """

  defstruct [:decimal]

  # Wraps a Decimal so it is serialized via this struct's own encoder.
  def new(%Decimal{} = decimal), do: %__MODULE__{decimal: decimal}

  # Already wrapped — leave untouched.
  def poison_params(%__MODULE__{} = params), do: params

  # Bare Decimal — wrap it.
  def poison_params(%Decimal{} = params), do: new(params)

  # Tuples (e.g. map entries): poison both elements.
  def poison_params({key, value}), do: {poison_params(key), poison_params(value)}

  # Lists: poison every element.
  def poison_params(params) when is_list(params) do
    for item <- params, do: poison_params(item)
  end

  # Any other struct: poison its fields, then rebuild the same struct type.
  def poison_params(%{__struct__: mod} = params) do
    params
    |> Map.from_struct()
    |> Map.to_list()
    |> poison_params()
    |> mod.__struct__()
  end

  # Plain maps: poison every key/value pair.
  def poison_params(%{} = params) do
    params
    |> Map.to_list()
    |> poison_params()
    |> Map.new()
  end

  # Scalars and anything else pass through unchanged.
  def poison_params(params), do: params
end
| 21.5 | 140 | 0.67543 |
1c6c09e2388e083d33c5fe3fa91939494c3e456a | 408 | ex | Elixir | lib/pigpiox/supervisor.ex | coop/pigpiox | 618f8669673b1a36649b20d2874cce2873af67a2 | [
"Apache-2.0"
] | null | null | null | lib/pigpiox/supervisor.ex | coop/pigpiox | 618f8669673b1a36649b20d2874cce2873af67a2 | [
"Apache-2.0"
] | null | null | null | lib/pigpiox/supervisor.ex | coop/pigpiox | 618f8669673b1a36649b20d2874cce2873af67a2 | [
"Apache-2.0"
defmodule Pigpiox.Supervisor do
  @moduledoc false

  use Supervisor

  # Starts the supervisor registered under this module's name.
  def start_link(arg) do
    Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
  end

  @impl true
  def init(_arg) do
    # :rest_for_one — if the port dies, the socket and GPIO watcher (which
    # depend on it) are restarted as well.
    [
      {Pigpiox.Port, [name: Pigpiox.Port]},
      {Pigpiox.Socket, [name: Pigpiox.Socket]},
      {Pigpiox.GPIO.WatcherSupervisor, []}
    ]
    |> Supervisor.init(strategy: :rest_for_one)
  end
end
| 19.428571 | 60 | 0.676471 |
1c6c14cf772798ff58fbd2bbf66c37d2e93b7443 | 1,533 | ex | Elixir | lib/day7/part2.ex | anamba/adventofcode2020 | 2a749140d5393f7c69c630102daae977be30afcc | [
"MIT"
] | null | null | null | lib/day7/part2.ex | anamba/adventofcode2020 | 2a749140d5393f7c69c630102daae977be30afcc | [
"MIT"
] | null | null | null | lib/day7/part2.ex | anamba/adventofcode2020 | 2a749140d5393f7c69c630102daae977be30afcc | [
"MIT"
defmodule Day7.Part2 do
  @moduledoc """
  Advent of Code 2020, day 7, part 2.

  Parses bag-containment rules from a file under `inputs/` and counts how
  many individual bags a single "shiny gold" bag must contain.
  """

  @doc """
      iex> Day7.Part2.part2("day7-sample.txt")
      32

      iex> Day7.Part2.part2("day7-sample2.txt")
      126
  """
  def part2(filename) do
    # count_contents/3 counts the shiny gold bag itself, so subtract it.
    (parse_input(filename) |> count_contents("shiny gold")) - 1
  end

  @doc """
      iex> Day7.Part2.part2
      35487
  """
  def part2, do: part2("day7.txt")

  @doc """
  Reads `inputs/<filename>` and builds a map of
  `color => [{contained_color, quantity}, ...]`.
  """
  def parse_input(filename) do
    # Bug fix: interpolate the filename into the path (was a broken literal,
    # which made every call read a nonexistent file).
    "inputs/#{filename}"
    |> File.stream!()
    |> Stream.map(&String.trim/1)
    |> Stream.map(&parse_record/1)
    |> Enum.into(%{})
  end

  @doc """
  Parses one rule line into `{color, contents}`.

      iex> Day7.Part2.parse_record("faded blue bags contain no other bags.")
      {"faded blue", []}
  """
  def parse_record(line) do
    %{"color" => color, "contents" => contents_str} =
      Regex.named_captures(~r/^(?<color>\w+ \w+) bags contain (?<contents>.*+)$/, line)

    contents =
      contents_str
      |> String.split(",")
      |> Enum.map(&String.trim/1)
      |> Enum.map(&parse_bag_with_quantity/1)
      # parse_bag_with_quantity/1 returns nil for "no other bags." entries.
      |> Enum.filter(& &1)

    {color, contents}
  end

  # Leaf rule: a bag that contains nothing contributes no contents.
  def parse_bag_with_quantity("no other bags."), do: nil

  # Parses e.g. "2 muted yellow bags." into {"muted yellow", 2}.
  def parse_bag_with_quantity(str) do
    %{"color" => color, "quantity" => quantity} =
      Regex.named_captures(~r/^(?<quantity>\d+) (?<color>\w+ \w+) bag(s?)(\.?)$/, str)

    {color, String.to_integer(quantity)}
  end

  @doc """
  Counts `multiplier` copies of `target` plus everything nested inside them.
  Colors missing from the ruleset count as zero.
  """
  def count_contents(ruleset, target, multiplier \\ 1) do
    case ruleset[target] do
      nil ->
        0

      [] ->
        multiplier

      contents ->
        multiplier +
          (Enum.map(contents, fn {color, quantity} ->
             multiplier * count_contents(ruleset, color, quantity)
           end)
           |> Enum.sum())
    end
  end
end
| 23.227273 | 87 | 0.567515 |
1c6c5e01545d3ade9f2d5d5c2064018f81c25859 | 211 | ex | Elixir | lib/easypost/api_key.ex | winestyr/ex_easypost | a8563ccbff429ad181280c438efeea65383ff852 | [
"MIT"
] | 6 | 2017-09-21T13:19:56.000Z | 2021-01-07T18:31:42.000Z | lib/easypost/api_key.ex | winestyr/ex_easypost | a8563ccbff429ad181280c438efeea65383ff852 | [
"MIT"
] | null | null | null | lib/easypost/api_key.ex | winestyr/ex_easypost | a8563ccbff429ad181280c438efeea65383ff852 | [
"MIT"
defmodule EasyPost.APIKey do
  @doc """
  Retrieve an API key.
  """
  @spec get(String.t()) :: EasyPost.Operation.t()
  def get(id),
    do: %EasyPost.Operation{method: :get, path: "/api_keys/#{id}"}
end
| 21.1 | 64 | 0.625592 |
1c6cdacc87ede2d3d5ea7782c702dd15f05c3b39 | 56,484 | exs | Elixir | test/ecto/query/planner_test.exs | wojtekmach/ecto | dc43fbf40943c9d11bfda8c233b1adc1ff540f7e | [
"Apache-2.0"
] | 2 | 2021-02-25T15:51:16.000Z | 2021-02-25T18:42:35.000Z | test/ecto/query/planner_test.exs | mikestok/ecto | dc43fbf40943c9d11bfda8c233b1adc1ff540f7e | [
"Apache-2.0"
] | null | null | null | test/ecto/query/planner_test.exs | mikestok/ecto | dc43fbf40943c9d11bfda8c233b1adc1ff540f7e | [
"Apache-2.0"
] | 1 | 2021-02-25T15:28:45.000Z | 2021-02-25T15:28:45.000Z | Code.require_file "../../../integration_test/support/types.exs", __DIR__
defmodule Ecto.Query.PlannerTest do
use ExUnit.Case, async: true
import Ecto.Query
alias Ecto.Query.Planner
alias Ecto.Query.JoinExpr
  # Test schema exercising virtual fields, filtered associations and
  # has_many :through.
  defmodule Comment do
    use Ecto.Schema

    schema "comments" do
      field :text, :string
      # Virtual: never read from or written to the database.
      field :temp, :boolean, virtual: true
      field :posted, :naive_datetime
      field :uuid, :binary_id
      field :crazy_comment, :string
      belongs_to :post, Ecto.Query.PlannerTest.Post
      # Association with an attached filter condition.
      belongs_to :crazy_post, Ecto.Query.PlannerTest.Post,
        where: [title: "crazypost"]
      # Shares crazy_post's FK column, so the field must not be defined twice.
      belongs_to :crazy_post_with_list, Ecto.Query.PlannerTest.Post,
        where: [title: {:in, ["crazypost1", "crazypost2"]}],
        foreign_key: :crazy_post_id,
        define_field: false
      has_many :post_comments, through: [:post, :comments]
      has_many :comment_posts, Ecto.Query.PlannerTest.CommentPost
    end
  end
  # Join schema between comments and posts, with filtered associations
  # (nil-check and SQL-fragment conditions).
  defmodule CommentPost do
    use Ecto.Schema

    schema "comment_posts" do
      belongs_to :comment, Comment
      belongs_to :post, Post
      belongs_to :special_comment, Comment, where: [text: nil]
      belongs_to :special_long_comment, Comment, where: [text: {:fragment, "LEN(?) > 100"}]
      field :deleted, :boolean
    end

    # Returns a dynamic expression usable as an extra filter in queries.
    def inactive() do
      dynamic([row], row.deleted)
    end
  end
  # Embedded-only schema (no backing table).
  defmodule Author do
    use Ecto.Schema

    embedded_schema do
      field :name, :string
    end
  end
  # Embedded schema containing a nested embed.
  defmodule PostMeta do
    use Ecto.Schema

    embedded_schema do
      field :slug, :string
      embeds_one :author, Author
    end
  end
  # Test schema exercising a custom primary key type, a schema prefix,
  # renamed source columns, embeds and filtered many_to_many associations.
  defmodule Post do
    use Ecto.Schema

    @primary_key {:id, CustomPermalink, []}
    @schema_prefix "my_prefix"
    schema "posts" do
      # Field name differs from the underlying column (:post_title).
      field :title, :string, source: :post_title
      field :text, :string
      field :code, :binary
      field :posted, :naive_datetime
      field :visits, :integer
      field :links, {:array, CustomPermalink}
      field :prefs, {:map, :string}
      # Excluded from queries by default (load_in_query: false).
      field :payload, :map, load_in_query: false
      field :status, Ecto.Enum, values: [:draft, :published, :deleted]
      embeds_one :meta, PostMeta
      embeds_many :metas, PostMeta
      has_many :comments, Ecto.Query.PlannerTest.Comment
      has_many :extra_comments, Ecto.Query.PlannerTest.Comment
      has_many :special_comments, Ecto.Query.PlannerTest.Comment, where: [text: {:not, nil}]
      many_to_many :crazy_comments, Comment, join_through: CommentPost, where: [text: "crazycomment"]
      many_to_many :crazy_comments_with_list, Comment, join_through: CommentPost, where: [text: {:in, ["crazycomment1", "crazycomment2"]}], join_where: [deleted: true]
      many_to_many :crazy_comments_without_schema, Comment, join_through: "comment_posts", join_where: [deleted: true]
    end
  end
defp plan(query, operation \\ :all) do
Planner.plan(query, operation, Ecto.TestAdapter)
end
defp normalize(query, operation \\ :all) do
normalize_with_params(query, operation) |> elem(0)
end
defp normalize_with_params(query, operation \\ :all) do
{query, params, _key} = plan(query, operation)
{query, select} =
query
|> Planner.ensure_select(operation == :all)
|> Planner.normalize(operation, Ecto.TestAdapter, 0)
{query, params, select}
end
defp select_fields(fields, ix) do
for field <- fields do
{{:., [], [{:&, [], [ix]}, field]}, [], []}
end
end
  test "plan: merges all parameters" do
    union = from p in Post, select: {p.title, ^"union"}
    subquery = from Comment, where: [text: ^"subquery"]

    query =
      from p in Post,
        select: {p.title, ^"select"},
        join: c in subquery(subquery),
        on: c.text == ^"join",
        left_join: d in assoc(p, :comments),
        union_all: ^union,
        windows: [foo: [partition_by: fragment("?", ^"windows")]],
        where: p.title == ^"where",
        group_by: p.title == ^"group_by",
        having: p.title == ^"having",
        order_by: [asc: fragment("?", ^"order_by")],
        limit: ^0,
        offset: ^1

    {_query, params, _key} = plan(query)

    # Parameters come out in clause-evaluation order, with the union's
    # parameters spliced in after the windows.
    assert params ==
             ["select", "subquery", "join", "where", "group_by", "having", "windows"] ++
               ["union", "order_by", 0, 1]
  end
# A query without a from expression is rejected at planning time.
test "plan: checks from" do
  assert_raise Ecto.QueryError, ~r"query must have a from expression", fn ->
    plan(%Ecto.Query{})
  end
end

# Interpolated values are cast to the field type during planning
# (here CustomPermalink "1" -> 1); uncastable values raise CastError
# with the offending expression in the message.
test "plan: casts values" do
  {_query, params, _key} = plan(Post |> where([p], p.id == ^"1"))
  assert params == [1]

  exception = assert_raise Ecto.Query.CastError, fn ->
    plan(Post |> where([p], p.title == ^1))
  end

  assert Exception.message(exception) =~ "value `1` in `where` cannot be cast to type :string"
  assert Exception.message(exception) =~ "where: p0.title == ^1"
end
# Interpolating a raw Ecto.Query on the right of `in` is an error; the
# message suggests `subquery(query)` instead.
test "plan: Ecto.Query struct as right-side value of in operator" do
  query = from(Post)

  exception = assert_raise Ecto.QueryError, fn ->
    plan(Post |> where([p], p.id in ^query))
  end

  assert Exception.message(exception) =~ "an Ecto.Query struct is not supported as right-side value of `in` operator"
  assert Exception.message(exception) =~ "Did you mean to write `expr in subquery(query)` instead?"
end

# Dynamics and keyword lists may only be interpolated in specific positions;
# using them as plain comparison values produces readable errors.
test "plan: raises readable error on dynamic expressions/keyword lists" do
  dynamic = dynamic([p], p.id == ^"1")
  {_query, params, _key} = plan(Post |> where([p], ^dynamic))
  assert params == [1]

  assert_raise Ecto.QueryError, ~r/dynamic expressions can only be interpolated/, fn ->
    plan(Post |> where([p], p.title == ^dynamic))
  end

  assert_raise Ecto.QueryError, ~r/keyword lists are only allowed/, fn ->
    plan(Post |> where([p], p.title == ^[foo: 1]))
  end
end
# Custom types (CustomPermalink) cast and dump interpolated values
# ("1-hello-world" -> 1).
test "plan: casts and dumps custom types" do
  permalink = "1-hello-world"
  {_query, params, _key} = plan(Post |> where([p], p.id == ^permalink))
  assert params == [1]
end

# Binary ids are dumped to their raw 16-byte form; truncated UUIDs raise.
test "plan: casts and dumps binary ids" do
  uuid = "00010203-0405-4607-8809-0a0b0c0d0e0f"
  {_query, params, _key} = plan(Comment |> where([c], c.uuid == ^uuid))
  assert params == [<<0, 1, 2, 3, 4, 5, 70, 7, 136, 9, 10, 11, 12, 13, 14, 15>>]

  assert_raise Ecto.Query.CastError,
               ~r/`"00010203-0405-4607-8809"` cannot be dumped to type :binary_id/, fn ->
    uuid = "00010203-0405-4607-8809"
    plan(Comment |> where([c], c.uuid == ^uuid))
  end
end
# Left side of `in` against an array field unwraps the array element type for
# casting; against a non-array field it raises with a type-mismatch message.
test "plan: casts and dumps custom types in left side of in-expressions" do
  permalink = "1-hello-world"
  {_query, params, _key} = plan(Post |> where([p], ^permalink in p.links))
  assert params == [1]

  message = ~r"value `\"1-hello-world\"` in `where` expected to be part of an array but matched type is :string"
  assert_raise Ecto.Query.CastError, message, fn ->
    plan(Post |> where([p], ^permalink in p.text))
  end
end

# Right side of `in`: each element is cast/dumped whether the whole list is
# interpolated (`^[v]`) or a single element is (`[^v]`), for built-in and
# custom types alike.
test "plan: casts and dumps custom types in right side of in-expressions" do
  datetime = ~N[2015-01-07 21:18:13.0]
  {_query, params, _key} = plan(Comment |> where([c], c.posted in ^[datetime]))
  assert params == [~N[2015-01-07 21:18:13]]

  permalink = "1-hello-world"
  {_query, params, _key} = plan(Post |> where([p], p.id in ^[permalink]))
  assert params == [1]

  datetime = ~N[2015-01-07 21:18:13.0]
  {_query, params, _key} = plan(Comment |> where([c], c.posted in [^datetime]))
  assert params == [~N[2015-01-07 21:18:13]]

  permalink = "1-hello-world"
  {_query, params, _key} = plan(Post |> where([p], p.id in [^permalink]))
  assert params == [1]

  {_query, params, _key} = plan(Post |> where([p], p.code in [^"abcd"]))
  assert params == ["abcd"]

  {_query, params, _key} = plan(Post |> where([p], p.code in ^["abcd"]))
  assert params == ["abcd"]
end
# update_all set-values are cast through the field type; interpolated nil is
# kept as a parameter while literal nil is compiled away.
test "plan: casts values on update_all" do
  {_query, params, _key} = plan(Post |> update([p], set: [id: ^"1"]), :update_all)
  assert params == [1]

  {_query, params, _key} = plan(Post |> update([p], set: [title: ^nil]), :update_all)
  assert params == [nil]

  {_query, params, _key} = plan(Post |> update([p], set: [title: nil]), :update_all)
  assert params == []
end

# Join sources resolve to {table, schema} tuples: bare table, schema module,
# or an explicit {table, schema} override.
test "plan: joins" do
  query = from(p in Post, join: c in "comments") |> plan |> elem(0)
  assert hd(query.joins).source == {"comments", nil}

  query = from(p in Post, join: c in Comment) |> plan |> elem(0)
  assert hd(query.joins).source == {"comments", Comment}

  query = from(p in Post, join: c in {"post_comments", Comment}) |> plan |> elem(0)
  assert hd(query.joins).source == {"post_comments", Comment}
end
# assoc/2 joins expand to the owner-key == related-key on-condition, merged
# with any user-supplied `on`; join qualifier is preserved.
test "plan: joins associations" do
  query = from(p in Post, join: assoc(p, :comments)) |> plan |> elem(0)
  assert %JoinExpr{on: on, source: source, assoc: nil, qual: :inner} = hd(query.joins)
  assert source == {"comments", Comment}
  assert Macro.to_string(on.expr) == "&1.post_id() == &0.id()"

  query = from(p in Post, left_join: assoc(p, :comments)) |> plan |> elem(0)
  assert %JoinExpr{on: on, source: source, assoc: nil, qual: :left} = hd(query.joins)
  assert source == {"comments", Comment}
  assert Macro.to_string(on.expr) == "&1.post_id() == &0.id()"

  query = from(p in Post, left_join: c in assoc(p, :comments), on: p.title == c.text) |> plan |> elem(0)
  assert %JoinExpr{on: on, source: source, assoc: nil, qual: :left} = hd(query.joins)
  assert source == {"comments", Comment}
  assert Macro.to_string(on.expr) == "&1.post_id() == &0.id() and &0.title() == &1.text()"
end

# has-through associations expand into intermediate joins; the `ix` list and
# on-expressions pin the binding order the planner assigns to those joins.
test "plan: nested joins associations" do
  query = from(c in Comment, left_join: assoc(c, :post_comments)) |> plan |> elem(0)
  assert {{"comments", _, _}, {"comments", _, _}, {"posts", _, _}} = query.sources
  assert [join1, join2] = query.joins
  assert Enum.map(query.joins, & &1.ix) == [2, 1]
  assert Macro.to_string(join1.on.expr) == "&2.id() == &0.post_id()"
  assert Macro.to_string(join2.on.expr) == "&1.post_id() == &2.id()"

  query = from(p in Comment, left_join: assoc(p, :post),
                             left_join: assoc(p, :post_comments)) |> plan |> elem(0)
  assert {{"comments", _, _}, {"posts", _, _}, {"comments", _, _}, {"posts", _, _}} = query.sources
  assert [join1, join2, join3] = query.joins
  assert Enum.map(query.joins, & &1.ix) == [1, 3, 2]
  assert Macro.to_string(join1.on.expr) == "&1.id() == &0.post_id()"
  assert Macro.to_string(join2.on.expr) == "&3.id() == &0.post_id()"
  assert Macro.to_string(join3.on.expr) == "&2.post_id() == &3.id()"

  query = from(p in Comment, left_join: assoc(p, :post_comments),
                             left_join: assoc(p, :post)) |> plan |> elem(0)
  assert {{"comments", _, _}, {"comments", _, _}, {"posts", _, _}, {"posts", _, _}} = query.sources
  assert [join1, join2, join3] = query.joins
  assert Enum.map(query.joins, & &1.ix) == [3, 1, 2]
  assert Macro.to_string(join1.on.expr) == "&3.id() == &0.post_id()"
  assert Macro.to_string(join2.on.expr) == "&1.post_id() == &3.id()"
  assert Macro.to_string(join3.on.expr) == "&2.id() == &0.post_id()"
end
# Associations declared with `:where` filters get those filters merged into
# the generated on-expression.
test "plan: joins associations with custom queries" do
  query = from(p in Post, left_join: assoc(p, :special_comments)) |> plan |> elem(0)
  assert {{"posts", _, _}, {"comments", _, _}} = query.sources
  assert [join] = query.joins
  assert join.ix == 1
  assert Macro.to_string(join.on.expr) =~
           ~r"&1.post_id\(\) == &0.id\(\) and not[\s\(]is_nil\(&1.text\(\)\)\)?"
end

# Filter merging also works across a chain of filtered associations,
# including `:where` conditions expressed as fragments.
test "plan: nested joins associations with custom queries" do
  query = from(p in Post,
                join: c1 in assoc(p, :special_comments),
                join: p2 in assoc(c1, :post),
                join: cp in assoc(c1, :comment_posts),
                join: c2 in assoc(cp, :special_comment),
                join: c3 in assoc(cp, :special_long_comment))
          |> plan
          |> elem(0)

  assert [join1, join2, join3, join4, join5] = query.joins
  assert {{"posts", _, _}, {"comments", _, _}, {"posts", _, _},
          {"comment_posts", _, _}, {"comments", _, _}, {"comments", _, _}} = query.sources
  assert Macro.to_string(join1.on.expr) =~
           ~r"&1.post_id\(\) == &0.id\(\) and not[\s\(]is_nil\(&1.text\(\)\)\)?"
  assert Macro.to_string(join2.on.expr) == "&2.id() == &1.post_id()"
  assert Macro.to_string(join3.on.expr) == "&3.comment_id() == &1.id()"
  assert Macro.to_string(join4.on.expr) == "&4.id() == &3.special_comment_id() and is_nil(&4.text())"
  assert Macro.to_string(join5.on.expr) ==
           "&5.id() == &3.special_long_comment_id() and fragment({:raw, \"LEN(\"}, {:expr, &5.text()}, {:raw, \") > 100\"})"
end
# assoc/2 requires a schema source; a bare table name cannot be used.
test "plan: cannot associate without schema" do
  query = from(p in "posts", join: assoc(p, :comments))
  message = ~r"cannot perform association join on \"posts\" because it does not have a schema"

  assert_raise Ecto.QueryError, message, fn ->
    plan(query)
  end
end

# assoc/2 must name an association, not a plain field.
test "plan: requires an association field" do
  query = from(p in Post, join: assoc(p, :title))

  assert_raise Ecto.QueryError, ~r"could not find association `title`", fn ->
    plan(query)
  end
end
# datetime_add/3 accepts NaiveDateTime, DateTime, and castable strings for
# its interpolated argument; the parameter keeps/acquires the proper type.
test "plan: handles specific param type-casting" do
  value = NaiveDateTime.utc_now()
  {_, params, _} = from(p in Post, where: p.posted > datetime_add(^value, 1, "second")) |> plan()
  assert params == [value]

  value = DateTime.utc_now()
  {_, params, _} = from(p in Post, where: p.posted > datetime_add(^value, 1, "second")) |> plan()
  assert params == [value]

  value = ~N[2010-04-17 14:00:00]
  {_, params, _} =
    from(p in Post, where: p.posted > datetime_add(^"2010-04-17 14:00:00", 1, "second")) |> plan()
  assert params == [value]
end
# The cache key encodes operation, lock, prefixes, wheres, joins, source
# (with schema hash) and select; local `prefix:` options win over the
# query-level prefix inside the key.
test "plan: generates a cache key" do
  {_query, _params, key} = plan(from(Post, []))
  assert key == [:all, {"posts", Post, 74490763, "my_prefix"}]

  query =
    from(
      p in Post,
      prefix: "hello",
      select: 1,
      lock: "foo",
      where: is_nil(nil),
      or_where: is_nil(nil),
      join: c in Comment,
      prefix: "world",
      preload: :comments
    )

  {_query, _params, key} = plan(%{query | prefix: "foo"})
  assert key == [:all,
                 {:lock, "foo"},
                 {:prefix, "foo"},
                 {:where, [{:and, {:is_nil, [], [nil]}}, {:or, {:is_nil, [], [nil]}}]},
                 {:join, [{:inner, {"comments", Comment, 38292156, "world"}, true}]},
                 {"posts", Post, 74490763, "hello"},
                 {:select, 1}]
end

# With the test adapter, interpolated `in` lists make the query uncacheable.
test "plan: generates a cache key for in based on the adapter" do
  query = from(p in Post, where: p.id in ^[1, 2, 3])
  {_query, _params, key} = Planner.plan(query, :all, Ecto.TestAdapter)
  assert key == :nocache
end
# :nocache propagates through combinations: a union with an uncacheable
# member makes the whole query uncacheable.
test "plan: combination with uncacheable queries are uncacheable" do
  query1 =
    Post
    |> where([p], p.id in ^[1, 2, 3])
    |> select([p], p.id)

  query2 =
    Post
    |> where([p], p.id in [1, 2])
    |> select([p], p.id)
    |> distinct(true)

  {_, _, key} = query1 |> union_all(^query2) |> Planner.plan(:all, Ecto.TestAdapter)
  assert key == :nocache
end
# Prefix resolution precedence across from/join/assoc sources:
# local `prefix:` option > @schema_prefix > query-level prefix > nil.
# (Exception: for joins, the query-level prefix applies only when the join
# itself declares no schema prefix.)
test "plan: normalizes prefixes" do
  # No schema prefix in from
  {query, _, _} = from(Comment, select: 1) |> plan()
  assert query.sources == {{"comments", Comment, nil}}

  {query, _, _} = from(Comment, select: 1) |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"comments", Comment, "global"}}

  {query, _, _} = from(Comment, prefix: "local", select: 1) |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"comments", Comment, "local"}}

  # Schema prefix in from
  {query, _, _} = from(Post, select: 1) |> plan()
  assert query.sources == {{"posts", Post, "my_prefix"}}

  {query, _, _} = from(Post, select: 1) |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"posts", Post, "my_prefix"}}

  {query, _, _} = from(Post, prefix: "local", select: 1) |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"posts", Post, "local"}}

  # Schema prefix in join
  {query, _, _} = from(c in Comment, join: Post) |> plan()
  assert query.sources == {{"comments", Comment, nil}, {"posts", Post, "my_prefix"}}

  {query, _, _} = from(c in Comment, join: Post) |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"comments", Comment, "global"}, {"posts", Post, "my_prefix"}}

  {query, _, _} = from(c in Comment, join: Post, prefix: "local") |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"comments", Comment, "global"}, {"posts", Post, "local"}}

  # Schema prefix in query join
  {query, _, _} = from(p in Post, join: ^from(c in Comment)) |> plan()
  assert query.sources == {{"posts", Post, "my_prefix"}, {"comments", Comment, nil}}

  {query, _, _} = from(p in Post, join: ^from(c in Comment)) |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"posts", Post, "my_prefix"}, {"comments", Comment, "global"}}

  {query, _, _} = from(p in Post, join: ^from(c in Comment), prefix: "local") |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"posts", Post, "my_prefix"}, {"comments", Comment, "local"}}

  # No schema prefix in assoc join
  {query, _, _} = from(c in Comment, join: assoc(c, :comment_posts)) |> plan()
  assert query.sources == {{"comments", Comment, nil}, {"comment_posts", CommentPost, nil}}

  {query, _, _} = from(c in Comment, join: assoc(c, :comment_posts)) |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"comments", Comment, "global"}, {"comment_posts", CommentPost, "global"}}

  {query, _, _} = from(c in Comment, join: assoc(c, :comment_posts), prefix: "local") |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"comments", Comment, "global"}, {"comment_posts", CommentPost, "local"}}

  # Schema prefix in assoc join
  {query, _, _} = from(c in Comment, join: assoc(c, :post)) |> plan()
  assert query.sources == {{"comments", Comment, nil}, {"posts", Post, "my_prefix"}}

  {query, _, _} = from(c in Comment, join: assoc(c, :post)) |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"comments", Comment, "global"}, {"posts", Post, "my_prefix"}}

  {query, _, _} = from(c in Comment, join: assoc(c, :post), prefix: "local") |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"comments", Comment, "global"}, {"posts", Post, "local"}}

  # Schema prefix for assoc many-to-many joins
  {query, _, _} = from(c in Post, join: assoc(c, :crazy_comments)) |> plan()
  assert query.sources == {{"posts", Post, "my_prefix"}, {"comments", Comment, nil}, {"comment_posts", CommentPost, nil}}

  {query, _, _} = from(c in Post, join: assoc(c, :crazy_comments)) |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"posts", Post, "my_prefix"}, {"comments", Comment, "global"}, {"comment_posts", CommentPost, "global"}}

  {query, _, _} = from(c in Post, join: assoc(c, :crazy_comments), prefix: "local") |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"posts", Post, "my_prefix"}, {"comments", Comment, "local"}, {"comment_posts", CommentPost, "local"}}

  # Schema prefix for assoc many-to-many joins (when join_through is a table name)
  {query, _, _} = from(c in Post, join: assoc(c, :crazy_comments_without_schema)) |> plan()
  assert query.sources == {{"posts", Post, "my_prefix"}, {"comments", Comment, nil}, {"comment_posts", nil, nil}}

  {query, _, _} = from(c in Post, join: assoc(c, :crazy_comments_without_schema)) |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"posts", Post, "my_prefix"}, {"comments", Comment, "global"}, {"comment_posts", nil, "global"}}

  {query, _, _} = from(c in Post, join: assoc(c, :crazy_comments_without_schema), prefix: "local") |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"posts", Post, "my_prefix"}, {"comments", Comment, "local"}, {"comment_posts", nil, "local"}}

  # Schema prefix for assoc has through
  {query, _, _} = from(c in Comment, join: assoc(c, :post_comments)) |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"comments", Comment, "global"}, {"comments", Comment, "global"}, {"posts", Ecto.Query.PlannerTest.Post, "my_prefix"}}

  {query, _, _} = from(c in Comment, join: assoc(c, :post_comments), prefix: "local") |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"comments", Comment, "global"}, {"comments", Comment, "local"}, {"posts", Ecto.Query.PlannerTest.Post, "local"}}
end
# Combination members are planned too: they gain a default select, appear in
# the cache key, and make the outer query :nocache when uncacheable.
test "plan: combination queries" do
  {%{combinations: [{_, query}]}, _, cache} = from(c in Comment, union: ^from(c in Comment)) |> plan()
  assert query.sources == {{"comments", Comment, nil}}
  assert %Ecto.Query.SelectExpr{expr: {:&, [], [0]}} = query.select
  assert [:all, {:union, _}, _] = cache

  {%{combinations: [{_, query}]}, _, cache} = from(c in Comment, union: ^from(c in Comment, where: c in ^[1, 2, 3])) |> plan()
  assert query.sources == {{"comments", Comment, nil}}
  assert %Ecto.Query.SelectExpr{expr: {:&, [], [0]}} = query.select
  assert :nocache = cache
end
# Prefix resolution inside combinations: each member resolves independently —
# the outer query's local `prefix:` never leaks into members, but a
# query-level prefix set via Map.put does (recursively, for nested unions),
# unless the member carries its own prefix or schema prefix.
test "plan: normalizes prefixes for combinations" do
  # No schema prefix in from
  assert {%{combinations: [{_, union_query}]} = query, _, _} = from(Comment, union: ^from(Comment)) |> plan()
  assert query.sources == {{"comments", Comment, nil}}
  assert union_query.sources == {{"comments", Comment, nil}}

  assert {%{combinations: [{_, union_query}]} = query, _, _} = from(Comment, union: ^from(Comment)) |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"comments", Comment, "global"}}
  assert union_query.sources == {{"comments", Comment, "global"}}

  assert {%{combinations: [{_, union_query}]} = query, _, _} = from(Comment, prefix: "local", union: ^from(Comment)) |> plan()
  assert query.sources == {{"comments", Comment, "local"}}
  assert union_query.sources == {{"comments", Comment, nil}}

  assert {%{combinations: [{_, union_query}]} = query, _, _} = from(Comment, prefix: "local", union: ^from(Comment)) |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"comments", Comment, "local"}}
  assert union_query.sources == {{"comments", Comment, "global"}}

  assert {%{combinations: [{_, union_query}]} = query, _, _} = from(Comment, prefix: "local", union: ^(from(Comment) |> Map.put(:prefix, "union"))) |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"comments", Comment, "local"}}
  assert union_query.sources == {{"comments", Comment, "union"}}

  # With schema prefix
  assert {%{combinations: [{_, union_query}]} = query, _, _} = from(Post, union: ^from(p in Post)) |> plan()
  assert query.sources == {{"posts", Post, "my_prefix"}}
  assert union_query.sources == {{"posts", Post, "my_prefix"}}

  assert {%{combinations: [{_, union_query}]} = query, _, _} = from(Post, union: ^from(Post)) |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"posts", Post, "my_prefix"}}
  assert union_query.sources == {{"posts", Post, "my_prefix"}}

  assert {%{combinations: [{_, union_query}]} = query, _, _} = from(Post, prefix: "local", union: ^from(Post)) |> plan()
  assert query.sources == {{"posts", Post, "local"}}
  assert union_query.sources == {{"posts", Post, "my_prefix"}}

  assert {%{combinations: [{_, union_query}]} = query, _, _} = from(Post, prefix: "local", union: ^from(Post)) |> Map.put(:prefix, "global") |> plan()
  assert query.sources == {{"posts", Post, "local"}}
  assert union_query.sources == {{"posts", Post, "my_prefix"}}

  # Deep-nested unions
  assert {%{combinations: [{_, upper_level_union_query}]} = query, _, _} = from(Comment, union: ^from(Comment, union: ^from(Comment))) |> plan()
  assert %{combinations: [{_, deeper_level_union_query}]} = upper_level_union_query
  assert query.sources == {{"comments", Comment, nil}}
  assert upper_level_union_query.sources == {{"comments", Comment, nil}}
  assert deeper_level_union_query.sources == {{"comments", Comment, nil}}

  assert {%{combinations: [{_, upper_level_union_query}]} = query, _, _} = from(Comment, union: ^from(Comment, union: ^from(Comment))) |> Map.put(:prefix, "global") |> plan()
  assert %{combinations: [{_, deeper_level_union_query}]} = upper_level_union_query
  assert query.sources == {{"comments", Comment, "global"}}
  assert upper_level_union_query.sources == {{"comments", Comment, "global"}}
  assert deeper_level_union_query.sources == {{"comments", Comment, "global"}}

  assert {%{combinations: [{_, upper_level_union_query}]} = query, _, _} = from(Comment, prefix: "local", union: ^from(Comment, union: ^from(Comment))) |> plan()
  assert %{combinations: [{_, deeper_level_union_query}]} = upper_level_union_query
  assert query.sources == {{"comments", Comment, "local"}}
  assert upper_level_union_query.sources == {{"comments", Comment, nil}}
  assert deeper_level_union_query.sources == {{"comments", Comment, nil}}

  assert {%{combinations: [{_, upper_level_union_query}]} = query, _, _} = from(Comment, prefix: "local", union: ^from(Comment, union: ^from(Comment))) |> Map.put(:prefix, "global") |> plan()
  assert %{combinations: [{_, deeper_level_union_query}]} = upper_level_union_query
  assert query.sources == {{"comments", Comment, "local"}}
  assert upper_level_union_query.sources == {{"comments", Comment, "global"}}
  assert deeper_level_union_query.sources == {{"comments", Comment, "global"}}
end
# CTE planning: cacheability, cache-key shape (recursive vs non-recursive,
# query vs fragment CTE), parameter merging on update_all/delete_all, and
# prefix propagation into CTE queries.
describe "plan: CTEs" do
  # An uncacheable CTE member makes the whole query :nocache.
  test "with uncacheable queries are uncacheable" do
    {_, _, cache} =
      Comment
      |> with_cte("cte", as: ^from(c in Comment, where: c.id in ^[1, 2, 3]))
      |> plan()

    assert cache == :nocache
  end

  # Query CTEs are planned (default select added, prefix applied) and encoded
  # in the cache key as {:non_recursive_cte, name, member_key}; fragment CTEs
  # as {:recursive_cte, name, fragment_expr}.
  test "on all" do
    {%{with_ctes: with_expr}, _, cache} =
      Comment
      |> with_cte("cte", as: ^put_query_prefix(Comment, "another"))
      |> plan()

    %{queries: [{"cte", query}]} = with_expr
    assert query.sources == {{"comments", Comment, "another"}}
    assert %Ecto.Query.SelectExpr{expr: {:&, [], [0]}} = query.select

    assert [
             :all,
             {"comments", Comment, _, nil},
             {:non_recursive_cte, "cte",
              [:all, {:prefix, "another"}, {"comments", Comment, _, nil}, {:select, {:&, _, [0]}}]}
           ] = cache

    {%{with_ctes: with_expr}, _, cache} =
      Comment
      |> with_cte("cte", as: ^(from(c in Comment, where: c in ^[1, 2, 3])))
      |> plan()

    %{queries: [{"cte", query}]} = with_expr
    assert query.sources == {{"comments", Comment, nil}}
    assert %Ecto.Query.SelectExpr{expr: {:&, [], [0]}} = query.select
    assert :nocache = cache

    {%{with_ctes: with_expr}, _, cache} =
      Comment
      |> recursive_ctes(true)
      |> with_cte("cte", as: fragment("SELECT * FROM comments WHERE id = ?", ^123))
      |> plan()

    %{queries: [{"cte", query_expr}]} = with_expr
    expr = {:fragment, [], [raw: "SELECT * FROM comments WHERE id = ", expr: {:^, [], [0]}, raw: ""]}
    assert expr == query_expr.expr
    assert [:all, {"comments", Comment, _, nil}, {:recursive_cte, "cte", ^expr}] = cache
  end

  # CTE params come before the outer query's params on update_all, and the
  # CTE's own cache key is nested into the operation's key.
  test "on update_all" do
    recent_comments =
      from(c in Comment,
        order_by: [desc: c.posted],
        limit: ^500,
        select: [:id]
      )
      |> put_query_prefix("another")

    {%{with_ctes: with_expr}, [500, "text"], cache} =
      Comment
      |> with_cte("recent_comments", as: ^recent_comments)
      |> join(:inner, [c], r in "recent_comments", on: c.id == r.id)
      |> update(set: [text: ^"text"])
      |> select([c, r], c)
      |> plan(:update_all)

    %{queries: [{"recent_comments", cte}]} = with_expr
    assert {{"comments", Comment, "another"}} = cte.sources
    assert %{expr: {:^, [], [0]}, params: [{500, :integer}]} = cte.limit

    assert [:update_all, _, _, _, _, {:non_recursive_cte, "recent_comments", cte_cache}] = cache

    assert [
             :all,
             {:prefix, "another"},
             {:take, %{0 => {:any, [:id]}}},
             {:limit, {:^, [], [0]}},
             {:order_by, [[desc: _]]},
             {"comments", Comment, _, nil},
             {:select, {:&, [], [0]}}
           ] = cte_cache
  end

  # Same param ordering and key nesting on delete_all.
  test "on delete_all" do
    recent_comments =
      from(c in Comment,
        order_by: [desc: c.posted],
        limit: ^500,
        select: [:id]
      )
      |> put_query_prefix("another")

    {%{with_ctes: with_expr}, [500, "text"], cache} =
      Comment
      |> with_cte("recent_comments", as: ^recent_comments)
      |> join(:inner, [c], r in "recent_comments", on: c.id == r.id and c.text == ^"text")
      |> select([c, r], c)
      |> plan(:delete_all)

    %{queries: [{"recent_comments", cte}]} = with_expr
    assert {{"comments", Comment, "another"}} = cte.sources
    assert %{expr: {:^, [], [0]}, params: [{500, :integer}]} = cte.limit

    assert [:delete_all, _, _, _, {:non_recursive_cte, "recent_comments", cte_cache}] = cache

    assert [
             :all,
             {:prefix, "another"},
             {:take, %{0 => {:any, [:id]}}},
             {:limit, {:^, [], [0]}},
             {:order_by, [[desc: _]]},
             {"comments", Comment, _, nil},
             {:select, {:&, [], [0]}}
           ] = cte_cache
  end

  # Query-level prefix propagates into the CTE unless the CTE sets its own.
  test "prefixes" do
    {%{with_ctes: with_expr} = query, _, _} = Comment |> with_cte("cte", as: ^from(c in Comment)) |> plan()
    %{queries: [{"cte", cte_query}]} = with_expr
    assert query.sources == {{"comments", Comment, nil}}
    assert cte_query.sources == {{"comments", Comment, nil}}

    {%{with_ctes: with_expr} = query, _, _} = Comment |> with_cte("cte", as: ^from(c in Comment)) |> Map.put(:prefix, "global") |> plan()
    %{queries: [{"cte", cte_query}]} = with_expr
    assert query.sources == {{"comments", Comment, "global"}}
    assert cte_query.sources == {{"comments", Comment, "global"}}

    {%{with_ctes: with_expr} = query, _, _} = Comment |> with_cte("cte", as: ^(from(c in Comment) |> Map.put(:prefix, "cte"))) |> Map.put(:prefix, "global") |> plan()
    %{queries: [{"cte", cte_query}]} = with_expr
    assert query.sources == {{"comments", Comment, "global"}}
    assert cte_query.sources == {{"comments", Comment, "cte"}}
  end
end
# Literals whose type cannot match the field type (integer or charlist
# against a :string field) are rejected at normalization.
test "normalize: validates literal types" do
  assert_raise Ecto.QueryError, fn ->
    Comment |> where([c], c.text == 123) |> normalize()
  end

  assert_raise Ecto.QueryError, fn ->
    Comment |> where([c], c.text == '123') |> normalize()
  end
end

# Ecto.Enum: literal atoms compile away, interpolated atoms dump to their
# string mapping; unknown values raise (QueryError for literals, CastError
# for interpolated values), as do atoms against plain :string fields.
test "normalize: casts atom values" do
  {_query, params, _key} = normalize_with_params(Post |> where([p], p.status == :draft))
  assert params == []

  {_query, params, _key} = normalize_with_params(Post |> where([p], p.status == ^:published))
  assert params == ["published"]

  assert_raise Ecto.QueryError, ~r/value `:atoms_are_not_strings` cannot be dumped to type :string/, fn ->
    normalize(Post |> where([p], p.title == :atoms_are_not_strings))
  end

  assert_raise Ecto.QueryError, ~r/value `:unknown_status` cannot be dumped to type \{:parameterized, Ecto.Enum/, fn ->
    normalize(Post |> where([p], p.status == :unknown_status))
  end

  assert_raise Ecto.Query.CastError, ~r/value `:pinned` in `where` cannot be cast to type {:parameterized, Ecto.Enum/, fn ->
    normalize(Post |> where([p], p.status == ^:pinned))
  end
end
# type/2 wraps the interpolated value in an Ecto.Query.Tagged struct; the
# target may be a literal atom type, an interpolated type, a custom type
# (tag keeps the custom module, type resolves to the underlying primitive),
# or another field's type. Uncastable values raise CastError.
test "normalize: tagged types" do
  {query, params, _select} = from(Post, []) |> select([p], type(^"1", :integer))
                             |> normalize_with_params
  assert query.select.expr ==
         %Ecto.Query.Tagged{type: :integer, value: {:^, [], [0]}, tag: :integer}
  assert params == [1]

  {query, params, _select} = from(Post, []) |> select([p], type(^"1", ^:integer))
                             |> normalize_with_params
  assert query.select.expr ==
         %Ecto.Query.Tagged{type: :integer, value: {:^, [], [0]}, tag: :integer}
  assert params == [1]

  {query, params, _select} = from(Post, []) |> select([p], type(^"1", CustomPermalink))
                             |> normalize_with_params
  assert query.select.expr ==
         %Ecto.Query.Tagged{type: :id, value: {:^, [], [0]}, tag: CustomPermalink}
  assert params == [1]

  {query, params, _select} = from(Post, []) |> select([p], type(^"1", p.visits))
                             |> normalize_with_params
  assert query.select.expr ==
         %Ecto.Query.Tagged{type: :integer, value: {:^, [], [0]}, tag: :integer}
  assert params == [1]

  assert_raise Ecto.Query.CastError, ~r/value `"1"` in `select` cannot be cast to type Ecto.UUID/, fn ->
    from(Post, []) |> select([p], type(^"1", Ecto.UUID)) |> normalize
  end
end
# type/2 over fragments normalizes for literal, interpolated, and
# parameterized target types (smoke test: no assertion beyond "does not raise").
test "normalize: select types" do
  param_type = Ecto.ParameterizedType.init(Ecto.Enum, values: [:foo, :bar])
  _ = from(p in "posts", select: type(fragment("cost"), :decimal)) |> normalize()
  _ = from(p in "posts", select: type(fragment("cost"), ^:decimal)) |> normalize()
  _ = from(p in "posts", select: type(fragment("cost"), ^param_type)) |> normalize()

  frag = ["$eq": 42]
  _ = from(p in "posts", select: type(fragment(^frag), :decimal)) |> normalize()
  _ = from(p in "posts", select: type(fragment(^frag), ^:decimal)) |> normalize()
  _ = from(p in "posts", select: type(fragment(^frag), ^param_type)) |> normalize()
end

# as(:name) resolves to the named binding's index; missing names raise.
test "normalize: late bindings with as" do
  query = from(Post, as: :posts, where: as(:posts).code == ^123) |> normalize()
  assert Macro.to_string(hd(query.wheres).expr) == "&0.code() == ^0"

  assert_raise Ecto.QueryError, ~r/could not find named binding `as\(:posts\)`/, fn ->
    from(Post, where: as(:posts).code == ^123) |> normalize()
  end
end
# parent_as/1 inside a subquery resolves against the outer query's named
# bindings (in wheres and selects); it may only reference the parent's `from`
# binding, must exist in the parent, and is illegal outside subqueries.
test "normalize: late parent bindings with as" do
  child = from(c in Comment, where: parent_as(:posts).posted == c.posted)
  query = from(Post, as: :posts, join: c in subquery(child)) |> normalize()
  assert Macro.to_string(hd(hd(query.joins).source.query.wheres).expr) == "parent_as(&0).posted() == &0.posted()"

  child = from(c in Comment, select: %{map: parent_as(:posts).posted})
  query = from(Post, as: :posts, join: c in subquery(child)) |> normalize()
  assert Macro.to_string(hd(query.joins).source.query.select.expr) == "%{map: parent_as(&0).posted()}"

  assert_raise Ecto.SubQueryError, ~r/the parent_as in a subquery select used as a join can only access the `from` binding in query/, fn ->
    child = from(c in Comment, select: %{map: parent_as(:itself).posted})
    from(Post, as: :posts, join: c in subquery(child), as: :itself) |> normalize()
  end

  assert_raise Ecto.SubQueryError, ~r/could not find named binding `parent_as\(:posts\)`/, fn ->
    from(Post, join: c in subquery(child)) |> normalize()
  end

  assert_raise Ecto.QueryError, ~r/`parent_as\(:posts\)` can only be used in subqueries/, fn ->
    from(Post, where: parent_as(:posts).code == ^123) |> normalize()
  end
end
# Association :where filters produce params in join order when mixing
# has_many and many_to_many associations.
test "normalize: assoc join with wheres that have regular filters" do
  # Mixing both has_many and many_to_many
  {_query, params, _select} =
    from(post in Post,
      join: comment in assoc(post, :crazy_comments),
      join: post in assoc(comment, :crazy_post)) |> normalize_with_params()

  assert params == ["crazycomment", "crazypost"]
end

# has_many `{:in, list}` filters appear in the join's on-clause (as-join)
# or the where-clause (Ecto.assoc/2), with params flattened accordingly.
test "normalize: has_many assoc join with wheres" do
  {query, params, _select} =
    from(comment in Comment, join: post in assoc(comment, :crazy_post_with_list)) |> normalize_with_params()

  assert inspect(query) =~ "join: p1 in Ecto.Query.PlannerTest.Post, on: p1.id == c0.crazy_post_id and p1.post_title in ^..."
  assert params == ["crazypost1", "crazypost2"]

  {query, params, _} =
    Ecto.assoc(%Comment{crazy_post_id: 1}, :crazy_post_with_list)
    |> normalize_with_params()

  assert inspect(query) =~ "where: p0.id == ^... and p0.post_title in ^..."
  assert params == [1, "crazypost1", "crazypost2"]
end
# many_to_many with a join schema: :where filters go on the related source's
# on-clause, :join_where filters on the join-through source; param order
# differs between the join form and the Ecto.assoc/2 form.
test "normalize: many_to_many assoc join with schema and wheres" do
  {query, params, _select} =
    from(post in Post, join: comment in assoc(post, :crazy_comments_with_list)) |> normalize_with_params()

  assert inspect(query) =~ "join: c1 in Ecto.Query.PlannerTest.Comment, on: c2.comment_id == c1.id and c1.text in ^... and c2.deleted == ^..."
  assert params == ["crazycomment1", "crazycomment2", true]

  {query, params, _} =
    Ecto.assoc(%Post{id: 1}, :crazy_comments_with_list)
    |> normalize_with_params()

  assert inspect(query) =~ "join: c1 in Ecto.Query.PlannerTest.CommentPost, on: c0.id == c1.comment_id and c1.deleted == ^..."
  assert inspect(query) =~ "where: c1.post_id in ^... and c0.text in ^..."
  assert params == [true, 1, "crazycomment1", "crazycomment2"]
end

# Same, but the join-through is a bare table: only :join_where applies and
# the join source is the table name.
test "normalize: many_to_many assoc join without schema and wheres" do
  {query, params, _select} =
    from(post in Post, join: comment in assoc(post, :crazy_comments_without_schema)) |> normalize_with_params()

  assert inspect(query) =~ "join: c1 in Ecto.Query.PlannerTest.Comment, on: c2.comment_id == c1.id and c2.deleted == ^..."
  assert params == [true]

  {query, params, _} =
    Ecto.assoc(%Post{id: 1}, :crazy_comments_without_schema)
    |> normalize_with_params()

  assert inspect(query) =~ "join: c1 in \"comment_posts\", on: c0.id == c1.comment_id and c1.deleted == ^..."
  assert inspect(query) =~ "where: c1.post_id in ^..."
  assert params == [true, 1]
end
# Literal values are dumped through the field type at normalization; a raw
# string against a :naive_datetime field raises.
test "normalize: dumps in query expressions" do
  assert_raise Ecto.QueryError, ~r"cannot be dumped", fn ->
    normalize(from p in Post, where: p.posted == "2014-04-17 00:00:00")
  end
end

# Field references are validated against the schema: unknown fields and
# virtual fields are rejected with descriptive messages.
test "normalize: validate fields" do
  message = ~r"field `unknown` in `select` does not exist in schema Ecto.Query.PlannerTest.Comment"
  assert_raise Ecto.QueryError, message, fn ->
    query = from(Comment, []) |> select([c], c.unknown)
    normalize(query)
  end

  message = ~r"field `temp` in `select` is a virtual field in schema Ecto.Query.PlannerTest.Comment"
  assert_raise Ecto.QueryError, message, fn ->
    query = from(Comment, []) |> select([c], c.temp)
    normalize(query)
  end
end

# A literal list on the right of `in` must dump to {:array, field_type}.
test "normalize: validate fields in left side of in expressions" do
  query = from(Post, []) |> where([p], p.id in [1, 2, 3])
  normalize(query)

  message = ~r"value `\[1, 2, 3\]` cannot be dumped to type \{:array, :string\}"
  assert_raise Ecto.QueryError, message, fn ->
    query = from(Comment, []) |> where([c], c.text in [1, 2, 3])
    normalize(query)
  end
end
# Path access (`field["key"]`, `field[index]`) is validated against embedded
# schemas: unknown keys and integer indexes into embeds_one raise, string
# keys into embeds_many raise; :map fields and schemaless sources accept any
# path unchecked.
test "normalize: validate fields in json_extract_path/2" do
  query = from(Post, []) |> select([p], p.meta["slug"])
  normalize(query)

  query = from(Post, []) |> select([p], p.meta["author"])
  normalize(query)

  query = from(Post, []) |> select([p], p.meta["author"]["name"])
  normalize(query)

  query = from(Post, []) |> select([p], p.metas[0]["slug"])
  normalize(query)

  query = from(Post, []) |> select([p], p.payload["unknown_field"])
  normalize(query)

  query = from(Post, []) |> select([p], p.prefs["unknown_field"])
  normalize(query)

  query = from(p in "posts") |> select([p], p.meta["slug"])
  normalize(query)

  query = from(p in "posts") |> select([p], p.meta["unknown_field"])
  normalize(query)

  query = from(p in "posts") |> select([p], p.meta["author"]["unknown_field"])
  normalize(query)

  query = from(p in "posts") |> select([p], p.metas["not_index"])
  normalize(query)

  query = from(p in "posts") |> select([p], p.metas["not_index"]["unknown_field"])
  normalize(query)

  assert_raise RuntimeError, "expected field `title` to be an embed or a map, got: `:string`", fn ->
    query = from(Post, []) |> select([p], p.title["foo"])
    normalize(query)
  end

  assert_raise RuntimeError, "field `unknown_field` does not exist in Ecto.Query.PlannerTest.PostMeta", fn ->
    query = from(Post, []) |> select([p], p.meta["unknown_field"])
    normalize(query)
  end

  assert_raise RuntimeError, "field `0` does not exist in Ecto.Query.PlannerTest.PostMeta", fn ->
    query = from(Post, []) |> select([p], p.meta[0])
    normalize(query)
  end

  assert_raise RuntimeError, "field `unknown_field` does not exist in Ecto.Query.PlannerTest.Author", fn ->
    query = from(Post, []) |> select([p], p.meta["author"]["unknown_field"])
    normalize(query)
  end

  assert_raise RuntimeError, "cannot use `not_index` to refer to an item in `embeds_many`", fn ->
    query = from(Post, []) |> select([p], p.metas["not_index"])
    normalize(query)
  end
end
  test "normalize: flattens and expands right side of in expressions" do
    # Static lists pass through untouched and contribute no params.
    {query, params, _select} = where(Post, [p], p.id in [1, 2, 3]) |> normalize_with_params()
    assert Macro.to_string(hd(query.wheres).expr) == "&0.id() in [1, 2, 3]"
    assert params == []
    # Pinned elements inside a static list are renumbered to flat param slots.
    {query, params, _select} = where(Post, [p], p.id in [^1, 2, ^3]) |> normalize_with_params()
    assert Macro.to_string(hd(query.wheres).expr) == "&0.id() in [^0, 2, ^1]"
    assert params == [1, 3]
    # A pinned list becomes an ^(offset, length) param range.
    {query, params, _select} = where(Post, [p], p.id in ^[]) |> normalize_with_params()
    assert Macro.to_string(hd(query.wheres).expr) == "&0.id() in ^(0, 0)"
    assert params == []
    {query, params, _select} = where(Post, [p], p.id in ^[1, 2, 3]) |> normalize_with_params()
    assert Macro.to_string(hd(query.wheres).expr) == "&0.id() in ^(0, 3)"
    assert params == [1, 2, 3]
    # Surrounding params shift around the expanded list range.
    {query, params, _select} = where(Post, [p], p.title == ^"foo" and p.id in ^[1, 2, 3] and
                                                p.title == ^"bar") |> normalize_with_params()
    assert Macro.to_string(hd(query.wheres).expr) ==
           "&0.post_title() == ^0 and &0.id() in ^(1, 3) and &0.post_title() == ^4"
    assert params == ["foo", 1, 2, 3, "bar"]
  end
test "normalize: reject empty order by and group by" do
query = order_by(Post, [], []) |> normalize()
assert query.order_bys == []
query = order_by(Post, [], ^[]) |> normalize()
assert query.order_bys == []
query = group_by(Post, [], []) |> normalize()
assert query.group_bys == []
end
  describe "normalize: CTEs" do
    test "single-level" do
      # Schemaless CTE: the map select is expanded into explicit fields.
      %{with_ctes: with_expr} =
        Comment
        |> with_cte("cte", as: ^from(c in "comments", select: %{id: c.id, text: c.text}))
        |> normalize()
      %{queries: [{"cte", query}]} = with_expr
      assert query.sources == {{"comments", nil, nil}}
      assert {:%{}, [], [id: _, text: _]} = query.select.expr
      assert [id: {{:., _, [{:&, _, [0]}, :id]}, _, []},
              text: {{:., [{:type, _} | _], [{:&, _, [0]}, :text]}, _, []}] = query.select.fields
      # Schema-backed CTE: the implicit select expands to the schema's fields.
      %{with_ctes: with_expr} =
        Comment
        |> with_cte("cte", as: ^(from(c in Comment, where: c in ^[1, 2, 3])))
        |> normalize()
      %{queries: [{"cte", query}]} = with_expr
      assert query.sources == {{"comments", Comment, nil}}
      assert {:&, [], [0]} = query.select.expr
      assert [{:id, {{:., _, [{:&, _, [0]}, :id]}, _, []}},
              {:text, {{:., _, [{:&, _, [0]}, :text]}, _, []}},
              _ | _] = query.select.fields
    end
    test "multi-level with select" do
      sensors =
        "sensors"
        |> where(id: ^"id")
        |> select([s], map(s, [:number]))
      # There was a bug where the parameter in select would be reverted
      # to ^0, this test aims to guarantee it remains ^1
      agg_values =
        "values"
        |> with_cte("sensors_cte", as: ^sensors)
        |> join(:inner, [v], s in "sensors_cte")
        |> select([v, s], %{bucket: ^123 + v.number})
      query =
        "agg_values"
        |> with_cte("agg_values", as: ^agg_values)
        |> select([agg_v], agg_v.bucket)
      query = normalize(query)
      [{"agg_values", query}] = query.with_ctes.queries
      assert Macro.to_string(query.select.fields) == "[bucket: ^1 + &0.number()]"
    end
    test "with field select" do
      # A single-field select inside a CTE is normalized to a keyword field list.
      query =
        "parent"
        |> with_cte("cte", as: ^from(r in "cte", select: r.child))
        |> select([e], [:parent])
        |> normalize()
      [{"cte", query}] = query.with_ctes.queries
      assert Macro.to_string(query.select.fields) == "[child: &0.child()]"
    end
  end
  test "normalize: select" do
    # No explicit select: the whole source is selected and expanded into the
    # schema's full field list.
    query = from(Post, []) |> normalize()
    assert query.select.expr ==
           {:&, [], [0]}
    assert query.select.fields ==
           select_fields([:id, :post_title, :text, :code, :posted, :visits, :links, :prefs, :status, :meta, :metas], 0)
    # Mixed tuple selects: the full struct fields come first, then any extra
    # field accesses; literals (like "Post") add no fields.
    query = from(Post, []) |> select([p], {p, p.title, "Post"}) |> normalize()
    assert query.select.fields ==
           select_fields([:id, :post_title, :text, :code, :posted, :visits, :links, :prefs, :status, :meta, :metas], 0) ++
             [{{:., [type: :string], [{:&, [], [0]}, :post_title]}, [], []}]
    query = from(Post, []) |> select([p], {p.title, p, "Post"}) |> normalize()
    assert query.select.fields ==
           select_fields([:id, :post_title, :text, :code, :posted, :visits, :links, :prefs, :status, :meta, :metas], 0) ++
             [{{:., [type: :string], [{:&, [], [0]}, :post_title]}, [], []}]
    # Preloaded join sources contribute their fields before the extras.
    query =
      from(Post, [])
      |> join(:inner, [_], c in Comment)
      |> preload([_, c], comments: c)
      |> select([p, _], {p.title, p})
      |> normalize()
    assert query.select.fields ==
           select_fields([:id, :post_title, :text, :code, :posted, :visits, :links, :prefs, :status, :meta, :metas], 0) ++
             select_fields([:id, :text, :posted, :uuid, :crazy_comment, :post_id, :crazy_post_id], 1) ++
             [{{:., [type: :string], [{:&, [], [0]}, :post_title]}, [], []}]
  end
  test "normalize: select with unions" do
    union_query = from(Post, []) |> select([p], %{title: p.title, category: "Post"})
    query = from(Post, []) |> select([p], %{title: p.title, category: "Post"}) |> union(^union_query) |> normalize()
    union_query = query.combinations |> List.first() |> elem(1)
    # The literal "Post" must survive into the field list, and both union
    # branches must normalize to identical select fields.
    assert "Post" in query.select.fields
    assert query.select.fields == union_query.select.fields
  end
test "normalize: select with unions and virtual literal" do
union_query = from(Post, []) |> select([p], %{title: p.title, temp: true})
query = from(Post, []) |> select([p], %{title: p.title, temp: false}) |> union(^union_query) |> normalize()
union_query = query.combinations |> List.first() |> elem(1)
assert false in query.select.fields
assert true in union_query.select.fields
end
test "normalize: select on schemaless" do
assert_raise Ecto.QueryError, ~r"need to explicitly pass a :select clause in query", fn ->
from("posts", []) |> normalize()
end
end
  test "normalize: select with struct/2" do
    # struct/2 requires a schema source.
    assert_raise Ecto.QueryError, ~r"struct/2 in select expects a source with a schema", fn ->
      "posts" |> select([p], struct(p, [:id, :title])) |> normalize()
    end
    # struct/2 narrows the selected fields to the given subset
    # (note :title maps to the :post_title source column).
    query = Post |> select([p], struct(p, [:id, :title])) |> normalize()
    assert query.select.expr == {:&, [], [0]}
    assert query.select.fields == select_fields([:id, :post_title], 0)
    # Extra expressions are appended after the subset fields.
    query = Post |> select([p], {struct(p, [:id, :title]), p.title}) |> normalize()
    assert query.select.fields ==
           select_fields([:id, :post_title], 0) ++
             [{{:., [type: :string], [{:&, [], [0]}, :post_title]}, [], []}]
    # struct/2 on a join binding narrows that binding only.
    query =
      Post
      |> join(:inner, [_], c in Comment)
      |> select([p, c], {p, struct(c, [:id, :text])})
      |> normalize()
    assert query.select.fields ==
           select_fields([:id, :post_title, :text, :code, :posted, :visits, :links, :prefs, :status, :meta, :metas], 0) ++
             select_fields([:id, :text], 1)
  end
  test "normalize: select with struct/2 on assoc" do
    # Nested field lists in struct/2 narrow the preloaded assoc's fields too.
    query =
      Post
      |> join(:inner, [_], c in Comment)
      |> select([p, c], struct(p, [:id, :title, comments: [:id, :text]]))
      |> preload([p, c], comments: c)
      |> normalize()
    assert query.select.expr == {:&, [], [0]}
    assert query.select.fields ==
           select_fields([:id, :post_title], 0) ++
             select_fields([:id, :text], 1)
    # Deeper nesting (assoc-of-assoc and sibling assocs) appends the narrowed
    # field lists for each referenced binding in preload order.
    query =
      Post
      |> join(:inner, [_], c in Comment)
      |> select([p, c], struct(p, [:id, :title, comments: [:id, :text, post: :id], extra_comments: :id]))
      |> preload([p, c], comments: {c, post: p}, extra_comments: c)
      |> normalize()
    assert query.select.expr == {:&, [], [0]}
    assert query.select.fields ==
           select_fields([:id, :post_title], 0) ++
             select_fields([:id, :text], 1) ++
             select_fields([:id], 0) ++
             select_fields([:id], 1)
  end
test "normalize: select with struct/2 on fragment" do
assert_raise Ecto.QueryError, ~r"it is not possible to return a struct subset of a fragment", fn ->
Post
|> join(:inner, [_], c in fragment("comments"))
|> select([_, c], struct(c, [:id]))
|> normalize()
end
end
  test "normalize: select with map/2" do
    # map/2 narrows the selected fields just like struct/2
    # (:title maps to the :post_title source column).
    query = Post |> select([p], map(p, [:id, :title])) |> normalize()
    assert query.select.expr == {:&, [], [0]}
    assert query.select.fields == select_fields([:id, :post_title], 0)
    # Extra expressions are appended after the subset fields.
    query = Post |> select([p], {map(p, [:id, :title]), p.title}) |> normalize()
    assert query.select.fields ==
           select_fields([:id, :post_title], 0) ++
             [{{:., [type: :string], [{:&, [], [0]}, :post_title]}, [], []}]
    # map/2 on a join binding narrows that binding only.
    query =
      Post
      |> join(:inner, [_], c in Comment)
      |> select([p, c], {p, map(c, [:id, :text])})
      |> normalize()
    assert query.select.fields ==
           select_fields([:id, :post_title, :text, :code, :posted, :visits, :links, :prefs, :status, :meta, :metas], 0) ++
             select_fields([:id, :text], 1)
  end
  test "normalize: select with map/2 on assoc" do
    # Nested field lists in map/2 narrow the preloaded assoc's fields too.
    query =
      Post
      |> join(:inner, [_], c in Comment)
      |> select([p, c], map(p, [:id, :title, comments: [:id, :text]]))
      |> preload([p, c], comments: c)
      |> normalize()
    assert query.select.expr == {:&, [], [0]}
    assert query.select.fields ==
           select_fields([:id, :post_title], 0) ++
             select_fields([:id, :text], 1)
    # Deeper nesting (assoc-of-assoc and sibling assocs) appends the narrowed
    # field lists for each referenced binding in preload order.
    query =
      Post
      |> join(:inner, [_], c in Comment)
      |> select([p, c], map(p, [:id, :title, comments: [:id, :text, post: :id], extra_comments: :id]))
      |> preload([p, c], comments: {c, post: p}, extra_comments: c)
      |> normalize()
    assert query.select.expr == {:&, [], [0]}
    assert query.select.fields ==
           select_fields([:id, :post_title], 0) ++
             select_fields([:id, :text], 1) ++
             select_fields([:id], 0) ++
             select_fields([:id], 1)
  end
test "normalize: select with map/2 on fragment" do
query =
Post
|> join(:inner, [_], f in fragment("select 1 as a, 2 as b"))
|> select([_, f], map(f, [:a, :b]))
|> normalize()
assert query.select.expr == {:&, [], [1]}
assert query.select.fields ==
select_fields([:a], 1) ++
select_fields([:b], 1)
end
test "normalize: windows" do
assert_raise Ecto.QueryError, ~r"unknown window :v given to over/2", fn ->
Comment
|> windows([c], w: [partition_by: c.id])
|> select([c], count(c.id) |> over(:v))
|> normalize()
end
end
test "normalize: preload errors" do
message = ~r"the binding used in `from` must be selected in `select` when using `preload`"
assert_raise Ecto.QueryError, message, fn ->
Post |> preload(:hello) |> select([p], p.title) |> normalize
end
message = ~r"invalid query has specified more bindings than"
assert_raise Ecto.QueryError, message, fn ->
Post |> preload([p, c], comments: c) |> normalize
end
end
  test "normalize: preload assoc merges" do
    # Two preloads of the same assoc (:comments) with different nested assocs
    # must merge into a single assoc tree keyed by join index.
    {_, _, select} =
      from(p in Post)
      |> join(:inner, [p], c in assoc(p, :comments))
      |> join(:inner, [_, c], cp in assoc(c, :comment_posts))
      |> join(:inner, [_, c], ip in assoc(c, :post))
      |> preload([_, c, cp, _], comments: {c, comment_posts: cp})
      |> preload([_, c, _, ip], comments: {c, post: ip})
      |> normalize_with_params()
    assert select.assocs == [comments: {1, [comment_posts: {2, []}, post: {3, []}]}]
  end
  test "normalize: preload assoc errors" do
    # Bound preloads must reference actual associations.
    message = ~r"field `Ecto.Query.PlannerTest.Post.not_field` in preload is not an association"
    assert_raise Ecto.QueryError, message, fn ->
      query = from(p in Post, join: c in assoc(p, :comments), preload: [not_field: c])
      normalize(query)
    end
    # Bound preloads only work with inner/left/lateral joins.
    message = ~r"requires an inner, left or lateral join, got right join"
    assert_raise Ecto.QueryError, message, fn ->
      query = from(p in Post, right_join: c in assoc(p, :comments), preload: [comments: c])
      normalize(query)
    end
  end
test "normalize: fragments do not support preloads" do
query = from p in Post, join: c in fragment("..."), preload: [comments: c]
assert_raise Ecto.QueryError, ~r/can only preload sources with a schema/, fn ->
normalize(query)
end
end
test "normalize: all does not allow updates" do
message = ~r"`all` does not allow `update` expressions"
assert_raise Ecto.QueryError, message, fn ->
from(p in Post, update: [set: [name: "foo"]]) |> normalize(:all)
end
end
  test "normalize: update all only allow filters and checks updates" do
    # At least one field must be updated.
    message = ~r"`update_all` requires at least one field to be updated"
    assert_raise Ecto.QueryError, message, fn ->
      from(p in Post, select: p, update: []) |> normalize(:update_all)
    end
    # The same field may not be set twice.
    message = ~r"duplicate field `title` for `update_all`"
    assert_raise Ecto.QueryError, message, fn ->
      from(p in Post, select: p, update: [set: [title: "foo", title: "bar"]])
      |> normalize(:update_all)
    end
    # Only with_cte/where/join expressions are allowed (no order_by etc.).
    message = ~r"`update_all` allows only `with_cte`, `where` and `join` expressions"
    assert_raise Ecto.QueryError, message, fn ->
      from(p in Post, order_by: p.title, update: [set: [title: "foo"]]) |> normalize(:update_all)
    end
  end
  test "normalize: delete all only allow filters and forbids updates" do
    # delete_all must not carry update expressions.
    message = ~r"`delete_all` does not allow `update` expressions"
    assert_raise Ecto.QueryError, message, fn ->
      from(p in Post, update: [set: [name: "foo"]]) |> normalize(:delete_all)
    end
    # Only with_cte/where/join expressions are allowed (no order_by etc.).
    message = ~r"`delete_all` allows only `with_cte`, `where` and `join` expressions"
    assert_raise Ecto.QueryError, message, fn ->
      from(p in Post, order_by: p.title) |> normalize(:delete_all)
    end
  end
  describe "normalize: subqueries in boolean expressions" do
    test "replaces {:subquery, index} with an Ecto.SubQuery struct" do
      subquery = from(p in Post, select: p.visits)
      # `in subquery(...)`
      %{wheres: [where]} =
        from(p in Post, where: p.visits in subquery(subquery))
        |> normalize()
      assert {:in, _, [_, %Ecto.SubQuery{}] } = where.expr
      # comparison against all(...)
      %{wheres: [where]} =
        from(p in Post, where: p.visits >= all(subquery))
        |> normalize()
      assert {:>=, _, [_, {:all, _, [%Ecto.SubQuery{}] }]} = where.expr
      # exists(...)
      %{wheres: [where]} =
        from(p in Post, where: exists(subquery))
        |> normalize()
      assert {:exists, _, [%Ecto.SubQuery{}]} = where.expr
    end
    test "raises a runtime error if more than 1 field is selected" do
      # Boolean-position subqueries must select exactly one field.
      s = from(p in Post, select: [p.visits, p.id])
      assert_raise Ecto.QueryError, fn ->
        from(p in Post, where: p.id in subquery(s))
        |> normalize()
      end
      assert_raise Ecto.QueryError, fn ->
        from(p in Post, where: p.id > any(s))
        |> normalize()
      end
      assert_raise Ecto.QueryError, fn ->
        from(p in Post, where: p.id > all(s))
        |> normalize()
      end
    end
  end
end
| 41.169096 | 193 | 0.589388 |
1c6d2e9d22c00cc36931010cd703e62aacf7ad26 | 2,011 | ex | Elixir | apps/bytepack_web/lib/bytepack_web/live/sale_live/revoke_component.ex | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | 313 | 2020-12-03T17:26:24.000Z | 2022-03-18T09:05:14.000Z | apps/bytepack_web/lib/bytepack_web/live/sale_live/revoke_component.ex | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | null | null | null | apps/bytepack_web/lib/bytepack_web/live/sale_live/revoke_component.ex | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | 57 | 2020-12-03T17:41:53.000Z | 2022-03-17T17:28:16.000Z | defmodule BytepackWeb.SaleLive.RevokeComponent do
use BytepackWeb, :live_component
alias Bytepack.Sales
  @impl true
  # Renders the revoke-sale modal: a warning callout explaining the consequences
  # for the buyer, plus a form capturing the revoke reason. The form targets
  # this component and fires the "validate"/"save" events handled below.
  def render(assigns) do
    ~L"""
    <div class="modal-header">
      <h4 class="modal-title">Revoke <%= Sales.sale_state(@sale) %> sale</h4>
    </div>
    <div class="modal-body">
      <div class="alert alert-warning mb-3 mt-0" role="alert">
        <h4 class="alert-heading"><i class="feather-icon icon-alert-triangle mr-1"></i> Attention!</h4>
        <p class="mb-0">
          If you revoke a sale, the purchase will be shown as "expired" to the buyer
          with the reason given below. Once a sale is revoked, the buyer will no longer
          be able to access its packages. This action is reversible.
        </p>
      </div>
      <%= f = form_for @changeset, "#",
                id: "form-sale-revoke",
                phx_target: @myself,
                phx_change: "validate",
                phx_submit: "save" %>
        <%= input f, :revoke_reason, label: "Reason" %>
        <%= submit "Revoke", phx_disable_with: "Revoking..." %>
      </form>
    </div>
    """
  end
@impl true
def update(assigns, socket) do
changeset = Sales.change_revoke_sale(assigns.sale)
{:ok, socket |> assign(assigns) |> assign_new(:changeset, fn -> changeset end)}
end
@impl true
def handle_event("validate", %{"sale" => params}, socket) do
changeset =
socket.assigns.sale
|> Sales.change_revoke_sale(params)
|> Map.put(:action, :validate)
{:noreply, assign(socket, :changeset, changeset)}
end
  @impl true
  # Persists the revocation. On success, flashes and redirects back to the
  # configured return path; on failure, re-renders the form with the errors.
  def handle_event("save", %{"sale" => params}, socket) do
    case Sales.revoke_sale(socket.assigns.audit_context, socket.assigns.sale, params) do
      {:ok, _sale} ->
        {:noreply,
         socket
         |> put_flash(:info, "Sale revoked successfully")
         |> push_redirect(to: socket.assigns.return_to)}
      {:error, changeset} ->
        {:noreply, assign(socket, :changeset, changeset)}
    end
  end
end
| 32.435484 | 103 | 0.60368 |
1c6d332d0e7364f7e4d950e95e46cb6f2db2215e | 4,034 | ex | Elixir | hello/lib/hello/accounts/accounts.ex | Ogiwara-CostlierRain464/Elixir | 498d0aa7f1b66ed1674b89c0fec4a5cb34234a33 | [
"MIT"
] | null | null | null | hello/lib/hello/accounts/accounts.ex | Ogiwara-CostlierRain464/Elixir | 498d0aa7f1b66ed1674b89c0fec4a5cb34234a33 | [
"MIT"
] | null | null | null | hello/lib/hello/accounts/accounts.ex | Ogiwara-CostlierRain464/Elixir | 498d0aa7f1b66ed1674b89c0fec4a5cb34234a33 | [
"MIT"
] | null | null | null | defmodule Hello.Accounts do
@moduledoc """
The Accounts context.
"""
import Ecto.Query, warn: false
alias Hello.Repo
alias Hello.Accounts.{User, Credential}
@doc """
Returns the list of users.
## Examples
iex> list_users()
[%User{}, ...]
"""
def list_users do
User
|> Repo.all()
|> Repo.preload(:credential)
end
@doc """
Gets a single user.
Raises `Ecto.NoResultsError` if the User does not exist.
## Examples
iex> get_user!(123)
%User{}
iex> get_user!(456)
** (Ecto.NoResultsError)
"""
def get_user!(id) do
User
|> Repo.get!(id)
|> Repo.preload(:credential)
end
  @doc """
  Creates a user together with its nested credential.

  ## Examples

      iex> create_user(%{field: value})
      {:ok, %User{}}

      iex> create_user(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_user(attrs \\ %{}) do
    %User{}
    |> User.changeset(attrs)
    # cast_assoc validates/inserts the nested "credential" params in one go.
    |> Ecto.Changeset.cast_assoc(:credential, with: &Credential.changeset/2)
    |> Repo.insert()
  end
  @doc """
  Updates a user together with its nested credential.

  ## Examples

      iex> update_user(user, %{field: new_value})
      {:ok, %User{}}

      iex> update_user(user, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_user(%User{} = user, attrs) do
    user
    |> User.changeset(attrs)
    # cast_assoc validates/updates the nested "credential" params in one go.
    |> Ecto.Changeset.cast_assoc(:credential, with: &Credential.changeset/2)
    |> Repo.update()
  end
@doc """
Deletes a User.
## Examples
iex> delete_user(user)
{:ok, %User{}}
iex> delete_user(user)
{:error, %Ecto.Changeset{}}
"""
def delete_user(%User{} = user) do
Repo.delete(user)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking user changes.
## Examples
iex> change_user(user)
%Ecto.Changeset{source: %User{}}
"""
def change_user(%User{} = user) do
User.changeset(user, %{})
end
alias Hello.Accounts.Credential
@doc """
Returns the list of credentials.
## Examples
iex> list_credentials()
[%Credential{}, ...]
"""
def list_credentials do
Repo.all(Credential)
end
  @doc """
  Gets a single credential.

  Raises `Ecto.NoResultsError` if the Credential does not exist.

  ## Examples

      iex> get_credential!(123)
      %Credential{}

      iex> get_credential!(456)
      ** (Ecto.NoResultsError)

  """
  def get_credential!(id), do: Repo.get!(Credential, id)
  @doc """
  Creates a credential.

  ## Examples

      iex> create_credential(%{field: value})
      {:ok, %Credential{}}

      iex> create_credential(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_credential(attrs \\ %{}) do
    %Credential{}
    |> Credential.changeset(attrs)
    |> Repo.insert()
  end
  @doc """
  Updates a credential.

  ## Examples

      iex> update_credential(credential, %{field: new_value})
      {:ok, %Credential{}}

      iex> update_credential(credential, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_credential(%Credential{} = credential, attrs) do
    credential
    |> Credential.changeset(attrs)
    |> Repo.update()
  end
@doc """
Deletes a Credential.
## Examples
iex> delete_credential(credential)
{:ok, %Credential{}}
iex> delete_credential(credential)
{:error, %Ecto.Changeset{}}
"""
def delete_credential(%Credential{} = credential) do
Repo.delete(credential)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking credential changes.
## Examples
iex> change_credential(credential)
%Ecto.Changeset{source: %Credential{}}
"""
def change_credential(%Credential{} = credential) do
Credential.changeset(credential, %{})
end
  @doc """
  Looks up the user owning the credential with the given email.

  Returns `{:ok, user}` when a matching credential exists and
  `{:error, :unauthorized}` otherwise.

  SECURITY NOTE(review): the `_password` argument is currently ignored — any
  password authenticates a known email. This looks like the placeholder from
  the Phoenix contexts guide; add a real password verification step before
  relying on this in production.
  """
  def authenticate_by_email_password(email, _password) do
    query =
      from u in User,
        inner_join: c in assoc(u, :credential),
        where: c.email == ^email
    case Repo.one(query) do
      %User{} = user -> {:ok, user}
      nil -> {:error, :unauthorized}
    end
  end
end
| 18.253394 | 76 | 0.598661 |
1c6d40382e1dcf96a5847353fb59095a96d5a98b | 49 | ex | Elixir | lib/novel_reader/model/feed.ex | jinyeow/novel_reader | 090c1076110f441d82d99f27f82958b79bf73d63 | [
"MIT"
] | null | null | null | lib/novel_reader/model/feed.ex | jinyeow/novel_reader | 090c1076110f441d82d99f27f82958b79bf73d63 | [
"MIT"
] | null | null | null | lib/novel_reader/model/feed.ex | jinyeow/novel_reader | 090c1076110f441d82d99f27f82958b79bf73d63 | [
"MIT"
] | null | null | null | defmodule NovelReader.Model.Feed do
# TODO
end
| 12.25 | 35 | 0.77551 |
1c6d450b39a98929173cca8677d7aae2c747aff3 | 17,455 | exs | Elixir | test/swoosh/adapters/postmark_test.exs | taobojlen/swoosh | b454ead1ce52602e210acf20e28ed3dce4147341 | [
"MIT"
] | null | null | null | test/swoosh/adapters/postmark_test.exs | taobojlen/swoosh | b454ead1ce52602e210acf20e28ed3dce4147341 | [
"MIT"
] | null | null | null | test/swoosh/adapters/postmark_test.exs | taobojlen/swoosh | b454ead1ce52602e210acf20e28ed3dce4147341 | [
"MIT"
] | null | null | null | defmodule Swoosh.Adapters.PostmarkTest do
use Swoosh.AdapterCase, async: true
import Swoosh.Email
alias Swoosh.Adapters.Postmark
  # Canned Postmark API success payload served by the Bypass stubs below; the
  # MessageID in it is what deliver/2 is expected to surface as `:id`.
  @success_response """
  {
    "ErrorCode": 0,
    "Message": "OK",
    "MessageID": "b7bc2f4a-e38e-4336-af7d-e6c392c2f817",
    "SubmittedAt": "2010-11-26T12:01:05.1794748-05:00",
    "To": "tony.stark@example.com"
  }
  """
  # Shared fixtures: a Bypass server standing in for the Postmark API, a config
  # pointing at it, and a minimal valid email.
  setup do
    bypass = Bypass.open()
    config = [base_url: "http://localhost:#{bypass.port}", api_key: "jarvis"]
    valid_email =
      new()
      |> from("steve.rogers@example.com")
      |> to("tony.stark@example.com")
      |> subject("Hello, Avengers!")
      |> html_body("<h1>Hello</h1>")
    {:ok, bypass: bypass, valid_email: valid_email, config: config}
  end
  test "a sent email results in :ok", %{bypass: bypass, config: config, valid_email: email} do
    # The stub asserts on the serialized request (path, method, JSON body)
    # before replying with the canned success payload.
    Bypass.expect(bypass, fn conn ->
      conn = parse(conn)
      body_params = %{
        "Subject" => "Hello, Avengers!",
        "To" => "tony.stark@example.com",
        "From" => "steve.rogers@example.com",
        "HtmlBody" => "<h1>Hello</h1>"
      }
      assert body_params == conn.body_params
      assert "/email" == conn.request_path
      assert "POST" == conn.method
      Plug.Conn.resp(conn, 200, @success_response)
    end)
    assert Postmark.deliver(email, config) == {:ok, %{id: "b7bc2f4a-e38e-4336-af7d-e6c392c2f817"}}
  end
  test "deliver/1 with all fields returns :ok", %{bypass: bypass, config: config} do
    email =
      new()
      |> from({"T Stark", "tony.stark@example.com"})
      |> to("wasp.avengers@example.com")
      |> to({"Steve Rogers", "steve.rogers@example.com"})
      |> subject("Hello, Avengers!")
      |> cc({"Bruce Banner", "hulk.smash@example.com"})
      |> cc("thor.odinson@example.com")
      |> bcc({"Clinton Francis Barton", "hawk.eye@example.com"})
      |> bcc("beast.avengers@example.com")
      |> reply_to("iron.stark@example.com")
      |> html_body("<h1>Hello</h1>")
      |> text_body("Hello")
    Bypass.expect(bypass, fn conn ->
      conn = parse(conn)
      # Note the rendered recipient lists: the recipient added last comes first,
      # and named recipients are rendered as `"Name" <address>`.
      body_params = %{
        "Subject" => "Hello, Avengers!",
        "To" => "\"Steve Rogers\" <steve.rogers@example.com>, wasp.avengers@example.com",
        "From" => "\"T Stark\" <tony.stark@example.com>",
        "Cc" => "thor.odinson@example.com, \"Bruce Banner\" <hulk.smash@example.com>",
        "Bcc" => "beast.avengers@example.com, \"Clinton Francis Barton\" <hawk.eye@example.com>",
        "ReplyTo" => "iron.stark@example.com",
        "TextBody" => "Hello",
        "HtmlBody" => "<h1>Hello</h1>"
      }
      assert body_params == conn.body_params
      assert "/email" == conn.request_path
      assert "POST" == conn.method
      Plug.Conn.resp(conn, 200, @success_response)
    end)
    assert Postmark.deliver(email, config) == {:ok, %{id: "b7bc2f4a-e38e-4336-af7d-e6c392c2f817"}}
  end
  test "deliver/1 with all fields for template id returns :ok", %{bypass: bypass, config: config} do
    template_model = %{
      name: "Tony Stark",
      company: "Avengers"
    }
    email =
      new()
      |> from({"T Stark", "tony.stark@example.com"})
      |> to("avengers@example.com")
      |> put_provider_option(:template_id, 1)
      |> put_provider_option(:template_model, template_model)
    Bypass.expect(bypass, fn conn ->
      conn = parse(conn)
      # Template mail goes to the dedicated endpoint; atom model keys are
      # serialized to string keys in the JSON body.
      body_params = %{
        "To" => "avengers@example.com",
        "From" => "\"T Stark\" <tony.stark@example.com>",
        "TemplateId" => 1,
        "TemplateModel" => %{
          "company" => "Avengers",
          "name" => "Tony Stark"
        }
      }
      assert body_params == conn.body_params
      assert "/email/withTemplate" == conn.request_path
      assert "POST" == conn.method
      Plug.Conn.resp(conn, 200, @success_response)
    end)
    assert Postmark.deliver(email, config) == {:ok, %{id: "b7bc2f4a-e38e-4336-af7d-e6c392c2f817"}}
  end
  test "deliver/1 with all fields for template alias returns :ok", %{
    bypass: bypass,
    config: config
  } do
    template_model = %{
      name: "Tony Stark",
      company: "Avengers"
    }
    email =
      new()
      |> from({"T Stark", "tony.stark@example.com"})
      |> to("avengers@example.com")
      |> put_provider_option(:template_alias, "welcome")
      |> put_provider_option(:template_model, template_model)
    Bypass.expect(bypass, fn conn ->
      conn = parse(conn)
      # Same endpoint as template ids, but the template is addressed by alias.
      body_params = %{
        "To" => "avengers@example.com",
        "From" => "\"T Stark\" <tony.stark@example.com>",
        "TemplateAlias" => "welcome",
        "TemplateModel" => %{
          "company" => "Avengers",
          "name" => "Tony Stark"
        }
      }
      assert body_params == conn.body_params
      assert "/email/withTemplate" == conn.request_path
      assert "POST" == conn.method
      Plug.Conn.resp(conn, 200, @success_response)
    end)
    assert Postmark.deliver(email, config) == {:ok, %{id: "b7bc2f4a-e38e-4336-af7d-e6c392c2f817"}}
  end
  test "deliver/1 with custom headers returns :ok", %{bypass: bypass, config: config} do
    email =
      new()
      |> from({"T Stark", "tony.stark@example.com"})
      |> to("avengers@example.com")
      |> header("In-Reply-To", "<1234@example.com>")
      |> header("X-Accept-Language", "en")
      |> header("X-Mailer", "swoosh")
    Bypass.expect(bypass, fn conn ->
      conn = parse(conn)
      # Custom headers are serialized as a list of Name/Value objects.
      body_params = %{
        "To" => "avengers@example.com",
        "From" => "\"T Stark\" <tony.stark@example.com>",
        "Headers" => [
          %{"Name" => "In-Reply-To", "Value" => "<1234@example.com>"},
          %{"Name" => "X-Accept-Language", "Value" => "en"},
          %{"Name" => "X-Mailer", "Value" => "swoosh"}
        ]
      }
      assert body_params == conn.body_params
      assert "/email" == conn.request_path
      assert "POST" == conn.method
      Plug.Conn.resp(conn, 200, @success_response)
    end)
    assert Postmark.deliver(email, config) == {:ok, %{id: "b7bc2f4a-e38e-4336-af7d-e6c392c2f817"}}
  end
  test "deliver/1 with 4xx response", %{bypass: bypass, config: config, valid_email: email} do
    errors =
      "{\"errors\":[\"The provided authorization grant is invalid, expired, or revoked\"], \"message\":\"error\"}"
    Bypass.expect(bypass, &Plug.Conn.resp(&1, 422, errors))
    # A 4xx reply surfaces as {:error, {status, decoded_json_body}}.
    response =
      {:error,
       {422,
        %{
          "errors" => ["The provided authorization grant is invalid, expired, or revoked"],
          "message" => "error"
        }}}
    assert Postmark.deliver(email, config) == response
  end
  test "deliver/1 with 5xx response", %{bypass: bypass, valid_email: email, config: config} do
    errors =
      "{\"errors\":[\"The provided authorization grant is invalid, expired, or revoked\"], \"message\":\"error\"}"
    Bypass.expect(bypass, &Plug.Conn.resp(&1, 500, errors))
    # Server errors follow the same {:error, {status, decoded_json_body}} shape.
    response =
      {:error,
       {500,
        %{
          "errors" => ["The provided authorization grant is invalid, expired, or revoked"],
          "message" => "error"
        }}}
    assert Postmark.deliver(email, config) == response
  end
test "validate_config/1 with valid config", %{config: config} do
assert :ok = config |> Postmark.validate_config()
end
  test "validate_config/1 with invalid config" do
    # A missing :api_key raises with an exact, trailing-newline message.
    assert_raise(
      ArgumentError,
      "expected [:api_key] to be set, got: []\n",
      fn ->
        Postmark.validate_config([])
      end
    )
  end
  test "deliver/1 with all fields and email tagging return :ok", %{bypass: bypass, config: config} do
    email =
      new()
      |> from({"Steve Rogers", "steve.rogers@example.com"})
      |> to("tony.stark@example.com")
      |> put_provider_option(:tag, "top-secret")
    Bypass.expect(bypass, fn conn ->
      conn = parse(conn)
      # The :tag provider option is sent as the top-level "Tag" field.
      body_params = %{
        "To" => "tony.stark@example.com",
        "From" => "\"Steve Rogers\" <steve.rogers@example.com>",
        "Tag" => "top-secret"
      }
      assert body_params == conn.body_params
      assert "/email" == conn.request_path
      assert "POST" == conn.method
      Plug.Conn.resp(conn, 200, @success_response)
    end)
    assert Postmark.deliver(email, config) == {:ok, %{id: "b7bc2f4a-e38e-4336-af7d-e6c392c2f817"}}
  end
  test "deliver/1 with email metadata returns :ok", %{bypass: bypass, config: config} do
    email =
      new()
      |> from({"Steve Rogers", "steve.rogers@example.com"})
      |> to("tony.stark@example.com")
      |> put_provider_option(:metadata, %{"foo" => "bar"})
    Bypass.expect(bypass, fn conn ->
      conn = parse(conn)
      # The :metadata provider option is forwarded as the "Metadata" map.
      body_params = %{
        "To" => "tony.stark@example.com",
        "From" => "\"Steve Rogers\" <steve.rogers@example.com>",
        "Metadata" => %{"foo" => "bar"}
      }
      assert body_params == conn.body_params
      assert "/email" == conn.request_path
      assert "POST" == conn.method
      Plug.Conn.resp(conn, 200, @success_response)
    end)
    assert Postmark.deliver(email, config) == {:ok, %{id: "b7bc2f4a-e38e-4336-af7d-e6c392c2f817"}}
  end
  test "deliver/1 with inline attachment uses correct CID", %{bypass: bypass, config: config} do
    email =
      new()
      |> from({"Steve Rogers", "steve.rogers@example.com"})
      |> to("tony.stark@example.com")
      |> attachment(
        Swoosh.Attachment.new("test/support/attachment.txt", type: :inline, cid: "attachment-cid")
      )
    Bypass.expect(bypass, fn conn ->
      conn = parse(conn)
      attachment_content =
        "test/support/attachment.txt"
        |> File.read!()
        |> Base.encode64()
      # The attachment is base64-encoded and the CID is prefixed with "cid:".
      body_params = %{
        "To" => "tony.stark@example.com",
        "From" => "\"Steve Rogers\" <steve.rogers@example.com>",
        "Attachments" => [
          %{
            "Name" => "attachment.txt",
            "ContentType" => "text/plain",
            "Content" => attachment_content,
            "ContentID" => "cid:attachment-cid"
          }
        ]
      }
      assert body_params == conn.body_params
      assert "/email" == conn.request_path
      assert "POST" == conn.method
      Plug.Conn.resp(conn, 200, @success_response)
    end)
    assert Postmark.deliver(email, config) == {:ok, %{id: "b7bc2f4a-e38e-4336-af7d-e6c392c2f817"}}
  end
  test "delivery/2 with defined message stream returns :ok", %{
    bypass: bypass,
    config: config
  } do
    email =
      new()
      |> from({"T Stark", "tony.stark@example.com"})
      |> to("avengers@example.com")
      |> subject("Hello, Avengers!")
      |> html_body("<h1>Hello</h1>")
      |> text_body("Hello")
      |> put_provider_option(:message_stream, "test-stream-name")
    Bypass.expect(bypass, fn conn ->
      conn = parse(conn)
      # The :message_stream provider option becomes "MessageStream" in the body.
      body_params = %{
        "Subject" => "Hello, Avengers!",
        "To" => "avengers@example.com",
        "From" => "\"T Stark\" <tony.stark@example.com>",
        "HtmlBody" => "<h1>Hello</h1>",
        "TextBody" => "Hello",
        "MessageStream" => "test-stream-name"
      }
      assert body_params == conn.body_params
      assert "/email" == conn.request_path
      assert "POST" == conn.method
      Plug.Conn.resp(conn, 200, @success_response)
    end)
    assert Postmark.deliver(email, config) ==
             {:ok, %{id: "b7bc2f4a-e38e-4336-af7d-e6c392c2f817"}}
  end
  test "deliver_many/2 with two emails not using templates and custom stream returns :ok", %{
    bypass: bypass,
    config: config
  } do
    email_to_steve =
      new()
      |> from({"T Stark", "tony.stark@example.com"})
      |> to({"Steve Rogers", "steve.rogers@example.com"})
      |> subject("Broadcast message: Thanos is here!")
      |> html_body("<h1>Assemble!</h1>")
      |> text_body("Assemble!")
      |> put_provider_option(:message_stream, "test-stream-name")
    email_to_natasha =
      new()
      |> from({"T Stark", "tony.stark@example.com"})
      |> to({"Natasha Romanova", "natasha.romanova@example.com"})
      |> subject("Broadcast message: Thanos is here!")
      |> html_body("<h1>Assemble!</h1>")
      |> text_body("Assemble!")
      |> put_provider_option(:message_stream, "test-stream-name")
    Bypass.expect(bypass, fn conn ->
      conn = parse(conn)
      expected_body_params = %{
        # Plug puts parsed params under the "_json" key when the
        # structure is not a map; otherwise it's just the keys themselves,
        "_json" => [
          %{
            "Subject" => "Broadcast message: Thanos is here!",
            "To" => "\"Steve Rogers\" <steve.rogers@example.com>",
            "From" => "\"T Stark\" <tony.stark@example.com>",
            "TextBody" => "Assemble!",
            "HtmlBody" => "<h1>Assemble!</h1>",
            "MessageStream" => "test-stream-name"
          },
          %{
            "Subject" => "Broadcast message: Thanos is here!",
            "To" => "\"Natasha Romanova\" <natasha.romanova@example.com>",
            "From" => "\"T Stark\" <tony.stark@example.com>",
            "TextBody" => "Assemble!",
            "HtmlBody" => "<h1>Assemble!</h1>",
            "MessageStream" => "test-stream-name"
          }
        ]
      }
      assert expected_body_params == conn.body_params
      # Non-template batches hit /email/batch (cf. /email/batchWithTemplates).
      assert "/email/batch" == conn.request_path
      assert "POST" == conn.method
      success_response = """
      [
        {
          "ErrorCode": 0,
          "Message": "OK",
          "MessageID": "b7bc2f4a-e38e-4336-af7d-e6c392c2f817",
          "SubmittedAt": "2010-11-26T12:01:05.1794748-05:00",
          "To": "steve.rogers@example.com"
        },
        {
          "ErrorCode": 0,
          "Message": "OK",
          "MessageID": "e2ecbbfc-fe12-463d-b933-9fe22915106d",
          "SubmittedAt": "2010-11-26T12:01:05.1794748-05:00",
          "To": "natasha.romanova@example.com"
        }
      ]
      """
      Plug.Conn.resp(conn, 200, success_response)
    end)
    # Per-message results come back in order, with id/error_code/message each.
    assert Postmark.deliver_many([email_to_steve, email_to_natasha], config) ==
             {:ok,
              [
                %{
                  id: "b7bc2f4a-e38e-4336-af7d-e6c392c2f817",
                  error_code: 0,
                  message: "OK"
                },
                %{
                  id: "e2ecbbfc-fe12-463d-b933-9fe22915106d",
                  error_code: 0,
                  message: "OK"
                }
              ]}
  end
test "deliver_many/2 with empty email list returns :ok" do
assert Postmark.deliver_many([], []) == {:ok, []}
end
# Happy-path batch delivery through the template endpoint: two templated
# e-mails on a custom message stream are POSTed as a single "Messages"
# payload, and one result map per message is returned.
test "deliver_many/2 with two emails using templates and custom stream returns :ok", %{
  bypass: bypass,
  config: config
} do
  # Shared substitution data for the "welcome" template.
  template_model = %{
    threat: "Thanos",
    company: "Avengers"
  }

  email_to_steve =
    new()
    |> from({"T Stark", "tony.stark@example.com"})
    |> to({"Steve Rogers", "steve.rogers@example.com"})
    |> put_provider_option(:template_alias, "welcome")
    |> put_provider_option(:template_model, template_model)
    |> put_provider_option(:message_stream, "test-stream-name")

  email_to_natasha =
    new()
    |> from({"T Stark", "tony.stark@example.com"})
    |> to({"Natasha Romanova", "natasha.romanova@example.com"})
    |> put_provider_option(:template_alias, "welcome")
    |> put_provider_option(:template_model, template_model)
    |> put_provider_option(:message_stream, "test-stream-name")

  # Stub the Postmark API: assert on the outgoing request (body, path, verb)
  # and answer with a canned two-entry success response.
  Bypass.expect(bypass, fn conn ->
    conn = parse(conn)

    expected_body_params = %{
      "Messages" => [
        %{
          "To" => "\"Steve Rogers\" <steve.rogers@example.com>",
          "From" => "\"T Stark\" <tony.stark@example.com>",
          "TemplateAlias" => "welcome",
          "TemplateModel" => %{
            "company" => "Avengers",
            "threat" => "Thanos"
          },
          "MessageStream" => "test-stream-name"
        },
        %{
          "To" => "\"Natasha Romanova\" <natasha.romanova@example.com>",
          "From" => "\"T Stark\" <tony.stark@example.com>",
          "TemplateAlias" => "welcome",
          "TemplateModel" => %{
            "company" => "Avengers",
            "threat" => "Thanos"
          },
          "MessageStream" => "test-stream-name"
        }
      ]
    }

    assert expected_body_params == conn.body_params
    # Template batches use the dedicated batch-with-templates endpoint.
    assert "/email/batchWithTemplates" == conn.request_path
    assert "POST" == conn.method

    success_response = """
    [
      {
        "ErrorCode": 0,
        "Message": "OK",
        "MessageID": "b7bc2f4a-e38e-4336-af7d-e6c392c2f817",
        "SubmittedAt": "2010-11-26T12:01:05.1794748-05:00",
        "To": "steve.rogers@example.com"
      },
      {
        "ErrorCode": 0,
        "Message": "OK",
        "MessageID": "e2ecbbfc-fe12-463d-b933-9fe22915106d",
        "SubmittedAt": "2010-11-26T12:01:05.1794748-05:00",
        "To": "natasha.romanova@example.com"
      }
    ]
    """

    Plug.Conn.resp(conn, 200, success_response)
  end)

  # One result map (id / error_code / message) per delivered e-mail.
  assert Postmark.deliver_many([email_to_steve, email_to_natasha], config) ==
           {:ok,
            [
              %{
                id: "b7bc2f4a-e38e-4336-af7d-e6c392c2f817",
                error_code: 0,
                message: "OK"
              },
              %{
                id: "e2ecbbfc-fe12-463d-b933-9fe22915106d",
                error_code: 0,
                message: "OK"
              }
            ]}
end
end
| 31.003552 | 114 | 0.556631 |
1c6dc153c580269ec1e4eecdc3afbefc858d700c | 840 | exs | Elixir | test/data/script_test.exs | shanesveller/ex_venture | 68507da11442a9e0423073fcd305e9021f649ca1 | [
"MIT"
] | null | null | null | test/data/script_test.exs | shanesveller/ex_venture | 68507da11442a9e0423073fcd305e9021f649ca1 | [
"MIT"
] | null | null | null | test/data/script_test.exs | shanesveller/ex_venture | 68507da11442a9e0423073fcd305e9021f649ca1 | [
"MIT"
] | null | null | null | defmodule Data.ScriptTest do
use Data.ModelCase

doctest Data.Script
doctest Data.Script.Line

alias Data.Script
alias Data.Script.Line

describe "validate the script" do
  test "must include a start key" do
    # A script whose only line is keyed "start" passes validation...
    starts_with_start = [%Line{key: "start", message: "Hi"}]
    assert Script.valid_script?(starts_with_start)

    # ...while a script with no "start" key is rejected.
    missing_start = [%Line{key: "end", message: "Hi"}]
    refute Script.valid_script?(missing_start)
  end

  test "each key must be present" do
    start_line = %Line{key: "start", message: "Hi", listeners: [%{phrase: "yes", key: "continue"}]}

    # Every key referenced by a listener must exist as a line in the script.
    assert Script.valid_script?([start_line, %Line{key: "continue", message: "Hi"}])

    # The listener points at "continue", which is absent here, so it fails.
    refute Script.valid_script?([start_line])
  end
end
end
| 26.25 | 91 | 0.609524 |
1c6df87207d6fdf610ea0ae6034570168dbbd7cb | 2,342 | exs | Elixir | test/oli/editing/utils_test.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 45 | 2020-04-17T15:40:27.000Z | 2022-03-25T00:13:30.000Z | test/oli/editing/utils_test.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 944 | 2020-02-13T02:37:01.000Z | 2022-03-31T17:50:07.000Z | test/oli/editing/utils_test.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 23 | 2020-07-28T03:36:13.000Z | 2022-03-17T14:29:02.000Z | defmodule Oli.Authoring.Editing.UtilsTest do
use ExUnit.Case, async: true

alias Oli.Authoring.Editing.Utils

describe "diffing content for activity reference changes" do
  test "diff_activity_references/2 finds additions and removals", _ do
    before_content = page([paragraph(), activity_ref(1)])
    after_content = page([paragraph(), activity_ref(2)])

    {additions, deletions} = Utils.diff_activity_references(before_content, after_content)

    # Activity 2 was introduced and activity 1 disappeared.
    assert MapSet.size(additions) == 1
    assert MapSet.member?(additions, 2)
    assert MapSet.size(deletions) == 1
    assert MapSet.member?(deletions, 1)
  end

  test "diff_activity_references/2 finds no changes", _ do
    before_content = page([paragraph(), activity_ref(2)])
    after_content = page([paragraph(), activity_ref(2)])

    {additions, deletions} = Utils.diff_activity_references(before_content, after_content)

    # Identical content: both diff sets are empty.
    assert MapSet.size(additions) == 0
    assert MapSet.size(deletions) == 0
  end

  test "diff_activity_references/2 finds several additions", _ do
    before_content = page([paragraph()])
    after_content = page([paragraph() | Enum.map(1..4, &activity_ref/1)])

    {additions, deletions} = Utils.diff_activity_references(before_content, after_content)

    # Four brand-new references, nothing removed.
    assert MapSet.size(additions) == 4
    assert MapSet.size(deletions) == 0
  end
end

# Wraps a list of content nodes in the page "model" envelope.
defp page(nodes), do: %{"model" => nodes}

# A plain paragraph node, identical in every fixture above.
defp paragraph, do: %{"type" => "content", "children" => [%{"text" => "A paragraph."}]}

# An activity-reference node pointing at `activity_id`.
defp activity_ref(activity_id), do: %{"type" => "activity-reference", "activity_id" => activity_id}
end
| 31.226667 | 81 | 0.535013 |
1c6e04c35ba0185ab0abbc6887f77be84ea29b52 | 935 | ex | Elixir | lib/doctor_schedule/accounts/services/send_forgot_password_to_email.ex | theguuholi/doctor_schedule | a92dfa92d1398c59718be2428d36bb326d6bc361 | [
"MIT"
] | 2 | 2022-03-11T12:15:01.000Z | 2022-03-11T13:53:21.000Z | lib/doctor_schedule/accounts/services/send_forgot_password_to_email.ex | theguuholi/doctor_schedule | a92dfa92d1398c59718be2428d36bb326d6bc361 | [
"MIT"
] | 3 | 2020-12-12T22:10:17.000Z | 2021-04-05T12:53:12.000Z | lib/doctor_schedule/accounts/services/send_forgot_password_to_email.ex | theguuholi/doctor_schedule | a92dfa92d1398c59718be2428d36bb326d6bc361 | [
"MIT"
] | 1 | 2021-02-26T04:24:34.000Z | 2021-02-26T04:24:34.000Z | defmodule DoctorSchedule.Accounts.Services.SendForgotPasswordToEmail do
use Bamboo.Phoenix, view: DoctorScheduleWeb.EmailView
import Bamboo.Email

alias DoctorSchedule.Accounts.Repositories.TokenRepository
alias DoctorSchedule.Shared.MailProvider.Mailer

# Base URL used to build the password-reset link.
# NOTE(review): hard-coded development host; consider moving to app config.
@host "http://localhost:4000"

@doc """
Generates a password-reset token for `email` and delivers the reset link.

Delivery happens in a separate process so the caller never blocks on the
mail provider. Returns `{:ok, user, token}` when a token was generated, or
`{:error, message}` otherwise.
"""
def execute(email) do
  case TokenRepository.generate(email) do
    {:error, msg} ->
      {:error, msg}

    {:ok, token, user} ->
      # Fire-and-forget: Task.start/1 (instead of the previous Task.async/1)
      # does not link the task to the caller and requires no await, so a
      # crash during mail delivery cannot bring down the calling process and
      # no unawaited task result is left behind.
      Task.start(fn -> send_email(token, user) end)
      {:ok, user, token}
  end
end

@doc """
Renders and sends the "forgot password" e-mail carrying the reset URL.
"""
def send_email(token, user) do
  url = "#{@host}/reset-password/#{token}"

  new_email()
  |> from({"Doctor Schedule Team", "adm@doctorschedule.com"})
  |> to({user.first_name, user.email})
  |> subject("[DOCTOR SCHEDULE] - Recuperacao de Senha ")
  |> assign(:data, %{name: user.first_name, url: url})
  |> render("password_forgot.html")
  |> Mailer.deliver_now()
end
end
| 26.714286 | 71 | 0.664171 |
1c6e0e3ef66a94763ba25191f54e898c61e475d7 | 3,003 | ex | Elixir | lib/task_bunny/initializer.ex | DylanReile/task_bunny | 3c23eb345e18daf9c16c6c295a18499eb8584469 | [
"MIT"
] | 1 | 2021-03-01T20:31:33.000Z | 2021-03-01T20:31:33.000Z | lib/task_bunny/initializer.ex | DylanReile/task_bunny | 3c23eb345e18daf9c16c6c295a18499eb8584469 | [
"MIT"
] | null | null | null | lib/task_bunny/initializer.ex | DylanReile/task_bunny | 3c23eb345e18daf9c16c6c295a18499eb8584469 | [
"MIT"
] | null | null | null | defmodule TaskBunny.Initializer do
# Handles initialization concerns.
#
# This module is private to TaskBunny and should not be accessed directly.
#
@moduledoc false
use GenServer
require Logger
alias TaskBunny.{Config, Queue}

# The GenServer state is a single boolean: whether the configured queues
# have already been declared.
@doc false
@spec start_link(boolean) :: GenServer.on_start
def start_link(initialized \\ false) do
  GenServer.start_link(__MODULE__, initialized, name: __MODULE__)
end

@doc false
def init(true) do
  # Already initialized. Nothing to do.
  {:ok, true}
end

# First boot: declare every queue listed in the configuration, then record
# success by holding `true` as the process state.
@doc false
@spec init(boolean) :: {:ok, boolean}
def init(false) do
  declare_queues_from_config()
  {:ok, true}
end

@doc """
Returns true if TaskBunny has been initialized
"""
@spec initialized?() :: boolean
def initialized? do
  # False when the process is not running at all; otherwise ask the process
  # for its boolean state.
  case Process.whereis(__MODULE__) do
    nil -> false
    pid -> GenServer.call(pid, :get_state)
  end
end

@doc """
Returns true if Initializer process exists
"""
@spec alive?() :: boolean
def alive? do
  Process.whereis(__MODULE__) != nil
end

@doc false
@spec handle_call(atom, {pid, term}, boolean) :: {:reply, boolean, boolean}
def handle_call(:get_state, _, state) do
  {:reply, state, state}
end

@doc false
@spec handle_info(any, boolean) :: {:noreply, boolean}
def handle_info({:connected, _conn}, false) do
  # This is called only on edge case where connection was disconnected.
  # Since the attempt of subscribe_connection is still valid, Connection
  # module will send a message.
  # Try to initialize here.
  declare_queues_from_config()
  {:noreply, true}
end

# Already initialized: ignore further connection notifications.
def handle_info({:connected, _conn}, state) do
  {:noreply, state}
end

@doc """
Loads config and declares queues listed
"""
@spec declare_queues_from_config() :: :ok
def declare_queues_from_config do
  Config.queues
  |> Enum.each(fn (queue) -> declare_queue(queue) end)
  :ok
end

# Declares one queue from its config entry. Waits up to 2 seconds for the
# host's connection; on timeout it only logs a warning, so application
# start-up is never blocked by a missing broker connection.
@spec declare_queue(map) :: :ok
defp declare_queue(queue_config) do
  queue = queue_config[:name]
  host = queue_config[:host] || :default
  TaskBunny.Connection.subscribe_connection(host, self())
  receive do
    {:connected, conn} -> declare_queue(conn, queue)
  after
    2_000 ->
      Logger.warn """
      TaskBunny.Initializer: Failed to get connection for #{host}.
      TaskBunny can't declare the queues but carries on.
      """
  end
  :ok
end

# Declares the queue (with its sub-queues) on the given connection. An exit
# from the declare (e.g. mismatched queue options) is caught and logged.
@spec declare_queue(AMQP.Connection.t, String.t) :: :ok
defp declare_queue(conn, queue) do
  Queue.declare_with_subqueues(conn, queue)
  :ok
catch
  :exit, e ->
    # Handles the error but we carry on...
    # It's highly likely caused by the options on queue declare don't match.
    # We carry on with error log.
    Logger.warn """
    TaskBunny.Initializer: Failed to declare queue for #{queue}.
    If you have changed the queue configuration, you have to delete the queue and create it again.
    Error: #{inspect e}
    """
    {:error, {:exit, e}}
end
end
| 25.235294 | 100 | 0.662671 |
1c6e2082a11632f6a09fc6218b1739753c54b37a | 99 | ex | Elixir | rust-to-elixir/grpc_server/lib/message/repo.ex | poad/grpc-example | d1b775f6d2e89279cd29191d5d4dbec265bf0bf0 | [
"Apache-2.0"
] | null | null | null | rust-to-elixir/grpc_server/lib/message/repo.ex | poad/grpc-example | d1b775f6d2e89279cd29191d5d4dbec265bf0bf0 | [
"Apache-2.0"
] | 64 | 2021-08-30T23:54:04.000Z | 2022-03-14T21:06:11.000Z | rust-to-elixir/grpc_server/lib/message/repo.ex | poad/grpc-example | d1b775f6d2e89279cd29191d5d4dbec265bf0bf0 | [
"Apache-2.0"
] | null | null | null | defmodule Message.Repo do
  # Ecto repository backed by MySQL via the MyXQL adapter; connection
  # settings are looked up under the :grpc_server OTP application config.
  use Ecto.Repo, adapter: Ecto.Adapters.MyXQL, otp_app: :grpc_server
end
| 24.75 | 68 | 0.787879 |
1c6e43776ec46c0a506956f2353e6c2b16e54eb9 | 2,872 | ex | Elixir | lib/docusign/model/title.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 4 | 2020-12-21T12:50:13.000Z | 2022-01-12T16:50:43.000Z | lib/docusign/model/title.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 12 | 2018-09-18T15:26:34.000Z | 2019-09-28T15:29:39.000Z | lib/docusign/model/title.ex | gaslight/docusign_elixir | d9d88d53dd85d32a39d537bade9db28d779414e6 | [
"MIT"
] | 15 | 2020-04-29T21:50:16.000Z | 2022-02-11T18:01:51.000Z | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule DocuSign.Model.Title do
  @moduledoc """
  A tab that displays the recipient's title.
  """

  # Allow Poison to encode this struct to JSON directly.
  @derive [Poison.Encoder]
  defstruct [
    :anchorCaseSensitive,
    :anchorHorizontalAlignment,
    :anchorIgnoreIfNotPresent,
    :anchorMatchWholeWord,
    :anchorString,
    :anchorUnits,
    :anchorXOffset,
    :anchorYOffset,
    :bold,
    :concealValueOnDocument,
    :conditionalParentLabel,
    :conditionalParentValue,
    :customTabId,
    :disableAutoSize,
    :documentId,
    :errorDetails,
    :font,
    :fontColor,
    :fontSize,
    :italic,
    :locked,
    :maxLength,
    :mergeField,
    :name,
    :originalValue,
    :pageNumber,
    :recipientId,
    :required,
    :status,
    :tabId,
    :tabLabel,
    :tabOrder,
    :templateLocked,
    :templateRequired,
    :underline,
    :value,
    :width,
    :xPosition,
    :yPosition
  ]

  # Generated model: most DocuSign tab attributes travel as strings; only
  # maxLength/width are integers, and errorDetails/mergeField hold nested
  # model structs (populated by this module's Poison.Decoder implementation).
  @type t :: %__MODULE__{
          :anchorCaseSensitive => String.t(),
          :anchorHorizontalAlignment => String.t(),
          :anchorIgnoreIfNotPresent => String.t(),
          :anchorMatchWholeWord => String.t(),
          :anchorString => String.t(),
          :anchorUnits => String.t(),
          :anchorXOffset => String.t(),
          :anchorYOffset => String.t(),
          :bold => String.t(),
          :concealValueOnDocument => String.t(),
          :conditionalParentLabel => String.t(),
          :conditionalParentValue => String.t(),
          :customTabId => String.t(),
          :disableAutoSize => String.t(),
          :documentId => String.t(),
          :errorDetails => ErrorDetails,
          :font => String.t(),
          :fontColor => String.t(),
          :fontSize => String.t(),
          :italic => String.t(),
          :locked => String.t(),
          :maxLength => integer(),
          :mergeField => MergeField,
          :name => String.t(),
          :originalValue => String.t(),
          :pageNumber => String.t(),
          :recipientId => String.t(),
          :required => String.t(),
          :status => String.t(),
          :tabId => String.t(),
          :tabLabel => String.t(),
          :tabOrder => String.t(),
          :templateLocked => String.t(),
          :templateRequired => String.t(),
          :underline => String.t(),
          :value => String.t(),
          :width => integer(),
          :xPosition => String.t(),
          :yPosition => String.t()
        }
end
defimpl Poison.Decoder, for: DocuSign.Model.Title do
  import DocuSign.Deserializer

  # Post-processes a JSON-decoded Title: converts the nested `errorDetails`
  # and `mergeField` maps into their corresponding model structs.
  def decode(value, options) do
    value
    |> deserialize(:errorDetails, :struct, DocuSign.Model.ErrorDetails, options)
    |> deserialize(:mergeField, :struct, DocuSign.Model.MergeField, options)
  end
end
| 27.352381 | 80 | 0.571727 |
1c6e4dca133812612da5fc3aa98e2e876b54fe56 | 3,179 | ex | Elixir | lib/x509/certificate/validity.ex | mobileoverlord/x509 | fdca52a0a4e1142316c05481c3125cf915cbd8aa | [
"BSD-3-Clause"
] | null | null | null | lib/x509/certificate/validity.ex | mobileoverlord/x509 | fdca52a0a4e1142316c05481c3125cf915cbd8aa | [
"BSD-3-Clause"
] | null | null | null | lib/x509/certificate/validity.ex | mobileoverlord/x509 | fdca52a0a4e1142316c05481c3125cf915cbd8aa | [
"BSD-3-Clause"
] | null | null | null | defmodule X509.Certificate.Validity do
@moduledoc """
Convenience functions for creating `:Validity` records for use in
certificates. The `:Validity` record represents the X.509 Validity
type, defining the validity of a certificate in terms of `notBefore`
and `notAfter` timestamps.
"""
import X509.ASN1

@typedoc "X.509 Time type (UTCTime or GeneralizedTime)"
@type time :: {:utcTime | :generalizedTime, charlist()}

@typedoc "`:Validity` record, as used in Erlang's `:public_key` module"
@opaque t :: X509.ASN1.record(:validity)

@default_backdate_seconds 5 * 60
@seconds_per_day 24 * 60 * 60

@doc """
Creates a new `:Validity` record with the given start and end timestamps
in DateTime format.

## Examples:

    iex> {:ok, not_before, 0} = DateTime.from_iso8601("2018-01-01T00:00:00Z")
    iex> {:ok, not_after, 0} = DateTime.from_iso8601("2018-12-31T23:59:59Z")
    iex> X509.Certificate.Validity.new(not_before, not_after)
    {:Validity, {:utcTime, '180101000000Z'}, {:utcTime, '181231235959Z'}}

    iex> {:ok, not_before, 0} = DateTime.from_iso8601("2051-01-01T00:00:00Z")
    iex> {:ok, not_after, 0} = DateTime.from_iso8601("2051-12-31T23:59:59Z")
    iex> X509.Certificate.Validity.new(not_before, not_after)
    {:Validity, {:generalizedTime, '20510101000000Z'},
     {:generalizedTime, '20511231235959Z'}}
"""
@spec new(DateTime.t(), DateTime.t()) :: t()
def new(%DateTime{} = not_before, %DateTime{} = not_after) do
  validity(notBefore: to_asn1(not_before), notAfter: to_asn1(not_after))
end

@doc """
Creates a new `:Validity` record with an `notAfter` value a given number of
days in the future. The `notBefore` value can be backdated (by default
#{@default_backdate_seconds} seconds) to avoid newly issued certificates
from being rejected by peers due to poorly synchronized clocks.

For CA certificates, consider using `new/2` instead, with a `not_before`
value that does not reveal the exact time when the keypair was generated.
This minimizes information leakage about the state of the RNG.
"""
@spec days_from_now(pos_integer(), non_neg_integer()) :: t()
def days_from_now(days, backdate_seconds \\ @default_backdate_seconds) do
  starts_at = shift(DateTime.utc_now(), -backdate_seconds)
  new(starts_at, shift(starts_at, days * @seconds_per_day))
end

# Moves `datetime` by `delta` seconds (negative values go backwards). Going
# through Unix time deliberately drops sub-second precision, keeping the
# ISO 8601 basic form compatible with the patterns in `to_asn1/1`.
defp shift(datetime, delta) do
  (DateTime.to_unix(datetime) + delta) |> DateTime.from_unix!()
end

# Encodes a DateTime as ASN.1 UTCTime (YYMMDDHHMMSSZ) for years before 2050:
# the century digits of the ISO 8601 basic form are dropped.
defp to_asn1(%DateTime{year: year} = datetime) when year < 2050 do
  <<_century::binary-size(2), date::binary-size(6), "T", time::binary-size(6), "Z">> =
    DateTime.to_iso8601(datetime, :basic)

  {:utcTime, '#{date}#{time}Z'}
end

# Years from 2050 onwards use GeneralizedTime (YYYYMMDDHHMMSSZ).
defp to_asn1(datetime) do
  <<date::binary-size(8), "T", time::binary-size(6), "Z">> =
    DateTime.to_iso8601(datetime, :basic)

  {:generalizedTime, '#{date}#{time}Z'}
end
end
| 36.54023 | 79 | 0.685121 |
1c6e64b847cac895208f1b63e628ff4c1bb78b6b | 802 | ex | Elixir | lib/problem003.ex | lewapkon/eulixir | 990017cdccee7cd508269b7036e290ec777aea3d | [
"MIT"
] | null | null | null | lib/problem003.ex | lewapkon/eulixir | 990017cdccee7cd508269b7036e290ec777aea3d | [
"MIT"
] | null | null | null | lib/problem003.ex | lewapkon/eulixir | 990017cdccee7cd508269b7036e290ec777aea3d | [
"MIT"
] | null | null | null | defmodule Eulixir.Problem003 do
@moduledoc """
http://projecteuler.net/problem=3
The prime factors of 13195 are 5, 7, 13 and 29.
What is the largest prime factor of the number 600851475143 ?
"""

@doc """
iex> Eulixir.Problem003.solve(13_195)
29
"""
def solve(n) do
  n |> factorize() |> List.first()
end

@doc """
iex> Eulixir.Problem003.factorize(13_195)
[29, 13, 7, 5]
"""
def factorize(n), do: trial_division(n, 2, [])

# Repeated trial division: each divisor is divided out completely before
# moving on, so only prime factors are ever accumulated (largest first).
defp trial_division(n, divisor, primes) when n < divisor, do: primes

defp trial_division(n, divisor, primes) do
  if rem(n, divisor) == 0 do
    trial_division(div(n, divisor), divisor, [divisor | primes])
  else
    trial_division(n, divisor + 1, primes)
  end
end

def solution do
  solve(600_851_475_143)
end
end
| 19.560976 | 65 | 0.625935 |
1c6e667e2e1d764a16d1927686ca2df7d6c0bbec | 710 | ex | Elixir | lib/openflow/actions/set_nw_ttl.ex | shun159/tres | 1e3e7f78ba1aa4f184d4be70300e5f4703d50a2f | [
"Beerware"
] | 5 | 2019-05-25T02:25:13.000Z | 2020-10-06T17:00:03.000Z | lib/openflow/actions/set_nw_ttl.ex | shun159/tres | 1e3e7f78ba1aa4f184d4be70300e5f4703d50a2f | [
"Beerware"
] | 5 | 2018-03-29T14:42:10.000Z | 2019-11-19T07:03:09.000Z | lib/openflow/actions/set_nw_ttl.ex | shun159/tres | 1e3e7f78ba1aa4f184d4be70300e5f4703d50a2f | [
"Beerware"
] | 1 | 2019-03-30T20:48:27.000Z | 2019-03-30T20:48:27.000Z | defmodule Openflow.Action.SetNwTtl do
@moduledoc """
Set IP TTL
"""

defstruct ttl: 0

alias __MODULE__

@type t :: %SetNwTtl{ttl: 0..0xFF}

# OpenFlow action type code for set_nw_ttl and the fixed length of its
# wire representation (in bytes).
@ofpat_set_nw_ttl 23
@action_length 8

@spec ofpat() :: 23
def ofpat, do: @ofpat_set_nw_ttl

@doc """
Create a new set_nw_ttl action struct

## Options:
- IP TTL

```elixir
iex> %SetNwTtl{ttl: 64} = SetNwTtl.new(_ttl = 64)
```
"""
@spec new(ttl :: 0..0xFF) :: t()
def new(ip_ttl), do: %SetNwTtl{ttl: ip_ttl}

# Serializes the action: type (16 bits), length (16 bits), TTL (8 bits),
# followed by three zero padding bytes.
@spec to_binary(t()) :: <<_::16, _::_*8>>
def to_binary(%SetNwTtl{ttl: ip_ttl}) do
  <<@ofpat_set_nw_ttl::16, @action_length::16, ip_ttl::8, 0::size(3)-unit(8)>>
end

# Parses the 8-byte wire form back into a struct, ignoring the padding.
@spec read(<<_::16, _::_*8>>) :: t()
def read(<<@ofpat_set_nw_ttl::16, @action_length::16, ip_ttl::8, _pad::size(3)-unit(8)>>) do
  %SetNwTtl{ttl: ip_ttl}
end
end
| 17.75 | 60 | 0.550704 |
1c6ea1e6cbcdda5d2464f5115efc0951e5c8623a | 1,445 | ex | Elixir | apps/diet_web/lib/diet_web/channels/user_socket.ex | isavita/diet-umbrella | 0eee4d0dc4d9567888e4b69ccc7993e95d95ed29 | [
"MIT"
] | 1 | 2020-06-01T21:25:54.000Z | 2020-06-01T21:25:54.000Z | apps/diet_web/lib/diet_web/channels/user_socket.ex | isavita/diet-umbrella | 0eee4d0dc4d9567888e4b69ccc7993e95d95ed29 | [
"MIT"
] | 221 | 2019-07-20T17:20:49.000Z | 2021-08-02T06:21:10.000Z | apps/diet_web/lib/diet_web/channels/user_socket.ex | isavita/diet-umbrella | 0eee4d0dc4d9567888e4b69ccc7993e95d95ed29 | [
"MIT"
] | null | null | null | defmodule DietWeb.UserSocket do
use Phoenix.Socket

## Channels
# channel "room:*", DietWeb.RoomChannel
channel "videos:*", DietWeb.VideoChannel

# Tokens older than this are rejected (two weeks, in seconds).
@max_age 2 * 7 * 24 * 60 * 60

# Authenticates the connection: the client passes a Phoenix.Token in its
# params; on successful verification the user id is stored on the socket
# assigns. An invalid or expired token denies the connection.
def connect(%{"token" => token}, socket, _connect_info) do
  with {:ok, user_id} <- Phoenix.Token.verify(socket, "user socket", token, max_age: @max_age) do
    {:ok, assign(socket, :user_id, user_id)}
  else
    {:error, _reason} -> :error
  end
end

# Any connection attempt without a token is refused.
def connect(_params, _socket, _connect_info), do: :error

# Socket id's are topics that identify all sockets for a given user, which
# allows broadcasting a "disconnect" to terminate all of that user's
# sockets, e.g.:
#
#     DietWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
def id(socket), do: "user_socket:#{socket.assigns.user_id}"
end
| 30.744681 | 83 | 0.67128 |
1c6ecc59c381bca3091ab81ad7019862cc9150f1 | 1,184 | ex | Elixir | lib/beanie.ex | anthonyfalzetti/beanie | 72ab49e6c058a21c24b0be151744fb8d525164aa | [
"MIT"
] | 2 | 2018-07-08T10:38:34.000Z | 2020-01-14T03:48:30.000Z | lib/beanie.ex | anthonyfalzetti/beanie | 72ab49e6c058a21c24b0be151744fb8d525164aa | [
"MIT"
] | 11 | 2016-09-29T20:41:59.000Z | 2017-11-13T04:03:14.000Z | lib/beanie.ex | anthonyfalzetti/beanie | 72ab49e6c058a21c24b0be151744fb8d525164aa | [
"MIT"
] | 8 | 2016-10-03T14:20:43.000Z | 2018-10-17T01:12:44.000Z | defmodule Beanie do
use Application

# OTP application entry point: boots the supervision tree.
# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications.
def start(_type, _args) do
  import Supervisor.Spec

  # Supervised children: the Ecto repository and the Phoenix endpoint.
  # Additional workers can be appended here, e.g. worker(Beanie.Worker, [...]).
  children = [
    supervisor(Beanie.Repo, []),
    supervisor(Beanie.Endpoint, [])
  ]

  # Restart crashed children individually (:one_for_one).
  Supervisor.start_link(children, strategy: :one_for_one, name: Beanie.Supervisor)
end

# Looks up the configured Docker registry backend as [function, args] and
# invokes it on Beanie.RegistryAPI.Registry.
def registry do
  [registry_fun, registry_args] = Application.get_env(:beanie, :docker_registry)
  apply(Beanie.RegistryAPI.Registry, registry_fun, registry_args)
end

# Tell Phoenix to update the endpoint configuration whenever the
# application is updated.
def config_change(changed, _new, removed) do
  Beanie.Endpoint.config_change(changed, removed)
  :ok
end
end
| 32 | 84 | 0.711993 |
1c6eef56220ce67ca2551ee88e86d2ff23a9f742 | 2,027 | ex | Elixir | dpp/lib/collada/collada.ex | XiaoxiaoZ/Distributed-path-planner | a7467e08e60248793dc55624497fd91bdb45ca14 | [
"MIT"
] | null | null | null | dpp/lib/collada/collada.ex | XiaoxiaoZ/Distributed-path-planner | a7467e08e60248793dc55624497fd91bdb45ca14 | [
"MIT"
] | 6 | 2021-12-12T13:23:17.000Z | 2021-12-17T12:03:46.000Z | dpp/lib/collada/collada.ex | XiaoxiaoZ/Distributed-path-planner | a7467e08e60248793dc55624497fd91bdb45ca14 | [
"MIT"
] | null | null | null | defmodule COLLADA do
@moduledoc """
Provides functions for importing geometries from COLLADA (.dae) files.
"""
import SweetXml

@doc """
Reads a robot description (COLLADA file) from `path`, relative to the
current working directory.

TODO: develop information model for robot (parsing the visual scene into
base link / translate / rotate data is not implemented yet).

## Parameters
- path: COLLADA file path

## Examples

    iex> {:ok, xmldoc} = import_robot("/test/irb6640.dae")

"""
@spec import_robot(String.t()) :: {:ok, binary()}
def import_robot(path) do
  # Raises MatchError when the file cannot be read; callers get {:ok, doc}.
  {:ok, _xmldoc} = File.read(Path.join(File.cwd!(), path))
end

@doc """
Imports an environment (COLLADA) into an environment type.

TODO: not implemented yet; currently returns `nil`.
"""
def import_env do
end

@doc """
Extracts the geometries from a COLLADA XML document.

Returns one map per `<geometry>` element containing `:name`, `:offset`,
`:set`, a float list under `:positions` and an integer index list under
`:triangles`.
"""
@spec get_geometries(binary()) :: [map()]
def get_geometries(xmldoc) do
  xmldoc
  |> xpath(
    ~x'//library_geometries/geometry'l,
    name: ~x'./@id',
    positions: ~x'./mesh/source/float_array/text()'s,
    triangles: ~x'./mesh/triangles/p/text()'s,
    offset: ~x'./mesh/triangles/input/@offset'i,
    set: ~x'./mesh/triangles/input/@set'i
  )
  |> Enum.map(&parse_number_lists/1)
end

# Replaces the whitespace-separated number strings of a geometry with a
# float list (:positions) and an integer list (:triangles).
defp parse_number_lists(geometry) do
  %{
    geometry
    | positions: parse_list(geometry.positions, &Float.parse/1),
      triangles: parse_list(geometry.triangles, &Integer.parse/1)
  }
end

# Splits `text` on whitespace and parses every token with `parser`
# (Float.parse/1 or Integer.parse/1), keeping only the numeric value.
defp parse_list(text, parser) do
  text
  |> String.split()
  |> Enum.map(fn token -> token |> parser.() |> elem(0) end)
end
end | 34.355932 | 143 | 0.52294 |
1c6eff1b2f134ad05501a371b3802f9bbec91e8c | 972 | ex | Elixir | lib/bot/structs.ex | ygunayer/potcu | d7b82ad9227161c93aedec03c4a1902c1d43780b | [
"MIT"
] | 1 | 2020-05-14T18:52:05.000Z | 2020-05-14T18:52:05.000Z | lib/bot/structs.ex | ygunayer/potcu | d7b82ad9227161c93aedec03c4a1902c1d43780b | [
"MIT"
] | null | null | null | lib/bot/structs.ex | ygunayer/potcu | d7b82ad9227161c93aedec03c4a1902c1d43780b | [
"MIT"
] | null | null | null | defmodule Potcu.Bot.Structs do
alias Nostrum.Snowflake
alias Nostrum.Struct.Guild.Voice
defmodule VoiceStatus do
  @moduledoc "Tracks the bot's voice-channel connection lifecycle."

  defstruct [:status, :channel_id, :session_id, :server]

  @type status :: :not_connected | :preparing | :connecting | :connected
  @type channel_id :: Snowflake.t() | nil
  @type session_id :: Snowflake.t() | nil
  @type server :: Voice.Server.t() | nil

  # Fix: `status` is a struct field but was previously missing from t();
  # `server` now references the type (with parens) rather than a bare name.
  @type t :: %__MODULE__{
          status: status(),
          session_id: session_id(),
          channel_id: channel_id(),
          server: server()
        }
end
defmodule BombStatus do
  @moduledoc "Tracks the countdown/bombing state and its media source."

  defstruct [:status, :target, :timer, :media]

  @type status :: :idle | :counting_down | :bombing
  @type target :: Snowflake.t() | nil
  @type timer :: reference() | nil
  @type media :: {:url, String.t()} | {:path, String.t()} | nil

  # Added a t() covering all struct fields, matching VoiceStatus.
  @type t :: %__MODULE__{
          status: status(),
          target: target(),
          timer: timer(),
          media: media()
        }
end
defmodule StateData do
  @moduledoc "Per-guild state carried by the bot process."

  defstruct [:guild_id, :status, :voice, :bomb]

  @type guild_id :: Snowflake.t()
  # Fix: these previously referenced the bare module names (which in a
  # typespec mean the module *atom*), not the struct types themselves.
  @type voice :: VoiceStatus.t()
  @type bomb :: BombStatus.t()

  @type t :: %__MODULE__{
          guild_id: guild_id(),
          status: term(),
          voice: voice(),
          bomb: bomb()
        }
end
end
| 28.588235 | 74 | 0.639918 |
1c6f18f3eb4bf3332db56050754be998d63afd97 | 21,032 | ex | Elixir | elixir/codes-from-books/little-elixir/cap8/blitzy/deps/combine/lib/combine/parsers/base.ex | trxeste/wrk | 3e05e50ff621866f0361cc8494ce8f6bb4d97fae | [
"BSD-3-Clause"
] | 1 | 2017-10-16T03:00:50.000Z | 2017-10-16T03:00:50.000Z | elixir/codes-from-books/little-elixir/cap8/blitzy/deps/combine/lib/combine/parsers/base.ex | trxeste/wrk | 3e05e50ff621866f0361cc8494ce8f6bb4d97fae | [
"BSD-3-Clause"
] | null | null | null | elixir/codes-from-books/little-elixir/cap8/blitzy/deps/combine/lib/combine/parsers/base.ex | trxeste/wrk | 3e05e50ff621866f0361cc8494ce8f6bb4d97fae | [
"BSD-3-Clause"
] | null | null | null | defmodule Combine.Parsers.Base do
@moduledoc """
This module defines common abstract parsers, i.e. ignore, repeat, many, etc.
To use them, just add `import Combine.Parsers.Base` to your module, or
reference them directly.
"""
alias Combine.ParserState
use Combine.Helpers
@type predicate :: (term -> boolean)
@type transform :: (term -> term)
@type transform2 :: ((term, term) -> term)
@doc """
This parser will fail with no error.
"""
@spec zero(previous_parser) :: parser
defparser zero(%ParserState{status: :ok} = state), do: %{state | :status => :error, :error => nil}
@doc """
This parser will fail with the given error message.
"""
@spec fail(previous_parser, String.t) :: parser
defparser fail(%ParserState{status: :ok} = state, message), do: %{state | :status => :error, :error => message}
@doc """
This parser will fail fatally with the given error message.
"""
@spec fatal(previous_parser, String.t) :: parser
defparser fatal(%ParserState{status: :ok} = state, message), do: %{state | :status => :error, :error => {:fatal, message}}
@doc """
This parser succeeds if the end of the input has been reached,
otherwise it fails.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse(" ", spaces |> eof)
[" "]
"""
@spec eof(previous_parser) :: parser
defparser eof(%ParserState{status: :ok, input: <<>>} = state), do: state
defp eof_impl(%ParserState{status: :ok, line: line, column: col} = state) do
%{state | :status => :error, :error => "Expected end of input at line #{line}, column #{col}"}
end
@doc """
Applies a transformation function to the result of the given parser. If the
result returned is of the form `{:error, reason}`, the parser will fail with
that reason.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("1234", map(integer, &(&1 * 2)))
[2468]
"""
@spec map(previous_parser, parser, transform) :: parser
defparser map(%ParserState{status: :ok} = state, parser, transform) do
case parser.(state) do
%ParserState{status: :ok, results: [h|rest]} = s ->
case transform.(h) do
{:error, reason} -> %{s | :status => :error, :error => reason}
result -> %{s | :results => [result|rest]}
end
s -> s
end
end
@doc """
Applies parser if possible. Returns the parse result if successful
or nil if not.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("Hi", option(integer) |> word)
[nil, "Hi"]
"""
@spec option(previous_parser, parser) :: parser
defparser option(%ParserState{status: :ok, results: results} = state, parser) when is_function(parser, 1) do
case parser.(state) do
%ParserState{status: :ok} = s -> s
%ParserState{status: :error} -> %{state | :results => [nil|results]}
end
end
@doc """
Tries to apply `parser1` and if it fails, tries `parser2`, if both fail,
then this parser fails. Returns whichever result was successful otherwise.
# Example
iex> import #{__MODULE__}
iex> import Combine.Parsers.Text
...> Combine.parse("1234", either(float, integer))
[1234]
"""
@spec either(previous_parser, parser, parser) :: parser
defparser either(%ParserState{status: :ok} = state, parser1, parser2) do
case parser1.(state) do
%ParserState{status: :ok} = s1 -> s1
%ParserState{error: error1} ->
case parser2.(state) do
%ParserState{status: :ok} = s2 -> s2
%ParserState{error: error2} ->
%{state | :status => :error, :error => "#{error1}, or: #{error2}"}
end
end
end
@doc """
This parser is a generalized form of either which allows multiple parsers to be attempted.
# Example
iex> import #{__MODULE__}
iex> import Combine.Parsers.Text
...> Combine.parse("test", choice([float, integer, word]))
["test"]
"""
@spec choice(previous_parser, [parser]) :: parser
defparser choice(%ParserState{status: :ok} = state, parsers) do
try_choice(parsers, state, nil)
end
defp try_choice([parser|rest], state, nil), do: try_choice(rest, state, parser.(state))
defp try_choice([_|_], _, %ParserState{status: :ok} = success), do: success
defp try_choice([parser|rest], state, %ParserState{}), do: try_choice(rest, state, parser.(state))
defp try_choice([], _, %ParserState{status: :ok} = success), do: success
defp try_choice([], %ParserState{line: line, column: col} = state, _) do
%{state | :status => :error, :error => "Expected at least one parser to succeed at line #{line}, column #{col}."}
end
@doc """
Applies each parser in `parsers`, then sends the results to the provided function
to be transformed. The result of the transformation is the final result of this parser.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("123", pipe([digit, digit, digit], fn digits -> {n, _} = Integer.parse(Enum.join(digits)); n end))
[123]
"""
@spec pipe(previous_parser, [parser], transform) :: parser
# Runs each parser in order against a scratch (empty) result list, then hands
# the collected results (in application order) to `transform`; the transformed
# value becomes the single result pushed onto the original result stack.
# On the first parser failure, the failing state is returned unchanged.
defparser pipe(%ParserState{status: :ok} = state, parsers, transform) when is_list(parsers) and is_function(transform, 1) do
  # Preserve the caller's results; do_pipe works on an isolated list.
  orig_results = state.results
  case do_pipe(parsers, %{state | :results => []}) do
    {:ok, acc, %ParserState{status: :ok} = new_state} ->
      # acc is accumulated in reverse application order.
      transformed = transform.(Enum.reverse(acc))
      %{new_state | :results => [transformed | orig_results]}
    {:error, _acc, state} ->
      state
  end
end
# Applies parsers one at a time, accumulating their results (reversed).
defp do_pipe(parsers, state), do: do_pipe(parsers, state, [])
# All parsers consumed -> success with the accumulated results.
defp do_pipe([], state, acc), do: {:ok, acc, state}
defp do_pipe([parser|parsers], %ParserState{status: :ok} = current, acc) do
  # Each parser runs against an empty result list so only its own output is
  # inspected; results tagged :__ignore are dropped from the accumulator.
  case parser.(%{current | :results => []}) do
    %ParserState{status: :ok, results: [:__ignore]} = next -> do_pipe(parsers, %{next | :results => []}, acc)
    %ParserState{status: :ok, results: []} = next -> do_pipe(parsers, next, acc)
    %ParserState{status: :ok, results: rs} = next -> do_pipe(parsers, %{next | :results => []}, rs ++ acc)
    %ParserState{} = next -> {:error, acc, next}
  end
end
# A parser failed -> abort with the partial accumulator and the error state.
defp do_pipe(_parsers, %ParserState{} = state, acc), do: {:error, acc, state}
@doc """
Applies a sequence of parsers and returns their results as a list.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("123", sequence([digit, digit, digit]))
[[1, 2, 3]]
...> Combine.parse("123-234", sequence([integer, char]) |> map(sequence([integer]), fn [x] -> x * 2 end))
[[123, "-"], 468]
"""
@spec sequence(previous_parser, [parser]) :: parser
# Runs the parsers in order and collects their results into a single list by
# delegating to pipe/2 with the identity transform.
defparser sequence(%ParserState{status: :ok} = state, parsers) when is_list(parsers) do
  pipe(parsers, &(&1)).(state)
end
@doc """
Applies `parser1` and `parser2` in sequence, then sends their results
to the given function to be transformed. The transformed value is then
returned as the result of this parser.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> to_int = fn ("-", y) -> y * -1; (_, y) -> y end
...> Combine.parse("1234-234", both(integer, both(char, integer, to_int), &(&1 + &2)))
[1000]
"""
@spec both(previous_parser, parser, parser, transform2) :: parser
# Runs parser1 then parser2 and combines their two results with the binary
# `transform` function (the result list is applied as its two arguments).
defparser both(%ParserState{status: :ok} = state, parser1, parser2, transform) do
  pipe([parser1, parser2], fn results -> apply(transform, results) end).(state)
end
@doc """
Applies both `parser1` and `parser2`, returning the result of `parser1` only.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("234-", pair_left(integer, char))
[234]
"""
@spec pair_left(previous_parser, parser, parser) :: parser
# Runs both parsers in sequence but keeps only the first parser's result.
defparser pair_left(%ParserState{status: :ok} = state, parser1, parser2) do
  pipe([parser1, parser2], fn [result1, _] -> result1 end).(state)
end
@doc """
Applies both `parser1` and `parser2`, returning the result of `parser2` only.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("-234", pair_right(char, integer))
[234]
"""
@spec pair_right(previous_parser, parser, parser) :: parser
# Runs both parsers in sequence but keeps only the second parser's result.
defparser pair_right(%ParserState{status: :ok} = state, parser1, parser2) do
  pipe([parser1, parser2], fn [_, result2] -> result2 end).(state)
end
@doc """
Applies both `parser1` and `parser2`, returning both results as a tuple.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("-234", pair_both(char, integer))
[{"-", 234}]
"""
@spec pair_both(previous_parser, parser, parser) :: parser
# Runs both parsers in sequence and returns their results as a 2-tuple.
defparser pair_both(%ParserState{status: :ok} = state, parser1, parser2) do
  pipe([parser1, parser2], fn [result1, result2] -> {result1, result2} end).(state)
end
@doc """
Applies `parser1`, `parser2`, and `parser3` in sequence, returning the result
of `parser2`.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("(234)", between(char("("), integer, char(")")))
[234]
"""
@spec between(previous_parser, parser, parser, parser) :: parser
# Runs three parsers in sequence (open delimiter, content, close delimiter)
# and keeps only the middle result.
defparser between(%ParserState{status: :ok} = state, parser1, parser2, parser3) do
  pipe([parser1, parser2, parser3], fn [_, result, _] -> result end).(state)
end
@doc """
Applies `parser` to the input `n` many times. Returns the result as a list.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("123", times(digit, 3))
[[1,2,3]]
"""
@spec times(previous_parser, parser, pos_integer) :: parser
# Applies `parser` exactly `n` times, pushing the collected results (in
# application order) as a single list onto the result stack. Any failing
# application fails the whole parser.
defparser times(%ParserState{status: :ok} = state, parser, n) when is_function(parser, 1) and is_integer(n) do
  case do_times(n, parser, state) do
    {:ok, acc, %ParserState{status: :ok, results: rs} = new_state} ->
      # acc is accumulated in reverse order.
      res = Enum.reverse(acc)
      %{new_state | :results => [res | rs]}
    {:error, _acc, state} ->
      state
  end
end
# Counting loop for times/3: applies `parser` `count` times, accumulating
# results (reversed). Results tagged :__ignore are dropped.
defp do_times(count, parser, state), do: do_times(count, parser, state, [])
# All repetitions done -> success.
defp do_times(0, _parser, state, acc), do: {:ok, acc, state}
defp do_times(count, parser, %ParserState{status: :ok} = current, acc) do
  case parser.(current) do
    %ParserState{status: :ok, results: [:__ignore|rs]} = next -> do_times(count - 1, parser, %{next | :results => rs}, acc)
    %ParserState{status: :ok, results: []} = next -> do_times(count - 1, parser, next, acc)
    %ParserState{status: :ok, results: [last|rs]} = next -> do_times(count - 1, parser, %{next | :results => rs}, [last|acc])
    %ParserState{} = next -> {:error, acc, next}
  end
end
# An application failed before the count was reached -> abort.
defp do_times(_count, _parser, %ParserState{} = state, acc), do: {:error, acc, state}
@doc """
Applies `parser` one or more times. Returns results as a list.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("abc", many1(char))
[["a", "b", "c"]]
...> Combine.parse("abc", many1(ignore(char)))
[[]]
...> Combine.parse("12abc", digit |> digit |> many1(ignore(char)))
[1, 2, []]
"""
@spec many1(previous_parser, parser) :: parser
# Applies `parser` one or more times, pushing the collected results as a
# single list onto the initial result stack. If the very first application
# fails, that error state is returned; otherwise looping stops at the first
# failure and everything gathered so far is kept.
defparser many1(%ParserState{status: :ok, results: initial_results} = state, parser) when is_function(parser, 1) do
  case many1_loop(0, [], state, parser.(state), parser) do
    {results, %ParserState{status: :ok} = s} ->
      # Accumulated in reverse; restore application order.
      results = Enum.reverse(results)
      %{s | :results => [results|initial_results]}
    %ParserState{} = s -> s
  end
end
# Loop for many1/2. Arguments: iteration count, accumulated results, last
# good state, the state returned by the latest parser application, parser.
#
# Very first application failed -> propagate the error (many1 needs >= 1).
defp many1_loop(0, _, _, %ParserState{status: :error} = err, _parser),
  do: err
# Parser succeeded but produced no result -> keep looping, nothing to keep.
defp many1_loop(iteration, acc, _last, %ParserState{status: :ok, results: []} = s, parser),
  do: many1_loop(iteration + 1, acc, s, parser.(s), parser)
# Parser produced an ignored result -> drop the :__ignore marker, continue.
defp many1_loop(iteration, acc, _last, %ParserState{status: :ok, results: [:__ignore|rs]} = s, parser),
  do: many1_loop(iteration + 1, acc, s, parser.(%{s | :results => rs}), parser)
# Parser produced a result -> accumulate it (reversed; caller re-reverses).
defp many1_loop(iteration, acc, _last, %ParserState{status: :ok, results: [h|rs]} = s, parser),
  do: many1_loop(iteration + 1, [h|acc], s, parser.(%{s | :results => rs}), parser)
# A later application failed -> stop with what was accumulated and the last
# good state.
defp many1_loop(_, acc, s, %ParserState{status: :error}, _parser),
  do: {acc, s}
@doc """
Applies `parser` zero or more times. Returns results as a list.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("abc", many(char))
[["a", "b", "c"]]
...> Combine.parse("", many(char))
[[]]
"""
@spec many(previous_parser, parser) :: parser
# Zero-or-more variant of many1: delegates to many1/1 and, when that fails
# (zero matches), restores the original state with an empty-list result.
defparser many(%ParserState{status: :ok, results: results} = state, parser) when is_function(parser, 1) do
  case many1(parser).(state) do
    %ParserState{status: :ok} = s -> s
    %ParserState{status: :error} -> %{state | :results => [[] | results]}
  end
end
@doc """
Applies `parser1` one or more times, separated by `parser2`. Returns
results of `parser1` in a list.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("1, 2, 3", sep_by1(digit, string(", ")))
[[1, 2, 3]]
"""
@spec sep_by1(previous_parser, parser, parser) :: parser
# One-or-more `parser1` matches separated by `parser2`: the head match plus
# every subsequent (separator, element) pair, with separators discarded.
defparser sep_by1(%ParserState{status: :ok} = state, parser1, parser2) do
  pipe([parser1, many(pair_right(parser2, parser1))], fn [h, t] -> [h|t] end).(state)
end
@doc """
Applies `parser1` zero or more times, separated by `parser2`. Returns
results of `parser1` in a list.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("1, 2, 3", sep_by(digit, string(", ")))
[[1, 2, 3]]
...> Combine.parse("", sep_by(digit, string(", ")))
[[]]
"""
@spec sep_by(previous_parser, parser, parser) :: parser
# Zero-or-more variant of sep_by1. Delegates to sep_by1_impl/3 — presumably
# the implementation function generated for sep_by1 by the defparser macro
# (confirm against the macro definition) — and substitutes an empty-list
# result when no element matches.
defparser sep_by(%ParserState{status: :ok, results: results} = state, parser1, parser2)
  when is_function(parser1, 1) and is_function(parser2, 1) do
  case sep_by1_impl(state, parser1, parser2) do
    %ParserState{status: :ok} = s -> s
    %ParserState{status: :error} -> %{state | :results => [[] | results]}
  end
end
@doc """
Applies `parser` if possible, ignores the result.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse(" abc", skip(spaces) |> word)
["abc"]
...> Combine.parse("", skip(spaces))
[]
"""
@spec skip(previous_parser, parser) :: parser
# Optionally applies `parser` and discards its result entirely. Uses
# ignore_impl/2 (presumably generated by defparser for ignore — confirm)
# over an optional parser, then strips the leading :__ignore marker so no
# placeholder remains in the results.
defparser skip(%ParserState{status: :ok} = state, parser) when is_function(parser, 1) do
  case ignore_impl(state, option(parser)) do
    %ParserState{status: :ok, results: [:__ignore|rs]} = s ->
      %{s | :results => rs}
    %ParserState{} = s ->
      s
  end
end
@doc """
Applies `parser` zero or more times, ignores the result.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse(" abc", skip_many(space) |> word)
["abc"]
...> Combine.parse("", skip_many(space))
[]
"""
@spec skip_many(previous_parser, parser) :: parser
# Applies `parser` zero or more times, discarding all results
# (ignore over many).
defparser skip_many(%ParserState{status: :ok} = state, parser) when is_function(parser, 1) do
  ignore_impl(state, many(parser))
end
@doc """
Applies `parser` one or more times, ignores the result.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse(" abc", skip_many1(space) |> word)
["abc"]
...> Combine.parse("", skip_many1(space))
{:error, "Expected space, but hit end of input."}
"""
@spec skip_many1(previous_parser, parser) :: parser
# Applies `parser` one or more times, discarding all results; fails if the
# first application fails (ignore over many1).
defparser skip_many1(%ParserState{status: :ok} = state, parser) when is_function(parser, 1) do
  ignore_impl(state, many1(parser))
end
@doc """
This parser will apply the given parser to the input, and if successful,
will ignore the parse result. If the parser fails, this one fails as well.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> parser = ignore(char("h"))
...> Combine.parse("h", parser)
[]
...> parser = char("h") |> char("i") |> ignore(space) |> char("!")
...> Combine.parse("hi !", parser)
["h", "i", "!"]
"""
@spec ignore(previous_parser, parser) :: parser
# Applies `parser`; on success replaces its topmost result with the
# :__ignore marker so combinators like pipe/many/times drop it from their
# accumulated results. Failure states pass through untouched.
defparser ignore(%ParserState{status: :ok} = state, parser) when is_function(parser, 1) do
  case parser.(state) do
    %ParserState{status: :ok, results: [_|t]} = s -> %{s | :results => [:__ignore|t]}
    %ParserState{} = s -> s
  end
end
@doc """
This parser applies the given parser, and if successful, passes the result to
the predicate for validation. If either the parser or the predicate assertion fail,
this parser fails.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> parser = satisfy(char, fn x -> x == "H" end)
...> Combine.parse("Hi", parser)
["H"]
...> parser = char("H") |> satisfy(char, fn x -> x == "i" end)
...> Combine.parse("Hi", parser)
["H", "i"]
"""
@spec satisfy(previous_parser, parser, predicate) :: parser
# Applies `parser`, then validates the produced value with `predicate`.
# On predicate failure the state is converted to an error anchored at the
# position captured *before* the parser ran (line/col of the input state).
defparser satisfy(%ParserState{status: :ok, line: line, column: col} = state, parser, predicate)
  when is_function(parser, 1) and is_function(predicate, 1) do
  case parser.(state) do
    %ParserState{status: :ok, results: [h|_]} = s ->
      cond do
        predicate.(h) -> s
        true ->
          %{s | :status => :error,
            :error => "Could not satisfy predicate for `#{h}` at line #{line}, column #{col}",
            :line => line,
            :column => col
          }
      end
    %ParserState{} = s -> s
  end
end
@doc """
Applies a parser and then verifies that the result is contained in the provided list of matches.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> parser = one_of(char, ?a..?z |> Enum.map(&(<<&1::utf8>>)))
...> Combine.parse("abc", parser)
["a"]
...> parser = upper |> one_of(char, ["i", "I"])
...> Combine.parse("Hi", parser)
["H", "i"]
"""
@spec one_of(previous_parser, parser, Range.t | list()) :: parser
# Applies `parser`, then requires its topmost result to be a member of
# `items` (a Range or list; `in` outside guards expands to Enum.member?/2).
# Position in the error message is the one captured before parsing.
defparser one_of(%ParserState{status: :ok, line: line, column: col} = state, parser, items)
  when is_function(parser, 1) do
  case parser.(state) do
    %ParserState{status: :ok, results: [h|_]} = s ->
      cond do
        h in items ->
          s
        true ->
          stringified = Enum.join(items, ", ")
          %{s | :status => :error, :error => "Expected one of [#{stringified}], but found `#{h}`, at line #{line}, column #{col}"}
      end
    %ParserState{} = s -> s
  end
end
@doc """
Applies a parser and then verifies that the result is not contained in the provided list of matches.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> parser = none_of(char, ?a..?z |> Enum.map(&(<<&1::utf8>>)))
...> Combine.parse("ABC", parser)
["A"]
...> parser = upper |> none_of(char, ["i", "I"])
...> Combine.parse("Hello", parser)
["H", "e"]
"""
@spec none_of(previous_parser, parser, Range.t | list()) :: parser
# Applies `parser`, then requires its topmost result NOT to be a member of
# `items` (a Range or list). Mirrors one_of/3 with the membership test
# inverted; position in the error message is captured before parsing.
defparser none_of(%ParserState{status: :ok, line: line, column: col} = state, parser, items)
  when is_function(parser, 1) do
  case parser.(state) do
    %ParserState{status: :ok, results: [h|_]} = s ->
      cond do
        h in items ->
          stringified = Enum.join(items, ", ")
          %{s | :status => :error, :error => "Expected none of [#{stringified}], but found `#{h}`, at line #{line}, column #{col}"}
        true ->
          s
      end
    %ParserState{} = s -> s
  end
end
# Normalizes a Range of disallowed items to a list before delegating to the
# general none_of_impl clause. The previous version recursed with the exact
# same arguments (`none_of_impl(state, parser, items)`), which would loop
# forever if this clause were ever selected; converting the Range with
# Enum.to_list/1 makes the recursion reach a different (non-Range) clause.
# Membership and Enum.join/2 behave identically for a Range and its list.
defp none_of_impl(%ParserState{status: :ok} = state, parser, %Range{} = items),
  do: none_of_impl(state, parser, Enum.to_list(items))
@doc """
Applies `parser`. If it fails, it's error is modified to contain the given label for easier troubleshooting.
# Example
iex> import #{__MODULE__}
...> import Combine.Parsers.Text
...> Combine.parse("abc", label(integer, "year"))
{:error, "Expected `year` at line 1, column 1."}
"""
@spec label(previous_parser, parser, String.t) :: parser
# Applies `parser`; on failure replaces its error message with one naming
# the given label, making error reports easier to read. Success states pass
# through unchanged. Note: the column is reported as col + 1.
defparser label(%ParserState{status: :ok} = state, parser, name) when is_function(parser, 1) do
  case parser.(state) do
    %ParserState{status: :ok} = s -> s
    %ParserState{line: line, column: col} = s ->
      %{s | :error => "Expected `#{name}` at line #{line}, column #{col + 1}."}
  end
end
end
| 35.829642 | 135 | 0.601132 |
1c6f25bd5b41ed3771376c2b0a44a72a76ca1f43 | 1,563 | exs | Elixir | test/base_image_sample_apps_test.exs | kayodeosagbemi/elixir-runtime | 1746adf362444e3e0cc2daa5e461be24f1cb624a | [
"Apache-2.0"
] | 170 | 2017-08-25T06:40:14.000Z | 2022-01-10T22:18:51.000Z | test/base_image_sample_apps_test.exs | kayodeosagbemi/elixir-runtime | 1746adf362444e3e0cc2daa5e461be24f1cb624a | [
"Apache-2.0"
] | 27 | 2017-09-07T05:57:37.000Z | 2022-03-22T13:40:47.000Z | test/base_image_sample_apps_test.exs | kayodeosagbemi/elixir-runtime | 1746adf362444e3e0cc2daa5e461be24f1cb624a | [
"Apache-2.0"
] | 16 | 2017-11-14T01:45:00.000Z | 2021-10-09T03:26:39.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule BaseImageSampleAppsTest do
  use ExUnit.Case
  import TestHelper

  # Builds a minimal Plug sample app on top of the elixir-base Docker image
  # and asserts the running container answers HTTP on port 8080.
  test "Minimal plug app" do
    dockerfile = """
    FROM elixir-base
    COPY . /app/
    RUN mix do deps.get, compile
    CMD mix run --no-halt
    """

    run_app_test("minimal_plug", dockerfile)
  end

  @apps_dir Path.join(__DIR__, "sample_apps")
  @tmp_dir Path.join(__DIR__, "tmp")

  # Copies the named sample app into a scratch directory, writes the given
  # Dockerfile content there, builds the image, runs it with port 8080
  # published, and curls http://localhost:8080 asserting the body matches
  # "Hello, world!". build_docker_image/1, run_docker_daemon/2 and
  # assert_cmd_output/3 come from TestHelper (not visible here); the
  # timeout/show/verbose options presumably control retry/output behavior —
  # confirm against TestHelper.
  def run_app_test(app_name, dockerfile_content) do
    # Start from a clean scratch dir on every run.
    File.rm_rf!(@tmp_dir)

    @apps_dir
    |> Path.join(app_name)
    |> File.cp_r!(@tmp_dir)

    @tmp_dir
    |> Path.join("Dockerfile")
    |> File.write!(dockerfile_content)

    File.cd!(@tmp_dir, fn ->
      build_docker_image(fn image ->
        run_docker_daemon(["-p", "8080:8080", image], fn _container ->
          assert_cmd_output(
            ["curl", "-s", "-S", "http://localhost:8080"],
            ~r{Hello, world!},
            timeout: 10,
            show: true,
            verbose: true
          )
        end)
      end)
    end)
  end
end
| 26.491525 | 74 | 0.647473 |
1c6f4da4d6c9fdcef0d9fb43e52b767ba2b3ea15 | 936 | exs | Elixir | phoenix0/config/config.exs | JacobOscarson/elexir-0 | f4e67bb4a68c6a0cba5b410d80427e721ac7826a | [
"MIT"
] | null | null | null | phoenix0/config/config.exs | JacobOscarson/elexir-0 | f4e67bb4a68c6a0cba5b410d80427e721ac7826a | [
"MIT"
] | null | null | null | phoenix0/config/config.exs | JacobOscarson/elexir-0 | f4e67bb4a68c6a0cba5b410d80427e721ac7826a | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# General application configuration
config :phoenix0,
ecto_repos: [Phoenix0.Repo]
# Configures the endpoint
config :phoenix0, Phoenix0.Endpoint,
url: [host: "localhost"],
secret_key_base: "239AtpUm6I24UUmwgjDP9SFziVqxx5ASwVkz559VRsgQkoZjc51KEDWykghI/jvx",
render_errors: [view: Phoenix0.ErrorView, accepts: ~w(html json)],
pubsub: [name: Phoenix0.PubSub,
adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
| 33.428571 | 86 | 0.763889 |
1c6f62aa399d435eed8bd08b6cfd55e968011b4f | 4,998 | exs | Elixir | apps/banking_challenge/test/banking_challenge/accounts_test.exs | JoaoAngelojr/banking_challenge | eec2f8b15a0c7da7daeb157c76a52ce4639ddc7c | [
"Apache-2.0"
] | null | null | null | apps/banking_challenge/test/banking_challenge/accounts_test.exs | JoaoAngelojr/banking_challenge | eec2f8b15a0c7da7daeb157c76a52ce4639ddc7c | [
"Apache-2.0"
] | null | null | null | apps/banking_challenge/test/banking_challenge/accounts_test.exs | JoaoAngelojr/banking_challenge | eec2f8b15a0c7da7daeb157c76a52ce4639ddc7c | [
"Apache-2.0"
] | null | null | null | defmodule BankingChallenge.AccountsTest do
use BankingChallenge.DataCase, async: true
alias BankingChallenge.Accounts
alias BankingChallenge.Accounts.Inputs
alias BankingChallenge.Accounts.Schemas.Account
# Triple A: Arrange, Act and Assert
# Fixed: the describe label previously misspelled the function under test
# ("create_new_accout/1"); test bodies are unchanged.
describe "create_new_account/1" do
  test "fail if email is already taken" do
    # 1. Arrange: seed an account that already holds the email.
    email = "#{Ecto.UUID.generate()}@email.com"
    Repo.insert!(%Account{email: email})

    # 2. Act and 3. Assert: creation must be rejected — presumably via a
    # unique constraint/validation on email (confirm in the Accounts context).
    assert {:error, _changeset} =
             Accounts.create_new_account(%Inputs.Create{owner_name: "Some Name", email: email})
  end

  test "successfully create an account with valid input" do
    # 1. Arrange
    email = "#{Ecto.UUID.generate()}@email.com"

    input = %Inputs.Create{
      owner_name: "Some name",
      email: email,
      email_confirmation: email
    }

    # 2. Act and 3. Assert
    assert {:ok, account} = Accounts.create_new_account(input)
    assert account.owner_name == "Some name"
    assert account.email == email

    # The new account is persisted and retrievable by email.
    query = from(a in Account, where: a.email == ^email)
    assert [^account] = Repo.all(query)
  end
end
describe "withdraw_account/1" do
  # NOTE(review): together these tests imply a freshly created account has a
  # starting balance >= 50000 and < 200_000 — confirm the default balance in
  # Accounts.create_new_account.
  test "fail if withdraw amount is bigger than balance" do
    # 1. Arrange
    email = "#{Ecto.UUID.generate()}@email.com"

    Accounts.create_new_account(%Inputs.Create{
      owner_name: "Some name",
      email: email,
      email_confirmation: email
    })

    # 2. Act and 3. Assert
    assert {:error, _changeset} =
             Accounts.withdraw_account(%Inputs.Withdraw{
               email: email,
               amount: 200_000
             })
  end

  test "successfully withdraw from an account with valid input" do
    # 1. Arrange
    email = "#{Ecto.UUID.generate()}@email.com"

    Accounts.create_new_account(%Inputs.Create{
      owner_name: "Some name",
      email: email,
      email_confirmation: email
    })

    # 2. Act and 3. Assert
    # NOTE(review): `_changeset` is a misleading binding on the success path;
    # the {:ok, _} payload is presumably the updated account record.
    assert {:ok, _changeset} =
             Accounts.withdraw_account(%Inputs.Withdraw{
               email: email,
               amount: 50000
             })
  end
end
describe "transfer/1" do
  test "fail if transfer amount is bigger than source account balance" do
    # 1. Arrange: create distinct source and target accounts.
    source_email = "#{Ecto.UUID.generate()}@email.com"
    target_email = "#{Ecto.UUID.generate()}@email.com"

    Accounts.create_new_account(%Inputs.Create{
      owner_name: "Source Account",
      email: source_email,
      email_confirmation: source_email
    })

    Accounts.create_new_account(%Inputs.Create{
      owner_name: "Target Account",
      email: target_email,
      email_confirmation: target_email
    })

    # 2. Act and 3. Assert: 200_000 exceeds the source balance (see the
    # balance assumption noted for withdraw_account/1).
    assert {:error, _changeset} =
             Accounts.transfer(%Inputs.Transfer{
               from_email: source_email,
               to_email: target_email,
               amount: 200_000
             })
  end

  test "successfully transfer between accounts with valid input" do
    # 1. Arrange
    source_email = "#{Ecto.UUID.generate()}@email.com"
    target_email = "#{Ecto.UUID.generate()}@email.com"

    Accounts.create_new_account(%Inputs.Create{
      owner_name: "Source Account",
      email: source_email,
      email_confirmation: source_email
    })

    Accounts.create_new_account(%Inputs.Create{
      owner_name: "Target Account",
      email: target_email,
      email_confirmation: target_email
    })

    # 2. Act and 3. Assert: a successful transfer returns bare :ok
    # (unlike withdraw_account/1, which returns an {:ok, _} tuple).
    assert :ok =
             Accounts.transfer(%Inputs.Transfer{
               from_email: source_email,
               to_email: target_email,
               amount: 50000
             })
  end
end
describe "fetch/1" do
  test "successfully fetch an account from the DB" do
    # Arrange: seed one account directly through the repo.
    inserted =
      Repo.insert!(%Account{
        owner_name: "Some name",
        email: "#{Ecto.UUID.generate()}@email.com"
      })

    # Act / Assert: fetching by id returns exactly the inserted record.
    assert Accounts.fetch(inserted.id) == {:ok, inserted}
  end

  test "fail with not_found if no account has the given id" do
    # A freshly generated UUID cannot match any persisted account.
    assert Accounts.fetch(Ecto.UUID.generate()) == {:error, :not_found}
  end
end
describe "get_all/0" do
  test "successfully get all accounts in DB" do
    # Arrange: seed two distinct accounts.
    for owner <- ["Some name", "Another name"] do
      Repo.insert!(%Account{
        owner_name: owner,
        email: "#{Ecto.UUID.generate()}@email.com"
      })
    end

    # Act
    accounts = Accounts.get_all()

    # Assert: exactly the two seeded accounts are returned.
    assert length(accounts) == 2
  end
end
end
| 28.397727 | 97 | 0.582633 |
1c6f65071433d416fdfa64fd679fe18030bc659b | 34,692 | ex | Elixir | lib/elixir/lib/dynamic_supervisor.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 19,291 | 2015-01-01T02:42:49.000Z | 2022-03-31T21:01:40.000Z | lib/elixir/lib/dynamic_supervisor.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 8,082 | 2015-01-01T04:16:23.000Z | 2022-03-31T22:08:02.000Z | lib/elixir/lib/dynamic_supervisor.ex | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 3,472 | 2015-01-03T04:11:56.000Z | 2022-03-29T02:07:30.000Z | defmodule DynamicSupervisor do
@moduledoc ~S"""
A supervisor that starts children dynamically.
The `Supervisor` module was designed to handle mostly static children
that are started in the given order when the supervisor starts. A
`DynamicSupervisor` starts with no children. Instead, children are
started on demand via `start_child/2`. When a dynamic supervisor
terminates, all children are shut down at the same time, with no guarantee
of ordering.
## Examples
A dynamic supervisor is started with no children and often a name:
children = [
{DynamicSupervisor, name: MyApp.DynamicSupervisor}
]
Supervisor.start_link(children, strategy: :one_for_one)
The options given in the child specification are documented in `start_link/1`.
Once the dynamic supervisor is running, we can start children
with `start_child/2`, which receives a child specification:
{:ok, agent1} = DynamicSupervisor.start_child(MyApp.DynamicSupervisor, {Agent, fn -> %{} end})
Agent.update(agent1, &Map.put(&1, :key, "value"))
Agent.get(agent1, & &1)
#=> %{key: "value"}
{:ok, agent2} = DynamicSupervisor.start_child(MyApp.DynamicSupervisor, {Agent, fn -> %{} end})
Agent.get(agent2, & &1)
#=> %{}
DynamicSupervisor.count_children(MyApp.DynamicSupervisor)
#=> %{active: 2, specs: 2, supervisors: 0, workers: 2}
## Scalability and partitioning
The `DynamicSupervisor` is a single process responsible for starting
other processes. In some applications, the `DynamicSupervisor` may
become a bottleneck. To address this, you can start multiple instances
of the `DynamicSupervisor` and then pick a "random" instance to start
the child on.
Instead of:
children = [
{DynamicSupervisor, name: MyApp.DynamicSupervisor}
]
and:
DynamicSupervisor.start_child(MyApp.DynamicSupervisor, {Agent, fn -> %{} end})
You can do this:
children = [
{PartitionSupervisor,
child_spec: DynamicSupervisor,
name: MyApp.DynamicSupervisors}
]
and then:
DynamicSupervisor.start_child(
{:via, PartitionSupervisor, {MyApp.DynamicSupervisors, self()}},
{Agent, fn -> %{} end}
)
In the code above, we start a partition supervisor that will by default
start a dynamic supervisor for each core in your machine. Then, instead
of calling the `DynamicSupervisor` by name, you call it through the
partition supervisor, using `self()` as the routing key. This means each
process will be assigned one of the existing dynamic supervisors.
Read the `PartitionSupervisor` docs for more information.
## Module-based supervisors
Similar to `Supervisor`, dynamic supervisors also support module-based
supervisors.
defmodule MyApp.DynamicSupervisor do
# Automatically defines child_spec/1
use DynamicSupervisor
def start_link(init_arg) do
DynamicSupervisor.start_link(__MODULE__, init_arg, name: __MODULE__)
end
@impl true
def init(_init_arg) do
DynamicSupervisor.init(strategy: :one_for_one)
end
end
See the `Supervisor` docs for a discussion of when you may want to use
module-based supervisors. A `@doc` annotation immediately preceding
`use DynamicSupervisor` will be attached to the generated `child_spec/1`
function.
## Name registration
A supervisor is bound to the same name registration rules as a `GenServer`.
Read more about these rules in the documentation for `GenServer`.
## Migrating from Supervisor's :simple_one_for_one
In case you were using the deprecated `:simple_one_for_one` strategy from
the `Supervisor` module, you can migrate to the `DynamicSupervisor` in
few steps.
Imagine the given "old" code:
defmodule MySupervisor do
use Supervisor
def start_link(init_arg) do
Supervisor.start_link(__MODULE__, init_arg, name: __MODULE__)
end
def start_child(foo, bar, baz) do
# This will start child by calling MyWorker.start_link(init_arg, foo, bar, baz)
Supervisor.start_child(__MODULE__, [foo, bar, baz])
end
@impl true
def init(init_arg) do
children = [
# Or the deprecated: worker(MyWorker, [init_arg])
%{id: MyWorker, start: {MyWorker, :start_link, [init_arg]}}
]
Supervisor.init(children, strategy: :simple_one_for_one)
end
end
It can be upgraded to the DynamicSupervisor like this:
defmodule MySupervisor do
use DynamicSupervisor
def start_link(init_arg) do
DynamicSupervisor.start_link(__MODULE__, init_arg, name: __MODULE__)
end
def start_child(foo, bar, baz) do
# If MyWorker is not using the new child specs, we need to pass a map:
# spec = %{id: MyWorker, start: {MyWorker, :start_link, [foo, bar, baz]}}
spec = {MyWorker, foo: foo, bar: bar, baz: baz}
DynamicSupervisor.start_child(__MODULE__, spec)
end
@impl true
def init(init_arg) do
DynamicSupervisor.init(
strategy: :one_for_one,
extra_arguments: [init_arg]
)
end
end
The difference is that the `DynamicSupervisor` expects the child specification
at the moment `start_child/2` is called, and no longer on the init callback.
If there are any initial arguments given on initialization, such as `[initial_arg]`,
it can be given in the `:extra_arguments` flag on `DynamicSupervisor.init/1`.
"""
@behaviour GenServer
@doc """
Callback invoked to start the supervisor and during hot code upgrades.
Developers typically invoke `DynamicSupervisor.init/1` at the end of
their init callback to return the proper supervision flags.
"""
@callback init(init_arg :: term) :: {:ok, sup_flags()} | :ignore
@typedoc "The supervisor flags returned on init"
@type sup_flags() :: %{
strategy: strategy(),
intensity: non_neg_integer(),
period: pos_integer(),
max_children: non_neg_integer() | :infinity,
extra_arguments: [term()]
}
@typedoc "Options given to `start_link` functions"
@type option :: GenServer.option()
@typedoc "Options given to `start_link` and `init/1` functions"
@type init_option ::
{:strategy, strategy()}
| {:max_restarts, non_neg_integer()}
| {:max_seconds, pos_integer()}
| {:max_children, non_neg_integer() | :infinity}
| {:extra_arguments, [term()]}
@typedoc "Supported strategies"
@type strategy :: :one_for_one
@typedoc "Return values of `start_child` functions"
@type on_start_child ::
{:ok, pid}
| {:ok, pid, info :: term}
| :ignore
| {:error, {:already_started, pid} | :max_children | term}
# In this struct, `args` refers to the arguments passed to init/1 (the `init_arg`).
defstruct [
:args,
:extra_arguments,
:mod,
:name,
:strategy,
:max_children,
:max_restarts,
:max_seconds,
children: %{},
restarts: []
]
@doc """
Returns a specification to start a dynamic supervisor under a supervisor.
See `Supervisor`.
"""
@doc since: "1.6.1"
def child_spec(opts) when is_list(opts) do
  # The spec id mirrors the registered name: a bare atom is used directly,
  # while {:global, name} / {:via, module, name} tuples are unwrapped.
  # Without a :name option, the id defaults to DynamicSupervisor itself.
  registration = Keyword.get(opts, :name, DynamicSupervisor)

  id =
    case registration do
      name when is_atom(name) -> name
      {:global, name} -> name
      {:via, _module, name} -> name
    end

  %{id: id, start: {DynamicSupervisor, :start_link, [opts]}, type: :supervisor}
end
@doc false
# Injects a default, overridable child_spec/1 (of type :supervisor) into the
# using module and declares it as implementing the DynamicSupervisor
# behaviour. A @doc attribute written immediately before `use
# DynamicSupervisor` is attached to the generated child_spec/1; otherwise a
# default doc is provided. `opts` given to `use` are merged into the default
# spec via Supervisor.child_spec/2 (escaped so they survive into the caller).
defmacro __using__(opts) do
  quote location: :keep, bind_quoted: [opts: opts] do
    @behaviour DynamicSupervisor

    unless Module.has_attribute?(__MODULE__, :doc) do
      @doc """
      Returns a specification to start this module under a supervisor.

      See `Supervisor`.
      """
    end

    def child_spec(arg) do
      default = %{
        id: __MODULE__,
        start: {__MODULE__, :start_link, [arg]},
        type: :supervisor
      }

      Supervisor.child_spec(default, unquote(Macro.escape(opts)))
    end

    defoverridable child_spec: 1
  end
end
@doc """
Starts a supervisor with the given options.
This function is typically not invoked directly, instead it is invoked
when using a `DynamicSupervisor` as a child of another supervisor:
children = [
{DynamicSupervisor, name: MySupervisor}
]
If the supervisor is successfully spawned, this function returns
`{:ok, pid}`, where `pid` is the PID of the supervisor. If the supervisor
is given a name and a process with the specified name already exists,
the function returns `{:error, {:already_started, pid}}`, where `pid`
is the PID of that process.
Note that a supervisor started with this function is linked to the parent
process and exits not only on crashes but also if the parent process exits
with `:normal` reason.
## Options
* `:name` - registers the supervisor under the given name.
The supported values are described under the "Name registration"
section in the `GenServer` module docs.
* `:strategy` - the restart strategy option. The only supported
value is `:one_for_one` which means that no other child is
terminated if a child process terminates. You can learn more
about strategies in the `Supervisor` module docs.
* `:max_restarts` - the maximum number of restarts allowed in
a time frame. Defaults to `3`.
* `:max_seconds` - the time frame in which `:max_restarts` applies.
Defaults to `5`.
* `:max_children` - the maximum amount of children to be running
under this supervisor at the same time. When `:max_children` is
exceeded, `start_child/2` returns `{:error, :max_children}`. Defaults
to `:infinity`.
* `:extra_arguments` - arguments that are prepended to the arguments
specified in the child spec given to `start_child/2`. Defaults to
an empty list.
"""
@doc since: "1.6.0"
@spec start_link([option | init_option]) :: Supervisor.on_start()
def start_link(options) when is_list(options) do
  # Split supervision flags from the GenServer start options, then boot a
  # module-based supervisor using the default callback module with the
  # flags already turned into an init result.
  {sup_opts, start_opts} =
    Keyword.split(options, [:extra_arguments, :max_children, :max_seconds, :max_restarts, :strategy])

  start_link(Supervisor.Default, init(sup_opts), start_opts)
end
@doc """
Starts a module-based supervisor process with the given `module` and `arg`.
To start the supervisor, the `c:init/1` callback will be invoked in the given
`module`, with `arg` as its argument. The `c:init/1` callback must return a
supervisor specification which can be created with the help of the `init/1`
function.
If the `c:init/1` callback returns `:ignore`, this function returns
`:ignore` as well and the supervisor terminates with reason `:normal`.
If it fails or returns an incorrect value, this function returns
`{:error, term}` where `term` is a term with information about the
error, and the supervisor terminates with reason `term`.
The `:name` option can also be given in order to register a supervisor
name, the supported values are described in the "Name registration"
section in the `GenServer` module docs.
If the supervisor is successfully spawned, this function returns
`{:ok, pid}`, where `pid` is the PID of the supervisor. If the supervisor
is given a name and a process with the specified name already exists,
the function returns `{:error, {:already_started, pid}}`, where `pid`
is the PID of that process.
Note that a supervisor started with this function is linked to the parent
process and exits not only on crashes but also if the parent process exits
with `:normal` reason.
"""
@doc since: "1.6.0"
@spec start_link(module, term, [option]) :: Supervisor.on_start()
def start_link(mod, init_arg, opts \\ []) do
  # The registered name (if any) rides along in the init tuple so the
  # init/1 callback can store it in the state.
  init_tuple = {mod, init_arg, opts[:name]}
  GenServer.start_link(__MODULE__, init_tuple, opts)
end
@doc """
Dynamically adds a child specification to `supervisor` and starts that child.
`child_spec` should be a valid child specification as detailed in the
"Child specification" section of the documentation for `Supervisor`. The child
process will be started as defined in the child specification.
If the child process start function returns `{:ok, child}` or `{:ok, child,
info}`, then child specification and PID are added to the supervisor and
this function returns the same value.
If the child process start function returns `:ignore`, then no child is added
to the supervision tree and this function returns `:ignore` too.
If the child process start function returns an error tuple or an erroneous
value, or if it fails, the child specification is discarded and this function
returns `{:error, error}` where `error` is the error or erroneous value
returned from child process start function, or failure reason if it fails.
If the supervisor already has as many children as allowed by the
`:max_children` limit set on supervisor initialization (see `init/1`), then
this function returns `{:error, :max_children}`.
"""
@doc since: "1.6.0"
@spec start_child(
        Supervisor.supervisor(),
        Supervisor.child_spec()
        | {module, term}
        | module
        | (old_erlang_child_spec :: :supervisor.child_spec())
      ) ::
        on_start_child()
# Old Erlang-style 6-tuple child specs are accepted verbatim; clause order
# matters, as the generic clause below would otherwise try to normalize them.
def start_child(supervisor, {_, _, _, _, _, _} = child_spec) do
  validate_and_start_child(supervisor, child_spec)
end

# Any other accepted shape (map, {module, arg} or module) is first normalized
# through Supervisor.child_spec/2.
def start_child(supervisor, child_spec) do
  validate_and_start_child(supervisor, Supervisor.child_spec(child_spec, []))
end
# Validates the child spec and, when valid, asks the supervisor process to
# start the child. Validation errors are wrapped in an :error tuple.
defp validate_and_start_child(supervisor, child_spec) do
  with {:ok, child} <- validate_child(child_spec) do
    call(supervisor, {:start_child, child})
  else
    error -> {:error, error}
  end
end
# Map-shaped child spec: fill in defaults before validating. Note the default
# shutdown depends on the child type (5s for workers, :infinity for
# supervisors), mirroring OTP conventions.
defp validate_child(%{id: _, start: {mod, _, _} = start} = child) do
  restart = Map.get(child, :restart, :permanent)
  type = Map.get(child, :type, :worker)
  modules = Map.get(child, :modules, [mod])

  shutdown =
    case type do
      :worker -> Map.get(child, :shutdown, 5_000)
      :supervisor -> Map.get(child, :shutdown, :infinity)
    end

  validate_child(start, restart, shutdown, type, modules)
end

# Old Erlang-style 6-tuple spec; the first element (the id) is ignored since
# DynamicSupervisor children have no ids.
defp validate_child({_, start, restart, shutdown, type, modules}) do
  validate_child(start, restart, shutdown, type, modules)
end

defp validate_child(other) do
  {:invalid_child_spec, other}
end

# Validates every field; on success returns the internal 5-tuple child
# representation stored in state.children.
defp validate_child(start, restart, shutdown, type, modules) do
  with :ok <- validate_start(start),
       :ok <- validate_restart(restart),
       :ok <- validate_shutdown(shutdown),
       :ok <- validate_type(type),
       :ok <- validate_modules(modules) do
    {:ok, {start, restart, shutdown, type, modules}}
  end
end
# A start spec must be a {module, function, args} triple.
defp validate_start({mod, fun, args}) when is_atom(mod) and is_atom(fun) and is_list(args),
  do: :ok

defp validate_start(other), do: {:invalid_mfa, other}

# Only the two OTP child types are accepted.
defp validate_type(:worker), do: :ok
defp validate_type(:supervisor), do: :ok
defp validate_type(other), do: {:invalid_child_type, other}

defp validate_restart(restart) when restart in [:permanent, :temporary, :transient], do: :ok
defp validate_restart(other), do: {:invalid_restart_type, other}

# Shutdown is a positive timeout in milliseconds or one of the two atoms.
defp validate_shutdown(ms) when is_integer(ms) and ms > 0, do: :ok
defp validate_shutdown(:infinity), do: :ok
defp validate_shutdown(:brutal_kill), do: :ok
defp validate_shutdown(other), do: {:invalid_shutdown, other}
# :dynamic is the value used when the modules cannot be known statically.
defp validate_modules(:dynamic), do: :ok

defp validate_modules(mods) when is_list(mods) do
  if Enum.all?(mods, &is_atom/1) do
    :ok
  else
    {:invalid_modules, mods}
  end
end

defp validate_modules(other), do: {:invalid_modules, other}
@doc """
Terminates the given child identified by `pid`.
If successful, this function returns `:ok`. If there is no process with
the given PID, this function returns `{:error, :not_found}`.
"""
@doc since: "1.6.0"
@spec terminate_child(Supervisor.supervisor(), pid) :: :ok | {:error, :not_found}
def terminate_child(supervisor, pid) when is_pid(pid) do
  # The actual termination happens inside the supervisor process; see the
  # {:terminate_child, pid} handle_call/3 clause.
  call(supervisor, {:terminate_child, pid})
end
@doc """
Returns a list with information about all children.
Note that calling this function when supervising a large number
of children under low memory conditions can cause an out of memory
exception.
This function returns a list of tuples containing:
* `id` - it is always `:undefined` for dynamic supervisors
* `child` - the PID of the corresponding child process or the
atom `:restarting` if the process is about to be restarted
* `type` - `:worker` or `:supervisor` as defined in the child
specification
* `modules` - as defined in the child specification
"""
@doc since: "1.6.0"
@spec which_children(Supervisor.supervisor()) :: [
        # module() | :dynamic here because :supervisor.modules() is not exported
        {:undefined, pid | :restarting, :worker | :supervisor, [module()] | :dynamic}
      ]
def which_children(supervisor) do
  # Reply is built inside the supervisor process; see the :which_children
  # handle_call/3 clause.
  call(supervisor, :which_children)
end
@doc """
Returns a map containing count values for the supervisor.
The map contains the following keys:
* `:specs` - the number of children processes
* `:active` - the count of all actively running child processes managed by
this supervisor
* `:supervisors` - the count of all supervisors whether or not the child
process is still alive
* `:workers` - the count of all workers, whether or not the child process
is still alive
"""
@doc since: "1.6.0"
@spec count_children(Supervisor.supervisor()) :: %{
        specs: non_neg_integer,
        active: non_neg_integer,
        supervisors: non_neg_integer,
        workers: non_neg_integer
      }
def count_children(supervisor) do
  # The server replies with a keyword list (see the :count_children
  # handle_call/3 clause); convert it into the documented map. Map.new/1 is
  # the idiomatic Elixir equivalent of :maps.from_list/1, and starting the
  # pipeline from the subject value avoids piping out of a function call.
  supervisor
  |> call(:count_children)
  |> Map.new()
end
@doc """
Synchronously stops the given supervisor with the given `reason`.
It returns `:ok` if the supervisor terminates with the given
reason. If it terminates with another reason, the call exits.
This function keeps OTP semantics regarding error reporting.
If the reason is any other than `:normal`, `:shutdown` or
`{:shutdown, _}`, an error report is logged.
"""
@doc since: "1.7.0"
@spec stop(Supervisor.supervisor(), reason :: term, timeout) :: :ok
def stop(supervisor, reason \\ :normal, timeout \\ :infinity) do
  # Plain synchronous GenServer stop; error-report semantics follow OTP rules.
  GenServer.stop(supervisor, reason, timeout)
end
@doc """
Receives a set of `options` that initializes a dynamic supervisor.
This is typically invoked at the end of the `c:init/1` callback of
module-based supervisors. See the "Module-based supervisors" section
in the module documentation for more information.
It accepts the same `options` as `start_link/1` (except for `:name`)
and it returns a tuple containing the supervisor options.
## Examples
def init(_arg) do
DynamicSupervisor.init(max_children: 1000)
end
"""
@doc since: "1.6.0"
@spec init([init_option]) :: {:ok, sup_flags()}
def init(options) when is_list(options) do
  # Build the supervisor flags map in one expression, applying the documented
  # defaults for every option that was not given.
  flags = %{
    strategy: Keyword.get(options, :strategy, :one_for_one),
    intensity: Keyword.get(options, :max_restarts, 3),
    period: Keyword.get(options, :max_seconds, 5),
    max_children: Keyword.get(options, :max_children, :infinity),
    extra_arguments: Keyword.get(options, :extra_arguments, [])
  }

  {:ok, flags}
end
## Callbacks
@impl true
def init({mod, init_arg, name}) do
  # NOTE(review): mirrors :supervisor's process bookkeeping — presumably so
  # crash reports and introspection tools identify this as a supervisor.
  Process.put(:"$initial_call", {:supervisor, mod, 1})
  # Trapping exits lets handle_info/2 observe child terminations as :EXIT
  # messages instead of crashing with the child.
  Process.flag(:trap_exit, true)

  case mod.init(init_arg) do
    {:ok, flags} when is_map(flags) ->
      # Normalize the registration into the shape stored in state.name and
      # used in supervisor reports.
      name =
        cond do
          is_nil(name) -> {self(), mod}
          is_atom(name) -> {:local, name}
          is_tuple(name) -> name
        end

      state = %DynamicSupervisor{mod: mod, args: init_arg, name: name}

      case init(state, flags) do
        {:ok, state} -> {:ok, state}
        {:error, reason} -> {:stop, {:supervisor_data, reason}}
      end

    :ignore ->
      :ignore

    other ->
      {:stop, {:bad_return, {mod, :init, other}}}
  end
end

# Validates the supervisor flags and applies them to the state struct.
# Returns {:error, reason} (from the validators) when any flag is invalid.
defp init(state, flags) do
  extra_arguments = Map.get(flags, :extra_arguments, [])
  max_children = Map.get(flags, :max_children, :infinity)
  max_restarts = Map.get(flags, :intensity, 1)
  max_seconds = Map.get(flags, :period, 5)
  strategy = Map.get(flags, :strategy, :one_for_one)

  with :ok <- validate_strategy(strategy),
       :ok <- validate_restarts(max_restarts),
       :ok <- validate_seconds(max_seconds),
       :ok <- validate_dynamic(max_children),
       :ok <- validate_extra_arguments(extra_arguments) do
    {:ok,
     %{
       state
       | extra_arguments: extra_arguments,
         max_children: max_children,
         max_restarts: max_restarts,
         max_seconds: max_seconds,
         strategy: strategy
     }}
  end
end
# :one_for_one is the only strategy a DynamicSupervisor supports.
defp validate_strategy(:one_for_one), do: :ok
defp validate_strategy(other), do: {:error, {:invalid_strategy, other}}

defp validate_restarts(count) when is_integer(count) and count >= 0, do: :ok
defp validate_restarts(other), do: {:error, {:invalid_intensity, other}}

defp validate_seconds(seconds) when is_integer(seconds) and seconds > 0, do: :ok
defp validate_seconds(other), do: {:error, {:invalid_period, other}}

# :infinity disables the children limit.
defp validate_dynamic(:infinity), do: :ok
defp validate_dynamic(limit) when is_integer(limit) and limit >= 0, do: :ok
defp validate_dynamic(other), do: {:error, {:invalid_max_children, other}}

defp validate_extra_arguments(args) when is_list(args), do: :ok
defp validate_extra_arguments(other), do: {:error, {:invalid_extra_arguments, other}}
@impl true
def handle_call(:which_children, _from, %{children: children} = state) do
  # Children awaiting restart are reported with :restarting in the pid slot;
  # live children report their actual pid. The id is always :undefined.
  reply =
    Enum.map(children, fn
      {_pid, {:restarting, {_, _, _, type, modules}}} ->
        {:undefined, :restarting, type, modules}

      {pid, {_, _, _, type, modules}} ->
        {:undefined, pid, type, modules}
    end)

  {:reply, reply, state}
end
def handle_call(:count_children, _from, state) do
  %{children: children} = state
  specs = map_size(children)

  # Single pass over the children map. Entries tagged {:restarting, spec} are
  # counted in their type bucket but not as active, since the process is dead
  # and waiting to be restarted.
  {active, workers, supervisors} =
    Enum.reduce(children, {0, 0, 0}, fn
      {_pid, {:restarting, {_, _, _, :worker, _}}}, {active, worker, supervisor} ->
        {active, worker + 1, supervisor}

      {_pid, {:restarting, {_, _, _, :supervisor, _}}}, {active, worker, supervisor} ->
        {active, worker, supervisor + 1}

      {_pid, {_, _, _, :worker, _}}, {active, worker, supervisor} ->
        {active + 1, worker + 1, supervisor}

      {_pid, {_, _, _, :supervisor, _}}, {active, worker, supervisor} ->
        {active + 1, worker, supervisor + 1}
    end)

  # Keyword-list reply; the public count_children/1 converts it to a map.
  reply = [specs: specs, active: active, supervisors: supervisors, workers: workers]
  {:reply, reply, state}
end
def handle_call({:terminate_child, pid}, _from, %{children: children} = state) do
  # Reuse the bulk-shutdown machinery for a single child by passing a
  # one-entry map; unknown pids yield {:error, :not_found}.
  case Map.fetch(children, pid) do
    {:ok, info} ->
      :ok = terminate_children(%{pid => info}, state)
      {:reply, :ok, delete_child(pid, state)}

    :error ->
      {:reply, {:error, :not_found}, state}
  end
end
# Used by Task.Supervisor, which is built on top of DynamicSupervisor: default
# restart/shutdown values are read from the process dictionary under the
# Task.Supervisor key (presumably stored there by Task.Supervisor setup — not
# visible in this module).
def handle_call({:start_task, args, restart, shutdown}, from, state) do
  {init_restart, init_shutdown} = Process.get(Task.Supervisor)
  restart = restart || init_restart
  shutdown = shutdown || init_shutdown
  child = {{Task.Supervised, :start_link, args}, restart, shutdown, :worker, [Task.Supervised]}
  handle_call({:start_child, child}, from, state)
end
def handle_call({:start_child, child}, _from, state) do
  %{children: children, max_children: max_children} = state

  # Enforce the :max_children limit before starting anything. When the limit
  # is :infinity the integer count always compares below it in term order.
  if map_size(children) >= max_children do
    {:reply, {:error, :max_children}, state}
  else
    handle_start_child(child, state)
  end
end
defp handle_start_child({{m, f, args} = mfa, restart, shutdown, type, modules}, state) do
  %{extra_arguments: extra} = state

  # `reply` is bound to the raw start result so the caller receives exactly
  # what the child start function produced; bookkeeping happens only for the
  # two success shapes.
  case reply = start_child(m, f, extra ++ args) do
    {:ok, pid, _} ->
      {:reply, reply, save_child(pid, mfa, restart, shutdown, type, modules, state)}

    {:ok, pid} ->
      {:reply, reply, save_child(pid, mfa, restart, shutdown, type, modules, state)}

    _ ->
      {:reply, reply, state}
  end
end

# Invokes the child's start MFA, converting crashes (exit/error/throw) into
# {:error, reason} via exit_reason/3 and normalizing unexpected returns.
defp start_child(m, f, a) do
  try do
    apply(m, f, a)
  catch
    kind, reason ->
      {:error, exit_reason(kind, reason, __STACKTRACE__)}
  else
    {:ok, pid, extra} when is_pid(pid) -> {:ok, pid, extra}
    {:ok, pid} when is_pid(pid) -> {:ok, pid}
    :ignore -> :ignore
    {:error, _} = error -> error
    other -> {:error, other}
  end
end
# Records a started child in state.children, keyed by its pid.
defp save_child(pid, mfa, restart, shutdown, type, modules, state) do
  entry = {mfa_for_restart(mfa, restart), restart, shutdown, type, modules}
  %{state | children: Map.put(state.children, pid, entry)}
end

# Temporary children are never restarted, so their start arguments are
# replaced with :undefined (presumably to avoid retaining large terms).
defp mfa_for_restart({m, f, _}, :temporary), do: {m, f, :undefined}
defp mfa_for_restart(other, _restart), do: other
# Normalizes a caught kind/payload into an exit reason following OTP
# conventions: exits pass through unchanged, while errors and throws also
# carry the stacktrace (throws wrapped in :nocatch).
defp exit_reason(:exit, reason, _stack), do: reason
defp exit_reason(:error, reason, stack), do: {reason, stack}
defp exit_reason(:throw, value, stack), do: {{:nocatch, value}, stack}
@impl true
def handle_cast(_msg, state) do
  # Casts are not part of this server's protocol; ignore them.
  {:noreply, state}
end

@impl true
def handle_info({:EXIT, pid, reason}, state) do
  # Trapped exit from a linked child (trap_exit is set in init/1). Either the
  # child is restarted/removed, or the restart intensity was exceeded and the
  # whole supervisor shuts down.
  case maybe_restart_child(pid, reason, state) do
    {:ok, state} -> {:noreply, state}
    {:shutdown, state} -> {:stop, :shutdown, state}
  end
end

def handle_info({:"$gen_restart", pid}, state) do
  # Deferred restart scheduled by restart_child/3 after a failed restart
  # attempt ({:try_again, state}).
  %{children: children} = state

  case children do
    %{^pid => restarting_args} ->
      {:restarting, child} = restarting_args

      case restart_child(pid, child, state) do
        {:ok, state} -> {:noreply, state}
        {:shutdown, state} -> {:stop, :shutdown, state}
      end

    # We may hit this clause if we send $gen_restart and then
    # someone calls terminate_child, removing the child.
    %{} ->
      {:noreply, state}
  end
end

def handle_info(msg, state) do
  # Unknown message: log it (formatted by format_report/1) instead of crashing.
  :logger.error(
    %{
      label: {DynamicSupervisor, :unexpected_msg},
      report: %{
        msg: msg
      }
    },
    %{
      domain: [:otp, :elixir],
      error_logger: %{tag: :error_msg},
      report_cb: &__MODULE__.format_report/1
    }
  )

  {:noreply, state}
end
@impl true
def code_change(_, %{mod: mod, args: init_arg} = state, _) do
  # Re-run the callback module's init to pick up possibly-changed flags,
  # interpreting the return value the same way the init/1 callback does.
  case mod.init(init_arg) do
    {:ok, flags} when is_map(flags) ->
      case init(state, flags) do
        {:ok, state} -> {:ok, state}
        {:error, reason} -> {:error, {:supervisor_data, reason}}
      end

    :ignore ->
      {:ok, state}

    error ->
      error
  end
end

@impl true
def terminate(_, %{children: children} = state) do
  # Shut down every child before the supervisor itself exits.
  :ok = terminate_children(children, state)
end
# Shuts down all given children, waits for them to exit, and logs every child
# that terminated with an unexpected reason.
defp terminate_children(children, state) do
  {pids, times, stacks} = monitor_children(children)
  size = map_size(pids)

  # One kill-timer per distinct integer shutdown timeout; when a timer fires,
  # the pids sharing that timeout are brutally killed (see wait_children/4).
  timers =
    Enum.reduce(times, %{}, fn {time, pids}, acc ->
      Map.put(acc, :erlang.start_timer(time, self(), :kill), pids)
    end)

  stacks = wait_children(pids, size, timers, stacks)

  for {pid, {child, reason}} <- stacks do
    report_error(:shutdown_error, reason, pid, child, state)
  end

  :ok
end

# Monitors every live child and sends it the appropriate exit signal.
# Accumulates:
#   pids   - children we now wait on (pid => child spec)
#   times  - integer shutdown timeout => pids using that timeout
#   stacks - children that already exited abnormally (pid => {child, reason})
defp monitor_children(children) do
  Enum.reduce(children, {%{}, %{}, %{}}, fn
    # Children awaiting restart have no live process to stop.
    {_, {:restarting, _}}, acc ->
      acc

    {pid, {_, restart, _, _, _} = child}, {pids, times, stacks} ->
      case monitor_child(pid) do
        :ok ->
          times = exit_child(pid, child, times)
          {Map.put(pids, pid, child), times, stacks}

        {:error, :normal} when restart != :permanent ->
          {pids, times, stacks}

        {:error, reason} ->
          {pids, times, Map.put(stacks, pid, {child, reason})}
      end
  end)
end

# Replaces the link to the child with a monitor. If the child already exited,
# the pending :EXIT message is converted into {:error, reason} and the
# corresponding :DOWN message is consumed so it cannot leak.
defp monitor_child(pid) do
  ref = Process.monitor(pid)
  Process.unlink(pid)

  receive do
    {:EXIT, ^pid, reason} ->
      receive do
        {:DOWN, ^ref, :process, ^pid, _} -> {:error, reason}
      end
  after
    0 -> :ok
  end
end
# Sends the exit signal matching the child's shutdown strategy. For integer
# timeouts the pid is recorded under its timeout so terminate_children/2 can
# schedule a kill timer.
defp exit_child(pid, {_, _, shutdown, _, _}, times) do
  case shutdown do
    :brutal_kill ->
      Process.exit(pid, :kill)
      times

    :infinity ->
      Process.exit(pid, :shutdown)
      times

    time ->
      Process.exit(pid, :shutdown)
      Map.update(times, time, [pid], &[pid | &1])
  end
end

# All children are down: cancel outstanding kill timers, flushing any timeout
# message that fired before the cancel so it does not linger in the mailbox.
defp wait_children(_pids, 0, timers, stacks) do
  for {timer, _} <- timers do
    _ = :erlang.cancel_timer(timer)

    receive do
      {:timeout, ^timer, :kill} -> :ok
    after
      0 -> :ok
    end
  end

  stacks
end

defp wait_children(pids, size, timers, stacks) do
  receive do
    {:DOWN, _ref, :process, pid, reason} ->
      case pids do
        %{^pid => child} ->
          stacks = wait_child(pid, child, reason, stacks)
          wait_children(pids, size - 1, timers, stacks)

        %{} ->
          # :DOWN for a pid we are not tracking; keep waiting.
          wait_children(pids, size, timers, stacks)
      end

    {:timeout, timer, :kill} ->
      # A shutdown timeout expired: brutally kill the stragglers for this timer.
      for pid <- Map.fetch!(timers, timer), do: Process.exit(pid, :kill)
      wait_children(pids, size, Map.delete(timers, timer), stacks)
  end
end

# Classifies a child's exit reason, collecting unexpected ones into `stacks`.
# A :brutal_kill child is expected to die with :killed; for other children
# :shutdown/{:shutdown, _} are always fine and :normal is fine unless the
# child was :permanent.
defp wait_child(pid, {_, _, :brutal_kill, _, _} = child, reason, stacks) do
  case reason do
    :killed -> stacks
    _ -> Map.put(stacks, pid, {child, reason})
  end
end

defp wait_child(pid, {_, restart, _, _, _} = child, reason, stacks) do
  case reason do
    {:shutdown, _} -> stacks
    :shutdown -> stacks
    :normal when restart != :permanent -> stacks
    reason -> Map.put(stacks, pid, {child, reason})
  end
end
# Looks up the exited pid and dispatches on its restart strategy. Entries in
# the {:restarting, _} shape do not match the 5-tuple pattern, so they (and
# unknown pids) leave the state untouched.
defp maybe_restart_child(pid, reason, %{children: children} = state) do
  case children do
    %{^pid => {_, restart, _, _, _} = child} ->
      maybe_restart_child(restart, reason, pid, child, state)

    %{} ->
      {:ok, state}
  end
end

# Clause order below is load-bearing: :permanent children are restarted for
# any reason; the :normal/:shutdown/{:shutdown, _} clauses then absorb clean
# exits for the remaining strategies, so the :transient clause only ever sees
# abnormal exits. :temporary children are reported but never restarted.
defp maybe_restart_child(:permanent, reason, pid, child, state) do
  report_error(:child_terminated, reason, pid, child, state)
  restart_child(pid, child, state)
end

defp maybe_restart_child(_, :normal, pid, _child, state) do
  {:ok, delete_child(pid, state)}
end

defp maybe_restart_child(_, :shutdown, pid, _child, state) do
  {:ok, delete_child(pid, state)}
end

defp maybe_restart_child(_, {:shutdown, _}, pid, _child, state) do
  {:ok, delete_child(pid, state)}
end

defp maybe_restart_child(:transient, reason, pid, child, state) do
  report_error(:child_terminated, reason, pid, child, state)
  restart_child(pid, child, state)
end

defp maybe_restart_child(:temporary, reason, pid, child, state) do
  report_error(:child_terminated, reason, pid, child, state)
  {:ok, delete_child(pid, state)}
end
# Removes the pid's entry from the children map.
defp delete_child(pid, state) do
  %{children: children} = state
  %{state | children: Map.delete(children, pid)}
end
# Attempts to restart a child, first charging the restart against the
# intensity window. A failed start is retried asynchronously by posting a
# {:"$gen_restart", pid} message instead of looping here, which keeps the
# supervisor responsive between attempts.
defp restart_child(pid, child, state) do
  case add_restart(state) do
    {:ok, %{strategy: strategy} = state} ->
      case restart_child(strategy, pid, child, state) do
        {:ok, state} ->
          {:ok, state}

        {:try_again, state} ->
          send(self(), {:"$gen_restart", pid})
          {:ok, state}
      end

    {:shutdown, state} ->
      report_error(:shutdown, :reached_max_restart_intensity, pid, child, state)
      {:shutdown, delete_child(pid, state)}
  end
end

# Records the current restart timestamp and checks the count against
# max_restarts within the max_seconds sliding window. Timestamps are in
# monotonic seconds (:erlang.monotonic_time/1 with unit 1).
defp add_restart(state) do
  %{max_seconds: max_seconds, max_restarts: max_restarts, restarts: restarts} = state
  now = :erlang.monotonic_time(1)
  restarts = add_restart([now | restarts], now, max_seconds)
  state = %{state | restarts: restarts}

  if length(restarts) <= max_restarts do
    {:ok, state}
  else
    {:shutdown, state}
  end
end
# Keeps only the restart timestamps that fall inside the sliding window of
# `period` seconds ending at `now`.
defp add_restart(timestamps, now, period) do
  Enum.filter(timestamps, fn stamp -> now <= stamp + period end)
end
# Restarts a single child under the :one_for_one strategy by re-invoking its
# original MFA with the supervisor's extra arguments prepended. The restarted
# process gets a new pid, so the old entry is removed and a fresh one saved.
defp restart_child(:one_for_one, current_pid, child, state) do
  {{m, f, args} = mfa, restart, shutdown, type, modules} = child
  %{extra_arguments: extra} = state

  case start_child(m, f, extra ++ args) do
    {:ok, pid, _} ->
      state = delete_child(current_pid, state)
      {:ok, save_child(pid, mfa, restart, shutdown, type, modules, state)}

    {:ok, pid} ->
      state = delete_child(current_pid, state)
      {:ok, save_child(pid, mfa, restart, shutdown, type, modules, state)}

    :ignore ->
      {:ok, delete_child(current_pid, state)}

    {:error, reason} ->
      # Keep the spec around, tagged as restarting, and let the caller retry
      # via a {:"$gen_restart", pid} message.
      report_error(:start_error, reason, {:restarting, current_pid}, child, state)
      state = put_in(state.children[current_pid], {:restarting, child})
      {:try_again, state}
  end
end

# Emits a SASL-style supervisor report through :logger, using the same report
# shape and metadata tags as OTP supervisor reports.
defp report_error(error, reason, pid, child, %{name: name, extra_arguments: extra}) do
  :logger.error(
    %{
      label: {:supervisor, error},
      report: [
        {:supervisor, name},
        {:errorContext, error},
        {:reason, reason},
        {:offender, extract_child(pid, child, extra)}
      ]
    },
    %{
      domain: [:otp, :sasl],
      report_cb: &:logger.format_otp_report/1,
      logger_formatter: %{title: "SUPERVISOR REPORT"},
      error_logger: %{tag: :error_report, type: :supervisor_report}
    }
  )
end
# Builds the :offender keyword list for a supervisor report. DynamicSupervisor
# children have no ids, so :id is always :undefined; the reported MFA includes
# the supervisor's extra arguments prepended.
defp extract_child(pid, {{mod, fun, args}, restart, shutdown, type, _modules}, extra) do
  [
    pid: pid,
    id: :undefined,
    mfargs: {mod, fun, extra ++ args},
    restart_type: restart,
    shutdown: shutdown,
    child_type: type
  ]
end
@impl true
def format_status(:terminate, [_pdict, state]) do
  # On termination, expose the raw state in crash reports.
  state
end

def format_status(_, [_pdict, %{mod: mod} = state]) do
  # :sys.get_status-friendly shape; charlist keys match what OTP tooling uses.
  [data: [{~c"State", state}], supervisor: [{~c"Callback", mod}]]
end

## Helpers

@compile {:inline, call: 2}

# All client-facing functions funnel through here. The :infinity timeout is
# deliberate — presumably because a supervisor may legitimately take a long
# time to reply while starting or terminating children.
defp call(supervisor, req) do
  GenServer.call(supervisor, req, :infinity)
end
@doc false
# Logger report callback for the unexpected-message report emitted by the
# catch-all handle_info/2 clause. Returns a format/args pair for :logger.
def format_report(%{
      label: {__MODULE__, :unexpected_msg},
      report: %{msg: msg}
    }) do
  # ~c sigil replaces the deprecated single-quoted charlist literal, matching
  # the style already used in format_status/2. The value is identical.
  {~c"DynamicSupervisor received unexpected message: ~p~n", [msg]}
end
end
| 31.225923 | 100 | 0.642598 |
1c6f74e63d10477047bdb468614e4ec02827e7ea | 276 | ex | Elixir | lib/empex_cookbook/cookbook/ingredient.ex | ludwikbukowski/recipes | cac5711d32874c3011da8da3329b70d0e28e725e | [
"MIT"
] | 4 | 2019-02-11T12:15:36.000Z | 2021-03-22T16:23:47.000Z | lib/empex_cookbook/cookbook/ingredient.ex | ludwikbukowski/recipes | cac5711d32874c3011da8da3329b70d0e28e725e | [
"MIT"
] | null | null | null | lib/empex_cookbook/cookbook/ingredient.ex | ludwikbukowski/recipes | cac5711d32874c3011da8da3329b70d0e28e725e | [
"MIT"
] | null | null | null | defmodule EmpexCookbook.Cookbook.Ingredient do
@moduledoc """
The Ingredient Model
"""
use Ecto.Schema
import Ecto.Changeset
# Embedded schema (no backing table of its own); an ingredient is just a name.
embedded_schema do
  field(:name, :string)
end
# Builds a changeset allowing only the :name field to be cast.
def changeset(schema, params) do
  cast(schema, params, [:name])
end
end
| 15.333333 | 46 | 0.681159 |
1c6f802448fe251d06d9b2cb958d662bcc3cb4dc | 270 | exs | Elixir | priv/test_repo/migrations/20190120203502_comments.exs | nickolaich/formex_ecto | 322907daa4924d1c297acc7f9e60f99bc8f2f6f3 | [
"MIT"
] | 20 | 2017-06-25T12:30:03.000Z | 2021-04-25T06:43:31.000Z | priv/test_repo/migrations/20190120203502_comments.exs | nickolaich/formex_ecto | 322907daa4924d1c297acc7f9e60f99bc8f2f6f3 | [
"MIT"
] | 8 | 2017-11-19T17:15:32.000Z | 2020-02-02T12:50:03.000Z | priv/test_repo/migrations/20190120203502_comments.exs | nickolaich/formex_ecto | 322907daa4924d1c297acc7f9e60f99bc8f2f6f3 | [
"MIT"
] | 9 | 2018-03-30T22:14:13.000Z | 2021-11-15T12:12:56.000Z | defmodule Formex.Ecto.TestRepo.Migrations.Comments do
use Ecto.Migration
def change do
  # Reversible migration: Ecto derives the down direction (drop index/table)
  # automatically from these create calls.
  create table(:article_comments) do
    add :content, :string
    # FK to articles; column is named assoc_id rather than the conventional
    # article_id.
    add :assoc_id, references(:articles)
  end

  create index(:article_comments, [:assoc_id])
end
end
| 20.769231 | 53 | 0.711111 |
1c6f94bc3145e135f692f0fa65ae894b0c388a4b | 1,511 | exs | Elixir | mix.exs | axelson/priv_check | ba4228881edbf16ac61b0e006537de517c7f6f06 | [
"MIT"
] | 8 | 2020-03-15T19:22:02.000Z | 2021-09-28T11:00:18.000Z | mix.exs | axelson/priv_check | ba4228881edbf16ac61b0e006537de517c7f6f06 | [
"MIT"
] | 7 | 2020-03-11T06:21:57.000Z | 2020-11-15T19:48:54.000Z | mix.exs | axelson/priv_check | ba4228881edbf16ac61b0e006537de517c7f6f06 | [
"MIT"
] | null | null | null | defmodule PrivCheck.MixProject do
use Mix.Project
@version "0.2.2"
# Mix project definition. Version comes from the @version module attribute;
# docs/dialyzer/package settings are delegated to private helpers below.
def project do
  [
    app: :priv_check,
    version: @version,
    description: "Check for private API usage at compile-time",
    elixir: ">= 1.10.0",
    start_permanent: Mix.env() == :prod,
    deps: deps(),
    elixirc_paths: elixirc_paths(Mix.env()),
    docs: docs(),
    dialyzer: dialyzer(),
    package: package()
  ]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:dialyxir, "~> 1.0", only: :dev, runtime: false},
{:ex_doc, "~> 0.21", only: :dev, runtime: false},
{:credo, "~> 1.1", only: [:dev, :test], runtime: false}
]
end
defp elixirc_paths(:test), do: ~w(lib test/support)
defp elixirc_paths(_), do: ~w(lib)
# ExDoc configuration: README.md is included, PrivCheck is the landing page,
# and generated docs link to the source at the current version tag.
defp docs() do
  [
    main: "PrivCheck",
    extras: ["README.md"],
    source_url: "https://github.com/axelson/priv_check/",
    source_ref: @version
  ]
end

# Adds Mix to the dialyzer PLT — presumably because the library interacts
# with Mix APIs; confirm against lib sources.
defp dialyzer() do
  [
    plt_add_apps: [:mix]
  ]
end
# Hex package metadata.
defp package() do
  [
    description: "Check for private API usage at compile-time",
    maintainers: ["Jason Axelson"],
    licenses: ["MIT"],
    links: %{
      "Github" => "https://github.com/axelson/priv_check",
      # Fixed: the changelog URL was missing the repository segment
      # ("priv_check"), pointing at github.com/axelson/blob/... which cannot
      # resolve. The anchor is the version with dots stripped, matching
      # GitHub's heading-anchor convention.
      "Changelog" =>
        "https://github.com/axelson/priv_check/blob/#{@version}/Changelog.md##{String.replace(@version, ".", "")}"
    }
  ]
end
| 21.898551 | 70 | 0.555261 |
1c6fb1d66442f051e46a9813ebecdab574b5ea32 | 896 | exs | Elixir | mix.exs | bot-ex/botex-telegram | 7b41080b3fcb1736274b83fbf7237a884dd27f38 | [
"MIT"
] | 1 | 2021-11-08T08:03:41.000Z | 2021-11-08T08:03:41.000Z | mix.exs | bot-ex/botex-telegram | 7b41080b3fcb1736274b83fbf7237a884dd27f38 | [
"MIT"
] | null | null | null | mix.exs | bot-ex/botex-telegram | 7b41080b3fcb1736274b83fbf7237a884dd27f38 | [
"MIT"
] | 3 | 2020-04-11T10:32:52.000Z | 2020-08-25T14:49:36.000Z | defmodule BotexTelegram.MixProject do
use Mix.Project
# Mix project definition for the bot_ex Telegram adapter.
def project do
  [
    app: :botex_telegram,
    version: "0.4.0",
    description: "Telegram module for https://github.com/bot-ex/bot_ex",
    elixir: "~> 1.9",
    start_permanent: Mix.env() == :prod,
    deps: deps(),
    package: [
      licenses: ["MIT"],
      homepage_url: "https://github.com/bot-ex",
      links: %{"GitHub" => "https://github.com/bot-ex/bot_ex-telegram"}
    ]
  ]
end

# Run "mix help compile.app" to learn about applications.
def application do
  [
    extra_applications: [:logger]
  ]
end

# Run "mix help deps" to learn about dependencies.
defp deps do
  [
    # Core bot framework this package plugs into.
    {:bot_ex, "~> 1.0.0"},
    # nadia: Telegram Bot API client; timex: date/time handling.
    {:nadia, "~> 0.6"},
    {:timex, "~> 3.6"},
    # Documentation tooling, dev only.
    {:earmark, "~> 1.4", only: :dev},
    {:ex_doc, "~> 0.21", only: :dev},
    {:jason, "~> 1.1"}
  ]
end
end
| 22.974359 | 74 | 0.534598 |
1c6fc579e734c8362f086f0b02726ff2edd8da24 | 2,076 | exs | Elixir | test/lib/slack_coder/build_system/log_parser_test.exs | mgwidmann/slack_coder | 3ff4375777c1bdbc94ba1491fdf4e1c0fa465edf | [
"MIT"
] | 15 | 2015-09-23T16:03:28.000Z | 2018-12-04T21:48:04.000Z | test/lib/slack_coder/build_system/log_parser_test.exs | mgwidmann/slack_coder | 3ff4375777c1bdbc94ba1491fdf4e1c0fa465edf | [
"MIT"
] | 27 | 2016-01-12T16:44:31.000Z | 2017-10-13T16:09:36.000Z | test/lib/slack_coder/build_system/log_parser_test.exs | mgwidmann/slack_coder | 3ff4375777c1bdbc94ba1491fdf4e1c0fa465edf | [
"MIT"
] | 4 | 2016-09-01T12:08:24.000Z | 2017-09-21T15:07:57.000Z | defmodule SlackCoder.BuildSystem.LogParserTest do
use ExUnit.Case
alias SlackCoder.BuildSystem.{LogParser, File}
describe "#parse" do
test "finds the right seeds, files and type for each test" do
assert [
%File{
seed: 21452,
type: :rspec,
file: {
"./spec/controllers/users/somethings_controller_spec.rb",
"375",
"User::SomethingsController PUT update with valid params redirect to homepage"
}
},
%File{
seed: nil,
type: :cucumber,
file: {
"features/something.feature",
"16",
"Scenario: Something"
}
}
] = Fixtures.Builds.failed_rspec_and_cucumber() |> LogParser.parse()
end
test "handles rspecs weird line format" do
assert [%File{
file: {"./spec/some_spec.rb", "[1:2:3]", "The description[0m"}
}] = LogParser.parse """
Randomized with seed 1234
Blah blah blah
[31mrspec ./spec/some_spec.rb[1:2:3][0m [36m# The description[0m
Blah blah blah
Randomized with seed 1234
"""
end
test "handles numbers in the file name" do
assert [%File{
file: {"./spec/controllers/v1/some_spec.rb", "54", "The description[0m"}
}] = LogParser.parse """
Randomized with seed 1234
Blah blah blah
[31mrspec ./spec/controllers/v1/some_spec.rb:54[0m [36m# The description[0m
Blah blah blah
Randomized with seed 1234
"""
end
test "minitest failures" do
[
%File{
type: :minitest,
seed: 26781,
file: {"test/user/profile/edit_test.rb", "33", "UserTest::ProfileEditing#test_admin_can_edit"}
},
%File{
type: :minitest,
seed: 26781,
file: {"test/user/profile/edit_test.rb", "43", "UserTest::ProfileEditing#test_user_can_edit"}
}
] = Fixtures.Builds.failed_minitest() |> LogParser.parse()
end
end
end
| 30.086957 | 104 | 0.555877 |
1c6fcda9ce403250c0fceb25f20a6eb58ba22c68 | 730 | ex | Elixir | web/models/test.ex | chasm/trelm | 359d14f9b84193afbb0a9039792e9c49b2e55327 | [
"MIT"
] | null | null | null | web/models/test.ex | chasm/trelm | 359d14f9b84193afbb0a9039792e9c49b2e55327 | [
"MIT"
] | 1 | 2018-12-26T09:03:18.000Z | 2018-12-26T09:03:18.000Z | web/models/test.ex | chasm/trelm | 359d14f9b84193afbb0a9039792e9c49b2e55327 | [
"MIT"
] | null | null | null | defmodule Trelm.Test do
use Trelm.Web, :model
# A "test" record: a human description plus the test's content.
schema "tests" do
  field :description, :string
  field :test, :string

  timestamps
end

# Both fields are mandatory; there are no optional fields.
@required_fields ~w(description test)
@optional_fields ~w()

@doc """
Creates a changeset based on the `model` and `params`.

If no params are provided, an invalid changeset is returned
with no validation performed.
"""
def changeset(model, params \\ :empty) do
  model
  |> cast(params, @required_fields, @optional_fields)
end
end
# JSON encoding for Trelm.Test: a nil test body is rendered as "" and status
# is always the literal "PENDING" — presumably the client computes the real
# status after running the test.
defimpl Poison.Encoder, for: Trelm.Test do
  def encode(model, opts) do
    %{id: model.id,
      description: model.description,
      test: model.test || "",
      status: "PENDING"} |> Poison.Encoder.encode(opts)
  end
end
| 21.470588 | 61 | 0.671233 |
1c6fd4e093efef6dfedaef86975281048822a2e8 | 1,085 | ex | Elixir | lib/colly_web/channels/user_socket.ex | nidhindamodaran/colly | aaf5076afa24ea69ec084f19c5617f6808b40ade | [
"MIT"
] | null | null | null | lib/colly_web/channels/user_socket.ex | nidhindamodaran/colly | aaf5076afa24ea69ec084f19c5617f6808b40ade | [
"MIT"
] | 4 | 2020-05-30T13:25:44.000Z | 2021-05-11T13:32:44.000Z | lib/colly_web/channels/user_socket.ex | nidhinnambiar/colly | aaf5076afa24ea69ec084f19c5617f6808b40ade | [
"MIT"
] | null | null | null | defmodule CollyWeb.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", CollyWeb.RoomChannel
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
@impl true
def connect(_params, socket, _connect_info) do
  # No authentication/verification: every connection is accepted unchanged.
  {:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# CollyWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
@impl true
# Anonymous socket: without an id, targeted per-user disconnect broadcasts
# are not possible.
def id(_socket), do: nil
end
| 30.138889 | 83 | 0.693088 |
1c6fdecbbef4f49dc9e4a3e05916fe70d72aafa8 | 76 | exs | Elixir | apps/theta_web/test/theta_web/views/page_view_test.exs | LangPham/thetaproject | c6479d1b761ff58fe6ae5f82e2d9de87a8658883 | [
"MIT"
] | null | null | null | apps/theta_web/test/theta_web/views/page_view_test.exs | LangPham/thetaproject | c6479d1b761ff58fe6ae5f82e2d9de87a8658883 | [
"MIT"
] | 11 | 2020-07-21T09:34:54.000Z | 2021-08-29T07:38:02.000Z | apps/theta_web/test/theta_web/views/page_view_test.exs | LangPham/thetaproject | c6479d1b761ff58fe6ae5f82e2d9de87a8658883 | [
"MIT"
] | null | null | null | defmodule ThetaWeb.PageViewTest do
use ThetaWeb.ConnCase, async: true
end
| 19 | 36 | 0.815789 |
1c6fe9a6bab70a4e372730de6b638485b1cd46f2 | 8,149 | exs | Elixir | test/ex_unit_properties_test.exs | Qqwy/stream_data | c912edb42a26da3ea5d9241ba174cdefc8dd0a29 | [
"Apache-2.0"
] | 689 | 2017-07-10T15:25:03.000Z | 2022-03-31T09:23:38.000Z | test/ex_unit_properties_test.exs | Qqwy/stream_data | c912edb42a26da3ea5d9241ba174cdefc8dd0a29 | [
"Apache-2.0"
] | 151 | 2017-07-23T15:19:49.000Z | 2022-01-31T09:33:59.000Z | test/ex_unit_properties_test.exs | Qqwy/stream_data | c912edb42a26da3ea5d9241ba174cdefc8dd0a29 | [
"Apache-2.0"
] | 67 | 2017-08-11T15:52:30.000Z | 2022-02-18T00:12:30.000Z | defmodule ExUnitPropertiesTest do
use ExUnit.Case
use ExUnitProperties
describe "gen all" do
test "supports generation and filtering clauses" do
data =
gen all [_ | _] = list <- list_of(integer()),
elem <- member_of(list),
elem != 5,
elem_not_five = elem do
{Integer.to_string(elem_not_five), list}
end
# Let's make sure that "5" isn't common at all by making the smallest size for this generator
# be 10.
data = scale(data, &max(&1, 10))
check all {string, list} <- data do
assert is_binary(string)
assert is_list(list)
assert String.to_integer(string) != 5
end
end
test "treats non-matching patterns in <- clauses as filters" do
data =
gen all :non_boolean <- boolean() do
:ok
end
assert_raise StreamData.FilterTooNarrowError, fn ->
Enum.take(data, 1)
end
end
test "supports do keyword syntax" do
gen(all _boolean <- boolean(), do: :ok)
data =
gen(
all string <- binary(),
list <- list_of(integer()),
do: {string, list}
)
check all {string, list} <- data do
assert is_binary(string)
assert is_list(list)
end
end
test "errors out if the first clause is not a generator" do
message =
"\"gen all\" and \"check all\" clauses must start with a generator (<-) clause, " <>
"got: a = 1"
assert_raise ArgumentError, message, fn ->
Code.compile_quoted(
quote do
gen(all a = 1, _ <- integer, do: :ok)
end
)
end
message =
"\"gen all\" and \"check all\" clauses must start with a generator (<-) clause, " <>
"got: true"
assert_raise ArgumentError, message, fn ->
Code.compile_quoted(
quote do
gen(all true, _ <- integer, do: :ok)
end
)
end
end
end
describe "property" do
property "supports rescue" do
raise "some error"
rescue
exception in [RuntimeError] ->
assert Exception.message(exception) == "some error"
end
property "supports catch" do
throw(:some_error)
catch
:throw, term ->
assert term == :some_error
end
test "produces error on not implemented tests" do
defmodule TestNotImplemented do
use ExUnit.Case
use ExUnitProperties
setup context do
assert context[:not_implemented]
:ok
end
property "this is not implemented yet"
end
ExUnit.Server.modules_loaded()
old_opts = ExUnit.configuration()
ExUnit.configure(autorun: false, seed: 0, colors: [enabled: false], exclude: [:exclude])
on_exit(fn -> ExUnit.configure(old_opts) end)
output =
ExUnit.CaptureIO.capture_io(fn ->
assert %{failures: 1, skipped: 0, total: 1} = ExUnit.run()
end)
assert output =~ "Not implemented\n"
assert output =~ "\n1 property, 1 failure\n"
end
end
describe "check all" do
property "can do assignment" do
{:ok, counter} = Agent.start_link(fn -> 0 end)
check all i <- integer(), string_i = Integer.to_string(i), max_runs: 10 do
Agent.update(counter, &(&1 + 1))
assert String.to_integer(string_i) == i
end
assert Agent.get(counter, & &1) == 10
end
property "runs the number of specified times" do
{:ok, counter} = Agent.start_link(fn -> 0 end)
check all :ok <- :ok, max_runs: 10 do
Agent.update(counter, &(&1 + 1))
:ok
end
assert Agent.get(counter, & &1) == 10
end
property "runs for the specified number of milliseconds" do
{:ok, counter} = Agent.start_link(fn -> 0 end)
check all :ok <- :ok, max_runs: :infinity, max_run_time: 100 do
Process.sleep(25)
Agent.update(counter, &(&1 + 1))
:ok
end
assert Agent.get(counter, & &1) in 3..5
end
property "ends at :max_runs if it ends before :max_run_time" do
{:ok, counter} = Agent.start_link(fn -> 0 end)
check all :ok <- :ok, max_runs: 5, max_run_time: 500 do
Process.sleep(1)
Agent.update(counter, &(&1 + 1))
:ok
end
assert Agent.get(counter, & &1) == 5
end
property "ends at :max_run_time if it ends before :max_runs" do
{:ok, counter} = Agent.start_link(fn -> 0 end)
check all :ok <- :ok, max_runs: 100_000, max_run_time: 100 do
Process.sleep(25)
Agent.update(counter, &(&1 + 1))
:ok
end
assert Agent.get(counter, & &1) in 3..5
end
test "supports an :initial_seed option" do
{:ok, agent1} = Agent.start_link(fn -> nil end)
{:ok, agent2} = Agent.start_link(fn -> nil end)
check all list <- list_of(integer()), initial_seed: 1, max_runs: 1, initial_size: 10 do
Agent.update(agent1, fn _ -> list end)
end
check all list <- list_of(integer()), initial_seed: 1, max_runs: 1, initial_size: 10 do
Agent.update(agent2, fn _ -> list end)
end
assert Agent.get(agent1, & &1) == Agent.get(agent2, & &1)
end
test "raises an error instead of running an infinite loop" do
message = ~r/both the :max_runs and :max_run_time options are set to :infinity/
assert_raise ArgumentError, message, fn ->
check all :ok <- :ok, max_runs: :infinity, max_run_time: :infinity do
:ok
end
end
end
property "works with errors that are not assertion errors" do
assert_raise ExUnitProperties.Error, fn ->
check all tuple <- {:ok, nil} do
:ok = tuple
end
end
end
property "shrinking" do
assert_raise ExUnit.AssertionError, fn ->
check all list <- list_of(integer()) do
assert 5 not in list
end
end
end
test "supports do keyword syntax" do
check all int <- integer(), do: assert(is_integer(int))
check all a <- binary(),
b <- binary(),
do: assert(String.starts_with?(a <> b, a))
check all int1 <- integer(),
int2 <- integer(),
sum = abs(int1) + abs(int2),
max_runs: 25,
do: assert(sum >= int1)
end
test "do keyword syntax passes in options" do
{:ok, counter} = Agent.start_link(fn -> 0 end)
check all int <- integer(),
max_runs: 25,
do: Agent.update(counter, &(&1 + 1)) && assert(is_integer(int))
assert Agent.get(counter, & &1) == 25
end
test "errors out if the first clause is not a generator" do
message =
"\"gen all\" and \"check all\" clauses must start with a generator (<-) clause, " <>
"got: a = 1"
assert_raise ArgumentError, message, fn ->
Code.compile_quoted(
quote do
gen(all a = 1, _ <- integer, do: :ok)
end
)
end
message =
"\"gen all\" and \"check all\" clauses must start with a generator (<-) clause, " <>
"got: true"
assert_raise ArgumentError, message, fn ->
Code.compile_quoted(
quote do
gen(all true, _ <- integer, do: :ok)
end
)
end
end
end
if Version.compare(System.version(), "1.6.0") in [:eq, :gt] do
describe "pick/1" do
test "when there's a random seed thanks to ExUnit setting it up" do
integer = ExUnitProperties.pick(integer())
assert is_integer(integer)
assert integer in -100..100
end
test "raises when there's no random seed in the process dictionary" do
{_pid, ref} =
spawn_monitor(fn ->
message = ~r/the random seed is not set in the current process/
assert_raise RuntimeError, message, fn ->
ExUnitProperties.pick(integer())
end
end)
assert_receive {:DOWN, ^ref, _, _, _}
end
end
end
end
| 27.43771 | 99 | 0.56105 |
1c6fecf050ae4c18f7ff55f769c709387c3dbb6a | 48,508 | ex | Elixir | lib/cldr/unit.ex | Zurga/cldr_units | d90ae5987608b741bd0f531fd760f1e142ff9293 | [
"Apache-2.0"
] | null | null | null | lib/cldr/unit.ex | Zurga/cldr_units | d90ae5987608b741bd0f531fd760f1e142ff9293 | [
"Apache-2.0"
] | null | null | null | lib/cldr/unit.ex | Zurga/cldr_units | d90ae5987608b741bd0f531fd760f1e142ff9293 | [
"Apache-2.0"
] | null | null | null | defmodule Cldr.Unit do
@moduledoc """
Supports the CLDR Units definitions which provide for the localization of many
unit types.
The primary public API defines:
* `Cldr.Unit.to_string/3` which, given a unit or unit list will
output a localized string
* `Cldr.Unit.known_units/0` identifies the available units for localization
* `Cldr.Unit.{add, sub, mult, div}/2` to support basic unit mathematics between
units of compatible type (like length or volume)
* `Cldr.Unit.compare/2` to compare one unit to another unit as long as they
are convertable.
* `Cldr.Unit.convert/2` to convert one unit to another unit as long as they
are convertable.
* `Cldr.Unit.localize/3` will convert a unit into the units preferred for a
given locale and usage
* `Cldr.Unit.preferred_units/3` which, for a given unit and locale,
will return a list of preferred units that can be applied to
`Cldr.Unit.decompose/2`
* `Cldr.Unit.decompose/2` to take a unit and return a list of units decomposed
by a list of smaller units.
"""
alias Cldr.Unit
alias Cldr.{Locale, LanguageTag, Substitution}
alias Cldr.Unit.{Math, Alias, Parser, Conversion, Conversions, Preference, Prefix}
@enforce_keys [:unit, :value, :base_conversion, :usage, :format_options]
defstruct unit: nil,
value: 0,
base_conversion: [],
usage: :default,
format_options: []
@type unit :: atom() | String.t()
@type usage :: atom()
@type style :: atom()
@type value :: Cldr.Math.number_or_decimal() | Ratio.t()
@type conversion :: Conversion.t() | {[Conversion.t(), ...], [Conversion.t(), ...]} | list()
@type locale :: Locale.locale_name() | LanguageTag.t()
@type t :: %__MODULE__{
unit: unit(),
value: value(),
base_conversion: conversion(),
usage: usage(),
format_options: []
}
@default_style :long
@styles [:long, :short, :narrow]
defdelegate convert(unit_1, to_unit), to: Conversion
defdelegate convert!(unit_1, to_unit), to: Conversion
defdelegate preferred_units(unit, backend, options), to: Preference
defdelegate preferred_units!(unit, backend, options), to: Preference
defdelegate add(unit_1, unit_2), to: Math
defdelegate sub(unit_1, unit_2), to: Math
defdelegate mult(unit_1, unit_2), to: Math
defdelegate div(unit_1, unit_2), to: Math
defdelegate add!(unit_1, unit_2), to: Math
defdelegate sub!(unit_1, unit_2), to: Math
defdelegate mult!(unit_1, unit_2), to: Math
defdelegate div!(unit_1, unit_2), to: Math
defdelegate round(unit, places, mode), to: Math
defdelegate round(unit, places), to: Math
defdelegate round(unit), to: Math
defdelegate compare(unit_1, unit_2), to: Math
@app_name Cldr.Config.app_name()
@data_dir [:code.priv_dir(@app_name), "/cldr/locales"] |> :erlang.iolist_to_binary()
@config %{data_dir: @data_dir, locales: ["en"], default_locale: "en"}
@unit_tree "en"
|> Cldr.Config.get_locale(@config)
|> Map.get(:units)
|> Map.get(:short)
|> Enum.map(fn {k, v} -> {k, Map.keys(v)} end)
|> Map.new()
@doc """
Returns the known units.
Known units means units that can
be localised directly.
## Example
=> Cldr.Unit.known_units
[:acre, :acre_foot, :ampere, :arc_minute, :arc_second, :astronomical_unit, :bit,
:bushel, :byte, :calorie, :carat, :celsius, :centiliter, :centimeter, :century,
:cubic_centimeter, :cubic_foot, :cubic_inch, :cubic_kilometer, :cubic_meter,
:cubic_mile, :cubic_yard, :cup, :cup_metric, :day, :deciliter, :decimeter,
:degree, :fahrenheit, :fathom, :fluid_ounce, :foodcalorie, :foot, :furlong,
:g_force, :gallon, :gallon_imperial, :generic, :gigabit, :gigabyte, :gigahertz,
:gigawatt, :gram, :hectare, :hectoliter, :hectopascal, :hertz, :horsepower,
:hour, :inch, ...]
"""
@translatable_units @unit_tree
|> Map.delete(:compound)
|> Map.values()
|> List.flatten()
@spec known_units :: [atom, ...]
def known_units do
@translatable_units
end
@deprecated "Use Cldr.Unit.known_units/0"
def units, do: known_units()
@doc """
Returns a new `Unit.t` struct.
## Arguments
* `value` is any float, integer, `Ratio` or `Decimal`
* `unit` is any unit returned by `Cldr.Unit.known_units/0`
* `options` is Keyword list of options. The default
is `[]`
## Options
* `:use` is the intended use of the unit. This
is used during localization to convert the unit
to that appropriate for the unit category,
usage, target territory and unit value. The `:use`
must be known for the unit's category. See
`Cldr.Unit` for more information.
## Returns
* `unit` or
* `{:error, {exception, message}}`
## Examples
iex> Cldr.Unit.new(23, :gallon)
{:ok, Cldr.Unit.new!(:gallon, 23)}
iex> Cldr.Unit.new(:gallon, 23)
{:ok, Cldr.Unit.new!(:gallon, 23)}
iex> Cldr.Unit.new(14, :gadzoots)
{:error, {Cldr.UnknownUnitError,
"Unknown unit was detected at \\"gadzoots\\""}}
"""
  @spec new(unit() | value(), value() | unit(), Keyword.t()) ::
          {:ok, t()} | {:error, {module(), String.t()}}
  def new(value, unit, options \\ [])

  # The value and unit may be given in either order: for each value type
  # (number, Decimal, Ratio) there is a clause that accepts the value first
  # and a flipped twin that normalises argument order by recursing.
  def new(value, unit, options) when is_number(value) do
    create_unit(value, unit, options)
  end

  def new(unit, value, options) when is_number(value) do
    new(value, unit, options)
  end

  # Decimal-valued units
  def new(%Decimal{} = value, unit, options) do
    create_unit(value, unit, options)
  end

  def new(unit, %Decimal{} = value, options) do
    new(value, unit, options)
  end

  # Ratio-valued units (exact rational arithmetic)
  def new(%Ratio{} = value, unit, options) do
    create_unit(value, unit, options)
  end

  def new(unit, %Ratio{} = value, options) do
    new(value, unit, options)
  end
  @default_use :default
  @default_format_options []

  # Builds the `Unit.t` struct once the argument order has been normalised
  # by `new/3`. Validates the unit name (which also yields its base-unit
  # conversion) and the requested `:usage` for the unit's category; returns
  # `{:ok, unit}` or the first validation error.
  defp create_unit(value, unit, options) do
    usage = Keyword.get(options, :usage, @default_use)
    format_options = Keyword.get(options, :format_options, @default_format_options)

    with {:ok, unit, base_conversion} <- validate_unit(unit),
         {:ok, usage} <- validate_usage(unit, usage) do
      unit = %Unit{
        unit: unit,
        value: value,
        base_conversion: base_conversion,
        usage: usage,
        format_options: format_options
      }

      {:ok, unit}
    end
  end
  # Checks that `usage` is permitted for the category the unit belongs to.
  defp validate_usage(unit, usage) do
    with {:ok, category} <- unit_category(unit) do
      validate_category_usage(category, usage)
    end
  end

  # NOTE(review): the :substance_amount category discards whatever usage was
  # requested and stores `nil` on the struct — presumably intentional since
  # CLDR defines no usages for it, but worth confirming.
  defp validate_category_usage(:substance_amount, _) do
    {:ok, nil}
  end

  @default_category_usage [@default_use]

  # A category with no configured usage list accepts only the default usage.
  defp validate_category_usage(category, usage) do
    usage_list = Map.get(unit_category_usage(), category, @default_category_usage)

    if usage in usage_list do
      {:ok, usage}
    else
      {:error, unknown_usage_error(category, usage)}
    end
  end
@doc """
Returns a new `Unit.t` struct or raises on error.
## Arguments
* `value` is any float, integer or `Decimal`
* `unit` is any unit returned by `Cldr.Unit.known_units/0`
## Returns
* `unit` or
* raises an exception
## Examples
iex> Cldr.Unit.new! 23, :gallon
#Cldr.Unit<:gallon, 23>
Cldr.Unit.new! 14, :gadzoots
** (Cldr.UnknownUnitError) The unit :gadzoots is not known.
(ex_cldr_units) lib/cldr/unit.ex:57: Cldr.Unit.new!/2
"""
@spec new!(unit() | value(), value() | unit()) :: t() | no_return()
def new!(unit, value, options \\ []) do
case new(unit, value, options) do
{:ok, unit} -> unit
{:error, {exception, message}} -> raise exception, message
end
end
@doc """
Returns a boolean indicating if two units are
of the same unit category.
## Arguments
* `unit_1` and `unit_2` are any units returned by
`Cldr.Unit.new/2` or a valid unit name.
## Returns
* `true` or `false`
## Examples
iex> Cldr.Unit.compatible? :foot, :meter
true
iex> Cldr.Unit.compatible? Cldr.Unit.new!(:foot, 23), :meter
true
iex> Cldr.Unit.compatible? :foot, :liter
false
iex> Cldr.Unit.compatible? "light_year_per_second", "meter_per_gallon"
false
"""
@spec compatible?(unit(), unit()) :: boolean
def compatible?(unit_1, unit_2) do
with {:ok, _unit_1, conversion_1} <- validate_unit(unit_1),
{:ok, _unit_2, conversion_2} <- validate_unit(unit_2),
{:ok, base_unit_1} <- base_unit(conversion_1),
{:ok, base_unit_2} <- base_unit(conversion_2) do
Kernel.to_string(base_unit_1) == Kernel.to_string(base_unit_2)
else
_ -> false
end
end
@doc """
Formats a number into a string according to a unit definition
for the current process's locale and backend.
  The current process's locale is set with
`Cldr.put_locale/1`.
See `Cldr.Unit.to_string/3` for full details.
"""
@spec to_string(list_or_number :: value | t() | [t()]) ::
{:ok, String.t()} | {:error, {atom, binary}}
def to_string(unit) do
locale = Cldr.get_locale()
backend = locale.backend
to_string(unit, backend, locale: locale)
end
@doc """
Formats a number into a string according to a unit definition for a locale.
During processing any `:format_options` of a `Unit.t()` are merged with
`options` with `options` taking precedence.
## Arguments
* `list_or_number` is any number (integer, float or Decimal) or a
`Cldr.Unit.t()` struct or a list of `Cldr.Unit.t()` structs
* `backend` is any module that includes `use Cldr` and therefore
is a `Cldr` backend module. The default is `Cldr.default_backend/0`.
* `options` is a keyword list of options.
## Options
* `:unit` is any unit returned by `Cldr.Unit.known_units/0`. Ignored if
the number to be formatted is a `Cldr.Unit.t()` struct
* `:locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct. The default is `Cldr.get_locale/0`
* `:style` is one of those returned by `Cldr.Unit.styles`.
The current styles are `:long`, `:short` and `:narrow`.
The default is `style: :long`
* `:list_options` is a keyword list of options for formatting a list
which is passed through to `Cldr.List.to_string/3`. This is only
applicable when formatting a list of units.
* Any other options are passed to `Cldr.Number.to_string/2`
which is used to format the `number`
## Returns
* `{:ok, formatted_string}` or
* `{:error, {exception, message}}`
## Examples
iex> Cldr.Unit.to_string Cldr.Unit.new!(:gallon, 123), MyApp.Cldr
{:ok, "123 gallons"}
iex> Cldr.Unit.to_string Cldr.Unit.new!(:gallon, 1), MyApp.Cldr
{:ok, "1 gallon"}
iex> Cldr.Unit.to_string Cldr.Unit.new!(:gallon, 1), MyApp.Cldr, locale: "af"
{:ok, "1 gelling"}
iex> Cldr.Unit.to_string Cldr.Unit.new!(:gallon, 1), MyApp.Cldr, locale: "bs"
{:ok, "1 galon"}
iex> Cldr.Unit.to_string Cldr.Unit.new!(:gallon, 1234), MyApp.Cldr, format: :long
{:ok, "1 thousand gallons"}
iex> Cldr.Unit.to_string Cldr.Unit.new!(:gallon, 1234), MyApp.Cldr, format: :short
{:ok, "1K gallons"}
iex> Cldr.Unit.to_string Cldr.Unit.new!(:megahertz, 1234), MyApp.Cldr
{:ok, "1,234 megahertz"}
iex> Cldr.Unit.to_string Cldr.Unit.new!(:megahertz, 1234), MyApp.Cldr, style: :narrow
{:ok, "1,234MHz"}
iex> unit = Cldr.Unit.new!(123, :foot)
iex> Cldr.Unit.to_string unit, MyApp.Cldr
{:ok, "123 feet"}
iex> Cldr.Unit.to_string 123, MyApp.Cldr, unit: :megabyte, locale: "en", style: :unknown
{:error, {Cldr.UnknownFormatError, "The unit style :unknown is not known."}}
"""
  @spec to_string(t() | list(t()), Cldr.backend() | Keyword.t(), Keyword.t()) ::
          {:ok, String.t()} | {:error, {atom, binary}}

  def to_string(list_or_unit, backend, options \\ [])

  # Options but no backend: resolve the backend from the current locale.
  def to_string(list_or_unit, options, []) when is_list(options) do
    locale = Cldr.get_locale()
    to_string(list_or_unit, locale.backend, options)
  end

  # It's a list of units so we format each of them
  # and combine the list with `Cldr.List.to_string/3` using :list_options.
  def to_string(unit_list, backend, options) when is_list(unit_list) do
    with {locale, _style, options} <- normalize_options(backend, options),
         {:ok, locale} <- backend.validate_locale(locale) do
      options =
        options
        |> Keyword.put(:locale, locale)

      list_options =
        options
        |> Keyword.get(:list_options, [])
        |> Keyword.put(:locale, locale)

      unit_list
      |> Enum.map(&to_string!(&1, backend, options))
      |> Cldr.List.to_string(backend, list_options)
    end
  end

  # A bare number: build a unit from the :unit option first.
  def to_string(number, backend, options) when is_number(number) do
    with {:ok, unit} <- new(options[:unit], number) do
      to_string(unit, backend, options)
    end
  end

  # Now we have a unit, a backend and some options but ratio
  # values need to be converted to floats before formatting.
  def to_string(%Unit{value: %Ratio{}} = unit, backend, options) when is_list(options) do
    unit = ratio_to_float(unit)
    to_string(unit, backend, options)
  end

  # Main clause: render the iolist, flatten it, and collapse runs of
  # whitespace (the regex replaces each run with its final character).
  def to_string(%Unit{} = unit, backend, options) when is_list(options) do
    with {:ok, list} <- to_iolist(unit, backend, options) do
      list
      |> :erlang.iolist_to_binary()
      |> String.replace(~r/([\s])+/, "\\1")
      |> wrap(:ok)
    end
  end
@doc """
  Formats a number into an iolist according to a unit definition
  for the current process's locale and backend.
  The current process's locale is set with
`Cldr.put_locale/1`.
See `Cldr.Unit.to_iolist/3` for full details.
"""
@spec to_iolist(list_or_number :: value | t() | [t()]) ::
{:ok, String.t()} | {:error, {atom, binary}}
def to_iolist(unit) do
locale = Cldr.get_locale()
backend = locale.backend
to_iolist(unit, backend, locale: locale)
end
@doc """
Formats a number into an `iolist` according to a unit definition
for a locale.
During processing any `:format_options` of a `Unit.t()` are merged with
`options` with `options` taking precedence.
## Arguments
* `list_or_number` is any number (integer, float or Decimal) or a
`Cldr.Unit.t()` struct or a list of `Cldr.Unit.t()` structs
* `backend` is any module that includes `use Cldr` and therefore
is a `Cldr` backend module. The default is `Cldr.default_backend/0`.
* `options` is a keyword list of options.
## Options
* `:unit` is any unit returned by `Cldr.Unit.known_units/0`. Ignored if
the number to be formatted is a `Cldr.Unit.t()` struct
* `:locale` is any valid locale name returned by `Cldr.known_locale_names/1`
or a `Cldr.LanguageTag` struct. The default is `Cldr.get_locale/0`
* `:style` is one of those returned by `Cldr.Unit.styles`.
The current styles are `:long`, `:short` and `:narrow`.
The default is `style: :long`
* `:list_options` is a keyword list of options for formatting a list
which is passed through to `Cldr.List.to_string/3`. This is only
applicable when formatting a list of units.
* Any other options are passed to `Cldr.Number.to_string/2`
which is used to format the `number`
## Returns
* `{:ok, io_list}` or
* `{:error, {exception, message}}`
## Examples
iex> Cldr.Unit.to_iolist Cldr.Unit.new!(:gallon, 123), MyApp.Cldr
{:ok, ["123", " gallons"]}
iex> Cldr.Unit.to_iolist 123, MyApp.Cldr, unit: :megabyte, locale: "en", style: :unknown
{:error, {Cldr.UnknownFormatError, "The unit style :unknown is not known."}}
"""
  @spec to_iolist(t() | number(), Cldr.backend() | Keyword.t(), Keyword.t()) ::
          {:ok, list()} | {:error, {module, binary}}

  def to_iolist(unit, backend, options \\ [])

  # Options but no backend: resolve the backend from the current locale.
  def to_iolist(unit, options, []) when is_list(options) do
    locale = Cldr.get_locale()
    to_iolist(unit, locale.backend, options)
  end

  # Core pipeline: format the numeric value, extract the localized patterns
  # for each component of the unit's base conversion, combine them (with the
  # "times" pattern where compound), then apply the "per" pattern if the
  # conversion has a denominator. The unit's own :format_options are merged
  # under the caller's options.
  def to_iolist(%Unit{} = unit, backend, options) when is_list(options) do
    with {locale, style, options} <- normalize_options(backend, options),
         {:ok, locale} <- backend.validate_locale(locale),
         {:ok, style} <- validate_style(style) do
      number = value(unit)
      options = Keyword.merge(unit.format_options, options) |> Keyword.put(:locale, locale)
      {:ok, number_string} = Cldr.Number.to_string(number, backend, options)

      number
      |> extract_patterns(unit.base_conversion, locale, style, backend, options)
      |> combine_patterns(number_string, locale, style, backend, options)
      |> maybe_combine_per_unit(locale, style, backend, options)
      |> wrap(:ok)
    end
  end

  # A bare number: build a unit from the :unit option first.
  def to_iolist(number, backend, options) when is_number(number) do
    with {:ok, unit} <- new(options[:unit], number) do
      to_iolist(unit, backend, options)
    end
  end
defp wrap(term, tag) do
{tag, term}
end
@doc """
Formats a number into a string according to a unit definition
for the current process's locale and backend or raises
on error.
  The current process's locale is set with
`Cldr.put_locale/1`.
See `Cldr.Unit.to_string!/3` for full details.
"""
@spec to_string!(list_or_number :: value | t() | [t()]) ::
String.t() | no_return()
def to_string!(unit) do
locale = Cldr.get_locale()
backend = locale.backend
to_string!(unit, backend, locale: locale)
end
@doc """
Formats a number into a string according to a unit definition
for the current process's locale and backend or raises
on error.
During processing any `:format_options` of a `Unit.t()` are merged with
`options` with `options` taking precedence.
## Arguments
* `number` is any number (integer, float or Decimal) or a
`Cldr.Unit.t()` struct
* `backend` is any module that includes `use Cldr` and therefore
is a `Cldr` backend module. The default is `Cldr.default_backend/0`.
* `options` is a keyword list
## Options
* `:unit` is any unit returned by `Cldr.Unit.known_units/0`. Ignored if
the number to be formatted is a `Cldr.Unit.t()` struct
* `:locale` is any valid locale name returned by `Cldr.known_locale_names/0`
or a `Cldr.LanguageTag` struct. The default is `Cldr.get_locale/0`
* `:style` is one of those returned by `Cldr.Unit.available_styles`.
The current styles are `:long`, `:short` and `:narrow`.
The default is `style: :long`
* Any other options are passed to `Cldr.Number.to_string/2`
which is used to format the `number`
## Returns
* `formatted_string` or
* raises an exception
## Examples
iex> Cldr.Unit.to_string! Cldr.Unit.new!(:gallon, 123), MyApp.Cldr
"123 gallons"
iex> Cldr.Unit.to_string! Cldr.Unit.new!(:gallon, 1), MyApp.Cldr
"1 gallon"
iex> Cldr.Unit.to_string! Cldr.Unit.new!(:gallon, 1), MyApp.Cldr, locale: "af"
"1 gelling"
"""
@spec to_string!(t(), Cldr.backend() | Keyword.t(), Keyword.t()) ::
String.t() | no_return()
def to_string!(unit, backend, options \\ []) do
case to_string(unit, backend, options) do
{:ok, string} -> string
{:error, {exception, message}} -> raise exception, message
end
end
@doc """
Formats a number into an iolist according to a unit definition
for the current process's locale and backend or raises
on error.
  The current process's locale is set with
`Cldr.put_locale/1`.
See `Cldr.Unit.to_iolist!/3` for full details.
"""
@spec to_iolist!(list_or_number :: value | t() | [t()]) ::
list() | no_return()
def to_iolist!(unit) do
locale = Cldr.get_locale()
backend = locale.backend
to_iolist!(unit, backend, locale: locale)
end
@doc """
Formats a number into an iolist according to a unit definition
for the current process's locale and backend or raises
on error.
During processing any `:format_options` of a `Unit.t()` are merged with
`options` with `options` taking precedence.
## Arguments
* `number` is any number (integer, float or Decimal) or a
`Cldr.Unit.t()` struct
* `backend` is any module that includes `use Cldr` and therefore
is a `Cldr` backend module. The default is `Cldr.default_backend/0`.
* `options` is a keyword list
## Options
* `:unit` is any unit returned by `Cldr.Unit.known_units/0`. Ignored if
the number to be formatted is a `Cldr.Unit.t()` struct
* `:locale` is any valid locale name returned by `Cldr.known_locale_names/0`
or a `Cldr.LanguageTag` struct. The default is `Cldr.get_locale/0`
* `:style` is one of those returned by `Cldr.Unit.available_styles`.
The current styles are `:long`, `:short` and `:narrow`.
The default is `style: :long`
* Any other options are passed to `Cldr.Number.to_string/2`
which is used to format the `number`
## Returns
* `io_list` or
* raises an exception
## Examples
iex> Cldr.Unit.to_iolist! Cldr.Unit.new!(:gallon, 123), MyApp.Cldr
["123", " gallons"]
"""
@spec to_iolist!(t(), Cldr.backend() | Keyword.t(), Keyword.t()) ::
list() | no_return()
def to_iolist!(unit, backend, options \\ []) do
case to_iolist(unit, backend, options) do
{:ok, string} -> string
{:error, {exception, message}} -> raise exception, message
end
end
  # A {numerator, denominator} conversion ("per" unit): extract patterns for
  # both sides. The denominator is always pluralized as 1 ("per mile", not
  # "per miles").
  defp extract_patterns(number, {unit_list, per_list}, locale, style, backend, options) do
    {
      extract_patterns(number, unit_list, locale, style, backend, options),
      extract_patterns(1, per_list, locale, style, backend, options)
    }
  end

  # When extracting a list of patterns the objective is to use the singluar
  # form of the pattern for all except the last element which uses the
  # plural form indicated by the number
  defp extract_patterns(number, [{unit, _conversion}], locale, style, backend, options) do
    [to_pattern(number, unit, locale, style, backend, options)]
  end

  defp extract_patterns(number, [{unit, _conversion} | rest], locale, style, backend, options) do
    [
      to_pattern(1, unit, locale, style, backend, options)
      | extract_patterns(number, rest, locale, style, backend, options)
    ]
  end
  # Combine the patterns, merging prefix and units and applying the "times"
  # pattern for compound units. These are heuristics and may not produce a
  # grammatically correct result in every language.
  defp combine_patterns({patterns, per_patterns}, number_string, locale, style, backend, options) do
    {
      combine_patterns(patterns, number_string, locale, style, backend, options),
      combine_patterns(per_patterns, "", locale, style, backend, options)
    }
  end

  # Single pattern: substitute the number string directly.
  defp combine_patterns([pattern], number_string, _locale, _style, _backend, _options) do
    Substitution.substitute(number_string, pattern)
  end

  # Compound unit: the head carries the number; the remaining patterns are
  # substituted with "" (trimmed) and joined with the locale's "times"
  # compound pattern.
  defp combine_patterns([pattern | rest], number_string, locale, style, backend, _options) do
    units = units_for(locale, style, backend)
    times_pattern = get_in(units, [:times, :compound_unit_pattern])

    [
      Substitution.substitute(number_string, pattern)
      | Enum.map(rest, fn p ->
          Substitution.substitute("", p)
          |> Enum.map(&String.trim/1)
        end)
    ]
    |> join_list(times_pattern)
  end

  # Right-folds a list of formatted parts into nested "times" substitutions.
  defp join_list([head, tail], times_pattern) do
    Substitution.substitute([head, tail], times_pattern)
  end

  defp join_list([head | rest], times_pattern) do
    tail = join_list(rest, times_pattern)
    join_list([head, tail], times_pattern)
  end
  # When the conversion had a denominator, wrap the numerator and
  # denominator iolists in the locale's "per" compound pattern.
  defp maybe_combine_per_unit({unit_list, per_units}, locale, style, backend, _options) do
    units = units_for(locale, style, backend)
    per_pattern = get_in(units, [:per, :compound_unit_pattern])

    Substitution.substitute([unit_list, per_units], per_pattern)
  end

  # No "per" component: pass the formatted iolist through unchanged.
  defp maybe_combine_per_unit(unit_list, _locale, _style, _backend, _options) do
    unit_list
  end
  @spec to_pattern(value(), unit(), locale(), style(), Cldr.backend(), Keyword.t()) ::
          list()

  # Directly translatable unit: look up its patterns and pluralize by the
  # number using the backend's cardinal plural rules.
  defp to_pattern(number, unit, locale, style, backend, _options)
       when unit in @translatable_units do
    {:ok, patterns} = pattern_for(locale, style, unit, backend)
    cardinal_module = Module.concat(backend, Number.Cardinal)
    cardinal_module.pluralize(number, locale, patterns)
  end

  # Compile-time generation: one clause per power prefix (e.g. "square_",
  # "cubic_"). Each clause strips the prefix, localizes the remaining unit,
  # then merges in the locale's power compound pattern.
  for {prefix, power} <- Prefix.power_units() do
    localize_key = String.to_atom("power#{power}")
    match = quote do: <<unquote(prefix), "_", var!(unit)::binary>>

    defp to_pattern(number, unquote(match), locale, style, backend, options) do
      units = units_for(locale, style, backend)
      pattern = get_in(units, [unquote(localize_key), :compound_unit_pattern1])
      unit = maybe_translatable_unit(unit)

      pattern
      |> merge_power_prefix(to_pattern(number, unit, locale, style, backend, options))
    end
  end

  # is there an SI prefix? If so, strip it, localize the remaining unit and
  # merge in the locale's SI prefix pattern (one clause per prefix,
  # generated at compile time).
  for {prefix, power} <- Prefix.si_power_prefixes() do
    localize_key = "10p#{power}" |> String.replace("-", "_") |> String.to_atom()
    match = quote do: <<unquote(prefix), var!(unit)::binary>>

    defp to_pattern(number, unquote(match), locale, style, backend, options) do
      units = units_for(locale, style, backend)
      pattern = get_in(units, [unquote(localize_key), :unit_prefix_pattern])
      unit = maybe_translatable_unit(unit)

      pattern
      |> merge_SI_prefix(to_pattern(number, unit, locale, style, backend, options))
    end
  end
  # Merging power and SI prefixes into a pattern is a heuristic since the
  # underlying data does not convey those rules. Each pattern is a 2-element
  # substitution list where the integer is the argument placeholder; the
  # clauses below cover placeholder-first and placeholder-last orderings.
  # SI prefixes ("kilo", "mega", ...) attach to the start of the *last* word.
  @merge_SI_prefix ~r/([^\s]+)$/u

  defp merge_SI_prefix([prefix, place], [place, string]) when is_integer(place) do
    [place, String.replace(string, @merge_SI_prefix, "#{prefix}\\1")]
  end

  defp merge_SI_prefix([prefix, place], [string, place]) when is_integer(place) do
    [String.replace(string, @merge_SI_prefix, "#{prefix}\\1"), place]
  end

  defp merge_SI_prefix([place, prefix], [place, string]) when is_integer(place) do
    [place, String.replace(string, @merge_SI_prefix, "#{prefix}\\1")]
  end

  defp merge_SI_prefix([place, prefix], [string, place]) when is_integer(place) do
    [String.replace(string, @merge_SI_prefix, "#{prefix}\\1"), place]
  end

  # Power prefixes ("square", "cubic") are inserted after the leading
  # whitespace, i.e. before the unit name.
  @merge_power_prefix ~r/^(\s)+/u

  defp merge_power_prefix([prefix, place], [place, string]) when is_integer(place) do
    [place, String.replace(string, @merge_power_prefix, "\\1#{prefix}")]
  end

  defp merge_power_prefix([prefix, place], [string, place]) when is_integer(place) do
    [String.replace(string, @merge_power_prefix, "\\1#{prefix}"), place]
  end

  defp merge_power_prefix([place, prefix], [place, string]) when is_integer(place) do
    [place, String.replace(string, @merge_power_prefix, "\\1#{prefix}")]
  end

  defp merge_power_prefix([place, prefix], [string, place]) when is_integer(place) do
    [String.replace(string, @merge_power_prefix, "\\1#{prefix}"), place]
  end
@doc """
Return the value of the Unit struct
## Arguments
* `unit` is any unit returned by `Cldr.Unit.new/2`
## Returns
* an integer, float or Decimal representing the amount
of the unit
## Example
iex> Cldr.Unit.value Cldr.Unit.new!(:kilogram, 23)
23
"""
@spec value(unit :: t()) :: value()
def value(%Unit{value: value}) do
value
end
@doc """
Decomposes a unit into subunits.

Any list compatible units can be provided
however a list of units of decreasing scale
is recommended. For example `[:foot, :inch]`
or `[:kilometer, :meter, :centimeter, :millimeter]`

## Arguments

* `unit` is any unit returned by `Cldr.Unit.new/2`

* `unit_list` is a list of valid units (one or
  more from the list returned by `Cldr.units/0`. All
  units must be from the same unit category.

* `format_options` is a Keyword list of options
  that is added to the *last* unit in `unit_list`.
  The `format_options` will be applied when calling
  `Cldr.Unit.to_string/3` on the `unit`. The
  default is `[]`.

## Returns

* a list of units after decomposition or an error
  tuple

## Examples

    iex> u = Cldr.Unit.new!(10.3, :foot)
    iex> Cldr.Unit.decompose u, [:foot, :inch]
    [Cldr.Unit.new!(:foot, 10), Cldr.Unit.new!(:inch, Ratio.new(18, 5))]

    iex> u = Cldr.Unit.new!(:centimeter, 1111)
    iex> Cldr.Unit.decompose u, [:kilometer, :meter, :centimeter, :millimeter]
    [Cldr.Unit.new!(:meter, 11), Cldr.Unit.new!(:centimeter, 11)]

"""
@spec decompose(unit :: Unit.t(), unit_list :: [Unit.unit()], options :: Keyword.t()) ::
        [Unit.t()]

def decompose(unit, unit_list, format_options \\ [])

# No subunits requested: the unit is returned unchanged.
def decompose(unit, [], _format_options) do
  [unit]
end

# This is the last unit in the list: keep the (possibly fractional)
# remainder, attaching the caller's format options, but drop it entirely
# when it converts to zero.
def decompose(unit, [h | []], format_options) do
  new_unit = Conversion.convert!(unit, h)

  if zero?(new_unit) do
    []
  else
    [%{new_unit | format_options: format_options}]
  end
end

# Intermediate unit: take the integer part at this scale and recurse on
# the remainder for the smaller scales. Zero integer parts are omitted
# from the result.
def decompose(unit, [h | t], format_options) do
  new_unit = Conversion.convert!(unit, h)
  {integer_unit, remainder} = int_rem(new_unit)

  if zero?(integer_unit) do
    decompose(remainder, t, format_options)
  else
    [integer_unit | decompose(remainder, t, format_options)]
  end
end
@doc """
Localizes a unit according to the current
processes locale and backend.

The current process's locale is set with
`Cldr.put_locale/1`.

See `Cldr.Unit.localize/3` for further
details.

"""
def localize(unit) do
  locale = Cldr.get_locale()
  backend = locale.backend
  # NOTE(review): the locale struct is passed as the third (`options`)
  # argument of `localize/3`, which the `is_atom(backend)` clause then
  # forwards to `Preference.preferred_units/3` as its options. Confirm
  # that a bare `LanguageTag` is accepted there rather than a keyword
  # list such as `[locale: locale]`.
  localize(unit, backend, locale)
end
@doc """
Localizes a unit according to a territory

A territory can be derived from a `locale_name`
or `Cldr.LanguageTag.t()`.

Use this function if you have a unit which
should be presented in a user interface using
units relevant to the audience. For example, a
unit `#Cldr.Unit<100, :meter>` might be better
presented to a US audience as `#Cldr.Unit<328, :foot>`.

## Arguments

* `unit` is any unit returned by `Cldr.Unit.new/2`

* `backend` is any module that includes `use Cldr` and therefore
  is a `Cldr` backend module.

* `options` is a keyword list of options

## Options

* `:locale` is any valid locale name returned by `Cldr.known_locale_names/0`
  or a `Cldr.LanguageTag` struct. The default is `backend.get_locale/0`

* `:territory` is any valid territory code returned by
  `Cldr.known_territories/0`. The default is the territory defined
  as part of the `:locale`. The option `:territory` has a precedence
  over the territory in a locale.

* `:usage` is the way in which the unit is intended
  to be used. The available `usage` varyies according
  to the unit category. See `Cldr.Unit.preferred_units/3`.

## Examples

    iex> unit = Cldr.Unit.new!(1.83, :meter)
    iex> Cldr.Unit.localize(unit, usage: :person_height, territory: :US)
    [Cldr.Unit.new!(:foot, 6), Cldr.Unit.new!(:inch, Ratio.new(6485183463413016, 137269716642252725))]

"""
def localize(unit, backend, options \\ [])

# Two-argument form `localize(unit, options)`: the options land in the
# `backend` position (with `[]` as the defaulted third argument), so we
# resolve the process locale/backend and re-dispatch.
def localize(%Unit{} = unit, options, []) when is_list(options) do
  locale = Cldr.get_locale()
  options = Keyword.merge([locale: locale], options)
  localize(unit, locale.backend, options)
end

# Full form: ask the preference engine for the target unit list for this
# backend/options and decompose the unit into those units.
def localize(%Unit{} = unit, backend, options) when is_atom(backend) do
  with {:ok, unit_list, format_options} <- Preference.preferred_units(unit, backend, options) do
    decompose(unit, unit_list, format_options)
  end
end
@doc """
Returns a new unit of the same unit
type but with a zero value.

## Argument

* `unit` is any unit returned by `Cldr.Unit.new/2`

## Example

    iex> u = Cldr.Unit.new!(:foot, 23.3)
    #Cldr.Unit<:foot, 23.3>
    iex> Cldr.Unit.zero(u)
    #Cldr.Unit<:foot, 0.0>

"""
def zero(%Unit{value: current_value} = unit) do
  # Preserve the numeric representation of the original value:
  # integer -> 0, float -> 0.0, anything else (e.g. Decimal/Ratio) -> Decimal zero.
  zero_value =
    cond do
      is_integer(current_value) -> 0
      is_float(current_value) -> 0.0
      true -> Decimal.new(0)
    end

  %Unit{unit | value: zero_value}
end
@doc """
Returns a boolean indicating whether a given unit
has a zero value.

## Argument

* `unit` is any unit returned by `Cldr.Unit.new/2`

## Examples

    iex> u = Cldr.Unit.new!(:foot, 23.3)
    #Cldr.Unit<:foot, 23.3>
    iex> Cldr.Unit.zero?(u)
    false

    iex> u = Cldr.Unit.new!(:foot, 0)
    #Cldr.Unit<:foot, 0>
    iex> Cldr.Unit.zero?(u)
    true

"""
# Integers and floats: direct comparison (0 == 0.0 is true in Elixir).
def zero?(%Unit{value: value}) when is_number(value) do
  value == 0
end

# Decimal zero, computed once at compile time for the comparison below.
@decimal_0 Decimal.new(0)

def zero?(%Unit{value: %Decimal{} = value}) do
  Cldr.Math.decimal_compare(value, @decimal_0) == :eq
end

# Ratios that are zero are just integers
# so anything that is a %Ratio{} is not zero
def zero?(%Unit{value: %Ratio{}}) do
  false
end
@doc """
Return a list of known measurement systems.

## Example

    iex> Cldr.Unit.known_measurement_systems()
    %{
      metric: %{alias: nil, description: "Metric System"},
      uksystem: %{
        alias: :imperial,
        description: "UK System of measurement: feet, pints, etc.; pints are 20oz"
      },
      ussystem: %{
        alias: nil,
        description: "US System of measurement: feet, pints, etc.; pints are 16oz"
      }
    }

"""
# Renamed from `@measurement_systems` so it is not confused with the
# territory->measurement-system map that is assigned to an attribute of
# that same name further down this module (attribute reassignment would
# silently shadow this value for later readers).
@known_measurement_systems Cldr.Config.measurement_systems()

def known_measurement_systems do
  @known_measurement_systems
end
# Precompute, at compile time, a map from unit category to the list of
# usages that category defines in the CLDR preference data.
@category_usage Cldr.Config.units()
                |> Map.get(:preferences)
                |> Map.new(fn {category, usages} -> {category, Map.keys(usages)} end)

@doc """
Returns a mapping between Unit categories
and the uses they define.

"""
def unit_category_usage do
  @category_usage
end
@doc """
Returns a list of the known unit categories.

## Example

    iex> Cldr.Unit.known_unit_categories
    [:acceleration, :angle, :area, :compound, :concentr, :consumption, :coordinate, :digital,
    :duration, :electric, :energy, :force, :frequency, :graphics, :length, :light, :mass,
    :power, :pressure, :speed, :temperature, :torque, :volume]

"""
# `@unit_tree` is defined earlier in this module (outside this chunk);
# the `:"10p"` key holds power-of-ten prefixes, not a real category.
@unit_categories Map.keys(@unit_tree) -- [:"10p"]

def known_unit_categories do
  @unit_categories
end
@doc """
Returns a mapping from unit categories to the
base unit.

"""
# Materialised once at compile time from the CLDR configuration data.
@base_units Map.new(Map.get(Cldr.Config.units(), :base_units))

def base_units do
  @base_units
end
@doc """
Returns the base unit for a given `Cldr.Unit.t()`
or `atom()`.

## Argument

* `unit` is either a `t:Cldr.Unit.t()` or an `atom`

## Returns

* `{:ok, base_unit}` or

* `{:error, {exception, reason}}`

## Example

    iex> Cldr.Unit.base_unit :square_kilometer
    {:ok, :square_meter}

    iex> Cldr.Unit.base_unit :square_table
    {:error, {Cldr.UnknownUnitError, "Unknown unit was detected at \\"table\\""}}

"""
# Names are validated first; the resulting conversion structure is then
# reduced to its canonical base unit.
def base_unit(unit_name) when is_atom(unit_name) or is_binary(unit_name) do
  with {:ok, _unit, conversion} <- validate_unit(unit_name) do
    base_unit(conversion)
  end
end

# def base_unit(%{base_unit: [base_name]}) when is_atom(base_name) do
#   {:ok, base_name}
# end

def base_unit(%Unit{base_conversion: conversion}) do
  base_unit(conversion)
end

def base_unit(conversion) when is_list(conversion) or is_tuple(conversion) do
  Parser.canonical_base_unit(conversion)
end
# Build the error tuple returned when no base unit can be resolved for
# the given unit name.
def unknown_base_unit_error(unit_name) do
  message = "Base unit for #{inspect(unit_name)} is not known"
  {Cldr.Unit.UnknownBaseUnitError, message}
end
# Fix: the deprecation message was missing its closing backtick.
@deprecated "Use `Cldr.Unit.known_unit_categories/0`"
defdelegate unit_categories(), to: __MODULE__, as: :known_unit_categories
@doc """
Returns the units category for a given unit

## Options

* `unit` is any unit returned by
  `Cldr.Unit.new/2`

## Returns

* `{:ok, category}` or

* `{:error, {exception, message}}`

## Examples

    iex> Cldr.Unit.unit_category :pint_metric
    {:ok, :volume}

    iex> Cldr.Unit.unit_category :stone
    {:ok, :mass}

"""
@spec unit_category(Unit.t() | String.t() | atom()) ::
        {:ok, atom()} | {:error, {module(), String.t()}}
def unit_category(unit) do
  # Resolve the unit to its base unit, then look that base unit up in the
  # (string-keyed) base-unit -> category map.
  # NOTE(review): if the base unit is absent from the map this returns
  # `{:ok, nil}` rather than an error tuple — confirm that is intended.
  with {:ok, _unit, conversion} <- validate_unit(unit),
       {:ok, base_unit} <- base_unit(conversion) do
    {:ok, Map.get(base_unit_category_map(), Kernel.to_string(base_unit))}
  end
end
# Fix: the deprecation message was missing its closing backtick.
@deprecated "Please use `Cldr.Unit.unit_category/1`"
def unit_type(unit) do
  unit_category(unit)
end
# Compile-time inversion of the category -> base-unit configuration into a
# base-unit-name (string) -> category map.
@base_unit_category_map Cldr.Config.units()
                        |> Map.get(:base_units)
                        |> Enum.map(fn {k, v} -> {to_string(v), k} end)
                        |> Map.new()

@doc """
Returns a mapping of base units to their respective
unit categories.

Base units are a common unit for a given unit
category which are used in two scenarios:

1. When converting between units. If two units
   have the same base unit they can be converted
   to each other. See `Cldr.Unit.Conversion`.

2. When identifying the preferred units for a given
   locale or territory, the base unit is used to
   aid identification of preferences for given use
   cases. See `Cldr.Unit.Preference`.

## Example

    => Cldr.Unit.base_unit_category_map
    %{
      "kilogram_square_meter_per_cubic_second_ampere" => :voltage,
      "kilogram_meter_per_meter_square_second" => :torque,
      "square_meter" => :area,
      "kilogram" => :mass,
      "kilogram_square_meter_per_square_second" => :energy,
      "revolution" => :angle,
      "candela_per_square_meter" => :luminance,
      ...
    }

"""
@spec base_unit_category_map :: map()
def base_unit_category_map do
  @base_unit_category_map
end
@doc """
Returns the known styles for a unit.

## Example

    iex> Cldr.Unit.styles
    [:long, :short, :narrow]

"""
def styles, do: @styles
@doc """
Returns the default formatting style.

## Example

    iex> Cldr.Unit.default_style
    :long

"""
def default_style, do: @default_style
# Returns a map of unit preferences
#
# Units of measure vary country by country. While
# most countries standardize on the metric system,
# others use the US or UK systems of measure.
#
# When presenting a unit to an end user it is appropriate
# to do so using units familiar and relevant to that
# end user.
#
# The data returned by this function supports the
# opportunity to convert a given unit to meet this
# requirement.
#
# Unit preferences can vary by usage, not just territory,
# Therefore the data is structured according to unit
# category and unit usage.
@doc false
@unit_preferences Cldr.Config.units() |> Map.get(:preferences)
@spec unit_preferences() :: map()
# Precision (decimal digits) used when rounding converted thresholds.
@rounding 10
def unit_preferences do
  for {category, usages} <- @unit_preferences, into: Map.new() do
    usages =
      for {usage, preferences} <- usages, into: Map.new() do
        preferences =
          # `reduce_peeking` lets us look at the *next* preference while
          # processing the current one.
          Cldr.Enum.reduce_peeking(preferences, [], fn
            # Current and next preference target the same regions: convert
            # this preference's `geq` threshold into base-unit terms so
            # successive thresholds for a region are comparable.
            %{regions: regions} = pref, [%{regions: regions} | _rest], acc ->
              %{units: units, geq: geq} = pref

              value =
                Unit.new!(hd(units), geq)
                |> Conversion.convert_to_base_unit!()
                |> Math.round(@rounding)
                |> Map.get(:value)

              {:cont, acc ++ [%{pref | geq: Ratio.to_float(value)}]}

            # Last preference for a region set: it applies from zero.
            pref, _rest, acc ->
              pref = %{pref | geq: 0}
              {:cont, acc ++ [pref]}
          end)

        {usage, preferences}
      end

    {category, usages}
  end
end
@doc false
# Exposes the rounding precision used when normalising preference thresholds.
def rounding, do: @rounding
@doc false
# Fetches the localized unit formats for `locale` in the given `style`
# from the backend's generated `Unit` module.
#
# NOTE(review): `Module.concat(backend, :"Elixir.Unit")` resolves to
# `<backend>.Unit`; the fully-qualified atom form avoids accidentally
# picking up the `Unit` alias active inside this module.
def units_for(locale, style \\ default_style(), backend \\ Cldr.default_backend()) do
  module = Module.concat(backend, :"Elixir.Unit")
  module.units_for(locale, style)
end
# Compile-time map of territory code -> measurement system, derived from
# the CLDR territory data.
@measurement_systems Map.new(Cldr.Config.territories(), fn {territory, info} ->
                       {territory, info.measurement_system}
                     end)

@doc """
Returns a map of measurement systems by territory

"""
@spec measurement_systems() :: map()
def measurement_systems do
  @measurement_systems
end
@doc """
Returns the default measurement system for a territory
in a given category.

## Arguments

* `territory` is any valid territory returned by
  `Cldr.validate_territory/1`

* `category` is any measurement system category.
  The known categories are `:default`, `:temperature`
  and `:paper_size`. The default category is `:default`.

## Examples

    iex> Cldr.Unit.measurement_system_for :US
    :ussystem

    iex> Cldr.Unit.measurement_system_for :GB
    :uksystem

    iex> Cldr.Unit.measurement_system_for :AU
    :metric

    iex> Cldr.Unit.measurement_system_for :US, :temperature
    :ussystem

    iex> Cldr.Unit.measurement_system_for :GB, :temperature
    :uksystem

"""
# Fix: the spec previously said `:uk_system` but the actual value (see the
# doctests above and `known_measurement_systems/0`) is `:uksystem`.
@spec measurement_system_for(atom(), atom()) ::
        :metric | :ussystem | :uksystem | {:error, {module(), String.t()}}
def measurement_system_for(territory, category \\ :default) do
  with {:ok, territory} <- Cldr.validate_territory(territory) do
    measurement_systems()
    |> get_in([territory, category])
  end
end
@doc false
# Looks up the formatting pattern for `unit` in the locale's unit data for
# the given `style`. Both the style and the unit are validated first.
def pattern_for(%LanguageTag{cldr_locale_name: locale_name}, style, unit, backend) do
  with {:ok, style} <- validate_style(style),
       {:ok, unit, _conversion} <- validate_unit(unit) do
    units = units_for(locale_name, style, backend)
    pattern = Map.get(units, unit)
    {:ok, pattern}
  end
end

# Accepts a locale name; validates it into a `LanguageTag` and re-dispatches.
def pattern_for(locale_name, style, unit, backend) do
  with {:ok, locale} <- backend.validate_locale(locale_name) do
    pattern_for(locale, style, unit, backend)
  end
end
# Looks up the "per" pattern (e.g. "{0} per {1}") for a unit, falling back
# to the locale's generic compound-unit pattern when the unit does not
# define one of its own.
def per_pattern_for(%LanguageTag{cldr_locale_name: locale_name}, style, unit, backend) do
  with {:ok, style} <- validate_style(style),
       {:ok, unit, _conversion} <- validate_unit(unit) do
    units = units_for(locale_name, style, backend)
    pattern = get_in(units, [unit, :per_unit_pattern])
    default_pattern = get_in(units, [:per, :compound_unit_pattern])
    {:ok, pattern || default_pattern}
  end
end

# Accepts a locale name; validates it into a `LanguageTag` and re-dispatches.
def per_pattern_for(locale_name, style, unit, backend) do
  with {:ok, locale} <- backend.validate_locale(locale_name) do
    per_pattern_for(locale, style, unit, backend)
  end
end
# Splits the caller-supplied options into the resolved locale, style and a
# cleaned option list: `:locale` is removed (it is returned separately)
# and `:style` is pinned to the resolved value.
defp normalize_options(backend, options) do
  {locale, remaining} = Keyword.pop(options, :locale, backend.get_locale())
  style = Keyword.get(remaining, :style, @default_style)

  {locale, style, Keyword.put(remaining, :style, style)}
end
@doc """
Validates a unit name and normalizes it,

A unit name can be expressed as:

* an `atom()` in which case the unit must be
  localizable in CLDR directly

* or a `String.t()` in which case it is parsed
  into a list of composable subunits that
  can be converted but are not guaranteed to
  be output as a localized string.

## Arguments

* `unit_name` is an `atom()` or `String.t()`

## Returns

* `{:ok, canonical_unit_name, conversion}` where
  `canonical_unit_name` is the normalized unit name
  and `conversion` is an opaque structure used
  to convert this unit into its base unit or

* `{:error, {exception, reason}}`

## Notes

A returned `unit_name` that is an atom is directly
localisable (CLDR has translation data for the unit).

A `unit_name` that is a `String.t()` is composed of
one or more unit names that need to be resolved in
order for the `unit_name` to be localised.

The difference is an implementation detail and should
not be of concern to the user of this library.

## Examples

    iex> Cldr.Unit.validate_unit :meter
    {
      :ok,
      :meter,
      [meter: %Cldr.Unit.Conversion{base_unit: [:meter], factor: 1, offset: 0}]
    }

    iex> Cldr.Unit.validate_unit "meter"
    {:ok, :meter,
    [meter: %Cldr.Unit.Conversion{base_unit: [:meter], factor: 1, offset: 0}]}

    iex> Cldr.Unit.validate_unit "miles_per_liter"
    {:error, {Cldr.UnknownUnitError, "Unknown unit was detected at \\"s\\""}}

    iex> Cldr.Unit.validate_unit "mile_per_liter"
    {:ok, "mile_per_liter",
    {[
       mile:
         %Cldr.Unit.Conversion{
           base_unit: [:meter],
           factor: Ratio.new(905980129838867985, 562949953421312),
           offset: 0
         }
     ],
     [
       liter:
         %Cldr.Unit.Conversion{
           base_unit: [:cubic_meter],
           factor: Ratio.new(1152921504606847, 1152921504606846976),
           offset: 0
         }
     ]}}

"""
# Fast path: the atom is directly translatable (`@translatable_units` is
# defined earlier in this module, outside this chunk).
def validate_unit(unit_name) when unit_name in @translatable_units do
  {:ok, unit_name, [{unit_name, Conversions.conversion_for!(unit_name)}]}
end
# Known unit aliases, resolved at compile time.
@aliases Alias.aliases() |> Map.keys()

# Aliased names are rewritten to their canonical name and re-validated.
def validate_unit(unit_name) when unit_name in @aliases do
  unit_name
  |> Alias.alias()
  |> validate_unit
end
# Validate a binary unit name. Normalization may map the name onto a
# known, directly-translatable atom; otherwise the string is parsed into
# composable subunits. (Replaces the former `if is_atom/else` conditional
# flagged by a FIXME with a pattern-matched `case`.)
def validate_unit(unit_name) when is_binary(unit_name) do
  case unit_name |> normalize_unit_name() |> maybe_translatable_unit() do
    translatable when is_atom(translatable) ->
      # Known unit: re-dispatch so the translatable-atom clause handles it.
      validate_unit(translatable)

    parseable_name ->
      with {:ok, parsed} <- Parser.parse_unit(parseable_name) do
        canonical_name =
          parsed
          |> Parser.canonical_unit_name()
          |> maybe_translatable_unit()

        {:ok, canonical_name, parsed}
      end
  end
end
# Atoms that were not directly translatable (earlier clauses) are
# round-tripped through the binary validation path.
def validate_unit(unit_name) when is_atom(unit_name) do
  unit_name
  |> Atom.to_string()
  |> validate_unit
end

# An already-constructed unit carries its own validated name/conversion.
def validate_unit(%Unit{unit: unit_name, base_conversion: base_conversion}) do
  {:ok, unit_name, base_conversion}
end

# Anything else is an unknown unit.
def validate_unit(unknown_unit) do
  {:error, unit_error(unknown_unit)}
end
@doc false
# Canonicalize a user-supplied unit name: spaces and hyphens both become
# underscores, so "square meter" and "square-meter" normalize identically.
def normalize_unit_name(name) when is_binary(name) do
  name
  |> String.replace(" ", "_")
  |> String.replace("-", "_")
end
# Returns the atom form of `name` when it is a known (translatable) unit,
# otherwise returns the string unchanged. `String.to_existing_atom/1` is
# used deliberately so unknown names never allocate new atoms; the
# ArgumentError it raises for non-existing atoms is rescued and treated
# as "not translatable".
def maybe_translatable_unit(name) do
  atom_name = String.to_existing_atom(name)

  if atom_name in known_units() do
    atom_name
  else
    name
  end
rescue
  ArgumentError ->
    name
end
@doc """
Validates a unit style and normalizes it to a
standard downcased atom form

"""
def validate_style(style) when style in @styles do
  {:ok, style}
end

# Binary styles are downcased and converted with `String.to_existing_atom/1`
# (which raises ArgumentError for unknown names, and never creates atoms
# from user input).
#
# Fix: this clause previously used `catch ArgumentError ->`, but `catch`
# only intercepts thrown values and exits — a *raised* ArgumentError (what
# `String.to_existing_atom/1` produces) sailed straight through and crashed
# the caller instead of returning an error tuple. `rescue` is the correct
# construct, matching the sibling `maybe_translatable_unit/1`.
def validate_style(style) when is_binary(style) do
  style
  |> String.downcase()
  |> String.to_existing_atom()
  |> validate_style()
rescue
  ArgumentError ->
    {:error, style_error(style)}
end

def validate_style(style) do
  {:error, style_error(style)}
end
@doc """
Convert a ratio Unit to a float unit

"""
def ratio_to_float(%Unit{value: %Ratio{} = value} = unit) do
  value = Ratio.to_float(value)
  %{unit | value: value}
end

# Non-ratio values pass through untouched.
def ratio_to_float(%Unit{} = unit) do
  unit
end
@doc false
# Error tuple for a missing unit argument.
def unit_error(nil) do
  message = "A unit must be provided, for example 'Cldr.Unit.string(123, unit: :meter)'."
  {Cldr.UnknownUnitError, message}
end

# Error tuple for an unrecognised unit.
def unit_error(unit) do
  {Cldr.UnknownUnitError, "The unit #{inspect(unit)} is not known."}
end
@doc false
# Error tuple for an unrecognised unit category.
def unit_category_error(category) do
  {Cldr.Unit.UnknownUnitCategoryError,
   "The unit category " <> inspect(category) <> " is not known."}
end
@doc false
# Error tuple for an unrecognised formatting style.
def style_error(style) do
  {Cldr.UnknownFormatError, "The unit style " <> inspect(style) <> " is not known."}
end
@doc false
# Error tuple for an attempted operation between units from different
# categories (e.g. adding a length to a volume).
def incompatible_units_error(unit_1, unit_2) do
  message =
    "Operations can only be performed between units of the same category. " <>
      "Received #{inspect(unit_1)} and #{inspect(unit_2)}"

  {Unit.IncompatibleUnitsError, message}
end
@doc false
# Error tuple for a usage that the given unit category does not define.
def unknown_usage_error(category, usage) do
  message =
    "The unit category #{inspect(category)} does not define a usage #{inspect(usage)}"

  {Cldr.Unit.UnknownUsageError, message}
end
# Error tuple for a unit that can be converted but has no CLDR
# translation data.
def unit_not_translatable_error(unit) do
  {Cldr.Unit.UnitNotTranslatableError, "The unit #{inspect(unit)} is not translatable"}
end
# Splits a unit into its integer part (rounded toward zero) and the
# fractional remainder, both as units. Used by `decompose/3`.
defp int_rem(unit) do
  integer = Unit.round(unit, 0, :down) |> Math.trunc()
  remainder = Math.sub(unit, integer)
  {integer, remainder}
end
end
| 28.534118 | 104 | 0.65047 |
1c703a6cf10e8aa309de391559500cb0823e1a7b | 71 | ex | Elixir | lib/theme_preview_web/views/page_view.ex | lee-dohm/atom-theme-preview | be64d8be2c2cdaaffe9366b18ea0105f409bb492 | [
"MIT"
] | 1 | 2017-11-13T05:51:57.000Z | 2017-11-13T05:51:57.000Z | lib/theme_preview_web/views/page_view.ex | lee-dohm/atom-theme-preview | be64d8be2c2cdaaffe9366b18ea0105f409bb492 | [
"MIT"
] | null | null | null | lib/theme_preview_web/views/page_view.ex | lee-dohm/atom-theme-preview | be64d8be2c2cdaaffe9366b18ea0105f409bb492 | [
"MIT"
] | null | null | null | defmodule ThemePreviewWeb.PageView do
use ThemePreviewWeb, :view
end
| 17.75 | 37 | 0.830986 |
1c707878a0cca51b4c3dd63c309ebdf4a5058cc9 | 653 | ex | Elixir | lib/plug_utm.ex | honeypotio/plug_utm | 5eef21202b11018c1bb75527e8fd26f889ebc019 | [
"MIT"
] | 5 | 2016-03-01T17:25:04.000Z | 2020-01-09T16:33:56.000Z | lib/plug_utm.ex | honeypotio/plug_utm | 5eef21202b11018c1bb75527e8fd26f889ebc019 | [
"MIT"
] | null | null | null | lib/plug_utm.ex | honeypotio/plug_utm | 5eef21202b11018c1bb75527e8fd26f889ebc019 | [
"MIT"
] | 2 | 2019-07-02T06:40:22.000Z | 2019-12-22T10:47:19.000Z | defmodule PlugUtm do
import Plug.Conn
def init(options) do
options
end
def call(conn, _options) do
conn
|> fetch_cookies
|> fetch_query_params
|> store_utm_params
end
defp store_utm_params(conn) do
conn.query_params
|> Map.keys
|> Enum.filter(&starting_with_utm/1)
|> Enum.reduce(conn, &key_to_cookie/2)
end
defp key_to_cookie(key, conn) do
"utm_" <> type = key
name = "utm.#{type}"
value = conn.query_params[key]
put_resp_cookie(conn, name, value)
end
defp starting_with_utm(param_name) do
case param_name do
"utm_" <> _ -> true
_ -> false
end
end
end
| 17.648649 | 42 | 0.638591 |
1c7083efff8b1700cf1144bb36c8f2cc5d517514 | 4,749 | exs | Elixir | apps/astarte_appengine_api/test/astarte_appengine_api_web/controllers/interface_values_controller_test.exs | matt-mazzucato/astarte | 34d84941a5019efc42321052f7f34b7d907a38f2 | [
"Apache-2.0"
] | 191 | 2018-03-30T13:23:08.000Z | 2022-03-02T12:05:32.000Z | apps/astarte_appengine_api/test/astarte_appengine_api_web/controllers/interface_values_controller_test.exs | matt-mazzucato/astarte | 34d84941a5019efc42321052f7f34b7d907a38f2 | [
"Apache-2.0"
] | 402 | 2018-03-30T13:37:00.000Z | 2022-03-31T16:47:10.000Z | apps/astarte_appengine_api/test/astarte_appengine_api_web/controllers/interface_values_controller_test.exs | matt-mazzucato/astarte | 34d84941a5019efc42321052f7f34b7d907a38f2 | [
"Apache-2.0"
] | 24 | 2018-03-30T13:29:48.000Z | 2022-02-28T11:10:26.000Z | #
# This file is part of Astarte.
#
# Copyright 2017 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule Astarte.AppEngine.APIWeb.InterfaceValuesControllerTest do
  use Astarte.AppEngine.APIWeb.ConnCase

  alias Astarte.AppEngine.API.DatabaseTestHelper
  alias Astarte.AppEngine.API.JWTTestHelper

  # Create the test keyspace once for the whole module and destroy it when
  # the suite is done.
  setup_all do
    {:ok, _client} = DatabaseTestHelper.create_test_keyspace()

    on_exit(fn ->
      DatabaseTestHelper.destroy_local_test_keyspace()
    end)

    :ok
  end

  # Seed fresh data before every test and provide a conn that carries an
  # all-access JWT so requests are authorized.
  setup %{conn: conn} do
    DatabaseTestHelper.seed_data()

    authorized_conn =
      conn
      |> put_req_header("accept", "application/json")
      |> put_req_header("authorization", "bearer #{JWTTestHelper.gen_jwt_all_access_token()}")

    {:ok, conn: authorized_conn}
  end

  describe "index" do
    # A malformed (non-decodable) device id must be rejected with 400.
    test "lists all interfaces using a invalid device id", %{conn: conn} do
      conn =
        get(
          conn,
          interface_values_path(
            conn,
            :index,
            "autotestrealm",
            "zzzzzzzzzzzf0VMRgIBAQAAAAAAAAAAAA"
          )
        )

      assert json_response(conn, 400)["errors"] == %{"detail" => "Bad request"}
    end

    test "lists all interfaces", %{conn: conn} do
      conn =
        get(
          conn,
          interface_values_path(
            conn,
            :index,
            "autotestrealm",
            "f0VMRgIBAQAAAAAAAAAAAA"
          )
        )

      assert Enum.sort(json_response(conn, 200)["data"]) == [
               "com.example.PixelsConfiguration",
               "com.example.TestObject",
               "com.test.LCDMonitor",
               "com.test.SimpleStreamTest"
             ]
    end

    # Exercises the main read paths: whole-interface fetch, single property
    # path, datastream values, downsampling and object aggregation.
    test "get interface values", %{conn: conn} do
      expected_reply = %{
        "time" => %{"from" => 8, "to" => 20},
        "lcdCommand" => "SWITCH_ON",
        "weekSchedule" => %{
          "2" => %{"start" => 12, "stop" => 15},
          "3" => %{"start" => 15, "stop" => 16},
          "4" => %{"start" => 16, "stop" => 18}
        }
      }

      from_path_conn =
        get(
          conn,
          interface_values_path(
            conn,
            :show,
            "autotestrealm",
            "f0VMRgIBAQAAAAAAAAAAAA",
            "com.test.LCDMonitor"
          )
        )

      assert json_response(from_path_conn, 200)["data"] == expected_reply

      # Reading a single property leaf returns its bare value.
      property_conn =
        get(
          conn,
          "/v1/autotestrealm/devices/f0VMRgIBAQAAAAAAAAAAAA/interfaces/com.test.LCDMonitor/time/to"
        )

      assert json_response(property_conn, 200)["data"] == 20

      expected_reply = [
        %{"timestamp" => "2017-09-28T04:05:00.000Z", "value" => 0},
        %{"timestamp" => "2017-09-28T04:06:00.000Z", "value" => 1},
        %{"timestamp" => "2017-09-28T04:07:00.000Z", "value" => 2},
        %{"timestamp" => "2017-09-29T05:07:00.000Z", "value" => 3},
        %{"timestamp" => "2017-09-30T07:10:00.000Z", "value" => 4}
      ]

      datastream_conn =
        get(
          conn,
          "/v1/autotestrealm/devices/f0VMRgIBAQAAAAAAAAAAAA/interfaces/com.test.SimpleStreamTest/0/value"
        )

      assert json_response(datastream_conn, 200)["data"] == expected_reply

      # Downsampling to 3 keeps first, last and a representative middle sample.
      expected_reply = [
        %{"timestamp" => "2017-09-28T04:05:00.000Z", "value" => 0},
        %{"timestamp" => "2017-09-28T04:07:00.000Z", "value" => 2},
        %{"timestamp" => "2017-09-30T07:10:00.000Z", "value" => 4}
      ]

      downsampled_conn =
        get(
          conn,
          "/v1/autotestrealm/devices/f0VMRgIBAQAAAAAAAAAAAA/interfaces/com.test.SimpleStreamTest/0/value?downsample_to=3"
        )

      assert json_response(downsampled_conn, 200)["data"] == expected_reply

      # Object-aggregated interfaces return one map per sample.
      expected_reply = [
        %{"string" => "aaa", "timestamp" => "2017-09-30T07:10:00.000Z", "value" => 1.1},
        %{"string" => "bbb", "timestamp" => "2017-09-30T07:12:00.000Z", "value" => 2.2},
        %{"string" => "ccc", "timestamp" => "2017-09-30T07:13:00.000Z", "value" => 3.3}
      ]

      object_conn =
        get(
          conn,
          "/v1/autotestrealm/devices/f0VMRgIBAQAAAAAAAAAAAA/interfaces/com.example.TestObject/"
        )

      assert json_response(object_conn, 200)["data"] == expected_reply
    end
  end
end
| 29.68125 | 121 | 0.57928 |
1c7090b6fe5e054c2e55e199eb0ae347574e2344 | 4,120 | exs | Elixir | test/features/grouping_stage_test.exs | octosteve/remote_retro | 3385b0db3c2daab934ce12a2f7642a5f10ac5147 | [
"MIT"
] | null | null | null | test/features/grouping_stage_test.exs | octosteve/remote_retro | 3385b0db3c2daab934ce12a2f7642a5f10ac5147 | [
"MIT"
] | null | null | null | test/features/grouping_stage_test.exs | octosteve/remote_retro | 3385b0db3c2daab934ce12a2f7642a5f10ac5147 | [
"MIT"
] | null | null | null | defmodule GroupingStageTest do
use RemoteRetro.IntegrationCase, async: false
alias RemoteRetro.Idea
import ShorterMaps
describe "ideas in the grouping stage" do
setup [:persist_ideas_for_retro]
@tag [
retro_stage: "grouping",
ideas: [%Idea{category: "sad", body: "splinters in the codebase", x: 105.5, y: 100.1}],
]
test "appear on the interface with coordinates mapped to transforms", ~M{retro, session} do
session = visit_retro(session, retro)
idea_coordinates = parse_transform_coordinates_for_card(session, "splinters in the codebase")
assert %{"x" => "105.5", "y" => "100.1"} = idea_coordinates
end
@tag [
retro_stage: "grouping",
ideas: [%Idea{category: "sad", body: "rampant sickness", x: 80.5, y: 300.3}],
]
test "can be drag-and-dropped on one client and have their position update across all clients", ~M{retro, session, non_facilitator} do
session_one = visit_retro(session, retro)
session_two = new_authenticated_browser_session(non_facilitator)
session_two = visit_retro(session_two, retro)
idea_coordinates_before =
session_two
|> parse_transform_coordinates_for_card("rampant sickness")
drag_idea(session_one, "rampant sickness", to_center_of: ".grouping-board")
idea_coordinates_after =
session_two
|> parse_transform_coordinates_for_card("rampant sickness")
refute idea_coordinates_before == idea_coordinates_after
end
@tag [
retro_stage: "grouping",
ideas: [
%Idea{category: "sad", body: "rampant sickness", x: 0.0, y: 200.0},
%Idea{category: "sad", body: "getting sickness", x: 10.0, y: 210.0},
],
]
test "ideas dynamically remove bolding when out of proximity", ~M{retro, session} do
session = visit_retro(session, retro)
session |> assert_count_of_emboldened_ideas_to_be(2)
session |> drag_idea("rampant sickness", to_center_of: ".grouping-board")
session |> assert_count_of_emboldened_ideas_to_be(0)
end
@tag [
retro_stage: "grouping",
ideas: [
%Idea{category: "sad", body: "rampant sickness", x: 0.0, y: 200.0},
%Idea{category: "sad", body: "getting sickness", x: 10.0, y: 210.0},
],
]
test "ideas can be visible in high-contrast mode", ~M{retro, session} do
session = visit_retro(session, retro)
click(session, Query.css("button", text: "High Contrast"))
assert_count_of_high_contrast_color_borders_is(session, 2)
click(session, Query.css("button", text: "High Contrast"))
assert_count_of_high_contrast_color_borders_is(session, 0)
end
@tag [
retro_stage: "grouping",
ideas: [
%Idea{category: "sad", body: "rampant sickness", x: 0.0, y: 200.0},
%Idea{category: "sad", body: "getting sickness", x: 10.0, y: 210.0},
%Idea{category: "sad", body: "lazy commit messages", x: 500.0, y: 400.0}
]
]
test "progressing to the voting stage creates groups by proximity, isolating lone ideas as their own group",
~M{retro, session} do
session = visit_retro(session, retro)
click_and_confirm_progression_to(session, "Labeling")
assert_has(session, Query.css(".idea-group", text: "rampant sickness\ngetting sickness", count: 1))
assert_has(session, Query.css(".idea-group", text: "lazy commit messages", count: 1))
end
end
defp parse_transform_coordinates_for_card(session, idea_text) do
inline_style_string =
find(session, Query.css("p", text: idea_text))
|> Wallaby.Element.attr("style")
~r/transform: translate3d\((?<x>.*)px,\s?(?<y>.*)px,\s0px\)/
|> Regex.named_captures(inline_style_string)
end
defp assert_count_of_emboldened_ideas_to_be(session, count) do
assert_has(session, Query.xpath("//p[contains(@style, 'box-shadow')]", count: count))
end
def assert_count_of_high_contrast_color_borders_is(session, count) do
assert_has(session, Query.xpath("//p[contains(@style, 'box-shadow: rgb(0, 0, 0)')]", count: count))
end
end
| 35.213675 | 138 | 0.665777 |
1c7090e7f2fa067a0099d2fc04147047321cfba1 | 16,074 | ex | Elixir | lib/tesla/adapter/gun.ex | thiamsantos/tesla | af3707078b10793f6a534938e56b963aff82fe3c | [
"MIT"
] | 1 | 2019-06-25T07:01:01.000Z | 2019-06-25T07:01:01.000Z | lib/tesla/adapter/gun.ex | thiamsantos/tesla | af3707078b10793f6a534938e56b963aff82fe3c | [
"MIT"
] | null | null | null | lib/tesla/adapter/gun.ex | thiamsantos/tesla | af3707078b10793f6a534938e56b963aff82fe3c | [
"MIT"
] | null | null | null | if Code.ensure_loaded?(:gun) do
defmodule Tesla.Adapter.Gun do
@moduledoc """
Adapter for [gun](https://github.com/ninenines/gun).
Remember to add `{:gun, "~> 1.3"}`, `{:idna, "~> 6.0"}` and `{:castore, "~> 0.1"}` to dependencies.
In version 1.3 gun sends `host` header with port. Fixed in master branch.
Also, you need to recompile tesla after adding `:gun` dependency:
```
mix deps.clean tesla
mix deps.compile tesla
```
## Example usage
```
# set globally in config/config.exs
config :tesla, :adapter, Tesla.Adapter.Gun
# set per module
defmodule MyClient do
use Tesla
adapter Tesla.Adapter.Gun
end
```
## Adapter specific options
- `:timeout` - Time, while process, will wait for gun messages.
- `:body_as` - What will be returned in `%Tesla.Env{}` body key. Possible values - `:plain`, `:stream`, `:chunks`. Defaults to `:plain`.
- `:plain` - as binary.
- `:stream` - as stream. If you don't want to close connection (because you want to reuse it later) pass `close_conn: false` in adapter opts.
- `:chunks` - as chunks. You can get response body in chunks using `Tesla.Adapter.Gun.read_chunk/3` function.
Processing of the chunks and checking body size must be done by yourself. Example of processing function is in `test/tesla/adapter/gun_test.exs` - `Tesla.Adapter.GunTest.read_body/4`. If you don't need connection later don't forget to close it with `Tesla.Adapter.Gun.close/1`.
- `:max_body` - Max response body size in bytes. Works only with `body_as: :plain`, with other settings you need to check response body size by yourself.
- `:conn` - Opened connection pid with gun. Is used for reusing gun connections.
- `:close_conn` - Close connection or not after receiving full response body. Is used for reusing gun connections. Defaults to `true`.
- `:certificates_verification` - Add SSL certificates verification. [erlang-certifi](https://github.com/certifi/erlang-certifi) [ssl_verify_fun.erl](https://github.com/deadtrickster/ssl_verify_fun.erl)
- `:proxy` - Proxy for requests. **Socks proxy are supported only for gun master branch**. Examples: `{'localhost', 1234}`, `{{127, 0, 0, 1}, 1234}`, `{:socks5, 'localhost', 1234}`.
## [Gun options](https://ninenines.eu/docs/en/gun/1.3/manual/gun/)
- `:connect_timeout` - Connection timeout.
- `:http_opts` - Options specific to the HTTP protocol.
- `:http2_opts` - Options specific to the HTTP/2 protocol.
- `:protocols` - Ordered list of preferred protocols. Defaults: `[:http2, :http]`- for :tls, `[:http]` - for :tcp.
- `:trace` - Whether to enable dbg tracing of the connection process. Should only be used during debugging. Default: false.
- `:transport` - Whether to use TLS or plain TCP. The default varies depending on the port used. Port 443 defaults to tls. All other ports default to tcp.
- `:transport_opts` - Transport options. They are TCP options or TLS options depending on the selected transport. Default: `[]`. Gun version: 1.3
- `:tls_opts` - TLS transport options. Default: `[]`. Gun from master branch.
- `:tcp_opts` - TCP trasnport options. Default: `[]`. Gun from master branch.
- `:socks_opts` - Options for socks. Default: `[]`. Gun from master branch.
- `:ws_opts` - Options specific to the Websocket protocol. Default: `%{}`.
- `:compress` - Whether to enable permessage-deflate compression. This does not guarantee that compression will be used as it is the server that ultimately decides. Defaults to false.
- `:protocols` - A non-empty list enables Websocket protocol negotiation. The list of protocols will be sent in the sec-websocket-protocol request header. The handler module interface is currently undocumented and must be set to `gun_ws_h`.
"""
# Implements the Tesla adapter contract (call/2 below).
@behaviour Tesla.Adapter
alias Tesla.Multipart
# Option keys copied verbatim from adapter opts into the gun options map
# (see Map.take/2 in open_conn/2).
# TODO: update list after update to gun 2.0
@gun_keys [
:connect_timeout,
:http_opts,
:http2_opts,
:protocols,
:retry,
:retry_timeout,
:trace,
:transport,
:socks_opts,
:ws_opts
]
# Fallback receive timeout (ms) used whenever opts[:timeout] is not set.
@default_timeout 1_000
@impl Tesla.Adapter
# Adapter entry point: performs the request and folds the raw
# {status, headers, body} triple back into the Tesla env.
# Error tuples from request/2 are passed through untouched.
def call(env, opts) do
  case request(env, opts) do
    {:ok, status, headers, body} ->
      {:ok, %{env | status: status, headers: format_headers(headers), body: body}}

    error ->
      error
  end
end
@doc """
Reads chunk of the response body.
Returns `{:fin, binary()}` if all body received, otherwise returns `{:nofin, binary()}`.
"""
@spec read_chunk(pid(), reference(), keyword() | map()) ::
{:fin, binary()} | {:nofin, binary()} | {:error, atom()}
def read_chunk(pid, stream, opts) do
with {status, _} = chunk when status in [:fin, :error] <- do_read_chunk(pid, stream, opts) do
if opts[:close_conn], do: close(pid)
chunk
end
end
# Waits for a single :gun_data message (or a monitor :DOWN) for this
# connection/stream pair, bounded by opts[:timeout].
defp do_read_chunk(pid, stream, opts) do
  timeout = opts[:timeout] || @default_timeout

  receive do
    {:gun_data, ^pid, ^stream, :nofin, data} ->
      {:nofin, data}

    {:gun_data, ^pid, ^stream, :fin, data} ->
      {:fin, data}

    {:DOWN, _, _, _, reason} ->
      {:error, reason}
  after
    timeout ->
      {:error, :recv_chunk_timeout}
  end
end
@doc """
Brutally close the `gun` connection.
"""
@spec close(pid()) :: :ok
defdelegate close(pid), to: :gun
# Normalizes a header list: keys and values are stringified and keys
# are lower-cased (values keep their original case).
defp format_headers(headers) do
  Enum.map(headers, fn {key, value} ->
    {key |> to_string() |> String.downcase(), to_string(value)}
  end)
end
# Merges the adapter defaults with env/caller opts into a map and
# dispatches to request/5 with the normalized method/url/headers/body.
defp request(env, opts) do
  defaults = [close_conn: true, body_as: :plain, send_body: :at_once, receive: true]

  adapter_opts =
    defaults
    |> Tesla.Adapter.opts(env, opts)
    |> Map.new()

  request(
    Tesla.Adapter.Shared.format_method(env.method),
    Tesla.build_url(env.url, env.query),
    format_headers(env.headers),
    env.body || "",
    adapter_opts
  )
end
# A lazy Stream body is sent chunk by chunk.
defp request(method, url, headers, %Stream{} = body, opts),
  do: do_request(method, url, headers, body, Map.put(opts, :send_body, :stream))

# A function body (e.g. a composed stream) is streamed as well.
defp request(method, url, headers, body, opts) when is_function(body),
  do: do_request(method, url, headers, body, Map.put(opts, :send_body, :stream))

# Multipart bodies contribute extra headers and are always streamed.
defp request(method, url, headers, %Multipart{} = mp, opts) do
  do_request(
    method,
    url,
    headers ++ Multipart.headers(mp),
    Multipart.body(mp),
    Map.put(opts, :send_body, :stream)
  )
end

# Plain binary bodies go out using whatever :send_body is already set.
defp request(method, url, headers, body, opts),
  do: do_request(method, url, headers, body, opts)
# Opens (or reuses) a connection, issues the request and reads the
# response. The connection is closed here unless the body is consumed
# lazily (:stream / :chunks) or the caller asked to keep it open.
defp do_request(method, url, headers, body, opts) do
  uri = URI.parse(url)
  path = Tesla.Adapter.Shared.prepare_path(uri.path, uri.query)

  case open_conn(uri, opts) do
    {:ok, pid, opts} ->
      stream = open_stream(pid, method, path, headers, body, opts)
      response = read_response(pid, stream, opts)

      if opts[:close_conn] and opts[:body_as] not in [:stream, :chunks] do
        close(pid)
      end

      response

    error ->
      error
  end
end
@dialyzer [{:nowarn_function, open_conn: 2}, :no_match]
# Reuses a caller-supplied gun connection, but only when its scheme,
# host and port match the request URI; otherwise it is rejected with
# {:error, :invalid_conn}.
defp open_conn(%{scheme: scheme, host: host, port: port}, %{conn: conn} = opts)
     when is_pid(conn) do
  info = :gun.info(conn)

  conn_scheme =
    case info do
      # gun master branch support, which has `origin_scheme` in connection info
      %{origin_scheme: origin_scheme} -> origin_scheme
      %{transport: :tls} -> "https"
      _ -> "http"
    end

  conn_host =
    case :inet.ntoa(info.origin_host) do
      {:error, :einval} -> info.origin_host
      ip -> ip
    end

  if conn_scheme == scheme and to_string(conn_host) == host and info.origin_port == port do
    # Messages for a reused conn go to its owner, so do not await :gun_up.
    {:ok, conn, Map.put(opts, :receive, false)}
  else
    {:error, :invalid_conn}
  end
end
# Opens a fresh gun connection for the request URI; TLS options are
# only assembled for https.
defp open_conn(uri, opts) do
  opts = maybe_add_transport(uri, opts)

  tls_opts =
    case uri.scheme do
      "https" ->
        opts
        |> fetch_tls_opts()
        |> maybe_add_verify_options(opts, uri)

      _ ->
        []
    end

  gun_opts = Map.take(opts, @gun_keys)

  case do_open_conn(uri, opts, gun_opts, tls_opts) do
    {:ok, conn} -> {:ok, conn, opts}
    error -> error
  end
end
# HTTPS requires the TLS transport; anything else keeps the gun default.
defp maybe_add_transport(uri, opts) do
  case uri do
    %URI{scheme: "https"} -> Map.put(opts, :transport, :tls)
    _ -> opts
  end
end
# Gun master split `transport_opts` into `tls_opts` and `tcp_opts`:
# https://github.com/ninenines/gun/blob/491ddf58c0e14824a741852fdc522b390b306ae2/doc/src/manual/gun.asciidoc#changelog
# `:tls_opts` (master naming) wins over the legacy `:transport_opts`.
# TODO: remove after update to gun 2.0
defp fetch_tls_opts(opts) do
  case opts do
    %{tls_opts: tls_opts} when is_list(tls_opts) -> tls_opts
    %{transport_opts: tls_opts} when is_list(tls_opts) -> tls_opts
    _ -> []
  end
end
# When :certificates_verification is enabled, layers peer-verification
# options (CAStore CA bundle + hostname check on the IDNA-encoded host)
# under any user-supplied TLS options (user options take precedence).
defp maybe_add_verify_options(tls_opts, %{certificates_verification: true}, %{host: host}) do
  hostname =
    host
    |> to_charlist()
    |> :idna.encode()

  [
    verify: :verify_peer,
    cacertfile: CAStore.file_path(),
    depth: 20,
    reuse_sessions: false,
    verify_fun: {&:ssl_verify_hostname.verify_fun/3, [check_hostname: hostname]}
  ]
  |> Keyword.merge(tls_opts)
end

defp maybe_add_verify_options(tls_opts, _opts, _uri), do: tls_opts
# Connects through an HTTP(S) proxy given as {host, port}: opens a
# connection to the proxy, waits for it to come up, then issues a
# CONNECT tunnel to the target and requires a 200 response before
# returning the pid. Any non-matching step result is returned as-is.
defp do_open_conn(uri, %{proxy: {proxy_host, proxy_port}}, gun_opts, tls_opts) do
connect_opts =
uri
|> tunnel_opts()
|> tunnel_tls_opts(uri.scheme, tls_opts)
with {:ok, pid} <- :gun.open(proxy_host, proxy_port, gun_opts),
{:ok, _} <- :gun.await_up(pid),
# bare `<-` always matches: :gun.connect returns the stream ref directly
stream <- :gun.connect(pid, connect_opts),
{:response, :fin, 200, _} <- :gun.await(pid, stream) do
{:ok, pid}
end
end
# Connects through a SOCKS proxy given as {type, host, port}. A proxy
# type whose name ends in "4" selects SOCKS4; everything else SOCKS5.
defp do_open_conn(uri, %{proxy: {proxy_type, proxy_host, proxy_port}}, gun_opts, tls_opts) do
  version = if String.ends_with?(to_string(proxy_type), "4"), do: 4, else: 5

  socks_opts =
    uri
    |> tunnel_opts()
    |> tunnel_tls_opts(uri.scheme, tls_opts)
    |> Map.put(:version, version)

  gun_opts =
    gun_opts
    |> Map.put(:protocols, [:socks])
    # caller-supplied socks_opts override the computed tunnel options
    |> Map.update(:socks_opts, socks_opts, &Map.merge(socks_opts, &1))

  with {:ok, pid} <- :gun.open(proxy_host, proxy_port, gun_opts),
       {:ok, _} <- :gun.await_up(pid) do
    {:ok, pid}
  else
    # gun without socks support rejects the :protocols option
    {:error, {:options, {:protocols, [:socks]}}} ->
      {:error, "socks protocol is not supported"}

    error ->
      error
  end
end
@dialyzer [{:nowarn_function, do_open_conn: 4}, :no_match]
# Direct connection (no proxy configured). Tries the gun-master option
# names first (:tls_opts / :tcp_opts) and retries with the legacy
# :transport_opts key when gun rejects them.
defp do_open_conn(uri, opts, gun_opts, tls_opts) do
  tcp_opts = Map.get(opts, :tcp_opts, [])

  master_opts =
    gun_opts
    |> Map.put(:tls_opts, tls_opts)
    |> Map.put(:tcp_opts, tcp_opts)

  host = domain_or_ip(uri.host)

  case gun_open(host, uri.port, master_opts, opts) do
    {:error, {:options, {key, _}}} when key in [:tcp_opts, :tls_opts] ->
      gun_open(host, uri.port, Map.put(gun_opts, :transport_opts, tls_opts), opts)

    other ->
      other
  end
end
@dialyzer [{:nowarn_function, gun_open: 4}, :no_match]
# Opens a gun connection and, unless the caller opted out by setting
# opts[:receive] to false, waits for it to come up. The :receive/:up
# tags identify in `else` which step produced the value.
# NOTE(review): the second `with` clause matches only the literal
# `true`; if opts[:receive] were nil, the raw tagged tuple would fall
# through the final catch-all — request/2 always sets :receive
# explicitly, but confirm before relying on other values here.
defp gun_open(host, port, gun_opts, opts) do
with {:ok, pid} <- :gun.open(host, port, gun_opts),
{_, true, _} <- {:receive, opts[:receive], pid},
{_, {:ok, _}, _} <- {:up, :gun.await_up(pid), pid} do
{:ok, pid}
else
# receiving disabled: hand back the pid without awaiting :gun_up
{:receive, false, pid} ->
{:ok, pid}
# connection never came up: close the pid so it is not leaked
{:up, error, pid} ->
close(pid)
error
error ->
error
end
end
# Tunnel target for a proxy: destination host (IP tuple or IDNA-encoded
# domain) and port.
defp tunnel_opts(uri) do
  %{host: domain_or_ip(uri.host), port: uri.port}
end
# For https tunnels, force HTTP/2 over TLS with the given TLS options;
# any other scheme leaves the tunnel options untouched.
defp tunnel_tls_opts(opts, "https", tls_opts) do
  opts
  |> Map.put(:protocols, [:http2])
  |> Map.put(:transport, :tls)
  |> Map.put(:tls_opts, tls_opts)
end

defp tunnel_tls_opts(opts, _scheme, _tls_opts), do: opts
# Issues the request on the connection; replies go to opts[:reply_to]
# or, by default, the calling process.
defp open_stream(pid, method, path, headers, body, opts) do
  req_opts = %{reply_to: opts[:reply_to] || self()}
  open_stream(pid, method, path, headers, body, req_opts, opts[:send_body])
end

# Streamed body: open the request with an empty body, push every part
# with :nofin, then finish the stream with an empty :fin frame.
defp open_stream(pid, method, path, headers, body, req_opts, :stream) do
  stream = :gun.request(pid, method, path, headers, "", req_opts)

  Enum.each(body, fn part -> :ok = :gun.data(pid, stream, :nofin, part) end)
  :gun.data(pid, stream, :fin, "")

  stream
end

# Full body available up-front: send it in a single request call.
defp open_stream(pid, method, path, headers, body, req_opts, :at_once),
  do: :gun.request(pid, method, path, headers, body, req_opts)
# Waits for the response head on `stream` and dispatches on the
# configured body mode. When opts[:receive] is true, :gun_up/:gun_down
# connection events simply restart the receive; when it is false/nil
# the guard fails and those messages stay in the mailbox.
defp read_response(pid, stream, opts) do
receive? = opts[:receive]
receive do
# complete response with no body
{:gun_response, ^pid, ^stream, :fin, status, headers} ->
{:ok, status, headers, ""}
# response head only; body follows as :gun_data messages
{:gun_response, ^pid, ^stream, :nofin, status, headers} ->
format_response(pid, stream, opts, status, headers, opts[:body_as])
{:gun_up, ^pid, _protocol} when receive? ->
read_response(pid, stream, opts)
{:gun_error, ^pid, reason} ->
{:error, reason}
{:gun_down, ^pid, _, _, _, _} when receive? ->
read_response(pid, stream, opts)
# monitored process died before replying
{:DOWN, _, _, _, reason} ->
{:error, reason}
after
opts[:timeout] || @default_timeout ->
{:error, :recv_response_timeout}
end
end
# Eagerly reads the whole body into a single binary.
defp format_response(pid, stream, opts, status, headers, :plain) do
  with {:ok, body} <- read_body(pid, stream, opts) do
    {:ok, status, headers, body}
  else
    {:error, _} = error ->
      # prevent gun sending messages to owner process, if body is too large and connection is not closed
      :ok = :gun.flush(stream)
      error
  end
end
# Wraps the body in a lazy Stream: chunks are pulled from the gun
# mailbox only as the stream is consumed.
defp format_response(pid, stream, opts, status, headers, :stream) do
stream_body =
Stream.resource(
# start state: connection pid + stream ref
fn -> %{pid: pid, stream: stream} end,
fn
%{pid: pid, stream: stream} ->
case read_chunk(pid, stream, opts) do
{:nofin, part} -> {[part], %{pid: pid, stream: stream}}
# final chunk: emit it, then switch to the halt state below
{:fin, body} -> {[body], %{pid: pid, final: :fin}}
end
%{pid: pid, final: :fin} ->
{:halt, %{pid: pid}}
end,
# cleanup: optionally close the connection once the stream is done
fn %{pid: pid} ->
if opts[:close_conn], do: close(pid)
end
)
# NOTE(review): read_chunk/3 can also return {:error, reason}, which no
# clause above matches — consuming the stream would then raise. Looks
# like deliberate let-it-crash; confirm before depending on it.
{:ok, status, headers, stream_body}
end
# Hands the caller everything needed to pull chunks manually via
# read_chunk/3; opts are normalized to a keyword list.
defp format_response(pid, stream, opts, status, headers, :chunks) do
  chunk_state = %{pid: pid, stream: stream, opts: Enum.into(opts, [])}
  {:ok, status, headers, chunk_state}
end
# Accumulates :gun_data messages for the stream into a binary,
# enforcing opts[:max_body] (when set) after each chunk.
defp read_body(pid, stream, opts, acc \\ "") do
  limit = opts[:max_body]

  receive do
    {:gun_data, ^pid, ^stream, :fin, data} ->
      check_body_size(acc, data, limit)

    {:gun_data, ^pid, ^stream, :nofin, data} ->
      case check_body_size(acc, data, limit) do
        {:ok, new_acc} -> read_body(pid, stream, opts, new_acc)
        error -> error
      end

    {:DOWN, _, _, _, reason} ->
      {:error, reason}
  after
    opts[:timeout] || @default_timeout ->
      {:error, :recv_body_timeout}
  end
end
# Appends `part` to the accumulated body. Without a limit this always
# succeeds; with a limit the combined byte size must not exceed it.
defp check_body_size(acc, part, nil), do: {:ok, acc <> part}

defp check_body_size(acc, part, limit) do
  combined = acc <> part

  if byte_size(combined) <= limit do
    {:ok, combined}
  else
    {:error, :body_too_large}
  end
end
# Returns the host as an :inet IP tuple when it parses as an address,
# otherwise as an IDNA-encoded domain charlist (what gun expects).
defp domain_or_ip(host) do
  chars = to_charlist(host)

  case :inet.parse_address(chars) do
    {:ok, ip_tuple} -> ip_tuple
    {:error, :einval} -> :idna.encode(chars)
  end
end
end
end
| 33.84 | 285 | 0.594687 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.