hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ff36f0fb69065e13cf7b6280cd47594ef6f79aa1 | 1,103 | ex | Elixir | lib/noven_web/channels/device_socket.ex | ConnorRigby/noven | 2c34953490585b77b6c7ae8dd772da5028f6a948 | [
"Apache-2.0"
] | 8 | 2020-09-10T09:18:17.000Z | 2022-03-25T03:43:25.000Z | lib/noven_web/channels/device_socket.ex | ConnorRigby/noven | 2c34953490585b77b6c7ae8dd772da5028f6a948 | [
"Apache-2.0"
] | null | null | null | lib/noven_web/channels/device_socket.ex | ConnorRigby/noven | 2c34953490585b77b6c7ae8dd772da5028f6a948 | [
"Apache-2.0"
] | 4 | 2020-12-28T06:13:51.000Z | 2021-04-27T18:00:06.000Z | defmodule NovenWeb.DeviceSocket do
use Phoenix.Socket
require Logger
## Channels
channel "device:*", NovenWeb.DeviceChannel
@impl true
def connect(%{"token" => token}, socket, _connect_info) do
with {:ok, token} <- Base.url_decode64(token, padding: false),
token <- :crypto.hash(:sha256, token),
%Noven.Devices.Device{} = device <- Noven.Devices.get_device_by_token(token) do
{:ok,
socket
|> assign(:device, device)}
else
error ->
Logger.error("Could not authenticate device: #{inspect(error)}")
:error
end
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# NovenWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
@impl true
def id(socket), do: "device_socket:#{socket.assigns.device.id}"
end
| 30.638889 | 88 | 0.655485 |
ff3711b20cb6b7ac9ecf44a587d0f15d40ac14c0 | 668 | exs | Elixir | test/codegen/models_test.exs | vickerysec/obmarg_kazan | 27e9ab923dd2a3b89e030de918f4d7c9da316248 | [
"MIT"
] | null | null | null | test/codegen/models_test.exs | vickerysec/obmarg_kazan | 27e9ab923dd2a3b89e030de918f4d7c9da316248 | [
"MIT"
] | null | null | null | test/codegen/models_test.exs | vickerysec/obmarg_kazan | 27e9ab923dd2a3b89e030de918f4d7c9da316248 | [
"MIT"
] | null | null | null | defmodule KazanCodegenModelsTest do
use ExUnit.Case
alias Kazan.Codegen.Models
  # Exercises Models.module_name/2, which maps an OpenAPI definition name
  # (e.g. "io.k8s.api.core.v1.Binding") onto an Elixir module.
  describe "module_name" do
    test "returns nil when unknown name in safe mode" do
      # Safe mode (the default) refuses to build atoms for unknown definitions.
      assert Models.module_name("io.k8s.api.core.v1.nope") == nil
    end

    test "returns a module name when in unsafe mode" do
      # `unsafe: true` builds the module atom even for names never seen before.
      mod_name =
        Models.module_name(
          "io.k8s.api.core.v1.something",
          unsafe: true
        )

      assert mod_name == Kazan.Apis.Core.V1.Something
    end

    test "returns actual core module names" do
      mod_name = Models.module_name("io.k8s.api.core.v1.Binding")
      assert mod_name == Kazan.Apis.Core.V1.Binding
    end
  end
end
| 24.740741 | 65 | 0.661677 |
ff37434b5430ecc8ae39bb2dac8bc5579f0ff32c | 3,336 | ex | Elixir | lib/ash_postgres.ex | zimt28/ash_postgres | 5f27e329bc369f12ffb44bbdffbb37d5807067fb | [
"MIT"
] | null | null | null | lib/ash_postgres.ex | zimt28/ash_postgres | 5f27e329bc369f12ffb44bbdffbb37d5807067fb | [
"MIT"
] | null | null | null | lib/ash_postgres.ex | zimt28/ash_postgres | 5f27e329bc369f12ffb44bbdffbb37d5807067fb | [
"MIT"
] | null | null | null | defmodule AshPostgres do
@moduledoc """
A postgres extension library for `Ash`.
`AshPostgres.DataLayer` provides a DataLayer, and a DSL extension to configure that data layer.
The dsl extension exposes the `postgres` section. See: `AshPostgres.DataLayer` for more.
"""
alias Ash.Dsl.Extension
@doc "The configured repo for a resource"
def repo(resource) do
Extension.get_opt(resource, [:postgres], :repo, nil, true)
end
@doc "The configured table for a resource"
def table(resource) do
Extension.get_opt(resource, [:postgres], :table, nil, true)
end
@doc "The configured references for a resource"
def references(resource) do
Extension.get_entities(resource, [:postgres, :references])
end
@doc "The configured check_constraints for a resource"
def check_constraints(resource) do
Extension.get_entities(resource, [:postgres, :check_constraints])
end
@doc "The configured polymorphic_reference_on_delete for a resource"
def polymorphic_on_delete(resource) do
Extension.get_opt(resource, [:postgres, :references], :polymorphic_on_delete, nil, true)
end
@doc "The configured polymorphic_reference_on_update for a resource"
def polymorphic_on_update(resource) do
Extension.get_opt(resource, [:postgres, :references], :polymorphic_on_update, nil, true)
end
@doc "The configured polymorphic_reference_name for a resource"
def polymorphic_name(resource) do
Extension.get_opt(resource, [:postgres, :references], :polymorphic_on_delete, nil, true)
end
@doc "The configured polymorphic? for a resource"
def polymorphic?(resource) do
Extension.get_opt(resource, [:postgres], :polymorphic?, nil, true)
end
@doc "The configured unique_index_names"
def unique_index_names(resource) do
Extension.get_opt(resource, [:postgres], :unique_index_names, [], true)
end
@doc "The configured identity_index_names"
def identity_index_names(resource) do
Extension.get_opt(resource, [:postgres], :identity_index_names, [], true)
end
@doc "The configured foreign_key_names"
def foreign_key_names(resource) do
Extension.get_opt(resource, [:postgres], :foreign_key_names, [], true)
end
@doc "Whether or not the resource should be included when generating migrations"
def migrate?(resource) do
Extension.get_opt(resource, [:postgres], :migrate?, nil, true)
end
@doc "A stringified version of the base_filter, to be used in a where clause when generating unique indexes"
def base_filter_sql(resource) do
Extension.get_opt(resource, [:postgres], :base_filter_sql, nil)
end
  @doc "Skip generating unique indexes when generating migrations"
  def skip_unique_indexes?(resource) do
    # NOTE(review): despite the `?` name, the default here is `[]` (truthy)
    # rather than `false` — the option may actually hold a list of index
    # names to skip; confirm against the DSL schema before treating this
    # as a plain boolean.
    Extension.get_opt(resource, [:postgres], :skip_unique_indexes?, [])
  end
@doc "The template for a managed tenant"
def manage_tenant_template(resource) do
Extension.get_opt(resource, [:postgres, :manage_tenant], :template, nil)
end
@doc "Whether or not to create a tenant for a given resource"
def manage_tenant_create?(resource) do
Extension.get_opt(resource, [:postgres, :manage_tenant], :create?, false)
end
@doc "Whether or not to update a tenant for a given resource"
def manage_tenant_update?(resource) do
Extension.get_opt(resource, [:postgres, :manage_tenant], :update?, false)
end
end
| 34.391753 | 110 | 0.745803 |
ff37504fb53295a696de38ab5259b5aaf8c90e21 | 12,163 | ex | Elixir | lib/raft_fleet/consensus_member_adjuster.ex | liveforeverx/raft_fleet | b3c03250f8498eeb734f2855734c10bd0c928417 | [
"MIT"
] | null | null | null | lib/raft_fleet/consensus_member_adjuster.ex | liveforeverx/raft_fleet | b3c03250f8498eeb734f2855734c10bd0c928417 | [
"MIT"
] | null | null | null | lib/raft_fleet/consensus_member_adjuster.ex | liveforeverx/raft_fleet | b3c03250f8498eeb734f2855734c10bd0c928417 | [
"MIT"
] | null | null | null | use Croma
defmodule RaftFleet.ConsensusMemberAdjuster do
require Logger
alias RaftFleet.{Cluster, Manager, LeaderPidCache}
# Currently this is fixed; simply I don't know whether this should be customizable or not.
@wait_time_before_forgetting_deactivated_node 30 * 60_000
  @doc """
  Entry point, invoked periodically on each active node.

  Queries the `Cluster` consensus group for (1) the participating nodes,
  (2) the consensus groups this node should lead, and (3) removed groups
  whose member processes must be cleaned up, then adjusts members
  accordingly. Query failures are only logged; the next periodic run
  retries.
  """
  def adjust() do
    case RaftFleet.query(Cluster, {:consensus_groups, Node.self()}) do
      {:error, reason} ->
        Logger.warn("querying all consensus groups failed: #{inspect(reason)}")

      {:ok, {participating_nodes, groups, removed_groups}} ->
        kill_members_of_removed_groups(removed_groups)
        adjust_consensus_member_sets(participating_nodes, groups)
        leader_pid = LeaderPidCache.get(Cluster)

        # Only the node hosting the Cluster leader adjusts the Cluster
        # consensus itself, avoiding concurrent conflicting adjustments.
        if is_pid(leader_pid) and node(leader_pid) == Node.self() do
          adjust_cluster_consensus_members(leader_pid, participating_nodes)
        end
    end
  end
defp kill_members_of_removed_groups({removed_groups, index}) do
Enum.each(removed_groups, &stop_members_of_removed_group/1)
notify_completion_of_cleanup(index)
end
  # Stops all member processes of a group that has been removed from the
  # cluster-wide configuration.
  defp stop_members_of_removed_group(group_name) do
    case try_status(group_name) do
      %{from: from, members: members} ->
        # `from` is the locally queried member: stop it synchronously, then
        # stop the remaining (possibly remote, possibly slow) members in a
        # separate process.
        stop_member(from)
        case List.delete(members, from) do
          [] -> :ok
          other_members -> spawn(fn -> Enum.each(other_members, &stop_member/1) end)
        end
      _failed ->
        # Even if `status/1` times out, we have to remove at least locally running member
        case Process.whereis(group_name) do
          nil -> :ok
          pid -> stop_member(pid)
        end
    end
  end
defp stop_member(pid) do
try do
:gen_statem.stop(pid)
catch
:exit, _ -> :ok # Any other concurrent activity has just killed the pid; neglect it.
end
end
defp notify_completion_of_cleanup(index_or_group_name_or_nil) do
spawn(fn ->
millis = System.system_time(:milliseconds)
RaftFleet.command(Cluster, {:stopped_extra_members, Node.self(), index_or_group_name_or_nil, millis, @wait_time_before_forgetting_deactivated_node})
end)
end
defp adjust_consensus_member_sets(participating_nodes, groups) do
Enum.each(groups, fn group -> do_adjust(participating_nodes, group) end)
end
  # A group with no desired member nodes requires no action.
  defp do_adjust(_, {_, []}), do: []
  defp do_adjust(participating_nodes, {group_name, desired_member_nodes}) do
    adjust_one_step(participating_nodes, group_name, desired_member_nodes)
  end
  # One adjustment step for a single consensus group. `defpt` (from Croma)
  # defines a private function that becomes public in test builds.
  defpt adjust_one_step(participating_nodes, group_name, [leader_node | _] = desired_member_nodes) do
    # Note: `leader_node == Node.self()` always holds, as this node is supposed to host leader process of this `group_name`
    case try_status(group_name) do
      %{state_name: :leader, from: leader, members: members, unresponsive_followers: unresponsive_followers} ->
        adjust_with_desired_leader(group_name, desired_member_nodes, leader, members, unresponsive_followers)
      status_or_reason ->
        # No leader in this node; now we have to collect statuses from all "relevant" nodes to judge what to do.
        relevant_nodes = relevant_node_set(participating_nodes)
        {node_with_status_pairs, node_with_error_reason_pairs} =
          try_fetch_all_node_statuses_or_reasons(group_name, relevant_nodes, leader_node, status_or_reason)
        # Nodes that answered `:noproc` definitely host no member process.
        noproc_nodes = for {n, :noproc} <- node_with_error_reason_pairs, into: MapSet.new(), do: n
        case find_leader_from_statuses(node_with_status_pairs) do
          {undesired_leader, leader_status} ->
            adjust_with_undesired_leader(group_name, desired_member_nodes, undesired_leader, leader_status, node_with_status_pairs, noproc_nodes)
          nil ->
            adjust_with_no_leader(group_name, relevant_nodes, node_with_status_pairs, noproc_nodes)
        end
    end
  end
defp relevant_node_set(participating_nodes) do
# We need to take both of the following types of nodes into account to correctly find all member processes:
# - participating (active) nodes, which may not be connected due to temporary netsplit
# - currently connected nodes, which may already be deactivated but may still have member processes
Enum.into(participating_nodes, MapSet.new(Node.list()))
end
  # The desired leader (running on this node) is in place; reconcile its
  # follower set with `follower_nodes`, making at most one change per call.
  defp adjust_with_desired_leader(group_name,
                                  [leader_node | follower_nodes],
                                  leader,
                                  members,
                                  unresponsive_followers) do
    follower_nodes_from_leader = List.delete(members, leader) |> Enum.map(&node/1) |> Enum.sort()
    cond do
      # A desired follower node has no member yet: start one there.
      (nodes_to_be_added = follower_nodes -- follower_nodes_from_leader) != [] ->
        Manager.start_consensus_group_follower(group_name, Enum.random(nodes_to_be_added), leader_node)
      # A member lives on a node that should no longer host one: remove it.
      (nodes_to_be_removed = follower_nodes_from_leader -- follower_nodes) != [] ->
        target_node = Enum.random(nodes_to_be_removed)
        target_pid = Enum.find(members, fn m -> node(m) == target_node end)
        RaftedValue.remove_follower(leader, target_pid)
      # Otherwise, clean up an unresponsive follower if it is provably dead.
      unresponsive_followers != [] ->
        remove_follower_if_definitely_dead(group_name, leader, Enum.random(unresponsive_followers))
      true -> :ok
    end
  end
  # A leader exists but not on the desired node. First complete membership
  # (add missing members, drop definitely-dead ones), then hand leadership
  # over to the member running on this node.
  defp adjust_with_undesired_leader(group_name,
                                    desired_member_nodes,
                                    undesired_leader,
                                    undesired_leader_status,
                                    node_with_status_pairs,
                                    noproc_nodes) do
    nodes_missing = desired_member_nodes -- Enum.map(node_with_status_pairs, &elem(&1, 0))
    # Followers the leader reports as unresponsive whose nodes answered
    # `:noproc` are certainly dead.
    dead_follower_pids =
      Map.get(undesired_leader_status || %{}, :unresponsive_followers, [])
      |> Enum.filter(&(node(&1) in noproc_nodes))
    cond do
      (nodes_to_be_added = nodes_missing -- Enum.map(dead_follower_pids, &node/1)) != [] ->
        Manager.start_consensus_group_follower(group_name, Enum.random(nodes_to_be_added), node(undesired_leader))
      nodes_missing != [] and dead_follower_pids != [] ->
        remove_definitely_dead_follower(group_name, undesired_leader, Enum.random(dead_follower_pids))
      true ->
        # As the previous cond branches don't match, there must be a member process in this node
        replace_leader_with_member_in_this_node_and_log(group_name, undesired_leader)
    end
  end
  # No leader was found anywhere. Either every relevant node reported
  # `:noproc` (group is gone — remove it) or some members still exist and
  # the group may be restorable by force-removing broken members.
  defp adjust_with_no_leader(group_name, relevant_nodes, node_with_status_pairs, noproc_nodes) do
    if MapSet.equal?(relevant_nodes, noproc_nodes) do
      # Something really bad happened to this consensus group and now we are sure that there's no surviving member in `relevant_nodes`.
      recheck_that_no_survivor_exists_then_remove_consensus_group(group_name, relevant_nodes)
    else
      # Try to restore the consensus group by removing unhealthy and/or unnecessary member(s).
      {members_in_relevant_nodes, members_in_irrelevant_nodes} =
        Enum.flat_map(node_with_status_pairs, fn {_n, %{members: ms}} -> ms end)
        |> Enum.uniq()
        |> Enum.split_with(&(node(&1) in relevant_nodes))
      if members_in_irrelevant_nodes == [] do
        case Enum.split_with(members_in_relevant_nodes, fn m -> node(m) in noproc_nodes end) do
          {[], _} ->
            # Nothing we can do, just wait and retry...
            :ok
          {definitely_dead_members, probably_living_members} ->
            force_remove_definitely_dead_member(group_name, probably_living_members, Enum.random(definitely_dead_members))
        end
      else
        force_remove_member_in_irrelevant_node(group_name, members_in_relevant_nodes, Enum.random(members_in_irrelevant_nodes))
      end
    end
  end
defp try_status(dest) do
try do
RaftedValue.status(dest)
catch
:exit, {reason, _} -> reason # :noproc | {:nodedown, node} | :timeout
end
end
  # Queries `status/1` on every relevant node except `node_self` (whose
  # result is already known) and splits the `{node, result}` pairs into
  # `{status_pairs, error_reason_pairs}` — a status is a map, anything
  # else is an error reason.
  defp try_fetch_all_node_statuses_or_reasons(group_name, relevant_nodes, node_self, status_or_reason_self) do
    pairs_without_node_self =
      MapSet.delete(relevant_nodes, node_self)
      |> Enum.map(fn n -> {n, try_status({group_name, n})} end)
    pairs = [{node_self, status_or_reason_self} | pairs_without_node_self]
    Enum.split_with(pairs, &match?({_, %{}}, &1))
  end
defp find_leader_from_statuses([]), do: nil
defp find_leader_from_statuses(pairs) do
(for {_node, %{state_name: :leader} = s} <- pairs, do: s)
|> case do
[] -> nil
ss ->
s = Enum.max_by(ss, &(&1.current_term))
{s.leader, s}
end
end
defp remove_follower_if_definitely_dead(group_name, leader, target_follower) do
case try_status(target_follower) do
:noproc -> remove_definitely_dead_follower(group_name, leader, target_follower)
_ -> :ok
end
end
  # Removes a follower whose node reported `:noproc` — it is certainly dead.
  defp remove_definitely_dead_follower(group_name, leader, target_follower) do
    remove_follower_and_log(leader, target_follower, "a member (#{inspect(target_follower)}) of #{group_name} is definitely dead; remove it from the group")
  end
  # Asks `leader` to remove `target_follower` and logs the outcome.
  defp remove_follower_and_log(leader, target_follower, log_prefix) do
    ret = RaftedValue.remove_follower(leader, target_follower)
    Logger.info("#{log_prefix}: #{inspect(ret)}")
  end
  # Force-removes a member whose node answered `:noproc` (used when the
  # group has no leader and normal removal is impossible).
  defp force_remove_definitely_dead_member(group_name, members, target_member) do
    log_message = "trying to force-remove a definitely dead member (#{inspect(target_member)}) of #{group_name}"
    force_remove_a_member_from_existing_members_and_log(group_name, members, target_member, log_message)
  end
  # Force-removes a member on a node that is neither active nor connected,
  # as it cannot contribute to the consensus anymore.
  defp force_remove_member_in_irrelevant_node(group_name, members, target_member) do
    log_message = "trying to force-remove a member of #{group_name} in node #{node(target_member)} which is neither active nor connected"
    force_remove_a_member_from_existing_members_and_log(group_name, members, target_member, log_message)
  end
defp force_remove_a_member_from_existing_members_and_log(group_name, members, target_member, log_message) do
Logger.warn(log_message)
Enum.each(members, fn m ->
try do
RaftedValue.force_remove_member(m, target_member)
catch
:exit, {reason, _} ->
Logger.error("failed to force-remove a member #{inspect(m)} of #{group_name} from #{inspect(m)}: #{inspect(reason)}")
end
end)
end
  # Asks the current (misplaced) leader to transfer leadership to the member
  # process registered locally under `group_name`, logging the outcome.
  defp replace_leader_with_member_in_this_node_and_log(group_name, current_leader) do
    ret = RaftedValue.replace_leader(current_leader, Process.whereis(group_name))
    Logger.info("migrating leader of #{group_name} in #{node(current_leader)} to the member in this node: #{inspect(ret)}")
  end
  # Last resort: if, after a grace period, no node hosts any member of the
  # group, remove the consensus group from the cluster-wide configuration.
  defp recheck_that_no_survivor_exists_then_remove_consensus_group(group_name, relevant_nodes) do
    # Confirm that it's actually the case after sleep, in order to exclude the situation where the consensus group is just being added.
    :timer.sleep(5_000)
    if Enum.all?(relevant_nodes, fn n -> try_status({group_name, n}) == :noproc end) do
      ret = RaftFleet.remove_consensus_group(group_name)
      Logger.error("all members of #{group_name} have failed; removing the consensus group as a last resort: #{inspect(ret)}")
    end
  end
  # Maintains the membership of the special `Cluster` consensus group itself
  # (only called on the node hosting its leader).
  defp adjust_cluster_consensus_members(leader_pid, participating_nodes) do
    case RaftedValue.status(leader_pid) do
      %{unresponsive_followers: []} -> :ok
      %{members: member_pids, unresponsive_followers: unresponsive_pids} ->
        # Try to remove the following 2 types of pids:
        # - pids in irrelevant (not connected and not active) nodes should be cleaned up
        # - after supervisor restart of `RaftFleet.Cluster` process, pid of the dead process should be removed from consensus
        relevant_nodes = relevant_node_set(participating_nodes)
        healthy_member_nodes = Enum.map(member_pids -- unresponsive_pids, &node/1)
        pids_to_be_removed =
          Enum.filter(unresponsive_pids, fn pid ->
            n = node(pid)
            (n not in relevant_nodes) or (n in healthy_member_nodes)
          end)
        # Remove at most one member per invocation to keep changes safe.
        if pids_to_be_removed != [] do
          target_pid = Enum.random(pids_to_be_removed)
          RaftedValue.remove_follower(leader_pid, target_pid)
        end
    end
  end
end
| 46.780769 | 156 | 0.705254 |
ff3782d555fb52590ebec57f8b9c68ec448ccf88 | 144 | ex | Elixir | db_server/lib/db_server_web/controllers/api/chat_text_controller.ex | Graveyardillon/db_server | ce5a5884d7d1f0eacb3c5cc27066203424594cf4 | [
"MIT"
] | null | null | null | db_server/lib/db_server_web/controllers/api/chat_text_controller.ex | Graveyardillon/db_server | ce5a5884d7d1f0eacb3c5cc27066203424594cf4 | [
"MIT"
] | null | null | null | db_server/lib/db_server_web/controllers/api/chat_text_controller.ex | Graveyardillon/db_server | ce5a5884d7d1f0eacb3c5cc27066203424594cf4 | [
"MIT"
] | null | null | null | defmodule DbServerWeb.API.ChatTextController do
use DbServerWeb, :controller
def index(conn, _params) do
render(conn, :index)
end
end | 20.571429 | 47 | 0.756944 |
ff37cc6321e5580dca7f25d0024827d5ff19a9b6 | 709 | exs | Elixir | mix.exs | sgtpepper43/namex | 902a2e140d16838fd40ab6c5cc58775747937b81 | [
"MIT"
] | 1 | 2021-09-24T22:40:00.000Z | 2021-09-24T22:40:00.000Z | mix.exs | sgtpepper43/namex | 902a2e140d16838fd40ab6c5cc58775747937b81 | [
"MIT"
] | null | null | null | mix.exs | sgtpepper43/namex | 902a2e140d16838fd40ab6c5cc58775747937b81 | [
"MIT"
] | null | null | null | defmodule Namex.MixProject do
use Mix.Project
def project do
[
app: :namex,
version: "0.1.0",
elixir: "~> 1.6",
start_permanent: Mix.env() == :prod,
deps: deps(),
description: "A name parser",
package: [
licenses: ["MIT"],
maintainers: ["Trevor Fenn<sgtpepper43@gmail.com>"],
links: %{"GitHub" => "https://github.com/sgtpepper43/namex"},
files: ["lib", "mix.exs", "README*", "LICENSE*", "src"]
],
source_url: "https://github.com/sgtpepper43/namex"
]
end
def application do
[
extra_applications: [:logger]
]
end
defp deps do
[
{:ex_doc, ">= 0.0.0", only: :dev}
]
end
end
| 20.852941 | 69 | 0.534556 |
ff37d5df1334e83d4cc1db313116ee505d20654d | 67 | exs | Elixir | kousa/test/test_helper.exs | lazarospsa/dogehouse | 4400518f5b6bce929e40eada615356e8814a8d28 | [
"MIT"
] | 5 | 2021-03-02T09:00:45.000Z | 2021-03-13T04:45:46.000Z | kousa/test/test_helper.exs | lazarospsa/dogehouse | 4400518f5b6bce929e40eada615356e8814a8d28 | [
"MIT"
] | 2 | 2022-02-15T04:33:25.000Z | 2022-02-28T01:39:56.000Z | kousa/test/test_helper.exs | lazarospsa/dogehouse | 4400518f5b6bce929e40eada615356e8814a8d28 | [
"MIT"
] | 1 | 2021-03-19T13:04:24.000Z | 2021-03-19T13:04:24.000Z | ExUnit.start()
Ecto.Adapters.SQL.Sandbox.mode(Beef.Repo, :manual)
| 16.75 | 50 | 0.761194 |
ff3802439996a8584facded60b255d24f0b77624 | 1,289 | ex | Elixir | lib/screens_web/controllers/screen_api_controller.ex | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | 3 | 2021-07-27T14:11:00.000Z | 2022-01-03T14:16:43.000Z | lib/screens_web/controllers/screen_api_controller.ex | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | 444 | 2021-03-10T20:57:17.000Z | 2022-03-31T16:00:35.000Z | lib/screens_web/controllers/screen_api_controller.ex | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | null | null | null | defmodule ScreensWeb.ScreenApiController do
use ScreensWeb, :controller
require Logger
alias Screens.Config.State
plug(:check_config)
plug Corsica, [origins: "*"] when action == :show_dup
defp check_config(conn, _) do
if State.ok?() do
conn
else
conn
|> put_status(:not_found)
|> halt()
end
end
def show(conn, %{"id" => screen_id, "last_refresh" => _last_refresh, "datetime" => datetime}) do
data = Screens.ScreenData.by_screen_id_with_datetime(screen_id, datetime)
json(conn, data)
end
def show(conn, %{"id" => screen_id, "last_refresh" => last_refresh}) do
is_screen = ScreensWeb.UserAgent.is_screen_conn?(conn, screen_id)
_ = Screens.LogScreenData.log_data_request(screen_id, last_refresh, is_screen)
data =
Screens.ScreenData.by_screen_id(screen_id, is_screen,
check_disabled: true,
last_refresh: last_refresh
)
json(conn, data)
end
def show_dup(conn, %{"id" => screen_id, "rotation_index" => rotation_index}) do
is_screen = ScreensWeb.UserAgent.is_screen_conn?(conn, screen_id)
_ = Screens.LogScreenData.log_data_request(screen_id, nil, is_screen)
data = Screens.DupScreenData.by_screen_id(screen_id, rotation_index)
json(conn, data)
end
end
| 26.306122 | 98 | 0.693561 |
ff3812736afc60de06e0da0091fbaeed6f33e80d | 118 | exs | Elixir | test/relax_lib_test.exs | gialib/relax_lib | 0a94b1fe38d00b51832ef947a4c3a1c2956583ca | [
"MIT"
] | null | null | null | test/relax_lib_test.exs | gialib/relax_lib | 0a94b1fe38d00b51832ef947a4c3a1c2956583ca | [
"MIT"
] | null | null | null | test/relax_lib_test.exs | gialib/relax_lib | 0a94b1fe38d00b51832ef947a4c3a1c2956583ca | [
"MIT"
] | null | null | null | defmodule RelaxLibTest do
use ExUnit.Case
doctest RelaxLib
test "the truth" do
assert 1 + 1 == 2
end
end
| 13.111111 | 25 | 0.677966 |
ff38716a8cd8eb1683c4d7d7f2f24d9448c364ba | 2,563 | ex | Elixir | lib/radiator/auth/guardian.ex | djschilling/radiator | 382e22904d7e400a8ffba54e9ddfd2845bc2b623 | [
"MIT"
] | null | null | null | lib/radiator/auth/guardian.ex | djschilling/radiator | 382e22904d7e400a8ffba54e9ddfd2845bc2b623 | [
"MIT"
] | null | null | null | lib/radiator/auth/guardian.ex | djschilling/radiator | 382e22904d7e400a8ffba54e9ddfd2845bc2b623 | [
"MIT"
] | null | null | null | defmodule Radiator.Auth.Guardian do
@otp_app Mix.Project.config()[:app]
use Guardian, otp_app: @otp_app
alias Radiator.Auth
@doc """
Generates and returns a Bearer token for api usage.
"""
def api_session_token(%Auth.User{} = user) do
{:ok, token, _claims} =
encode_and_sign(user, %{}, ttl: {45, :minutes}, token_type: :api_session)
token
end
@doc """
Returns the expiry time of a token as `DateTime`. Returns value in the past if invalid or expired.
"""
def get_expiry_datetime(token) do
{:ok, datetime} =
case Guardian.decode_and_verify(__MODULE__, token) do
{:ok, %{"exp" => expiry_timestamp}} ->
DateTime.from_unix(expiry_timestamp)
# treat as expired
_ ->
DateTime.from_unix(0)
end
datetime
end
# Callbacks
@impl Guardian
def subject_for_token(%Auth.User{} = resource, _claims) do
# You can use any value for the subject of your token but
# it should be useful in retrieving the resource later, see
# how it being used on `resource_from_claims/1` function.
# A unique `id` is a good subject, a non-unique email address
# is a poor subject.
sub = to_string(resource.name)
{:ok, sub}
end
@impl Guardian
def subject_for_token(_, _) do
{:error, :reason_for_error}
end
@impl Guardian
def resource_from_claims(claims) when is_map(claims) do
# Here we'll look up our resource from the claims, the subject can be
# found in the `"sub"` key. In `above subject_for_token/2` we returned
# the resource id so here we'll rely on that to look it up.
username = claims["sub"]
case Radiator.Auth.Register.get_user_by_name(username) do
nil ->
{:error, :resource_not_found}
resource ->
{:ok, resource}
end
end
@impl Guardian
def resource_from_claims(_claims) do
{:error, :reason_for_error}
end
@impl Guardian
# Remove some of the optional default claims for now
# as long as they don't provide additional benefit/safety for us
# From the spec at https://tools.ietf.org/html/rfc7519
# * 'jti' JWT ID - a unique identifier for a token
# * 'aud' audience - intended audience
# * 'ndf' not before - token is invalid before that time
# * 'iat' issued at - time the token was issued
def build_claims(claims, _subject, _options) do
claims =
claims
|> Enum.reject(fn
{key, _value} when key in ["jti", "aud", "nbf", "iat"] -> true
_ -> false
end)
|> Map.new()
{:ok, claims}
end
end
| 26.978947 | 100 | 0.65041 |
ff38734f741192a3ce1fcb7e587aa3ff71f6e11a | 1,260 | ex | Elixir | lib/skeleton/types/default/string_to_float.ex | haskric/map_schema | 615d6428e168a0d3991d334cba76c2d8e5c417b6 | [
"MIT"
] | 3 | 2020-12-15T09:04:57.000Z | 2021-06-11T02:01:09.000Z | lib/skeleton/types/default/string_to_float.ex | haskric/map_schema | 615d6428e168a0d3991d334cba76c2d8e5c417b6 | [
"MIT"
] | null | null | null | lib/skeleton/types/default/string_to_float.ex | haskric/map_schema | 615d6428e168a0d3991d334cba76c2d8e5c417b6 | [
"MIT"
] | null | null | null | defmodule MapSchema.DefaultTypes.MSchemaStringToFloat do
@moduledoc false
@doc """
StringToFloat type
"""
@behaviour MapSchema.CustomType
alias MapSchema.ExuString
@spec name :: atom | String.t()
def name, do: :string_to_float
def nested?, do: false
@spec cast(value :: any) :: any | :map_schema_type_error
def cast(value) do
case ExuString.to_float!(value) do
:error -> :map_schema_type_error
cast_value -> cast_value
end
end
@doc """
Using is_float guard
-> 1. Will be execute cast(value)
## Examples
iex> alias MapSchema.DefaultTypes.MSchemaStringToFloat
iex> MSchemaStringToFloat.cast("102.332")
iex> |> MSchemaStringToFloat.is_valid?()
true
iex> alias MapSchema.DefaultTypes.MSchemaStringToFloat
iex> MSchemaStringToFloat.cast("it´s a float")
iex> |> MSchemaStringToFloat.is_valid?()
false
"""
@spec is_valid?(any) :: boolean
# It´s unecessary but I add help undestand it.
def is_valid?(:error), do: false
def is_valid?(value) do
is_float(value)
end
@spec doctest_values :: [{any, any}]
def doctest_values do
["1.25", "4.54", "3.593", "11.294", "123.45"]
|> Enum.map(fn(text) -> {"\"#{text}\"", text} end)
end
end
| 23.333333 | 60 | 0.656349 |
ff3903c3481ed80e1e0a997e464aeef9c281eb93 | 6,687 | ex | Elixir | exercises/practice/zebra-puzzle/.meta/example.ex | jaimeiniesta/elixir-1 | e8ddafeb313822645e0cd76743955a5c728a84c5 | [
"MIT"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | exercises/practice/zebra-puzzle/.meta/example.ex | jaimeiniesta/elixir-1 | e8ddafeb313822645e0cd76743955a5c728a84c5 | [
"MIT"
] | 583 | 2017-06-19T10:48:40.000Z | 2022-03-28T21:43:12.000Z | exercises/practice/zebra-puzzle/.meta/example.ex | jaimeiniesta/elixir-1 | e8ddafeb313822645e0cd76743955a5c728a84c5 | [
"MIT"
] | 228 | 2017-07-05T07:09:32.000Z | 2022-03-27T08:59:08.000Z | defmodule ZebraPuzzle do
  # Candidate values for each attribute of a house (classic 5-house puzzle).
  @nationalities ~w(englishman norwegian ukrainian japanese spaniard)a
  @colors ~w(red green ivory yellow blue)a
  @drinks ~w(coffee tea milk orange_juice water)a
  @pets ~w(dog snails fox horse zebra)a
  @cigarettes ~w{old_gold kool chesterfield lucky_strike parliament}a
@doc """
Determine who drinks the water
"""
@spec drinks_water() :: atom
def drinks_water() do
[%{nationality: nationality}] =
solve_puzzle()
|> Enum.filter(fn %{drink: drink} -> drink == :water end)
nationality
end
@doc """
Determine who owns the zebra
"""
@spec owns_zebra() :: atom
def owns_zebra() do
[%{nationality: nationality}] =
solve_puzzle()
|> Enum.filter(fn %{pet: pet} -> pet == :zebra end)
nationality
end
  # Solves the puzzle by constraint propagation: start from every possible
  # house/value combination and repeatedly filter until only the solution
  # remains. Returns the list of five solved house maps.
  defp solve_puzzle() do
    #
    # Step 0: Consider all possible combinations of values
    # (5 positions x 5 values per category = 5^6 candidate houses)
    #
    possibilities =
      Enum.flat_map(1..5, fn order ->
        Enum.flat_map(@colors, fn color ->
          Enum.flat_map(@drinks, fn drink ->
            Enum.flat_map(@nationalities, fn nationality ->
              Enum.flat_map(@cigarettes, fn cigarette ->
                Enum.map(@pets, fn pet ->
                  %{
                    order: order,
                    color: color,
                    drink: drink,
                    nationality: nationality,
                    cigarette: cigarette,
                    pet: pet
                  }
                end)
              end)
            end)
          end)
        end)
      end)

    #
    # Step 1: Add the direct constraints and filter possibilities
    #
    possibilities
    # The Englishman lives in the red house.
    |> filter_direct(:color, :red, :nationality, :englishman)
    # The Spaniard owns the dog.
    |> filter_direct(:nationality, :spaniard, :pet, :dog)
    # Coffee is drunk in the green house.
    |> filter_direct(:drink, :coffee, :color, :green)
    # The Ukrainian drinks tea.
    |> filter_direct(:drink, :tea, :nationality, :ukrainian)
    # The Old Gold smoker owns snails.
    |> filter_direct(:cigarette, :old_gold, :pet, :snails)
    # Kools are smoked in the yellow house.
    |> filter_direct(:cigarette, :kool, :color, :yellow)
    # Milk is drunk in the middle house.
    |> filter_direct(:drink, :milk, :order, 3)
    # The Norwegian lives in the first house.
    |> filter_direct(:nationality, :norwegian, :order, 1)
    # The Lucky Strike smoker drinks orange juice.
    |> filter_direct(:cigarette, :lucky_strike, :drink, :orange_juice)
    # The Japanese smokes Parliaments.
    |> filter_direct(:cigarette, :parliament, :nationality, :japanese)
    #
    # Step 2: Add indirect constraints (relations with neighbors)
    #
    |> filter_by_neighbors
    #
    # Step 3: Check if some values happen to be possibly in only one house,
    # add those constraints, filter and back to step 2 until all is solved
    #
    |> filter_by_unique_relations
  end
defp filter_direct(list, field_1, value_1, field_2, value_2) do
Enum.filter(list, fn element ->
cond do
element[field_1] == value_1 and element[field_2] == value_2 -> true
element[field_1] == value_1 -> false
element[field_2] == value_2 -> false
true -> true
end
end)
end
  # Applies the neighbor-relation clues repeatedly until a fixed point is
  # reached (later filters can enable earlier ones again).
  defp filter_by_neighbors(list) do
    next_to = fn n -> [n - 1, n + 1] end
    filtered_list =
      list
      # The green house is immediately to the right of the ivory house.
      |> filter_indirect(:color, :green, fn n -> [n - 1] end, :color, :ivory, fn n -> [n + 1] end)
      # The man who smokes Chesterfields lives in the house next to the man with the fox.
      |> filter_indirect(:cigarette, :chesterfield, next_to, :pet, :fox, next_to)
      # Kools are smoked in the house next to the house where the horse is kept.
      |> filter_indirect(:cigarette, :kool, next_to, :pet, :horse, next_to)
      # The Norwegian lives next to the blue house.
      |> filter_indirect(:nationality, :norwegian, next_to, :color, :blue, next_to)
    # later filters may influence earlier ones, so we loop until there is no change
    if length(filtered_list) == length(list) do
      list
    else
      filter_by_neighbors(filtered_list)
    end
  end
  # Enforces one neighbor clue in both directions: `order_1_to_2` maps a
  # house position with field_1=value_1 to positions where field_2=value_2
  # may live, and vice versa for `order_2_to_1`.
  defp filter_indirect(list, field_1, value_1, order_1_to_2, field_2, value_2, order_2_to_1) do
    # Get all possible neighbor houses of possibilities with field_1: value_1
    # Ex: find all possible house numbers that neighbor a green house
    orders_2 = get_orders(list, field_1, value_1, order_1_to_2)
    # Only keep possibilities with field_2: value_2 in that neighborhood
    list2 = filter_neighbors(list, field_2, value_2, orders_2)
    # Same from the other perspective
    orders_1 = get_orders(list2, field_2, value_2, order_2_to_1)
    filter_neighbors(list2, field_1, value_1, orders_1)
  end
defp get_orders(list, field, value, to_other_order) do
list
|> Enum.filter(&(&1[field] == value))
|> Enum.map(fn %{order: order} -> to_other_order.(order) end)
|> Enum.concat()
|> Enum.uniq()
|> Enum.filter(fn order -> 1 <= order and order <= 5 end)
end
defp filter_neighbors(list, field, value, orders) do
Enum.filter(list, fn element ->
cond do
element[field] == value and element.order in orders -> true
element[field] == value -> false
length(orders) == 1 and element.order == hd(orders) -> false
true -> true
end
end)
end
  # Finds values that can only occur at one particular house position,
  # promotes them to direct constraints, re-runs the neighbor filter, and
  # loops until no further reduction happens (the final solution).
  defp filter_by_unique_relations(list) do
    # Some values happen to exist only in one particular house number
    filter_parameters =
      list
      |> Enum.reduce(%{}, fn house, all ->
        # Accumulate, per position, the set of still-possible values for
        # each field.
        Map.update(all, house[:order], values_to_set(house), fn previous ->
          Map.merge(previous, house, fn _field, val_1, val_2 -> MapSet.put(val_1, val_2) end)
        end)
      end)
      |> Enum.map(fn {order, house} ->
        # A field with exactly one remaining possible value is fixed.
        house
        |> Enum.filter(fn {field, value} -> field != :order and MapSet.size(value) == 1 end)
        |> Enum.map(fn {field, value} -> {order, field, value |> MapSet.to_list() |> hd} end)
      end)
      |> Enum.concat()
    # Add those values as constraints and filter
    filtered_list =
      filter_parameters
      |> Enum.reduce(list, fn {order, f, v}, lst -> filter_direct(lst, :order, order, f, v) end)
      # Run the neighbors filter again
      |> filter_by_neighbors
    # Loop until no more change (final solution)
    if length(filtered_list) == length(list) do
      filtered_list
    else
      filter_by_unique_relations(filtered_list)
    end
  end
defp values_to_set(map) do
Map.new(map, fn {field, value} -> {field, MapSet.new([value])} end)
end
end
| 34.647668 | 98 | 0.629281 |
ff390ccffb2b24e3771e6a677076d8a63a49d51c | 3,345 | exs | Elixir | apps/omg_watcher_rpc/test/omg_watcher_rpc/web/controllers/challenge_test.exs | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 177 | 2018-08-24T03:51:02.000Z | 2020-05-30T13:29:25.000Z | apps/omg_watcher_rpc/test/omg_watcher_rpc/web/controllers/challenge_test.exs | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 1,042 | 2018-08-25T00:52:39.000Z | 2020-06-01T05:15:17.000Z | apps/omg_watcher_rpc/test/omg_watcher_rpc/web/controllers/challenge_test.exs | omisego/elixir-omg | 2c68973d8f29033d137f63a6e060f12e2a7dcd59 | [
"Apache-2.0"
] | 47 | 2018-08-24T12:06:33.000Z | 2020-04-28T11:49:25.000Z | # Copyright 2019-2020 OMG Network Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.WatcherRPC.Web.Controller.ChallengeTest do
  # Integration tests for the `utxo.get_challenge_data` RPC endpoint:
  # happy path, unspent-utxo rejection, and parameter validation errors.
  use ExUnitFixtures
  use ExUnit.Case, async: false
  use OMG.Watcher.Fixtures
  use OMG.WatcherInfo.Fixtures
  alias OMG.Watcher.Utxo
  alias OMG.WatcherInfo.DB
  alias Support.WatcherHelper
  require Utxo
  # Ether is represented by the zero address (20 zero bytes).
  @eth <<0::160>>
  @tag skip: true
  @tag fixtures: [:phoenix_ecto_sandbox, :alice]
  test "challenge data is properly formatted", %{alice: alice} do
    # Seed a deposit for alice, then include a transaction spending utxo
    # {1, 0, 0} in block 1000 so the deposit utxo becomes spent (and thus
    # challengeable).
    DB.EthEvent.insert_deposits!([%{owner: alice.addr, currency: @eth, amount: 100, blknum: 1, eth_height: 1}])
    block_application = %{
      transactions: [OMG.Watcher.TestHelper.create_recovered([{1, 0, 0, alice}], @eth, [{alice, 100}])],
      number: 1000,
      hash: <<?#::256>>,
      timestamp: :os.system_time(:second),
      eth_height: 1
    }
    {:ok, _} = DB.Block.insert_from_block_application(block_application)
    utxo_pos = Utxo.position(1, 0, 0) |> Utxo.Position.encode()
    # A successful response must carry all four challenge fields.
    %{
      "input_index" => _input_index,
      "utxo_pos" => _utxo_pos,
      "sig" => _sig,
      "txbytes" => _txbytes
    } = WatcherHelper.success?("utxo.get_challenge_data", %{"utxo_pos" => utxo_pos})
  end
  @tag skip: true
  @tag fixtures: [:phoenix_ecto_sandbox]
  test "challenging non-existent utxo returns error" do
    # Utxo {1, 1, 0} was never created/spent, so the challenge is invalid.
    utxo_pos = Utxo.position(1, 1, 0) |> Utxo.Position.encode()
    %{
      "code" => "challenge:invalid",
      "description" => "The challenge of particular exit is invalid because provided utxo is not spent"
    } = WatcherHelper.no_success?("utxo.get_challenge_data", %{"utxo_pos" => utxo_pos})
  end
  @tag fixtures: [:phoenix_ecto_sandbox]
  test "utxo.get_challenge_data handles improper type of parameter" do
    # A string where an integer is expected must fail the :integer validator.
    assert %{
             "object" => "error",
             "code" => "operation:bad_request",
             "description" => "Parameters required by this operation are missing or incorrect.",
             "messages" => %{
               "validation_error" => %{
                 "parameter" => "utxo_pos",
                 "validator" => ":integer"
               }
             }
           } == WatcherHelper.no_success?("utxo.get_challenge_data", %{"utxo_pos" => "1200000120000"})
  end
  @tag fixtures: [:phoenix_ecto_sandbox]
  test "utxo.get_exit_data handles too low utxo position inputs" do
    # Zero is rejected by the {:greater, 0} validator on utxo_pos.
    assert %{
             "object" => "error",
             "code" => "operation:bad_request",
             "description" => "Parameters required by this operation are missing or incorrect.",
             "messages" => %{
               "validation_error" => %{
                 "parameter" => "utxo_pos",
                 "validator" => "{:greater, 0}"
               }
             }
           } = WatcherHelper.no_success?("utxo.get_challenge_data", %{"utxo_pos" => 0})
  end
end
| 35.210526 | 111 | 0.633184 |
ff394d86159daea158562be2457a73e0f1f3e348 | 382 | ex | Elixir | web/views/error_view.ex | jtwebman/jtwebman_com | fedd924c1c9fb3677ad091124431e322937fb073 | [
"MIT"
] | null | null | null | web/views/error_view.ex | jtwebman/jtwebman_com | fedd924c1c9fb3677ad091124431e322937fb073 | [
"MIT"
] | null | null | null | web/views/error_view.ex | jtwebman/jtwebman_com | fedd924c1c9fb3677ad091124431e322937fb073 | [
"MIT"
] | null | null | null | defmodule JtwebmanCom.ErrorView do
use JtwebmanCom.Web, :view
def render("404.html", _assigns) do
"Page not found"
end
def render("500.html", _assigns) do
"Server internal error"
end
# In case no render clause matches or no
# template is found, let's render it as 500
def template_not_found(_template, assigns) do
render "500.html", assigns
end
end
| 21.222222 | 47 | 0.704188 |
ff395dd9974c914f67e393aba21179e5ab2add93 | 72 | exs | Elixir | test/views/page_view_test.exs | strofcon/scipse | f7597b73dac2e7ffe9f5aa0a403600d9d8ea8eeb | [
"Apache-2.0"
] | null | null | null | test/views/page_view_test.exs | strofcon/scipse | f7597b73dac2e7ffe9f5aa0a403600d9d8ea8eeb | [
"Apache-2.0"
] | null | null | null | test/views/page_view_test.exs | strofcon/scipse | f7597b73dac2e7ffe9f5aa0a403600d9d8ea8eeb | [
"Apache-2.0"
] | null | null | null | defmodule Scipse.PageViewTest do
use Scipse.ConnCase, async: true
end
| 18 | 34 | 0.805556 |
ff398c31bc7ffa44144546e4fadbfc33479e3312 | 10,963 | ex | Elixir | lib/phoenix_live_view/test/dom.ex | almirsarajcic/phoenix_live_view | 6cecf857494d4ec43d89be5d6cc4d4d4ff53780d | [
"MIT"
] | null | null | null | lib/phoenix_live_view/test/dom.ex | almirsarajcic/phoenix_live_view | 6cecf857494d4ec43d89be5d6cc4d4d4ff53780d | [
"MIT"
] | null | null | null | lib/phoenix_live_view/test/dom.ex | almirsarajcic/phoenix_live_view | 6cecf857494d4ec43d89be5d6cc4d4d4ff53780d | [
"MIT"
] | null | null | null | defmodule Phoenix.LiveViewTest.DOM do
@moduledoc false
@phx_static "data-phx-static"
@phx_component "data-phx-component"
@static :s
@components :c
def ensure_loaded! do
unless Code.ensure_loaded?(Floki) do
raise """
Phoenix LiveView requires Floki as a test dependency.
Please add to your mix.exs:
{:floki, ">= 0.30.0", only: :test}
"""
end
end
@spec parse(binary) :: [
{:comment, binary}
| {:pi | binary, binary | list, list}
| {:doctype, binary, binary, binary}
]
def parse(html) do
{:ok, parsed} = Floki.parse_document(html)
parsed
end
def all(html_tree, selector), do: Floki.find(html_tree, selector)
def maybe_one(html_tree, selector, type \\ :selector) do
case all(html_tree, selector) do
[node] ->
{:ok, node}
[] ->
{:error, :none,
"expected #{type} #{inspect(selector)} to return a single element, but got none " <>
"within: \n\n" <> inspect_html(html_tree)}
many ->
{:error, :many,
"expected #{type} #{inspect(selector)} to return a single element, " <>
"but got #{length(many)}: \n\n" <> inspect_html(many)}
end
end
def all_attributes(html_tree, name), do: Floki.attribute(html_tree, name)
def all_values({_, attributes, _}) do
for {attr, value} <- attributes, key = value_key(attr), do: {key, value}, into: %{}
end
def inspect_html(nodes) when is_list(nodes) do
for dom_node <- nodes, into: "", do: inspect_html(dom_node)
end
def inspect_html(dom_node),
do: " " <> String.replace(to_html(dom_node), "\n", "\n ") <> "\n"
defp value_key("phx-value-" <> key), do: key
defp value_key("value"), do: "value"
defp value_key(_), do: nil
def tag(node), do: elem(node, 0)
def attribute(node, key) do
with {tag, attrs, _children} when is_binary(tag) <- node,
{_, value} <- List.keyfind(attrs, key, 0) do
value
else
_ -> nil
end
end
def to_html(html_tree), do: Floki.raw_html(html_tree)
def to_text(html_tree), do: Floki.text(html_tree)
def by_id!(html_tree, id) do
case maybe_one(html_tree, "#" <> id) do
{:ok, node} -> node
{:error, _, message} -> raise message
end
end
def child_nodes({_, _, nodes}), do: nodes
def attrs({_, attrs, _}), do: attrs
def inner_html!(html_tree, id), do: html_tree |> by_id!(id) |> child_nodes()
def component_id(html_tree), do: Floki.attribute(html_tree, @phx_component) |> List.first()
def find_static_views(html_tree) do
html_tree
|> all("[#{@phx_static}]")
|> Enum.into(%{}, fn node ->
{attribute(node, "id"), attribute(node, @phx_static)}
end)
end
def find_live_views(html_tree) do
html_tree
|> all("[data-phx-session]")
|> Enum.reduce([], fn node, acc ->
id = attribute(node, "id")
static = attribute(node, "data-phx-static")
session = attribute(node, "data-phx-session")
main = attribute(node, "data-phx-main")
static = if static in [nil, ""], do: nil, else: static
found = {id, session, static}
if main == "true" do
acc ++ [found]
else
[found | acc]
end
end)
|> Enum.reverse()
end
def deep_merge(target, source) do
Map.merge(target, source, fn
_, %{} = target, %{} = source -> deep_merge(target, source)
_, _target, source -> source
end)
end
def filter(node, fun) do
node |> reverse_filter(fun) |> Enum.reverse()
end
def reverse_filter(node, fun) do
node
|> Floki.traverse_and_update([], fn node, acc ->
if fun.(node), do: {node, [node | acc]}, else: {node, acc}
end)
|> elem(1)
end
# Diff merging
def merge_diff(rendered, diff) do
{new, diff} = Map.pop(diff, @components)
rendered = deep_merge(rendered, diff)
# If we have any component, we need to get the components
# sent by the diff and remove any link between components
# statics. We cannot let those links reside in the diff
# as components can be removed at any time.
if new do
old = Map.get(rendered, @components, %{})
{acc, _} =
Enum.reduce(new, {old, %{}}, fn {cid, cdiff}, {acc, cache} ->
{value, cache} = find_component(cid, cdiff, old, new, cache)
{Map.put(acc, cid, value), cache}
end)
Map.put(rendered, @components, acc)
else
rendered
end
end
defp find_component(cid, cdiff, old, new, cache) do
case cache do
%{^cid => cached} ->
{cached, cache}
%{} ->
{res, cache} =
case cdiff do
%{@static => cid} when is_integer(cid) and cid > 0 ->
{res, cache} = find_component(cid, new[cid], old, new, cache)
{deep_merge(res, Map.delete(cdiff, @static)), cache}
%{@static => cid} when is_integer(cid) and cid < 0 ->
{deep_merge(old[-cid], Map.delete(cdiff, @static)), cache}
%{} ->
{deep_merge(Map.get(old, cid, %{}), cdiff), cache}
end
{res, Map.put(cache, cid, res)}
end
end
def drop_cids(rendered, cids) do
update_in(rendered[@components], &Map.drop(&1, cids))
end
# Diff rendering
def render_diff(rendered) do
rendered
|> Phoenix.LiveView.Diff.to_iodata(fn cid, contents ->
contents
|> IO.iodata_to_binary()
|> parse()
|> List.wrap()
|> Enum.map(walk_fun(&inject_cid_attr(&1, cid)))
|> to_html()
end)
|> IO.iodata_to_binary()
|> parse()
|> List.wrap()
end
defp inject_cid_attr({tag, attrs, children}, cid) do
{tag, [{@phx_component, to_string(cid)}] ++ attrs, children}
end
# Patching
def patch_id(id, html_tree, inner_html) do
cids_before = component_ids(id, html_tree)
phx_update_tree =
walk(inner_html, fn node ->
apply_phx_update(attribute(node, "phx-update"), html_tree, node)
end)
new_html =
walk(html_tree, fn {tag, attrs, children} = node ->
if attribute(node, "id") == id do
{tag, attrs, phx_update_tree}
else
{tag, attrs, children}
end
end)
cids_after = component_ids(id, new_html)
{new_html, cids_before -- cids_after}
end
def component_ids(id, html_tree) do
by_id!(html_tree, id)
|> Floki.children()
|> Enum.reduce([], &traverse_component_ids/2)
end
def replace_root_container(container_html, new_tag, attrs) do
reserved_attrs = ~w(id data-phx-session data-phx-static data-phx-main)
[{_container_tag, container_attrs_list, children}] = container_html
container_attrs = Enum.into(container_attrs_list, %{})
merged_attrs =
attrs
|> Enum.map(fn {attr, value} -> {String.downcase(to_string(attr)), value} end)
|> Enum.filter(fn {attr, _value} -> attr not in reserved_attrs end)
|> Enum.reduce(container_attrs_list, fn {attr, new_val}, acc ->
if Map.has_key?(container_attrs, attr) do
Enum.map(acc, fn
{^attr, _old_val} -> {attr, new_val}
{_, _} = other -> other
end)
else
acc ++ [{attr, new_val}]
end
end)
[{to_string(new_tag), merged_attrs, children}]
end
defp traverse_component_ids(current, acc) do
acc =
if id = attribute(current, @phx_component) do
[String.to_integer(id) | acc]
else
acc
end
cond do
attribute(current, @phx_static) ->
acc
children = Floki.children(current) ->
Enum.reduce(children, acc, &traverse_component_ids/2)
true ->
acc
end
end
defp apply_phx_update(type, html_tree, {tag, attrs, appended_children} = node)
when type in ["append", "prepend"] do
id = attribute(node, "id")
verify_phx_update_id!(type, id, node)
children_before = apply_phx_update_children(html_tree, id)
existing_ids = apply_phx_update_children_id(type, children_before)
new_ids = apply_phx_update_children_id(type, appended_children)
content_changed? = new_ids != existing_ids
dup_ids =
if content_changed? && new_ids do
Enum.filter(new_ids, fn id -> id in existing_ids end)
else
[]
end
{updated_existing_children, updated_appended} =
Enum.reduce(dup_ids, {children_before, appended_children}, fn dup_id, {before, appended} ->
patched_before =
walk(before, fn {tag, _, _} = node ->
cond do
attribute(node, "id") == dup_id ->
new_node = by_id!(appended, dup_id)
{tag, attrs(new_node), child_nodes(new_node)}
true ->
node
end
end)
{patched_before, Floki.filter_out(appended, "##{dup_id}")}
end)
cond do
content_changed? && type == "append" ->
{tag, attrs, updated_existing_children ++ updated_appended}
content_changed? && type == "prepend" ->
{tag, attrs, updated_appended ++ updated_existing_children}
!content_changed? ->
{tag, attrs, updated_appended}
end
end
defp apply_phx_update("ignore", _state, node) do
verify_phx_update_id!("ignore", attribute(node, "id"), node)
node
end
defp apply_phx_update(type, _state, node) when type in [nil, "replace"] do
node
end
defp apply_phx_update(other, _state, _node) do
raise ArgumentError,
"invalid phx-update value #{inspect(other)}, " <>
"expected one of \"replace\", \"append\", \"prepend\", \"ignore\""
end
defp verify_phx_update_id!(type, id, node) when id in ["", nil] do
raise ArgumentError,
"setting phx-update to #{inspect(type)} requires setting an ID on the container, " <>
"got: \n\n #{inspect_html(node)}"
end
defp verify_phx_update_id!(_type, _id, _node) do
:ok
end
defp apply_phx_update_children(html_tree, id) do
case by_id(html_tree, id) do
{_, _, children_before} -> children_before
nil -> []
end
end
defp apply_phx_update_children_id(type, children) do
for {tag, _, _} = child when is_binary(tag) <- children do
attribute(child, "id") ||
raise ArgumentError,
"setting phx-update to #{inspect(type)} requires setting an ID on each child. " <>
"No ID was found on:\n\n#{to_html(child)}"
end
end
## Helpers
defp walk(html_tree, fun) when is_function(fun, 1) do
Floki.traverse_and_update(html_tree, walk_fun(fun))
end
defp walk_fun(fun) when is_function(fun, 1) do
fn
text when is_binary(text) -> text
{:pi, _, _} = xml -> xml
{:comment, _children} = comment -> comment
{:doctype, _, _, _} = doctype -> doctype
{_tag, _attrs, _children} = node -> fun.(node)
end
end
defp by_id(html_tree, id) do
html_tree |> Floki.find("##{id}") |> List.first()
end
end
| 27.75443 | 97 | 0.598103 |
ff399031a2469d2e6b5ed139323984d9da462527 | 2,000 | exs | Elixir | config/prod.exs | inculi/Sue | 42e249aec1d9c467db63526966d9690d5c58f346 | [
"MIT"
] | 9 | 2018-03-23T11:18:21.000Z | 2021-08-06T18:38:37.000Z | config/prod.exs | inculi/Sue | 42e249aec1d9c467db63526966d9690d5c58f346 | [
"MIT"
] | 21 | 2017-12-01T05:57:10.000Z | 2021-06-06T18:53:25.000Z | config/prod.exs | inculi/Sue | 42e249aec1d9c467db63526966d9690d5c58f346 | [
"MIT"
] | 6 | 2018-03-23T11:24:21.000Z | 2021-08-06T18:40:28.000Z | use Mix.Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :sue_web, SueWeb.Endpoint,
url: [host: "example.com", port: 80],
cache_static_manifest: "priv/static/cache_manifest.json"
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :sue_web, SueWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH"),
# transport_options: [socket_opts: [:inet6]]
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :sue_web, SueWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# Do not print debug messages in production
config :logger, level: :info
# Finally import the config/prod.secret.exs which loads secrets
# and configuration from environment variables.
import_config "prod.secret.exs"
| 35.714286 | 66 | 0.713 |
ff3997562c1ac65e75f85b90392410bf46581b35 | 116 | ex | Elixir | test/support/repo.ex | STUDITEMPS/jehovakel_ex_ecto | 75ad8adff05f5a44062d6f338ed91c381b7be769 | [
"MIT"
] | null | null | null | test/support/repo.ex | STUDITEMPS/jehovakel_ex_ecto | 75ad8adff05f5a44062d6f338ed91c381b7be769 | [
"MIT"
] | 5 | 2021-09-27T02:14:02.000Z | 2021-12-22T09:21:24.000Z | test/support/repo.ex | STUDITEMPS/jehovakel_ex_ecto | 75ad8adff05f5a44062d6f338ed91c381b7be769 | [
"MIT"
] | 2 | 2021-06-28T06:46:14.000Z | 2021-07-05T09:27:19.000Z | defmodule JehovakelExEcto.Repo do
use Ecto.Repo, otp_app: :jehovakel_ex_ecto, adapter: Ecto.Adapters.Postgres
end
| 29 | 77 | 0.818966 |
ff399ed61ed23b0c28a02ae7013feb0144e65946 | 601 | exs | Elixir | code/ch10/list_and_recursion_5_6.exs | tannineo/programming_elixir_gte_1.6 | 25c8a3670f2f1c3559000887888e7c5fdd9deaef | [
"Unlicense"
] | null | null | null | code/ch10/list_and_recursion_5_6.exs | tannineo/programming_elixir_gte_1.6 | 25c8a3670f2f1c3559000887888e7c5fdd9deaef | [
"Unlicense"
] | null | null | null | code/ch10/list_and_recursion_5_6.exs | tannineo/programming_elixir_gte_1.6 | 25c8a3670f2f1c3559000887888e7c5fdd9deaef | [
"Unlicense"
] | null | null | null | defmodule MyEnum do
# Check `h Enum.all?`
def all?(list, func \\ & &1)
def all?([], _func), do: true
def all?([head | tail], func) do
if func.(head) do
all?(tail, func)
else
false
end
end
# Check `h Enum.each`
def each([], _func), do: :ok
def each([head | tail], func) do
func.(head)
each(tail, func)
end
# Check `h Enum.filter`
def filter([], _func), do: []
def filter([head | tail], func) do
if func.(head) do
[head | filter(tail, func)]
else
filter(tail, func)
end
end
# Check `h Enum.split`
# TODO
end
| 15.410256 | 36 | 0.540765 |
ff39a6b8e9f642b0b9fe17eb8c9a1d537d57a713 | 1,857 | ex | Elixir | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/google_cloud_asset_v1_gcs_destination.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/google_cloud_asset_v1_gcs_destination.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/google_cloud_asset_v1_gcs_destination.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudAsset.V1.Model.GoogleCloudAssetV1GcsDestination do
  @moduledoc """
  A Cloud Storage location.
  ## Attributes
  * `uri` (*type:* `String.t`, *default:* `nil`) - Required. The uri of the Cloud Storage object. It's the same uri that is used by gsutil. Example: "gs://bucket_name/object_name". See [Viewing and Editing Object Metadata](https://cloud.google.com/storage/docs/viewing-editing-metadata) for more information. If the specified Cloud Storage object already exists and there is no [hold](https://cloud.google.com/storage/docs/object-holds), it will be overwritten with the analysis result.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :uri => String.t() | nil
        }
  # `field/1` comes from GoogleApi.Gax.ModelBase — presumably registers :uri
  # for JSON (de)serialization; confirm in the gax library. This file is
  # generator output, so avoid editing by hand.
  field(:uri)
end
# Delegates Poison decoding to the model module's generated decode/2.
defimpl Poison.Decoder, for: GoogleApi.CloudAsset.V1.Model.GoogleCloudAssetV1GcsDestination do
  def decode(value, options) do
    GoogleApi.CloudAsset.V1.Model.GoogleCloudAssetV1GcsDestination.decode(value, options)
  end
end
# Delegates Poison encoding to the shared gax model-base encoder.
defimpl Poison.Encoder, for: GoogleApi.CloudAsset.V1.Model.GoogleCloudAssetV1GcsDestination do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 39.510638 | 488 | 0.754443 |
ff39db79e0d8639690129a711f1e59d7634540f7 | 1,127 | exs | Elixir | config/config.exs | line/clova-cek-sdk-elixir-sample | abf263d9ffc6846a3fd36ca2500beb8a679485ff | [
"Apache-2.0"
] | 1 | 2018-07-24T01:41:04.000Z | 2018-07-24T01:41:04.000Z | config/config.exs | line/clova-cek-sdk-elixir-sample | abf263d9ffc6846a3fd36ca2500beb8a679485ff | [
"Apache-2.0"
] | null | null | null | config/config.exs | line/clova-cek-sdk-elixir-sample | abf263d9ffc6846a3fd36ca2500beb8a679485ff | [
"Apache-2.0"
] | 1 | 2018-08-29T05:14:31.000Z | 2018-08-29T05:14:31.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :air_quality, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:air_quality, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
import_config "#{Mix.env()}.exs"
| 36.354839 | 73 | 0.755989 |
ff39e5092745c55d4ca11858d40baed65539f6d5 | 205 | ex | Elixir | lib/ifs_bq/db_connector.ex | philosodad/bridge_queue | 33b2ca93a5e7d41af851e8ac1eb2a3e7ed601b2e | [
"MIT"
] | null | null | null | lib/ifs_bq/db_connector.ex | philosodad/bridge_queue | 33b2ca93a5e7d41af851e8ac1eb2a3e7ed601b2e | [
"MIT"
] | null | null | null | lib/ifs_bq/db_connector.ex | philosodad/bridge_queue | 33b2ca93a5e7d41af851e8ac1eb2a3e7ed601b2e | [
"MIT"
] | null | null | null | defmodule IfsBq.DbConnector do
def add_message(message, shard_id) do
changeset = %IfsBq.Message{message_body: message, shard_id: shard_id}
{:ok, message} = IfsBq.Repo.insert(changeset)
end
end
| 29.285714 | 73 | 0.746341 |
ff39f87db7b5b0b983152e4a369ee216e20ae28d | 264 | exs | Elixir | todo/test/todo_web/views/layout_view_test.exs | mpeseke/literate-waffle | 1b8da264d841d9f1d076729936b060c79f93e1aa | [
"Apache-2.0"
] | null | null | null | todo/test/todo_web/views/layout_view_test.exs | mpeseke/literate-waffle | 1b8da264d841d9f1d076729936b060c79f93e1aa | [
"Apache-2.0"
] | 22 | 2021-12-11T13:12:45.000Z | 2022-01-29T14:20:03.000Z | todo/test/todo_web/views/layout_view_test.exs | IITA-DMCS-2122/iita_elixir | e636360edd96f0bfcde91eec212a467f10186437 | [
"MIT"
] | null | null | null | defmodule TodoWeb.LayoutViewTest do
use TodoWeb.ConnCase, async: true
# When testing helpers, you may want to import Phoenix.HTML and
# use functions such as safe_to_string() to convert the helper
# result into an HTML string.
# import Phoenix.HTML
end
| 29.333333 | 65 | 0.761364 |
ff3a1407ddfaac65e2ab6f3d75d02f66649d16d6 | 501 | exs | Elixir | apps/core/priv/prm_repo/migrations/20190423084815_legal_entity_signed_contents.exs | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 8 | 2019-06-14T11:34:49.000Z | 2021-08-05T19:14:24.000Z | apps/core/priv/prm_repo/migrations/20190423084815_legal_entity_signed_contents.exs | edenlabllc/ehealth.api.public | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 1 | 2019-07-08T15:20:22.000Z | 2019-07-08T15:20:22.000Z | apps/core/priv/prm_repo/migrations/20190423084815_legal_entity_signed_contents.exs | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 6 | 2018-05-11T13:59:32.000Z | 2022-01-19T20:15:22.000Z | defmodule Core.PRMRepo.Migrations.LegalEntitySignedContents do
@moduledoc false
use Ecto.Migration
def change do
create table("legal_entity_signed_contents", primary_key: false) do
add(:id, :uuid, primary_key: true)
add(:filename, :string)
add(:legal_entity_id, references(:legal_entities, type: :uuid, on_delete: :nothing))
timestamps(type: :utc_datetime, updated_at: false)
end
create(index(:legal_entity_signed_contents, [:legal_entity_id]))
end
end
| 27.833333 | 90 | 0.730539 |
ff3a1dffe111760a86fd56e050d28b4cb2b193dc | 64 | ex | Elixir | lib/example_web/views/session_view.ex | BenSchZA/kawapi-phoenix | 5ed91b1d30d91bcc85c5f120d18691d236a41e88 | [
"MIT"
] | 1 | 2020-12-18T17:23:31.000Z | 2020-12-18T17:23:31.000Z | lib/example_web/views/session_view.ex | BenSchZA/kawapi-phoenix | 5ed91b1d30d91bcc85c5f120d18691d236a41e88 | [
"MIT"
] | null | null | null | lib/example_web/views/session_view.ex | BenSchZA/kawapi-phoenix | 5ed91b1d30d91bcc85c5f120d18691d236a41e88 | [
"MIT"
] | null | null | null | defmodule ExampleWeb.SessionView do
use ExampleWeb, :view
end
| 16 | 35 | 0.8125 |
ff3a25f8dd8e5ff590594722066b6c8a5396f526 | 1,570 | ex | Elixir | clients/translate/lib/google_api/translate/v2/model/detections_resource_inner.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/translate/lib/google_api/translate/v2/model/detections_resource_inner.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/translate/lib/google_api/translate/v2/model/detections_resource_inner.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Translate.V2.Model.DetectionsResourceInner do
  @moduledoc """
  ## Attributes
  - confidence (float()): The confidence of the detection result of this language. Defaults to: `null`.
  - isReliable (boolean()): A boolean to indicate is the language detection result reliable. Defaults to: `null`.
  - language (String.t): The language we detected. Defaults to: `null`.
  """
  # Generated API model (see file header): a plain struct mirroring one
  # entry of the Translate v2 language-detection response.
  defstruct [
    :confidence,
    :isReliable,
    :language
  ]
end
# Identity decoder: the struct's fields need no post-processing after
# Poison has populated them.
defimpl Poison.Decoder, for: GoogleApi.Translate.V2.Model.DetectionsResourceInner do
  def decode(value, _options) do
    value
  end
end
# Delegates encoding to the generated serializer helper, which skips nil
# fields. NOTE(review): the helper lives in a module named `Deserializer`
# despite serializing — that naming comes from the code generator.
defimpl Poison.Encoder, for: GoogleApi.Translate.V2.Model.DetectionsResourceInner do
  def encode(value, options) do
    GoogleApi.Translate.V2.Deserializer.serialize_non_nil(value, options)
  end
end
| 32.708333 | 113 | 0.751592 |
ff3a3366b001341ba8c7315feea6a6aa9b7e7cec | 671 | ex | Elixir | lib/writer_supervisor.ex | alejandrodnm/nine_digits_server | 5f3bf3b25fb6af8dae60be916441f057d6aa7f8a | [
"MIT"
] | null | null | null | lib/writer_supervisor.ex | alejandrodnm/nine_digits_server | 5f3bf3b25fb6af8dae60be916441f057d6aa7f8a | [
"MIT"
] | null | null | null | lib/writer_supervisor.ex | alejandrodnm/nine_digits_server | 5f3bf3b25fb6af8dae60be916441f057d6aa7f8a | [
"MIT"
] | null | null | null | defmodule Writer.Supervisor do
@moduledoc """
Supervises the pool of `Writer`.
"""
use Supervisor
def start_link(opts) do
Supervisor.start_link(__MODULE__, :ok, opts)
end
@doc """
Starts as many `Writer` proccesses as the concurrency level.
The concurrency level is set in the config and defaults to 5
"""
def init(:ok) do
concurrency = Application.get_env(:nine_digits, :concurrency, 5)
children =
for n <- 1..concurrency do
%{
id: {Writer, n},
start: {Writer, :start_link, [[name: String.to_atom("Writer#{n}")]]}
}
end
Supervisor.init(children, strategy: :one_for_one)
end
end
| 23.137931 | 78 | 0.634873 |
ff3a57baf13d91722f6c1ad16443ac9fefb642fa | 1,096 | exs | Elixir | test/integration/isolation_test.exs | chrismo/oban | f912ccf75a1d89e02229041d578f9263d4de0232 | [
"Apache-2.0"
] | 2,219 | 2019-04-10T01:50:19.000Z | 2022-03-30T11:20:01.000Z | test/integration/isolation_test.exs | chrismo/oban | f912ccf75a1d89e02229041d578f9263d4de0232 | [
"Apache-2.0"
] | 532 | 2019-05-16T00:22:28.000Z | 2022-03-31T19:04:02.000Z | test/integration/isolation_test.exs | chrismo/oban | f912ccf75a1d89e02229041d578f9263d4de0232 | [
"Apache-2.0"
] | 230 | 2019-05-15T14:15:18.000Z | 2022-03-23T22:59:43.000Z | defmodule Oban.Integration.IsolationTest do
use Oban.Case
use Oban.Testing, repo: Oban.Test.Repo, prefix: "private"
@moduletag :integration
test "inserting jobs with a custom prefix" do
name = start_supervised_oban!(prefix: "private")
insert!(name, %{ref: 1, action: "OK"}, [])
assert_enqueued worker: Worker
end
test "inserting and executing unique jobs with a custom prefix" do
name = start_supervised_oban!(prefix: "private", queues: [alpha: 5])
insert!(name, %{ref: 1, action: "OK"}, unique: [period: 60, fields: [:worker]])
insert!(name, %{ref: 2, action: "OK"}, unique: [period: 60, fields: [:worker]])
assert_receive {:ok, 1}
refute_receive {:ok, 2}
end
test "controlling jobs with a custom prefix" do
name = start_supervised_oban!(prefix: "private", queues: [alpha: 5])
insert!(name, %{ref: 1}, schedule_in: 10)
insert!(name, %{ref: 2}, schedule_in: 10)
insert!(name, %{ref: 3}, schedule_in: 10)
assert {:ok, 3} = Oban.cancel_all_jobs(name, Job)
assert {:ok, 3} = Oban.retry_all_jobs(name, Job)
end
end
| 29.621622 | 83 | 0.656934 |
ff3a5f4a78f75cec55d584bd53c25837ea146521 | 26,855 | ex | Elixir | lib/oban.ex | xward/oban | c1623b604c341ba14932152b6efc1e481413e7b7 | [
"Apache-2.0"
] | null | null | null | lib/oban.ex | xward/oban | c1623b604c341ba14932152b6efc1e481413e7b7 | [
"Apache-2.0"
] | 13 | 2021-11-17T11:18:04.000Z | 2022-03-23T12:18:18.000Z | lib/oban.ex | xward/oban | c1623b604c341ba14932152b6efc1e481413e7b7 | [
"Apache-2.0"
] | null | null | null | defmodule Oban do
@external_resource readme = Path.join([__DIR__, "../README.md"])
@moduledoc readme
|> File.read!()
|> String.split("<!-- MDOC -->")
|> Enum.fetch!(1)
@moduledoc since: "0.1.0"
use Supervisor
alias Ecto.{Changeset, Multi}
alias Oban.{Config, Job, Midwife, Notifier, Query, Registry, Telemetry}
alias Oban.Queue.{Drainer, Engine, Producer}
alias Oban.Queue.Supervisor, as: QueueSupervisor
@type name :: term
@type queue_name :: atom() | binary()
@type queue_option ::
{:queue, queue_name()}
| {:limit, pos_integer()}
| {:local_only, boolean()}
@type queue_state :: %{
:limit => pos_integer(),
:node => binary(),
:paused => boolean(),
:queue => queue_name(),
:running => [pos_integer()],
:started_at => DateTime.t(),
:updated_at => DateTime.t(),
optional(atom()) => any()
}
@type option ::
{:circuit_backoff, timeout()}
| {:dispatch_cooldown, pos_integer()}
| {:get_dynamic_repo, nil | (() -> pid() | atom())}
| {:log, false | Logger.level()}
| {:name, name()}
| {:node, binary()}
| {:plugins, [module() | {module() | Keyword.t()}]}
| {:prefix, binary()}
| {:queues, [{queue_name(), pos_integer() | Keyword.t()}]}
| {:repo, module()}
| {:shutdown_grace_period, timeout()}
@type drain_option ::
{:queue, queue_name()}
| {:with_limit, pos_integer()}
| {:with_recursion, boolean()}
| {:with_safety, boolean()}
| {:with_scheduled, boolean()}
@type drain_result :: %{
failure: non_neg_integer(),
snoozed: non_neg_integer(),
success: non_neg_integer()
}
@type wrapper :: %{:changesets => Job.changeset_list(), optional(atom()) => term()}
@type changesets_or_wrapper :: Job.changeset_list() | wrapper()
@doc """
Starts an `Oban` supervision tree linked to the current process.
## Options
These options are required; without them the supervisor won't start
* `:name` — used for supervisor registration, defaults to `Oban`
* `:repo` — specifies the Ecto repo used to insert and retrieve jobs
### Primary Options
These options determine what the system does at a high level, i.e. which queues to run.
* `:node` — used to identify the node that the supervision tree is running in. If no value is
provided it will use the `node` name in a distributed system, or the `hostname` in an isolated
node. See "Node Name" below.
* `:plugins` — a list or modules or module/option tuples that are started as children of an Oban
supervisor. Any supervisable module is a valid plugin, i.e. a `GenServer` or an `Agent`.
* `:prefix` — the query prefix, or schema, to use for inserting and executing jobs. An
`oban_jobs` table must exist within the prefix. See the "Prefix Support" section in the module
documentation for more details.
* `:queues` — a keyword list where the keys are queue names and the values are the concurrency
setting or a keyword list of queue options. For example, setting queues to `[default: 10,
exports: 5]` would start the queues `default` and `exports` with a combined concurrency level
of 15. The concurrency setting specifies how many jobs _each queue_ will run concurrently.
Queues accept additional override options to customize their behavior, e.g. by setting
`paused` or `dispatch_cooldown` for a specific queue.
For testing purposes `:queues` may be set to `false` or `nil`, which effectively disables all
job dispatching.
* `:log` — either `false` to disable logging or a standard log level (`:error`, `:warn`,
`:info`, `:debug`). This determines whether queries are logged or not; overriding the repo's
configured log level. Defaults to `false`, where no queries are logged.
### Twiddly Options
Additional options used to tune system behaviour. These are primarily useful for testing or
troubleshooting and don't usually need modification.
* `:circuit_backoff` — the number of milliseconds until queries are attempted after a database
error. All processes communicating with the database are equipped with circuit breakers and
will use this for the backoff. Defaults to `30_000ms`.
* `:dispatch_cooldown` — the minimum number of milliseconds a producer will wait before fetching
and running more jobs. A slight cooldown period prevents a producer from flooding with
messages and thrashing the database. The cooldown period _directly impacts_ a producer's
throughput: jobs per second for a single queue is calculated by `(1000 / cooldown) * limit`.
For example, with a `5ms` cooldown and a queue limit of `25` a single queue can run 5,000
jobs/sec.
The default is `5ms` and the minimum is `1ms`, which is likely faster than the database can
return new jobs to run.
* `:shutdown_grace_period` - the amount of time a queue will wait for executing jobs to complete
before hard shutdown, specified in milliseconds. The default is `15_000`, or 15 seconds.
## Example
To start an `Oban` supervisor within an application's supervision tree:
def start(_type, _args) do
children = [MyApp.Repo, {Oban, queues: [default: 50]}]
Supervisor.start_link(children, strategy: :one_for_one, name: MyApp.Supervisor)
end
## Node Name
When the `node` value hasn't been configured it is generated based on the environment:
* In a distributed system the node name is used
* In a Heroku environment the system environment's `DYNO` value is used
* Otherwise, the system hostname is used
"""
@doc since: "0.1.0"
@spec start_link([option()]) :: Supervisor.on_start()
def start_link(opts) when is_list(opts) do
conf = Config.new(opts)
Supervisor.start_link(__MODULE__, conf, name: Registry.via(conf.name, nil, conf))
end
@spec child_spec([option]) :: Supervisor.child_spec()
def child_spec(opts) do
opts
|> super()
|> Supervisor.child_spec(id: Keyword.get(opts, :name, __MODULE__))
end
@doc """
Returns the pid of the root Oban process for the given name.
## Example
Find the default instance:
Oban.whereis(Oban)
Find a dynamically named instance:
Oban.whereis({:oban, 1})
"""
@doc since: "2.2.0"
@spec whereis(name()) :: pid() | nil
def whereis(name), do: Registry.whereis(name)
@impl Supervisor
def init(%Config{plugins: plugins, queues: queues} = conf) do
  # Core processes (notifier + midwife) come first, then configured plugins,
  # then one supervisor per queue, and finally a one-shot telemetry task.
  core_specs = [
    {Notifier, conf: conf, name: Registry.via(conf.name, Notifier)},
    {Midwife, conf: conf, name: Registry.via(conf.name, Midwife)}
  ]

  plugin_specs = Enum.map(plugins, &plugin_child_spec(&1, conf))
  queue_specs = Enum.map(queues, &QueueSupervisor.child_spec(&1, conf))

  Supervisor.init(
    core_specs ++ plugin_specs ++ queue_specs ++ [event_child_spec(conf)],
    strategy: :one_for_one
  )
end
@doc """
Retrieve the config struct for a named Oban supervision tree.
"""
@doc since: "0.2.0"
@spec config(name()) :: Config.t()
def config(name \\ __MODULE__), do: Registry.config(name)
@doc """
Insert a new job into the database for execution.
This and the other `insert` variants are the recommended way to enqueue jobs because they
support features like unique jobs.
See the section on "Unique Jobs" for more details.
## Example
Insert a single job:
{:ok, job} = Oban.insert(MyApp.Worker.new(%{id: 1}))
Insert a job while ensuring that it is unique within the past 30 seconds:
{:ok, job} = Oban.insert(MyApp.Worker.new(%{id: 1}, unique: [period: 30]))
"""
@doc since: "0.7.0"
@spec insert(name(), Job.changeset()) ::
{:ok, Job.t()} | {:error, Job.changeset()} | {:error, term()}
def insert(name \\ __MODULE__, %Changeset{} = changeset) do
name
|> config()
|> Query.fetch_or_insert_job(changeset)
end
@doc """
Put a job insert operation into an `Ecto.Multi`.
Like `insert/2`, this variant is recommended over `Ecto.Multi.insert` because it supports all of
Oban's features, i.e. unique jobs.
See the section on "Unique Jobs" for more details.
## Example
Ecto.Multi.new()
|> Oban.insert("job-1", MyApp.Worker.new(%{id: 1}))
|> Oban.insert("job-2", fn _ -> MyApp.Worker.new(%{id: 2}) end)
|> MyApp.Repo.transaction()
"""
@doc since: "0.7.0"
@spec insert(
name,
multi :: Multi.t(),
multi_name :: Multi.name(),
changeset_or_fun :: Job.changeset() | Job.changeset_fun()
) :: Multi.t()
def insert(name \\ __MODULE__, multi, multi_name, changeset_or_fun)
def insert(name, %Multi{} = multi, multi_name, %Changeset{} = changeset) do
name
|> config()
|> Query.fetch_or_insert_job(multi, multi_name, changeset)
end
def insert(name, %Multi{} = multi, multi_name, fun) when is_function(fun, 1) do
name
|> config()
|> Query.fetch_or_insert_job(multi, multi_name, fun)
end
@doc """
Like `insert/2`, but raises when the job can't be inserted.

An invalid changeset raises `Ecto.InvalidChangesetError`; any other error
reason raises a `RuntimeError` with the inspected reason as the message.

## Example

    job = Oban.insert!(MyApp.Worker.new(%{id: 1}))
"""
@doc since: "0.7.0"
@spec insert!(name(), Job.changeset()) :: Job.t()
def insert!(name \\ __MODULE__, %Changeset{} = changeset) do
  with {:error, reason} <- insert(name, changeset) do
    case reason do
      %Changeset{} = invalid ->
        raise Ecto.InvalidChangesetError, action: :insert, changeset: invalid

      other ->
        raise RuntimeError, inspect(other)
    end
  else
    {:ok, job} -> job
  end
end
@doc """
Insert multiple jobs into the database for execution.
Insertion respects `prefix` and `log` settings, but it *does not use* per-job unique
configuration. You must use `insert/2,4` or `insert!/2` for per-job unique support.
There are a few important differences between this function and `c:Ecto.Repo.insert_all/3`:
1. This function always returns a list rather than a tuple of `{count, records}`
2. This function requires a list of changesets rather than a list of maps or keyword lists
## Example
1..100
|> Enum.map(&MyApp.Worker.new(%{id: &1}))
|> Oban.insert_all()
"""
@doc since: "0.9.0"
@spec insert_all(name(), changesets_or_wrapper()) :: [Job.t()]
def insert_all(name \\ __MODULE__, changesets_or_wrapper)
def insert_all(name, %{changesets: changesets}) when is_list(changesets) do
insert_all(name, changesets)
end
def insert_all(name, changesets) when is_list(changesets) do
name
|> config()
|> Query.insert_all_jobs(changesets)
end
@doc """
Put an `insert_all` operation into an `Ecto.Multi`.
This function supports the same features and has the same caveats as `insert_all/2`.
## Example
changesets = Enum.map(0..100, &MyApp.Worker.new(%{id: &1}))
Ecto.Multi.new()
|> Oban.insert_all(:jobs, changesets)
|> MyApp.Repo.transaction()
"""
@doc since: "0.9.0"
@spec insert_all(
name(),
multi :: Multi.t(),
multi_name :: Multi.name(),
changesets_or_wrapper() | Job.changeset_list_fun()
) :: Multi.t()
def insert_all(name \\ __MODULE__, multi, multi_name, changesets_or_wrapper)
def insert_all(name, multi, multi_name, %{changesets: changesets}) when is_list(changesets) do
insert_all(name, multi, multi_name, changesets)
end
def insert_all(name, %Multi{} = multi, multi_name, changesets)
when is_list(changesets) or is_function(changesets, 1) do
name
|> config()
|> Query.insert_all_jobs(multi, multi_name, changesets)
end
@doc """
Synchronously execute all available jobs in a queue.
All execution happens within the current process and it is guaranteed not to raise an error or
exit.
Draining a queue from within the current process is especially useful for testing. Jobs that are
enqueued by a process when `Ecto` is in sandbox mode are only visible to that process. Calling
`drain_queue/2` allows you to control when the jobs are executed and to wait synchronously for
all jobs to complete.
## Failures & Retries
Draining a queue uses the same execution mechanism as regular job dispatch. That means that any
job failures or crashes are captured and result in a retry. Retries are scheduled in the future
with backoff and won't be retried immediately.
By default jobs are executed in `safe` mode, just as they are in production. Safe mode catches
any errors or exits and records the formatted error in the job's `errors` array. That means
exceptions and crashes are _not_ bubbled up to the calling process.
If you expect jobs to fail, would like to track failures, or need to check for specific errors
you can pass the `with_safety: false` flag.
## Scheduled Jobs
By default, `drain_queue/2` will execute all currently available jobs. In order to execute
scheduled jobs, you may pass the `:with_scheduled` flag which will cause scheduled jobs to be
marked as `available` beforehand.
## Options
* `:queue` - a string or atom specifying the queue to drain, required
* `:with_limit` — the maximum number of jobs to drain at once. When recursion is enabled this is
how many jobs are processed per-iteration.
* `:with_recursion` — whether to keep draining a queue repeatedly when jobs insert _more_ jobs
* `:with_safety` — whether to silently catch errors when draining, default `true`
* `:with_scheduled` — whether to include any scheduled jobs when draining, default `false`
## Example
Drain a queue with three available jobs, two of which succeed and one of which fails:
Oban.drain_queue(queue: :default)
%{failure: 1, snoozed: 0, success: 2}
Drain a queue including any scheduled jobs:
Oban.drain_queue(queue: :default, with_scheduled: true)
%{failure: 0, snoozed: 0, success: 1}
Drain a queue and assert an error is raised:
assert_raise RuntimeError, fn -> Oban.drain_queue(queue: :risky, with_safety: false) end
Drain a queue repeatedly until there aren't any more jobs to run. This is particularly useful
for testing jobs that enqueue other jobs:
Oban.drain_queue(queue: :default, with_recursion: true)
%{failure: 1, snoozed: 0, success: 2}
Drain only the top (by scheduled time and priority) five jobs off a queue:
Oban.drain_queue(queue: :default, with_limit: 5)
%{failure: 0, snoozed: 0, success: 1}
Drain a queue recursively, only one job at a time:
Oban.drain_queue(queue: :default, with_limit: 1, with_recursion: true)
%{failure: 0, snoozed: 0, success: 3}
"""
@doc since: "0.4.0"
@spec drain_queue(name(), [drain_option()]) :: drain_result()
def drain_queue(name \\ __MODULE__, [_ | _] = opts) do
name
|> config()
|> Drainer.drain(opts)
end
@doc """
Start a new supervised queue.
By default this starts a new supervised queue across all nodes running Oban on the same database
and prefix. You can pass the option `local_only: true` if you prefer to start the queue only on
the local node.
## Options
* `:queue` - a string or atom specifying the queue to start, required
* `:local_only` - whether the queue will be started only on the local node, default: `false`
* `:limit` - set the concurrency limit, required
* `:paused` — set whether the queue starts in the "paused" state, optional
In addition, all engine-specific queue options are passed along after validation.
## Example
Start the `:priority` queue with a concurrency limit of 10 across the connected nodes.
Oban.start_queue(queue: :priority, limit: 10)
:ok
Start the `:media` queue with a concurrency limit of 5 only on the local node.
Oban.start_queue(queue: :media, limit: 5, local_only: true)
:ok
Start the `:media` queue in a `paused` state.
Oban.start_queue(queue: :media, limit: 5, paused: true)
:ok
"""
@doc since: "0.12.0"
@spec start_queue(name(), opts :: Keyword.t()) :: :ok
def start_queue(name \\ __MODULE__, [_ | _] = opts) do
  conf = config(name)

  # Generic queue options are checked locally; everything else is validated
  # by the configured engine before the signal is broadcast.
  validate_queue_opts!(opts, [:queue, :local_only])
  validate_engine_meta!(conf, opts)

  signal =
    opts
    |> Map.new()
    |> Map.merge(%{action: :start, ident: scope_signal(conf, opts)})

  Notifier.notify(conf, :signal, signal)
end
@doc """
Pause a running queue, preventing it from executing any new jobs. All running jobs will remain
running until they are finished.
When shutdown begins all queues are paused.
## Options
* `:queue` - a string or atom specifying the queue to pause, required
* `:local_only` - whether the queue will be paused only on the local node, default: `false`
## Example
Pause the default queue:
Oban.pause_queue(queue: :default)
:ok
Pause the default queue, but only on the local node:
Oban.pause_queue(queue: :default, local_only: true)
:ok
"""
@doc since: "0.2.0"
@spec pause_queue(name(), opts :: [queue_option()]) :: :ok
def pause_queue(name \\ __MODULE__, [_ | _] = opts) do
validate_queue_opts!(opts, [:queue, :local_only])
conf = config(name)
data = %{action: :pause, queue: opts[:queue], ident: scope_signal(conf, opts)}
Notifier.notify(conf, :signal, data)
end
@doc """
Resume executing jobs in a paused queue.
## Options
* `:queue` - a string or atom specifying the queue to resume, required
* `:local_only` - whether the queue will be resumed only on the local node, default: `false`
## Example
Resume a paused default queue:
Oban.resume_queue(:default)
:ok
Resume the default queue, but only on the local node:
Oban.resume_queue(queue: :default, local_only: true)
:ok
"""
@doc since: "0.2.0"
@spec resume_queue(name(), opts :: [queue_option()]) :: :ok
def resume_queue(name \\ __MODULE__, [_ | _] = opts) do
validate_queue_opts!(opts, [:queue, :local_only])
conf = config(name)
data = %{action: :resume, queue: opts[:queue], ident: scope_signal(conf, opts)}
Notifier.notify(conf, :signal, data)
end
@doc """
Scale the concurrency for a queue.
## Options
* `:queue` - a string or atom specifying the queue to scale, required
* `:limit` — the new concurrency limit, required
* `:local_only` — whether the queue will be scaled only on the local node, default: `false`
In addition, all engine-specific queue options are passed along after validation.
## Example
Scale a queue up, triggering immediate execution of queued jobs:
Oban.scale_queue(queue: :default, limit: 50)
:ok
Scale the queue back down, allowing executing jobs to finish:
Oban.scale_queue(queue: :default, limit: 5)
:ok
Scale the queue only on the local node:
Oban.scale_queue(queue: :default, limit: 10, local_only: true)
:ok
"""
@doc since: "0.2.0"
@spec scale_queue(name(), opts :: [queue_option()]) :: :ok
def scale_queue(name \\ __MODULE__, [_ | _] = opts) do
conf = config(name)
validate_queue_opts!(opts, [:queue, :local_only])
validate_engine_meta!(conf, opts)
data =
opts
|> Map.new()
|> Map.put(:action, :scale)
|> Map.put(:ident, scope_signal(conf, opts))
Notifier.notify(conf, :signal, data)
end
@doc """
Shutdown a queue's supervision tree and stop running jobs for that queue.
By default this action will occur across all the running nodes. Still, if you prefer to stop the
queue's supervision tree and stop running jobs for that queue only on the local node, you can
pass the option: `local_only: true`
The shutdown process pauses the queue first and allows current jobs to exit gracefully, provided
they finish within the shutdown limit.
## Options
* `:queue` - a string or atom specifying the queue to stop, required
* `:local_only` - whether the queue will be stopped only on the local node, default: `false`
## Example
Oban.stop_queue(queue: :default)
:ok
Oban.stop_queue(queue: :media, local_only: true)
:ok
"""
@doc since: "0.12.0"
@spec stop_queue(name(), opts :: [queue_option()]) :: :ok
def stop_queue(name \\ __MODULE__, [_ | _] = opts) do
validate_queue_opts!(opts, [:queue, :local_only])
conf = config(name)
data = %{action: :stop, queue: opts[:queue], ident: scope_signal(conf, opts)}
Notifier.notify(conf, :signal, data)
end
@doc """
Check the current state of a queue producer.
This allows you to introspect on a queue's health by retrieving key attributes of the producer's
state; values such as the current `limit`, the `running` job ids, and when the producer was
started.
## Options
* `:queue` - a string or atom specifying the queue to check, required
## Example
Oban.check_queue(queue: :default)
%{
limit: 10,
node: "me@local",
paused: false,
queue: "default",
running: [100, 102],
started_at: ~D[2020-10-07 15:31:00],
updated_at: ~D[2020-10-07 15:31:00]
}
"""
@doc since: "2.2.0"
@spec check_queue(name(), opts :: [{:queue, queue_name()}]) :: queue_state()
def check_queue(name \\ __MODULE__, [_ | _] = opts) do
  validate_queue_opts!(opts, [:queue])

  # Producers are registered per queue name (as a string) under the instance.
  producer_key = {:producer, to_string(opts[:queue])}

  name
  |> Registry.via(producer_key)
  |> Producer.check()
end
@doc """
Sets a job as `available`, adding attempts if already maxed out. If the job is currently
`available`, `executing` or `scheduled` it will be ignored. The job is scheduled for immediate
execution.
## Example
Retry a discarded job with the id `1`:
Oban.retry_job(1)
:ok
"""
@doc since: "2.2.0"
@spec retry_job(name :: atom(), job_id :: pos_integer()) :: :ok
def retry_job(name \\ __MODULE__, job_id) when is_integer(job_id) do
name
|> config()
|> Query.retry_job(job_id)
end
@doc """
Retries all jobs that match on the given queryable. Please note that no matter the
queryable constraints, it will never retry `available`, `executing` or `scheduled` jobs.
If no queryable is given, Oban will retry all jobs in retryable states.
## Example
Retries all retryable jobs
Oban.retry_all_jobs(Oban.Job)
{:ok, 9}
Retries all retryable jobs with priority 0
Oban.Job
|> Ecto.Query.where(priority: 0)
|> Oban.retry_all_jobs()
{:ok, 5}
"""
@doc since: "2.9.0"
@spec retry_all_jobs(name(), queryable :: Ecto.Queryable.t()) :: {:ok, non_neg_integer()}
def retry_all_jobs(name \\ __MODULE__, queryable) do
name
|> config()
|> Query.retry_all_jobs(queryable)
end
@doc """
Cancel an `executing`, `available`, `scheduled` or `retryable` job and mark it as `cancelled` to
prevent it from running. If the job is currently `executing` it will be killed and otherwise it
is ignored.
If an executing job happens to fail before it can be cancelled the state is set to `cancelled`.
However, if it manages to complete successfully then the state will still be `completed`.
## Example
Cancel a scheduled job with the id `1`:
Oban.cancel_job(1)
:ok
"""
@doc since: "1.3.0"
@spec cancel_job(name(), job_id :: pos_integer()) :: :ok
def cancel_job(name \\ __MODULE__, job_id) when is_integer(job_id) do
conf = config(name)
Engine.cancel_job(conf, %Job{id: job_id})
Notifier.notify(conf, :signal, %{action: :pkill, job_id: job_id})
end
@doc """
Cancel many jobs based on a queryable and mark them as `cancelled` to prevent them from running.
Any currently `executing` jobs are killed while the others are ignored.
If executing jobs happen to fail before cancellation then the state is set to `cancelled`.
However, any that complete successfully will remain `completed`.
Only jobs with the statuses `executing`, `available`, `scheduled`, or `retryable` can be cancelled.
## Example
Cancel all jobs:
Oban.cancel_all_jobs(Oban.Job)
{:ok, 9}
Cancel all jobs for a specific worker:
Oban.Job
|> Ecto.Query.where(worker: "MyApp.MyWorker")
|> Oban.cancel_all_jobs()
{:ok, 2}
"""
@doc since: "2.9.0"
@spec cancel_all_jobs(name(), queryable :: Ecto.Queryable.t()) :: {:ok, non_neg_integer()}
def cancel_all_jobs(name \\ __MODULE__, queryable) do
  conf = config(name)

  {:ok, {count, executing}} = Engine.cancel_all_jobs(conf, queryable)

  # Jobs that were mid-flight need an explicit pkill signal so their runners
  # shut down; everything else was cancelled by the engine alone.
  if executing != [] do
    pkill_signals = Enum.map(executing, &%{action: :pkill, job_id: &1.id})

    Notifier.notify(conf, :signal, pkill_signals)
  end

  {:ok, count}
end
## Child Spec Helpers
# Normalizes a plugin entry (bare `module` or `{module, opts}`) into a child
# spec, injecting the instance config and a registry-scoped name unless the
# caller already provided them.
defp plugin_child_spec({module, opts}, conf) do
  via_name = Registry.via(conf.name, {:plugin, module})

  plugin_opts =
    opts
    |> Keyword.put_new(:conf, conf)
    |> Keyword.put_new(:name, via_name)

  Supervisor.child_spec({module, plugin_opts}, id: {:plugin, module})
end

defp plugin_child_spec(module, conf), do: plugin_child_spec({module, []}, conf)
# Temporary one-shot task that emits the `[:oban, :supervisor, :init]`
# telemetry event. Both the timestamp and `self()` are deliberately captured
# here, in the supervisor's init, not inside the spawned task process.
defp event_child_spec(conf) do
  measurements = %{system_time: System.system_time()}
  metadata = %{pid: self(), conf: conf}
  emit_init = fn -> Telemetry.execute([:oban, :supervisor, :init], measurements, metadata) end

  Supervisor.child_spec({Task, emit_init}, restart: :temporary)
end
## Signal Helper
# Signals broadcast to every connected node by default; `local_only: true`
# tags the payload with this instance's ident so only the local node reacts.
defp scope_signal(conf, opts) do
  local_only? = Keyword.get(opts, :local_only)

  if local_only?, do: Config.to_ident(conf), else: :any
end
## Validation Helpers
# Ensures a signal-style call received a `:queue` key and that any of the
# `expected` options present are well formed. Raises `ArgumentError` on the
# first violation; options outside `expected` are left for engine validation.
defp validate_queue_opts!(opts, expected) when is_list(opts) do
  if not Keyword.has_key?(opts, :queue) do
    raise ArgumentError, "required option :queue is missing from #{inspect(opts)}"
  end

  opts
  |> Keyword.take(expected)
  |> Enum.each(&validate_queue_opts!/1)
end

defp validate_queue_opts!({:queue, queue}) do
  atom_ok? = is_atom(queue) and not is_nil(queue)
  binary_ok? = is_binary(queue) and byte_size(queue) > 0

  if not (atom_ok? or binary_ok?) do
    raise ArgumentError,
          "expected :queue to be a binary or atom (except `nil`), got: #{inspect(queue)}"
  end
end

defp validate_queue_opts!({:local_only, local_only}) do
  if not is_boolean(local_only) do
    raise ArgumentError, "expected :local_only to be a boolean, got: #{inspect(local_only)}"
  end
end

defp validate_queue_opts!(option) do
  raise ArgumentError, "unknown option provided #{inspect(option)}"
end
# Delegates validation of engine-specific options to the configured engine by
# calling its `init/2` in validate mode. `:local_only` is stripped first since
# it is handled by notifier scoping, not by the engine.
defp validate_engine_meta!(conf, opts) do
  engine_opts =
    opts
    |> Keyword.drop([:local_only])
    |> Keyword.put(:validate, true)

  case conf.engine.init(conf, engine_opts) do
    {:error, error} -> raise error
    other -> other
  end
end
end
| 31.81872 | 101 | 0.664085 |
ff3a6a45b652a83acc24a29a96196f6a4844287a | 1,666 | ex | Elixir | lib/util/tmpdir.ex | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | [
"Apache-2.0"
] | null | null | null | lib/util/tmpdir.ex | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | [
"Apache-2.0"
] | null | null | null | lib/util/tmpdir.ex | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | [
"Apache-2.0"
] | null | null | null | # Copyright(c) 2015-2018 ACCESS CO., LTD. All rights reserved.
use Croma
defmodule Antikythera.Tmpdir do
@moduledoc """
Function-scoped temporary working directories, intended mainly for async jobs.

The single entry point is `make/2`, which hands the given function a fresh
temporary directory and guarantees its removal afterwards.
"""

alias Antikythera.Context
alias Antikythera.ExecutorPool.Id, as: EPoolId
alias AntikytheraCore.TmpdirTracker
@doc """
Creates a temporary directory which can be used as a working space for the passed function `f`.
This function is basically intended for async jobs which processes large amount of data.
For example, an async job that accumulates data into files and upload them to somewhere
can utilize this function to obtain a temporary working space.
The temporary directory is created before `f` is invoked.
When execution of `f` is finished (either successfully or by exception) the directory is automatically removed.
The function returns the return value of `f`.
Nested calls to this function is not allowed.
Instead you can freely make subdirectories of the temporary directory.
## Example
Antikythera.Tmpdir.make(context, fn tmpdir ->
path = Path.join(tmpdir, "foo")
File.open(path, [:write], fn file ->
IO.write(file, "some data 1")
IO.write(file, "some data 2")
end)
upload_to_object_storage_service("object_key", path)
end)
"""
# Croma's `defun` checks the argument types at runtime; the `v[...]`
# annotation accepts either a bare executor pool id or a full `Context`.
defun make(context_or_epool_id :: v[EPoolId.t | Context.t], f :: (Path.t -> a)) :: a when a: any do
epool_id = extract_epool_id(context_or_epool_id)
# `request/1` yields the directory path (creation/removal is owned by the
# TmpdirTracker process).
{:ok, tmpdir} = TmpdirTracker.request(epool_id)
# try/after guarantees `finished/0` (cleanup) runs even if `f` raises.
try do
f.(tmpdir)
after
TmpdirTracker.finished()
end
end
# Accepts either a `Context` (using its executor pool id) or a bare id as-is.
defp extract_epool_id(%Context{executor_pool_id: epool_id}), do: epool_id
defp extract_epool_id(epool_id), do: epool_id
end
| 34.708333 | 113 | 0.714286 |
ff3a7f4d54a4df99ee8cf408ac8652092f7ce275 | 63 | ex | Elixir | lib/plenario_web/views/page_view.ex | vforgione/plenario2 | 001526e5c60a1d32794a18f3fd65ead6cade1a29 | [
"Apache-2.0"
] | 13 | 2017-12-11T13:59:42.000Z | 2020-11-16T21:52:31.000Z | lib/plenario_web/views/page_view.ex | vforgione/plenario2 | 001526e5c60a1d32794a18f3fd65ead6cade1a29 | [
"Apache-2.0"
] | 310 | 2017-11-13T22:52:26.000Z | 2018-11-19T17:49:30.000Z | lib/plenario_web/views/page_view.ex | vforgione/plenario2 | 001526e5c60a1d32794a18f3fd65ead6cade1a29 | [
"Apache-2.0"
] | 3 | 2017-12-05T00:36:12.000Z | 2020-03-10T15:15:29.000Z | defmodule PlenarioWeb.PageView do
# All rendering helpers are injected by the `PlenarioWeb` `:view` macro; this
# view defines no helpers of its own.
use PlenarioWeb, :view
end
| 15.75 | 33 | 0.809524 |
ff3a840fa96db593cc8463d9371e6832ad122946 | 948 | exs | Elixir | config/config.exs | thatbraxguy/real-time-quill | 288df903ddada42695c68fd0674e5976a4a848a9 | [
"MIT"
] | null | null | null | config/config.exs | thatbraxguy/real-time-quill | 288df903ddada42695c68fd0674e5976a4a848a9 | [
"MIT"
] | null | null | null | config/config.exs | thatbraxguy/real-time-quill | 288df903ddada42695c68fd0674e5976a4a848a9 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# NOTE(review): `Mix.Config` is deprecated in recent Elixir releases in favor
# of `Config` (`import Config`); worth keeping in mind when upgrading.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# General application configuration
config :quickquill,
ecto_repos: [Quickquill.Repo]
# Configures the endpoint
config :quickquill, Quickquill.Endpoint,
url: [host: "localhost"],
# NOTE(review): this secret is committed to version control; production
# deployments should load `secret_key_base` from the environment instead.
secret_key_base: "TwxqKeg8EHuEnMQYLDbdnFIO+oVSXWx1vvhd5laVsKnAacvzFTasOBiWCrEicr2T",
render_errors: [view: Quickquill.ErrorView, accepts: ~w(html json)],
pubsub: [name: Quickquill.PubSub,
adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
| 33.857143 | 86 | 0.766878 |
ff3a85a58e6ebc66cc1b6051bf805231a130d49a | 627 | ex | Elixir | app/community/lib/community_web/resolvers/news_resolver.ex | juhalehtonen/graphql-playground | b2515835c6a2de5db59d4c2ca605e0e9028b801b | [
"MIT"
] | null | null | null | app/community/lib/community_web/resolvers/news_resolver.ex | juhalehtonen/graphql-playground | b2515835c6a2de5db59d4c2ca605e0e9028b801b | [
"MIT"
] | null | null | null | app/community/lib/community_web/resolvers/news_resolver.ex | juhalehtonen/graphql-playground | b2515835c6a2de5db59d4c2ca605e0e9028b801b | [
"MIT"
] | null | null | null | defmodule CommunityWeb.NewsResolver do
alias Community.News
@moduledoc """
This is a RESOLVER, which are just functions mapped to GraphQL fields, with
their actual behaviour.
"""
@doc """
Resolves the full list of links for the corresponding GraphQL field.
"""
def all_links(_root, _args, _info) do
  {:ok, News.list_links()}
end
@doc """
Creates a new link from the mutation arguments.

Returns `{:ok, link}` on success. On failure returns `{:error, message}`;
when the failure carries an invalid `Ecto.Changeset` the message includes the
per-field validation errors (resolving the previous TODO about detailed
error reporting).
"""
def create_link(_root, args, _info) do
  case News.create_link(args) do
    {:ok, link} ->
      {:ok, link}

    {:error, %Ecto.Changeset{} = changeset} ->
      {:error, "could not create link: #{format_changeset_errors(changeset)}"}

    _error ->
      {:error, "could not create link"}
  end
end

# Flattens changeset errors into "field: message" pairs, interpolating the
# %{...} placeholders Ecto leaves inside its message templates.
defp format_changeset_errors(changeset) do
  changeset
  |> Ecto.Changeset.traverse_errors(fn {msg, opts} ->
    Enum.reduce(opts, msg, fn {key, value}, acc ->
      String.replace(acc, "%{#{key}}", to_string(value))
    end)
  end)
  |> Enum.map_join("; ", fn {field, messages} ->
    "#{field}: #{Enum.join(messages, ", ")}"
  end)
end
end
| 20.225806 | 77 | 0.631579 |
ff3acc2a97acc480c84cc71bd9cfb4b36ea2174f | 373 | exs | Elixir | priv/repo/migrations/20180712102348_create_repos_tags_join_table.exs | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | 1 | 2019-05-07T15:03:54.000Z | 2019-05-07T15:03:54.000Z | priv/repo/migrations/20180712102348_create_repos_tags_join_table.exs | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | null | null | null | priv/repo/migrations/20180712102348_create_repos_tags_join_table.exs | DavidAlphaFox/coderplanets_server | 3fd47bf3bba6cc04c9a34698201a60ad2f3e8254 | [
"Apache-2.0"
] | null | null | null | defmodule MastaniServer.Repo.Migrations.CreateReposTagsJoinTable do
use Ecto.Migration
# Many-to-many join table between tags and cms_repos. Rows are removed
# automatically when either referenced record is deleted
# (`on_delete: :delete_all`), and the composite unique index forbids
# duplicate tag/repo pairs.
def change do
create table(:repos_tags) do
add(:tag_id, references(:tags, on_delete: :delete_all), null: false)
add(:repo_id, references(:cms_repos, on_delete: :delete_all), null: false)
end
create(unique_index(:repos_tags, [:tag_id, :repo_id]))
end
end
| 28.692308 | 80 | 0.723861 |
ff3aec72d49038de32c92d53163625b53cd7d9a0 | 357 | ex | Elixir | lib/rill/try.ex | Carburetor/rill | 6ba477373cff28f56fbead9a316166d994da67d3 | [
"MIT"
] | 1 | 2022-01-26T15:06:14.000Z | 2022-01-26T15:06:14.000Z | lib/rill/try.ex | rill-project/rill | 16d35f9dda1ec58190fa6bda9923372834ab456b | [
"MIT"
] | null | null | null | lib/rill/try.ex | rill-project/rill | 16d35f9dda1ec58190fa6bda9923372834ab456b | [
"MIT"
] | null | null | null | defmodule Rill.Try do
@doc """
Evaluates `block`, rescuing exceptions that match the `error` pattern and
returning `nil` in that case. Other exceptions propagate unchanged.
"""
defmacro try(error, do: block) do
quote do
try do
unquote(block)
rescue
unquote(error) -> nil
end
end
end
@doc """
Evaluates `block`, returning `nil` when a
`Rill.MessageStore.ExpectedVersion.Error` is raised. Other exceptions
propagate unchanged.
"""
defmacro try_version(do: block) do
quote do
try do
unquote(block)
rescue
Rill.MessageStore.ExpectedVersion.Error -> nil
end
end
end
end
| 16.227273 | 54 | 0.577031 |
ff3b13e058efdeeaed56d076d967e37e54cc51d3 | 3,618 | ex | Elixir | apps/vax/lib/vax/adapter/helpers.ex | vaxine-io/vaxine | 872a83ea8d4935a52c7b850bb17ab099ee9c346b | [
"Apache-2.0"
] | 8 | 2022-03-14T15:33:08.000Z | 2022-03-30T22:06:04.000Z | apps/vax/lib/vax/adapter/helpers.ex | vaxine-io/vaxine | 872a83ea8d4935a52c7b850bb17ab099ee9c346b | [
"Apache-2.0"
] | 9 | 2022-03-15T15:48:28.000Z | 2022-03-21T23:11:34.000Z | apps/vax/lib/vax/adapter/helpers.ex | vaxine-io/vaxine | 872a83ea8d4935a52c7b850bb17ab099ee9c346b | [
"Apache-2.0"
] | null | null | null | defmodule Vax.Adapter.Helpers do
@moduledoc false
# TODO: split by purpose (?)
@doc """
Returns the single primary-key field declared by `schema`.

Raises when the schema declares zero primary keys or a composite key, since
Vax supports exactly one.
"""
@spec schema_primary_key!(schema :: atom()) :: atom()
def schema_primary_key!(schema) do
  case schema.__schema__(:primary_key) do
    [primary_key] ->
      primary_key

    [] ->
      raise "Vax requires all schemas to have a primary key, found none for schema #{schema}"

    keys ->
      # `inspect/1` is required: interpolating a list of atoms directly
      # (the previous `#{keys}`) raises instead of producing this message.
      raise "Vax requires all schemas to have no more than one primary key. Found #{inspect(keys)} for schema #{schema}"
  end
end
# Composes the storage key for an object: "<schema_source>:<primary_key>".
@spec object_key(schema_source :: binary(), primary_key :: binary()) :: binary()
def object_key(schema_source, primary_key) do
  Enum.join([schema_source, primary_key], ":")
end
# Full Antidote object tuple: {key, CRDT type, bucket}. Objects always use
# the :antidote_crdt_map_rr type.
@spec build_object(schema_source :: binary(), primary_key :: binary(), bucket :: binary()) ::
        {binary(), :antidote_crdt_map_rr, binary()}
def build_object(schema_source, primary_key, bucket) do
  key = object_key(schema_source, primary_key)

  {key, :antidote_crdt_map_rr, bucket}
end
# Loads an antidotec map's entries into a `schema` struct via `repo.load/2`.
# Returns `nil` when the map holds no entries (the object has no data).
@spec load_map(
repo :: atom(),
schema :: Ecto.Schema.t(),
antidote_map :: :antidotec_map.antidote_map()
) :: struct() | nil
def load_map(repo, schema, map) do
map
# NOTE(review): `String.to_atom/1` on keys read from storage can grow the
# atom table without bound; keys should always be schema field names, so
# `String.to_existing_atom/1` looks safe here — confirm before changing.
|> Enum.map(fn {{k, _t}, v} -> {String.to_atom(k), v} end)
|> case do
[] -> nil
fields -> repo.load(schema, fields)
end
end
# Builds an antidotec map for inserting `schema`: every non-nil schema field
# is folded in as a CRDT update. The `_repo` argument is unused but kept for
# interface symmetry with `build_update_map/3`.
@spec build_insert_map(repo :: atom(), schema :: Ecto.Schema.t()) ::
        :antidotec_map.antidote_map()
def build_insert_map(_repo, schema) do
  types = schema_types(schema)
  struct_mod = schema.__struct__

  schema
  |> Map.take(struct_mod.__schema__(:fields))
  |> Enum.reduce(:antidotec_map.new(), fn
    {_field, nil}, acc -> acc
    {field, value}, acc -> update_map_value(acc, types, field, value, struct_mod)
  end)
end
  # Builds an antidotec map update for `changeset`: starts from the current
  # state of `schema` dumped into a CRDT map, then applies each change on top.
  def build_update_map(_repo, schema, changeset) do
    schema_types = schema_types(schema)
    map = to_antidotec_map(schema, schema_types)
    Enum.reduce(changeset.changes, map, fn {field, new_value}, map ->
      update_map_value(map, schema_types, field, new_value, schema.__struct__)
    end)
  end
  # Applies a single field change to the antidotec map: fetches the field's
  # current CRDT value (or a dumped struct default when absent), computes the
  # CRDT update for `new_value`, and stores it under a `{name, crdt_type}` key.
  defp update_map_value(map, schema_types, field, new_value, schema) do
    field_type = schema_types[field]
    # todo: (?)
    # The struct default stands in for a missing field's current value.
    field_default = schema |> struct() |> Map.get(field)
    antidotec_value = get_antidote_map_field_or_default(map, field, field_type, field_default)
    # NOTE(review): the key written here is a *string* field name, while
    # `to_antidotec_map/2` uses atom keys — confirm the intended key type.
    map_key = {Atom.to_string(field), Vax.Type.crdt_type(field_type)}
    value = Vax.Type.compute_change(field_type, antidotec_value, new_value)
    :antidotec_map.add_or_update(map, map_key, value)
  end
  # Looks up `field`'s current CRDT value inside the antidotec map tuple,
  # falling back to dumping `field_default` when the field is absent.
  #
  # NOTE(review): `elem(map, 1)` assumes the values live in the second tuple
  # element of the antidotec map record — confirm against :antidotec_map.
  # Also note the comparison `key == field` matches atom keys (as written by
  # `to_antidotec_map/2`) but not the string keys written by
  # `update_map_value/5` — verify which is intended.
  def get_antidote_map_field_or_default(map, field, field_type, field_default) do
    map
    |> elem(1)
    |> Enum.find(fn {{key, _type}, _value} -> key == field end)
    |> case do
      nil ->
        Vax.Type.client_dump(field_type, field_default)
      {{_key, _type}, value} ->
        value
    end
  end
defp schema_types(%schema_mod{} = _schema) do
schema_mod.__schema__(:fields)
|> Map.new(fn field ->
{field, schema_mod.__schema__(:type, field)}
end)
end
  # Dumps the current schema struct into a bare antidotec map tuple so an
  # update can be computed against it. The tuple is rebuilt element-by-element
  # because the antidote client exposes no constructor taking initial values.
  defp to_antidotec_map(schema, schema_types) do
    crdt_types = Map.new(schema_types, fn {key, type} -> {key, Vax.Type.crdt_type(type)} end)
    # TODO: maybe hook a better interface in antidote client
    map = :antidotec_map.new()
    map_values =
      schema
      |> Map.take(schema.__struct__.__schema__(:fields))
      |> Map.new(fn {key, value} ->
        {{key, crdt_types[key]}, Vax.Type.client_dump(schema_types[key], value)}
      end)
    # NOTE(review): keys here are atoms, while `update_map_value/5` writes
    # string keys (`Atom.to_string(field)`) — verify the intended key type.
    {elem(map, 0), map_values, elem(map, 2), elem(map, 3)}
  end
end
| 32.017699 | 113 | 0.655334 |
ff3b1fce6e240b962afc84443dbdc55355822e16 | 622 | ex | Elixir | deps/postgrex/lib/postgrex/extensions/int4.ex | rpillar/Top5_Elixir | 9c450d2e9b291108ff1465dc066dfe442dbca822 | [
"MIT"
] | null | null | null | deps/postgrex/lib/postgrex/extensions/int4.ex | rpillar/Top5_Elixir | 9c450d2e9b291108ff1465dc066dfe442dbca822 | [
"MIT"
] | null | null | null | deps/postgrex/lib/postgrex/extensions/int4.ex | rpillar/Top5_Elixir | 9c450d2e9b291108ff1465dc066dfe442dbca822 | [
"MIT"
] | null | null | null | defmodule Postgrex.Extensions.Int4 do
@moduledoc false
import Postgrex.BinaryUtils, warn: false
use Postgrex.BinaryExtension, send: "int4send"
@int4_range -2147483648..2147483647
  # Returns quoted encoder clauses spliced into the generated extension
  # module by Postgrex.BinaryExtension. Each int4 is framed as
  # <<byte length (4), value>>; out-of-range terms raise an EncodeError.
  def encode(_) do
    range = Macro.escape(@int4_range)
    quote location: :keep do
      int when is_integer(int) and int in unquote(range) ->
        <<4 :: int32, int :: int32>>
      other ->
        raise DBConnection.EncodeError, Postgrex.Utils.encode_msg(other, unquote(range))
    end
  end
  # Returns quoted decoder clauses: match the 4-byte length header and
  # extract the signed 32-bit value.
  def decode(_) do
    quote location: :keep do
      <<4 :: int32, int :: int32>> -> int
    end
  end
end
| 25.916667 | 89 | 0.636656 |
ff3b206a82c8276ba67d7d2ce0ecdd0c00e57522 | 203 | ex | Elixir | events/apps/daniel_api/lib/daniel_api.ex | MrCeleryman/elixirtute | 797e3cb29a68a54728258329b49ac4ae0787cc76 | [
"MIT"
] | null | null | null | events/apps/daniel_api/lib/daniel_api.ex | MrCeleryman/elixirtute | 797e3cb29a68a54728258329b49ac4ae0787cc76 | [
"MIT"
] | null | null | null | events/apps/daniel_api/lib/daniel_api.ex | MrCeleryman/elixirtute | 797e3cb29a68a54728258329b49ac4ae0787cc76 | [
"MIT"
] | null | null | null | defmodule DanielApi do
@moduledoc """
Documentation for DanielApi.
"""
@doc """
Hello world.
## Examples
iex> DanielApi.hello
:world
"""
def hello do
:world
end
end
| 10.684211 | 30 | 0.581281 |
ff3b37994a5c965c20b78b379b811585b270e20a | 1,530 | ex | Elixir | clients/deployment_manager/lib/google_api/deployment_manager/v2/model/import_file.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/deployment_manager/lib/google_api/deployment_manager/v2/model/import_file.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/deployment_manager/lib/google_api/deployment_manager/v2/model/import_file.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.DeploymentManager.V2.Model.ImportFile do
  @moduledoc """
  Auto-generated API model representing a named file and its contents.

  ## Attributes
  - content (String.t): The contents of the file. Defaults to: `null`.
  - name (String.t): The name of the file. Defaults to: `null`.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :content => any(),
          :name => any()
        }
  # `field/1` is provided by GoogleApi.Gax.ModelBase and defines the struct
  # key plus its decode/encode handling.
  field(:content)
  field(:name)
end
# Delegates Poison decoding to the model's generated `decode/2`.
defimpl Poison.Decoder, for: GoogleApi.DeploymentManager.V2.Model.ImportFile do
  def decode(value, options) do
    GoogleApi.DeploymentManager.V2.Model.ImportFile.decode(value, options)
  end
end
# Delegates Poison encoding to the shared Gax model-base encoder.
defimpl Poison.Encoder, for: GoogleApi.DeploymentManager.V2.Model.ImportFile do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 30 | 79 | 0.729412 |
ff3b4824f1c454d847625a2e7c9612f95266e9c5 | 211 | exs | Elixir | timetable_backend/test/controllers/page_controller_test.exs | heru/timetable | b5f86f53020c6e32b569628c5396b8972ca0538f | [
"MIT"
] | null | null | null | timetable_backend/test/controllers/page_controller_test.exs | heru/timetable | b5f86f53020c6e32b569628c5396b8972ca0538f | [
"MIT"
] | null | null | null | timetable_backend/test/controllers/page_controller_test.exs | heru/timetable | b5f86f53020c6e32b569628c5396b8972ca0538f | [
"MIT"
] | null | null | null | defmodule TimetableBackend.PageControllerTest do
use TimetableBackend.ConnCase
  # Smoke test: the root route responds 200 with the default landing copy.
  test "GET /", %{conn: conn} do
    conn = get conn, "/"
    assert html_response(conn, 200) =~ "Welcome to Phoenix!"
  end
end
| 23.444444 | 60 | 0.701422 |
ff3b4af44fb9429af5dcbacfb1638ce090971d11 | 606 | ex | Elixir | lib/bitcoin/node/network/supervisor.ex | coinscript/bitcoinsv-elixir | 2dda03c81edc5662743ed2922abb5b1910d9c09a | [
"Apache-2.0"
] | 2 | 2019-08-12T04:53:57.000Z | 2019-09-03T03:47:33.000Z | lib/bitcoin/node/network/supervisor.ex | coinscript/bitcoinsv-elixir | 2dda03c81edc5662743ed2922abb5b1910d9c09a | [
"Apache-2.0"
] | null | null | null | lib/bitcoin/node/network/supervisor.ex | coinscript/bitcoinsv-elixir | 2dda03c81edc5662743ed2922abb5b1910d9c09a | [
"Apache-2.0"
] | null | null | null | defmodule Bitcoin.Node.Network.Supervisor do
use Bitcoin.Common
use Supervisor
require Logger
def start_link do
Supervisor.start_link(__MODULE__, name: __MODULE__)
end
  # Builds the child list for the node's subsystems.
  # `@modules` presumably comes from `use Bitcoin.Common`, mapping subsystem
  # roles to concrete implementation modules — TODO confirm there.
  def init(_) do
    Logger.info("Starting Node subsystems")
    [
      @modules[:addr],
      @modules[:discovery],
      @modules[:connection_manager],
      # Storage module is an abstraction on top of the actual storage engine so it doesn't have to be dynamic
      Bitcoin.Node.Storage,
      @modules[:inventory]
    ]
    # NOTE(review): `worker/2` and `supervise/2` belong to the deprecated
    # Supervisor.Spec API; consider child specs with `Supervisor.init/2`.
    |> Enum.map(fn m -> worker(m, []) end)
    |> supervise(strategy: :one_for_one)
  end
end
| 23.307692 | 109 | 0.671617 |
ff3b536e415946498e965625bd50f98f743e0c8c | 380 | ex | Elixir | test_server/web/views/error_view.ex | xaptum-eng/phoenix-channel-client | 6adcfe3cd18c867bbf9c6a7aa6a3ceb5e0b77495 | [
"MIT"
] | 2 | 2020-10-22T14:54:39.000Z | 2020-11-04T21:52:06.000Z | test_server/web/views/error_view.ex | xaptum-eng/phoenix-channel-client | 6adcfe3cd18c867bbf9c6a7aa6a3ceb5e0b77495 | [
"MIT"
] | null | null | null | test_server/web/views/error_view.ex | xaptum-eng/phoenix-channel-client | 6adcfe3cd18c867bbf9c6a7aa6a3ceb5e0b77495 | [
"MIT"
] | null | null | null | defmodule TestServer.ErrorView do
use TestServer.Web, :view
def render("404.html", _assigns) do
"Page not found"
end
def render("500.html", _assigns) do
"Internal server error"
end
# In case no render clause matches or no
# template is found, let's render it as 500
def template_not_found(_template, assigns) do
render "500.html", assigns
end
end
| 21.111111 | 47 | 0.702632 |
ff3b56090aa9d33279a87e99a6473bc77051da46 | 7,765 | ex | Elixir | lib/ex_twiml.ex | postmates/ex_twiml | 9e2306723a206c5d1353977321901aad4715d63d | [
"MIT"
] | null | null | null | lib/ex_twiml.ex | postmates/ex_twiml | 9e2306723a206c5d1353977321901aad4715d63d | [
"MIT"
] | null | null | null | lib/ex_twiml.ex | postmates/ex_twiml | 9e2306723a206c5d1353977321901aad4715d63d | [
"MIT"
] | 2 | 2017-07-22T02:31:04.000Z | 2019-07-16T22:15:56.000Z | defmodule ExTwiml do
@moduledoc """
Contains macros to make generating TwiML from Elixir far easier and more
efficient. Just `import ExTwiml` and go!
## Examples
How to generate nested verbs, such as `<Gather>`:
# Options are passed before "do"
gather digits: 1, finish_on_key: "#" do
# More verbs here ...
end
# Generates
<Gather digits="1" finishOnKey="#"></Gather>
How to generate simpler verbs, such as `<Say>`:
# Options are passed as the second argument
say "words to say", voice: "woman"
# Generates
<Say voice="woman">words to say</Say>
How to generate command verbs, like `<Leave>` or `<Pause>`:
# If the verb has no attributes, just write the name
# of the verb:
leave
# Generates
<Leave />
# If the verb has attributes, like <Pause>, write them
# after the name of the verb:
pause length: 5
# Generates
<Pause length="5" />
A complete example:
import ExTwiml
twiml do
play "/assets/welcome.mp3"
gather digits: 1 do
say "For more menus, please press 1.", voice: "woman"
say "To speak with a real person, please press 2.", voice: "woman"
end
end
Produces the following `string`:
<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Play>/assets/welcome.mp3</Play>
<Gather digits="3">
<Say voice="woman">For more menus, please press 1.</Say>
<Say voice="woman">To speak with a real person, please press 2.</Say>
</Gather>
</Response>
You'd then need to render this string to the browser.
"""
import ExTwiml.Utilities
alias ExTwiml.ReservedNameError
@verbs [
# Nested
:gather, :dial, :brand, :message, :task,
# Non-nested
:say, :number, :play, :sms, :sip, :client, :conference, :queue, :enqueue,
:redirect, :body, :media
]
@simple_verbs [:leave, :hangup, :reject, :pause, :record]
@doc """
Start creating a TwiML document. Returns the rendered TwiML as a string.
See the `ExTwiml` documentation to see how to call TwiML verbs from within
the `twiml/1` macro.
## Example
iex> import ExTwiml
...> twiml do
...> say "Hello World"
...> end
"<?xml version=\\"1.0\\" encoding=\\"UTF-8\\"?><Response><Say>Hello World</Say></Response>"
"""
  defmacro twiml(do: block) do
    # The buffer Agent pid is bound with `var!(buffer, Twiml)` so the other
    # macros (`tag`, `text`, and the expanded verbs) can reach it across
    # macro-hygiene boundaries within the same `twiml` block.
    quote do
      header = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
      # Create an Agent to store the buffer results, using var! to allow us to
      # continue to be able to update the buffer through multiple macros.
      #
      # The buffer's state is a list of XML fragments. New fragments are
      # inserted by other macros. Finally, all the fragments are joined
      # together in a string.
      {:ok, var!(buffer, Twiml)} = start_buffer([header])
      # Wrap the whole block in a <Response> tag
      tag :response do
        # Postwalk the AST, expanding all of the TwiML verbs into proper
        # `tag` and `text` macros. This gives the impression that there
        # is a macro for each verb, when in fact it all expands to only
        # two macros.
        unquote(block
        |> Macro.prewalk(&prewalk(&1, __CALLER__.file))
        |> Macro.postwalk(&postwalk/1))
      end
      xml = render(var!(buffer, Twiml)) # Convert buffer to string
      :ok = stop_buffer(var!(buffer, Twiml)) # Kill the Agent
      xml # Return our pretty TwiML!
    end
  end
@doc """
Use this macro to generate a tag not yet supported by this Twiml library. Note
that you'll also need to use the `text` macro to include text within this tag.
## Examples
tag :mms, to: "1112223333", from: "2223334444" do
text "How are you doing?"
end
Will produce the following Twiml:
<Mms to="1112223333" from="2223334444">How are you doing?</Mms>
"""
  defmacro tag(name, options \\ [], do: inner) do
    # The quoted body writes an opening tag, splices the caller's inner block
    # (which appends its own fragments), then writes the matching closing tag.
    # `var!(buffer, Twiml)` reaches the buffer bound by `twiml/1`.
    quote do
      put_buffer var!(buffer, Twiml), create_tag(:opening, unquote(name), unquote(options))
      unquote(inner)
      put_buffer var!(buffer, Twiml), create_tag(:closing, unquote(name))
    end
  end
@doc """
Adds whatever text is given to the current Twiml buffer, unmodified. As a
result, this macro is really only useful when nested inside one of the other
macros provided by this module.
"""
  defmacro text(string) do
    # Coerces the value to a string and escapes it via `escape_text/1`
    # (imported from ExTwiml.Utilities) before appending to the buffer.
    quote do
      put_buffer var!(buffer, Twiml), escape_text(to_string(unquote(string)))
    end
  end
@doc "Start an Agent to store a given buffer state."
@spec start_buffer(list) :: {:ok, pid}
def start_buffer(state), do: Agent.start_link(fn -> state end)
@doc "Stop a buffer."
@spec stop_buffer(pid) :: atom
def stop_buffer(buff), do: Agent.stop(buff)
@doc "Update the buffer by pushing a new tag onto the beginning."
@spec put_buffer(pid, any) :: atom
def put_buffer(buff, content), do: Agent.update(buff, &[content | &1])
@doc "Get the current state of a buffer."
@spec get_buffer(pid) :: list
def get_buffer(buff), do: Agent.get(buff, &(&1)) |> Enum.reverse
@doc "Render the contents of the buffer into a string."
@spec render(pid) :: String.t
def render(buff), do: Agent.get(buff, &(&1)) |> Enum.reverse |> Enum.join
##
# Private API
##
  # Check function definitions for reserved variable names
  # Matches anonymous-function AST nodes (`fn args -> body end`) so their
  # argument names can be checked against the reserved verb list; every
  # other node passes through untouched.
  defp prewalk({:fn, _, [{:->, _, [vars, _]}]} = ast, file_name) do
    assert_no_verbs!(vars, file_name)
    ast
  end
  defp prewalk(ast, _file_name), do: ast
  # Expands each supported TwiML verb call in the AST into `tag`/`text`
  # macro invocations. Clause order matters: the more specific argument
  # shapes (options plus do-block) must match before the generic ones.
  # {:text, [], ["Hello World"]}
  defp postwalk({:text, _meta, [string]}) do
    # Just add the text to the buffer. Nothing else needed.
    quote do: text unquote(string)
  end
  # {:gather, [], [[do: inner]]}
  defp postwalk({verb, _meta, [[do: inner]]}) when verb in @verbs do
    compile_nested(verb, [], inner)
  end
  # {:gather, [], [finish_on_key: "#", [do: inner]]}
  defp postwalk({verb, _meta, [options, [do: inner]]}) when verb in @verbs do
    compile_nested(verb, options, inner)
  end
  # {:say, [], ["Hello World", [voice: "woman"]}
  defp postwalk({verb, _meta, [string, options]}) when verb in @verbs do
    compile_simple(verb, string, options)
  end
  # {:say, [], ["Hello World"]}
  # {:say, [], ["Hello #{var}"]} (String interpolation)
  defp postwalk({verb, _meta, [string]}) when verb in @verbs do
    compile_simple(verb, string)
  end
  # {:pause, [], [[length: 5]]}
  defp postwalk({verb, _meta, [options]}) when verb in @simple_verbs do
    compile_empty(verb, options)
  end
  # {:leave, [], Elixir}
  defp postwalk({verb, _meta, _args}) when verb in @simple_verbs do
    compile_empty(verb)
  end
  # Don't modify any other ASTs.
  defp postwalk(ast), do: ast
  # For nested verbs, such as <Gather>
  # Expands to a `tag` with the caller's inner block spliced in.
  defp compile_nested(verb, options, inner) do
    quote do
      tag unquote(verb), unquote(options) do
        unquote(inner)
      end
    end
  end
  # For simple verbs, such as <Say>
  # Expands to a `tag` whose only content is the escaped string.
  defp compile_simple(verb, string, options \\ []) do
    quote do
      tag unquote(verb), unquote(options) do
        text unquote(string)
      end
    end
  end
  # For verbs without content, like <Leave> or <Pause>
  defp compile_empty(verb, options \\ []) do
    quote do
      # Render only a single tag, with options
      put_buffer var!(buffer, Twiml), create_tag(:self_closed, unquote(verb), unquote(options))
    end
  end
  # Raises ExTwiml.ReservedNameError when an anonymous-function argument
  # shadows one of the TwiML verb names (which would break verb expansion).
  defp assert_no_verbs!(vars, file_name) when is_list(vars) do
    Enum.each(vars, &assert_no_verbs!(&1, file_name))
  end
  defp assert_no_verbs!({name, _, _} = var, file_name)
       when is_atom(name) and name in @verbs do
    raise ReservedNameError, [var, file_name]
  end
  defp assert_no_verbs!(vars, _file_name) do
    vars
  end
end
| 28.759259 | 97 | 0.62962 |
ff3b735026d99cd7fb1a1f141a3373da9459c2a3 | 3,667 | ex | Elixir | lib/mix/ast_ndjson.ex | Ajwah/ex-gherkin | 401ee551312b72aa059c7f4621f014b111049acf | [
"Apache-2.0"
] | 8 | 2020-07-27T09:11:24.000Z | 2020-09-21T20:57:45.000Z | lib/mix/ast_ndjson.ex | Ajwah/ex-gherkin | 401ee551312b72aa059c7f4621f014b111049acf | [
"Apache-2.0"
] | 1 | 2021-05-14T13:30:27.000Z | 2021-05-14T13:30:27.000Z | lib/mix/ast_ndjson.ex | Ajwah/ex-gherkin | 401ee551312b72aa059c7f4621f014b111049acf | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.AstNdjson do
@moduledoc """
Parse feature file to .ast.ndjson format
"""
use Mix.Task
@shortdoc "Parse feature file to .ast.ndjson format"
def run(args) do
path = validate_preconditions_fulfilled(args)
"#{path}/*.feature"
|> Path.wildcard()
|> Enum.each(fn path ->
file =
path
|> String.split()
|> List.last()
IO.puts("Parsing #{file} to `.ast.ndjson` format")
r = gherkin(file)
File.write!("#{file}.ast.ndjson", r)
end)
end
@spec validate_preconditions_fulfilled(any) :: no_return
defp validate_preconditions_fulfilled(args) do
with {_, :ok} <- {:gherkin_check, validate_gherkin_installed()},
{_, {:ok, path}} <- {:options_check, validate_options(args)} do
path
else
{:gherkin_check, {:error, {_code, message}}} -> raise message
{:options_check, {:error, message}} -> raise message
error -> error
end
end
@spec validate_options(any) :: {:ok, String.t()} | {:error, String.t()}
defp validate_options(args) do
{opts, _, _} =
OptionParser.parse(args,
switches: [
source: :string
]
)
if opts[:source] do
{:ok, opts[:source]}
else
{:error, "Kindly supply --source /path/to/feature_files"}
end
end
  @spec validate_gherkin_installed :: :ok | {:error, {atom, String.t()}}
  # Probes for the `gherkin` CLI by invoking `gherkin --help`.
  # `System.cmd/2` raises an `ErlangError` when the executable cannot be
  # started; each known error code is translated into a human-readable
  # `{code, message}` tuple for the caller to surface.
  defp validate_gherkin_installed do
    try do
      System.cmd("gherkin", ["--help"])
      :ok
    rescue
      error ->
        details =
          error
          |> case do
            %ErlangError{original: :system_limit} ->
              {:system_limit,
               "error_code: :system_limit. All available ports in the Erlang emulator are in use"}
            %ErlangError{original: :enomem} ->
              {:enomem, "error_code: :enomem. There was not enough memory to create the port"}
            %ErlangError{original: :eagain} ->
              {:eagain,
               "error_code: :eagain. There are no more available operating system processes"}
            %ErlangError{original: :enametoolong} ->
              {:enametoolong,
               "error_code: :enametoolong. The external command given was too long"}
            %ErlangError{original: :emfile} ->
              {:emfile,
               "error_code: :emfile. There are no more available file descriptors (for the operating system process that the Erlang emulator runs in)"}
            %ErlangError{original: :enfile} ->
              {:enfile,
               "error_code: :enfile. The file table is full (for the entire operating system)"}
            %ErlangError{original: :eacces} ->
              {:eacces,
               "error_code: :eacces. The command does not point to an executable file\nKindly ensure `gem install gherkin` completed successfully and that `gherkin --help` on your local works."}
            %ErlangError{original: :enoent} ->
              {:enoent,
               "error_code: :enoent. The command does not point to an existing file\nKindly ensure `gem install gherkin` completed successfully and that `gherkin --help` on your local works."}
          end
        {:error, details}
    end
  end
# defp gherkin(file) do
# "/usr/local/bin/gherkin --format ndjson --predictable-ids --ast --no-source --no-pickles #{file}"
# |> to_charlist
# |> :os.cmd
# |> to_string
# end
  # Runs the external `gherkin` CLI on `file`, returning its stdout
  # (the ndjson-encoded AST).
  #
  # NOTE(review): the binary is invoked by absolute path here while
  # `validate_gherkin_installed/0` probes `gherkin` on the PATH — these can
  # disagree. The exit status is also discarded, so a failed parse yields
  # whatever partial output the tool produced. Confirm this is intended.
  defp gherkin(file) do
    {r, _} =
      System.cmd("/usr/local/bin/gherkin", [
        "--format",
        "ndjson",
        "--predictable-ids",
        "--no-source",
        "--no-pickles",
        "--ast",
        file
      ])
    r
  end
end
| 29.813008 | 194 | 0.578402 |
ff3b78a144bbd38888db2a947d9712e74ac2c81a | 4,000 | ex | Elixir | services/interop-proxy/lib/interop_proxy/message.ex | RemusW/orchestra | 688c0d2a25d8f73c9c5a6e27d4fe4b2460c2c198 | [
"MIT"
] | null | null | null | services/interop-proxy/lib/interop_proxy/message.ex | RemusW/orchestra | 688c0d2a25d8f73c9c5a6e27d4fe4b2460c2c198 | [
"MIT"
] | null | null | null | services/interop-proxy/lib/interop_proxy/message.ex | RemusW/orchestra | 688c0d2a25d8f73c9c5a6e27d4fe4b2460c2c198 | [
"MIT"
] | null | null | null | defmodule InteropProxy.Message do
@moduledoc """
Contains the Protobuf messages from exprotobuf.
"""
@external_resource "lib/messages/interop.proto"
use Protobuf,
from: Path.expand("../messages/interop.proto", __DIR__),
use_package_names: true
@doc ~S"""
Takes a map and turns it into a Protobuf struct recursively.
By default, exprotobuf doesn't handle nested messages so this
function will take care of that for us.
## Examples
The nested values can be in both optional and repeated fields.
iex> alias InteropProxy.Message
iex> alias InteropProxy.Message.Interop.InteropMission
iex> map = %{home_pos: %{lat: 1}, waypoints: [%{lat: 12,
...> lon: 23}]}
iex> Message.form_message map, InteropMission
%InteropProxy.Message.Interop.InteropMission{
air_drop_pos: nil,
current_mission: nil,
emergent_pos: nil,
fly_zones: [],
home_pos: %InteropProxy.Message.Interop.Position{
lat: 1,
lon: nil
},
off_axis_pos: nil,
search_area: [],
time: nil,
waypoints: [
%InteropProxy.Message.Interop.AerialPosition{
alt_msl: nil,
lat: 12,
lon: 23
}
]
}
Keys can also be strings (useful when map was converted from JSON).
iex> alias InteropProxy.Message
iex> alias InteropProxy.Message.Interop.InteropTelem
iex> map = %{:time => 12, "pos" => %{"lat" => 1, "lon" => 2}}
iex> Message.form_message map, InteropTelem
%InteropProxy.Message.Interop.InteropTelem{
pos: %InteropProxy.Message.Interop.AerialPosition{
alt_msl: nil,
lat: 1,
lon: 2
},
time: 12,
yaw: nil
}
"""
  def form_message(map, module), do: do_form_message map, module, defs()

  # Recursively builds a Protobuf struct from `map`, resolving nested
  # messages (exprotobuf does not do this itself). `defs` is the full
  # message-definition list from `defs/0`.
  defp do_form_message(map, module, defs) do
    fields = get_fields module, defs
    # Taking the map and putting entries into a new struct.
    Enum.reduce map, module.new, fn {key, value}, struct ->
      if nested? value do
        case get_nested fields, key do
          # If it's a normal nested message, recursively call the
          # function again to resolve more nested messages.
          {mod, :optional} ->
            struct
            |> update(key, do_form_message(value, mod, defs))
          # If it's a repeated message it's a list, so we'll do the
          # above but for each element in the list.
          {mod, :repeated} ->
            struct
            |> update(key, value |> Enum.map(&do_form_message(&1, mod, defs)))
        end
      else
        # If we don't have anything nested, we're just entering a
        # normal key-value pair
        struct
        |> update(key, value)
      end
    end
  end
  # Gets the list of fields for a message.
  # Scans the exprotobuf definition list for the `{:msg, module}` entry and
  # returns its field list. A MatchError is raised if no definition exists —
  # callers only ever pass known message modules.
  defp get_fields(module, defs) do
    {_, fields} = defs
    |> Enum.find(fn
      {{:msg, ^module}, _} -> true
      _ -> false
    end)
    fields
  end
# Checking if a value is a nested message.
defp nested?(value) when is_map(value), do: true
defp nested?([head | _tail]) when is_map(head), do: true
defp nested?(_value), do: false
  # Getting the module name and occurrence for a nested message.
  # Finds the `Protobuf.Field` entry whose name matches `key` (given as
  # either an atom or a string) and returns the nested message module plus
  # whether the field is `:optional` or `:repeated`.
  defp get_nested(fields, key) do
    %Protobuf.Field{type: {:msg, mod}, occurrence: occurrence} = fields
    |> Enum.find(fn
      %Protobuf.Field{name: ^key} when is_atom(key) ->
        true
      %Protobuf.Field{name: atom_key} when is_binary(key) ->
        Atom.to_string(atom_key) === key
      _ ->
        false
    end)
    {mod, occurrence}
  end
  # Doing a normal key update.
  defp update(struct, key, value) when is_atom(key) do
    struct
    |> Map.put(key, value)
  end
  # Doing a key update, but converting the string to an atom.
  # NOTE(review): `String.to_atom/1` creates atoms from incoming map keys;
  # if keys can come from unbounded external input this risks atom-table
  # exhaustion — consider `String.to_existing_atom/1`.
  defp update(struct, key, value) when is_binary(key) do
    struct
    |> Map.put(key |> String.to_atom, value)
  end
end
| 29.19708 | 78 | 0.59925 |
ff3ba3ea5353eadc5d8a49644d151e645112c55f | 4,284 | exs | Elixir | test/nx/defn/evaluator_test.exs | erlsci/nx | 4b99478523da55d65bd9e5f568d9a639cc568e26 | [
"Apache-2.0"
] | null | null | null | test/nx/defn/evaluator_test.exs | erlsci/nx | 4b99478523da55d65bd9e5f568d9a639cc568e26 | [
"Apache-2.0"
] | null | null | null | test/nx/defn/evaluator_test.exs | erlsci/nx | 4b99478523da55d65bd9e5f568d9a639cc568e26 | [
"Apache-2.0"
] | 1 | 2022-03-26T12:13:19.000Z | 2022-03-26T12:13:19.000Z | defmodule Nx.Defn.EvaluatorTest do
use ExUnit.Case, async: true
alias Nx.Tensor, as: T
import Nx.Defn
  # @defn_compiler applies to the defn definitions that follow it; the test
  # below checks the attribute is reset after compilation of the module body.
  @defn_compiler Nx.Defn.Evaluator
  defn add(a, b), do: {a + b, a - b}
  # Check the attribute has been reset
  nil = Module.get_attribute(__MODULE__, :defn_compiler)
  test "can be set explicitly set" do
    assert add(1, 2) == {Nx.tensor(3), Nx.tensor(-1)}
  end
  test "is the default compiler" do
    defmodule DefaultCompiler do
      import Nx.Defn
      defn add(a, b), do: a + b
    end
    assert DefaultCompiler.add(1, 2) == Nx.tensor(3)
  end
  defn add_two_int(t), do: Nx.add(t, 2)
  defn add_two_float(t), do: Nx.add(t, 2)
  # Adding a constant must preserve the input tensor's type (no upcast).
  test "constant" do
    assert %T{shape: {3}, type: {:u, 8}} = add_two_int(Nx.tensor([1, 2, 3], type: {:u, 8}))
    assert %T{shape: {3}, type: {:bf, 16}} = add_two_float(Nx.tensor([1, 2, 3], type: {:bf, 16}))
  end
  defn iota(), do: Nx.iota({2, 2})
  test "iota" do
    assert %T{shape: {2, 2}, type: {:s, 64}} = iota()
  end
  defn concatenate(a, b), do: Nx.concatenate([a, b])
  test "concatenate" do
    assert concatenate(Nx.tensor([1, 2, 3]), Nx.tensor([4, 5, 6])) ==
             Nx.tensor([1, 2, 3, 4, 5, 6])
  end
  defn reshape(t), do: Nx.reshape(t, {3, 2})
  test "reshape" do
    assert %T{shape: {3, 2}, type: {:s, 64}} = reshape(Nx.iota({2, 3}))
  end
  defn lu(t), do: Nx.lu(t)
  # Decomposition tests use near-identity inputs so the factors are exact.
  test "lu" do
    assert {p, l, u} = lu(Nx.tensor([[1, 0, 0], [0, 1, 0], [0, 0, -1]]))
    assert p == Nx.tensor([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
    assert l == Nx.tensor([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]])
    assert u == Nx.tensor([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, -1.0]])
  end
  defn qr(t), do: Nx.qr(t)
  test "qr" do
    assert {q, r} = qr(Nx.iota({3, 2}))
    assert q == Nx.tensor([[0.0, 1.0], [1.0, 0.0], [0.0, 0.0]])
    assert r == Nx.tensor([[2.0, 3.0], [0.0, 1.0]])
  end
  defn svd(t), do: Nx.svd(t)
  test "svd" do
    assert {u, s, vt} = svd(Nx.tensor([[1, 0, 0], [0, 1, 0], [0, 0, -1]]))
    assert u == Nx.tensor([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]])
    assert s == Nx.tensor([1.0, 1.0, 1.0])
    assert vt == Nx.tensor([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, -1.0]])
  end
  defn if3(a, b, c), do: if(a, do: b, else: c)
  # `if` inside defn is tensor-aware: any non-zero predicate picks the first
  # branch, and both branches are unified to a common type/shape (broadcast).
  test "if" do
    assert if3(Nx.tensor(0), Nx.tensor(1, type: {:s, 16}), Nx.tensor(2, type: {:f, 32})) ==
             Nx.tensor(2, type: {:f, 32})
    assert if3(Nx.tensor(1), Nx.tensor(1, type: {:s, 16}), Nx.tensor(2, type: {:f, 32})) ==
             Nx.tensor(1, type: {:f, 32})
    assert if3(Nx.tensor(2), Nx.tensor(1, type: {:s, 16}), Nx.tensor(2, type: {:f, 32})) ==
             Nx.tensor(1, type: {:f, 32})
    assert if3(Nx.tensor(0), Nx.tensor([1, 2]), Nx.tensor([[3], [4]])) ==
             Nx.tensor([[3, 3], [4, 4]])
    assert if3(Nx.tensor(1), Nx.tensor([1, 2]), Nx.tensor([[3], [4]])) ==
             Nx.tensor([[1, 2], [1, 2]])
  end
  defn if_tuple(a, b, c), do: if(a, do: {{a, b}, c}, else: {{c, b}, a})
  test "if with tuples" do
    assert if_tuple(Nx.tensor(0), Nx.tensor(10), Nx.tensor(20)) ==
             {{Nx.tensor(20), Nx.tensor(10)}, Nx.tensor(0)}
    assert if_tuple(Nx.tensor(1), Nx.tensor(10), Nx.tensor(20)) ==
             {{Nx.tensor(1), Nx.tensor(10)}, Nx.tensor(20)}
    assert if_tuple(Nx.tensor(0), Nx.tensor(10), Nx.tensor([20, 30])) ==
             {{Nx.tensor([20, 30]), Nx.tensor(10)}, Nx.tensor([0, 0])}
    assert if_tuple(Nx.tensor(1), Nx.tensor(10), Nx.tensor([20, 30])) ==
             {{Nx.tensor([1, 1]), Nx.tensor(10)}, Nx.tensor([20, 30])}
  end
  defn if_tuple_match(a, b, c) do
    {{x, y}, z} = if(a, do: {{a, b}, c}, else: {{c, b}, a})
    x * y - z
  end
  test "if with matched tuples" do
    assert if_tuple_match(Nx.tensor(0), Nx.tensor(10), Nx.tensor(20)) == Nx.tensor(200)
    assert if_tuple_match(Nx.tensor(1), Nx.tensor(10), Nx.tensor(20)) == Nx.tensor(-10)
  end
  defn if_tuple_return(a, b, c) do
    {xy, _} = if(a, do: {{a, b}, c}, else: {{c, b}, a})
    xy
  end
  test "if with return tuple" do
    assert if_tuple_return(Nx.tensor(0), Nx.tensor(10), Nx.tensor(20)) ==
             {Nx.tensor(20), Nx.tensor(10)}
    assert if_tuple_return(Nx.tensor(1), Nx.tensor(10), Nx.tensor(20)) ==
             {Nx.tensor(1), Nx.tensor(10)}
  end
end
| 31.043478 | 97 | 0.527077 |
ff3ba7e9105fac7cbbd9466afbb4b5938223fcdb | 518 | exs | Elixir | bench/witchcraft/monoid/bitstring_bench.exs | doma-engineering/witchcraft | c84fa6b2146e7de745105e21f672ed413df93ad3 | [
"MIT"
] | 454 | 2019-06-05T22:56:45.000Z | 2022-03-27T23:03:02.000Z | bench/witchcraft/monoid/bitstring_bench.exs | doma-engineering/witchcraft | c84fa6b2146e7de745105e21f672ed413df93ad3 | [
"MIT"
] | 26 | 2019-07-08T09:29:08.000Z | 2022-02-04T02:40:48.000Z | bench/witchcraft/monoid/bitstring_bench.exs | doma-engineering/witchcraft | c84fa6b2146e7de745105e21f672ed413df93ad3 | [
"MIT"
] | 36 | 2019-06-25T17:45:27.000Z | 2022-03-21T01:53:42.000Z | defmodule Witchcraft.Monoid.BitStringBench do
use Benchfella
use Witchcraft.Monoid
#########
# Setup #
#########
# ---------- #
# Data Types #
# ---------- #
@string "hello"
##########
# Monoid #
##########
bench "empty/1", do: empty(@string)
bench "empty?/1", do: empty?(@string)
# ---------- #
# Large Data #
# ---------- #
@big_list_a 0..100_000 |> Enum.to_list() |> inspect()
bench "$$$ empty/1", do: empty(@string)
bench "$$$ empty?/1", do: empty?(@string)
end
| 16.1875 | 56 | 0.478764 |
ff3bc8c46b29104fcf05efb04caac6669cd2576f | 6,479 | exs | Elixir | test/appsignal/utils/data_encoder_test.exs | lukerandall/appsignal-elixir | eac4a7e44354bfa2de69ea8a9b0e27157db2e4c8 | [
"MIT"
] | null | null | null | test/appsignal/utils/data_encoder_test.exs | lukerandall/appsignal-elixir | eac4a7e44354bfa2de69ea8a9b0e27157db2e4c8 | [
"MIT"
] | null | null | null | test/appsignal/utils/data_encoder_test.exs | lukerandall/appsignal-elixir | eac4a7e44354bfa2de69ea8a9b0e27157db2e4c8 | [
"MIT"
] | null | null | null | defmodule DataEncoderExampleStruct do
  # Minimal struct fixture; exercises encoding of struct-valued map keys.
  defstruct foo: "bar"
end
defmodule Appsignal.Utils.DataEncoderTest do
  use ExUnit.Case
  @moduletag :skip_env_test_no_nif

  alias Appsignal.{Utils.DataEncoder, Nif}

  # Each test encodes an Elixir term into an Appsignal data resource via
  # DataEncoder.encode/1, then round-trips it through the NIF's JSON
  # serializer to assert the encoded result. Expected values are charlists
  # because Nif.data_to_json/1 returns one.

  # --- map encoding ---

  test "encode an empty map" do
    resource = DataEncoder.encode(%{})
    assert {:ok, '{}'} == Nif.data_to_json(resource)
  end

  test "encode a map with a string key and value" do
    resource = DataEncoder.encode(%{"foo" => "bar"})
    assert {:ok, '{"foo":"bar"}'} == Nif.data_to_json(resource)
  end

  test "encode a map with an atom key and string value" do
    resource = DataEncoder.encode(%{foo: "bar"})
    assert {:ok, '{"foo":"bar"}'} == Nif.data_to_json(resource)
  end

  test "encode a map with an integer key and string value" do
    resource = DataEncoder.encode(%{1 => "bar"})
    assert {:ok, '{"1":"bar"}'} == Nif.data_to_json(resource)
  end

  # Non-scalar keys are stringified with `inspect`-style formatting.
  test "encode a map with a map key and string value" do
    resource = DataEncoder.encode(%{%{foo: "bar"} => "baz"})
    assert {:ok, '{"%{foo: \\"bar\\"}":"baz"}'} == Nif.data_to_json(resource)
  end

  test "encode a map with a struct key and string value" do
    resource = DataEncoder.encode(%{%DataEncoderExampleStruct{} => "baz"})

    assert {:ok, '{"%DataEncoderExampleStruct{foo: \\"bar\\"}":"baz"}'} ==
             Nif.data_to_json(resource)
  end

  test "encode a map with an integer value" do
    resource = DataEncoder.encode(%{foo: 9_223_372_036_854_775_807})
    assert {:ok, '{"foo":9223372036854775807}'} == Nif.data_to_json(resource)
  end

  # Integers beyond the C long range are encoded as "bigint:" strings.
  test "encode a map with an integer too big for C-lang longs to fit" do
    resource = DataEncoder.encode(%{foo: 9_223_372_036_854_775_808})
    assert {:ok, '{"foo":"bigint:9223372036854775808"}'} == Nif.data_to_json(resource)

    resource = DataEncoder.encode(%{foo: 9_223_372_036_854_775_809})
    assert {:ok, '{"foo":"bigint:9223372036854775809"}'} == Nif.data_to_json(resource)
  end

  test "encode a map with a float value" do
    resource = DataEncoder.encode(%{foo: 3.14159})
    assert {:ok, '{"foo":3.14159}'} == Nif.data_to_json(resource)
  end

  test "encode a map with a boolean atom" do
    resource = DataEncoder.encode(%{foo: true})
    assert {:ok, '{"foo":true}'} == Nif.data_to_json(resource)

    resource = DataEncoder.encode(%{foo: false})
    assert {:ok, '{"foo":false}'} == Nif.data_to_json(resource)
  end

  test "encode a map with a nil value" do
    resource = DataEncoder.encode(%{foo: nil})
    assert {:ok, '{"foo":null}'} == Nif.data_to_json(resource)
  end

  test "encode a map with a map value" do
    resource = DataEncoder.encode(%{foo: %{bar: "baz"}})
    assert {:ok, '{"foo":{"bar":"baz"}}'} == Nif.data_to_json(resource)
  end

  test "encode a map with a list value" do
    resource = DataEncoder.encode(%{foo: ["bar"]})
    assert {:ok, '{"foo":["bar"]}'} == Nif.data_to_json(resource)
  end

  test "encode a map with an atom value" do
    resource = DataEncoder.encode(%{foo: :bar})
    assert {:ok, '{"foo":"bar"}'} == Nif.data_to_json(resource)
  end

  # Tuples are encoded as JSON arrays.
  test "encode a map with a tuple value" do
    resource = DataEncoder.encode(%{foo: {"foo", "bar", "baz"}})
    assert {:ok, '{"foo":["foo","bar","baz"]}'} == Nif.data_to_json(resource)
  end

  test "encode a map with a PID value" do
    resource = DataEncoder.encode(%{foo: self()})
    assert {:ok, '{"foo":"#{inspect(self())}"}'} == Nif.data_to_json(resource)
  end

  # --- list encoding ---

  test "encode an empty list" do
    resource = DataEncoder.encode([])
    assert {:ok, '[]'} == Nif.data_to_json(resource)
  end

  test "encode a list with a string item" do
    resource = DataEncoder.encode(["foo"])
    assert {:ok, '["foo"]'} == Nif.data_to_json(resource)
  end

  test "encode a list with a non-string item" do
    resource = DataEncoder.encode([:bar])
    assert {:ok, '["bar"]'} == Nif.data_to_json(resource)
  end

  test "encode a list with an integer item" do
    resource = DataEncoder.encode([9_223_372_036_854_775_807])
    assert {:ok, '[9223372036854775807]'} == Nif.data_to_json(resource)
  end

  test "encode a list with an integer item too big for C-lang longs to fit" do
    resource = DataEncoder.encode([9_223_372_036_854_775_808])
    assert {:ok, '["bigint:9223372036854775808"]'} == Nif.data_to_json(resource)

    resource = DataEncoder.encode([9_223_372_036_854_775_809])
    assert {:ok, '["bigint:9223372036854775809"]'} == Nif.data_to_json(resource)
  end

  test "encode a list with an float item" do
    resource = DataEncoder.encode([3.14159])
    assert {:ok, '[3.14159]'} == Nif.data_to_json(resource)
  end

  test "encode a list with a boolean atom" do
    resource = DataEncoder.encode([true])
    assert {:ok, '[true]'} == Nif.data_to_json(resource)

    resource = DataEncoder.encode([false])
    assert {:ok, '[false]'} == Nif.data_to_json(resource)
  end

  test "encode a list with a nil item" do
    resource = DataEncoder.encode([nil])
    assert {:ok, '[null]'} == Nif.data_to_json(resource)
  end

  test "encode a list with a map item" do
    resource = DataEncoder.encode([%{bar: "baz"}])
    assert {:ok, '[{"bar":"baz"}]'} == Nif.data_to_json(resource)
  end

  test "encode a list with a list item" do
    resource = DataEncoder.encode(["foo", ["bar"]])
    assert {:ok, '["foo",["bar"]]'} == Nif.data_to_json(resource)
  end

  # Improper lists cannot be represented as JSON arrays; they fall back to an
  # "improper_list:" tagged string of their inspected form.
  test "encode a list with an improper list as string representation" do
    resource = DataEncoder.encode([1, ["foo" | "bar"]])
    assert {:ok, '[1,"improper_list:[\\"foo\\" | \\"bar\\"]"]'} == Nif.data_to_json(resource)
  end

  test "encode a map with an improper list as string representation" do
    resource = DataEncoder.encode(%{foo: ["foo" | "bar"]})
    assert {:ok, '{"foo":"improper_list:[\\"foo\\" | \\"bar\\"]"}'} == Nif.data_to_json(resource)

    resource = DataEncoder.encode(%{foo: [1, "foo" | "bar"]})

    assert {:ok, '{"foo":"improper_list:[1, \\"foo\\" | \\"bar\\"]"}'} ==
             Nif.data_to_json(resource)
  end

  test "encode a list with a tuple item" do
    resource = DataEncoder.encode(["foo", {"foo", "bar", "baz"}])
    assert {:ok, '["foo",["foo","bar","baz"]]'} == Nif.data_to_json(resource)
  end

  test "encode a list with a PID item" do
    resource = DataEncoder.encode([self()])
    assert {:ok, '["#{inspect(self())}"]'} == Nif.data_to_json(resource)
  end

  # Structs are encoded as plain maps of their fields.
  test "encode a struct" do
    resource = DataEncoder.encode(%DataEncoderExampleStruct{})
    assert {:ok, '{"foo":"bar"}'} == Nif.data_to_json(resource)
  end
end
| 35.404372 | 97 | 0.643155 |
ff3bcb357287b345c57a18527effb9e1723c73ee | 101 | exs | Elixir | test/web_crawler_test.exs | NAndreasson/web_crawler | 1d70622c86d388834a5ece8325c53be638b47be7 | [
"MIT"
] | 2 | 2015-06-24T15:33:30.000Z | 2016-09-26T16:20:16.000Z | test/web_crawler_test.exs | NAndreasson/web_crawler | 1d70622c86d388834a5ece8325c53be638b47be7 | [
"MIT"
] | null | null | null | test/web_crawler_test.exs | NAndreasson/web_crawler | 1d70622c86d388834a5ece8325c53be638b47be7 | [
"MIT"
] | null | null | null | defmodule WebCrawlerTest do
use ExUnit.Case
test "the truth" do
assert 1 + 1 == 2
end
end
| 12.625 | 27 | 0.663366 |
ff3bcfe42db5312daa082d448d6d93353871c3da | 347 | ex | Elixir | lib/ex_doc/formatter/html/assets.ex | MattLangley/ex_doc | 971b6b6023af0b5229a9cdd705877fda5491430e | [
"Apache-2.0",
"CC-BY-4.0"
] | 1 | 2020-04-02T13:22:58.000Z | 2020-04-02T13:22:58.000Z | lib/ex_doc/formatter/html/assets.ex | sadiqmmm/ex_doc | 4bb9a6f25dd2e522122daa48829a3d455e7f766a | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | lib/ex_doc/formatter/html/assets.ex | sadiqmmm/ex_doc | 4bb9a6f25dd2e522122daa48829a3d455e7f766a | [
"Apache-2.0",
"CC-BY-4.0"
] | 1 | 2020-05-11T16:54:10.000Z | 2020-05-11T16:54:10.000Z | defmodule ExDoc.Formatter.HTML.Assets do
  @moduledoc false

  # Compile-time asset embedding: because this is a macro, the pipeline runs
  # while the module is being compiled and expands to a literal list of
  # {basename, file_contents} tuples for every file matching `pattern`
  # under formatters/html. The asset bytes are thus baked into the beam file;
  # the files need not exist at runtime.
  defmacrop embed_pattern(pattern) do
    ["formatters/html", pattern]
    |> Path.join()
    |> Path.wildcard()
    |> Enum.map(&{Path.basename(&1), File.read!(&1)})
  end

  # Embedded JS/CSS bundles shipped with the generated documentation.
  def dist do
    embed_pattern("dist/*.{css,js}")
  end

  # Embedded font files shipped with the generated documentation.
  def fonts do
    embed_pattern("fonts/*")
  end
end
| 18.263158 | 53 | 0.636888 |
ff3c0619bd8f197f525a43724fccdb2c47bb7412 | 1,912 | ex | Elixir | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/model/list_org_policies_request.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/model/list_org_policies_request.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/cloud_resource_manager/lib/google_api/cloud_resource_manager/v1/model/list_org_policies_request.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.CloudResourceManager.V1.Model.ListOrgPoliciesRequest do
  @moduledoc """
  The request sent to the ListOrgPolicies method.

  ## Attributes

  - pageSize (integer()): Size of the pages to be returned. This is currently unsupported and will be ignored. The server may at any point start using this field to limit page size. Defaults to: `null`.
  - pageToken (String.t): Page token used to retrieve the next page. This is currently unsupported and will be ignored. The server may at any point start using this field. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :pageSize => any(),
          :pageToken => any()
        }

  # NOTE(review): generated by swagger-codegen (see the file header); change
  # the generator templates rather than hand-editing these field definitions.
  field(:pageSize)
  field(:pageToken)
end
defimpl Poison.Decoder, for: GoogleApi.CloudResourceManager.V1.Model.ListOrgPoliciesRequest do
  # Decoding is delegated to the generated model module.
  def decode(value, options),
    do: GoogleApi.CloudResourceManager.V1.Model.ListOrgPoliciesRequest.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.CloudResourceManager.V1.Model.ListOrgPoliciesRequest do
  # All generated models share ModelBase's field-aware JSON encoding.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 37.490196 | 202 | 0.752615 |
ff3c07300b34b7bcc8e90fd9a4200170cd19c56a | 601 | ex | Elixir | lib/sobelow/traversal/send_file.ex | kianmeng/sobelow | 40c5f12ed6801c1f9cca13201f7f6ff31c8a7660 | [
"Apache-2.0"
] | null | null | null | lib/sobelow/traversal/send_file.ex | kianmeng/sobelow | 40c5f12ed6801c1f9cca13201f7f6ff31c8a7660 | [
"Apache-2.0"
] | null | null | null | lib/sobelow/traversal/send_file.ex | kianmeng/sobelow | 40c5f12ed6801c1f9cca13201f7f6ff31c8a7660 | [
"Apache-2.0"
] | null | null | null | defmodule Sobelow.Traversal.SendFile do
use Sobelow.Finding
@finding_type "Traversal.SendFile: Directory Traversal in `send_file`"
def run(fun, meta_file) do
confidence = if !meta_file.is_controller?, do: :low
Finding.init(@finding_type, meta_file.filename, confidence)
|> Finding.multi_from_def(fun, parse_def(fun))
|> Enum.each(&Print.add_finding(&1))
end
## send_file(conn, status, file, offset \\ 0, length \\ :all)
defp parse_def(fun) do
Parse.get_fun_vars_and_meta(fun, 2, :send_file, :Conn)
end
def details() do
Sobelow.Traversal.details()
end
end
| 27.318182 | 72 | 0.710483 |
ff3c476ce4d27ccec4f9a8c95c6c05ba8ae28874 | 2,373 | ex | Elixir | clients/elixir/generated/lib/cloud_manager_api/model/pipeline_execution_step_state__links.ex | shinesolutions/cloudmanager-api-clients | d73a25878f6cc57af954362ba8dccc90d54e6131 | [
"Apache-2.0"
] | 3 | 2020-06-23T05:31:52.000Z | 2020-11-26T05:34:57.000Z | clients/elixir/generated/lib/cloud_manager_api/model/pipeline_execution_step_state__links.ex | shinesolutions/cloudmanager-api-clients | d73a25878f6cc57af954362ba8dccc90d54e6131 | [
"Apache-2.0"
] | 2 | 2021-01-21T01:19:54.000Z | 2021-12-09T22:30:22.000Z | clients/elixir/generated/lib/cloud_manager_api/model/pipeline_execution_step_state__links.ex | shinesolutions/cloudmanager-api-clients | d73a25878f6cc57af954362ba8dccc90d54e6131 | [
"Apache-2.0"
] | 1 | 2020-11-18T11:48:13.000Z | 2020-11-18T11:48:13.000Z | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule CloudManagerAPI.Model.PipelineExecutionStepStateLinks do
  @moduledoc """
  HAL `_links` attached to a pipeline execution step state. Each field holds
  the `HalLink` for the corresponding Adobe Cloud link relation (or `nil`
  when the relation is absent).
  """

  @derive [Poison.Encoder]
  defstruct [
    :"http://ns.adobe.com/adobecloud/rel/execution",
    :"http://ns.adobe.com/adobecloud/rel/pipeline",
    :"http://ns.adobe.com/adobecloud/rel/pipeline/logs",
    :"http://ns.adobe.com/adobecloud/rel/pipeline/metrics",
    :"http://ns.adobe.com/adobecloud/rel/pipeline/advance",
    :"http://ns.adobe.com/adobecloud/rel/pipeline/cancel",
    :"http://ns.adobe.com/adobecloud/rel/program",
    :"self"
  ]

  @type t :: %__MODULE__{
          :"http://ns.adobe.com/adobecloud/rel/execution" => HalLink | nil,
          :"http://ns.adobe.com/adobecloud/rel/pipeline" => HalLink | nil,
          :"http://ns.adobe.com/adobecloud/rel/pipeline/logs" => HalLink | nil,
          :"http://ns.adobe.com/adobecloud/rel/pipeline/metrics" => HalLink | nil,
          :"http://ns.adobe.com/adobecloud/rel/pipeline/advance" => HalLink | nil,
          :"http://ns.adobe.com/adobecloud/rel/pipeline/cancel" => HalLink | nil,
          :"http://ns.adobe.com/adobecloud/rel/program" => HalLink | nil,
          :"self" => HalLink | nil
        }
end
defimpl Poison.Decoder, for: CloudManagerAPI.Model.PipelineExecutionStepStateLinks do
  import CloudManagerAPI.Deserializer

  # Every field of this struct is an (optional) HalLink, so decoding is the
  # same `deserialize/5` call applied once per link-relation key.
  @link_keys [
    :"http://ns.adobe.com/adobecloud/rel/execution",
    :"http://ns.adobe.com/adobecloud/rel/pipeline",
    :"http://ns.adobe.com/adobecloud/rel/pipeline/logs",
    :"http://ns.adobe.com/adobecloud/rel/pipeline/metrics",
    :"http://ns.adobe.com/adobecloud/rel/pipeline/advance",
    :"http://ns.adobe.com/adobecloud/rel/pipeline/cancel",
    :"http://ns.adobe.com/adobecloud/rel/program",
    :"self"
  ]

  def decode(value, options) do
    Enum.reduce(@link_keys, value, fn key, acc ->
      deserialize(acc, key, :struct, CloudManagerAPI.Model.HalLink, options)
    end)
  end
end
| 48.428571 | 123 | 0.711336 |
ff3c4e7df4016862aef419e3956174b59bdc2704 | 302 | ex | Elixir | lib/geo/multi_line_string.ex | ilyashuma/geo | 92c276a784237affd194b9dd0e3428bdc39ce4f8 | [
"MIT"
] | 555 | 2015-01-19T18:56:37.000Z | 2022-03-17T21:22:16.000Z | lib/geo/multi_line_string.ex | ilyashuma/geo | 92c276a784237affd194b9dd0e3428bdc39ce4f8 | [
"MIT"
] | 130 | 2015-02-12T16:48:54.000Z | 2021-12-15T17:54:18.000Z | lib/geo/multi_line_string.ex | ilyashuma/geo | 92c276a784237affd194b9dd0e3428bdc39ce4f8 | [
"MIT"
] | 100 | 2015-04-21T17:55:47.000Z | 2022-01-25T21:03:10.000Z | defmodule Geo.MultiLineString do
  @moduledoc """
  Defines the MultiLineString struct.
  """

  # coordinates: one list of {number, number} coordinate pairs per line string.
  # srid: spatial reference system identifier, when known.
  # properties: free-form metadata attached to the geometry.
  @type t :: %Geo.MultiLineString{
          coordinates: [[{number, number}]],
          srid: integer | nil,
          properties: map
        }

  defstruct coordinates: [], srid: nil, properties: %{}
end
| 23.230769 | 55 | 0.602649 |
ff3c639ce54dea790657df0a6c4693511c110d05 | 2,892 | ex | Elixir | clients/slides/lib/google_api/slides/v1/model/duplicate_object_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/slides/lib/google_api/slides/v1/model/duplicate_object_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/slides/lib/google_api/slides/v1/model/duplicate_object_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Slides.V1.Model.DuplicateObjectRequest do
  @moduledoc """
  Duplicates a slide or page element.

  When duplicating a slide, the duplicate slide will be created immediately
  following the specified slide. When duplicating a page element, the duplicate
  will be placed on the same page at the same position as the original.

  ## Attributes

  *   `objectId` (*type:* `String.t`, *default:* `nil`) - The ID of the object to duplicate.
  *   `objectIds` (*type:* `map()`, *default:* `nil`) - The object being duplicated may contain other objects, for example when
      duplicating a slide or a group page element. This map defines how the IDs
      of duplicated objects are generated: the keys are the IDs of the original
      objects and its values are the IDs that will be assigned to the
      corresponding duplicate object. The ID of the source object's duplicate
      may be specified in this map as well, using the same value of the
      `object_id` field as a key and the newly desired ID as the value.

      All keys must correspond to existing IDs in the presentation. All values
      must be unique in the presentation and must start with an alphanumeric
      character or an underscore (matches regex `[a-zA-Z0-9_]`); remaining
      characters may include those as well as a hyphen or colon (matches regex
      `[a-zA-Z0-9_-:]`). The length of the new ID must not be less than 5 or
      greater than 50.

      If any IDs of source objects are omitted from the map, a new random ID will
      be assigned. If the map is empty or unset, all duplicate objects will
      receive a new random ID.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :objectId => String.t(),
          :objectIds => map()
        }

  # NOTE(review): auto-generated model (see file header); regenerate rather
  # than editing the fields by hand.
  field(:objectId)
  field(:objectIds, type: :map)
end
defimpl Poison.Decoder, for: GoogleApi.Slides.V1.Model.DuplicateObjectRequest do
  # Decoding is delegated to the generated model module.
  def decode(value, options),
    do: GoogleApi.Slides.V1.Model.DuplicateObjectRequest.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Slides.V1.Model.DuplicateObjectRequest do
  # All generated models share ModelBase's field-aware JSON encoding.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 40.732394 | 127 | 0.726141 |
ff3c74b2092452b7dd28b8c656647615edcbf166 | 711 | ex | Elixir | lib/slip_limiter.ex | chwevans/slip | 506eeedee8a5378bea62f11f65ce093febf77864 | [
"0BSD"
] | null | null | null | lib/slip_limiter.ex | chwevans/slip | 506eeedee8a5378bea62f11f65ce093febf77864 | [
"0BSD"
] | null | null | null | lib/slip_limiter.ex | chwevans/slip | 506eeedee8a5378bea62f11f65ce093febf77864 | [
"0BSD"
] | null | null | null | defmodule Slip.Limiter do
use GenServer
require Lager
@name __MODULE__
def start_link, do: :gen_server.start_link({:local, @name}, __MODULE__, [], [])
def init(_args) do
tid = :ets.new(@name, [])
{:ok, tid}
end
def state(), do: :gen_server.call(@name, :state)
def handle_call(:state, _from, tid) do
reply = :ets.tab2list(tid)
{:reply, reply, tid}
end
def log(uid, name), do: :gen_server.cast(@name, {:log, uid, name})
def handle_cast({:log, uid, name}, tid) do
key = {uid, name}
case :ets.member(tid, key) do
true -> :ets.update_counter(tid, key, {2, 1})
false ->
:ets.insert(tid, {key, 1})
0
end
{:noreply, tid}
end
end
| 21.545455 | 81 | 0.589311 |
ff3c752b5481dd0844684135e232b07a24f9b8e2 | 1,768 | ex | Elixir | test/event/support/error/error_event_handler.ex | Aleksion/commanded | 266587f7786d27b390a355991cbb792944ac0885 | [
"MIT"
] | null | null | null | test/event/support/error/error_event_handler.ex | Aleksion/commanded | 266587f7786d27b390a355991cbb792944ac0885 | [
"MIT"
] | null | null | null | test/event/support/error/error_event_handler.ex | Aleksion/commanded | 266587f7786d27b390a355991cbb792944ac0885 | [
"MIT"
] | null | null | null | defmodule Commanded.Event.ErrorEventHandler do
  @moduledoc false

  # Test-support handler: `handle/2` always fails, and `error/3` dispatches on
  # the event's `strategy` field so each error-handling strategy ("retry",
  # "skip", "default", "invalid") can be exercised by the test suite. Each
  # branch also reports back to the test process via `send_reply/2`.

  use Commanded.Event.Handler, name: __MODULE__

  alias Commanded.Event.FailureContext
  alias Commanded.Event.ErrorAggregate.Events.ErrorEvent

  def handle(%ErrorEvent{}, _metadata) do
    # simulate event handling failure
    {:error, :failed}
  end

  # "retry": retry with an accumulated failure count (and the event's delay)
  # stored in the failure context, giving up after the third failure.
  def error({:error, :failed}, %ErrorEvent{strategy: "retry", delay: delay} = event, %FailureContext{
        context: context
      }) do
    context = context |> record_failure() |> Map.put(:delay, delay)

    case Map.get(context, :failures) do
      too_many when too_many >= 3 ->
        # stop error handler after third failure
        send_reply({:error, :too_many_failures, context}, event)

        {:stop, :too_many_failures}

      _ ->
        # retry event, record failure count in context map
        send_reply({:error, :failed, context}, event)

        {:retry, context}
    end
  end

  # skip event
  def error({:error, :failed}, %ErrorEvent{strategy: "skip"} = event, _failure_context) do
    send_reply({:error, :skipping}, event)

    :skip
  end

  # default behaviour is to stop the event handler with the given error reason
  def error({:error, reason}, %ErrorEvent{strategy: "default"} = event, _failure_context) do
    send_reply({:error, :stopping}, event)

    {:stop, reason}
  end

  # return an invalid response
  def error({:error, :failed}, %ErrorEvent{strategy: "invalid"} = event, _failure_context) do
    send_reply({:error, :invalid}, event)

    :invalid
  end

  # Increments the :failures counter in the context map (starting at 1).
  defp record_failure(context) do
    Map.update(context, :failures, 1, fn failures -> failures + 1 end)
  end

  # `reply_to` carries the test process's pid serialized as a charlist
  # (see :erlang.pid_to_list/1 on the sending side).
  defp send_reply(reply, %ErrorEvent{reply_to: reply_to}) do
    pid = :erlang.list_to_pid(reply_to)

    send(pid, reply)
  end
end
| 27.2 | 101 | 0.674208 |
ff3c8840e9bdb60e69ffd6105645eb6a7a140358 | 510 | ex | Elixir | lib/vutuv_web/views/api/post_view.ex | vutuv/vutuv | 174706cdaf28cef24e1cc06bec0884c25f2412be | [
"MIT"
] | 309 | 2016-05-03T17:16:23.000Z | 2022-03-01T09:30:22.000Z | lib/vutuv_web/views/api/post_view.ex | vutuv/vutuv | 174706cdaf28cef24e1cc06bec0884c25f2412be | [
"MIT"
] | 662 | 2016-04-27T07:45:18.000Z | 2022-01-05T07:29:19.000Z | lib/vutuv_web/views/api/post_view.ex | vutuv/vutuv | 174706cdaf28cef24e1cc06bec0884c25f2412be | [
"MIT"
] | 40 | 2016-04-27T07:46:22.000Z | 2021-12-31T05:54:34.000Z | defmodule VutuvWeb.Api.PostView do
use VutuvWeb, :view
alias VutuvWeb.Api.PostView
def render("index.json", %{posts: posts}) do
%{data: render_many(posts, PostView, "post.json")}
end
def render("show.json", %{post: post}) do
%{data: render_one(post, PostView, "post.json")}
end
def render("post.json", %{post: post}) do
%{
id: post.id,
body: post.body,
title: post.title,
user_id: post.user_id,
visibility_level: post.visibility_level
}
end
end
| 22.173913 | 54 | 0.633333 |
ff3cc7d4225e5c0f226dfd2040f41ea4a97ffa57 | 839 | ex | Elixir | lib/benchee/formatters/csv/raw.ex | PragTob/benchee_csv | 475b17647e7679cdea4b0ca39879b179cbfbc0cc | [
"MIT"
] | 9 | 2016-06-06T12:03:02.000Z | 2018-08-31T13:33:21.000Z | lib/benchee/formatters/csv/raw.ex | bencheeorg/benchee_csv | 475b17647e7679cdea4b0ca39879b179cbfbc0cc | [
"MIT"
] | 23 | 2017-02-25T15:04:47.000Z | 2019-03-17T20:01:39.000Z | lib/benchee/formatters/csv/raw.ex | bencheeorg/benchee_csv | 475b17647e7679cdea4b0ca39879b179cbfbc0cc | [
"MIT"
] | 3 | 2016-07-15T05:40:44.000Z | 2017-09-13T16:48:43.000Z | defmodule Benchee.Formatters.CSV.Raw do
@moduledoc """
Functionality for converting Benchee scenarios to raw csv.
"""
alias Benchee.Scenario
@doc false
def add_headers(measurements, scenarios) do
headers =
Enum.flat_map(scenarios, fn scenario ->
[
"#{scenario.name}#{input_part(scenario)} (Run Time Measurements)",
"#{scenario.name}#{input_part(scenario)} (Memory Usage Measurements)"
]
end)
[headers | measurements]
end
@doc false
def to_csv(scenario), do: [scenario.run_time_data.samples, scenario.memory_usage_data.samples]
@no_input Benchee.Benchmark.no_input()
defp input_part(%Scenario{input_name: nil}), do: ""
defp input_part(%Scenario{input_name: @no_input}), do: ""
defp input_part(%Scenario{input_name: name}), do: " with input #{name}"
end
| 28.931034 | 96 | 0.68534 |
ff3ccfdabc46db0c693192e13ae5c2e7a2d337ef | 936 | ex | Elixir | apps/nerves_hub_device/lib/nerves_hub_device_web/plugs/device.ex | Gazler/nerves_hub_web | 9a636a17310382819eaa6cee590e053cb47f0dcc | [
"Apache-2.0"
] | null | null | null | apps/nerves_hub_device/lib/nerves_hub_device_web/plugs/device.ex | Gazler/nerves_hub_web | 9a636a17310382819eaa6cee590e053cb47f0dcc | [
"Apache-2.0"
] | null | null | null | apps/nerves_hub_device/lib/nerves_hub_device_web/plugs/device.ex | Gazler/nerves_hub_web | 9a636a17310382819eaa6cee590e053cb47f0dcc | [
"Apache-2.0"
] | null | null | null | defmodule NervesHubDeviceWeb.Plugs.Device do
  import Plug.Conn

  alias NervesHubWebCore.Devices
  alias NervesHubWebCore.Firmwares

  def init(opts) do
    opts
  end

  # Authenticates the connecting device via its client SSL certificate.
  # On success the matching device — with firmware metadata refreshed and the
  # communication timestamp recorded — is assigned to `conn.assigns.device`.
  # Any failing step falls through to a 403 JSON response and halts the plug
  # pipeline.
  def call(conn, _opts) do
    peer_data = Plug.Conn.get_peer_data(conn)

    with {:ok, cert} <- Map.fetch(peer_data, :ssl_cert),
         {:ok, cert} <- X509.Certificate.from_der(cert),
         {:ok, cert} <- NervesHubDevice.SSL.verify_device(cert),
         {:ok, device} <- Devices.get_device_by_certificate(cert),
         {:ok, metadata} <- Firmwares.metadata_from_conn(conn),
         {:ok, device} <- Devices.update_firmware_metadata(device, metadata),
         {:ok, device} <- Devices.received_communication(device) do
      assign(conn, :device, device)
    else
      _err ->
        conn
        |> put_resp_header("content-type", "application/json")
        |> send_resp(403, Jason.encode!(%{status: "forbidden"}))
        |> halt()
    end
  end
end
| 30.193548 | 77 | 0.643162 |
ff3ced686370df699690ac8c2508b77cefbda139 | 958 | exs | Elixir | apps/ewallet_db/priv/repo/migrations/20171012113733_create_minted_token_table.exs | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | 2 | 2019-07-13T05:49:03.000Z | 2021-08-19T23:58:23.000Z | apps/ewallet_db/priv/repo/migrations/20171012113733_create_minted_token_table.exs | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_db/priv/repo/migrations/20171012113733_create_minted_token_table.exs | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | 3 | 2018-05-08T17:15:42.000Z | 2021-11-10T04:08:33.000Z | defmodule EWalletDB.Repo.Migrations.CreateMintedTokenTable do
  use Ecto.Migration

  # Creates the minted_token table with a UUID primary key and unique
  # indexes on each identifying column (symbol, iso_code, name,
  # short_symbol, iso_numeric).
  def change do
    create table(:minted_token, primary_key: false) do
      add :id, :uuid, primary_key: true
      add :symbol, :string, null: false
      add :iso_code, :string
      add :name, :string, null: false
      add :description, :string
      add :short_symbol, :string
      add :subunit, :string
      # Multiplier between the smallest unit and one whole token.
      add :subunit_to_unit, :integer, null: false
      add :symbol_first, :boolean, null: false, default: true
      add :html_entity, :string
      add :iso_numeric, :string
      add :smallest_denomination, :integer
      add :locked, :boolean, default: false

      timestamps()
    end

    create unique_index(:minted_token, [:symbol])
    create unique_index(:minted_token, [:iso_code])
    create unique_index(:minted_token, [:name])
    create unique_index(:minted_token, [:short_symbol])
    create unique_index(:minted_token, [:iso_numeric])
  end
end
| 31.933333 | 61 | 0.681628 |
ff3d06eaea02250f31eebe6212ecb2ddea61474f | 194 | exs | Elixir | apps/alert_processor/priv/repo/migrations/20180827190044_add_sms_opted_out_at_to_user.exs | mbta/alerts_concierge | d8e643445ef06f80ca273f2914c6959daea146f6 | [
"MIT"
] | null | null | null | apps/alert_processor/priv/repo/migrations/20180827190044_add_sms_opted_out_at_to_user.exs | mbta/alerts_concierge | d8e643445ef06f80ca273f2914c6959daea146f6 | [
"MIT"
] | 21 | 2021-03-12T17:05:30.000Z | 2022-02-16T21:48:35.000Z | apps/alert_processor/priv/repo/migrations/20180827190044_add_sms_opted_out_at_to_user.exs | mbta/alerts_concierge | d8e643445ef06f80ca273f2914c6959daea146f6 | [
"MIT"
] | 1 | 2021-12-09T15:09:53.000Z | 2021-12-09T15:09:53.000Z | defmodule AlertProcessor.Repo.Migrations.AddSmsOptedOutAtToUser do
use Ecto.Migration
def change do
alter table(:users) do
add(:sms_opted_out_at, :utc_datetime)
end
end
end
| 19.4 | 66 | 0.747423 |
ff3d09a2bd787301c1ecfcbe9bed106742ababc4 | 11,338 | ex | Elixir | lib/expo/po/parser.ex | jshmrtn/expo | 98459ed26833c1b05348bb80a66e78fc6d2e488d | [
"Apache-2.0"
] | 2 | 2022-03-31T21:12:35.000Z | 2022-03-31T21:44:59.000Z | lib/expo/po/parser.ex | jshmrtn/expo | 98459ed26833c1b05348bb80a66e78fc6d2e488d | [
"Apache-2.0"
] | 13 | 2022-03-30T17:41:25.000Z | 2022-03-31T23:35:49.000Z | lib/expo/po/parser.ex | jshmrtn/expo | 98459ed26833c1b05348bb80a66e78fc6d2e488d | [
"Apache-2.0"
] | null | null | null | # credo:disable-for-this-file Credo.Check.Refactor.PipeChainStart
defmodule Expo.Po.Parser do
@moduledoc false
import NimbleParsec
alias Expo.Po
alias Expo.Translation
alias Expo.Translations
alias Expo.Util
@bom <<0xEF, 0xBB, 0xBF>>
newline = ascii_char([?\n]) |> label("newline") |> ignore()
optional_whitespace =
ascii_char([?\s, ?\n, ?\r, ?\t])
|> times(min: 0)
|> label("whitespace")
|> ignore()
whitespace_no_nl =
ascii_char([?\s, ?\r, ?\t])
|> times(min: 1)
|> label("whitespace")
|> ignore()
double_quote = ascii_char([?"]) |> label("double quote") |> ignore()
escaped_char =
choice([
replace(string(~S(\n)), ?\n),
replace(string(~S(\t)), ?\t),
replace(string(~S(\r)), ?\r),
replace(string(~S(\")), ?\"),
replace(string(~S(\\)), ?\\)
])
string =
double_quote
|> repeat(choice([escaped_char, utf8_char(not: ?", not: ?\n)]))
|> label(lookahead_not(newline), "newline inside string")
|> concat(double_quote)
|> reduce(:to_string)
strings =
string
|> concat(optional_whitespace)
|> times(min: 1)
|> label("at least one string")
[msgctxt, msgid, msgid_plural, msgstr] =
for keyword <- [:msgctxt, :msgid, :msgid_plural, :msgstr] do
string(Atom.to_string(keyword))
|> concat(whitespace_no_nl)
|> ignore()
|> concat(strings)
|> tag(keyword)
|> label("#{keyword} followed by strings")
end
comment_content =
repeat(utf8_char(not: ?\n))
|> concat(newline)
|> reduce(:to_string)
comment =
string("#")
|> lookahead_not(utf8_char([?., ?:, ?,, ?|, ?~]))
|> ignore()
|> concat(comment_content)
|> unwrap_and_tag(:comment)
|> label("comment")
extracted_comment =
string("#.")
|> lookahead_not(utf8_char([?., ?:, ?,, ?|, ?~]))
|> ignore()
|> concat(comment_content)
|> unwrap_and_tag(:extracted_comment)
|> label("extracted_comment")
previous_msgid =
string("#|")
|> concat(whitespace_no_nl)
|> ignore()
|> concat(msgid)
|> unwrap_and_tag(:previous_msgid)
|> label("previous_msgid")
flag_content =
optional(whitespace_no_nl)
|> concat(utf8_char(not: ?\n, not: ?,) |> repeat() |> reduce(:to_string))
|> concat(optional(whitespace_no_nl))
flag =
ignore(string("#"))
|> times(
string(",")
|> ignore()
|> concat(flag_content),
min: 1
)
|> concat(newline)
|> reduce(:remove_empty_flags)
|> unwrap_and_tag(:flag)
|> label("flag")
reference_entry_line =
string(":")
|> ignore()
|> concat(unwrap_and_tag(integer(min: 1), :line))
reference_entry_file =
choice([
utf8_char(not: ?\n, not: ?,, not: ?:),
lookahead_not(string(":"), integer(min: 1))
])
|> times(min: 1)
|> reduce(:to_string)
|> unwrap_and_tag(:file)
reference_entry =
optional(whitespace_no_nl)
|> concat(reference_entry_file)
|> concat(optional(reference_entry_line))
|> concat(ignore(choice([string(","), string(" "), lookahead(newline)])))
|> reduce(:make_reference)
reference =
string("#:")
|> ignore()
|> times(reference_entry, min: 1)
|> concat(newline)
|> tag(:reference)
|> label("reference")
translation_meta =
choice([
comment,
extracted_comment,
reference,
flag,
previous_msgid
])
plural_form =
ignore(string("["))
|> integer(min: 1)
|> ignore(string("]"))
|> label("plural form (like [0])")
obsolete_prefix = string("#~") |> concat(whitespace_no_nl) |> ignore() |> tag(:obsolete)
msgstr_with_plural_form =
ignore(optional(obsolete_prefix))
|> concat(ignore(string("msgstr")))
|> concat(plural_form)
|> concat(whitespace_no_nl)
|> concat(strings)
|> reduce(:make_plural_form)
|> unwrap_and_tag(:msgstr)
translation_base =
repeat(translation_meta)
|> concat(optional(obsolete_prefix))
|> optional(msgctxt)
|> concat(optional(obsolete_prefix))
|> post_traverse(:attach_line_number)
|> concat(msgid)
singular_translation =
translation_base
|> concat(optional(obsolete_prefix))
|> concat(msgstr)
|> tag(Translation.Singular)
|> reduce(:make_translation)
|> label("singular translation")
plural_translation =
translation_base
|> concat(optional(obsolete_prefix))
|> concat(msgid_plural)
|> times(msgstr_with_plural_form, min: 1)
|> tag(Translation.Plural)
|> reduce(:make_translation)
|> label("plural translation")
translation = choice([singular_translation, plural_translation])
po_entry =
optional_whitespace
|> concat(translation)
|> concat(optional_whitespace)
|> post_traverse(:register_duplicates)
defparsecp :po_file,
times(po_entry, min: 1)
|> post_traverse(:make_translations)
|> unwrap_and_tag(:translations)
|> eos()
@doc """
Parses PO/POT `content` into a `Translations` struct.

Returns `{:ok, translations}` on success. A syntax error yields
`{:error, {:parse_error, message, offending_content, line}}`; duplicated
translation keys yield `{:error, {:duplicate_translations, list}}` where
each element is `{message, new_line, old_line}`.

The `:file` option is used for the BOM warning and is stored on the
resulting `Translations` struct.
"""
@spec parse(content :: String.t(), opts :: Po.parse_options()) ::
{:ok, Translations.t()}
| {:error,
{:parse_error, message :: String.t(), offending_content :: String.t(),
line :: pos_integer()}
| {:duplicate_translations,
[{message :: String.t(), new_line :: pos_integer(), old_line :: pos_integer()}]}}
def parse(content, opts) do
# Strip (and warn about) a leading BOM before handing off to the parsec.
content = prune_bom(content, Keyword.get(opts, :file, "nofile"))
case po_file(content, context: %{detected_duplicates: [], file: Keyword.get(opts, :file)}) do
{:ok, [{:translations, translations}], "", %{detected_duplicates: []}, _line, _offset} ->
{:ok, translations}
{:ok, _result, "", %{detected_duplicates: [_head | _rest] = detected_duplicates}, _line,
_offset} ->
# Duplicates were accumulated in reverse parse order; restore file order.
{:error,
{:duplicate_translations,
detected_duplicates
|> Enum.map(fn
{translation, new_line, old_line} ->
{build_duplicated_error_message(translation, new_line), new_line, old_line}
end)
|> Enum.reverse()}}
{:error, message, offending_content, _context, {line, _offset_line}, _offset} ->
{:error, {:parse_error, message, offending_content, line}}
end
end
# Reduce callback for "msgstr[N]": the token list starts with the plural
# index followed by the translated strings; pack them as `{index, strings}`.
defp make_plural_form([plural_index | translated_strings]) do
  {plural_index, translated_strings}
end
# Reduce callback for one reference entry. `tokens` holds a mandatory :file
# and an optional :line; returns `{file, line}` or the bare file string.
defp make_reference(tokens) do
  file = Keyword.fetch!(tokens, :file)

  case Keyword.fetch(tokens, :line) do
    {:ok, line} -> {file, line}
    :error -> file
  end
end
# post_traverse callback for the whole file: restores document order of the
# accumulated entries, splits off headers and top-of-file comments (see
# `Util.extract_meta_headers/1`), and wraps everything in a `Translations`
# struct carrying the source file name from the parser context.
defp make_translations(rest, translations, context, _line, _offset) do
{headers, top_comments, translations} =
translations |> Enum.reverse() |> Util.extract_meta_headers()
tokens = %Translations{
translations: translations,
headers: headers,
top_comments: top_comments,
file: context[:file]
}
{rest, [tokens], context}
end
# Reduce callback for a tagged translation. `tokens` contains exactly one
# `{Translation.Singular | Translation.Plural, type_attrs}` entry plus the
# remaining tagged attributes; values are grouped per attribute name and
# converted to struct fields by make_translation_attribute/3.
defp make_translation(tokens) do
{[{type, type_attrs}], attrs} =
Keyword.split(tokens, [Translation.Singular, Translation.Plural])
attrs =
[attrs, type_attrs]
|> Enum.concat()
|> Enum.group_by(&elem(&1, 0), &elem(&1, 1))
|> Enum.map(&make_translation_attribute(type, elem(&1, 0), elem(&1, 1)))
struct!(type, attrs)
end
# Converts one grouped parser attribute into a `{field, value}` pair for
# `struct!/2` in make_translation/1. Single-valued fields (msgid, msgctxt,
# msgid_plural, singular msgstr) arrive as one-element lists; list-valued
# fields keep every occurrence.
defp make_translation_attribute(type, key, value)
defp make_translation_attribute(_type, :msgid, [value]), do: {:msgid, value}
defp make_translation_attribute(_type, :msgctxt, [value]), do: {:msgctxt, value}
defp make_translation_attribute(Translation.Plural, :msgid_plural, [value]),
do: {:msgid_plural, value}
defp make_translation_attribute(Translation.Singular, :msgstr, [value]), do: {:msgstr, value}
# Plural msgstr: the list of `{plural_index, strings}` tuples becomes a map
# keyed by plural index. (Map.new/1 replaces the previous identity mapper
# `Map.new(value, fn {key, values} -> {key, values} end)` — same result.)
defp make_translation_attribute(Translation.Plural, :msgstr, value),
do: {:msgstr, Map.new(value)}
defp make_translation_attribute(_type, :comment, value), do: {:comments, value}
defp make_translation_attribute(_type, :extracted_comment, value),
do: {:extracted_comments, value}
defp make_translation_attribute(_type, :flag, value), do: {:flags, value}
defp make_translation_attribute(_type, :previous_msgid, value),
do: {:previous_msgids, Keyword.values(value)}
defp make_translation_attribute(_type, :reference, value), do: {:references, value}
defp make_translation_attribute(_type, :obsolete, _value), do: {:obsolete, true}
# Drops empty-string tokens from a parsed flag list.
defp remove_empty_flags(tokens) do
  Enum.filter(tokens, fn token -> token != "" end)
end
# post_traverse hook: stash the source line on which the current entry
# begins into the parser context (consumed later by register_duplicates/5).
defp attach_line_number(rest, tokens, context, {line_number, _line_offset}, _offset) do
  {rest, tokens, Map.put(context, :entry_line_number, line_number)}
end
# post_traverse callback run after every parsed entry. Tracks the first line
# each translation key was seen on; any later entry with the same key is
# recorded in :detected_duplicates as `{translation, new_line, old_line}`.
# Line numbers come from attach_line_number/5.
defp register_duplicates(
rest,
[%{} = translation] = args,
%{entry_line_number: new_line} = context,
_line,
_offset
) do
key = Translation.key(translation)
context =
case context[:duplicate_key_line_mapping][key] do
nil ->
context
old_line ->
Map.update!(context, :detected_duplicates, &[{translation, new_line, old_line} | &1])
end
# Remember the first line seen for this key; Map.put_new keeps the earliest.
context =
Map.update(
context,
:duplicate_key_line_mapping,
%{key => new_line},
&Map.put_new(&1, key, new_line)
)
{rest, args, context}
end
# Human-readable messages for the duplicate-translation errors returned by
# parse/2. msgid/msgid_plural are stored as iodata, hence the conversion
# before interpolation.
defp build_duplicated_error_message(%Translation.Singular{} = translation, new_line) do
id = IO.iodata_to_binary(translation.msgid)
"found duplicate on line #{new_line} for msgid: '#{id}'"
end
defp build_duplicated_error_message(%Translation.Plural{} = translation, new_line) do
id = IO.iodata_to_binary(translation.msgid)
idp = IO.iodata_to_binary(translation.msgid_plural)
"found duplicate on line #{new_line} for msgid: '#{id}' and msgid_plural: '#{idp}'"
end
# This function removes a BOM byte sequence from the start of the given string
# if this sequence is present. A BOM byte sequence
# (https://en.wikipedia.org/wiki/Byte_order_mark) is a thing that Unicode uses
# as a kind of metadata for a file; it's placed at the start of the file. GNU
# Gettext blows up if it finds a BOM sequence at the start of a file (as you
# can check with the `msgfmt` program); here, we don't blow up but we print a
# warning saying the BOM is present and suggesting to remove it.
#
# Note that `file` is used to give a nicer warning in case the BOM is
# present. This function is in fact called by both parse_string/1 and
# parse_file/1. Since parse_file/1 relies on parse_string/1, in case
# parse_file/1 is called this function is called twice but that's ok because
# in case of BOM, parse_file/1 will remove it first and parse_string/1 won't
# issue the warning again as its call to prune_bom/2 will be a no-op.
defp prune_bom(str, file)
# Binary prefix match on the BOM bytes (@bom): warn on stderr, return the
# rest of the string with the BOM removed.
defp prune_bom(@bom <> str, file) do
# "nofile" is the sentinel default used by parse/2 for string input.
file_or_string = if file == "nofile", do: "string", else: "file"
warning =
"#{file}: warning: the #{file_or_string} being parsed starts " <>
"with a BOM byte sequence (#{inspect(@bom, binaries: :as_binaries)}). " <>
"These bytes are ignored by Gettext but it's recommended to remove " <>
"them. To know more about BOM, read https://en.wikipedia.org/wiki/Byte_order_mark."
IO.puts(:stderr, warning)
str
end
# No BOM present: no-op.
defp prune_bom(str, _file) when is_binary(str) do
str
end
end
| 30.234667 | 97 | 0.635297 |
ff3d0b039931c0374fbb02e28256c818a7030efa | 379 | ex | Elixir | lib/mathmatical/questions/question.ex | shawnonthenet/mathmatical | d0f8d9e77dc71edfdc88776daca973fcd9cd106b | [
"Apache-2.0"
] | null | null | null | lib/mathmatical/questions/question.ex | shawnonthenet/mathmatical | d0f8d9e77dc71edfdc88776daca973fcd9cd106b | [
"Apache-2.0"
] | null | null | null | lib/mathmatical/questions/question.ex | shawnonthenet/mathmatical | d0f8d9e77dc71edfdc88776daca973fcd9cd106b | [
"Apache-2.0"
] | null | null | null | defmodule Mathmatical.Questions.Question do
use Ecto.Schema
import Ecto.Changeset
# Ecto schema backing the "questions" table. `subject_id` is declared as a
# plain foreign-key column; no belongs_to association is defined here.
schema "questions" do
field :answer, :string
field :question, :string
field :subject_id, :id
timestamps()
end
@doc false
# Builds a changeset accepting only :question and :answer, both required.
# Note: :subject_id is intentionally not castable here.
def changeset(question, attrs) do
  changeset = cast(question, attrs, [:question, :answer])
  validate_required(changeset, [:question, :answer])
end
end
| 18.95 | 46 | 0.6781 |
ff3d1f66c0f6db06af548eb7d3484b4e51bc5a44 | 6,243 | exs | Elixir | apps/train_loc/test/train_loc/vehicles/vehicle_test.exs | paulswartz/commuter_rail_boarding | 6be34c192d6a1ee980307d9f3d027bf4cdafa53f | [
"MIT"
] | 1 | 2022-01-30T20:53:07.000Z | 2022-01-30T20:53:07.000Z | apps/train_loc/test/train_loc/vehicles/vehicle_test.exs | paulswartz/commuter_rail_boarding | 6be34c192d6a1ee980307d9f3d027bf4cdafa53f | [
"MIT"
] | 47 | 2021-05-05T10:31:05.000Z | 2022-03-30T22:18:14.000Z | apps/train_loc/test/train_loc/vehicles/vehicle_test.exs | paulswartz/commuter_rail_boarding | 6be34c192d6a1ee980307d9f3d027bf4cdafa53f | [
"MIT"
] | 1 | 2021-05-14T00:35:08.000Z | 2021-05-14T00:35:08.000Z | defmodule TrainLoc.Vehicles.VehicleTest do
use ExUnit.Case, async: true
use Timex
import TrainLoc.Utilities.ConfigHelpers
import ExUnit.CaptureLog
alias TrainLoc.Vehicles.Vehicle
@time_format config(:time_format)
# A single well-formed vehicle record as delivered by the upstream feed.
@valid_vehicle_json %{
"Heading" => 48,
"Latitude" => 42.28179,
"Longitude" => -71.15936,
"TripID" => 612,
"Speed" => 14,
"Update Time" => "2018-01-05T11:38:50.000Z",
"VehicleID" => 1827,
"WorkID" => 602
}
# The parsed "Update Time" of @valid_vehicle_json, expressed in the
# America/New_York zone using the configured @time_format.
@valid_timestamp Timex.parse!(
"2018-01-05 11:38:50 America/New_York",
@time_format
)
# WorkID/TripID integers become zero-padded string block/trip fields, and
# the UTC update time is converted to America/New_York.
test "converts single JSON object to Vehicle struct" do
json_obj = %{
"Heading" => 48,
"Latitude" => 42.28179,
"Longitude" => -71.15936,
"TripID" => 612,
"Speed" => 14,
"Update Time" => "2018-01-05T11:38:50.000Z",
"VehicleID" => 1827,
"WorkID" => 602
}
assert Vehicle.from_json_object(json_obj) == [
%Vehicle{
vehicle_id: 1827,
timestamp:
Timex.parse!(
"2018-01-05 11:38:50 America/New_York",
@time_format
),
block: "602",
trip: "612",
latitude: 42.28179,
longitude: -71.15936,
speed: 14,
heading: 48
}
]
end
# Batch form: a map keyed by vehicle id; result order follows the map keys
# ("1633", "1643", "1652").
test "converts batch JSON map to list of Vehicle structs" do
json_map = %{
"1633" => %{
"Heading" => 0,
"Latitude" => 42.37405,
"Longitude" => -71.07496,
"TripID" => 0,
"Speed" => 0,
"Update Time" => "2018-01-16T15:03:27.000Z",
"VehicleID" => 1633,
"WorkID" => 0
},
"1643" => %{
"Heading" => 168,
"Latitude" => 42.72570,
"Longitude" => -70.85867,
"TripID" => 170,
"Speed" => 9,
"Update Time" => "2018-01-16T15:03:17.000Z",
"VehicleID" => 1643,
"WorkID" => 202
},
"1652" => %{
"Heading" => 318,
"Latitude" => 42.36698,
"Longitude" => -71.06314,
"TripID" => 326,
"Speed" => 10,
"Update Time" => "2018-01-16T15:03:23.000Z",
"VehicleID" => 1652,
"WorkID" => 306
}
}
assert Vehicle.from_json_map(json_map) == [
%Vehicle{
vehicle_id: 1633,
timestamp:
Timex.parse!(
"2018-01-16 15:03:27 America/New_York",
@time_format
),
block: "000",
trip: "000",
latitude: 42.37405,
longitude: -71.07496,
speed: 0,
heading: 0
},
%Vehicle{
vehicle_id: 1643,
timestamp:
Timex.parse!(
"2018-01-16 15:03:17 America/New_York",
@time_format
),
block: "202",
trip: "170",
latitude: 42.72570,
longitude: -70.85867,
speed: 9,
heading: 168
},
%Vehicle{
vehicle_id: 1652,
timestamp:
Timex.parse!(
"2018-01-16 15:03:23 America/New_York",
@time_format
),
block: "306",
trip: "326",
latitude: 42.36698,
longitude: -71.06314,
speed: 10,
heading: 318
}
]
end
describe "log_vehicle/1" do
test "with valid vehicle" do
iso_8601 = "2015-01-23T23:50:07.000Z"
{:ok, datetime, 0} = DateTime.from_iso8601(iso_8601)
vehicle = %Vehicle{
vehicle_id: 1712,
timestamp: datetime,
block: "802",
trip: "509",
latitude: 42.36698,
longitude: -71.06314,
speed: 10,
heading: 318
}
fun = fn -> Vehicle.log_vehicle(vehicle) end
# The logged line lists the struct fields alphabetically as
# "key=value " pairs, with the timestamp rendered in ISO 8601.
expected_logger_message =
"Vehicle - " <>
"block=#{vehicle.block} " <>
"heading=#{vehicle.heading} " <>
"latitude=#{vehicle.latitude} " <>
"longitude=#{vehicle.longitude} " <>
"speed=#{vehicle.speed} " <>
"timestamp=#{iso_8601} " <>
"trip=#{vehicle.trip} " <> "vehicle_id=#{vehicle.vehicle_id} "
assert capture_log(fun) =~ expected_logger_message
end
end
describe "from_json/1" do
test "works on valid json" do
expected = %Vehicle{
block: "602",
heading: 48,
latitude: 42.28179,
longitude: -71.15936,
speed: 14,
timestamp: @valid_timestamp,
trip: "612",
vehicle_id: 1827
}
got = Vehicle.from_json(@valid_vehicle_json)
assert got == expected
end
# Unknown keys are ignored; missing fields map to nil rather than raising.
test "does not fail on invalid json" do
invalid_json = %{"other" => nil}
expected = %Vehicle{
block: nil,
heading: nil,
latitude: nil,
longitude: nil,
speed: nil,
timestamp: nil,
trip: nil,
vehicle_id: nil
}
got = Vehicle.from_json(invalid_json)
assert got == expected
end
# 0/0 coordinates are treated as "no fix" and normalized to nil.
test "converts lat/long of 0 to nil" do
json = %{@valid_vehicle_json | "Latitude" => 0, "Longitude" => 0}
expected = %Vehicle{
block: "602",
heading: 48,
latitude: nil,
longitude: nil,
speed: 14,
timestamp: @valid_timestamp,
trip: "612",
vehicle_id: 1827
}
got = Vehicle.from_json(json)
assert got == expected
end
test "zero-pads trip/block to 3 characters" do
json = %{@valid_vehicle_json | "WorkID" => 9, "TripID" => 10}
expected = %Vehicle{
block: "009",
heading: 48,
latitude: 42.28179,
longitude: -71.15936,
speed: 14,
timestamp: @valid_timestamp,
trip: "010",
vehicle_id: 1827
}
got = Vehicle.from_json(json)
assert got == expected
end
end
end
| 25.904564 | 72 | 0.474451 |
ff3d43e19d73d85d97594bc8af0c208457b2af31 | 1,889 | ex | Elixir | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p1beta1_operation_metadata.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p1beta1_operation_metadata.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p1beta1_operation_metadata.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p1beta1OperationMetadata do
@moduledoc """
Contains metadata for the BatchAnnotateImages operation.
## Attributes
* `createTime` (*type:* `DateTime.t`, *default:* `nil`) - The time when the batch request was received.
* `state` (*type:* `String.t`, *default:* `nil`) - Current state of the batch operation.
* `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - The time when the operation result was last updated.
"""
# ModelBase supplies the `field/1,2` macro and the encode/decode plumbing
# used by the Poison implementations below. (Auto-generated file.)
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:createTime => DateTime.t(),
:state => String.t(),
:updateTime => DateTime.t()
}
field(:createTime, as: DateTime)
field(:state)
field(:updateTime, as: DateTime)
end
# Poison decoding delegates to the model's generated decode/2 (from
# GoogleApi.Gax.ModelBase).
defimpl Poison.Decoder, for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p1beta1OperationMetadata do
def decode(value, options) do
GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p1beta1OperationMetadata.decode(value, options)
end
end
# Poison encoding is handled generically by the Gax model base.
defimpl Poison.Encoder, for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p1beta1OperationMetadata do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.641509 | 114 | 0.731075 |
ff3d4e4640617f9fb35ecae5fa91dcaa126d49af | 3,714 | exs | Elixir | test/appsignal/demo_test.exs | MeterSoft/appsignal-elixir | 52e3505b9dc90bce0795c4753a758d40bdf41463 | [
"MIT"
] | 234 | 2016-08-18T20:43:15.000Z | 2022-02-27T11:31:48.000Z | test/appsignal/demo_test.exs | MeterSoft/appsignal-elixir | 52e3505b9dc90bce0795c4753a758d40bdf41463 | [
"MIT"
] | 563 | 2016-07-25T17:45:14.000Z | 2022-03-21T11:39:29.000Z | test/appsignal/demo_test.exs | MeterSoft/appsignal-elixir | 52e3505b9dc90bce0795c4753a758d40bdf41463 | [
"MIT"
] | 86 | 2016-09-13T22:53:46.000Z | 2022-02-16T11:03:51.000Z | defmodule Appsignal.DemoTest do
use ExUnit.Case
alias Appsignal.{Demo, Span, Test}
setup do
# Start the Appsignal test doubles that record the calls made during the
# test; assertions below read them back via Test.Tracer.get/1 and
# Test.Span.get/1.
start_supervised(Test.Nif)
start_supervised(Test.Tracer)
start_supervised(Test.Span)
start_supervised(Test.Monitor)
:ok
end
describe "send_performance_sample/0" do
setup do
# Fire the demo sample once; every test below inspects the recorded calls.
Demo.send_performance_sample()
:ok
end
test "creates a root span and four child spans" do
# The root span is created with a nil parent (last entry).
assert {:ok,
[
{_, %Span{}},
{_, %Span{}},
{_, %Span{}},
{_, %Span{}},
{_, nil}
]} = Test.Tracer.get(:create_span)
end
test "sets the spans' names" do
assert {:ok,
[
{%Span{}, "render.phoenix_template"},
{%Span{}, "query.ecto"},
{%Span{}, "query.ecto"},
{%Span{}, "call.phoenix_endpoint"},
{%Span{}, "DemoController#hello"}
]} = Test.Span.get(:set_name)
end
test "sets the span's categories" do
assert [
{%Span{}, "appsignal:category", "render.phoenix_template"},
{%Span{}, "appsignal:category", "query.ecto"},
{%Span{}, "appsignal:category", "query.ecto"},
{%Span{}, "appsignal:category", "call.phoenix_endpoint"},
{%Span{}, "appsignal:category", "call.phoenix"}
] = attributes("appsignal:category")
end
test "set's the root span's namespace" do
assert {:ok, [{%Span{}, "http_request"}]} = Test.Span.get(:set_namespace)
end
test "sets the 'demo_sample' attribute" do
assert attribute("demo_sample", true)
end
test "sets the span's sample data" do
assert_sample_data("environment", %{
"method" => "GET",
"request_path" => "/"
})
end
test "closes all spans" do
assert {:ok, [{%Span{}}, {%Span{}}, {%Span{}}, {%Span{}}, {%Span{}}]} =
Test.Tracer.get(:close_span)
end
end
describe "send_error_sample/0" do
setup do
Demo.send_error_sample()
:ok
end
test "creates a root span" do
assert {:ok, [{_, nil}]} = Test.Tracer.get(:create_span)
end
test "sets the spans' names" do
assert {:ok, [{%Span{}, "DemoController#hello"}]} = Test.Span.get(:set_name)
end
test "sets the span's category" do
assert [
{%Span{}, "appsignal:category", "call.phoenix"}
] = attributes("appsignal:category")
end
test "sets the 'demo_sample' attribute" do
assert attribute("demo_sample", true)
end
# The demo raises a TestError, which must be attached to the root span.
test "adds the error to the span" do
assert {:ok, [{%Span{}, :error, %TestError{}, _}]} = Test.Span.get(:add_error)
end
test "sets the span's sample data" do
assert_sample_data("environment", %{
"method" => "GET",
"request_path" => "/"
})
end
test "closes all spans" do
assert {:ok, [{%Span{}}]} = Test.Tracer.get(:close_span)
end
end
# Asserts that set_sample_data was recorded at least once with exactly the
# given key/data pair.
defp assert_sample_data(asserted_key, asserted_data) do
{:ok, sample_data} = Test.Span.get(:set_sample_data)
assert Enum.any?(sample_data, fn {%Span{}, key, data} ->
key == asserted_key and data == asserted_data
end)
end
# Returns all recorded set_attribute calls whose attribute key matches.
# Note: the fn head requires every recorded entry to be a {%Span{}, key,
# data} triple — a malformed entry would raise FunctionClauseError.
defp attributes(asserted_key) do
{:ok, attributes} = Test.Span.get(:set_attribute)
Enum.filter(attributes, fn {%Span{}, key, _data} ->
key == asserted_key
end)
end
# True if any recorded set_attribute call matches both key and data.
defp attribute(asserted_key, asserted_data) do
{:ok, attributes} = Test.Span.get(:set_attribute)
Enum.any?(attributes, fn {%Span{}, key, data} ->
key == asserted_key and data == asserted_data
end)
end
end
| 27.109489 | 84 | 0.554658 |
ff3d6482536d44e4eb30347977d6ac5eb593ca17 | 4,596 | ex | Elixir | lib/phoenix/naming.ex | bitgamma/phoenix | c06fc0fa03a95f2c863c49711da58fedfa56c374 | [
"MIT"
] | null | null | null | lib/phoenix/naming.ex | bitgamma/phoenix | c06fc0fa03a95f2c863c49711da58fedfa56c374 | [
"MIT"
] | null | null | null | lib/phoenix/naming.ex | bitgamma/phoenix | c06fc0fa03a95f2c863c49711da58fedfa56c374 | [
"MIT"
] | null | null | null | defmodule Phoenix.Naming do
@moduledoc """
Conveniences for inflecting and working with names in Phoenix.
"""
@doc """
Extracts the resource name from an alias.

## Examples

    iex> Phoenix.Naming.resource_name(MyApp.User)
    "user"

    iex> Phoenix.Naming.resource_name(MyApp.UserView, "View")
    "user"

"""
@spec resource_name(String.Chars.t, String.t) :: String.t
def resource_name(alias, suffix \\ "") do
alias
|> to_string()
|> Module.split()
# Keep only the last segment ("MyApp.UserView" -> "UserView"), strip the
# suffix, then convert to snake_case.
|> List.last()
|> unsuffix(suffix)
|> underscore()
end
@doc """
Removes the given suffix from the name if it exists.

## Examples

    iex> Phoenix.Naming.unsuffix("MyApp.User", "View")
    "MyApp.User"

    iex> Phoenix.Naming.unsuffix("MyApp.UserView", "View")
    "MyApp.User"

"""
@spec unsuffix(String.Chars.t, String.t) :: String.t
def unsuffix(value, "") do
  to_string(value)
end

def unsuffix(value, suffix) do
  string = to_string(value)

  if String.ends_with?(string, suffix) do
    # Chop exactly byte_size(suffix) bytes off the end.
    binary_part(string, 0, byte_size(string) - byte_size(suffix))
  else
    string
  end
end
@doc """
Finds the Base Namespace of the module with optional concat

## Examples

    iex> Phoenix.Naming.base_concat(MyApp.MyChannel)
    MyApp

    iex> Phoenix.Naming.base_concat(MyApp.Admin.MyChannel, PubSub)
    MyApp.PubSub

    iex> Phoenix.Naming.base_concat(MyApp.Admin.MyChannel, "PubSub")
    MyApp.PubSub

"""
def base_concat(mod, submodule \\ nil) do
  # The first segment of the module path is the base namespace;
  # Module.concat/2 skips a nil submodule, yielding the base alias itself.
  [base | _rest] = Module.split(mod)
  Module.concat(base, submodule)
end
@doc """
Converts String to underscore case.

## Examples

    iex> Phoenix.Naming.underscore("MyApp")
    "my_app"

    iex> Phoenix.Naming.underscore(:MyApp)
    "my_app"

    iex> Phoenix.Naming.underscore("my-app")
    "my_app"

In general, `underscore` can be thought of as the reverse of
`camelize`, however, in some cases formatting may be lost:

    Phoenix.Naming.underscore "SAPExample"  #=> "sap_example"
    Phoenix.Naming.camelize "sap_example"   #=> "SapExample"

"""
@spec underscore(String.Chars.t) :: String.t
def underscore(value) when not is_binary(value) do
underscore(to_string(value))
end
def underscore(""), do: ""
def underscore(<<h, t :: binary>>) do
<<to_lower_char(h)>> <> do_underscore(t, h)
end
# Uppercase char followed by a non-uppercase char: break before it, so an
# acronym run ends with a single split ("SAPExample" -> "sap_example").
defp do_underscore(<<h, t, rest :: binary>>, _) when h in ?A..?Z and not t in ?A..?Z do
<<?_, to_lower_char(h), t>> <> do_underscore(rest, t)
end
# Lower-to-upper boundary: insert an underscore ("MyApp" -> "my_app").
defp do_underscore(<<h, t :: binary>>, prev) when h in ?A..?Z and not prev in ?A..?Z do
<<?_, to_lower_char(h)>> <> do_underscore(t, h)
end
# Dashes become underscores.
defp do_underscore(<<?-, t :: binary>>, _) do
<<?_>> <> do_underscore(t, ?-)
end
# ".." is preserved verbatim (relative path component).
defp do_underscore(<< "..", t :: binary>>, _) do
<<"..">> <> underscore(t)
end
defp do_underscore(<<?.>>, _), do: <<?.>>
# A dot separates namespaces; map it to a path separator.
defp do_underscore(<<?., t :: binary>>, _) do
<<?/>> <> underscore(t)
end
defp do_underscore(<<h, t :: binary>>, _) do
<<to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<>>, _) do
<<>>
end
# ASCII-only case folding (A-Z -> a-z); other bytes pass through unchanged.
defp to_lower_char(char) when char in ?A..?Z, do: char + 32
defp to_lower_char(char), do: char
@doc """
Converts String to camel case.

## Examples

    iex> Phoenix.Naming.camelize("my_app")
    "MyApp"

    iex> Phoenix.Naming.camelize(:my_app)
    "MyApp"

In general, `camelize` can be thought of as the reverse of
`underscore`, however, in some cases formatting may be lost:

    Phoenix.Naming.underscore "SAPExample"  #=> "sap_example"
    Phoenix.Naming.camelize "sap_example"   #=> "SapExample"

"""
@spec camelize(String.Chars.t) :: String.t
def camelize(value) when not is_binary(value) do
camelize(to_string(value))
end
def camelize(""), do: ""
# Leading underscores are dropped entirely.
def camelize(<<?_, t :: binary>>) do
camelize(t)
end
def camelize(<<h, t :: binary>>) do
<<to_upper_char(h)>> <> do_camelize(t)
end
# Collapse runs of underscores to a single separator before recursing.
defp do_camelize(<<?_, ?_, t :: binary>>) do
do_camelize(<< ?_, t :: binary >>)
end
# Underscore followed by a lowercase letter: uppercase the letter.
defp do_camelize(<<?_, h, t :: binary>>) when h in ?a..?z do
<<to_upper_char(h)>> <> do_camelize(t)
end
# A trailing underscore is dropped.
defp do_camelize(<<?_>>) do
<<>>
end
# A path separator becomes a namespace dot, restarting capitalization.
defp do_camelize(<<?/, t :: binary>>) do
<<?.>> <> camelize(t)
end
defp do_camelize(<<h, t :: binary>>) do
<<h>> <> do_camelize(t)
end
defp do_camelize(<<>>) do
<<>>
end
# ASCII-only case folding (a-z -> A-Z); other bytes pass through unchanged.
defp to_upper_char(char) when char in ?a..?z, do: char - 32
defp to_upper_char(char), do: char
end
| 22.529412 | 89 | 0.611184 |
ff3d6a31434088aecae4effe5cc03efe260913fd | 2,256 | ex | Elixir | lib/potionx/repo.ex | shuv1824/potionx | a5888413b13a520d8ddf79fb26b7483e441737c3 | [
"MIT"
] | null | null | null | lib/potionx/repo.ex | shuv1824/potionx | a5888413b13a520d8ddf79fb26b7483e441737c3 | [
"MIT"
] | null | null | null | lib/potionx/repo.ex | shuv1824/potionx | a5888413b13a520d8ddf79fb26b7483e441737c3 | [
"MIT"
] | null | null | null | defmodule Potionx.Repo do
# Process-dictionary keys under which the current tenant ids are stored
# (see get_org_id/0, put_org_id/1 and friends below).
@tenant_key_org {:potionx, :organization_id}
@tenant_key_user {:potionx, :user_id}
use TypedStruct
defmodule Pagination do
# Relay-style cursor pagination arguments.
# NOTE(review): typedstruct's second `field` argument is a typespec;
# `:string`/`:integer` declare literal-atom types here — `String.t()` and
# `integer()` may have been intended. Confirm against the typedstruct docs.
typedstruct do
field :after, :string
field :before, :string
field :first, :integer
field :last, :integer
end
end
@doc """
Injects tenant scoping into the `use`-ing Ecto repo.

Options:

  * `:scoped_by_organization` - schema modules that must always be scoped
    by `:organization_id` (queries raise when no org id is available).
  * `:scoped_by_user` - schema modules that must always be scoped by
    `:user_id`.

Defines `default_options/1`, which reads the current tenant ids from the
process dictionary (see `Potionx.Repo.put_org_id/1` / `put_user_id/1`), and
an overridable `prepare_query/3` that appends the matching `where` clauses.
"""
defmacro __using__(opts) do
  quote do
    require Ecto.Query

    @scoped_by_organization unquote(opts[:scoped_by_organization]) || []
    @scoped_by_user unquote(opts[:scoped_by_user]) || []

    def default_options(_operation) do
      [org_id: Potionx.Repo.get_org_id(), user_id: Potionx.Repo.get_user_id()]
    end

    def prepare_query(_operation, %{from: %{source: {_, model}}} = query, opts) do
      cond do
        # Schema migrations must see all rows, regardless of tenant.
        opts[:schema_migration] ->
          {query, opts}

        Enum.member?(@scoped_by_organization, model) and is_nil(opts[:org_id]) ->
          raise "expected organization_id to be set"

        Enum.member?(@scoped_by_user, model) and is_nil(opts[:user_id]) ->
          raise "expected user_id to be set"

        true ->
          # Add a where clause per tenant column when the schema requires
          # scoping, or when a value was explicitly passed in the options.
          # NOTE(review): for :organization_id this reads
          # opts[:organization_id], but default_options/1 supplies :org_id —
          # confirm callers pass :organization_id explicitly, otherwise
          # org-scoped queries filter on nil.
          # (An unreachable duplicate `true ->` clause raising
          # "expected org_id or skip_org_id to be set" was removed: `cond`
          # takes the first truthy branch, so it could never run.)
          [
            {:user_id, @scoped_by_user},
            {:organization_id, @scoped_by_organization}
          ]
          |> Enum.reduce({query, opts}, fn {key, scoped_models}, {q, opts} ->
            if Enum.member?(scoped_models, model) or not is_nil(opts[key]) do
              {Ecto.Query.where(q, ^[{key, opts[key]}]), opts}
            else
              {q, opts}
            end
          end)
      end
    end

    # Fallback for queries without a schema source.
    def prepare_query(_operation, q, opts) do
      {q, opts}
    end

    defoverridable(prepare_query: 3)
  end
end
@doc "Returns the organization id stored for the current process, or nil."
def get_org_id() do
Process.get(@tenant_key_org)
end
@doc "Returns the user id stored for the current process, or nil."
def get_user_id() do
Process.get(@tenant_key_user)
end
@doc "Stores the organization id in the current process dictionary."
def put_org_id(org_id) do
Process.put(@tenant_key_org, org_id)
end
@doc "Stores the user id in the current process dictionary."
def put_user_id(user_id) do
Process.put(@tenant_key_user, user_id)
end
end
| 28.556962 | 84 | 0.542996 |
ff3daa32cacffb41ee8dfc8fe9616abac7c5ec7d | 244 | ex | Elixir | lib/ex_bybit/credentials.ex | fremantle-industries/ex_bybit | 5bb6b751695ee6b3952645ae7cac00630b350a86 | [
"MIT"
] | 1 | 2020-09-20T10:53:56.000Z | 2020-09-20T10:53:56.000Z | lib/ex_bybit/credentials.ex | fremantle-capital/ex_bybit | 2cc41f796232cd228c21a49fbfd8116a1ad7d7d5 | [
"MIT"
] | 7 | 2021-07-28T21:57:50.000Z | 2021-07-28T22:12:19.000Z | lib/ex_bybit/credentials.ex | fremantle-capital/ex_bybit | 2cc41f796232cd228c21a49fbfd8116a1ad7d7d5 | [
"MIT"
] | null | null | null | defmodule ExBybit.Credentials do
# API key/secret pair used to authenticate requests; both fields must be
# provided when building the struct.
@type api_key :: String.t()
@type api_secret :: String.t()
@type t :: %__MODULE__{api_key: api_key, api_secret: api_secret}
@enforce_keys ~w[api_key api_secret]a
defstruct ~w[api_key api_secret]a
end
| 27.111111 | 66 | 0.733607 |
ff3dbfd8ddc76b7c5a82533c232e9c36b90d658f | 3,135 | ex | Elixir | server/apps/boardr_api/lib/boardr_api/router.ex | AlphaHydrae/boardr | 98eed02801f88c065a24bf13051c5cf96270a5f7 | [
"MIT"
] | 1 | 2021-04-08T17:26:27.000Z | 2021-04-08T17:26:27.000Z | server/apps/boardr_api/lib/boardr_api/router.ex | AlphaHydrae/boardr | 98eed02801f88c065a24bf13051c5cf96270a5f7 | [
"MIT"
] | 1 | 2022-02-13T05:50:46.000Z | 2022-02-13T05:50:46.000Z | server/apps/boardr_api/lib/boardr_api/router.ex | AlphaHydrae/boardr | 98eed02801f88c065a24bf13051c5cf96270a5f7 | [
"MIT"
] | null | null | null | defmodule BoardrApi.Router do
use BoardrApi, :router
use Plug.ErrorHandler
# Every API request must be JSON: only the "json" format is accepted and
# non-JSON request bodies are rejected by require_json/2 below.
pipeline :api do
plug :accepts, ["json"]
plug :require_json
end
scope "/api", BoardrApi do
pipe_through :api
get "/", ApiRootController, :index
get "/stats", StatsController, :show
scope "/auth" do
post "/google", AuthController, :google
post "/local", AuthController, :local
end
resources "/games", GamesController, as: :games, name: :game, only: [:create, :index, :show] do
resources "/actions", Games.ActionsController, only: [:create, :index, :show]
resources "/board", Games.BoardController, only: [:show], singleton: true
resources "/players", Games.PlayersController, only: [:create, :show]
resources "/possible-actions", Games.PossibleActionsController, only: [:index]
end
resources "/identities", IdentitiesController, only: [:create, :index, :show]
resources "/users", UsersController, only: [:create, :show]
# Method not allowed
# Catch-all: register every other verb on every path so unmatched methods
# get a 405 from MethodNotAllowedController instead of a 404.
[&post/3, &put/3, &patch/3, &delete/3, &connect/3, &trace/3]
|> Enum.each(fn verb -> verb.("/*path", MethodNotAllowedController, :match) end)
end
# Plug.ErrorHandler callback: maps exceptions raised during dispatch to
# RFC 7807 "application/problem+json" responses rendered by ErrorView.
# Unknown errors fall back to a bare 500 with no title/type.
def handle_errors(conn, params) do
{status, title, type, headers} =
case params do
%{reason: %Ecto.NoResultsError{}} ->
{404, "No resource found matching the request URI.", :'resource-not-found', []}
%{reason: %Phoenix.Router.NoRouteError{}} ->
{404, "No resource found matching the request URI.", :'resource-not-found', []}
# 405 includes the mandatory Allow header listing the permitted verbs.
%{reason: %BoardrApi.Errors.MethodNotAllowed{allowed_methods: allowed_methods}} ->
{405, "Method not supported for the request URI.", :'method-not-supported', [{"allow", Enum.join(Enum.map(allowed_methods, &String.upcase(Atom.to_string(&1))), ", ")}]}
%{reason: %Phoenix.NotAcceptableError{}} ->
{406, "The target resource does not have a representation in the requested format(s).", :'not-acceptable', []}
%{reason: %BoardrApi.Errors.UnsupportedMediaType{}} ->
{415, "Content-Type #{get_req_header(conn, "content-type")} is not supported for #{String.upcase(conn.method)} /#{Enum.join(conn.path_info, "/")}.", :'unsupported-media-type', []}
_ ->
{500, nil, nil, []}
end
conn
|> put_status(status)
|> put_resp_content_type("application/problem+json")
|> merge_resp_headers(headers)
|> merge_assigns(error_title: title, error_type: type)
|> put_view(BoardrApi.ErrorView)
|> render("error.json")
end
# No-op when an earlier plug already halted the connection.
defp require_json(%{halted: true} = conn, _) do
conn
end
# Rejects requests whose Content-Type header is present but not JSON;
# requests without a Content-Type header pass through untouched.
defp require_json(conn, _) do
content_type = conn |> get_req_header("content-type") |> List.first
if content_type !== nil and !content_type_is_json(content_type) do
# Raised here, turned into a 415 problem response by handle_errors/2.
raise %BoardrApi.Errors.UnsupportedMediaType{conn: conn, router: BoardrApi.Router}
end
conn
end
# A media type counts as JSON when its subtype is "json" or carries a
# "+json" structured-syntax suffix (e.g. "application/problem+json").
# NOTE(review): the tuple element bound as `type` here is the third value
# from ContentType.content_type/1 — presumably the subtype; confirm against
# that library's return shape {:ok, type, subtype, params}.
defp content_type_is_json(content_type) do
case ContentType.content_type(content_type) do
{:ok, _, type, _} -> type === "json" or String.match?(type, ~r/.\+json$/)
_ -> false
end
end
end
| 36.882353 | 189 | 0.646252 |
ff3dca281595b3ea9acd31709d94e68c75991e48 | 5,283 | exs | Elixir | mix.exs | prakriti07/reactive-interaction-gateway | c9d33064982b5ae12e9af7a300e90b3e7973d0b3 | [
"Apache-2.0"
] | null | null | null | mix.exs | prakriti07/reactive-interaction-gateway | c9d33064982b5ae12e9af7a300e90b3e7973d0b3 | [
"Apache-2.0"
] | null | null | null | mix.exs | prakriti07/reactive-interaction-gateway | c9d33064982b5ae12e9af7a300e90b3e7973d0b3 | [
"Apache-2.0"
] | 1 | 2020-07-17T05:17:32.000Z | 2020-07-17T05:17:32.000Z | defmodule RIG.MixProject do
@moduledoc false
use Mix.Project
@description """
RIG, the Reactive Interaction Gateway, provides an easy (and scaleable) way to push messages
from backend services to connected frontends (and vice versa).
"""
# Mix project definition. RIG's own version and the required Elixir version
# are both read from the top-level `version` file (see versions/0).
def project do
%{rig: rig_version, elixir: elixir_version} = versions()
[
# OTP app:
app: :rig,
# Meta data:
name: "Reactive Interaction Gateway",
description: @description,
version: rig_version,
source_url: "https://github.com/Accenture/reactive-interaction-gateway",
homepage_url: "https://accenture.github.io/reactive-interaction-gateway",
docs: docs(),
package: package(),
# Build:
elixir: elixir_version,
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
deps: deps(),
aliases: aliases(),
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers() ++ [:phoenix_swagger],
# Test and test coverage:
test_paths: test_paths(Mix.env()),
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [
coveralls: :test,
"coveralls.detail": :test,
"coveralls.post": :test,
"coveralls.html": :test
]
]
end
# Compile test support helpers only in the :test environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Tests live both alongside the lib sources and under test/.
defp test_paths(_), do: ["lib", "test"]
# Run "mix help compile.app" to learn about applications.
def application do
[
mod: {Rig.Application, []},
# :peerage is loaded but not auto-started (included_applications) —
# presumably started manually by the app; confirm in Rig.Application.
extra_applications: [:logger, :runtime_tools, :prometheus_ex, :prometheus_plugs],
included_applications: [:peerage]
]
end
# Reads and evaluates the "version" file in the project root; it must
# evaluate to a map (e.g. %{rig: ..., elixir: ...}) with no bindings left over.
defp versions do
  {version_map, []} = Code.eval_file("version", ".")
  version_map
end
# ExDoc configuration, used by "mix docs".
defp docs do
  [
    # Website and documentation is built off master,
    # so that's where we should link to:
    source_ref: "master",
    main: "api-reference",
    # Generated API docs are embedded into the website's static assets:
    output: "website/static/source_docs",
    extras: [
      "CHANGELOG.md": [title: "Changelog"]
    ]
  ]
end
# Hex package metadata (used by "mix hex.publish").
defp package do
  [
    name: "rig",
    organization: "Accenture",
    maintainers: ["Kevin Bader", "Mario Macai"],
    # Hex validates licenses against SPDX identifiers; "Apache 2.0" is not a
    # valid SPDX id, the canonical identifier is "Apache-2.0".
    licenses: ["Apache-2.0"],
    links: %{"GitHub" => "https://github.com/Accenture/reactive-interaction-gateway"}
  ]
end
# Run "mix help deps" to learn about dependencies.
# Dev/test-only tooling is tagged with `only:`; `runtime: false` keeps
# build-time tools out of the release.
defp deps do
  [
    # Test coverage reporting:
    {:excoveralls, ">= 0.12.0", only: :test, runtime: false},
    # Linting:
    {:credo, ">= 1.3.0", only: [:dev, :test], runtime: false},
    # Static type checks:
    {:dialyxir, ">= 1.0.0-rc.6", only: [:dev], runtime: false},
    # OTP releases:
    {:distillery, "~> 2.1", runtime: false},
    # Documentation generator:
    {:ex_doc, ">= 0.21.0", only: :dev, runtime: false},
    # Automatically run tests on file changes:
    {:mix_test_watch, "~> 1.0", only: :dev, runtime: false},
    # Read and use application configuration from environment variables:
    {:confex, "~> 3.4"},
    # For providing the global Phx PubSub server:
    {:phoenix_pubsub, "~> 1.1"},
    # for Kafka, partition from MurmurHash(key):
    {:murmur, "~> 1.0"},
    # Peer/node discovery (see :included_applications in application/0):
    {:peerage, "~> 1.0"},
    # For running external commands in Mix tasks:
    {:porcelain, "~> 2.0"},
    # HTTP request handling (wraps Cowboy):
    {:plug, "~> 1.9"},
    # JSON parser, for cloud_event and event_hub:
    {:poison, "~> 3.0 or ~> 4.0"},
    # JSON parser that's supposedly faster than poison:
    {:jason, "~> 1.1"},
    # Streaming JSON parser:
    {:jaxon, "~> 1.0"},
    # JSON Pointer (RFC 6901) implementation for subscriptions:
    {:odgn_json_pointer, "~> 2.5"},
    # Apache Kafka Erlang client library:
    {:brod, "~> 3.9"},
    # Apache Avro encoding/decoding library:
    {:erlavro, "~> 2.8"},
    # Apache Kafka Schema Registry wrapper library:
    {:schemex, "~> 0.1.1"},
    # Caching library using ETS:
    {:memoize, "~> 1.3"},
    # For distributed_set:
    {:timex, "~> 3.6"},
    {:ex2ms, "~> 1.6"},
    {:uuid, "~> 1.1"},
    # For doing HTTP requests, e.g., in kafka_as_http:
    {:httpoison, "~> 1.6"},
    # For property-based testing:
    {:stream_data, "~> 0.4", only: :test},
    # For JSON Web Tokens:
    {:joken, "~> 1.5"},
    # Web framework, for all HTTP endpoints except SSE and WS:
    {:phoenix, "~> 1.4"},
    {:plug_cowboy, "~> 2.1"},
    {:phoenix_swagger, "~> 0.8"},
    # Data validation library, e.g. used for proxy configuration:
    {:vex, "~> 0.8.0"},
    # SSE serialization:
    {:server_sent_event, "~> 1.0"},
    # A library for defining structs with a type without writing boilerplate code:
    {:typed_struct, "~> 0.1.4"},
    # AWS SDK (Kinesis integration):
    {:ex_aws, "~> 2.0"},
    {:ex_aws_kinesis, "~> 2.0"},
    # Mock library for testing:
    {:mox, "~> 0.5", only: :test},
    {:stubr, "~> 1.5.0", only: :test},
    {:fake_server, "~> 2.1", only: :test},
    {:socket, "~> 0.3", only: :test},
    # Prometheus metrics:
    {:prometheus_ex, "~> 3.0"},
    {:prometheus_plugs, "~> 1.1"},
    # NATS client:
    {:gnat, "~> 1.0.0"}
  ]
end
# Task aliases. Within an alias, Mix resolves "compile" to the original task
# rather than the alias, so this does not recurse; update_docs (presumably a
# project-local Mix task -- confirm) runs after every compile.
defp aliases do
  [
    compile: ["compile", "update_docs"]
  ]
end
end
| 30.715116 | 94 | 0.566913 |
ff3dcacea604d02fac7b71b01065f4c418433faf | 892 | exs | Elixir | lib/logger/test/test_helper.exs | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | 1 | 2017-07-25T21:46:25.000Z | 2017-07-25T21:46:25.000Z | lib/logger/test/test_helper.exs | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | null | null | null | lib/logger/test/test_helper.exs | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | 1 | 2017-07-25T21:46:48.000Z | 2017-07-25T21:46:48.000Z | Logger.configure_backend(:console, colors: [enabled: false])
ExUnit.start()
defmodule Logger.Case do
  @moduledoc """
  ExUnit case template with helpers for testing Logger output:
  timestamped-message matching, handler/logger synchronisation and
  log capture.
  """
  use ExUnit.CaseTemplate

  import ExUnit.CaptureIO

  using _ do
    quote do
      import Logger.Case
    end
  end

  @doc """
  Builds a regex that matches `msg` preceded by an `HH:MM:SS.mmm`
  timestamp, as emitted by the console backend. `msg` is escaped, so it is
  matched literally.
  """
  def msg(msg) do
    ~r/\d\d\:\d\d\:\d\d\.\d\d\d #{Regex.escape(msg)}/
  end

  @doc """
  Blocks, polling every 10ms, until `handler` is installed on the
  `:gen_event` manager `manager`.
  """
  def wait_for_handler(manager, handler) do
    installed? = handler in :gen_event.which_handlers(manager)

    if not installed? do
      Process.sleep(10)
      wait_for_handler(manager, handler)
    end
  end

  @doc """
  Blocks, polling every 10ms, until the Logger event manager is up and
  answering calls; returns `:ok`.
  """
  def wait_for_logger() do
    try do
      _ = :gen_event.which_handlers(Logger)
      :ok
    catch
      :exit, _ ->
        Process.sleep(10)
        wait_for_logger()
    end
  end

  @doc """
  Runs `fun` with the logger level set to `level` and returns everything
  written to the `:user` device (where console log output goes) as a
  binary. The level is always restored to `:debug` afterwards.
  """
  def capture_log(level \\ :debug, fun) do
    Logger.configure(level: level)

    capture_io(:user, fn ->
      fun.()
      Logger.flush()
    end)
  after
    Logger.configure(level: :debug)
  end
end
| 18.583333 | 60 | 0.625561 |
ff3dcf6bc024dac6cf9064e2e926fdacfef151f5 | 9,488 | exs | Elixir | test/controllers/json/user_controller_test.exs | pauldub/sentinel | 3230e92b68fa76e9a1f6c577bc1c271900e07c72 | [
"MIT"
] | 125 | 2016-01-29T11:46:20.000Z | 2021-06-08T09:25:38.000Z | test/controllers/json/user_controller_test.exs | pauldub/sentinel | 3230e92b68fa76e9a1f6c577bc1c271900e07c72 | [
"MIT"
] | 54 | 2016-02-18T01:11:58.000Z | 2017-10-19T11:25:26.000Z | test/controllers/json/user_controller_test.exs | britton-jb/sentinel | 3230e92b68fa76e9a1f6c577bc1c271900e07c72 | [
"MIT"
] | 29 | 2016-02-20T12:59:16.000Z | 2018-04-11T14:29:41.000Z | defmodule Json.UserControllerTest do
use Sentinel.ConnCase

alias Mix.Config
alias Sentinel.Changeset.AccountUpdater
alias Sentinel.Changeset.Confirmator
alias Sentinel.Changeset.PasswordResetter
alias Sentinel.Changeset.Registrator

# Password used for every registration attempt in this suite.
@password "secret"

setup do
  # Several tests flip Sentinel's :confirmable / :invitable settings and the
  # custom validator at runtime; restore the defaults after each test.
  on_exit fn ->
    Application.delete_env :sentinel, :user_model_validator
    Config.persist([sentinel: [confirmable: :optional]])
    Config.persist([sentinel: [invitable: true]])
  end

  # JSON API connection used by all requests.
  conn =
    build_conn()
    |> Conn.put_req_header("content-type", "application/json")
    |> Conn.put_req_header("accept", "application/json")

  user = Factory.build(:user)
  # Registration params (with password) and invite params (email only).
  params = %{user: %{email: user.email, password: @password, password_confirmation: @password}}
  invite_params = %{user: %{email: user.email}}

  mocked_token = SecureRandom.urlsafe_base64()
  mocked_confirmation_token = SecureRandom.urlsafe_base64()
  mocked_password_reset_token = SecureRandom.urlsafe_base64()

  # Pre-built emails, used below both as canned return values for the mocked
  # mailer and as expectations for assert_delivered_email.
  welcome_email = Sentinel.Mailer.send_welcome_email(
    %Sentinel.User{
      unconfirmed_email: params.user.email,
      email: params.user.email,
      id: 1
    }, mocked_token)
  invite_email = Sentinel.Mailer.send_invite_email(
    %Sentinel.User{
      email: params.user.email,
      id: 1
    },
    %{
      confirmation_token: mocked_confirmation_token,
      password_reset_token: mocked_password_reset_token
    })

  {
    :ok,
    %{
      conn: conn,
      params: params,
      invite_params: invite_params,
      mocked_token: mocked_token,
      welcome_email: welcome_email,
      invite_email: invite_email
    }
  }
end
# Sign-up with password: user is created unconfirmed (confirmation token
# stored) and a welcome email goes out.
test "default sign up", %{conn: conn, params: params, welcome_email: mocked_mail} do # green
  Config.persist([sentinel: [confirmable: :optional]])
  Config.persist([sentinel: [invitable: false]])

  with_mock Sentinel.Mailer, [:passthrough], [send_welcome_email: fn(_, _) -> mocked_mail end] do
    conn = post conn, auth_path(conn, :callback, "identity"), params
    response = json_response(conn, 201)
    %{"email" => email} = response
    assert email == params.user.email

    user = TestRepo.get_by!(User, email: params.user.email)
    refute is_nil(user.hashed_confirmation_token)
    assert_delivered_email mocked_mail
  end
end

# Same flow with confirmation mandatory; still 201 with a confirmation token.
test "confirmable :required sign up", %{conn: conn, params: params, welcome_email: mocked_mail} do # green
  Config.persist([sentinel: [confirmable: :required]])
  Config.persist([sentinel: [invitable: false]])

  with_mock Sentinel.Mailer, [:passthrough], [send_welcome_email: fn(_, _) -> mocked_mail end] do
    conn = post conn, auth_path(conn, :callback, "identity"), params
    response = json_response(conn, 201)
    %{"email" => email} = response
    assert email == params.user.email

    user = TestRepo.get_by!(User, email: params.user.email)
    refute is_nil(user.hashed_confirmation_token)
    assert_delivered_email mocked_mail
  end
end

# Confirmation disabled: no confirmation email must be sent.
# NOTE(review): the hashed_confirmation_token is still expected to be set
# even with confirmable: false -- presumably intentional; confirm against
# Sentinel's Registrator behaviour.
test "confirmable :false sign up", %{conn: conn, params: params} do # green
  Config.persist([sentinel: [confirmable: false]])
  Config.persist([sentinel: [invitable: false]])

  conn = post conn, auth_path(conn, :callback, "identity"), params
  response = json_response(conn, 201)
  %{"email" => email} = response
  assert email == params.user.email

  user = TestRepo.get_by!(User, email: params.user.email)
  refute is_nil(user.hashed_confirmation_token)
  refute_delivered_email Sentinel.Mailer.NewEmailAddress.build(user, "token")
end

# Invitable flow: sign-up without a password sends an invite email instead.
test "invitable sign up", %{conn: conn, invite_params: params, invite_email: mocked_mail} do # green
  Config.persist([sentinel: [invitable: true]])
  Config.persist([sentinel: [confirmable: false]])

  with_mock Sentinel.Mailer, [:passthrough], [send_invite_email: fn(_, _) -> mocked_mail end] do
    conn = post conn, auth_path(conn, :callback, "identity"), params
    response = json_response(conn, 201)
    %{"email" => email} = response
    assert email == params.user.email
    assert_delivered_email mocked_mail
  end
end

# Invitable + confirmable combined behaves like the invitable flow.
test "invitable and confirmable sign up", %{conn: conn, invite_params: params, invite_email: mocked_mail} do # green
  Config.persist([sentinel: [invitable: true]])
  Config.persist([sentinel: [confirmable: :optional]])

  with_mock Sentinel.Mailer, [:passthrough], [send_invite_email: fn(_, _) -> mocked_mail end] do
    conn = post conn, auth_path(conn, :callback, "identity"), params
    response = json_response(conn, 201)
    %{"email" => email} = response
    assert email == params.user.email
    assert_delivered_email mocked_mail
  end
end
# Invited user sets their password: both the confirmation token and the
# password reset token are consumed, and the account ends up confirmed.
test "invitable setup password", %{conn: conn, params: params} do
  Config.persist([sentinel: [confirmable: :optional]])
  Config.persist([sentinel: [invitable: true]])

  auth = %{
    provider: "identity",
    uid: params.user.email,
    info: %Ueberauth.Auth.Info{email: "user0@example.com"}
  }

  # Create the invited user plus its ueberauth record in one transaction,
  # keeping the confirmation token generated along the way.
  {:ok, %{user: user, confirmation_token: confirmation_token}} =
    TestRepo.transaction(fn ->
      {confirmation_token, changeset} =
        auth.info
        |> Map.from_struct
        |> Registrator.changeset
        |> Confirmator.confirmation_needed_changeset

      user = TestRepo.insert!(changeset)

      %Sentinel.Ueberauth{uid: user.id, user_id: user.id}
      |> Sentinel.Ueberauth.changeset(auth)
      |> TestRepo.insert!

      %{user: user, confirmation_token: confirmation_token}
    end)

  # Attach a password reset token to the auth record, as the invite flow does.
  db_auth = TestRepo.get_by(Sentinel.Ueberauth, user_id: user.id, provider: "identity")
  {password_reset_token, changeset} = PasswordResetter.create_changeset(db_auth)
  TestRepo.update!(changeset)

  conn = put conn, api_user_path(conn, :invited, user.id), %{confirmation_token: confirmation_token, password_reset_token: password_reset_token, password: params.user.password, password_confirmation: params.user.password}
  response = json_response(conn, 200)
  %{"email" => email} = response
  assert email == user.email

  updated_user = TestRepo.get!(User, user.id)
  updated_auth = TestRepo.get!(Sentinel.Ueberauth, db_auth.id)
  assert updated_user.hashed_confirmation_token == nil
  assert updated_auth.hashed_password_reset_token == nil
  assert updated_user.unconfirmed_email == nil
end

# Without the invitable module, a password is mandatory at sign-up.
test "sign up with missing password without the invitable module enabled", %{conn: conn, invite_params: params} do # green
  Config.persist([sentinel: [invitable: false]])

  conn = post conn, auth_path(conn, :callback, "identity"), params
  response = json_response(conn, 401)
  assert response == %{"errors" => [%{"password" => "A password is required to login"}]}
end

# An email address is always mandatory at sign-up.
test "sign up with missing email", %{conn: conn} do # green
  conn = post conn, auth_path(conn, :callback, "identity"), %{"user" => %{"password" => @password}}
  response = json_response(conn, 401)
  assert response == %{"errors" =>
    [
      %{"email" => "An email is required to login"},
    ]
  }
end

# A user_model_validator configured at runtime is applied and its errors
# surface in the 401 response.
test "sign up with custom validations", %{conn: conn, params: params} do
  Config.persist([sentinel: [confirmable: :optional]])
  Config.persist([sentinel: [invitable: false]])

  Application.put_env(:sentinel, :user_model_validator, fn (changeset, _params) ->
    Ecto.Changeset.add_error(changeset, :password, "too short")
  end)

  conn = post conn, auth_path(conn, :callback, "identity"), params
  response = json_response(conn, 401)
  assert response == %{"errors" => [%{"password" => "too short"}]}
end
# A wrong confirmation token is rejected with a 422 and a validation error.
test "confirm user with a bad token", %{conn: conn, params: %{user: params}} do
  {_, changeset} =
    params
    |> Registrator.changeset
    |> Confirmator.confirmation_needed_changeset
  user = TestRepo.insert!(changeset)

  conn = get conn, api_user_path(conn, :confirm), %{id: user.id, confirmation_token: "bad_token"}
  response = json_response(conn, 422)
  assert response == %{"errors" => [%{"confirmation_token" => "invalid"}]}
end

# A valid token confirms the account (redirect; token cleared, timestamp set).
test "confirm a user", %{conn: conn, params: %{user: params}} do
  {token, changeset} =
    params
    |> Registrator.changeset
    |> Confirmator.confirmation_needed_changeset
  user = TestRepo.insert!(changeset)

  conn = get conn, api_user_path(conn, :confirm), %{id: user.id, confirmation_token: token}
  assert response(conn, 302)

  updated_user = TestRepo.get! User, user.id
  assert updated_user.hashed_confirmation_token == nil
  assert updated_user.confirmed_at != nil
end

# Changing the email re-issues a confirmation; confirming it promotes
# unconfirmed_email to email.
test "confirm a user's new email", %{conn: conn, params: %{user: user}} do
  {token, registrator_changeset} =
    user
    |> Registrator.changeset
    |> Confirmator.confirmation_needed_changeset

  # Register and confirm the initial address first.
  user =
    registrator_changeset
    |> TestRepo.insert!
    |> Confirmator.confirmation_changeset(%{"confirmation_token" => token})
    |> TestRepo.update!

  # Request the email change; this stores unconfirmed_email plus a new token.
  {token, updater_changeset} = AccountUpdater.changeset(user, %{"email" => "new@example.com"})
  updated_user = TestRepo.update!(updater_changeset)

  conn = get conn, api_user_path(conn, :confirm), %{id: updated_user.id, confirmation_token: token}
  assert response(conn, 302)

  updated_user = TestRepo.get! User, user.id
  assert updated_user.hashed_confirmation_token == nil
  assert updated_user.unconfirmed_email == nil
  assert updated_user.email == "new@example.com"
end
end
| 35.939394 | 223 | 0.679701 |
ff3e001681be6b68974cbf87686040313eb439cd | 5,030 | ex | Elixir | clients/composer/lib/google_api/composer/v1/model/software_config.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/composer/lib/google_api/composer/v1/model/software_config.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | null | null | null | clients/composer/lib/google_api/composer/v1/model/software_config.ex | ukrbublik/elixir-google-api | 364cec36bc76f60bec94cbcad34844367a29d174 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Composer.V1.Model.SoftwareConfig do
@moduledoc """
Specifies the selection and configuration of software inside the environment.
## Attributes
* `airflowConfigOverrides` (*type:* `map()`, *default:* `nil`) - Optional. Apache Airflow configuration properties to override. Property keys contain the section and property names, separated by a hyphen, for example "core-dags_are_paused_at_creation". Section names must not contain hyphens ("-"), opening square brackets ("["), or closing square brackets ("]"). The property name must not be empty and must not contain an equals sign ("=") or semicolon (";"). Section and property names must not contain a period ("."). Apache Airflow configuration property names must be written in [snake_case](https://en.wikipedia.org/wiki/Snake_case). Property values can contain any character, and can be written in any lower/upper case format. Certain Apache Airflow configuration property values are [blacklisted](/composer/docs/how-to/managing/setting-airflow-configurations#airflow_configuration_blacklists), and cannot be overridden.
* `envVariables` (*type:* `map()`, *default:* `nil`) - Optional. Additional environment variables to provide to the Apache Airflow scheduler, worker, and webserver processes. Environment variable names must match the regular expression `a-zA-Z_*`. They cannot specify Apache Airflow software configuration overrides (they cannot match the regular expression `AIRFLOW__[A-Z0-9_]+__[A-Z0-9_]+`), and they cannot match any of the following reserved names: * `AIRFLOW_HOME` * `C_FORCE_ROOT` * `CONTAINER_NAME` * `DAGS_FOLDER` * `GCP_PROJECT` * `GCS_BUCKET` * `GKE_CLUSTER_NAME` * `SQL_DATABASE` * `SQL_INSTANCE` * `SQL_PASSWORD` * `SQL_PROJECT` * `SQL_REGION` * `SQL_USER`
* `imageVersion` (*type:* `String.t`, *default:* `nil`) - The version of the software running in the environment. This encapsulates both the version of Cloud Composer functionality and the version of Apache Airflow. It must match the regular expression `composer-([0-9]+\\.[0-9]+\\.[0-9]+|latest)-airflow-[0-9]+\\.[0-9]+(\\.[0-9]+.*)?`. When used as input, the server also checks if the provided version is supported and denies the request for an unsupported version. The Cloud Composer portion of the version is a [semantic version](https://semver.org) or `latest`. When the patch version is omitted, the current Cloud Composer patch version is selected. When `latest` is provided instead of an explicit version number, the server replaces `latest` with the current Cloud Composer version and stores that version number in the same field. The portion of the image version that follows *airflow-* is an official Apache Airflow repository [release name](https://github.com/apache/incubator-airflow/releases). See also [Version List](/composer/docs/concepts/versioning/composer-versions).
* `pypiPackages` (*type:* `map()`, *default:* `nil`) - Optional. Custom Python Package Index (PyPI) packages to be installed in the environment. Keys refer to the lowercase package name such as "numpy" and values are the lowercase extras and version specifier such as "==1.12.0", "[devel,gcp_api]", or "[devel]>=1.8.2, <1.9.2". To specify a package without pinning it to a version specifier, use the empty string as the value.
* `pythonVersion` (*type:* `String.t`, *default:* `nil`) - Optional. The major version of Python used to run the Apache Airflow scheduler, worker, and webserver processes. Can be set to '2' or '3'. If not specified, the default is '2'. Cannot be updated.
"""
# Auto-generated model plumbing -- regenerate rather than edit by hand.
use GoogleApi.Gax.ModelBase

# Struct type; field semantics are described in the @moduledoc above.
@type t :: %__MODULE__{
  :airflowConfigOverrides => map(),
  :envVariables => map(),
  :imageVersion => String.t(),
  :pypiPackages => map(),
  :pythonVersion => String.t()
}

# Field declarations consumed by GoogleApi.Gax.ModelBase (and the Poison
# defimpls below) for JSON (de)serialization.
field(:airflowConfigOverrides, type: :map)
field(:envVariables, type: :map)
field(:imageVersion)
field(:pypiPackages, type: :map)
field(:pythonVersion)
end
# JSON decoding delegates to the decode/2 generated by ModelBase.
defimpl Poison.Decoder, for: GoogleApi.Composer.V1.Model.SoftwareConfig do
  def decode(value, options) do
    GoogleApi.Composer.V1.Model.SoftwareConfig.decode(value, options)
  end
end

# JSON encoding uses the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Composer.V1.Model.SoftwareConfig do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 85.254237 | 1,090 | 0.743936 |
ff3e0f73b514907a83313d0d93fefbdafb673f03 | 540 | ex | Elixir | apps/rig/lib/mix/tasks/smoke_tests.ex | arana3/reactive-interaction-gateway | 793648bcc5b8b05fc53df1f5f97818fb40ca84be | [
"Apache-2.0"
] | 518 | 2017-11-09T13:10:49.000Z | 2022-03-28T14:29:50.000Z | apps/rig/lib/mix/tasks/smoke_tests.ex | arana3/reactive-interaction-gateway | 793648bcc5b8b05fc53df1f5f97818fb40ca84be | [
"Apache-2.0"
] | 270 | 2017-11-10T00:11:34.000Z | 2022-02-27T13:08:16.000Z | apps/rig/lib/mix/tasks/smoke_tests.ex | arana3/reactive-interaction-gateway | 793648bcc5b8b05fc53df1f5f97818fb40ca84be | [
"Apache-2.0"
] | 67 | 2017-12-19T20:16:37.000Z | 2022-03-31T10:43:04.000Z | defmodule Mix.Tasks.SmokeTest do
@moduledoc """
Runs the smoke test.
"""
use Mix.Task
require Logger
@shortdoc "Runs the smoke test."
# Runs the smoke-test docker-compose setup and pattern-matches on exit
# status 0, so the Mix task crashes (fails) when any container exits
# non-zero.
def run(_args) do
  Application.ensure_all_started(:porcelain)

  compose_args = [
    "-f",
    "smoke_tests.docker-compose.yml",
    "up",
    "--build",
    "--abort-on-container-exit"
  ]

  # Forward the compose output to stdout line by line.
  output = IO.binstream(:stdio, :line)

  %Porcelain.Result{status: 0} =
    Porcelain.exec("docker-compose", compose_args, out: output, err: :out, dir: "smoke_tests")
end
end
| 18.62069 | 76 | 0.605556 |
ff3e39c7dd3275e1a7752dd566168ce3671fd206 | 1,928 | ex | Elixir | clients/iam/lib/google_api/iam/v1/model/patch_service_account_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/iam/lib/google_api/iam/v1/model/patch_service_account_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/iam/lib/google_api/iam/v1/model/patch_service_account_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.IAM.V1.Model.PatchServiceAccountRequest do
@moduledoc """
The request for PatchServiceAccount. You can patch only the `display_name` and `description` fields. You must use the `update_mask` field to specify which of these fields you want to patch. Only the fields specified in the request are guaranteed to be returned in the response. Other fields may be empty in the response.
## Attributes
* `serviceAccount` (*type:* `GoogleApi.IAM.V1.Model.ServiceAccount.t`, *default:* `nil`) -
* `updateMask` (*type:* `String.t`, *default:* `nil`) -
"""
# Auto-generated model plumbing -- regenerate rather than edit by hand.
use GoogleApi.Gax.ModelBase

@type t :: %__MODULE__{
  :serviceAccount => GoogleApi.IAM.V1.Model.ServiceAccount.t() | nil,
  :updateMask => String.t() | nil
}

# Field declarations consumed by GoogleApi.Gax.ModelBase for JSON
# (de)serialization.
field(:serviceAccount, as: GoogleApi.IAM.V1.Model.ServiceAccount)
field(:updateMask)
end
# JSON decoding delegates to the decode/2 generated by ModelBase.
defimpl Poison.Decoder, for: GoogleApi.IAM.V1.Model.PatchServiceAccountRequest do
  def decode(value, options) do
    GoogleApi.IAM.V1.Model.PatchServiceAccountRequest.decode(value, options)
  end
end

# JSON encoding uses the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.IAM.V1.Model.PatchServiceAccountRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 38.56 | 322 | 0.741183 |
ff3e5697aab639f6cc12b3945ff1cd1660c6af77 | 468 | ex | Elixir | lib/mail_slurp_api/model/unread_count.ex | sumup-bank/mailslurp-client-elixir | 87ccdedf2f0f4cd3e50f5781ffb088142e3cf4e8 | [
"MIT"
] | 1 | 2021-06-17T18:07:49.000Z | 2021-06-17T18:07:49.000Z | lib/mail_slurp_api/model/unread_count.ex | sumup-bank/mailslurp-client-elixir | 87ccdedf2f0f4cd3e50f5781ffb088142e3cf4e8 | [
"MIT"
] | null | null | null | lib/mail_slurp_api/model/unread_count.ex | sumup-bank/mailslurp-client-elixir | 87ccdedf2f0f4cd3e50f5781ffb088142e3cf4e8 | [
"MIT"
] | 1 | 2021-03-16T18:55:56.000Z | 2021-03-16T18:55:56.000Z | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
# Auto-generated OpenAPI model -- regenerate rather than edit by hand.
defmodule MailSlurpAPI.Model.UnreadCount do
  @moduledoc """
  MailSlurp API model wrapping a single integer `count`
  (presumably the number of unread emails -- see the MailSlurp API docs).
  """

  @derive [Poison.Encoder]
  defstruct [
    :"count"
  ]

  @type t :: %__MODULE__{
    :"count" => integer()
  }
end
# Decoding is the identity: the decoded map needs no further transformation.
defimpl Poison.Decoder, for: MailSlurpAPI.Model.UnreadCount do
  def decode(value, _options) do
    value
  end
end
| 18 | 91 | 0.683761 |
ff3e5cdd19caedae303fafa2a34eaef8884e2e84 | 974 | ex | Elixir | test/support/channel_case.ex | robvandenbogaard/elm-blogger | 53ba88e125c9ddf90a5e5e5a0b68914bf6d81003 | [
"BSD-3-Clause"
] | 19 | 2017-02-26T10:18:00.000Z | 2022-01-18T18:21:28.000Z | test/support/channel_case.ex | robvandenbogaard/elm-blogger | 53ba88e125c9ddf90a5e5e5a0b68914bf6d81003 | [
"BSD-3-Clause"
] | 1 | 2017-10-29T22:37:47.000Z | 2017-10-29T22:37:47.000Z | test/support/channel_case.ex | robvandenbogaard/elm-blogger | 53ba88e125c9ddf90a5e5e5a0b68914bf6d81003 | [
"BSD-3-Clause"
] | 4 | 2017-08-07T23:33:01.000Z | 2020-02-27T12:36:25.000Z | defmodule ElmBlogger.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
imports other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
# Code injected into every module that does `use ElmBlogger.ChannelCase`.
using do
  quote do
    # Import conveniences for testing with channels
    use Phoenix.ChannelTest

    alias ElmBlogger.Repo
    import Ecto
    import Ecto.Changeset
    import Ecto.Query, only: [from: 1, from: 2]

    # The default endpoint for testing
    @endpoint ElmBlogger.Endpoint
  end
end

setup tags do
  # Non-async tests share the DB connection, so the wrapping transaction is
  # reset before each test. NOTE(review): restart_test_transaction is the
  # pre-Ecto-2 sandbox API -- presumably this project pins an old Ecto;
  # confirm before upgrading.
  unless tags[:async] do
    Ecto.Adapters.SQL.restart_test_transaction(ElmBlogger.Repo, [])
  end

  :ok
end
end
| 23.190476 | 69 | 0.704312 |
ff3e7a321d9eb44aa5aaaed7916b98b69964f6b0 | 1,804 | ex | Elixir | clients/container_analysis/lib/google_api/container_analysis/v1beta1/model/set_iam_policy_request.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/container_analysis/lib/google_api/container_analysis/v1beta1/model/set_iam_policy_request.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/container_analysis/lib/google_api/container_analysis/v1beta1/model/set_iam_policy_request.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ContainerAnalysis.V1beta1.Model.SetIamPolicyRequest do
@moduledoc """
Request message for `SetIamPolicy` method.
## Attributes
* `policy` (*type:* `GoogleApi.ContainerAnalysis.V1beta1.Model.Policy.t`, *default:* `nil`) - REQUIRED: The complete policy to be applied to the `resource`. The size of the policy is limited to a few 10s of KB. An empty policy is a valid policy but certain Google Cloud services (such as Projects) might reject them.
"""
# Auto-generated model plumbing -- regenerate rather than edit by hand.
use GoogleApi.Gax.ModelBase

@type t :: %__MODULE__{
  :policy => GoogleApi.ContainerAnalysis.V1beta1.Model.Policy.t() | nil
}

# Field declaration consumed by GoogleApi.Gax.ModelBase for JSON
# (de)serialization.
field(:policy, as: GoogleApi.ContainerAnalysis.V1beta1.Model.Policy)
end
# JSON decoding delegates to the decode/2 generated by ModelBase.
defimpl Poison.Decoder, for: GoogleApi.ContainerAnalysis.V1beta1.Model.SetIamPolicyRequest do
  def decode(value, options) do
    GoogleApi.ContainerAnalysis.V1beta1.Model.SetIamPolicyRequest.decode(value, options)
  end
end

# JSON encoding uses the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.ContainerAnalysis.V1beta1.Model.SetIamPolicyRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 38.382979 | 320 | 0.759978 |
ff3e84b17e311d9176c6c907da868c60d8f8d390 | 1,065 | ex | Elixir | projects/standup/lib/standup_web/channels/user_socket.ex | erik/sketches | 0a454ada58dee6db576e93cb2216dd750290329e | [
"MIT"
] | 1 | 2020-02-11T06:00:11.000Z | 2020-02-11T06:00:11.000Z | projects/standup/lib/standup_web/channels/user_socket.ex | erik/sketches | 0a454ada58dee6db576e93cb2216dd750290329e | [
"MIT"
] | 1 | 2017-09-23T19:41:29.000Z | 2017-09-25T05:12:38.000Z | projects/standup/lib/standup_web/channels/user_socket.ex | erik/sketches | 0a454ada58dee6db576e93cb2216dd750290329e | [
"MIT"
] | null | null | null | defmodule StandupWeb.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", StandupWeb.RoomChannel
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
# Accepts every connection as-is; no authentication or assigns are set up.
def connect(_params, socket, _connect_info), do: {:ok, socket}
# Socket id's are topics that allow you to identify all sockets for a given user:
#
#     def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
#     StandupWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 31.323529 | 83 | 0.696714 |
ff3e84d77bdd399aeea632aa8a29ed11142700e5 | 1,585 | ex | Elixir | 2017-06-23/pattern-matching/pattern_matching.ex | yfractal/shenzhen-meetup | 65ff0a7ab4d74748d5caa5c71a4179790634b266 | [
"MIT"
] | 19 | 2017-06-22T08:06:04.000Z | 2019-10-28T02:36:20.000Z | 2017-06-23/pattern-matching/pattern_matching.ex | caicaishmily/shenzhen-meetup | 59dabe90f30b8b20ea37c77c84791ffb0ab6757b | [
"MIT"
] | null | null | null | 2017-06-23/pattern-matching/pattern_matching.ex | caicaishmily/shenzhen-meetup | 59dabe90f30b8b20ea37c77c84791ffb0ab6757b | [
"MIT"
] | 6 | 2019-08-02T06:09:24.000Z | 2020-12-30T09:04:31.000Z |
# Live-coding script for a meetup talk on pattern matching. Several lines are
# written to fail on purpose (MatchError / CompileError) to demonstrate how
# matching works -- do not run it top to bottom.

# (MatchError) no match of right hand side value
# the left-hand side is matched against the right-hand side as far as possible
#
# destructure complex data
#

a = 1
1 = a

# variables are rebound only when they appear on the left-hand side of the
# `=` sign; the next line therefore raises a MatchError
2 = a

# the pin operator ^ is used to access the previously bound values
b = 3
^b = 4

# what if you do (c is unbound here, so this does not compile)
1 = c

#
# pattern matching on list, tuple
#

list = [1, 2, 3, 4]
[a, b, c, d] = list; b
[head | tail] = list; tail
[a, a | tail] = list; tail
[a, a, b | tail] = list; tail

#
# pattern matching on map
#

# ** (CompileError) iex:14: illegal use of variable key inside map key match,
# maps can only match on existing variable by using ^key
%{key => value} = %{"foo" => "bar"}
%{key => "bar"} = %{"foo" => "bar"}
key = "foo"
%{^key => value} = %{"foo" => "bar"}; value

presenter = %{name: "yin weijun", role: "developer", languages: [:elixir, :ruby, :javascript, :scala]}
# you can pattern match on selected keys
%{name: name} = presenter
%{name: name, unknown: unknown} = presenter

#
# the pin operator with function
#

greeting = "Hello"
greet = fn
  (^greeting, name) -> "Hi #{name}"
  (greeting, name) -> "#{greeting}, #{name}"
end
greet.("Hello", "Sean")
greet.("Morning", "Sean")

#
# special things
#

# ignore value with underscore _
_
# illegal: a function call cannot appear on the left-hand side of a match
length([1, [2], 3]) = 3

# other ways to look at the `=` sign
# 1. variables bind once per match in elixir, while variables bind once in erlang
# 2. an assertion, as in algebra. e.g. x = y + 1

# References:
# https://elixir-lang.org/getting-started/pattern-matching.html
# chapter 2, programming elixir 1.3
# https://elixirschool.com/lessons/basics/pattern-matching/
| 16.510417 | 102 | 0.632808 |
ff3eba52636b74cf17393e8a298a2e29aa717d21 | 982 | ex | Elixir | lib/alpha_vantage.ex | Cameron-Kurth/elixir-alpha_vantage | b1c489211090b98726db536bdea4ddd20eee8bc5 | [
"MIT"
] | null | null | null | lib/alpha_vantage.ex | Cameron-Kurth/elixir-alpha_vantage | b1c489211090b98726db536bdea4ddd20eee8bc5 | [
"MIT"
] | null | null | null | lib/alpha_vantage.ex | Cameron-Kurth/elixir-alpha_vantage | b1c489211090b98726db536bdea4ddd20eee8bc5 | [
"MIT"
] | null | null | null | defmodule AlphaVantage do
@moduledoc """
  A set of functions to fetch data from the [Alpha Vantage API](https://www.alphavantage.co/documentation/#) in a flexible manner, as well as retrieve the valid inputs for the API.
"""
alias AlphaVantage.Gateway
@doc """
Returns data per the provided function and inputs, accepted as a keyword list or map.
  Please see the Alpha Vantage [documentation](https://www.alphavantage.co/documentation/#) for details as to what parameters and options are to be utilized for each function.
  The Cryptocurrencies, Forex, StockTimeSeries, and TechnicalIndicators modules can be leveraged for more structured and explicit inputs, per function.
"""
  @spec query(Keyword.t() | map) :: Gateway.response()
  # Bodiless head declaring the default argument (an empty keyword list).
  def query(params \\ [])
  # Maps are normalized to keyword lists and re-dispatched through query/1.
  def query(params) when is_map(params), do: params |> Map.to_list() |> query
  # Keyword lists are forwarded to the gateway as-is.
  def query(params) when is_list(params) do
    Gateway.query(params)
  end
  # Any other argument shape is rejected with an error tuple.
  def query(_), do: {:error, "Invalid query."}
end
| 39.28 | 178 | 0.733198 |
ff3eee596abf5bb18618c6f7ee77d3251515510f | 365 | exs | Elixir | config/test.exs | MLSDev/ecto_extensions | a36ade6dd821a67a2569c5d5573e5c989b1d0141 | [
"MIT"
] | 7 | 2019-03-28T05:39:05.000Z | 2020-05-02T00:19:22.000Z | config/test.exs | MLSDev/ecto_extensions | a36ade6dd821a67a2569c5d5573e5c989b1d0141 | [
"MIT"
] | null | null | null | config/test.exs | MLSDev/ecto_extensions | a36ade6dd821a67a2569c5d5573e5c989b1d0141 | [
"MIT"
] | null | null | null | use Mix.Config
# Repo connection for the test environment. Credentials can be overridden via
# environment variables and fall back to the local "postgres" defaults.
config :ecto_extensions, EctoExtensions.Repo,
  username: System.get_env("ECTO_EXTENSIONS_DB_USERNAME") || "postgres",
  password: System.get_env("ECTO_EXTENSIONS_DB_PASSWORD") || "postgres",
  database: "ecto_extensions_test",
  hostname: "localhost",
  # Sandbox pool: each test runs in a transaction that is rolled back afterwards
  pool: Ecto.Adapters.SQL.Sandbox
# Repos managed by the `mix ecto.*` tasks
config :ecto_extensions,
  ecto_repos: [EctoExtensions.Repo]
| 30.416667 | 72 | 0.775342 |
ff3ef62ef540e83433d42828c09f65a7a2b6b66f | 568 | exs | Elixir | mix.exs | 3decibels/serex | 1ae1b92d02e7e1a7ce5ff900908dd72e9b784738 | [
"MIT"
] | 2 | 2021-02-26T04:49:37.000Z | 2022-02-07T04:50:43.000Z | mix.exs | 3decibels/serex | 1ae1b92d02e7e1a7ce5ff900908dd72e9b784738 | [
"MIT"
] | null | null | null | mix.exs | 3decibels/serex | 1ae1b92d02e7e1a7ce5ff900908dd72e9b784738 | [
"MIT"
] | null | null | null | defmodule Serex.MixProject do
use Mix.Project
  # Mix project configuration: app name, version, and build settings.
  def project do
    [
      app: :serex,
      version: "0.1.0",
      # Minimum supported Elixir version
      elixir: "~> 1.10",
      # Treat missing application dependencies as errors only in production builds
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end
  # Run "mix help compile.app" to learn about applications.
  # OTP application configuration: start :logger alongside the defaults.
  def application do
    [
      extra_applications: [:logger]
    ]
  end
  # Run "mix help deps" to learn about dependencies.
  # Project dependencies: none at the moment; the commented examples show the
  # Hex and Git dependency formats.
  defp deps do
    [
      # {:dep_from_hexpm, "~> 0.3.0"},
      # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
    ]
  end
end
| 19.586207 | 87 | 0.573944 |
ff3f0580618881a5316f6678092cad1fdd4b0c29 | 8,296 | exs | Elixir | .credo.exs | mosic/credo | 2053ac07df84f5ccdd79fa0150d7a89f8556feab | [
"MIT"
] | null | null | null | .credo.exs | mosic/credo | 2053ac07df84f5ccdd79fa0150d7a89f8556feab | [
"MIT"
] | null | null | null | .credo.exs | mosic/credo | 2053ac07df84f5ccdd79fa0150d7a89f8556feab | [
"MIT"
] | null | null | null | # This file contains the configuration for Credo and you are probably reading
# this after creating it with `mix credo.gen.config`.
#
# If you find anything wrong or unclear in this file, please report an
# issue on GitHub: https://github.com/rrrene/credo/issues
#
%{
#
# You can have as many configs as you like in the `configs:` field.
configs: [
%{
#
# Run any config using `mix credo -C <name>`. If no config name is given
# "default" is used.
#
name: "default",
#
# These are the files included in the analysis:
files: %{
#
# You can give explicit globs or simply directories.
# In the latter case `**/*.{ex,exs}` will be used.
#
included: [
"lib/",
"src/",
"test/",
"web/",
"apps/*/lib/",
"apps/*/src/",
"apps/*/test/",
"apps/*/web/"
],
excluded: [~r"/_build/", ~r"/deps/", ~r"/node_modules/"]
},
#
# Load and configure plugins here:
#
plugins: [],
#
# If you create your own checks, you must specify the source files for
# them here, so they can be loaded by Credo before running the analysis.
#
requires: [],
#
# If you want to enforce a style guide and need a more traditional linting
# experience, you can change `strict` to `true` below:
#
strict: false,
#
# To modify the timeout for parsing files, change this value:
#
parse_timeout: 5000,
#
# If you want to use uncolored output by default, you can change `color`
# to `false` below:
#
color: true,
#
# You can customize the parameters of any check by adding a second element
# to the tuple.
#
# To disable a check put `false` as second element:
#
# {Credo.Check.Design.DuplicatedCode, false}
#
checks: %{
enabled: [
#
## Consistency Checks
#
{Credo.Check.Consistency.ExceptionNames, []},
{Credo.Check.Consistency.LineEndings, []},
{Credo.Check.Consistency.ParameterPatternMatching, []},
{Credo.Check.Consistency.SpaceAroundOperators, []},
{Credo.Check.Consistency.SpaceInParentheses, []},
{Credo.Check.Consistency.TabsOrSpaces, []},
#
## Design Checks
#
# You can customize the priority of any check
# Priority values are: `low, normal, high, higher`
#
{Credo.Check.Design.AliasUsage,
[priority: :low, if_nested_deeper_than: 2, if_called_more_often_than: 0]},
# You can also customize the exit_status of each check.
# If you don't want TODO comments to cause `mix credo` to fail, just
# set this value to 0 (zero).
#
{Credo.Check.Design.TagTODO, [exit_status: 2]},
{Credo.Check.Design.TagFIXME, []},
#
## Readability Checks
#
{Credo.Check.Readability.AliasOrder, []},
{Credo.Check.Readability.FunctionNames, []},
{Credo.Check.Readability.LargeNumbers, []},
{Credo.Check.Readability.MaxLineLength, [priority: :low, max_length: 120]},
{Credo.Check.Readability.ModuleAttributeNames, []},
{Credo.Check.Readability.ModuleDoc, []},
{Credo.Check.Readability.ModuleNames, []},
{Credo.Check.Readability.ParenthesesInCondition, []},
{Credo.Check.Readability.ParenthesesOnZeroArityDefs, []},
{Credo.Check.Readability.PipeIntoAnonymousFunctions, []},
{Credo.Check.Readability.PredicateFunctionNames, []},
{Credo.Check.Readability.PreferImplicitTry, []},
{Credo.Check.Readability.RedundantBlankLines, []},
{Credo.Check.Readability.Semicolons, []},
{Credo.Check.Readability.SpaceAfterCommas, []},
{Credo.Check.Readability.StringSigils, []},
{Credo.Check.Readability.TrailingBlankLine, []},
{Credo.Check.Readability.TrailingWhiteSpace, []},
{Credo.Check.Readability.UnnecessaryAliasExpansion, []},
{Credo.Check.Readability.VariableNames, []},
{Credo.Check.Readability.WithSingleClause, []},
#
## Refactoring Opportunities
#
{Credo.Check.Refactor.CondStatements, []},
{Credo.Check.Refactor.CyclomaticComplexity, []},
{Credo.Check.Refactor.FunctionArity, []},
{Credo.Check.Refactor.LongQuoteBlocks, []},
{Credo.Check.Refactor.MatchInCondition, []},
{Credo.Check.Refactor.MapJoin, []},
{Credo.Check.Refactor.NegatedConditionsInUnless, []},
{Credo.Check.Refactor.NegatedConditionsWithElse, []},
{Credo.Check.Refactor.Nesting, []},
{Credo.Check.Refactor.UnlessWithElse, []},
{Credo.Check.Refactor.WithClauses, []},
{Credo.Check.Refactor.FilterFilter, []},
{Credo.Check.Refactor.RejectReject, []},
{Credo.Check.Refactor.RedundantWithClauseResult, []},
#
## Warnings
#
{Credo.Check.Warning.ApplicationConfigInModuleAttribute, []},
{Credo.Check.Warning.BoolOperationOnSameValues, []},
{Credo.Check.Warning.ExpensiveEmptyEnumCheck, []},
{Credo.Check.Warning.IExPry, []},
{Credo.Check.Warning.IoInspect, []},
{Credo.Check.Warning.OperationOnSameValues, []},
{Credo.Check.Warning.OperationWithConstantResult, []},
{Credo.Check.Warning.RaiseInsideRescue, []},
{Credo.Check.Warning.SpecWithStruct, []},
{Credo.Check.Warning.WrongTestFileExtension, []},
{Credo.Check.Warning.UnusedEnumOperation, []},
{Credo.Check.Warning.UnusedFileOperation, []},
{Credo.Check.Warning.UnusedKeywordOperation, []},
{Credo.Check.Warning.UnusedListOperation, []},
{Credo.Check.Warning.UnusedPathOperation, []},
{Credo.Check.Warning.UnusedRegexOperation, []},
{Credo.Check.Warning.UnusedStringOperation, []},
{Credo.Check.Warning.UnusedTupleOperation, []},
{Credo.Check.Warning.UnsafeExec, []}
],
disabled: [
#
        # Checks scheduled for the next check update (opt-in for now; move a
        # check to the `:enabled` list above to activate it)
#
# Controversial and experimental checks (opt-in, just move the check to `:enabled`
# and be sure to use `mix credo --strict` to see low priority checks)
#
{Credo.Check.Consistency.MultiAliasImportRequireUse, []},
{Credo.Check.Consistency.UnusedVariableNames, []},
{Credo.Check.Design.DuplicatedCode, []},
{Credo.Check.Design.SkipTestWithoutComment, []},
{Credo.Check.Readability.AliasAs, []},
{Credo.Check.Readability.BlockPipe, []},
{Credo.Check.Readability.ImplTrue, []},
{Credo.Check.Readability.MultiAlias, []},
{Credo.Check.Readability.SeparateAliasRequire, []},
{Credo.Check.Readability.SinglePipe, []},
{Credo.Check.Readability.Specs, []},
{Credo.Check.Readability.StrictModuleLayout, []},
{Credo.Check.Readability.WithCustomTaggedTuple, []},
{Credo.Check.Refactor.ABCSize, []},
{Credo.Check.Refactor.AppendSingleItem, []},
{Credo.Check.Refactor.DoubleBooleanNegation, []},
{Credo.Check.Refactor.FilterReject, []},
{Credo.Check.Refactor.MapMap, []},
{Credo.Check.Refactor.ModuleDependencies, []},
{Credo.Check.Refactor.NegatedIsNil, []},
{Credo.Check.Refactor.PipeChainStart, []},
{Credo.Check.Refactor.RejectFilter, []},
{Credo.Check.Refactor.VariableRebinding, []},
{Credo.Check.Warning.LazyLogging, []},
{Credo.Check.Warning.LeakyEnvironment, []},
{Credo.Check.Warning.MapGetUnsafePass, []},
{Credo.Check.Warning.MixEnv, []},
{Credo.Check.Warning.UnsafeToAtom, []}
# {Credo.Check.Refactor.MapInto, []},
#
# Custom checks can be created using `mix credo.gen.check`.
#
]
}
}
]
}
| 40.077295 | 99 | 0.585945 |
ff3f22eee65486f58beeb8a59b47060df71ed563 | 1,180 | exs | Elixir | config/config.exs | kadmohardy/mbanking | 42a45f78956eac95597e72ade10cb0291ed5aa76 | [
"MIT"
] | null | null | null | config/config.exs | kadmohardy/mbanking | 42a45f78956eac95597e72ade10cb0291ed5aa76 | [
"MIT"
] | null | null | null | config/config.exs | kadmohardy/mbanking | 42a45f78956eac95597e72ade10cb0291ed5aa76 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
# General application configuration
use Mix.Config
config :mbanking,
  ecto_repos: [Mbanking.Repo]
# Configures the endpoint
config :mbanking, MbankingWeb.Endpoint,
  url: [host: "localhost"],
  # NOTE(review): secret_key_base is hardcoded here; for production it should
  # be loaded from the environment — confirm the deployment configuration.
  secret_key_base: "tO14VbtQhy3YaG5Ljktnuw+sN7T1aPO4gScHZ28RjSVecV/Av6+VRkkpyxV5ERVF",
  render_errors: [view: MbankingWeb.ErrorView, accepts: ~w(json), layout: false],
  pubsub_server: Mbanking.PubSub,
  live_view: [signing_salt: "teQcTgxH"]
# Configures Elixir's Logger
config :logger, :console,
  format: "$time $metadata[$level] $message\n",
  metadata: [:request_id]
# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason
# Set Cipher configs
# NOTE(review): these cipher phrases look like test placeholders; production
# values should not live in version control — confirm env-specific overrides.
config :cipher,
  keyphrase: "testiekeyphraseforcipher",
  ivphrase: "testieivphraseforcipher",
  magic_token: "magictoken"
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 31.052632 | 86 | 0.770339 |
ff3f55b7dc64b04bc0f4bcfec3c6a66092f7bb52 | 1,789 | ex | Elixir | clients/civic_info/lib/google_api/civic_info/v2/model/division_search_response.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/civic_info/lib/google_api/civic_info/v2/model/division_search_response.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/civic_info/lib/google_api/civic_info/v2/model/division_search_response.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.CivicInfo.V2.Model.DivisionSearchResponse do
  @moduledoc """
  The result of a division search query.

  ## Attributes

  - kind (String.t): Identifies what kind of resource this is. Value: the fixed string \"civicinfo#divisionSearchResponse\". Defaults to: `null`.
  - results ([DivisionSearchResult]): Defaults to: `null`.
  """
  # Auto-generated model: `use` brings in the `field/1,2` macros used below.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :kind => any(),
          :results => list(GoogleApi.CivicInfo.V2.Model.DivisionSearchResult.t())
        }
  field(:kind)
  # :results decodes each list element into a DivisionSearchResult struct
  field(:results, as: GoogleApi.CivicInfo.V2.Model.DivisionSearchResult, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.CivicInfo.V2.Model.DivisionSearchResponse do
  # Delegates decoding to the generated model module.
  def decode(value, options),
    do: GoogleApi.CivicInfo.V2.Model.DivisionSearchResponse.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.CivicInfo.V2.Model.DivisionSearchResponse do
  # Delegates encoding to the shared model-base implementation.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 35.078431 | 155 | 0.745668 |
ff3f57044ffd448625abbb9918cf5e914e575eaa | 1,509 | ex | Elixir | lib/ankh/transport.ex | lucacorti/ankh | 4bbac3a362ed8711a4282cf37d4a85e6139136f8 | [
"MIT"
] | 18 | 2016-09-06T20:35:42.000Z | 2020-04-28T12:51:47.000Z | lib/ankh/transport.ex | lucacorti/ankh | 4bbac3a362ed8711a4282cf37d4a85e6139136f8 | [
"MIT"
] | 16 | 2017-11-26T21:59:41.000Z | 2020-08-17T11:27:56.000Z | lib/ankh/transport.ex | lucacorti/ankh | 4bbac3a362ed8711a4282cf37d4a85e6139136f8 | [
"MIT"
] | 1 | 2020-04-24T07:53:00.000Z | 2020-04-24T07:53:00.000Z | defprotocol Ankh.Transport do
@moduledoc """
Transport interface
"""
@typedoc "Transport socket"
@type t :: struct()
@typedoc "Size"
@type size :: non_neg_integer()
@typedoc "Socket"
@type socket :: any()
@typedoc """
Transport options
"""
@type options :: keyword()
@doc """
Creates a new transport with the passed socket
"""
@spec new(t(), socket()) :: {:ok, t()} | {:error, any()}
def new(transport, socket)
@doc """
Accepts a client connection
"""
@spec accept(t(), options()) :: {:ok, t()} | {:error, any()}
def accept(transport, options)
@doc """
Closes the connection
"""
@spec close(t()) :: {:ok, t()} | {:error, any()}
def close(transport)
@doc """
Connects to an host
"""
@spec connect(t(), URI.t(), timeout(), options()) :: {:ok, t()} | {:error, any()}
def connect(transport, uri, timeout, options)
@doc """
Sends data
"""
@spec send(t(), iodata()) :: :ok | {:error, any()}
def send(transport, data)
@doc """
Receives data
"""
@spec recv(t(), size(), timeout()) :: {:ok, iodata()} | {:error, any()}
def recv(transport, size, timeout)
@doc """
Handles transport messages
"""
@spec handle_msg(t(), any()) :: {:ok, iodata()} | {:error, any()}
def handle_msg(transport, message)
@doc """
Returns the transport negotiated protocol if any, nil otherwise
"""
@spec negotiated_protocol(t()) :: {:ok, String.t()} | {:error, :protocol_not_negotiated}
def negotiated_protocol(transport)
end
| 22.191176 | 90 | 0.589132 |
ff3fca8db990339f605f340b2c3ebec63eb038f9 | 1,341 | ex | Elixir | lib/koans/20_comprehensions.ex | jarekjaro/elixir-koans | 6401b732866d06f1a1c18e842e7b9caba11f65f4 | [
"MIT"
] | null | null | null | lib/koans/20_comprehensions.ex | jarekjaro/elixir-koans | 6401b732866d06f1a1c18e842e7b9caba11f65f4 | [
"MIT"
] | null | null | null | lib/koans/20_comprehensions.ex | jarekjaro/elixir-koans | 6401b732866d06f1a1c18e842e7b9caba11f65f4 | [
"MIT"
] | null | null | null | defmodule Comprehensions do
use Koans
@intro "A comprehension is made of three parts: generators, filters, and collectibles. We will look at how these interact with each other"
koan "The generator, `n <- [1, 2, 3, 4]`, is providing the values for our comprehension" do
assert (for n <- [1, 2, 3, 4], do: n * n) == [1, 4, 9, 16]
end
koan "Any enumerable can be a generator" do
assert (for n <- 1..4, do: n * n) == [1, 4, 9, 16]
end
koan "A generator specifies how to extract values from a collection" do
collection = [["Hello", "World"], ["Apple", "Pie"]]
assert (for [a, b] <- collection, do: "#{a} #{b}") == ["Hello World", "Apple Pie"]
end
koan "You can use multiple generators at once" do
assert (for x <- ["little", "big"], y <- ["dogs", "cats"], do: "#{x} #{y}") == [
"little dogs",
"little cats",
"big dogs",
"big cats"
]
end
koan "Use a filter to reduce your work" do
assert (for n <- [1, 2, 3, 4, 5, 6], n > 3, do: n) == [4, 5, 6]
end
koan "Add the result of a comprehension to an existing collection" do
collection = for x <- ["Pecan", "Pumpkin"], into: %{}, do: {x, "#{x} Pie"}
assert collection == %{
"Pecan" => "Pecan Pie",
"Pumpkin" => "Pumpkin Pie"
}
end
end
| 32.707317 | 140 | 0.546607 |
ff3fcf5fbb3802ca18c68a969e3cc59c5845c2d6 | 55 | ex | Elixir | testData/org/elixir_lang/parser_definition/atom_parsing_test_case/Literal.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/atom_parsing_test_case/Literal.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/atom_parsing_test_case/Literal.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | :atom@1?
:atom@2!
:ATOM@3?
:ATOM@4!
:_atom@5?
:_atom@6! | 9.166667 | 9 | 0.581818 |
ff3fe26328c19c36e00dcd8084b1d2419c968485 | 162 | ex | Elixir | lib/reaper/partitioners/single_partitioner.ex | bbalser/reaper | 7a2e8808c877b33ffa63a745179118f938460989 | [
"Apache-2.0"
] | 26 | 2019-09-20T23:54:45.000Z | 2020-08-20T14:23:32.000Z | lib/reaper/partitioners/single_partitioner.ex | bbalser/reaper | 7a2e8808c877b33ffa63a745179118f938460989 | [
"Apache-2.0"
] | 757 | 2019-08-15T18:15:07.000Z | 2020-09-18T20:55:31.000Z | lib/reaper/partitioners/single_partitioner.ex | bbalser/reaper | 7a2e8808c877b33ffa63a745179118f938460989 | [
"Apache-2.0"
] | 9 | 2019-11-12T16:43:46.000Z | 2020-03-25T16:23:16.000Z | defmodule Reaper.Partitioners.SinglePartitioner do
@moduledoc false
@behaviour Reaper.Partitioner
def partition(_message, _path) do
"SINGLE"
end
end
| 18 | 50 | 0.771605 |
ff3ff011b3214f5969c654a297da2f4ca5fe9012 | 19,280 | exs | Elixir | lib/mix/test/mix/tasks/release_test.exs | matiasgarciaisaia/elixir | d0a3fdbfd774e0a6972513dcb82c2683400e67a0 | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/tasks/release_test.exs | matiasgarciaisaia/elixir | d0a3fdbfd774e0a6972513dcb82c2683400e67a0 | [
"Apache-2.0"
] | null | null | null | lib/mix/test/mix/tasks/release_test.exs | matiasgarciaisaia/elixir | d0a3fdbfd774e0a6972513dcb82c2683400e67a0 | [
"Apache-2.0"
] | null | null | null | Code.require_file("../../test_helper.exs", __DIR__)
defmodule Mix.Tasks.ReleaseTest do
use MixTest.Case
@erts_version :erlang.system_info(:version)
@hostname :inet_db.gethostname()
defmacrop release_node(name), do: :"#{name}@#{@hostname}"
describe "customize" do
test "rel with eex" do
in_fixture("release_test", fn ->
Mix.Project.in_project(:release_test, ".", fn _ ->
File.mkdir_p!("rel")
for file <- ~w(rel/vm.args.eex rel/env.sh.eex rel/env.bat.eex) do
File.write!(file, """
#{file} FOR <%= @release.name %>
""")
end
root = Path.absname("_build/dev/rel/release_test")
Mix.Task.run("release")
assert_received {:mix_shell, :info, ["* assembling release_test-0.1.0 on MIX_ENV=dev"]}
assert root |> Path.join("releases/0.1.0/env.sh") |> File.read!() ==
"rel/env.sh.eex FOR release_test\n"
assert root |> Path.join("releases/0.1.0/env.bat") |> File.read!() ==
"rel/env.bat.eex FOR release_test\n"
assert root |> Path.join("releases/0.1.0/vm.args") |> File.read!() ==
"rel/vm.args.eex FOR release_test\n"
end)
end)
end
test "tar" do
in_fixture("release_test", fn ->
config = [releases: [demo: [steps: [:assemble, :tar]]]]
Mix.Project.in_project(:release_test, ".", config, fn _ ->
root = Path.absname("_build/#{Mix.env()}/rel/demo")
ignored_app_path = Path.join([root, "lib", "ignored_app-0.1.0", "ebin"])
File.mkdir_p!(ignored_app_path)
File.touch(Path.join(ignored_app_path, "ignored_app.app"))
ignored_release_path = Path.join([root, "releases", "ignored_dir"])
File.mkdir_p!(ignored_release_path)
File.touch(Path.join(ignored_release_path, "ignored"))
Mix.Task.run("release")
tar_path = Path.expand(Path.join([root, "..", "..", "demo-0.1.0.tar.gz"]))
message = "* building #{tar_path}"
assert_received {:mix_shell, :info, [^message]}
assert File.exists?(tar_path)
{:ok, files} = String.to_charlist(tar_path) |> :erl_tar.table([:compressed])
files = Enum.map(files, &to_string/1)
files_with_versions = File.ls!(Path.join(root, "lib"))
assert "bin/demo" in files
assert "releases/0.1.0/sys.config" in files
assert "releases/0.1.0/vm.args" in files
assert "releases/COOKIE" in files
assert "releases/start_erl.data" in files
for dir <- files_with_versions -- ["ignored_app-0.1.0"] do
[name | _] = String.split(dir, "-")
assert "lib/#{dir}/ebin/#{name}.app" in files
end
refute "lib/ignored_app-0.1.0/ebin/ignored_app.app" in files
refute "releases/ignored_dir/ignored" in files
end)
end)
end
test "steps" do
in_fixture("release_test", fn ->
last_step = fn release ->
send(self(), {:last_step, release})
release
end
first_step = fn release ->
send(self(), {:first_step, release})
update_in(release.steps, &(&1 ++ [last_step]))
end
config = [releases: [demo: [steps: [first_step, :assemble]]]]
Mix.Project.in_project(:release_test, ".", config, fn _ ->
Mix.Task.run("release")
assert_received {:mix_shell, :info, ["* assembling demo-0.1.0 on MIX_ENV=dev"]}
# Discard info messages from inbox for upcoming assertions
Mix.shell().flush(& &1)
{:messages,
[
{:first_step, %Mix.Release{steps: [:assemble]}},
{:last_step, %Mix.Release{steps: []}}
]} = Process.info(self(), :messages)
end)
end)
end
test "include_executables_for" do
in_fixture("release_test", fn ->
config = [releases: [release_test: [include_executables_for: []]]]
Mix.Project.in_project(:release_test, ".", config, fn _ ->
root = Path.absname("_build/dev/rel/release_test")
Mix.Task.run("release")
assert_received {:mix_shell, :info, ["* assembling release_test-0.1.0 on MIX_ENV=dev"]}
refute root |> Path.join("bin/start") |> File.exists?()
refute root |> Path.join("bin/start.bat") |> File.exists?()
refute root |> Path.join("releases/0.1.0/elixir") |> File.exists?()
refute root |> Path.join("releases/0.1.0/elixir.bat") |> File.exists?()
refute root |> Path.join("releases/0.1.0/iex") |> File.exists?()
refute root |> Path.join("releases/0.1.0/iex.bat") |> File.exists?()
end)
end)
end
end
test "assembles a bootable release with ERTS" do
in_fixture("release_test", fn ->
Mix.Project.in_project(:release_test, ".", fn _ ->
root = Path.absname("_build/dev/rel/release_test")
# Assert command
Mix.Task.run("release")
assert_received {:mix_shell, :info, ["* assembling release_test-0.1.0 on MIX_ENV=dev"]}
assert_received {:mix_shell, :info,
["\nRelease created at _build/dev/rel/release_test!" <> _]}
assert_received {:mix_shell, :info, ["* skipping runtime configuration" <> _]}
# Assert structure
assert root |> Path.join("erts-#{@erts_version}") |> File.exists?()
assert root |> Path.join("lib/release_test-0.1.0/ebin") |> File.exists?()
assert root |> Path.join("lib/release_test-0.1.0/priv/hello") |> File.exists?()
assert root |> Path.join("releases/COOKIE") |> File.exists?()
assert root |> Path.join("releases/start_erl.data") |> File.exists?()
assert root |> Path.join("releases/0.1.0/release_test.rel") |> File.exists?()
assert root |> Path.join("releases/0.1.0/sys.config") |> File.exists?()
assert root |> Path.join("releases/0.1.0/env.sh") |> File.exists?()
assert root |> Path.join("releases/0.1.0/env.bat") |> File.exists?()
assert root |> Path.join("releases/0.1.0/vm.args") |> File.exists?()
assert root
|> Path.join("releases/0.1.0/sys.config")
|> File.read!() =~ "RUNTIME_CONFIG=false"
assert root
|> Path.join("lib/release_test-0.1.0/priv")
|> File.read_link()
|> elem(0) == :error
cookie = File.read!(Path.join(root, "releases/COOKIE"))
# Assert runtime
open_port(Path.join(root, "bin/release_test"), ['start'])
assert %{
app_dir: app_dir,
cookie_env: ^cookie,
encoding: {:"£", "£", '£'},
mode: :embedded,
node: release_node("release_test"),
protocols_consolidated?: true,
release_name: "release_test",
release_node: "release_test",
release_root: release_root,
release_vsn: "0.1.0",
root_dir: root_dir,
runtime_config: :error,
static_config: {:ok, :was_set},
sys_config_env: sys_config_env,
sys_config_init: sys_config_init
} = wait_until_decoded(Path.join(root, "RELEASE_BOOTED"))
if match?({:win32, _}, :os.type()) do
# `RELEAS~1` is the DOS path name (8 character) for the `release_test` directory
assert app_dir =~ ~r"_build/dev/rel/(release_test|RELEAS~1)/lib/release_test-0\.1\.0$"
assert release_root =~ ~r"_build\\dev\\rel\\(release_test|RELEAS~1)$"
assert root_dir =~ ~r"_build/dev/rel/(release_test|RELEAS~1)$"
assert String.ends_with?(sys_config_env, "releases\\0.1.0\\sys")
assert String.ends_with?(sys_config_init, "releases\\0.1.0\\sys")
else
assert app_dir == Path.join(root, "lib/release_test-0.1.0")
assert release_root == root
assert root_dir == root
assert sys_config_env == Path.join(root, "releases/0.1.0/sys")
assert sys_config_init == Path.join(root, "releases/0.1.0/sys")
end
end)
end)
end
test "assembles a bootable release with runtime configuration" do
in_fixture("release_test", fn ->
config = [releases: [runtime_config: []]]
Mix.Project.in_project(:release_test, ".", config, fn _ ->
File.write!("config/releases.exs", """
import Config
config :release_test, :runtime, :was_set
config :release_test, :encoding, {:runtime, :"£", "£", '£'}
""")
root = Path.absname("_build/dev/rel/runtime_config")
# Assert command
Mix.Task.run("release", ["runtime_config"])
assert_received {:mix_shell, :info, ["* assembling runtime_config-0.1.0 on MIX_ENV=dev"]}
assert_received {:mix_shell, :info,
["* using config/releases.exs to configure the release at runtime"]}
# Assert structure
assert root
|> Path.join("releases/0.1.0/sys.config")
|> File.read!() =~ "RUNTIME_CONFIG=true"
# Make sys.config read-only and it should still boot
assert root
|> Path.join("releases/0.1.0/sys.config")
|> File.chmod(0o555) == :ok
# Assert runtime
open_port(Path.join(root, "bin/runtime_config"), ['start'])
assert %{
encoding: {:runtime, :"£", "£", '£'},
mode: :embedded,
node: release_node("runtime_config"),
protocols_consolidated?: true,
release_name: "runtime_config",
release_node: "runtime_config",
release_vsn: "0.1.0",
runtime_config: {:ok, :was_set},
static_config: {:ok, :was_set},
sys_config_env: sys_config_env,
sys_config_init: sys_config_init
} = wait_until_decoded(Path.join(root, "RELEASE_BOOTED"))
if match?({:win32, _}, :os.type()) do
assert sys_config_env =~ "tmp\\runtime_config-0.1.0"
assert sys_config_init =~ "tmp\\runtime_config-0.1.0"
else
assert sys_config_env =~ "tmp/runtime_config-0.1.0"
assert sys_config_init =~ "tmp/runtime_config-0.1.0"
end
end)
end)
end
test "assembles a bootable release without distribution" do
in_fixture("release_test", fn ->
config = [releases: [no_dist: []]]
Mix.Project.in_project(:release_test, ".", config, fn _ ->
root = Path.absname("_build/dev/rel/no_dist")
Mix.Task.run("release", ["no_dist"])
open_port(Path.join(root, "bin/no_dist"), ['start'], [{'RELEASE_DISTRIBUTION', 'none'}])
assert %{
mode: :embedded,
node: :nonode@nohost,
protocols_consolidated?: true,
release_name: "no_dist",
release_node: "no_dist",
release_vsn: "0.1.0"
} = wait_until_decoded(Path.join(root, "RELEASE_BOOTED"))
end)
end)
end
test "assembles a release without ERTS and with custom options" do
in_fixture("release_test", fn ->
config = [releases: [demo: [include_erts: false, cookie: "abcdefghijk"]]]
Mix.Project.in_project(:release_test, ".", config, fn _ ->
root = Path.absname("demo")
Mix.Task.run("release", ["demo", "--path", "demo", "--version", "0.2.0", "--quiet"])
refute_received {:mix_shell, :info, ["* assembling " <> _]}
refute_received {:mix_shell, :info, ["\nRelease created " <> _]}
# Assert structure
assert root |> Path.join("bin/demo") |> File.exists?()
refute root |> Path.join("erts-#{@erts_version}") |> File.exists?()
assert root |> Path.join("lib/release_test-0.1.0/ebin") |> File.exists?()
assert root |> Path.join("lib/release_test-0.1.0/priv/hello") |> File.exists?()
assert root |> Path.join("releases/COOKIE") |> File.exists?()
assert root |> Path.join("releases/start_erl.data") |> File.exists?()
assert root |> Path.join("releases/0.2.0/demo.rel") |> File.exists?()
assert root |> Path.join("releases/0.2.0/sys.config") |> File.exists?()
assert root |> Path.join("releases/0.2.0/vm.args") |> File.exists?()
# Assert runtime
open_port(Path.join(root, "bin/demo"), ['start'])
assert %{
app_dir: app_dir,
cookie_env: "abcdefghijk",
mode: :embedded,
node: release_node("demo"),
protocols_consolidated?: true,
release_name: "demo",
release_node: "demo",
release_root: release_root,
release_vsn: "0.2.0",
root_dir: root_dir,
runtime_config: :error,
static_config: {:ok, :was_set}
} = wait_until_decoded(Path.join(root, "RELEASE_BOOTED"))
if match?({:win32, _}, :os.type()) do
assert String.ends_with?(app_dir, "demo/lib/release_test-0.1.0")
assert String.ends_with?(release_root, "demo")
else
assert app_dir == Path.join(root, "lib/release_test-0.1.0")
assert release_root == root
end
assert root_dir == :code.root_dir() |> to_string()
end)
end)
end
@tag :epmd
test "executes rpc instructions" do
in_fixture("release_test", fn ->
config = [releases: [permanent1: [include_erts: false]]]
Mix.Project.in_project(:release_test, ".", config, fn _ ->
root = Path.absname("_build/dev/rel/permanent1")
Mix.Task.run("release")
script = Path.join(root, "bin/permanent1")
open_port(script, ['start'])
wait_until_decoded(Path.join(root, "RELEASE_BOOTED"))
assert System.cmd(script, ["rpc", "ReleaseTest.hello_world()"]) == {"hello world\n", 0}
assert {pid, 0} = System.cmd(script, ["pid"])
assert pid != "\n"
assert System.cmd(script, ["stop"]) == {"", 0}
end)
end)
end
test "runs eval and version commands" do
# In some Windows setups (mostly with Docker), `System.cmd/3` fails because
# the path to the command/executable and one or more arguments contain spaces.
tmp_dir = Path.join(inspect(__MODULE__), "runs_eval_and_version_commands")
in_fixture("release_test", tmp_dir, fn ->
config = [releases: [eval: [include_erts: false, cookie: "abcdefghij"]]]
Mix.Project.in_project(:release_test, ".", config, fn _ ->
File.write!("config/releases.exs", """
import Config
config :release_test, :runtime, :was_set
""")
root = Path.absname("_build/dev/rel/eval")
Mix.Task.run("release")
script = Path.join(root, "bin/eval")
{version, 0} = System.cmd(script, ["version"])
assert String.trim_trailing(version) == "eval 0.1.0"
refute File.exists?(Path.join(root, "RELEASE_BOOTED"))
{hello_world, 0} = System.cmd(script, ["eval", "IO.puts :hello_world"])
assert String.trim_trailing(hello_world) == "hello_world"
refute File.exists?(Path.join(root, "RELEASE_BOOTED"))
open_port(script, ['eval', 'Application.ensure_all_started(:release_test)'])
assert %{
cookie_env: "abcdefghij",
mode: :interactive,
node: :nonode@nohost,
protocols_consolidated?: true,
release_name: "eval",
release_node: "eval",
release_root: root,
release_vsn: "0.1.0",
runtime_config: {:ok, :was_set},
static_config: {:ok, :was_set}
} = wait_until_decoded(Path.join(root, "RELEASE_BOOTED"))
end)
end)
end
# Daemon mode relies on run_erl/to_erl, which only exist on Unix systems.
@tag :unix
test "runs in daemon mode" do
  in_fixture("release_test", fn ->
    config = [releases: [permanent2: [include_erts: false, cookie: "abcdefghij"]]]

    Mix.Project.in_project(:release_test, ".", config, fn _ ->
      root = Path.absname("_build/dev/rel/permanent2")
      Mix.Task.run("release")
      script = Path.join(root, "bin/permanent2")
      open_port(script, ['daemon_iex'])

      assert %{
               app_dir: app_dir,
               cookie_env: "abcdefghij",
               mode: :embedded,
               node: release_node("permanent2"),
               protocols_consolidated?: true,
               release_name: "permanent2",
               release_node: "permanent2",
               release_root: ^root,
               release_vsn: "0.1.0",
               root_dir: root_dir,
               runtime_config: :error,
               static_config: {:ok, :was_set},
               sys_config_env: sys_config_env,
               sys_config_init: sys_config_init
             } = wait_until_decoded(Path.join(root, "RELEASE_BOOTED"))

      assert app_dir == Path.join(root, "lib/release_test-0.1.0")
      assert root_dir == :code.root_dir() |> to_string()
      assert sys_config_env == Path.join(root, "releases/0.1.0/sys")
      assert sys_config_init == Path.join(root, "releases/0.1.0/sys")

      # The daemon redirects IEx output into run_erl's log file; poll until
      # the IEx prompt shows up there before issuing rpc commands.
      assert wait_until(fn ->
               File.read!(Path.join(root, "tmp/log/erlang.log.1")) =~
                 "iex(permanent2@#{@hostname})1> "
             end)

      assert System.cmd(script, ["rpc", "ReleaseTest.hello_world()"]) == {"hello world\n", 0}
      assert System.cmd(script, ["stop"]) == {"", 0}
    end)
  end)
end
test "requires confirmation if release already exists unless overwriting" do
  in_fixture("release_test", fn ->
    Mix.Project.in_project(:release_test, ".", fn _ ->
      Mix.Task.rerun("release")
      assert_received {:mix_shell, :info, ["* assembling release_test-0.1.0 on MIX_ENV=dev"]}

      # Decline the overwrite prompt: the release must NOT be reassembled.
      send(self(), {:mix_shell_input, :yes?, false})
      Mix.Task.rerun("release")
      refute_received {:mix_shell, :info, ["* assembling release_test-0.1.0 on MIX_ENV=dev"]}

      assert_received {:mix_shell, :yes?,
                       ["Release release_test-0.1.0 already exists. Overwrite?"]}

      # --overwrite skips the prompt entirely and reassembles.
      Mix.Task.rerun("release", ["--overwrite"])
      assert_received {:mix_shell, :info, ["* assembling release_test-0.1.0 on MIX_ENV=dev"]}
    end)
  end)
end
test "requires a matching name" do
  in_fixture("release_test", fn ->
    Mix.Project.in_project(:release_test, ".", fn _ ->
      # Asking for a release that is not configured must raise a Mix error.
      assert_raise Mix.Error, ~r"Unknown release :unknown", fn ->
        Mix.Task.run("release", ["unknown"])
      end
    end)
  end)
end
# Launches `command` as a hidden background OS process via an Erlang port,
# passing `args` and optional extra environment entries.
defp open_port(command, args, env \\ []) do
  port_settings = [:hide, args: args, env: env]
  Port.open({:spawn_executable, to_charlist(command)}, port_settings)
end
# Polls `file` until it exists and is non-empty, then decodes its contents
# with :erlang.binary_to_term/1 (acceptable here: the file is produced by our
# own release fixture, not untrusted input).
defp wait_until_decoded(file) do
  wait_until(fn ->
    with {:ok, contents} when byte_size(contents) > 0 <- File.read(file) do
      :erlang.binary_to_term(contents)
    else
      _ -> nil
    end
  end)
end
# Busy-waits (10ms steps) until `fun` returns a truthy value, which is then
# returned to the caller. Loops forever if `fun` never succeeds; the test
# suite's own timeout acts as the backstop.
defp wait_until(fun) do
  case fun.() do
    result when result not in [nil, false] ->
      result

    _falsy ->
      Process.sleep(10)
      wait_until(fun)
  end
end
end
| 38.406375 | 97 | 0.566649 |
ff400a9929a83b317a57833ed1f17020f1ac6e49 | 4,796 | ex | Elixir | lib/helper.ex | tulip/modbus | dab29a158033b50cda2e0fa574c022d01976622d | [
"Apache-2.0"
] | 11 | 2016-11-24T18:48:57.000Z | 2020-10-26T07:30:34.000Z | lib/helper.ex | tulip/modbus | dab29a158033b50cda2e0fa574c022d01976622d | [
"Apache-2.0"
] | 5 | 2016-11-28T19:33:39.000Z | 2022-03-07T18:40:34.000Z | lib/helper.ex | tulip/modbus | dab29a158033b50cda2e0fa574c022d01976622d | [
"Apache-2.0"
] | 13 | 2016-12-14T15:42:23.000Z | 2021-11-02T19:46:16.000Z | defmodule Modbus.Helper do
@moduledoc false
use Bitwise
@hi [
0x00, 0xC1, 0x81, 0x40, 0x01, 0xC0, 0x80, 0x41, 0x01, 0xC0, 0x80, 0x41, 0x00, 0xC1, 0x81,
0x40, 0x01, 0xC0, 0x80, 0x41, 0x00, 0xC1, 0x81, 0x40, 0x00, 0xC1, 0x81, 0x40, 0x01, 0xC0,
0x80, 0x41, 0x01, 0xC0, 0x80, 0x41, 0x00, 0xC1, 0x81, 0x40, 0x00, 0xC1, 0x81, 0x40, 0x01,
0xC0, 0x80, 0x41, 0x00, 0xC1, 0x81, 0x40, 0x01, 0xC0, 0x80, 0x41, 0x01, 0xC0, 0x80, 0x41,
0x00, 0xC1, 0x81, 0x40, 0x01, 0xC0, 0x80, 0x41, 0x00, 0xC1, 0x81, 0x40, 0x00, 0xC1, 0x81,
0x40, 0x01, 0xC0, 0x80, 0x41, 0x00, 0xC1, 0x81, 0x40, 0x01, 0xC0, 0x80, 0x41, 0x01, 0xC0,
0x80, 0x41, 0x00, 0xC1, 0x81, 0x40, 0x00, 0xC1, 0x81, 0x40, 0x01, 0xC0, 0x80, 0x41, 0x01,
0xC0, 0x80, 0x41, 0x00, 0xC1, 0x81, 0x40, 0x01, 0xC0, 0x80, 0x41, 0x00, 0xC1, 0x81, 0x40,
0x00, 0xC1, 0x81, 0x40, 0x01, 0xC0, 0x80, 0x41, 0x01, 0xC0, 0x80, 0x41, 0x00, 0xC1, 0x81,
0x40, 0x00, 0xC1, 0x81, 0x40, 0x01, 0xC0, 0x80, 0x41, 0x00, 0xC1, 0x81, 0x40, 0x01, 0xC0,
0x80, 0x41, 0x01, 0xC0, 0x80, 0x41, 0x00, 0xC1, 0x81, 0x40, 0x00, 0xC1, 0x81, 0x40, 0x01,
0xC0, 0x80, 0x41, 0x01, 0xC0, 0x80, 0x41, 0x00, 0xC1, 0x81, 0x40, 0x01, 0xC0, 0x80, 0x41,
0x00, 0xC1, 0x81, 0x40, 0x00, 0xC1, 0x81, 0x40, 0x01, 0xC0, 0x80, 0x41, 0x00, 0xC1, 0x81,
0x40, 0x01, 0xC0, 0x80, 0x41, 0x01, 0xC0, 0x80, 0x41, 0x00, 0xC1, 0x81, 0x40, 0x01, 0xC0,
0x80, 0x41, 0x00, 0xC1, 0x81, 0x40, 0x00, 0xC1, 0x81, 0x40, 0x01, 0xC0, 0x80, 0x41, 0x01,
0xC0, 0x80, 0x41, 0x00, 0xC1, 0x81, 0x40, 0x00, 0xC1, 0x81, 0x40, 0x01, 0xC0, 0x80, 0x41,
0x00, 0xC1, 0x81, 0x40, 0x01, 0xC0, 0x80, 0x41, 0x01, 0xC0, 0x80, 0x41, 0x00, 0xC1, 0x81,
0x40 ]
@lo [
0x00, 0xC0, 0xC1, 0x01, 0xC3, 0x03, 0x02, 0xC2, 0xC6, 0x06, 0x07, 0xC7, 0x05, 0xC5, 0xC4,
0x04, 0xCC, 0x0C, 0x0D, 0xCD, 0x0F, 0xCF, 0xCE, 0x0E, 0x0A, 0xCA, 0xCB, 0x0B, 0xC9, 0x09,
0x08, 0xC8, 0xD8, 0x18, 0x19, 0xD9, 0x1B, 0xDB, 0xDA, 0x1A, 0x1E, 0xDE, 0xDF, 0x1F, 0xDD,
0x1D, 0x1C, 0xDC, 0x14, 0xD4, 0xD5, 0x15, 0xD7, 0x17, 0x16, 0xD6, 0xD2, 0x12, 0x13, 0xD3,
0x11, 0xD1, 0xD0, 0x10, 0xF0, 0x30, 0x31, 0xF1, 0x33, 0xF3, 0xF2, 0x32, 0x36, 0xF6, 0xF7,
0x37, 0xF5, 0x35, 0x34, 0xF4, 0x3C, 0xFC, 0xFD, 0x3D, 0xFF, 0x3F, 0x3E, 0xFE, 0xFA, 0x3A,
0x3B, 0xFB, 0x39, 0xF9, 0xF8, 0x38, 0x28, 0xE8, 0xE9, 0x29, 0xEB, 0x2B, 0x2A, 0xEA, 0xEE,
0x2E, 0x2F, 0xEF, 0x2D, 0xED, 0xEC, 0x2C, 0xE4, 0x24, 0x25, 0xE5, 0x27, 0xE7, 0xE6, 0x26,
0x22, 0xE2, 0xE3, 0x23, 0xE1, 0x21, 0x20, 0xE0, 0xA0, 0x60, 0x61, 0xA1, 0x63, 0xA3, 0xA2,
0x62, 0x66, 0xA6, 0xA7, 0x67, 0xA5, 0x65, 0x64, 0xA4, 0x6C, 0xAC, 0xAD, 0x6D, 0xAF, 0x6F,
0x6E, 0xAE, 0xAA, 0x6A, 0x6B, 0xAB, 0x69, 0xA9, 0xA8, 0x68, 0x78, 0xB8, 0xB9, 0x79, 0xBB,
0x7B, 0x7A, 0xBA, 0xBE, 0x7E, 0x7F, 0xBF, 0x7D, 0xBD, 0xBC, 0x7C, 0xB4, 0x74, 0x75, 0xB5,
0x77, 0xB7, 0xB6, 0x76, 0x72, 0xB2, 0xB3, 0x73, 0xB1, 0x71, 0x70, 0xB0, 0x50, 0x90, 0x91,
0x51, 0x93, 0x53, 0x52, 0x92, 0x96, 0x56, 0x57, 0x97, 0x55, 0x95, 0x94, 0x54, 0x9C, 0x5C,
0x5D, 0x9D, 0x5F, 0x9F, 0x9E, 0x5E, 0x5A, 0x9A, 0x9B, 0x5B, 0x99, 0x59, 0x58, 0x98, 0x88,
0x48, 0x49, 0x89, 0x4B, 0x8B, 0x8A, 0x4A, 0x4E, 0x8E, 0x8F, 0x4F, 0x8D, 0x4D, 0x4C, 0x8C,
0x44, 0x84, 0x85, 0x45, 0x87, 0x47, 0x46, 0x86, 0x82, 0x42, 0x43, 0x83, 0x41, 0x81, 0x80,
0x40]
# Computes the Modbus RTU CRC-16 of `data` using the precomputed @hi/@lo
# lookup tables, starting from the standard 0xFFFF seed and returning the
# two CRC bytes as <<hi, lo>>.
def crc(data) do
  {hi, lo} =
    data
    |> :binary.bin_to_list()
    |> Enum.reduce({0xFF, 0xFF}, fn byte, {hi, lo} ->
      index = lo ^^^ byte
      {Enum.at(@lo, index), hi ^^^ Enum.at(@hi, index)}
    end)

  <<hi, lo>>
end
# Number of bytes needed to hold `count` bits (Modbus packs 8 coils per byte).
def byte_count(bit_count), do: div(bit_count - 1, 8) + 1
# Maps a coil value to its Modbus byte encoding: 0 -> 0x00, 1 -> 0xFF.
# Any other input raises CaseClauseError on purpose — coil values are
# deliberately restricted to 0 and 1.
def bool_to_byte(value) do
  # enforce 0 or 1 only
  case value do
    0 -> 0x00
    1 -> 0xFF
  end
end
# Unpacks a coil-status binary into a list of `count` bits.
#
# Within each byte the least-significant bit (b0) is the FIRST coil, so each
# byte is destructured from b7 down to b0 and emitted in reverse order.
def bin_to_bitlist(count, <<b7::1, b6::1, b5::1, b4::1, b3::1, b2::1, b1::1, b0::1>>) when count <= 8 do
  # Final (or only) byte: keep just the `count` requested bits.
  Enum.take([b0, b1, b2, b3, b4, b5, b6, b7], count)
end

def bin_to_bitlist(count, <<b7::1, b6::1, b5::1, b4::1, b3::1, b2::1, b1::1, b0::1, tail::binary>>) do
  [b0, b1, b2, b3, b4, b5, b6, b7] ++ bin_to_bitlist(count - 8, tail)
end
# Splits a response binary into a list of `count` 16-bit big-endian
# register values. Raises FunctionClauseError if the binary does not
# contain exactly `count` registers.
def bin_to_reglist(1, <<reg::16>>), do: [reg]

def bin_to_reglist(count, <<reg::16, rest::binary>>) do
  [reg | bin_to_reglist(count - 1, rest)]
end
# Packs a list of coil values (each strictly 0 or 1) into a Modbus coil
# binary. The list is zero-padded up to a multiple of 8; within each output
# byte the first list element becomes the least-significant bit.
def bitlist_to_bin(values) do
  values
  # Enum.chunk/4 is deprecated; chunk_every/4 has identical pad semantics.
  |> Enum.chunk_every(8, 8, [0, 0, 0, 0, 0, 0, 0, 0])
  |> Enum.map(fn list8 ->
    # bool_to_byte/1 enforces 0/1; packing its 0x00/0xFF result into a
    # single bit keeps only the low bit, yielding 0 or 1 as before.
    [v0, v1, v2, v3, v4, v5, v6, v7] = Enum.map(list8, &bool_to_byte/1)
    <<v7::1, v6::1, v5::1, v4::1, v3::1, v2::1, v1::1, v0::1>>
  end)
  |> :erlang.iolist_to_binary()
end
# Encodes a list of register values as a binary of 16-bit big-endian words.
def reglist_to_bin(values) do
  for value <- values, into: <<>>, do: <<value::size(16)>>
end
end
| 45.67619 | 106 | 0.610509 |
ff401e8034fca81b0c8d75073c4388b382e60782 | 2,455 | ex | Elixir | lib/quadquizaminos_web.ex | sparrell/quizquadaminos | 6388252b7c32aa4101486cbf45d0d57d4e3a79da | [
"MIT"
] | 1 | 2022-01-11T20:06:31.000Z | 2022-01-11T20:06:31.000Z | lib/quadquizaminos_web.ex | sFractal-Podii/quizquadaminos | 4c9023b9c6212a92741b0f21d9eb05871aa73916 | [
"MIT"
] | 484 | 2021-02-15T18:43:40.000Z | 2022-03-29T12:27:24.000Z | lib/quadquizaminos_web.ex | sFractal-Podii/quizquadaminos | 4c9023b9c6212a92741b0f21d9eb05871aa73916 | [
"MIT"
] | 8 | 2021-02-15T19:12:51.000Z | 2021-12-15T03:02:46.000Z | defmodule QuadquizaminosWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use QuadquizaminosWeb, :controller
use QuadquizaminosWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
# Quoted imports/aliases shared by every controller in the app.
def controller do
  quote do
    use Phoenix.Controller, namespace: QuadquizaminosWeb

    import Plug.Conn
    import QuadquizaminosWeb.Gettext
    alias QuadquizaminosWeb.Router.Helpers, as: Routes
  end
end
# Quoted setup shared by every view: templates root plus common helpers.
def view do
  quote do
    use Phoenix.View,
      root: "lib/quadquizaminos_web/templates",
      namespace: QuadquizaminosWeb

    # Import convenience functions from controllers
    import Phoenix.Controller,
      only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]

    # Include shared imports and aliases for views
    unquote(view_helpers())
  end
end
# Quoted setup for LiveViews; every LiveView renders in the "live.html" layout.
def live_view do
  quote do
    use Phoenix.LiveView,
      layout: {QuadquizaminosWeb.LayoutView, "live.html"}

    unquote(view_helpers())
  end
end
# Quoted setup for LiveComponents.
def live_component do
  quote do
    use Phoenix.LiveComponent

    unquote(view_helpers())
  end
end
# Quoted imports for the router: plugs, controller helpers, LiveView routing.
def router do
  quote do
    use Phoenix.Router

    import Plug.Conn
    import Phoenix.Controller
    import Phoenix.LiveView.Router
  end
end
# Quoted setup for Phoenix channels.
def channel do
  quote do
    use Phoenix.Channel
    import QuadquizaminosWeb.Gettext
  end
end
# Helpers shared by views, LiveViews and LiveComponents (injected above).
defp view_helpers do
  quote do
    # Use all HTML functionality (forms, tags, etc)
    use Phoenix.HTML

    # Import LiveView helpers (live_render, live_component, live_patch, etc)
    import Phoenix.LiveView.Helpers

    # Import basic rendering functionality (render, render_layout, etc)
    import Phoenix.View

    import QuadquizaminosWeb.ErrorHelpers
    import QuadquizaminosWeb.Gettext
    import QuadquizaminosWeb.LiveHelpers
    alias QuadquizaminosWeb.Router.Helpers, as: Routes
  end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
  # e.g. `use QuadquizaminosWeb, :controller` invokes `controller/0` above.
  apply(__MODULE__, which, [])
end
end
| 23.605769 | 78 | 0.695723 |
ff402ab3da8de9ba8af06b3d66dc0362f7fdb73e | 2,085 | exs | Elixir | mix.exs | heyorbit/elixir-server-utils | 32643ecf4042a110495ffb2eaee3016aa5276d5a | [
"MIT"
] | null | null | null | mix.exs | heyorbit/elixir-server-utils | 32643ecf4042a110495ffb2eaee3016aa5276d5a | [
"MIT"
] | null | null | null | mix.exs | heyorbit/elixir-server-utils | 32643ecf4042a110495ffb2eaee3016aa5276d5a | [
"MIT"
] | null | null | null | defmodule ServerUtils.Mixfile do
@moduledoc false
use Mix.Project
@version "0.3.4"
# Mix project configuration, including ExDoc and coveralls coverage setup.
def project do
  [
    app: :server_utils,
    version: @version,
    elixir: "~> 1.6",
    elixirc_paths: elixirc_paths(Mix.env()),
    start_permanent: Mix.env() == :prod,
    deps: deps(),
    aliases: aliases(),
    package: package(),
    description: description(),
    docs: [
      source_ref: "v#{@version}",
      main: "installation",
      extra_section: "README",
      formatters: ["html", "epub"],
      extras: ["README.md"]
    ],
    test_coverage: [tool: ExCoveralls],
    preferred_cli_env: [
      coveralls: :test,
      "coveralls.detail": :test,
      "coveralls.post": :test,
      "coveralls.html": :test
    ]
  ]
end
# Compile test support files only in the :test environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Run "mix help compile.app" to learn about applications.
def application do
  [
    extra_applications: [:logger, :runtime_tools]
  ]
end
# Hex package metadata.
defp package do
  [
    name: "server_utils",
    files: ["lib", "mix.exs", "README*", "LICENSE*"],
    maintainers: ["Adrián Quintás"],
    licenses: ["MIT"],
    links: %{"GitHub" => "https://github.com/orbitdigital/elixir-server-utils"}
  ]
end
# Hex package description.
defp description do
  "Server utils to automate common tasks like pagination or authentication"
end
# Run "mix help deps" to learn about dependencies.
defp deps do
  [
    {:excoveralls, "~> 0.8", only: :test},
    {:mock, "~> 0.3", only: :test},
    {:dialyxir, "~> 0.5", only: [:dev], runtime: false},
    {:credo, "~> 0.9", only: [:dev, :test], runtime: false},
    {:plug, "~> 1.5.1"},
    {:ex_doc, ">= 0.0.0", only: :dev},
    {:sentry, "~> 6.2"},
    {:joken, "~> 1.5"},
    {:exjsx, "~> 4.0"},
    {:git_hooks, "~> 0.2.0", only: :dev}
  ]
end
# Task aliases: fail compilation on warnings; default coveralls to HTML output.
defp aliases do
  [
    compile: ["compile --warnings-as-errors"],
    coveralls: ["coveralls.html --umbrella"],
    "coveralls.html": ["coveralls.html --umbrella"]
  ]
end
end
| 25.120482 | 81 | 0.549161 |
ff404eabf31698d89dd2265870c1637b21a05f80 | 4,151 | ex | Elixir | lib/phoenix_mtm/helpers.ex | kevbuchanan/phoenix_mtm | be5f1e0bf602343f87b3b91f88bbe8eb5f5523be | [
"MIT"
] | null | null | null | lib/phoenix_mtm/helpers.ex | kevbuchanan/phoenix_mtm | be5f1e0bf602343f87b3b91f88bbe8eb5f5523be | [
"MIT"
] | null | null | null | lib/phoenix_mtm/helpers.ex | kevbuchanan/phoenix_mtm | be5f1e0bf602343f87b3b91f88bbe8eb5f5523be | [
"MIT"
] | null | null | null | defmodule PhoenixMTM.Helpers do
@moduledoc """
Provides HTML helpers for Phoenix.
"""
import Phoenix.HTML, only: [html_escape: 1]
import Phoenix.HTML.Form, only: [field_name: 2, field_id: 2, hidden_input: 3 ]
@doc ~S"""
Generates a list of checkboxes and labels to update a Phoenix
many_to_many relationship.
## Basic Example
<%= PhoenixMTM.Helpers.collection_checkboxes f, :tags,
Enum.map(@tags, &({&1.name, &1.id})),
selected: Enum.map(f.data.tags, &(&1.id)) %>
## Custom `<input>` and `<label>` options
<%= PhoenixMTM.Helpers.collection_checkboxes f, :tags,
Enum.map(@tags, &({&1.name, &1.id})),
selected: Enum.map(f.data.tags, &(&1.id)),
label_opts: [class: "form-input"], input_opts: [class: "form-control"] %>
## Options
* `:selected` - a list of options that should be pre-selected
* `:input_opts` - a list of attributes to be applied to each checkbox input
* `:label_opts` - a list of attributes to be applied to each checkbox label
* `:wrapper` - a function to wrap the HTML structure of each checkbox/label
* `:mapper` - a function to customize the HTML structure of each checkbox/label
## Wrapper
A `wrapper` function can be used to wrap each checkbox and label pair in one
or more HTML elements.
The wrapper function receives the pair as a single argument, and should return
a `safe` tuple as expected by Phoenix.
A simplified version of this is to call `Phoenix.HTML.Tag.content_tag`
<%= PhoenixMTM.Helpers.collection_checkboxes f, :tags,
Enum.map(@tags, &({&1.name, &1.id})),
selected: Enum.map(f.data.tags, &(&1.id)),
wrapper: &Phoenix.HTML.Tag.content_tag(:p, &1)
## Mapper
A `mapper` function can be used to customize the structure of the checkbox and
label pair.
The mapper function receives the form, field name, input options, label text,
label options, and helper options, and should return a `safe` tuple as expected
by Phoenix.
# Somewhere in your application
defmodule CustomMappers do
use PhoenixMTM.Mappers
def bootstrap(form, field, input_opts, label_content, label_opts, _opts) do
content_tag(:div, class: "checkbox") do
label(form, field, label_opts) do
[
tag(:input, input_opts),
html_escape(label_content)
]
end
end
end
end
# In your template
<%= PhoenixMTM.Helpers.collection_checkboxes f, :tags,
Enum.map(@tags, &({&1.name, &1.id})),
selected: Enum.map(f.data.tags, &(&1.id)),
mapper: &CustomMappers.bootstrap/6
"""
def collection_checkboxes(form, field, collection, opts \\ []) do
  # The "[]" suffix makes browsers submit every checked value as a list.
  name = field_name(form, field) <> "[]"
  selected = Keyword.get(opts, :selected, [])
  input_opts = Keyword.get(opts, :input_opts, [])
  label_opts = Keyword.get(opts, :label_opts, [])
  mapper = Keyword.get(opts, :mapper, &PhoenixMTM.Mappers.unwrapped/6)
  wrapper = Keyword.get(opts, :wrapper, &(&1))

  # `nested: true` is deprecated in favor of the nested mapper; warn (with a
  # stacktrace pointing at the caller) but keep the old behavior working.
  mapper =
    if {:nested, true} in opts do
      IO.write :stderr, """
      warning: using nested option is deprecated. Use nested mapper instead.
      #{Exception.format_stacktrace}
      """

      &PhoenixMTM.Mappers.nested/6
    else
      mapper
    end

  inputs =
    Enum.map(collection, fn {label_content, value} ->
      # Unique DOM id per checkbox: "<field id>_<value>".
      id = field_id(form, field) <> "_#{value}"

      input_opts =
        input_opts
        |> Keyword.put(:type, "checkbox")
        |> Keyword.put(:id, id)
        |> Keyword.put(:name, name)
        |> Keyword.put(:value, "#{value}")
        |> put_selected(selected, value)

      label_opts = label_opts ++ [for: id]

      mapper.(form, field, input_opts, label_content, label_opts, opts)
      |> wrapper.()
    end)

  # The trailing hidden input guarantees the param is submitted (as "") even
  # when nothing is checked, so the association can be cleared server-side.
  html_escape(
    inputs ++
      hidden_input(form, field, [name: name, value: ""])
  )
end
# Adds `checked: true` to the input attributes when `value` is one of the
# pre-selected options; otherwise returns the attributes untouched.
defp put_selected(opts, selected, value) do
  if value in selected do
    Keyword.put(opts, :checked, true)
  else
    opts
  end
end
end
| 31.687023 | 85 | 0.619851 |
ff4078be9b8ecb3eb29f6d71dae21ef6ccfcd725 | 7,256 | exs | Elixir | lib/eex/test/eex/tokenizer_test.exs | esparta/elixir | 94fa4e5d19426f234bf20465a539083effd213d8 | [
"Apache-2.0"
] | null | null | null | lib/eex/test/eex/tokenizer_test.exs | esparta/elixir | 94fa4e5d19426f234bf20465a539083effd213d8 | [
"Apache-2.0"
] | null | null | null | lib/eex/test/eex/tokenizer_test.exs | esparta/elixir | 94fa4e5d19426f234bf20465a539083effd213d8 | [
"Apache-2.0"
] | null | null | null | Code.require_file("../test_helper.exs", __DIR__)
defmodule EEx.TokenizerTest do
  use ExUnit.Case, async: true
  require EEx.Tokenizer, as: T

  # Shared tokenizer options: column 0 indentation, whitespace trimming off.
  # Individual tests override `trim` via %{@opts | trim: true}.
  @opts %{indentation: 0, trim: false}

  test "simple chars lists" do
    assert T.tokenize('foo', 1, 1, @opts) == {:ok, [{:text, 'foo'}, {:eof, 1, 4}]}
  end

  test "simple strings" do
    assert T.tokenize("foo", 1, 1, @opts) == {:ok, [{:text, 'foo'}, {:eof, 1, 4}]}
  end

  # Each <% ... %> marker kind ('', '=', '/', '|') is carried as the token's
  # third element along with its line/column.
  test "strings with embedded code" do
    assert T.tokenize('foo <% bar %>', 1, 1, @opts) ==
             {:ok, [{:text, 'foo '}, {:expr, 1, 5, '', ' bar '}, {:eof, 1, 14}]}
  end

  test "strings with embedded equals code" do
    assert T.tokenize('foo <%= bar %>', 1, 1, @opts) ==
             {:ok, [{:text, 'foo '}, {:expr, 1, 5, '=', ' bar '}, {:eof, 1, 15}]}
  end

  test "strings with embedded slash code" do
    assert T.tokenize('foo <%/ bar %>', 1, 1, @opts) ==
             {:ok, [{:text, 'foo '}, {:expr, 1, 5, '/', ' bar '}, {:eof, 1, 15}]}
  end

  test "strings with embedded pipe code" do
    assert T.tokenize('foo <%| bar %>', 1, 1, @opts) ==
             {:ok, [{:text, 'foo '}, {:expr, 1, 5, '|', ' bar '}, {:eof, 1, 15}]}
  end

  test "strings with more than one line" do
    assert T.tokenize('foo\n<%= bar %>', 1, 1, @opts) ==
             {:ok, [{:text, 'foo\n'}, {:expr, 2, 1, '=', ' bar '}, {:eof, 2, 11}]}
  end

  test "strings with more than one line and expression with more than one line" do
    string = '''
    foo <%= bar

    baz %>
    <% foo %>
    '''

    exprs = [
      {:text, 'foo '},
      {:expr, 1, 5, '=', ' bar\n\nbaz '},
      {:text, '\n'},
      {:expr, 4, 1, '', ' foo '},
      {:text, '\n'},
      {:eof, 5, 1}
    ]

    assert T.tokenize(string, 1, 1, @opts) == {:ok, exprs}
  end

  # <%% escapes the marker: it is emitted as literal "<%" text.
  test "quotation" do
    assert T.tokenize('foo <%% true %>', 1, 1, @opts) ==
             {:ok, [{:text, 'foo <% true %>'}, {:eof, 1, 16}]}
  end

  test "quotation with do/end" do
    assert T.tokenize('foo <%% true do %>bar<%% end %>', 1, 1, @opts) ==
             {:ok, [{:text, 'foo <% true do %>bar<% end %>'}, {:eof, 1, 32}]}
  end

  test "quotation with interpolation" do
    exprs = [
      {:text, 'a <% b '},
      {:expr, 1, 9, '=', ' c '},
      {:text, ' '},
      {:expr, 1, 18, '=', ' d '},
      {:text, ' e %> f'},
      {:eof, 1, 33}
    ]

    assert T.tokenize('a <%% b <%= c %> <%= d %> e %> f', 1, 1, @opts) == {:ok, exprs}
  end

  test "improperly formatted quotation with interpolation" do
    exprs = [
      {:text, '<%% a <%= b %> c %>'},
      {:eof, 1, 22}
    ]

    assert T.tokenize('<%%% a <%%= b %> c %>', 1, 1, @opts) == {:ok, exprs}
  end

  # <%# ... %> is an EEx comment: it produces no token at all.
  test "eex comments" do
    exprs = [
      {:text, 'foo '},
      {:eof, 1, 16}
    ]

    assert T.tokenize('foo <%# true %>', 1, 1, @opts) == {:ok, exprs}
  end

  test "eex comments with do/end" do
    exprs = [
      {:text, 'foo bar'},
      {:eof, 1, 32}
    ]

    assert T.tokenize('foo <%# true do %>bar<%# end %>', 1, 1, @opts) == {:ok, exprs}
  end

  # Elixir-level `#` comments inside markers are kept as part of the
  # expression source and must not confuse do/end block detection.
  test "elixir comments" do
    exprs = [
      {:text, 'foo '},
      {:expr, 1, 5, [], ' true # this is a boolean '},
      {:eof, 1, 35}
    ]

    assert T.tokenize('foo <% true # this is a boolean %>', 1, 1, @opts) == {:ok, exprs}
  end

  test "elixir comments with do/end" do
    exprs = [
      {:start_expr, 1, 1, [], ' if true do # startif '},
      {:text, 'text'},
      {:end_expr, 1, 31, [], ' end # closeif '},
      {:eof, 1, 50}
    ]

    assert T.tokenize('<% if true do # startif %>text<% end # closeif %>', 1, 1, @opts) ==
             {:ok, exprs}
  end

  # do/end, ->, else etc. split markers into start/middle/end expression
  # tokens so the compiler can rebuild the block structure.
  test "strings with embedded do end" do
    exprs = [
      {:text, 'foo '},
      {:start_expr, 1, 5, '', ' if true do '},
      {:text, 'bar'},
      {:end_expr, 1, 24, '', ' end '},
      {:eof, 1, 33}
    ]

    assert T.tokenize('foo <% if true do %>bar<% end %>', 1, 1, @opts) == {:ok, exprs}
  end

  test "strings with embedded -> end" do
    exprs = [
      {:text, 'foo '},
      {:start_expr, 1, 5, '', ' cond do '},
      {:middle_expr, 1, 18, '', ' false -> '},
      {:text, 'bar'},
      {:middle_expr, 1, 35, '', ' true -> '},
      {:text, 'baz'},
      {:end_expr, 1, 51, '', ' end '},
      {:eof, 1, 60}
    ]

    assert T.tokenize('foo <% cond do %><% false -> %>bar<% true -> %>baz<% end %>', 1, 1, @opts) ==
             {:ok, exprs}
  end

  test "strings with multiple callbacks" do
    exprs = [
      {:start_expr, 1, 1, '=', ' a fn -> '},
      {:text, 'foo'},
      {:middle_expr, 1, 18, '', ' end, fn -> '},
      {:text, 'bar'},
      {:end_expr, 1, 37, '', ' end '},
      {:eof, 1, 46}
    ]

    assert T.tokenize('<%= a fn -> %>foo<% end, fn -> %>bar<% end %>', 1, 1, @opts) ==
             {:ok, exprs}
  end

  test "strings with callback followed by do block" do
    exprs = [
      {:start_expr, 1, 1, '=', ' a fn -> '},
      {:text, 'foo'},
      {:middle_expr, 1, 18, '', ' end do '},
      {:text, 'bar'},
      {:end_expr, 1, 33, '', ' end '},
      {:eof, 1, 42}
    ]

    assert T.tokenize('<%= a fn -> %>foo<% end do %>bar<% end %>', 1, 1, @opts) == {:ok, exprs}
  end

  test "strings with embedded keywords blocks" do
    exprs = [
      {:text, 'foo '},
      {:start_expr, 1, 5, '', ' if true do '},
      {:text, 'bar'},
      {:middle_expr, 1, 24, '', ' else '},
      {:text, 'baz'},
      {:end_expr, 1, 37, '', ' end '},
      {:eof, 1, 46}
    ]

    assert T.tokenize('foo <% if true do %>bar<% else %>baz<% end %>', 1, 1, @opts) ==
             {:ok, exprs}
  end

  # trim: true drops whitespace-only text around markers and collapses the
  # surrounding newlines.
  test "trim mode" do
    template = '\t<%= if true do %> \n TRUE \n <% else %>\n FALSE \n <% end %> \n\n '

    exprs = [
      {:start_expr, 1, 2, '=', ' if true do '},
      {:text, '\n TRUE \n'},
      {:middle_expr, 3, 3, '', ' else '},
      {:text, '\n FALSE \n'},
      {:end_expr, 5, 3, '', ' end '},
      {:eof, 7, 3}
    ]

    assert T.tokenize(template, 1, 1, %{@opts | trim: true}) == {:ok, exprs}
  end

  test "trim mode with comment" do
    exprs = [
      {:text, '\n123'},
      {:eof, 2, 4}
    ]

    assert T.tokenize(' <%# comment %> \n123', 1, 1, %{@opts | trim: true}) == {:ok, exprs}
  end

  test "trim mode with CRLF" do
    exprs = [
      {:text, '0\n'},
      {:expr, 2, 3, '=', ' 12 '},
      {:text, '\n34'},
      {:eof, 3, 3}
    ]

    assert T.tokenize('0\r\n <%= 12 %> \r\n34', 1, 1, %{@opts | trim: true}) == {:ok, exprs}
  end

  test "trim mode set to false" do
    exprs = [
      {:text, ' '},
      {:expr, 1, 2, '=', ' 12 '},
      {:text, ' \n'},
      {:eof, 2, 1}
    ]

    assert T.tokenize(' <%= 12 %> \n', 1, 1, %{@opts | trim: false}) == {:ok, exprs}
  end

  test "trim mode no false positives" do
    # Inputs whose whitespace is NOT around a trimmable marker must tokenize
    # identically with trim on and off.
    assert_not_trimmed = fn x ->
      assert T.tokenize(x, 1, 1, %{@opts | trim: false}) == T.tokenize(x, 1, 1, @opts)
    end

    assert_not_trimmed.('foo <%= "bar" %> ')
    assert_not_trimmed.('\n <%= "foo" %>bar')
    assert_not_trimmed.(' <%% hello %> ')
    assert_not_trimmed.(' <%= 01 %><%= 23 %>\n')
  end

  test "raise syntax error when there is start mark and no end mark" do
    assert T.tokenize('foo <% :bar', 1, 1, @opts) == {:error, 1, 12, "missing token '%>'"}
    assert T.tokenize('<%# true ', 1, 1, @opts) == {:error, 1, 10, "missing token '%>'"}
  end
end
| 27.278195 | 100 | 0.455485 |
ff4093895cb0cd9ee85c47ca544d2c99e9882459 | 855 | exs | Elixir | mix.exs | mononym/uber_multi | 4ebe081dcfd8fe331656e1a6328ace7e4f55bc29 | [
"MIT"
] | 2 | 2020-01-13T19:50:43.000Z | 2022-02-09T10:01:43.000Z | mix.exs | mononym/uber_multi | 4ebe081dcfd8fe331656e1a6328ace7e4f55bc29 | [
"MIT"
] | null | null | null | mix.exs | mononym/uber_multi | 4ebe081dcfd8fe331656e1a6328ace7e4f55bc29 | [
"MIT"
] | null | null | null | defmodule UberMulti.MixProject do
use Mix.Project
# Mix project configuration; package metadata comes from the helpers below.
def project do
  [
    app: :uber_multi,
    deps: deps(),
    description: description(),
    package: package(),
    elixir: "~> 1.6",
    start_permanent: Mix.env() == :prod,
    version: "1.0.1"
  ]
end
# Run "mix help compile.app" to learn about applications.
def application do
  [
    extra_applications: [:logger]
  ]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
  [
    {:ecto, "> 2.0.0"},
    {:ex_doc, ">= 0.0.0", only: :dev, runtime: false}
  ]
end
# Hex package description.
defp description() do
  "A helper for 'Ecto.Multi.run/3' that facilitates calling functions not written for Ecto.Multi."
end
# Hex package metadata.
defp package() do
  [
    licenses: ["MIT"],
    links: %{"GitHub" => "https://github.com/mononym/uber_multi"}
  ]
end
end
| 20.357143 | 100 | 0.583626 |
ff40ea9f982cfa2ffd7ff25bd8f7cc4311040971 | 920 | ex | Elixir | lib/airbrakex/plug.ex | flatiron-labs/airbrakex | 1aae8afbd877b9b9adfd26a192f824ff0c1624c9 | [
"MIT"
] | null | null | null | lib/airbrakex/plug.ex | flatiron-labs/airbrakex | 1aae8afbd877b9b9adfd26a192f824ff0c1624c9 | [
"MIT"
] | null | null | null | lib/airbrakex/plug.ex | flatiron-labs/airbrakex | 1aae8afbd877b9b9adfd26a192f824ff0c1624c9 | [
"MIT"
] | null | null | null | defmodule Airbrakex.Plug do
@moduledoc """
You can plug `Airbrakex.Plug` in your web application Plug stack
to send all exception to `airbrake`
```elixir
defmodule YourApp.Router do
use Phoenix.Router
use Airbrakex.Plug
# ...
end
```
"""
alias Airbrakex.{ExceptionParser, Notifier}
# Registers the __before_compile__ hook so `call/2` can be wrapped after the
# host module (e.g. a Phoenix router) has defined its own version.
defmacro __using__(_env) do
  quote location: :keep do
    @before_compile Airbrakex.Plug
  end
end
# Wraps the host plug's `call/2`: any exception raised while processing the
# connection is parsed and reported to Airbrake (with request params and the
# Plug session), then re-raised with its original stacktrace so the host
# app's normal error handling still applies.
defmacro __before_compile__(_env) do
  quote location: :keep do
    defoverridable call: 2

    def call(conn, opts) do
      try do
        super(conn, opts)
      rescue
        exception ->
          session = Map.get(conn.private, :plug_session)
          error = ExceptionParser.parse(exception)
          # Notification failures are intentionally ignored (best effort).
          _ = Notifier.notify(error, params: conn.params, session: session)

          # `System.stacktrace/0` is deprecated; `__STACKTRACE__` is the
          # stacktrace of the exception being rescued (Elixir >= 1.7).
          reraise exception, __STACKTRACE__
      end
    end
  end
end
end
| 20.444444 | 77 | 0.627174 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.