hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
03fdbe606f77293d9c976222fe13693b899b576e | 7,118 | ex | Elixir | day24/lib/day24.ex | the-shank/advent-of-code-2018 | 3be3958adad61e62e8a7ea6ec6a868f049c7a7e4 | [
"Apache-2.0"
] | 7 | 2018-12-12T01:08:24.000Z | 2019-12-09T19:50:36.000Z | day24/lib/day24.ex | bjorng/advent-of-code-2018 | 5dd312b7473d7f2fe12f0de1fd771c3ee1931b97 | [
"Apache-2.0"
] | null | null | null | day24/lib/day24.ex | bjorng/advent-of-code-2018 | 5dd312b7473d7f2fe12f0de1fd771c3ee1931b97 | [
"Apache-2.0"
] | null | null | null | defmodule Day24 do
@doc """
Solves part 1: fights the battle described by `lines` (optionally with a
damage `boost` for the immune system) and returns the outcome of `cycle/1`.
"""
def part1(lines, boost \\ 0) do
  lines
  |> parse()
  |> boost(boost)
  |> cycle()
end
@doc """
Solves part 2: finds the smallest immune-system boost for which the immune
system wins, returning `{surviving_units, boost}`.
"""
def part2(lines) do
  groups = parse(lines)

  # Try boost = 1, 2, 3, ... until the immune system wins; infection
  # victories and stalemates are not good enough.
  Stream.iterate(1, &(&1 + 1))
  |> Enum.find_value(fn boost ->
    case cycle(boost(groups, boost)) do
      {:immune_system, units} -> {units, boost}
      {:infection, _units} -> nil
      {:stalemate, {_imm_units, _inf_units}} -> nil
    end
  end)
end
# Adds `amount` to the attack damage of every immune-system group;
# infection groups are passed through untouched.
defp boost(groups, amount) do
  Map.new(groups, fn
    {id, %{side: :immune_system, damage: damage} = group} ->
      {id, %{group | damage: damage + amount}}

    {id, group} ->
      {id, group}
  end)
end
# Runs one full fight round (target selection then attacks) and recurses
# until one army is wiped out, or until a round kills no units at all
# (a stalemate, which would otherwise loop forever).
defp cycle(groups) do
  #print_groups groups
  old_groups = groups
  # Groups choose targets in decreasing (effective power, initiative) order.
  selection_order = groups
  |> Enum.sort_by(&selection_order/1)
  |> Enum.reverse
  acc = {groups, []}
  {_, attack_order} = Enum.reduce(selection_order, acc, &select_target/2)
  # Attacks resolve in decreasing attacker initiative, independent of the
  # order in which the targets were chosen.
  attack_order =
    Enum.sort_by(attack_order, fn {attacker_id, _attacked_id} ->
      attacker = Map.fetch!(groups, attacker_id)
      - attacker.initiative
    end)
  groups = Enum.reduce(attack_order, groups, &attack/2)
  case count_units(groups) do
    {0, units} ->
      #print_groups groups
      {:infection, units}
    {units, 0} ->
      #print_groups groups
      {:immune_system, units}
    {imm_units, inf_units} ->
      # An unchanged map means no attacker can kill another whole unit,
      # so further rounds would repeat forever.
      if groups === old_groups do
        {:stalemate, {imm_units, inf_units}}
      else
        cycle(groups)
      end
  end
end
# Totals the surviving units per army, returned as {immune, infection}.
defp count_units(groups) do
  Enum.reduce(groups, {0, 0}, fn {_id, group}, {imm, inf} ->
    units = group.units

    case group.side do
      :immune_system -> {imm + units, inf}
      :infection -> {imm, inf + units}
    end
  end)
end
# Resolves one attack. Whole units are removed: the damage dealt is divided
# by the defender's per-unit hit points, and any remainder is wasted.
# A wiped-out defender is removed from the map entirely.
defp attack({attacker_id, attacked_id}, groups)
     when :erlang.is_map_key(attacker_id, groups) do
  attacker = Map.fetch!(groups, attacker_id)
  attacked = Map.fetch!(groups, attacked_id)
  survivors = attacked.units - div(damage(attacker, attacked), attacked.hit_points)

  if survivors > 0 do
    Map.put(groups, attacked_id, %{attacked | units: survivors})
  else
    Map.delete(groups, attacked_id)
  end
end

# The attacker was already wiped out earlier this round; nothing happens.
defp attack({_, _}, groups), do: groups
# Chooses a target for `group` among the still-available enemy groups.
# `avail` maps id => group for groups not yet chosen as a target; `chosen`
# accumulates {attacker_id, target_id} pairs. A group that would deal zero
# damage to every candidate (or has no candidates) selects no target;
# the {{0, nil, nil}, nil} sentinel encodes that case.
defp select_target({id, group}, {avail, chosen}) do
  my_side = group.side
  attacked = Enum.reject(avail, fn {_, %{side: side}} ->
    side == my_side
  end)
  |> Enum.map(fn {_, attacked} ->
    target_selection_order(group, attacked)
  end)
  # Best target: highest damage, then effective power, then initiative.
  # The value carried alongside the ordering tuple is the target's id.
  |> Enum.max_by(fn {order, _} -> order end,
    fn -> {{0, nil, nil}, nil} end)
  |> (fn
    {{0, _, _}, _} -> nil
    {{_, _, _}, attacked} -> attacked
  end).()
  case attacked do
    nil ->
      {avail, chosen}
    _ ->
      # Each group can be targeted by at most one attacker per round.
      avail = Map.delete(avail, attacked)
      {avail, [{id, attacked} | chosen]}
  end
end
# Ranks `attacked` as a target for `attacker`: by damage dealt, then the
# target's effective power, then its initiative. Returns {order, target_id}.
defp target_selection_order(attacker, attacked) do
  {{damage(attacker, attacked), effective_power(attacked), attacked.initiative},
   attacked.id}
end
# Sort key for target-selection order: effective power, then initiative.
defp selection_order({id, group}),
  do: {{effective_power(group), group.initiative}, id}
# Damage `attacker` would deal to `attacked`: an immunity nullifies it,
# a weakness doubles it, otherwise it equals the attacker's effective power.
defp damage(%{weapon: weapon} = attacker, %{immunities: immunities, weaknesses: weaknesses}) do
  power = effective_power(attacker)

  cond do
    weapon in immunities -> 0
    weapon in weaknesses -> power * 2
    true -> power
  end
end
# A group's effective power: number of units times per-unit damage.
defp effective_power(%{damage: damage, units: units}), do: units * damage
# Parses the raw input lines into a map of id => group. Ids are
# {side, ordinal} tuples; ordinals count from 1 separately for each side,
# in input order.
defp parse(lines) do
  parse(lines, nil)
  |> Enum.map_reduce({1, 1}, fn group, {imm, inf} ->
    case group.side do
      :immune_system ->
        id = {:immune_system, imm}
        group = Map.put(group, :id, id)
        {{id, group}, {imm + 1, inf}}
      :infection ->
        id = {:infection, inf}
        group = Map.put(group, :id, id)
        {{id, group}, {imm, inf + 1}}
    end
  end)
  |> (fn {groups, {_, _}} -> groups end).()
  |> Map.new
end
# Walks the input lines, tagging each parsed group with the army it
# belongs to. The "Immune System:"/"Infection:" header lines switch the
# side applied to all following lines.
defp parse(["Immune System:" | rest], _side), do: parse(rest, :immune_system)
defp parse(["Infection:" | rest], _side), do: parse(rest, :infection)

defp parse([line | rest], side) do
  [line |> parse_group() |> Map.put(:side, side) | parse(rest, side)]
end

defp parse([], _side), do: []
# Parses one group line, e.g.
#   "17 units each with 5390 hit points (weak to radiation) with an
#    attack that does 4507 fire damage at initiative 2"
# The binary prefix matches are strict: any deviation from the expected
# format crashes with a MatchError, which is acceptable for puzzle input.
defp parse_group(line) do
  {units, line} = Integer.parse(line)
  <<" units each with ", line::binary>> = line
  {hit_points, line} = Integer.parse(line)
  <<" hit points ", line::binary>> = line
  # The parenthesised "(immune to ...; weak to ...)" section is optional.
  {imm_weak, line} = parse_imm_weak(line)
  <<"with an attack that does ", line::binary>> = line
  {damage, line} = Integer.parse(line)
  <<" ", line::binary>> = line
  {weapon, line} = parse_damage(line)
  <<" damage at initiative ", line::binary>> = line
  {initiative, ""} = Integer.parse(line)
  group = Map.new([units: units, hit_points: hit_points,
                   damage: damage, weapon: weapon,
                   initiative: initiative,
                   immunities: [], weaknesses: []])
  # Merging overrides the empty immunity/weakness defaults when present.
  Map.merge(group, imm_weak)
end
# Parses the optional "(immune to a, b; weak to c) " section into a map
# that may contain :immunities and/or :weaknesses. Returns {map, rest}.
defp parse_imm_weak(<<"(", line::binary>>) do
  parse_imm_weak(line, %{})
end
# No opening parenthesis: the section is absent.
defp parse_imm_weak(line), do: {%{}, line}
defp parse_imm_weak(<<"immune to ", line::binary>>, acc) do
  {list, line} = parse_list(line, [])
  acc = Map.put(acc, :immunities, list)
  parse_imm_weak(line, acc)
end
defp parse_imm_weak(<<"weak to ", line::binary>>, acc) do
  {list, line} = parse_list(line, [])
  acc = Map.put(acc, :weaknesses, list)
  parse_imm_weak(line, acc)
end
# "; " separates the immune and weak sub-sections.
defp parse_imm_weak(<<"; ", line::binary>>, acc) do
  parse_imm_weak(line, acc)
end
# ") " (with trailing space) closes the section.
defp parse_imm_weak(<<") ", line::binary>>, acc) do
  {acc, line}
end
# Parses a comma-separated list of damage types, returning the sorted list
# of atoms and the unparsed remainder of the line.
defp parse_list(line, acc) do
  {damage_type, rest} = parse_damage(line)

  case rest do
    <<", ", more::binary>> -> parse_list(more, [damage_type | acc])
    _ -> {Enum.sort([damage_type | acc]), rest}
  end
end
# Maps a damage-type prefix of `line` to its atom tag, returning the atom
# and the rest of the line. An unknown damage type raises CaseClauseError,
# which is intentional: the puzzle input only uses these five types.
defp parse_damage(line) do
  case line do
    <<"bludgeoning", line::binary>> ->
      {:bludgeoning, line}
    <<"cold", line::binary>> ->
      {:cold, line}
    <<"fire", line::binary>> ->
      {:fire, line}
    <<"radiation", line::binary>> ->
      {:radiation, line}
    <<"slashing", line::binary>> ->
      {:slashing, line}
  end
end
@doc """
Debug helper: prints a one-line summary (side, ordinal and surviving
units) for every group, sorted by id.
"""
def print_groups(groups) do
  # The original `IO.puts groups |> ...` relied on a no-parens call
  # swallowing the whole pipeline (parsed as `IO.puts(groups |> ...)`),
  # which is ambiguous and warning-prone; this form is explicit.
  groups
  |> Enum.sort()
  |> Enum.map(&print_group/1)
  |> IO.puts()
end

# Formats one {id, group} entry as iodata; the id is a {side, ordinal} tuple.
defp print_group({{side, id}, group}) do
  :io_lib.format("~p group ~p contains ~p units\n",
    [side, id, group.units])
end
end
| 26.362963 | 87 | 0.570806 |
03fded0e257d03870b2a2b0f85f728c6a6e4ffd1 | 1,714 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/creative_deal_ids_deal_statuses.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/creative_deal_ids_deal_statuses.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/creative_deal_ids_deal_statuses.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdExchangeBuyer.V14.Model.CreativeDealIdsDealStatuses do
  @moduledoc """
  ## Attributes

  *   `arcStatus` (*type:* `String.t`, *default:* `nil`) - ARC approval status.
  *   `dealId` (*type:* `String.t`, *default:* `nil`) - External deal ID.
  *   `webPropertyId` (*type:* `integer()`, *default:* `nil`) - Publisher ID.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :arcStatus => String.t() | nil,
          :dealId => String.t() | nil,
          :webPropertyId => integer() | nil
        }

  # field/1 is a GoogleApi.Gax.ModelBase macro declaring each JSON attribute
  # carried by this generated model struct.
  field(:arcStatus)
  field(:dealId)
  field(:webPropertyId)
end
# Poison protocol implementations delegating to the ModelBase-backed
# decode/encode helpers for this generated model.
defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V14.Model.CreativeDealIdsDealStatuses do
  def decode(value, options) do
    GoogleApi.AdExchangeBuyer.V14.Model.CreativeDealIdsDealStatuses.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V14.Model.CreativeDealIdsDealStatuses do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 32.339623 | 95 | 0.716453 |
03fe57b413e08903b3e177f216866fdbb7e758db | 1,899 | ex | Elixir | farmbot_core/lib/farmbot_core/ecto_migrator.ex | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | 1 | 2021-08-23T13:36:14.000Z | 2021-08-23T13:36:14.000Z | farmbot_core/lib/farmbot_core/ecto_migrator.ex | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | null | null | null | farmbot_core/lib/farmbot_core/ecto_migrator.ex | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | null | null | null | defmodule FarmbotCore.EctoMigrator do
def child_spec(_opts) do
%{
id: __MODULE__,
start: {__MODULE__, :migrate, []},
type: :worker,
restart: :transient,
shutdown: 500
}
end
@doc "Replacement for Mix.Tasks.Ecto.Migrate"
def migrate do
repos = Application.get_env(:farmbot_core, :ecto_repos)
for repo <- repos, do: migrate(repo)
:ignore
end
def migrate(FarmbotCore.Asset.Repo) do
migrate(FarmbotCore.Asset.Repo, Path.join([:code.priv_dir(:farmbot_core), "asset", "migrations"]))
end
def migrate(FarmbotCore.Logger.Repo) do
migrate(FarmbotCore.Logger.Repo, Path.join([:code.priv_dir(:farmbot_core), "logger", "migrations"]))
end
def migrate(FarmbotCore.Config.Repo) do
migrate(FarmbotCore.Config.Repo, Path.join([:code.priv_dir(:farmbot_core), "config", "migrations"]))
end
def migrate(repo, migrations_path) do
opts = [all: true]
{:ok, pid, apps} = Mix.Ecto.ensure_started(repo, opts)
migrator = &Ecto.Migrator.run/4
# HERE:
migrated = migrator.(repo, migrations_path, :up, opts)
pid && repo.stop(pid)
restart_apps_if_migrated(apps, migrated)
Process.sleep(500)
end
# Pulled this out of Ecto because Ecto's version
# messes with Logger config
def restart_apps_if_migrated(_, []), do: :ok
def restart_apps_if_migrated(apps, [_|_]) do
for app <- Enum.reverse(apps) do
Application.stop(app)
end
for app <- apps do
Application.ensure_all_started(app)
end
:ok
end
@doc "Replacement for Mix.Tasks.Ecto.Drop"
def drop do
repos = Application.get_env(:farmbot_core, :ecto_repos)
for repo <- repos do
case drop(repo) do
:ok -> :ok
{:error, :already_down} -> :ok
{:error, reason} -> raise reason
end
end
end
def drop(repo) do
repo.__adapter__.storage_down(repo.config)
end
end
| 25.662162 | 104 | 0.659821 |
03fe77f4ab87d5bae0102b3cfd7470616f0c5866 | 4,153 | ex | Elixir | lib/pbkdf2/base.ex | smt116/pbkdf2_elixir | 9d654afeb44b994c955c884b79b569815524a978 | [
"BSD-3-Clause"
] | 47 | 2017-07-11T03:38:16.000Z | 2022-03-14T19:44:20.000Z | lib/pbkdf2/base.ex | smt116/pbkdf2_elixir | 9d654afeb44b994c955c884b79b569815524a978 | [
"BSD-3-Clause"
] | 17 | 2017-08-30T03:29:19.000Z | 2022-03-24T16:51:30.000Z | lib/pbkdf2/base.ex | smt116/pbkdf2_elixir | 9d654afeb44b994c955c884b79b569815524a978 | [
"BSD-3-Clause"
] | 11 | 2017-12-20T00:15:50.000Z | 2021-12-25T03:08:55.000Z | defmodule Pbkdf2.Base do
@moduledoc """
Base module for the Pbkdf2 password hashing library.
"""
use Bitwise
alias Pbkdf2.{Base64, Tools}
@max_length bsl(1, 32) - 1
@deprecated "Use Pbkdf2.gen_salt/1 (with `format: :django`) instead"
def django_salt(len) do
Tools.get_random_string(len)
end
@doc """
Hash a password using Pbkdf2.
## Options
There are four options (rounds can be used to override the value
in the config):
* `:rounds` - the number of rounds
* the amount of computation, given in number of iterations
* the default is 160_000
* this can also be set in the config file
* `:format` - the output format of the hash
* the default is `:modular` - modular crypt format
* the other available formats are:
* `:django` - the format used in django applications
* `:hex` - the hash is encoded in hexadecimal
* `:digest` - the sha algorithm that pbkdf2 will use
* the default is sha512
* `:length` - the length, in bytes, of the hash
* the default is 64 for sha512 and 32 for sha256
"""
@spec hash_password(binary, binary, keyword) :: binary
def hash_password(password, salt, opts \\ []) do
Tools.check_salt_length(byte_size(salt))
{rounds, output_fmt, {digest, length}} = get_opts(opts)
if length > @max_length do
raise ArgumentError, "length must be equal to or less than #{@max_length}"
end
password
|> create_hash(salt, digest, rounds, length)
|> format(salt, digest, rounds, output_fmt)
end
@doc """
Verify a password by comparing it with the stored Pbkdf2 hash.
"""
@spec verify_pass(binary, binary, binary, atom, binary, atom) :: boolean
def verify_pass(password, hash, salt, digest, rounds, output_fmt) do
{salt, length} =
case output_fmt do
:modular -> {Base64.decode(salt), byte_size(Base64.decode(hash))}
:django -> {salt, byte_size(Base.decode64!(hash))}
:hex -> {salt, byte_size(Base.decode16!(hash, case: :lower))}
end
password
|> create_hash(salt, digest, String.to_integer(rounds), length)
|> encode(output_fmt)
|> Tools.secure_check(hash)
end
defp get_opts(opts) do
{
Keyword.get(opts, :rounds, Application.get_env(:pbkdf2_elixir, :rounds, 160_000)),
Keyword.get(opts, :format, :modular),
case opts[:digest] do
:sha256 -> {:sha256, opts[:length] || 32}
_ -> {:sha512, opts[:length] || 64}
end
}
end
defp create_hash(password, salt, digest, rounds, length) do
digest
|> hmac_fun(password)
|> do_create_hash(salt, rounds, length, 1, [], 0)
end
defp do_create_hash(_fun, _salt, _rounds, dklen, _block_index, acc, length)
when length >= dklen do
key = acc |> Enum.reverse() |> IO.iodata_to_binary()
<<bin::binary-size(dklen), _::binary>> = key
bin
end
defp do_create_hash(fun, salt, rounds, dklen, block_index, acc, length) do
initial = fun.(<<salt::binary, block_index::integer-size(32)>>)
block = iterate(fun, rounds - 1, initial, initial)
do_create_hash(
fun,
salt,
rounds,
dklen,
block_index + 1,
[block | acc],
byte_size(block) + length
)
end
defp iterate(_fun, 0, _prev, acc), do: acc
defp iterate(fun, round, prev, acc) do
next = fun.(prev)
iterate(fun, round - 1, next, :crypto.exor(next, acc))
end
defp format(hash, salt, digest, rounds, :modular) do
"$pbkdf2-#{digest}$#{rounds}$#{Base64.encode(salt)}$#{Base64.encode(hash)}"
end
defp format(hash, salt, digest, rounds, :django) do
"pbkdf2_#{digest}$#{rounds}$#{salt}$#{Base.encode64(hash)}"
end
defp format(hash, _salt, _digest, _rounds, :hex), do: Base.encode16(hash, case: :lower)
defp encode(hash, :modular), do: Base64.encode(hash)
defp encode(hash, :django), do: Base.encode64(hash)
defp encode(hash, :hex), do: Base.encode16(hash, case: :lower)
if System.otp_release() >= "22" do
defp hmac_fun(digest, key), do: &:crypto.mac(:hmac, digest, key, &1)
else
defp hmac_fun(digest, key), do: &:crypto.hmac(digest, key, &1)
end
end
| 30.313869 | 89 | 0.642668 |
03fe9dd6fb6112a0c18d71f3550fb58d8e60a439 | 238 | exs | Elixir | priv/repo/migrations/20220505014509_user_loaded_player_state.exs | doughsay/ambry | c04e855bf06a6b00b8053c6eacb2eac14a56a37c | [
"MIT"
] | 12 | 2021-09-30T20:51:49.000Z | 2022-01-27T04:09:32.000Z | priv/repo/migrations/20220505014509_user_loaded_player_state.exs | doughsay/ambry | c04e855bf06a6b00b8053c6eacb2eac14a56a37c | [
"MIT"
] | 76 | 2021-10-01T05:45:11.000Z | 2022-03-28T04:12:39.000Z | priv/repo/migrations/20220505014509_user_loaded_player_state.exs | doughsay/ambry | c04e855bf06a6b00b8053c6eacb2eac14a56a37c | [
"MIT"
] | 2 | 2021-10-04T19:27:28.000Z | 2022-01-13T22:36:38.000Z | defmodule Ambry.Repo.Migrations.UserLoadedPlayerState do
use Ecto.Migration
def change do
  alter table(:users) do
    # Remembers which player state the user last had loaded in the player.
    # NOTE(review): `on_delete: :delete_all` deletes the *users* row when
    # the referenced player_states row is removed - `:nilify_all` looks
    # more likely intended here; confirm before relying on this migration.
    add :loaded_player_state_id, references(:player_states, on_delete: :delete_all), null: true
  end
end
end
| 23.8 | 97 | 0.752101 |
03feb66d950f549966a019163b5f8f07b381a676 | 6,484 | exs | Elixir | spec/invokers/http_invoker_spec.exs | raisebook/qbot | 15ef5b00bf6b39c99e3bb728fa78093bc55ca342 | [
"MIT"
] | 3 | 2017-10-13T03:49:18.000Z | 2018-09-02T21:57:17.000Z | spec/invokers/http_invoker_spec.exs | raisebook/qbot | 15ef5b00bf6b39c99e3bb728fa78093bc55ca342 | [
"MIT"
] | 2 | 2017-05-02T07:35:27.000Z | 2017-06-14T02:27:41.000Z | spec/invokers/http_invoker_spec.exs | raisebook/qbot | 15ef5b00bf6b39c99e3bb728fa78093bc55ca342 | [
"MIT"
] | null | null | null | defmodule QBot.Invoker.HttpSpec do
use ESpec, async: false
alias SqsService.Message
alias QBot.QueueConfig
alias QBot.Invoker.Http
require Logger
describe "HttpInvoker" do
# SQS Service delivers the body with string keys
let(payload: %{"some" => "data", "wrapped" => "here"})
let(
message: %Message{
body: %{
"metadata" => %{
"CorrelationUUID" => "12345-12345-12345-12345",
"NotForTheHeader" => "DiscardMe",
"Authorization" => "Bearer supers3cret",
"Callback" => "https://raisebook.dev/graphql"
},
"payload" => payload()
}
}
)
describe "invoke/2" do
let(config: %QueueConfig{target: "https://test.endpoint/"})
subject(do: Http.invoke!(message(), config()))
before(do: allow(HTTPoison |> to(accept(:post, fn target, _, _, _ -> mock_http_call(target) end))))
it "does an HTTP POST to the target" do
subject()
expect HTTPoison |> to(accepted(:post, :any, count: 1))
[{_, {HTTPoison, :post, [target, _, _, _]}, _}] = :meck.history(HTTPoison)
expect target |> to(eq("https://test.endpoint/"))
end
it "POSTs the message body" do
subject()
expect HTTPoison |> to(accepted(:post, :any, count: 1))
[{_, {HTTPoison, :post, [_, body, _, _]}, _}] = :meck.history(HTTPoison)
expect body |> to(eq(Http.post_body(message())))
end
it "sends the headers" do
subject()
expect HTTPoison |> to(accepted(:post, :any, count: 1))
[{_, {HTTPoison, :post, [_, _, headers, _]}, _}] = :meck.history(HTTPoison)
expect headers |> to(eq(Http.http_headers(message(), config())))
end
context "Connection Refused error" do
let(config: %QueueConfig{target: "econnrefused"})
it "returns a {:no_message, _} tuple" do
expect(subject()) |> to(eq({:no_message, nil}))
end
end
context "No such domain (misconfiguration)" do
let(config: %QueueConfig{target: "nxdomain"})
it "returns a {:no_message, _} tuple" do
expect(subject()) |> to(eq({:no_message, nil}))
end
end
context "2xx status codes" do
let(config: %QueueConfig{target: "204"})
it "treated as success" do
expect subject() |> to(be_ok_result())
end
end
context "Non-success status codes" do
let(config: %QueueConfig{target: "404"})
it "returns a {:no_message, _} tuple" do
expect(subject()) |> to(eq({:no_message, nil}))
end
end
end
describe "http_headers/2" do
let(config: %QueueConfig{target: "https://test.endpoint/"})
subject(do: Http.http_headers(message(), config()))
it "pulls the correlation id from metadata into X-Request-ID" do
expect subject() |> to(have_any(&match?({"X-Request-ID", "12345-12345-12345-12345"}, &1)))
end
it "passes through auth tokens" do
expect subject() |> to(have_any(&match?({"Authorization", "Bearer supers3cret"}, &1)))
end
it "passes through callback urls" do
expect subject() |> to(have_any(&match?({"X-Callback", "https://raisebook.dev/graphql"}, &1)))
end
context "with no metadata" do
let(message: %Message{body: "body payload"})
it "returns an empty hash" do
expect subject() |> to(eq(%{}))
end
end
context "encrypted metadata value" do
let(
message: %Message{
body: %{
"metadata" => %{"Authorization" => "Bearer decrypt(3NCRYP7ED==)"},
"payload" => payload()
}
}
)
it "decrypts the metadata" do
result = {:ok, %{"Plaintext" => "raisebook-decrypt3d" |> Base.encode64()}}
allow(ExAws |> to(accept(:request, fn _ -> result end)))
expect subject() |> to(have_any(&match?({"Authorization", "Bearer raisebook-decrypt3d"}, &1)))
end
end
context "with headers in config" do
let(config: %QueueConfig{target: "https://test.endpoint/", headers: %{"Authorization" => "Bearer raisebook"}})
let(
message: %Message{
body: payload()
}
)
it "passes through auth tokens from config" do
expect subject() |> to(have_any(&match?({"Authorization", "Bearer raisebook"}, &1)))
end
context "with encrypted headers in config, if they are wrapped with decrypt(...)" do
let(
config: %QueueConfig{
target: "https://test.endpoint/",
headers: %{"Authorization" => "Bearer decrypt(raisebook)"}
}
)
it "decrypts encrypted strings" do
result = {:ok, %{"Plaintext" => "raisebook-decrypted" |> Base.encode64()}}
allow(ExAws |> to(accept(:request, fn _ -> result end)))
expect subject() |> to(have_any(&match?({"Authorization", "Bearer raisebook-decrypted"}, &1)))
end
end
end
context "with header set in metadata and config" do
let(config: %QueueConfig{target: "https://test.endpoint/", headers: %{"Authorization" => "Overridden"}})
it "the metadata value takes priority" do
expect subject() |> to(have_any(&match?({"Authorization", "Bearer supers3cret"}, &1)))
expect subject() |> to_not(have_any(&match?({"Authorization", "Overridden"}, &1)))
end
end
end
describe "post_body/1" do
subject(do: Http.post_body(message()))
context "wrapped with the payload key" do
it "returns the unwrapped payload as JSON" do
expect subject() |> to(eq(~S|{"wrapped":"here","some":"data"}|))
end
end
context "bare payload" do
let(message: %Message{body: %{"body" => "payload"}})
it "returns the whole payload as JSON" do
expect subject() |> to(eq(~S|{"body":"payload"}|))
end
end
end
end
defp mock_http_call(target) do
case target do
"econnrefused" -> {:error, :econnrefused}
"nxdomain" -> {:error, %HTTPoison.Error{reason: :nxdomain}}
"204" -> {:ok, %HTTPoison.Response{status_code: 204, body: %{}}}
"404" -> {:ok, %HTTPoison.Response{status_code: 404, body: %{}}}
_ -> {:ok, %HTTPoison.Response{status_code: 200, body: "Hello world!"}}
end
end
end
| 32.258706 | 118 | 0.563078 |
03febe62dac1f4deb19c0004c2e46559e719cd82 | 1,588 | ex | Elixir | clients/you_tube_reporting/lib/google_api/you_tube_reporting/v1/model/gdata_download_parameters.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/you_tube_reporting/lib/google_api/you_tube_reporting/v1/model/gdata_download_parameters.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/you_tube_reporting/lib/google_api/you_tube_reporting/v1/model/gdata_download_parameters.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.YouTubeReporting.V1.Model.GdataDownloadParameters do
  @moduledoc """
  gdata

  ## Attributes

  *   `allowGzipCompression` (*type:* `boolean()`, *default:* `nil`) - gdata
  *   `ignoreRange` (*type:* `boolean()`, *default:* `nil`) - gdata
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :allowGzipCompression => boolean(),
          :ignoreRange => boolean()
        }

  # field/1 is a GoogleApi.Gax.ModelBase macro declaring each JSON
  # attribute carried by this generated model struct.
  field(:allowGzipCompression)
  field(:ignoreRange)
end
# Poison protocol implementations delegating to the ModelBase-backed
# decode/encode helpers for this generated model.
defimpl Poison.Decoder, for: GoogleApi.YouTubeReporting.V1.Model.GdataDownloadParameters do
  def decode(value, options) do
    GoogleApi.YouTubeReporting.V1.Model.GdataDownloadParameters.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.YouTubeReporting.V1.Model.GdataDownloadParameters do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 31.76 | 91 | 0.736146 |
03fee26ab03933c9afb2006bc2c115eaaf84890f | 3,431 | ex | Elixir | clients/you_tube_reporting/lib/google_api/you_tube_reporting/v1/api/media.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/you_tube_reporting/lib/google_api/you_tube_reporting/v1/api/media.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/you_tube_reporting/lib/google_api/you_tube_reporting/v1/api/media.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.YouTubeReporting.V1.Api.Media do
  @moduledoc """
  API calls for all endpoints tagged `Media`.
  """

  alias GoogleApi.YouTubeReporting.V1.Connection
  alias GoogleApi.Gax.{Request, Response}

  @doc """
  Method for media download. Download is supported on the URI `/v1/media/{+name}?alt=media`.

  ## Parameters

  - connection (GoogleApi.YouTubeReporting.V1.Connection): Connection to server
  - resource_name (String.t): Name of the media that is being downloaded.
  - opts (KeywordList): [optional] Optional parameters
    - :alt (String.t): Data format for response.
    - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :access_token (String.t): OAuth access token.
    - :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
    - :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    - :prettyPrint (boolean()): Returns response with indentations and line breaks.
    - :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
    - :fields (String.t): Selector specifying which fields to include in a partial response.
    - :callback (String.t): JSONP
    - :oauth_token (String.t): OAuth 2.0 token for the current user.
    - :$.xgafv (String.t): V1 error format.

  ## Returns

  {:ok, %GoogleApi.YouTubeReporting.V1.Model.GdataMedia{}} on success
  {:error, info} on failure
  """
  @spec youtubereporting_media_download(Tesla.Env.client(), String.t(), keyword()) ::
          {:ok, GoogleApi.YouTubeReporting.V1.Model.GdataMedia.t()} | {:error, Tesla.Env.t()}
  def youtubereporting_media_download(connection, resource_name, opts \\ []) do
    # Maps each supported option to where it goes in the request (all query).
    optional_params = %{
      :alt => :query,
      :key => :query,
      :access_token => :query,
      :upload_protocol => :query,
      :quotaUser => :query,
      :prettyPrint => :query,
      :uploadType => :query,
      :fields => :query,
      :callback => :query,
      :oauth_token => :query,
      :"$.xgafv" => :query
    }

    # NOTE(review): URI.encode_www_form/1 encodes spaces as "+", which is
    # not standard percent-encoding for a URL path segment - confirm
    # resource names can never contain characters affected by this.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/v1/media/{+resourceName}", %{
        "resourceName" => URI.encode_www_form(resource_name)
      })
      |> Request.add_optional_params(optional_params, opts)

    connection
    |> Connection.execute(request)
    |> Response.decode(struct: %GoogleApi.YouTubeReporting.V1.Model.GdataMedia{})
  end
end
| 41.841463 | 179 | 0.693092 |
03ff02c2bc5a12bd7386979d8f91de90d5b89e8d | 188 | ex | Elixir | lib/surface_bulma/dropdown/menu.ex | justin-m-morgan/surface_bulma | c31faebc818c39d06250574b913096504bd6eeec | [
"MIT"
] | null | null | null | lib/surface_bulma/dropdown/menu.ex | justin-m-morgan/surface_bulma | c31faebc818c39d06250574b913096504bd6eeec | [
"MIT"
] | null | null | null | lib/surface_bulma/dropdown/menu.ex | justin-m-morgan/surface_bulma | c31faebc818c39d06250574b913096504bd6eeec | [
"MIT"
] | null | null | null | defmodule SurfaceBulma.Dropdown.Menu do
use Surface.Component
@moduledoc """
Container for the dropdown menu items
"""
# Renders the dropdown menu container.
# NOTE(review): the Surface template is empty, so this component currently
# renders nothing - a default `<#slot />` looks intended; confirm.
def render(assigns) do
  ~F"""
  """
end
end
| 12.533333 | 39 | 0.638298 |
03ff0f44c8414c63fff18bdae23359b10234b02c | 1,625 | ex | Elixir | lib/quick_polls/web/endpoint.ex | awochna/quick_polls | b599918b795728feda338e6a00f295a97faba08a | [
"MIT"
] | null | null | null | lib/quick_polls/web/endpoint.ex | awochna/quick_polls | b599918b795728feda338e6a00f295a97faba08a | [
"MIT"
] | 6 | 2017-04-10T05:01:04.000Z | 2017-04-21T06:02:03.000Z | lib/quick_polls/web/endpoint.ex | awochna/quick_polls | b599918b795728feda338e6a00f295a97faba08a | [
"MIT"
] | null | null | null | defmodule QuickPolls.Web.Endpoint do
use Phoenix.Endpoint, otp_app: :quick_polls

socket "/socket", QuickPolls.Web.UserSocket

# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phoenix.digest
# when deploying your static files in production.
plug Plug.Static,
  at: "/", from: :quick_polls, gzip: false,
  only: ~w(css fonts images js favicon.ico robots.txt)

# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
  socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
  plug Phoenix.LiveReloader
  plug Phoenix.CodeReloader
end

plug Plug.RequestId
plug Plug.Logger

plug Plug.Parsers,
  parsers: [:urlencoded, :multipart, :json],
  pass: ["*/*"],
  json_decoder: Poison

plug Plug.MethodOverride
plug Plug.Head

# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
plug Plug.Session,
  store: :cookie,
  key: "_quick_polls_key",
  signing_salt: "RKq20cD1"

# The router comes last so all of the plugs above run first.
plug QuickPolls.Web.Router
@doc """
Dynamically loads configuration from the system environment
on startup.

It receives the endpoint configuration from the config files
and must return the updated configuration.
"""
def load_from_system_env(config) do
  case System.get_env("PORT") do
    nil -> raise "expected the PORT environment variable to be set"
    port -> {:ok, Keyword.put(config, :http, [:inet6, port: port])}
  end
end
end
| 29.545455 | 93 | 0.72 |
03ff541e1ac483c31e376e8a038b2e04e9df463a | 7,895 | ex | Elixir | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_v1beta1_document_page.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_v1beta1_document_page.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/document_ai/lib/google_api/document_ai/v1beta3/model/google_cloud_documentai_v1beta1_document_page.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPage do
  @moduledoc """
  A page in a Document.

  ## Attributes

  *   `blocks` (*type:* `list(GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageBlock.t)`, *default:* `nil`) - A list of visually detected text blocks on the page. A block has a set of lines (collected into paragraphs) that have a common line-spacing and orientation.
  *   `detectedLanguages` (*type:* `list(GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageDetectedLanguage.t)`, *default:* `nil`) - A list of detected languages together with confidence.
  *   `dimension` (*type:* `GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageDimension.t`, *default:* `nil`) - Physical dimension of the page.
  *   `formFields` (*type:* `list(GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageFormField.t)`, *default:* `nil`) - A list of visually detected form fields on the page.
  *   `image` (*type:* `GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageImage.t`, *default:* `nil`) - Rendered image for this page. This image is preprocessed to remove any skew, rotation, and distortions such that the annotation bounding boxes can be upright and axis-aligned.
  *   `layout` (*type:* `GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageLayout.t`, *default:* `nil`) - Layout for the page.
  *   `lines` (*type:* `list(GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageLine.t)`, *default:* `nil`) - A list of visually detected text lines on the page. A collection of tokens that a human would perceive as a line.
  *   `pageNumber` (*type:* `integer()`, *default:* `nil`) - 1-based index for current Page in a parent Document. Useful when a page is taken out of a Document for individual processing.
  *   `paragraphs` (*type:* `list(GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageParagraph.t)`, *default:* `nil`) - A list of visually detected text paragraphs on the page. A collection of lines that a human would perceive as a paragraph.
  *   `tables` (*type:* `list(GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageTable.t)`, *default:* `nil`) - A list of visually detected tables on the page.
  *   `tokens` (*type:* `list(GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageToken.t)`, *default:* `nil`) - A list of visually detected tokens on the page.
  *   `transforms` (*type:* `list(GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageMatrix.t)`, *default:* `nil`) - Transformation matrices that were applied to the original document image to produce Page.image.
  *   `visualElements` (*type:* `list(GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageVisualElement.t)`, *default:* `nil`) - A list of detected non-text visual elements e.g. checkbox, signature etc. on the page.
  """

  # ModelBase supplies the `field/1,2` macro and the decode/encode machinery
  # used by the Poison protocol implementations below.
  use GoogleApi.Gax.ModelBase

  # Every attribute is optional (| nil); keys mirror the JSON field names.
  @type t :: %__MODULE__{
          :blocks =>
            list(
              GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageBlock.t()
            )
            | nil,
          :detectedLanguages =>
            list(
              GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageDetectedLanguage.t()
            )
            | nil,
          :dimension =>
            GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageDimension.t()
            | nil,
          :formFields =>
            list(
              GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageFormField.t()
            )
            | nil,
          :image =>
            GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageImage.t()
            | nil,
          :layout =>
            GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageLayout.t()
            | nil,
          :lines =>
            list(
              GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageLine.t()
            )
            | nil,
          :pageNumber => integer() | nil,
          :paragraphs =>
            list(
              GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageParagraph.t()
            )
            | nil,
          :tables =>
            list(
              GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageTable.t()
            )
            | nil,
          :tokens =>
            list(
              GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageToken.t()
            )
            | nil,
          :transforms =>
            list(
              GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageMatrix.t()
            )
            | nil,
          :visualElements =>
            list(
              GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageVisualElement.t()
            )
            | nil
        }

  # Field registrations: `as:` names the nested model module used when
  # decoding, and `type: :list` marks repeated (array) attributes.
  field(:blocks,
    as: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageBlock,
    type: :list
  )

  field(:detectedLanguages,
    as:
      GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageDetectedLanguage,
    type: :list
  )

  field(:dimension,
    as: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageDimension
  )

  field(:formFields,
    as: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageFormField,
    type: :list
  )

  field(:image,
    as: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageImage
  )

  field(:layout,
    as: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageLayout
  )

  field(:lines,
    as: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageLine,
    type: :list
  )

  # Plain integer field — no nested model.
  field(:pageNumber)

  field(:paragraphs,
    as: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageParagraph,
    type: :list
  )

  field(:tables,
    as: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageTable,
    type: :list
  )

  field(:tokens,
    as: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageToken,
    type: :list
  )

  field(:transforms,
    as: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageMatrix,
    type: :list
  )

  field(:visualElements,
    as: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPageVisualElement,
    type: :list
  )
end
defimpl Poison.Decoder,
  for: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPage do
  alias GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPage, as: Page

  # Delegate decoding to the model module's own decode/2 (from Gax ModelBase).
  def decode(value, options), do: Page.decode(value, options)
end
defimpl Poison.Encoder,
  for: GoogleApi.DocumentAI.V1beta3.Model.GoogleCloudDocumentaiV1beta1DocumentPage do
  # Delegate encoding to the shared Gax model base.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 44.857955 | 304 | 0.713996 |
03ff742344ece2179585cec5b56f49b52c80c8a7 | 539 | ex | Elixir | lib/nomad_client/model/join_response.ex | mrmstn/nomad_client | a586022e5eb4d166acba08b55b198ec079d4b118 | [
"Apache-2.0"
] | 8 | 2021-09-04T21:22:53.000Z | 2022-02-22T22:48:38.000Z | lib/nomad_client/model/join_response.ex | mrmstn/nomad_client | a586022e5eb4d166acba08b55b198ec079d4b118 | [
"Apache-2.0"
] | null | null | null | lib/nomad_client/model/join_response.ex | mrmstn/nomad_client | a586022e5eb4d166acba08b55b198ec079d4b118 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule NomadClient.Model.JoinResponse do
  @moduledoc """
  Response payload of a Nomad "join" API call.

  Carries an optional error string and a joined-node count (field
  names mirror the generated Nomad API schema).
  """

  @derive [Poison.Encoder]
  defstruct [
    # Error message returned by the API, if any.
    :error,
    # Number of members joined (per the field name; confirm against the Nomad API docs).
    :num_joined
  ]

  @type t :: %__MODULE__{
          :error => String.t() | nil,
          :num_joined => integer() | nil
        }
end
defimpl Poison.Decoder, for: NomadClient.Model.JoinResponse do
  # JoinResponse has no nested models to decode, so decoding is the identity.
  def decode(value, _options), do: value
end
| 19.962963 | 91 | 0.654917 |
03ffe35a0f793cf15ead63b9cb98594617d831ee | 22,118 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v35/api/creative_field_values.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v35/api/creative_field_values.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v35/api/creative_field_values.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V35.Api.CreativeFieldValues do
@moduledoc """
API calls for all endpoints tagged `CreativeFieldValues`.
"""
alias GoogleApi.DFAReporting.V35.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Deletes an existing creative field value.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V35.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `creative_field_id` (*type:* `String.t`) - Creative field ID for this creative field value.
* `id` (*type:* `String.t`) - Creative Field Value ID
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_creative_field_values_delete(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) :: {:ok, nil} | {:ok, Tesla.Env.t()} | {:ok, list()} | {:error, any()}
def dfareporting_creative_field_values_delete(
connection,
profile_id,
creative_field_id,
id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url(
"/dfareporting/v3.5/userprofiles/{profileId}/creativeFields/{creativeFieldId}/creativeFieldValues/{id}",
%{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
"creativeFieldId" => URI.encode(creative_field_id, &URI.char_unreserved?/1),
"id" => URI.encode(id, &(URI.char_unreserved?(&1) || &1 == ?/))
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [decode: false])
end
@doc """
Gets one creative field value by ID.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V35.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `creative_field_id` (*type:* `String.t`) - Creative field ID for this creative field value.
* `id` (*type:* `String.t`) - Creative Field Value ID
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V35.Model.CreativeFieldValue{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_creative_field_values_get(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.DFAReporting.V35.Model.CreativeFieldValue.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def dfareporting_creative_field_values_get(
connection,
profile_id,
creative_field_id,
id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url(
"/dfareporting/v3.5/userprofiles/{profileId}/creativeFields/{creativeFieldId}/creativeFieldValues/{id}",
%{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
"creativeFieldId" => URI.encode(creative_field_id, &URI.char_unreserved?/1),
"id" => URI.encode(id, &(URI.char_unreserved?(&1) || &1 == ?/))
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V35.Model.CreativeFieldValue{}])
end
@doc """
Inserts a new creative field value.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V35.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `creative_field_id` (*type:* `String.t`) - Creative field ID for this creative field value.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.DFAReporting.V35.Model.CreativeFieldValue.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V35.Model.CreativeFieldValue{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_creative_field_values_insert(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.DFAReporting.V35.Model.CreativeFieldValue.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def dfareporting_creative_field_values_insert(
connection,
profile_id,
creative_field_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url(
"/dfareporting/v3.5/userprofiles/{profileId}/creativeFields/{creativeFieldId}/creativeFieldValues",
%{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
"creativeFieldId" => URI.encode(creative_field_id, &URI.char_unreserved?/1)
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V35.Model.CreativeFieldValue{}])
end
@doc """
Retrieves a list of creative field values, possibly filtered. This method supports paging.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V35.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `creative_field_id` (*type:* `String.t`) - Creative field ID for this creative field value.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:ids` (*type:* `list(String.t)`) - Select only creative field values with these IDs.
* `:maxResults` (*type:* `integer()`) - Maximum number of results to return.
* `:pageToken` (*type:* `String.t`) - Value of the nextPageToken from the previous result page.
* `:searchString` (*type:* `String.t`) - Allows searching for creative field values by their values. Wildcards (e.g. *) are not allowed.
* `:sortField` (*type:* `String.t`) - Field by which to sort the list.
* `:sortOrder` (*type:* `String.t`) - Order of sorted results.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V35.Model.CreativeFieldValuesListResponse{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_creative_field_values_list(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.DFAReporting.V35.Model.CreativeFieldValuesListResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def dfareporting_creative_field_values_list(
connection,
profile_id,
creative_field_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:ids => :query,
:maxResults => :query,
:pageToken => :query,
:searchString => :query,
:sortField => :query,
:sortOrder => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url(
"/dfareporting/v3.5/userprofiles/{profileId}/creativeFields/{creativeFieldId}/creativeFieldValues",
%{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
"creativeFieldId" => URI.encode(creative_field_id, &URI.char_unreserved?/1)
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.DFAReporting.V35.Model.CreativeFieldValuesListResponse{}]
)
end
@doc """
Updates an existing creative field value. This method supports patch semantics.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V35.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `creative_field_id` (*type:* `String.t`) - CreativeField ID.
* `id` (*type:* `String.t`) - CreativeFieldValue ID.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.DFAReporting.V35.Model.CreativeFieldValue.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V35.Model.CreativeFieldValue{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_creative_field_values_patch(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.DFAReporting.V35.Model.CreativeFieldValue.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def dfareporting_creative_field_values_patch(
connection,
profile_id,
creative_field_id,
id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url(
"/dfareporting/v3.5/userprofiles/{profileId}/creativeFields/{creativeFieldId}/creativeFieldValues",
%{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
"creativeFieldId" => URI.encode(creative_field_id, &URI.char_unreserved?/1)
}
)
|> Request.add_param(:query, :id, id)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V35.Model.CreativeFieldValue{}])
end
@doc """
Updates an existing creative field value.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V35.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `creative_field_id` (*type:* `String.t`) - Creative field ID for this creative field value.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.DFAReporting.V35.Model.CreativeFieldValue.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V35.Model.CreativeFieldValue{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_creative_field_values_update(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.DFAReporting.V35.Model.CreativeFieldValue.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def dfareporting_creative_field_values_update(
connection,
profile_id,
creative_field_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:put)
|> Request.url(
"/dfareporting/v3.5/userprofiles/{profileId}/creativeFields/{creativeFieldId}/creativeFieldValues",
%{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
"creativeFieldId" => URI.encode(creative_field_id, &URI.char_unreserved?/1)
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V35.Model.CreativeFieldValue{}])
end
end
| 42.698842 | 196 | 0.609458 |
03ffe5d71ffa82f6904a7839f9789315a6ebea00 | 19,637 | ex | Elixir | deps/earmark/lib/earmark/block.ex | ench0/ex_identicon | b37e86984cb372d8981b43190b0052e35718f173 | [
"MIT"
] | null | null | null | deps/earmark/lib/earmark/block.ex | ench0/ex_identicon | b37e86984cb372d8981b43190b0052e35718f173 | [
"MIT"
] | null | null | null | deps/earmark/lib/earmark/block.ex | ench0/ex_identicon | b37e86984cb372d8981b43190b0052e35718f173 | [
"MIT"
] | null | null | null | defmodule Earmark.Block do
use Earmark.Types
import Earmark.Helpers, only: [emit_error: 4]
import Earmark.Helpers.LookaheadHelpers, only: [opens_inline_code: 1, still_inline_code: 2, read_list_lines: 2]
import Earmark.Helpers.LineHelpers
import Earmark.Helpers.AttrParser
@moduledoc """
Given a list of _parsed blocks, convert them into blocks.
That list of blocks is the final representation of the
document (in internal form).
"""
alias Earmark.Line
alias Earmark.Parser
  # One struct submodule per block type produced by the parser.
  # Every block carries `attrs`, the (optional) IAL attribute string
  # later merged in by assign_attributes_to_blocks/2.
  defmodule Heading, do: defstruct attrs: nil, content: nil, level: nil
  defmodule Ruler, do: defstruct attrs: nil, type: nil
  defmodule BlockQuote, do: defstruct attrs: nil, blocks: []
  # NB: this `List` shadows Elixir's List inside Earmark.Block — see
  # Table.new_for_columns, which must write Elixir.List to reach the stdlib.
  defmodule List, do: defstruct attrs: nil, type: :ul, blocks: []
  defmodule ListItem, do: defstruct attrs: nil, type: :ul, spaced: true, blocks: []
  defmodule Para, do: defstruct attrs: nil, lines: []
  defmodule Code, do: defstruct attrs: nil, lines: [], language: nil
  defmodule Html, do: defstruct attrs: nil, html: [], tag: nil
  defmodule HtmlOther, do: defstruct attrs: nil, html: []
  defmodule IdDef, do: defstruct attrs: nil, id: nil, url: nil, title: nil
  # Footnote definition/list blocks.
  defmodule FnDef, do: defstruct attrs: nil, id: nil, number: nil, blocks: []
  defmodule FnList, do: defstruct attrs: ".footnotes", blocks: []
  defmodule Ial, do: defstruct attrs: nil, content: nil
defmodule Table do
defstruct attrs: nil, rows: [], header: nil, alignments: []
def new_for_columns(n) do
%__MODULE__{alignments: Elixir.List.duplicate(:left, n)}
end
end
  # `t` is the union of every block struct; `ts` is a parsed document.
  @type t :: %Heading{} | %Ruler{} | %BlockQuote{} | %List{} | %ListItem{} | %Para{} | %Code{} | %Html{} | %HtmlOther{} | %IdDef{} | %FnDef{} | %FnList{} | %Ial{} | %Table{}
  @type ts :: list(t)
@doc false
# Given a list of `Line.xxx` structs, group them into related blocks.
# Then extract any id definitions, and build a hashdict from them. Not
# for external consumption.
@spec parse( Line.ts, String.t ) :: {ts, %{}}
def parse(lines, filename) do
lines = remove_trailing_blank_lines( lines )
blocks = lines_to_blocks(lines,filename)
links = links_from_blocks(blocks)
{ blocks, links }
end
@doc false
# Public to allow easier testing
def lines_to_blocks(lines,filename) do
lines
|> _parse([], filename)
|> assign_attributes_to_blocks([])
|> consolidate_list_items([])
end
@spec _parse(Line.ts, ts, String.t) :: ts
defp _parse([], result, _filename), do: result
###################
# setext headings #
###################
defp _parse([ %Line.Blank{},
%Line.Text{content: heading},
%Line.SetextUnderlineHeading{level: level}
|
rest
], result, filename) do
_parse(rest, [ %Heading{content: heading, level: level} | result ], filename)
end
defp _parse([ %Line.Blank{},
%Line.Text{content: heading},
%Line.Ruler{type: "-"}
|
rest
], result, filename) do
_parse(rest, [ %Heading{content: heading, level: 2} | result ], filename)
end
#################
# Other heading #
#################
defp _parse([ %Line.Heading{content: content, level: level} | rest ], result, filename) do
_parse(rest, [ %Heading{content: content, level: level} | result ], filename)
end
#########
# Ruler #
#########
defp _parse([ %Line.Ruler{type: type} | rest], result, filename) do
_parse(rest, [ %Ruler{type: type} | result ], filename)
end
###############
# Block Quote #
###############
defp _parse( lines = [ %Line.BlockQuote{} | _ ], result, filename) do
{quote_lines, rest} = Enum.split_while(lines, &blockquote_or_text?/1)
lines = for line <- quote_lines, do: line.content
{blocks, _} = Parser.parse(lines, %Earmark.Options{}, true)
_parse(rest, [ %BlockQuote{blocks: blocks} | result ], filename)
end
#########
# Table #
#########
defp _parse( lines = [ %Line.TableLine{columns: cols1},
%Line.TableLine{columns: cols2}
| _rest
], result, filename)
when length(cols1) == length(cols2)
do
columns = length(cols1)
{ table, rest } = read_table(lines, columns, Table.new_for_columns(columns))
_parse(rest, [ table | result ], filename)
end
#############
# Paragraph #
#############
defp _parse( lines = [ %Line.TableLine{} | _ ], result, filename) do
{para_lines, rest} = Enum.split_while(lines, &text?/1)
line_text = (for line <- para_lines, do: line.line)
_parse(rest, [ %Para{lines: line_text} | result ], filename)
end
defp _parse( lines = [ %Line.Text{} | _ ], result, filename)
do
{reversed_para_lines, rest, pending} = consolidate_para(lines)
case pending do
{nil, _} -> true
{pending, lnb} ->
emit_error filename, lnb, :warning, "Closing unclosed backquotes #{pending} at end of input"
end
line_text = (for line <- (reversed_para_lines |> Enum.reverse), do: line.line)
_parse(rest, [ %Para{lines: line_text} | result ], filename)
end
#########
# Lists #
#########
# We handle lists in two passes. In the first, we build list items,
# in the second we combine adjacent items into lists. This is pass one
defp _parse( [first = %Line.ListItem{type: type} | rest ], result, filename) do
{spaced, list_lines, rest, offset} =
case read_list_lines(rest, opens_inline_code(first)) do
{s, ll, r, {_btx, lnb}} ->
# emit_error filename, lnb, :warning, "Closing unclosed backquotes #{pending_btx} at end of input"
{s, ll, r, lnb}
{s, ll, r} -> {s, ll, r, 0}
end
spaced = (spaced || blank_line_in?(list_lines)) && peek(rest, Line.ListItem, type)
lines = for line <- [first | list_lines], do: properly_indent(line, 1)
{blocks, _} = Parser.parse(lines, %Earmark.Options{file: filename, line: offset}, true)
_parse(rest, [ %ListItem{type: type, blocks: blocks, spaced: spaced} | result ], filename)
end
#################
# Indented code #
#################
defp _parse( list = [%Line.Indent{} | _], result, filename) do
{code_lines, rest} = Enum.split_while(list, &indent_or_blank?/1)
code_lines = remove_trailing_blank_lines(code_lines)
code = (for line <- code_lines, do: properly_indent(line, 1))
_parse(rest, [ %Code{lines: code} | result ], filename)
end
###############
# Fenced code #
###############
# A fenced code block: consume lines up to the matching closing fence
# (same delimiter, any language annotation), then emit a %Code{} holding
# the verbatim lines.
defp _parse([%Line.Fence{delimiter: delimiter, language: language} | rest], result, filename) do
  {code_lines, rest} =
    Enum.split_while(rest, fn line ->
      !match?(%Line.Fence{delimiter: ^delimiter, language: _}, line)
    end)

  # Skip the closing fence if one was found. `Enum.drop/2` returns [] on
  # an empty list, replacing the O(n) `length(rest) == 0` check + `tl/1`.
  rest = Enum.drop(rest, 1)
  code = for line <- code_lines, do: line.line
  _parse(rest, [%Code{lines: code, language: language} | result], filename)
end
##############
# HTML block #
##############
defp _parse([ opener = %Line.HtmlOpenTag{tag: tag} | rest], result, filename) do
{html_lines, rest, unclosed} = html_match_to_closing(opener, rest)
unclosed
|> Enum.reverse()
|> Enum.each( fn %{lnb: lnb, tag: tag} ->
emit_error filename, lnb, :warning, "Failed to find closing <#{tag}>"
end)
html = (for line <- Enum.reverse(html_lines), do: line.line)
_parse(rest, [ %Html{tag: tag, html: html} | result ], filename)
end
####################
# HTML on one line #
####################
defp _parse([ %Line.HtmlOneLine{line: line} | rest], result, filename) do
_parse(rest, [ %HtmlOther{html: [ line ]} | result ], filename)
end
################
# HTML Comment #
################
defp _parse([ line = %Line.HtmlComment{complete: true} | rest], result, filename) do
_parse(rest, [ %HtmlOther{html: [ line.line ]} | result ], filename)
end
# A multi-line HTML comment: take lines until one contains `-->`; the
# terminating line (when present) still belongs to the comment.
defp _parse(lines = [ %Line.HtmlComment{complete: false} | _], result, filename) do
  {html_lines, rest} = Enum.split_while(lines, fn line -> !(line.line =~ ~r/-->/) end)

  # Pattern matching on the remainder replaces the O(n)
  # `length(rest) == 0` test plus `hd/tl`.
  {html_lines, rest} =
    case rest do
      [] -> {html_lines, []}
      [closing | tail] -> {html_lines ++ [closing], tail}
    end

  html = for line <- html_lines, do: line.line
  _parse(rest, [%HtmlOther{html: html} | result], filename)
end
#################
# ID definition #
#################
# the title may be on the line following the iddef
defp _parse( [ defn = %Line.IdDef{title: title}, maybe_title | rest ], result, filename)
when title == nil
do
title = case maybe_title do
%Line.Text{content: content} -> Line.matches_id_title(content)
_ -> nil
end
if title do
_parse(rest, [ %IdDef{id: defn.id, url: defn.url, title: title} | result], filename)
else
_parse([maybe_title | rest], [ %IdDef{id: defn.id, url: defn.url} | result], filename)
end
end
# or not
defp _parse( [ defn = %Line.IdDef{} | rest ], result, filename) do
_parse(rest, [ %IdDef{id: defn.id, url: defn.url, title: defn.title} | result], filename)
end
#######################
# Footnote Definition #
#######################
defp _parse( [ defn = %Line.FnDef{id: _id} | rest ], result , filename) do
{para_lines, rest} = Enum.split_while(rest, &text?/1)
first_line = %Line.Text{line: defn.content}
para = _parse([ first_line | para_lines ], [], filename)
{indent_lines, rest} = Enum.split_while(rest, &indent_or_blank?/1)
{blocks, _ } = remove_trailing_blank_lines(indent_lines)
|> Enum.map(&(properly_indent(&1, 1)))
|> Parser.parse(%Earmark.Options{}, true)
blocks = Enum.concat(para, blocks)
_parse( rest, [ %FnDef{id: defn.id, blocks: blocks } | result ] , filename)
end
####################
# IAL (attributes) #
####################
defp _parse( [ %Line.Ial{attrs: attrs, lnb: lnb} | rest ], result, filename) do
{attributes, errors} = parse_attrs( attrs )
unless Enum.empty?( errors ), do:
emit_error(filename, lnb, :warning, "Illegal attributes #{inspect errors} ignored in IAL")
_parse(rest, [ %Ial{attrs: attributes, content: attrs} | result ], filename)
end
###############
# Blank Lines #
###############
# We've reached the point where empty lines are no longer significant
defp _parse( [ %Line.Blank{} | rest ], result, filename) do
_parse(rest, result, filename)
end
##############################################################
# Anything else... we warn, then treat it as if it were text #
##############################################################
defp _parse( [ anything | rest ], result, filename) do
emit_error filename, anything, :warning, "Unexpected line #{anything.line}"
_parse( [ %Line.Text{content: anything.line} | rest], result, filename)
end
#######################################################
# Assign attributes that follow a block to that block #
#######################################################
@spec assign_attributes_to_blocks( ts, ts ) :: ts
# Input is the reversed accumulator from `_parse/3`, so an IAL that
# followed a block in the document appears immediately *before* that
# block here. The final reverse restores document order.
def assign_attributes_to_blocks([], result), do: Enum.reverse(result)
# Consume the IAL node and attach its attributes to the block it followed.
def assign_attributes_to_blocks([ %Ial{attrs: attrs}, block | rest], result) do
assign_attributes_to_blocks(rest, [ %{block | attrs: attrs} | result ])
end
# Any other block passes through unchanged.
def assign_attributes_to_blocks([ block | rest], result) do
assign_attributes_to_blocks(rest, [ block | result ])
end
############################################################
# Consolidate multiline inline code blocks into an element #
############################################################
@not_pending {nil, 0}
# Gathers consecutive paragraph lines while tracking an open inline-code
# backtick span (`pending`), so a paragraph is not terminated in the
# middle of multi-line inline code. Returns
# {paragraph lines in reverse order, remaining input, pending state}.
# @spec consolidate_para( ts ) :: { ts, ts, {nil | String.t, number} }
defp consolidate_para( lines ), do: _consolidate_para( lines, [], @not_pending )
@spec _consolidate_para( ts, ts, inline_code_continuation ) :: { ts, ts, inline_code_continuation }
# Ran out of input; any still-open backquote is reported by the caller.
defp _consolidate_para( [], result, pending ) do
{result, [], pending}
end
# Keep consuming while the line continues the paragraph (or an open
# inline-code span); otherwise stop and reset the pending state.
defp _consolidate_para( [line | rest] = lines, result, pending ) do
case inline_or_text?( line, pending ) do
%{pending: still_pending, continue: true} -> _consolidate_para( rest, [line | result], still_pending )
_ -> {result, lines, @not_pending}
end
end
##################################################
# Consolidate one or more list items into a list #
##################################################
@spec consolidate_list_items( ts, ts ) :: ts
# Second pass of list handling: folds runs of adjacent %ListItem{}s of
# the same type into a single %List{}, then unifies each list's spacing.
defp consolidate_list_items([], result) do
result |> Enum.map(&compute_list_spacing/1) # no need to reverse
end
# We have a list, and the next element is an item of the same type
defp consolidate_list_items(
[list = %List{type: type, blocks: items},
item = %ListItem{type: type} | rest], result)
do
items = [ item | items ] # original list is reversed
consolidate_list_items([ %{ list | blocks: items } | rest ], result)
end
# We have an item, but no open list
defp consolidate_list_items([ item = %ListItem{type: type} | rest], result) do
consolidate_list_items([ %List{ type: type, blocks: [ item ] } | rest ], result)
end
# Nothing to see here, move on
defp consolidate_list_items([ head | rest ], result) do
consolidate_list_items(rest, [ head | result ])
end
# Unifies the `spaced` flag across all items of a list: if any item is
# spaced, every item is marked spaced so the list renders consistently.
defp compute_list_spacing(list = %List{blocks: items}) do
  # Plain `=` bindings don't need `with` (there is no `<-` match to
  # short-circuit on), so ordinary assignments are used instead.
  spaced = any_spaced_items?(items)
  %{list | blocks: Enum.map(items, &%{&1 | spaced: spaced})}
end

# Non-list blocks pass through untouched.
defp compute_list_spacing(anything_else), do: anything_else
# True if at least one item carries `spaced: true`. `Enum.any?/2` with
# `match?/2` replaces the hand-rolled three-clause recursion.
defp any_spaced_items?(items), do: Enum.any?(items, &match?(%{spaced: true}, &1))
##################################################
# Read in a table (consecutive TableLines with
# the same number of columns)
@spec read_table( ts, number, %Table{} ) :: { %Table{}, ts }
# Accumulate consecutive TableLines with exactly `col_count` columns;
# rows are prepended, so they are collected in reverse order.
defp read_table([ %Line.TableLine{columns: cols} | rest ],
col_count,
table = %Table{})
when length(cols) == col_count
do
read_table(rest, col_count, update_in(table.rows, &[ cols | &1 ]))
end
# First non-matching line: finalize. If row 2 is a separator row, row 1
# becomes the header and both header and separator are dropped from the
# body (`tl(tl(rows))`); otherwise all rows stay in the body.
defp read_table( rest, col_count, %Table{rows: rows}) do
rows = Enum.reverse(rows)
table = Table.new_for_columns(col_count)
table = case look_for_alignments(rows) do
nil -> %Table{table | rows: rows }
aligns -> %Table{table | alignments: aligns,
header: hd(rows),
rows: tl(tl(rows)) }
end
{ table , rest }
end
# If the second row of the table is a separator row (each cell like
# `---`, `:--`, `--:` or `:-:`), return the per-column alignments,
# otherwise nil. Spec fixed: the argument is a list of rows (each a list
# of cell strings) and the result is a list of atoms or nil.
@spec look_for_alignments([[String.t()]]) :: [atom] | nil
defp look_for_alignments([_first, second | _rest]) do
  if Enum.all?(second, fn cell -> cell =~ ~r{^:?-+:?$} end) do
    second
    # Collapse runs of dashes so only the colon positions remain.
    |> Enum.map(fn cell -> Regex.replace(~r/-+/, cell, "-") end)
    |> Enum.map(fn cell ->
      case cell do
        ":-:" -> :center
        ":-" -> :left
        "-" -> :left
        "-:" -> :right
      end
    end)
  else
    nil
  end
end
#####################################################
# Traverse the block list and build a list of links #
#####################################################
# Walks the whole block tree and builds a map of lowercased link id ->
# %IdDef{} so reference-style links can be resolved later.
defp links_from_blocks(blocks) do
visit(blocks, Map.new, &link_extractor/2)
end
@spec link_extractor(t, %{}) :: %{}
# Only %IdDef{} nodes contribute; ids are downcased for case-insensitive
# lookup. Everything else leaves the accumulator untouched.
defp link_extractor(item = %IdDef{id: id}, result) do
Map.put(result, String.downcase(id), item)
end
defp link_extractor(_, result), do: result
##################################
# Visitor pattern for each block #
##################################
@spec visit(ts, %{}, (t, %{} -> %{})) :: %{}
# Pre-order fold over the block tree: `func` sees each node before its
# children. Only BlockQuote, List and ListItem are descended into —
# %FnDef{}/%FnList{} also have a :blocks field but are treated as leaves
# here, so the three structural clauses cannot simply be collapsed into
# a generic `%{blocks: blocks}` match.
defp visit([], result, _func), do: result
# Structural node BlockQuote -> descend
defp visit([ item = %BlockQuote{blocks: blocks} | rest], result, func) do
result = func.(item, result)
result = visit(blocks, result, func)
visit(rest, result, func)
end
# Structural node List -> descend
defp visit([ item = %List{blocks: blocks} | rest], result, func) do
result = func.(item, result)
result = visit(blocks, result, func)
visit(rest, result, func)
end
# Structural node ListItem -> descend
defp visit([ item = %ListItem{blocks: blocks} | rest], result, func) do
result = func.(item, result)
result = visit(blocks, result, func)
visit(rest, result, func)
end
# Leaf, leaf it alone
defp visit([ item | rest], result, func) do
result = func.(item, result)
visit(rest, result, func)
end
###################################################################
# Consume HTML, taking care of nesting. Assumes one tag per line. #
###################################################################
# Seed the search with the opening tag both as the first consumed line
# and as the only entry of the open-tag stack.
defp html_match_to_closing(opener, rest), do: find_closing_tags([opener], rest, [opener])
# `needed` is a stack of still-open tags; `html_lines` accumulates the
# consumed lines in reverse. Returns {consumed lines (reversed),
# remaining input, list of openers that were never closed}.
# No more open tags, happy case
defp find_closing_tags([], rest, html_lines), do: {html_lines, rest, []}
# run out of input, unhappy case
defp find_closing_tags(needed, [], html_lines), do: {html_lines, [], needed}
# still more lines, still needed closing
defp find_closing_tags(needed = [needed_hd|needed_tl], [rest_hd|rest_tl], html_lines) do
cond do
closes_tag?(rest_hd, needed_hd) -> find_closing_tags(needed_tl, rest_tl, [rest_hd|html_lines])
opens_tag?(rest_hd) -> find_closing_tags([rest_hd|needed], rest_tl, [rest_hd|html_lines])
true -> find_closing_tags(needed, rest_tl, [rest_hd|html_lines])
end
end
###########
# Helpers #
###########
# A close line matches an open line only when the tag names are equal.
defp closes_tag?(%Line.HtmlCloseTag{tag: ctag}, %Line.HtmlOpenTag{tag: otag}), do: ctag == otag
defp closes_tag?(_, _), do: false
# Any open-tag line pushes onto the open-tag stack.
defp opens_tag?(%Line.HtmlOpenTag{}), do: true
defp opens_tag?(_), do: false
# Decides whether `line` continues the current paragraph, threading the
# inline-code state: `pending` is {backtick-string | nil, line-number}.
@spec inline_or_text?( Line.t, inline_code_continuation ) :: %{pending: String.t, continue: boolean}
defp inline_or_text?(line, pending)
# No inline code open: text lines continue the paragraph and may open a
# new inline-code span themselves.
defp inline_or_text?(line = %Line.Text{}, @not_pending) do
pending = opens_inline_code(line)
%{pending: pending, continue: true}
end
defp inline_or_text?(line = %Line.TableLine{}, @not_pending) do
pending = opens_inline_code(line)
%{pending: pending, continue: true}
end
# No inline code open and not a text-ish line: the paragraph ends here.
defp inline_or_text?( _line, @not_pending), do: %{pending: @not_pending, continue: false}
# Inside an open inline-code span: every line continues the paragraph
# until the span is closed.
defp inline_or_text?( line, pending ) do
pending = still_inline_code(line, pending)
%{pending: pending, continue: true}
end
# Does the next line exist, and is it a `struct` with the given list
# `type`? Used to decide whether a list continues after blank lines.
defp peek(lines, struct, type) do
  case lines do
    [head | _] -> head.__struct__ == struct && head.type == type
    [] -> false
  end
end
# In case we are inside a code block we return the verbatim text
defp properly_indent(%{inside_code: true, line: line}, _level) do
line
end
# At exactly the target indent level the content is used as-is.
defp properly_indent(%Line.Indent{level: level, content: content}, target_level)
when level == target_level do
content
end
# Add additional spaces for any indentation past level 1
defp properly_indent(%Line.Indent{level: level, content: content}, target_level)
when level > target_level do
String.duplicate(" ", level-target_level) <> content
end
# Anything else (not indented / not in code): just the bare content.
defp properly_indent(line, _) do
line.content
end
# Drops blank lines from the *end* of `lines`: reverse, drop the now
# leading blanks, reverse back. `blank?/1` is defined elsewhere in this
# module (not visible here) — presumably matches %Line.Blank{}; confirm.
defp remove_trailing_blank_lines(lines) do
lines
|> Enum.reverse
|> Enum.drop_while(&blank?/1)
|> Enum.reverse
end
end
| 34.33042 | 173 | 0.587157 |
ff000e8b3e99c85815c33f677b0afa8e357e52db | 588 | exs | Elixir | test/views/error_view_test.exs | mapmeld/superfund-me | 8bd1aeb78504e6ae068cf57dbefca05bebbb2b07 | [
"MIT"
] | null | null | null | test/views/error_view_test.exs | mapmeld/superfund-me | 8bd1aeb78504e6ae068cf57dbefca05bebbb2b07 | [
"MIT"
] | 3 | 2016-09-09T21:09:17.000Z | 2017-09-13T17:55:20.000Z | test/views/error_view_test.exs | Georeactor/superfund-me | 8bd1aeb78504e6ae068cf57dbefca05bebbb2b07 | [
"MIT"
] | null | null | null | defmodule Superfundme.ErrorViewTest do
use Superfundme.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
# 404 renders the not-found copy.
test "renders 404.html" do
assert render_to_string(Superfundme.ErrorView, "404.html", []) ==
"Page not found"
end
# 500 renders the generic server-error copy.
test "render 500.html" do
assert render_to_string(Superfundme.ErrorView, "500.html", []) ==
"Server internal error"
end
# Any unknown template (505.html here) falls back to the same
# server-error copy — presumably via the view's catch-all; confirm in
# Superfundme.ErrorView.
test "render any other" do
assert render_to_string(Superfundme.ErrorView, "505.html", []) ==
"Server internal error"
end
end
| 26.727273 | 69 | 0.687075 |
ff001df53734e9d17916e4b44ae5f028934bbb59 | 24,970 | ex | Elixir | lib/mix/lib/mix/release.ex | timsly/elixir | a256c5578e015c0a78e801dfe536764aa0ba87c6 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/release.ex | timsly/elixir | a256c5578e015c0a78e801dfe536764aa0ba87c6 | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/release.ex | timsly/elixir | a256c5578e015c0a78e801dfe536764aa0ba87c6 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Release do
@moduledoc """
Defines the release structure and convenience for assembling releases.
"""
@doc """
The Mix.Release struct has the following read-only fields:
* `:name` - the name of the release as an atom
* `:version` - the version of the release as a string or
`{:from_app, app_name}`
* `:path` - the path to the release root
* `:version_path` - the path to the release version inside the release
* `:applications` - a map of application with their definitions
* `:erts_source` - the erts source as a charlist (or nil)
* `:erts_version` - the erts version as a charlist
The following fields may be modified as long as they keep their defined types:
* `:boot_scripts` - a map of boot scripts with the boot script name
as key and a keyword list with **all** applications that are part of
it and their modes as value
* `:config_providers` - a list of `{config_provider, term}` tuples where the
first element is a module that implements the `Config.Provider` behaviour
and `term` is the value given to it on `c:Config.Provider.init/1`
* `:options` - a keyword list with all other user supplied release options
* `:overlays` - a list of extra files added to the release. If you have a custom
step adding extra files to a release, you can add these files to the `:overlays`
field so they are also considered on further commands, such as tar/zip. Each entry
in overlays is the relative path to the release root of each file
* `:steps` - a list of functions that receive the release and returns a release.
Must also contain the atom `:assemble` which is the internal assembling step.
May also contain the atom `:tar` to create a tarball of the release.
"""
defstruct [
:name,
:version,
:path,
:version_path,
:applications,
:boot_scripts,
:erts_source,
:erts_version,
:config_providers,
:options,
:overlays,
:steps
]
@type mode :: :permanent | :transient | :temporary | :load | :none
@type application :: atom()
@type t :: %{
name: atom(),
version: String.t(),
path: String.t(),
version_path: String.t() | {:from_app, application()},
applications: %{application() => keyword()},
boot_scripts: %{atom() => [{application(), mode()}]},
erts_version: charlist(),
erts_source: charlist() | nil,
config_providers: [{module, term}],
options: keyword(),
steps: [(t -> t) | :assemble, ...]
}
@default_apps [kernel: :permanent, stdlib: :permanent, elixir: :permanent, sasl: :permanent]
@safe_modes [:permanent, :temporary, :transient]
@unsafe_modes [:load, :none]
@significant_chunks ~w(Atom AtU8 Attr Code StrT ImpT ExpT FunT LitT Line)c
@copy_app_dirs ["priv"]
@doc false
@spec from_config!(atom, keyword, keyword) :: t
def from_config!(name, config, overrides) do
{name, apps, opts} = find_release(name, config)
unless Atom.to_string(name) =~ ~r/^[a-z][a-z0-9_]*$/ do
Mix.raise(
"Invalid release name. A release name must start with a lowercase ASCII letter, " <>
"followed by lowercase ASCII letters, numbers, or underscores, got: #{inspect(name)}"
)
end
opts =
[overwrite: false, quiet: false, strip_beams: true]
|> Keyword.merge(opts)
|> Keyword.merge(overrides)
{include_erts, opts} = Keyword.pop(opts, :include_erts, true)
{erts_source, erts_lib_dir, erts_version} = erts_data(include_erts)
loaded_apps = apps |> Keyword.keys() |> load_apps(%{}, erts_lib_dir, :maybe)
# Make sure IEx is either an active part of the release or add it as none.
{loaded_apps, apps} =
if Map.has_key?(loaded_apps, :iex) do
{loaded_apps, apps}
else
{load_apps([:iex], loaded_apps, erts_lib_dir, :maybe), apps ++ [iex: :none]}
end
start_boot = build_start_boot(loaded_apps, apps)
start_clean_boot = build_start_clean_boot(start_boot)
{path, opts} =
Keyword.pop_lazy(opts, :path, fn ->
Path.join([Mix.Project.build_path(config), "rel", Atom.to_string(name)])
end)
path = Path.absname(path)
{version, opts} =
Keyword.pop_lazy(opts, :version, fn ->
config[:version] ||
Mix.raise(
"No :version found. Please make sure a :version is set in your project definition " <>
"or inside the release the configuration"
)
end)
version =
case version do
{:from_app, app} ->
Application.load(app)
version = Application.spec(app, :vsn)
if !version do
Mix.raise(
"Could not find version for #{inspect(app)}, please make sure the application exists"
)
end
to_string(version)
"" ->
Mix.raise("The release :version cannot be an empty string")
_ ->
version
end
{config_providers, opts} = Keyword.pop(opts, :config_providers, [])
{steps, opts} = Keyword.pop(opts, :steps, [:assemble])
validate_steps!(steps)
%Mix.Release{
name: name,
version: version,
path: path,
version_path: Path.join([path, "releases", version]),
erts_source: erts_source,
erts_version: erts_version,
applications: loaded_apps,
boot_scripts: %{start: start_boot, start_clean: start_clean_boot},
config_providers: config_providers,
options: opts,
overlays: [],
steps: steps
}
end
# Resolves a release definition: looks it up by name (or infers a single
# release from the project), evaluates a lazy options fun if given, and
# returns {name, applications-with-modes, remaining options}.
defp find_release(name, config) do
{name, opts_fun_or_list} = lookup_release(name, config) || infer_release(config)
# Release options may be a zero-arity fun for lazy evaluation.
opts = if is_function(opts_fun_or_list, 0), do: opts_fun_or_list.(), else: opts_fun_or_list
{apps, opts} = Keyword.pop(opts, :applications, [])
# Umbrellas have no root :app, so they must list applications explicitly.
if apps == [] and Mix.Project.umbrella?(config) do
bad_umbrella!()
end
app = Keyword.get(config, :app)
# User-given modes override the @default_apps (kernel/stdlib/elixir/sasl).
apps = Keyword.merge(@default_apps, apps)
# Append the project's own app as :permanent unless already configured.
if is_nil(app) or Keyword.has_key?(apps, app) do
{name, apps, opts}
else
{name, apps ++ [{app, :permanent}], opts}
end
end
# No name given: use the only configured release, fall back to
# :default_release when several exist, or return nil so the caller can
# infer one when none are configured.
defp lookup_release(nil, config) do
case Keyword.get(config, :releases, []) do
[] ->
nil
[{name, opts}] ->
{name, opts}
[_ | _] ->
case Keyword.get(config, :default_release) do
nil ->
Mix.raise(
"\"mix release\" was invoked without a name but there are multiple releases. " <>
"Please call \"mix release NAME\" or set :default_release in your project configuration"
)
name ->
lookup_release(name, config)
end
end
end
# Explicit name: it must exist in the :releases configuration.
defp lookup_release(name, config) do
if opts = config[:releases][name] do
{name, opts}
else
found = Keyword.get(config, :releases, [])
Mix.raise(
"Unknown release #{inspect(name)}. " <>
"The available releases are: #{inspect(Keyword.keys(found))}"
)
end
end
# When no :releases are configured, derive a single release from the
# project's :app with empty options. Umbrella projects have no root
# :app and must configure releases explicitly, so we raise for them.
defp infer_release(config) do
  if Mix.Project.umbrella?(config),
    do: bad_umbrella!(),
    else: {Keyword.fetch!(config, :app), []}
end
defp bad_umbrella! do
Mix.raise("""
Umbrella projects require releases to be explicitly defined with \
a non-empty applications key that chooses which umbrella children \
should be part of the releases:
releases: [
foo: [
applications: [child_app_foo: :permanent]
],
bar: [
applications: [child_app_bar: :permanent]
]
]
Alternatively you can perform the release from the children applications
""")
end
# Normalizes the :include_erts option into
# {erts_source_dir | nil, erts_lib_dir, erts_version} (charlists).
# A zero-arity fun is evaluated first.
defp erts_data(erts_data) when is_function(erts_data) do
erts_data(erts_data.())
end
# false: don't bundle ERTS; the target system provides its own.
defp erts_data(false) do
{nil, :code.lib_dir(), :erlang.system_info(:version)}
end
# true: bundle the currently running ERTS.
defp erts_data(true) do
version = :erlang.system_info(:version)
{:filename.join(:code.root_dir(), 'erts-#{version}'), :code.lib_dir(), version}
end
# A path: bundle an explicit ERTS installation. The directory is
# expected to be named "erts-VSN".
# NOTE(review): `String.split("-")` yields more than two parts if the
# version itself contains a dash (e.g. "erts-12.0-rc0"), which would
# make the two-element match raise — confirm whether that can occur.
defp erts_data(erts_source) when is_binary(erts_source) do
if File.exists?(erts_source) do
[_, erts_version] = erts_source |> Path.basename() |> String.split("-")
erts_lib_dir = erts_source |> Path.dirname() |> Path.join("lib") |> to_charlist()
{to_charlist(erts_source), erts_lib_dir, to_charlist(erts_version)}
else
Mix.raise("Could not find ERTS system at #{inspect(erts_source)}")
end
end
# Depth-first walk over the application dependency graph, accumulating a
# map of app -> properties. `included` is :maybe for the release roots,
# false for :applications deps and true for :included_applications deps
# (see `do_load_app/6`).
defp load_apps(apps, seen, otp_root, included) do
for app <- apps, reduce: seen do
seen ->
# `reentrant/3` returns an (possibly updated) map when the app was
# already visited; otherwise load it and recurse into its deps.
if reentrant_seen = reentrant(seen, app, included) do
reentrant_seen
else
load_app(app, seen, otp_root, included)
end
end
end
# Handles re-visiting an already-loaded app. Returns nil if the app is
# unseen, the (possibly updated) map if the visit is consistent, and
# raises when the app is claimed both as a regular and as an included
# application.
defp reentrant(seen, app, included) do
properties = seen[app]
cond do
is_nil(properties) ->
nil
included != :maybe and properties[:included] != included ->
# A previous :maybe (root) visit can be upgraded to a definite
# included/regular status; conflicting definite statuses cannot.
if properties[:included] == :maybe do
put_in(seen[app][:included], included)
else
Mix.raise(
"#{inspect(app)} is listed both as a regular application and as an included application"
)
end
true ->
seen
end
end
defp load_app(app, seen, otp_root, included) do
path = Path.join(otp_root, "#{app}-*")
case Path.wildcard(path) do
[] ->
case :code.lib_dir(app) do
{:error, :bad_name} -> Mix.raise("Could not find application #{inspect(app)}")
path -> do_load_app(app, path, seen, otp_root, false, included)
end
paths ->
path = paths |> Enum.sort() |> List.last()
do_load_app(app, to_charlist(path), seen, otp_root, true, included)
end
end
defp do_load_app(app, path, seen, otp_root, otp_app?, included) do
case :file.consult(Path.join(path, "ebin/#{app}.app")) do
{:ok, terms} ->
[{:application, ^app, properties}] = terms
value = [path: path, otp_app?: otp_app?, included: included] ++ properties
seen = Map.put(seen, app, value)
seen = load_apps(Keyword.get(properties, :applications, []), seen, otp_root, false)
load_apps(Keyword.get(properties, :included_applications, []), seen, otp_root, true)
{:error, reason} ->
Mix.raise("Could not load #{app}.app. Reason: #{inspect(reason)}")
end
end
# Builds the application/mode list for the `start` boot script: the
# explicitly configured apps first (in their given order), followed by
# every remaining loaded app with its default mode, sorted by name.
defp build_start_boot(all_apps, specified_apps) do
  implicit =
    for {app, props} <- all_apps,
        not List.keymember?(specified_apps, app, 0),
        do: {app, default_mode(props)}

  specified_apps ++ Enum.sort(implicit)
end
# Included applications are only loaded (their including app starts
# them); every other application defaults to :permanent.
defp default_mode(props) do
  case props[:included] do
    true -> :load
    _ -> :permanent
  end
end
# Derives the `start_clean` boot script from `start`: every application
# is demoted to :none (loaded paths only) except kernel and stdlib,
# which must always boot.
defp build_start_clean_boot(boot) do
  boot
  |> Enum.map(fn {app, _mode} -> {app, :none} end)
  |> Keyword.put(:stdlib, :permanent)
  |> Keyword.put(:kernel, :permanent)
end
# Validates the :steps release option: a list of 1-arity funs plus
# exactly one :assemble, optionally one :tar which must come after
# :assemble. Raises on any violation, returns :ok otherwise.
defp validate_steps!(steps) do
valid_atoms = [:assemble, :tar]
if not is_list(steps) or Enum.any?(steps, &(&1 not in valid_atoms and not is_function(&1, 1))) do
Mix.raise("""
The :steps option must be a list of:
* anonymous function that receives one argument
* the atom :assemble or :tar
Got: #{inspect(steps)}
""")
end
if Enum.count(steps, &(&1 == :assemble)) != 1 do
Mix.raise("The :steps option must contain the atom :assemble once, got: #{inspect(steps)}")
end
# :assemble appearing anywhere after the first :tar is an ordering error.
if :assemble in Enum.drop_while(steps, &(&1 != :tar)) do
Mix.raise("The :tar step must come after :assemble")
end
if Enum.count(steps, &(&1 == :tar)) > 1 do
Mix.raise("The :steps option can only contain the atom :tar once")
end
:ok
end
@doc """
Makes the `sys.config` structure.
If there are config providers, then a value is injected into
the `:elixir` application configuration in `sys_config` to be
read during boot and trigger the providers.
It uses the following release options to customize its behaviour:
* `:reboot_system_after_config`
* `:start_distribution_during_config`
* `:prune_runtime_sys_config_after_boot`
In case there are no config providers, it doesn't change `sys_config`.
"""
@spec make_sys_config(t, keyword(), Config.Provider.config_path()) ::
:ok | {:error, String.t()}
def make_sys_config(release, sys_config, config_provider_path) do
{sys_config, runtime_config?} =
merge_provider_config(release, sys_config, config_provider_path)
path = Path.join(release.version_path, "sys.config")
args = [runtime_config?, sys_config]
format = "%% coding: utf-8~n%% RUNTIME_CONFIG=~s~n~tw.~n"
File.mkdir_p!(Path.dirname(path))
File.write!(path, :io_lib.format(format, args), [:utf8])
case :file.consult(path) do
{:ok, _} ->
:ok
{:error, reason} ->
{:error,
"Could not read configuration file. It likely has invalid configuration terms " <>
"such as functions, references, and pids. Please make sure your configuration " <>
"is made of numbers, atoms, strings, maps, tuples and lists. Reason: #{inspect(reason)}"}
end
end
defp merge_provider_config(%{config_providers: []}, sys_config, _), do: {sys_config, false}
defp merge_provider_config(release, sys_config, config_path) do
{reboot?, extra_config, initial_config} = start_distribution(release)
prune_after_boot = Keyword.get(release.options, :prune_runtime_sys_config_after_boot, false)
opts = [
extra_config: initial_config,
prune_after_boot: prune_after_boot,
reboot_after_config: reboot?
]
init = Config.Provider.init(release.config_providers, config_path, opts)
{Config.Reader.merge(sys_config, [elixir: [config_providers: init]] ++ extra_config), reboot?}
end
# Decides how distribution interacts with config providers. Returns
# {reboot?, extra_config, initial_config}. In the default case the
# system boots with distribution disabled, runs the providers, and
# reboots with distribution re-enabled; when the user opts out of the
# reboot or asks for distribution during config, no kernel overrides
# are needed.
defp start_distribution(%{options: opts}) do
  reboot? = Keyword.get(opts, :reboot_system_after_config, true)
  early? = Keyword.get(opts, :start_distribution_during_config, false)

  if reboot? and not early? do
    {true, [kernel: [start_distribution: false]], [kernel: [start_distribution: true]]}
  else
    {reboot?, [], []}
  end
end
@doc """
Copies the cookie to the given path.
If a cookie option was given, we compare it with
the contents of the file (if any), and ask the user
if they want to override.
If there is no option, we generate a random one
the first time.
"""
@spec make_cookie(t, Path.t()) :: :ok
# Three cases: an explicit :cookie option is (re)written via
# Mix.Generator.create_file (which can prompt before overwriting a
# differing file); an existing cookie file is kept; otherwise a fresh
# random cookie is generated and written.
def make_cookie(release, path) do
cond do
cookie = release.options[:cookie] ->
Mix.Generator.create_file(path, cookie, quiet: true)
:ok
File.exists?(path) ->
:ok
true ->
File.write!(path, random_cookie())
:ok
end
end
# 32 random bytes, base32-encoded so the cookie is a printable atom-safe
# string.
defp random_cookie, do: Base.encode32(:crypto.strong_rand_bytes(32))
@doc """
Makes the start_erl.data file with the
ERTS version and release versions.
"""
@spec make_start_erl(t, Path.t()) :: :ok
# Writes "<erts_version> <release_version>" (the start_erl.data format)
# to `path` and returns :ok. `erts_version` is a charlist and `version`
# a binary; interpolation converts both to one string.
def make_start_erl(release, path) do
  contents = "#{release.erts_version} #{release.version}"
  File.write!(path, contents)
  :ok
end
  @doc """
  Makes boot scripts.

  It receives a path to the boot file, without extension, such as
  `releases/0.1.0/start` and this command will write `start.rel`,
  `start.boot`, and `start.script` to the given path, returning
  `:ok` or `{:error, message}`.

  The boot script uses the RELEASE_LIB environment variable, which must
  be accordingly set with `--boot-var` and point to the release lib dir.
  """
  @spec make_boot_script(t, Path.t(), [{application(), mode()}], [String.t()]) ::
          :ok | {:error, String.t()}
  def make_boot_script(release, path, modes, prepend_paths \\ []) do
    with {:ok, rel_spec} <- build_release_spec(release, modes) do
      # Write the .rel resource file, then let :systools compile it into a
      # .script under the same base path.
      File.write!(path <> ".rel", consultable(rel_spec), [:utf8])
      sys_path = String.to_charlist(path)
      sys_options = [
        :silent,
        :no_dot_erlang,
        :no_warn_sasl,
        variables: build_variables(release),
        path: build_paths(release)
      ]
      case :systools.make_script(sys_path, sys_options) do
        {:ok, _module, _warnings} ->
          # Post-process the generated .script: inject config-provider boot
          # and compile-env validation instructions and prepend extra code
          # paths, then regenerate the binary .boot from the edited script.
          script_path = sys_path ++ '.script'
          {:ok, [{:script, rel_info, instructions}]} = :file.consult(script_path)
          instructions =
            instructions
            |> post_stdlib_applies(release)
            |> prepend_paths_to_script(prepend_paths)
          script = {:script, rel_info, instructions}
          File.write!(script_path, consultable(script), [:utf8])
          :ok = :systools.script2boot(sys_path)
        {:error, module, info} ->
          # :systools reports errors via the module that produced them.
          message = module.format_error(info) |> to_string() |> String.trim()
          {:error, message}
      end
    end
  end
defp build_variables(release) do
for {_, properties} <- release.applications,
not Keyword.fetch!(properties, :otp_app?),
uniq: true,
do: {'RELEASE_LIB', properties |> Keyword.fetch!(:path) |> :filename.dirname()}
end
defp build_paths(release) do
for {_, properties} <- release.applications,
Keyword.fetch!(properties, :otp_app?),
do: properties |> Keyword.fetch!(:path) |> Path.join("ebin") |> to_charlist()
end
  # Builds the :release term consumed by :systools.make_script/2.
  # Uses throw/catch for early exit: unknown applications or invalid modes
  # surface as {:error, message}.
  defp build_release_spec(release, modes) do
    %{name: name, version: version, erts_version: erts_version, applications: apps} = release
    rel_apps =
      for {app, mode} <- modes do
        properties = Map.get(apps, app) || throw({:error, "Unknown application #{inspect(app)}"})
        children = Keyword.get(properties, :applications, [])
        validate_mode!(app, mode, modes, children)
        build_app_for_release(app, mode, properties)
      end
    {:ok, {:release, {to_charlist(name), to_charlist(version)}, {:erts, erts_version}, rel_apps}}
  catch
    {:error, message} -> {:error, message}
  end
  # Validates the boot mode of `app` against its children:
  #   * the mode itself must be one of @safe_modes or @unsafe_modes;
  #   * every child application must be listed in `modes`;
  #   * an app in a safe (started) mode must not depend on a child in an
  #     unsafe mode, otherwise the release would fail to boot.
  # Throws {:error, message}; caught in build_release_spec/2.
  defp validate_mode!(app, mode, modes, children) do
    safe_mode? = mode in @safe_modes
    if not safe_mode? and mode not in @unsafe_modes do
      throw(
        {:error,
         "Unknown mode #{inspect(mode)} for #{inspect(app)}. " <>
           "Valid modes are: #{inspect(@safe_modes ++ @unsafe_modes)}"}
      )
    end
    for child <- children do
      child_mode = Keyword.get(modes, child)
      cond do
        is_nil(child_mode) ->
          throw(
            {:error,
             "Application #{inspect(app)} is listed in the release boot, " <>
               "but it depends on #{inspect(child)}, which isn't"}
          )
        safe_mode? and child_mode in @unsafe_modes ->
          throw(
            {:error,
             """
             Application #{inspect(app)} has mode #{inspect(mode)} but it depends on \
             #{inspect(child)} which is set to #{inspect(child_mode)}. If you really want \
             to set such mode for #{inspect(child)} make sure that all applications that depend \
             on it are also set to :load or :none, otherwise your release will fail to boot
             """}
          )
        true ->
          :ok
      end
    end
  end
defp build_app_for_release(app, mode, properties) do
vsn = Keyword.fetch!(properties, :vsn)
case Keyword.get(properties, :included_applications, []) do
[] -> {app, vsn, mode}
included_apps -> {app, vsn, mode, included_apps}
end
end
  # Splits the boot instructions at the {:apply, start_boot(:stdlib, _)}
  # instruction and injects the config-provider boot and compile-env
  # validation steps immediately after it.
  defp post_stdlib_applies(instructions, release) do
    {pre, [stdlib | post]} =
      Enum.split_while(
        instructions,
        &(not match?({:apply, {:application, :start_boot, [:stdlib, _]}}, &1))
      )
    pre ++
      [stdlib] ++ config_provider_apply(release) ++ validate_compile_env_apply(release) ++ post
  end
  # No config providers configured: nothing to inject into the boot script.
  defp config_provider_apply(%{config_providers: []}),
    do: []
  # Otherwise boot the configured providers right after :stdlib starts.
  defp config_provider_apply(_),
    do: [{:apply, {Config.Provider, :boot, [:elixir, :config_providers]}}]
  # Emits a boot instruction validating compile-time configuration, unless
  # disabled via the :validate_compile_env option or there is nothing to
  # validate (empty compile env).
  defp validate_compile_env_apply(release) do
    with true <- Keyword.get(release.options, :validate_compile_env, true),
         [_ | _] = compile_env <- compile_env(release) do
      [{:apply, {Config.Provider, :validate_compile_env, [compile_env]}}]
    else
      _ -> []
    end
  end
defp compile_env(release) do
for {_, properties} <- release.applications,
triplet <- Keyword.get(properties, :compile_env, []),
do: triplet
end
  # Prepends extra code paths (converted to charlists) to every :path
  # instruction that already references $RELEASE_LIB; all other
  # instructions are passed through unchanged.
  defp prepend_paths_to_script(instructions, []), do: instructions
  defp prepend_paths_to_script(instructions, prepend_paths) do
    prepend_paths = Enum.map(prepend_paths, &String.to_charlist/1)
    Enum.map(instructions, fn
      {:path, paths} ->
        if Enum.any?(paths, &List.starts_with?(&1, '$RELEASE_LIB')) do
          {:path, prepend_paths ++ paths}
        else
          {:path, paths}
        end
      other ->
        other
    end)
  end
  # Formats a term as UTF-8 Erlang source with a trailing dot so the file
  # can be read back with :file.consult/1.
  defp consultable(term) do
    :io_lib.format("%% coding: utf-8~n~tp.~n", [term])
  end
  @doc """
  Copies ERTS if the release is configured to do so.

  Returns true if the release was copied, false otherwise.
  """
  @spec copy_erts(t) :: boolean()
  def copy_erts(%{erts_source: nil}) do
    false
  end
  def copy_erts(release) do
    destination = Path.join(release.path, "erts-#{release.erts_version}")
    File.mkdir_p!(destination)
    for dir <- ~w(bin include lib src) do
      source = Path.join(release.erts_source, dir)
      target = Path.join(destination, dir)
      # The callback returning false means already-existing files are
      # never overwritten.
      File.cp_r!(source, target, fn _, _ -> false end)
    end
    # Replace the copied erl/erl.ini with a relocatable launcher that
    # derives BINDIR/ROOTDIR from its own location at runtime.
    _ = File.rm(Path.join(destination, "bin/erl"))
    _ = File.rm(Path.join(destination, "bin/erl.ini"))
    destination
    |> Path.join("bin/erl")
    |> File.write!(~S"""
    #!/bin/sh
    SELF=$(readlink "$0" || true)
    if [ -z "$SELF" ]; then SELF="$0"; fi
    BINDIR="$(cd "$(dirname "$SELF")" && pwd -P)"
    ROOTDIR="$(dirname "$(dirname "$BINDIR")")"
    EMU=beam
    PROGNAME=$(echo "$0" | sed 's/.*\///')
    export EMU
    export ROOTDIR
    export BINDIR
    export PROGNAME
    exec "$BINDIR/erlexec" ${1+"$@"}
    """)
    # NOTE(review): 0o744 leaves group/other without execute permission on
    # the launcher — confirm this is intended (0o755 is the common choice).
    File.chmod!(Path.join(destination, "bin/erl"), 0o744)
    true
  end
  @doc """
  Copies the given application specification into the release.

  It assumes the application exists in the release.
  """
  @spec copy_app(t, application) :: boolean()
  def copy_app(release, app) do
    properties = Map.fetch!(release.applications, app)
    vsn = Keyword.fetch!(properties, :vsn)
    source_app = Keyword.fetch!(properties, :path)
    target_app = Path.join([release.path, "lib", "#{app}-#{vsn}"])
    # When ERTS is not bundled (erts_source is nil), OTP applications are
    # not copied into the release.
    if is_nil(release.erts_source) and Keyword.fetch!(properties, :otp_app?) do
      false
    else
      File.rm_rf!(target_app)
      File.mkdir_p!(target_app)
      copy_ebin(release, Path.join(source_app, "ebin"), Path.join(target_app, "ebin"))
      for dir <- @copy_app_dirs do
        source_dir = Path.join(source_app, dir)
        target_dir = Path.join(target_app, dir)
        # Resolve symlinked directories so the release receives the real
        # files rather than a dangling link.
        source_dir =
          case File.read_link(source_dir) do
            {:ok, link_target} -> Path.expand(link_target, source_app)
            _ -> source_dir
          end
        File.exists?(source_dir) && File.cp_r!(source_dir, target_dir)
      end
      true
    end
  end
  @doc """
  Copies the ebin directory at `source` to `target`
  respecting release options such as `:strip_beams`.
  """
  @spec copy_ebin(t, Path.t(), Path.t()) :: boolean()
  def copy_ebin(release, source, target) do
    with {:ok, [_ | _] = files} <- File.ls(source) do
      File.mkdir_p!(target)
      strip_beams? = Keyword.get(release.options, :strip_beams, true)
      for file <- files do
        source_file = Path.join(source, file)
        target_file = Path.join(target, file)
        # .beam files are stripped when enabled; anything else (or a beam
        # that fails to strip) is copied as-is.
        # NOTE(review): File.copy/2's result is ignored — confirm this
        # best-effort behavior is intended.
        with true <- strip_beams? and String.ends_with?(file, ".beam"),
             {:ok, binary} <- strip_beam(File.read!(source_file)) do
          File.write!(target_file, binary)
        else
          _ -> File.copy(source_file, target_file)
        end
      end
      true
    else
      # Missing or empty source directory: nothing was copied.
      _ -> false
    end
  end
  @doc """
  Strips a beam file for a release.

  This keeps only significant chunks necessary for the VM operation,
  discarding documentation, debug info, compile information and others.

  The exact chunks that are kept are not documented and may change in
  future versions.
  """
  @spec strip_beam(binary()) :: {:ok, binary} | {:error, :beam_lib, :beam_lib.chnk_rsn()}
  def strip_beam(binary) do
    case :beam_lib.chunks(binary, @significant_chunks, [:allow_missing_chunks]) do
      {:ok, {_, chunks}} ->
        # Drop non-binary entries (chunks reported as missing) and rebuild
        # the module from what remains.
        chunks = for {name, chunk} <- chunks, is_binary(chunk), do: {name, chunk}
        {:ok, binary} = :beam_lib.build_module(chunks)
        # Recompress the rebuilt module through a ram file before
        # returning it.
        {:ok, fd} = :ram_file.open(binary, [:write, :binary])
        {:ok, _} = :ram_file.compress(fd)
        {:ok, binary} = :ram_file.get_file(fd)
        :ok = :ram_file.close(fd)
        {:ok, binary}
      {:error, _, _} = error ->
        error
    end
  end
end
| 31.251564 | 104 | 0.621706 |
ff0056abf9038c39dfb7ba6a2e0f9889c2943d51 | 4,205 | exs | Elixir | test/chopperbot/split/message_builder/line_message_builder_test.exs | flipay/chopperbot | 29d81a343442bdd8eae7627bc6eb3c7d83cd0151 | [
"MIT"
] | 5 | 2019-12-14T03:12:28.000Z | 2020-03-04T12:58:44.000Z | test/chopperbot/split/message_builder/line_message_builder_test.exs | flipay/chopperbot | 29d81a343442bdd8eae7627bc6eb3c7d83cd0151 | [
"MIT"
] | 12 | 2020-01-07T09:31:33.000Z | 2020-03-27T06:11:21.000Z | test/chopperbot/split/message_builder/line_message_builder_test.exs | flipay/chopperbot | 29d81a343442bdd8eae7627bc6eb3c7d83cd0151 | [
"MIT"
] | 1 | 2020-01-09T10:35:37.000Z | 2020-01-09T10:35:37.000Z | defmodule Chopperbot.Split.LineMessageBuilderTest do
  use ExUnit.Case, async: true
  alias Chopperbot.Split.{CalculatedOrdersResult, LineMessageBuilder}
  describe "build_ok_message/1" do
    test "builds a Line flex message from the given orders" do
      calculated_orders_result = %CalculatedOrdersResult{
        orders: [{"chopper", 100}, {"luffy", 200}],
        total: 300
      }
      result = LineMessageBuilder.build_ok_message(calculated_orders_result)
      # Pins the whole LINE flex-message layout: a bubble with a header
      # (its text left unpinned via `_`), one horizontal row per order with
      # the amount formatted with two decimals plus " THB", a separator,
      # and a bold TOTAL row.
      assert %{
               altText: "Orders summary",
               contents: %{
                 body: %{
                   contents: [
                     %{
                       align: "center",
                       size: "sm",
                       text: _,
                       type: "text",
                       weight: "bold",
                       wrap: true
                     },
                     %{
                       contents: [
                         %{
                           contents: [
                             %{
                               color: "#555555",
                               flex: 0,
                               size: "sm",
                               text: "chopper",
                               type: "text"
                             },
                             %{
                               align: "end",
                               color: "#111111",
                               size: "sm",
                               text: "100.00 THB",
                               type: "text"
                             }
                           ],
                           layout: "horizontal",
                           type: "box"
                         },
                         %{
                           contents: [
                             %{
                               color: "#555555",
                               flex: 0,
                               size: "sm",
                               text: "luffy",
                               type: "text"
                             },
                             %{
                               align: "end",
                               color: "#111111",
                               size: "sm",
                               text: "200.00 THB",
                               type: "text"
                             }
                           ],
                           layout: "horizontal",
                           type: "box"
                         },
                         %{margin: "xxl", type: "separator"},
                         %{
                           contents: [
                             %{
                               color: "#555555",
                               size: "sm",
                               text: "TOTAL",
                               type: "text",
                               weight: "bold"
                             },
                             %{
                               align: "end",
                               color: "#111111",
                               size: "sm",
                               text: "300.00 THB",
                               type: "text",
                               weight: "bold"
                             }
                           ],
                           layout: "horizontal",
                           type: "box"
                         }
                       ],
                       layout: "vertical",
                       margin: "xxl",
                       spacing: "sm",
                       type: "box"
                     }
                   ],
                   layout: "vertical",
                   type: "box"
                 },
                 type: "bubble"
               },
               type: "flex"
             } = result
    end
  end
  describe "build_error_message/1" do
    test "builds a Line text message from the given error text" do
      error_text = "invalid_options: +invalid"
      result = LineMessageBuilder.build_error_message(error_text)
      # Error messages are plain text messages that include the given text.
      assert %{text: text, type: "text"} = result
      assert text =~ "invalid_options: +invalid"
    end
  end
end
| 35.041667 | 76 | 0.274435 |
ff006f3c917fb066b70911d288da4ed067e2ff2a | 62 | ex | Elixir | 03-chapter/01_example.ex | herminiotorres/programming-elixir | 70add5ec9fe7f91129da0a4e39ab329afb9be598 | [
"MIT"
] | null | null | null | 03-chapter/01_example.ex | herminiotorres/programming-elixir | 70add5ec9fe7f91129da0a4e39ab329afb9be598 | [
"MIT"
] | null | null | null | 03-chapter/01_example.ex | herminiotorres/programming-elixir | 70add5ec9fe7f91129da0a4e39ab329afb9be598 | [
"MIT"
] | null | null | null | IO.inspect(list1 = [3, 2, 1])
IO.inspect(list2 = [4 | list1])
| 20.666667 | 31 | 0.596774 |
ff00926cf76032448d5decf12f2d759703592841 | 1,918 | exs | Elixir | mix.exs | szTheory/expool | b753e215ddbeac7bfb35f38ee1c89bfb0efd3959 | [
"MIT"
] | 7 | 2017-08-08T13:17:19.000Z | 2020-10-11T06:27:14.000Z | mix.exs | szTheory/expool | b753e215ddbeac7bfb35f38ee1c89bfb0efd3959 | [
"MIT"
] | null | null | null | mix.exs | szTheory/expool | b753e215ddbeac7bfb35f38ee1c89bfb0efd3959 | [
"MIT"
] | 1 | 2020-03-03T19:53:59.000Z | 2020-03-03T19:53:59.000Z | defmodule ExPool.Mixfile do
use Mix.Project
@url_docs "http://hexdocs.pm/expool"
@url_github "https://github.com/zackehh/expool"
def project do
[
app: :expool,
name: "Expool",
description: "Simple process pooling and task submission",
package: %{
files: [
"lib",
"mix.exs",
"LICENSE",
"README.md"
],
licenses: [ "MIT" ],
links: %{
"Docs" => @url_docs,
"GitHub" => @url_github
},
maintainers: [ "Isaac Whitfield" ]
},
version: "0.2.0",
elixir: "~> 1.1",
deps: deps,
docs: [
extras: [ "README.md" ],
source_ref: "master",
source_url: @url_github
],
test_coverage: [
tool: ExCoveralls
],
preferred_cli_env: [
"coveralls": :test,
"coveralls.detail": :test,
"coveralls.html": :test,
"coveralls.travis": :test
]
]
end
  # Configuration for the OTP application
  #
  # Type "mix help compile.app" for more information
  def application do
    # Only :logger is required at runtime.
    [applications: [:logger]]
  end
  # Dependencies can be Hex packages:
  #
  #   {:mydep, "~> 0.3.0"}
  #
  # Or git/path repositories:
  #
  #   {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
  #
  # Type "mix help deps" for more examples and options
  defp deps do
    [
      # documentation
      { :earmark, "~> 0.2.1", optional: true, only: :docs },
      { :ex_doc, "~> 0.11.4", optional: true, only: :docs },
      # testing
      { :benchfella, "~> 0.3.2", optional: true, only: :test },
      { :benchwarmer, "~> 0.0.2", optional: true, only: :test },
      { :excoveralls, "~> 0.5.1", optional: true, only: :test },
      { :exprof, "~> 0.2.0", optional: true, only: :test },
      { :power_assert, "~> 0.0.8", optional: true, only: :test }
    ]
  end
end
| 25.236842 | 77 | 0.518248 |
ff012581b3a451d66e855562332ee09f3b1f5026 | 1,113 | exs | Elixir | test/integration/env_test.exs | jeremyowensboggs/zigler | d1ec07ae66db5c007ff7cfc8a820fb21c661bcea | [
"MIT"
] | 349 | 2019-10-02T07:21:17.000Z | 2022-03-21T17:50:06.000Z | test/integration/env_test.exs | jeremyowensboggs/zigler | d1ec07ae66db5c007ff7cfc8a820fb21c661bcea | [
"MIT"
] | 223 | 2019-10-05T05:36:08.000Z | 2022-03-31T23:12:02.000Z | test/integration/env_test.exs | jeremyowensboggs/zigler | d1ec07ae66db5c007ff7cfc8a820fb21c661bcea | [
"MIT"
] | 20 | 2019-10-08T16:29:39.000Z | 2022-03-31T15:07:20.000Z | defmodule ZiglerTest.Integration.EnvTest do
  # tests to make sure that we can include beam.env terms in the nif
  # definition and have them compile correctly.
  use ExUnit.Case, async: true
  use Zig
  # Zig NIFs declared with arity 0 but taking an env/ErlNifEnv parameter:
  # the environment argument is supplied by the runtime and is not part of
  # the declared arity (the Elixir calls below pass no arguments).
  ~Z"""
  /// nif: zeroarity_with_env/0
  fn zeroarity_with_env(env: beam.env) i64 {
    return 47;
  }
  /// nif: zeroarity_with_erlnifenv/0
  fn zeroarity_with_erlnifenv(env: ?*e.ErlNifEnv) i64 {
    return 47;
  }
  """
  test "for a zero arity function env variables are valid first arguments" do
    assert 47 == zeroarity_with_env()
  end
  test "for a zero arity function erlnifenv variables are valid first arguments" do
    assert 47 == zeroarity_with_erlnifenv()
  end
  # Same check for NIFs that also take a real argument after the env.
  ~Z"""
  /// nif: int_with_env/1
  fn int_with_env(env: beam.env, val: i64) i64 {
    return val;
  }
  /// nif: int_with_erlnifenv/1
  fn int_with_erlnifenv(env: ?*e.ErlNifEnv, val: i64) i64 {
    return val;
  }
  """
  test "env variables are valid first arguments" do
    assert 47 == int_with_env(47)
  end
  test "erlnifenv variables are valid first arguments" do
    assert 47 == int_with_erlnifenv(47)
  end
end
| 22.26 | 83 | 0.68823 |
ff0134b5a8f22674f12bd4324392c4d9c6c1820d | 1,241 | exs | Elixir | apps/estuary/test/unit/data_writer_test.exs | jakeprem/smartcitiesdata | da309ac0d2261527278951cbae88604455207589 | [
"Apache-2.0"
] | null | null | null | apps/estuary/test/unit/data_writer_test.exs | jakeprem/smartcitiesdata | da309ac0d2261527278951cbae88604455207589 | [
"Apache-2.0"
] | 1 | 2020-01-09T21:00:10.000Z | 2020-01-09T21:00:10.000Z | apps/estuary/test/unit/data_writer_test.exs | jakeprem/smartcitiesdata | da309ac0d2261527278951cbae88604455207589 | [
"Apache-2.0"
] | null | null | null | defmodule Estuary.DataWriterTest do
  use ExUnit.Case
  import Mox
  alias Estuary.Datasets.DatasetSchema
  alias Estuary.DataWriter
  alias SmartCity.TestDataGenerator, as: TDG
  # Mox setup: allow expectations to be met from any process and verify
  # they were all exercised when each test exits.
  setup :set_mox_global
  setup :verify_on_exit!
  test "should insert event to history table" do
    test = self()
    # Stub the table writer once: forward whatever DataWriter passes to it
    # back to the test process so it can be asserted on below.
    expect(MockTable, :write, 1, fn payload, args ->
      send(test, %{
        payload: payload,
        table: Keyword.get(args, :table),
        schema: Keyword.get(args, :schema)
      })
      :ok
    end)
    author = "A nice fellow"
    time_stamp = DateTime.to_unix(DateTime.utc_now())
    dataset = Jason.encode!(TDG.create_dataset(%{}))
    table = DatasetSchema.table_name()
    schema = DatasetSchema.schema()
    DataWriter.write(%{
      "author" => author,
      "create_ts" => time_stamp,
      "data" => dataset,
      "forwarded" => false,
      "type" => "data:ingest:start"
    })
    # The "forwarded" flag must not reach the table writer; the remaining
    # fields pass through unchanged, wrapped in a single-row payload list.
    payload = [
      %{
        payload: %{
          "author" => author,
          "create_ts" => time_stamp,
          "data" => dataset,
          "type" => "data:ingest:start"
        }
      }
    ]
    expected = %{
      payload: payload,
      table: table,
      schema: schema
    }
    assert_receive(^expected)
  end
end
| 20.683333 | 53 | 0.580983 |
ff013d49c2df6afe455c83e3ddb54ef98e0ebc02 | 2,747 | ex | Elixir | clients/data_catalog/lib/google_api/data_catalog/v1beta1/model/google_cloud_datacatalog_v1beta1_column_schema.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/data_catalog/lib/google_api/data_catalog/v1beta1/model/google_cloud_datacatalog_v1beta1_column_schema.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/data_catalog/lib/google_api/data_catalog/v1beta1/model/google_cloud_datacatalog_v1beta1_column_schema.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DataCatalog.V1beta1.Model.GoogleCloudDatacatalogV1beta1ColumnSchema do
  @moduledoc """
  Representation of a column within a schema. Columns could be nested inside other columns.

  ## Attributes

  *   `column` (*type:* `String.t`, *default:* `nil`) - Required. Name of the column.
  *   `description` (*type:* `String.t`, *default:* `nil`) - Optional. Description of the column. Default value is an empty string.
  *   `mode` (*type:* `String.t`, *default:* `nil`) - Optional. A column's mode indicates whether the values in this column are required, nullable, etc. Only `NULLABLE`, `REQUIRED` and `REPEATED` are supported. Default mode is `NULLABLE`.
  *   `subcolumns` (*type:* `list(GoogleApi.DataCatalog.V1beta1.Model.GoogleCloudDatacatalogV1beta1ColumnSchema.t)`, *default:* `nil`) - Optional. Schema of sub-columns. A column can have zero or more sub-columns.
  *   `type` (*type:* `String.t`, *default:* `nil`) - Required. Type of the column.
  """
  # Generated model (see the NOTE header above). field/1-2 come from
  # GoogleApi.Gax.ModelBase and wire up JSON (de)serialization.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :column => String.t() | nil,
          :description => String.t() | nil,
          :mode => String.t() | nil,
          :subcolumns =>
            list(
              GoogleApi.DataCatalog.V1beta1.Model.GoogleCloudDatacatalogV1beta1ColumnSchema.t()
            )
            | nil,
          :type => String.t() | nil
        }
  field(:column)
  field(:description)
  field(:mode)
  field(:subcolumns,
    as: GoogleApi.DataCatalog.V1beta1.Model.GoogleCloudDatacatalogV1beta1ColumnSchema,
    type: :list
  )
  field(:type)
end
defimpl Poison.Decoder,
  for: GoogleApi.DataCatalog.V1beta1.Model.GoogleCloudDatacatalogV1beta1ColumnSchema do
  # Delegates to the generated decode/2, which maps JSON data onto the
  # struct fields declared via field/1-2.
  def decode(value, options) do
    GoogleApi.DataCatalog.V1beta1.Model.GoogleCloudDatacatalogV1beta1ColumnSchema.decode(
      value,
      options
    )
  end
end
defimpl Poison.Encoder,
  for: GoogleApi.DataCatalog.V1beta1.Model.GoogleCloudDatacatalogV1beta1ColumnSchema do
  # JSON encoding is shared across all generated models via ModelBase.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 37.630137 | 238 | 0.706225 |
ff014baea3cb78cdc7d31123c46d991370ff3777 | 9,433 | exs | Elixir | apps/socket/test/grapevine_socket/handler/players_test.exs | oestrich/grapevine | 7fb745a3a6e4eb68bd761baa190b2df32fa1f73d | [
"MIT"
] | 107 | 2018-10-05T18:20:32.000Z | 2022-02-28T04:02:50.000Z | apps/socket/test/grapevine_socket/handler/players_test.exs | oestrich/grapevine | 7fb745a3a6e4eb68bd761baa190b2df32fa1f73d | [
"MIT"
] | 33 | 2018-10-05T14:11:18.000Z | 2022-02-10T22:19:18.000Z | apps/socket/test/grapevine_socket/handler/players_test.exs | oestrich/grapevine | 7fb745a3a6e4eb68bd761baa190b2df32fa1f73d | [
"MIT"
] | 18 | 2019-02-03T03:08:20.000Z | 2021-12-28T04:29:36.000Z | defmodule GrapevineSocket.Handler.PlayersTest do
  use GrapevineSocket.DataCase
  alias GrapevineSocket.Presence
  alias GrapevineSocket.Web.Router
  alias GrapevineSocket.Web.State
  # Covers the "players" socket feature: sign-in/sign-out broadcasts on the
  # "players:status" topic and the players/status request/response flow.
  describe "player status" do
    setup [:basic_setup]
    test "new sign in", %{state: state} do
      state = %{state | supports: ["channels", "players"]}
      GrapevineSocket.PubSub.subscribe("players:status")
      frame = %{
        "event" => "players/sign-in",
        "payload" => %{
          "name" => "Player"
        }
      }
      assert {:ok, :skip, state} = Router.receive(state, frame)
      assert state.players == ["Player"]
      assert_receive %{event: "players/sign-in"}, 50
    end
    test "new sign in - name required", %{state: state} do
      state = %{state | supports: ["channels", "players"]}
      GrapevineSocket.PubSub.subscribe("players:status")
      frame = %{
        "event" => "players/sign-in",
        "ref" => "sign-in",
        "payload" => %{
          "name" => ""
        }
      }
      assert {:ok, response, state} = Router.receive(state, frame)
      assert state.players == []
      assert response["status"] == "failure"
    end
    # Hidden games (display: false) still track players locally but must
    # not broadcast presence events.
    test "new sign in - game marked as hidden", %{state: state} do
      game = %{state.game | display: false}
      state = %{state | game: game, supports: ["channels", "players"]}
      GrapevineSocket.PubSub.subscribe("players:status")
      frame = %{
        "event" => "players/sign-in",
        "payload" => %{
          "name" => "Player"
        }
      }
      assert {:ok, :skip, state} = Router.receive(state, frame)
      assert state.players == ["Player"]
      refute_receive %{event: "players/sign-in"}, 50
    end
    test "new sign in - game players are hidden", %{state: state} do
      game = %{state.game | display_players: false}
      state = %{state | game: game, supports: ["channels", "players"]}
      GrapevineSocket.PubSub.subscribe("players:status")
      frame = %{
        "event" => "players/sign-in",
        "payload" => %{
          "name" => "Player"
        }
      }
      assert {:ok, :skip, state} = Router.receive(state, frame)
      assert state.players == ["Player"]
      refute_receive %{event: "players/sign-in"}, 50
    end
    test "new sign in - already signed in, no event", %{state: state} do
      state = %{state | supports: ["channels", "players"], players: ["Player"]}
      GrapevineSocket.PubSub.subscribe("players:status")
      frame = %{
        "event" => "players/sign-in",
        "payload" => %{
          "name" => "Player"
        }
      }
      assert {:ok, :skip, state} = Router.receive(state, frame)
      assert state.players == ["Player"]
      refute_receive %{event: "players/sign-in"}, 50
    end
    test "new sign in - must send a player name", %{state: state} do
      state = %{state | supports: ["channels", "players"]}
      GrapevineSocket.PubSub.subscribe("players:status")
      frame = %{
        "event" => "players/sign-in",
        "payload" => %{}
      }
      assert {:ok, :skip, _state} = Router.receive(state, frame)
      refute_receive %{event: "players/sign-in"}, 50
    end
    test "sign out", %{state: state} do
      state = %{state | supports: ["channels", "players"], players: ["Player"]}
      GrapevineSocket.PubSub.subscribe("players:status")
      frame = %{
        "event" => "players/sign-out",
        "payload" => %{
          "name" => "Player"
        }
      }
      assert {:ok, :skip, state} = Router.receive(state, frame)
      assert state.players == []
      assert_receive %{event: "players/sign-out"}, 50
    end
    test "sign out - game is marked as hidden", %{state: state} do
      game = %{state.game | display: false}
      state = %{state | game: game, supports: ["channels", "players"], players: ["Player"]}
      GrapevineSocket.PubSub.subscribe("players:status")
      frame = %{
        "event" => "players/sign-out",
        "payload" => %{
          "name" => "Player"
        }
      }
      assert {:ok, :skip, state} = Router.receive(state, frame)
      assert state.players == []
      refute_receive %{event: "players/sign-out"}, 50
    end
    test "sign out - game players are hidden", %{state: state} do
      game = %{state.game | display_players: false}
      state = %{state | game: game, supports: ["channels", "players"], players: ["Player"]}
      GrapevineSocket.PubSub.subscribe("players:status")
      frame = %{
        "event" => "players/sign-out",
        "payload" => %{
          "name" => "Player"
        }
      }
      assert {:ok, :skip, state} = Router.receive(state, frame)
      assert state.players == []
      refute_receive %{event: "players/sign-out"}, 50
    end
    test "sign out - player is not in the known list", %{state: state} do
      state = %{state | supports: ["channels", "players"]}
      GrapevineSocket.PubSub.subscribe("players:status")
      frame = %{
        "event" => "players/sign-out",
        "payload" => %{
          "name" => "Player"
        }
      }
      assert {:ok, :skip, state} = Router.receive(state, frame)
      assert state.players == []
      refute_receive %{event: "players/sign-out"}, 50
    end
    test "does not support the players feature - no ref", %{state: state} do
      frame = %{
        "event" => "players/sign-out",
        "payload" => %{
          "name" => "Player"
        }
      }
      assert {:ok, :skip, _state} = Router.receive(state, frame)
    end
    test "does not support the players feature - ref", %{state: state} do
      frame = %{
        "event" => "players/sign-out",
        "ref" => "ref",
        "payload" => %{
          "name" => "Player"
        }
      }
      assert {:ok, response, _state} = Router.receive(state, frame)
      assert response["ref"] == "ref"
      assert response["status"] == "failure"
    end
  end
  # NOTE(review): "udpates" is a typo in these describe names; fixing it
  # only changes test names.
  describe "player status udpates" do
    setup [:basic_setup, :status_updates]
    test "fetch all updates", %{state: state, game: game} do
      frame = %{
        "event" => "players/status",
        "ref" => UUID.uuid4()
      }
      assert {:ok, :skip, _state} = Router.receive(state, frame)
      game_name = game.short_name
      refute_receive {:broadcast,
                      %{"event" => "players/status", "payload" => %{"game" => ^game_name}}},
                     50
      assert_receive {:broadcast,
                      %{"event" => "players/status", "payload" => %{"game" => "EVOne"}}},
                     50
      assert_receive {:broadcast,
                      %{"event" => "players/status", "payload" => %{"game" => "EVTwo"}}},
                     50
      refute_receive {:broadcast,
                      %{"event" => "players/status", "payload" => %{"game" => "EVThree"}}},
                     50
    end
    test "request game status updates for a single game", %{state: state} do
      frame = %{
        "event" => "players/status",
        "ref" => UUID.uuid4(),
        "payload" => %{
          "game" => "EVTwo"
        }
      }
      assert {:ok, :skip, _state} = Router.receive(state, frame)
      refute_receive {:broadcast,
                      %{"event" => "players/status", "payload" => %{"game" => "EVOne"}}},
                     50
      assert_receive {:broadcast,
                      %{"event" => "players/status", "payload" => %{"game" => "EVTwo"}}},
                     50
    end
  end
  describe "player status udpates (no setup)" do
    test "request game status updates for a single game and players are hidden" do
      user = create_user()
      game = create_game(user)
      state = setup_state(game)
      game2 =
        create_game(user, %{
          name: "ExVenture 2",
          short_name: "EVTwo",
          display_players: false
        })
      Presence.reset()
      Presence.update_game(presence_state(game2, %{players: ["Player2"]}))
      frame = %{
        "event" => "players/status",
        "ref" => UUID.uuid4(),
        "payload" => %{
          "game" => "EVTwo"
        }
      }
      assert {:ok, :skip, _state} = Router.receive(state, frame)
      # With display_players: false the game still responds, but with an
      # empty player list.
      assert_receive {:broadcast, event}, 50
      %{"event" => "players/status", "payload" => %{"game" => "EVTwo", "players" => []}} = event
    end
  end
  # Builds a socket state that already supports the players feature.
  def setup_state(game) do
    %State{
      status: "active",
      supports: ["channels", "players"],
      players: [],
      game: game
    }
  end
  # Default setup: fresh user/game and a state supporting channels only.
  def basic_setup(_) do
    user = create_user()
    game = create_game(user)
    Presence.reset()
    state = %State{
      status: "active",
      supports: ["channels"],
      players: [],
      game: game
    }
    %{state: state, user: user, game: game}
  end
  # Registers presence for four games (one hidden) so status tests can
  # assert which ones broadcast.
  def status_updates(%{state: state, user: user, game: game1}) do
    state = %{state | supports: ["channels", "players"]}
    game2 = create_game(user, %{name: "ExVenture 1", short_name: "EVOne"})
    game3 = create_game(user, %{name: "ExVenture 2", short_name: "EVTwo"})
    game4 = create_game(user, %{name: "ExVenture 3", short_name: "EVThree", display: false})
    Presence.update_game(presence_state(game1, %{players: ["Player1"]}))
    Presence.update_game(presence_state(game2, %{players: ["Player2"]}))
    Presence.update_game(presence_state(game3, %{players: ["Player3"]}))
    Presence.update_game(presence_state(game4, %{players: ["Player4"]}))
    %{state: state, game2: game2, game3: game3}
  end
end
| 28.847095 | 96 | 0.542669 |
ff015e37c52932b49dbd532c2fd70047c0058e06 | 1,166 | exs | Elixir | mix.exs | rockwood/exbee | 99e7b17a0eb37495d75e9d7f8ba56f3f3acf87e8 | [
"Apache-2.0"
] | 6 | 2017-06-18T14:29:59.000Z | 2019-07-28T15:47:02.000Z | mix.exs | rockwood/exbee | 99e7b17a0eb37495d75e9d7f8ba56f3f3acf87e8 | [
"Apache-2.0"
] | 3 | 2018-01-28T22:00:06.000Z | 2019-10-14T11:14:47.000Z | mix.exs | rockwood/exbee | 99e7b17a0eb37495d75e9d7f8ba56f3f3acf87e8 | [
"Apache-2.0"
] | 4 | 2016-09-22T02:50:34.000Z | 2019-10-14T07:43:38.000Z | defmodule Exbee.Mixfile do
use Mix.Project
def project do
[
app: :exbee,
description: "Communicate with XBee wireless radios in Elixir",
version: "0.0.5",
elixir: "~> 1.4",
elixirc_paths: elixirc_paths(Mix.env()),
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
source_url: "https://github.com/rockwood/exbee",
deps: deps(),
package: package(),
docs: [
main: "readme",
extras: ["README.md"]
],
dialyzer: [
ignore_warnings: ".dialyzer-ignore-warnings",
plt_add_apps: [:mix]
]
]
end
def application do
[applications: [:logger, :nerves_uart]]
end
defp deps do
[
{:nerves_uart, "~> 1.1.0"},
{:ex_doc, "~> 0.14", only: :dev},
{:dialyxir, "~> 0.5", only: [:dev], runtime: false}
]
end
defp package do
[
name: :exbee,
maintainers: ["Kevin Rockwood"],
licenses: ["Apache 2.0"],
links: %{"GitHub" => "https://github.com/rockwood/exbee"}
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
end
| 22.862745 | 69 | 0.551458 |
ff017057c447153603bfebe171a28bf9303274dc | 8,203 | ex | Elixir | lib/hex/utils.ex | davydog187/hex | c12ebc3b751ecb900d1dc85aad7f5a12f309152a | [
"Apache-2.0"
] | null | null | null | lib/hex/utils.ex | davydog187/hex | c12ebc3b751ecb900d1dc85aad7f5a12f309152a | [
"Apache-2.0"
] | null | null | null | lib/hex/utils.ex | davydog187/hex | c12ebc3b751ecb900d1dc85aad7f5a12f309152a | [
"Apache-2.0"
] | null | null | null | defmodule Hex.Utils do
@moduledoc false
  # Decodes a binary term received from the Hex API, refusing unsafe terms.
  # An empty body decodes to nil; malformed or unsafe payloads abort via
  # Mix.raise.
  def safe_deserialize_erlang("") do
    nil
  end
  def safe_deserialize_erlang(binary) do
    case safe_binary_to_term(binary) do
      {:ok, term} ->
        term
      :error ->
        Mix.raise("Received malformed erlang from Hex API")
    end
  rescue
    ArgumentError ->
      # :erlang.binary_to_term/2 raises ArgumentError on an invalid
      # external term format; treat it the same as an unsafe payload.
      Mix.raise("Received malformed erlang from Hex API")
  end
def safe_serialize_erlang(term) do
binarify(term)
|> :erlang.term_to_binary()
end
def safe_binary_to_term!(binary, opts \\ []) do
case safe_binary_to_term(binary, opts) do
{:ok, term} ->
term
:error ->
raise ArgumentError, "unsafe terms"
end
end
  # Decodes `binary` with :erlang.binary_to_term/2 and then walks the whole
  # result with safe_terms/1, which throws :safe_terms on anything outside
  # the allowed set (see safe_terms/1). Returns {:ok, term} or :error.
  # `opts` is forwarded to binary_to_term (e.g. [:safe]).
  def safe_binary_to_term(binary, opts \\ [])
  def safe_binary_to_term(binary, opts) when is_binary(binary) do
    term = :erlang.binary_to_term(binary, opts)
    safe_terms(term)
    {:ok, term}
  catch
    :throw, :safe_terms ->
      :error
  end
  # Recursively walks a decoded term, throwing :safe_terms on anything
  # outside the allowed set. Allowed: lists, tuples, maps (keys and values
  # checked recursively), atoms, numbers, bitstrings, pids and references.
  # Everything else is rejected by the final catch-all clause.
  defp safe_terms(list) when is_list(list) do
    safe_list(list)
  end
  defp safe_terms(tuple) when is_tuple(tuple) do
    safe_tuple(tuple, tuple_size(tuple))
  end
  defp safe_terms(map) when is_map(map) do
    # The fold is used purely for traversal; the accumulator (the map
    # itself) is never consumed.
    fun = fn key, value, acc ->
      safe_terms(key)
      safe_terms(value)
      acc
    end
    :maps.fold(fun, map, map)
  end
  defp safe_terms(other)
       when is_atom(other) or is_number(other) or is_bitstring(other) or is_pid(other) or
              is_reference(other) do
    other
  end
  defp safe_terms(_other) do
    throw(:safe_terms)
  end
  # Checks every element of a (possibly improper) list.
  defp safe_list([]), do: :ok
  defp safe_list([h | t]) when is_list(t) do
    safe_terms(h)
    safe_list(t)
  end
  # Improper list: the tail is an arbitrary term, so check it directly.
  defp safe_list([h | t]) do
    safe_terms(h)
    safe_terms(t)
  end
  # Checks tuple elements by index, from the last element down to 1.
  defp safe_tuple(_tuple, 0), do: :ok
  defp safe_tuple(tuple, n) do
    safe_terms(:erlang.element(n, tuple))
    safe_tuple(tuple, n - 1)
  end
def truncate(string, options \\ []) do
length = options[:length] || 50
omission = options[:omission] || "..."
cond do
not String.valid?(string) ->
string
String.length(string) < length ->
string
true ->
String.slice(string, 0, length) <> omission
end
end
def binarify(term, opts \\ [])
def binarify(binary, _opts) when is_binary(binary) do
binary
end
def binarify(number, _opts) when is_number(number) do
number
end
def binarify(atom, _opts) when is_nil(atom) or is_boolean(atom) do
atom
end
def binarify(atom, _opts) when is_atom(atom) do
Atom.to_string(atom)
end
def binarify(list, opts) when is_list(list) do
for(elem <- list, do: binarify(elem, opts))
end
def binarify(tuple, opts) when is_tuple(tuple) do
for(elem <- Tuple.to_list(tuple), do: binarify(elem, opts))
|> List.to_tuple()
end
def binarify(map, opts) when is_map(map) do
if Keyword.get(opts, :maps, true) do
for(elem <- map, into: %{}, do: binarify(elem, opts))
else
for(elem <- map, do: binarify(elem, opts))
end
end
  # Prints a human-readable report of a failed API call to the shell.
  # Accepts either a transport error (`{:error, reason}`) or an HTTP
  # response tuple (`{:ok, {status, body, headers}}`), choosing the most
  # informative output available: body message/errors, raw body, or just
  # the status code.
  def print_error_result({:error, reason}) do
    Hex.Shell.info(inspect(reason))
  end

  def print_error_result({:ok, {status, nil, _headers}}) do
    print_http_code(status)
  end

  def print_error_result({:ok, {status, "", _headers}}) do
    print_http_code(status)
  end

  def print_error_result({:ok, {_status, body, _headers}}) when is_binary(body) do
    Hex.Shell.info(body)
  end

  def print_error_result({:ok, {status, body, _headers}}) when is_map(body) do
    # Structured API error: prefer "message"/"errors" fields; fall back to
    # dumping the status code and raw body when neither is present.
    message = body["message"]
    errors = body["errors"]

    if message do
      Hex.Shell.info(message)
    end

    if errors do
      pretty_errors(errors)
    end

    unless message || errors do
      print_http_code(status)
      Hex.Shell.info(body)
    end
  end
  # Recursively prints a (possibly nested) error map as an indented
  # "key: message" tree, increasing the indent by one level per nesting.
  defp pretty_errors(errors, depth \\ 0) do
    Enum.each(errors, fn
      {key, map} when is_map(map) ->
        Hex.Shell.info(indent(depth) <> key <> ":")
        pretty_errors(map, depth + 1)

      {key, value} ->
        message = pretty_error_message(value, depth)
        Hex.Shell.info(indent(depth) <> key <> ": " <> message)
    end)
  end
  # Formats an error message for display under its key. Multi-line messages
  # are re-indented one level deeper and pushed onto their own lines so they
  # line up beneath the key.
  defp pretty_error_message(message, depth) do
    if message =~ "\n" do
      message =
        message
        |> Hex.string_trim()
        |> String.replace("\n", "\n" <> indent(depth + 1))

      "\n" <> indent(depth + 1) <> message
    else
      message
    end
  end
defp print_http_code(code), do: Hex.Shell.info(pretty_http_code(code))
defp pretty_http_code(401), do: "Authentication failed (401)"
defp pretty_http_code(403), do: "Forbidden (403)"
defp pretty_http_code(404), do: "Entity not found (404)"
defp pretty_http_code(422), do: "Validation failed (422)"
defp pretty_http_code(code), do: "HTTP status code: #{code}"
defp indent(0), do: " "
defp indent(depth), do: " " <> indent(depth - 1)
def hexdocs_url(organization, package)
when organization in ["hexpm", nil],
do: "https://hexdocs.pm/#{package}"
def hexdocs_url(organization, package),
do: "https://#{organization}.hexdocs.pm/#{package}"
def hexdocs_url(organization, package, version)
when organization in ["hexpm", nil],
do: "https://hexdocs.pm/#{package}/#{version}"
def hexdocs_url(organization, package, version),
do: "https://#{organization}.hexdocs.pm/#{package}/#{version}"
def hexdocs_module_url(organization, package, module)
when organization in ["hexpm", nil],
do: "https://hexdocs.pm/#{package}/#{module}.html"
def hexdocs_module_url(organization, package, module),
do: "https://#{organization}.hexdocs.pm/#{package}/#{module}.html"
def hexdocs_module_url(organization, package, version, module)
when organization in ["hexpm", nil],
do: "https://hexdocs.pm/#{package}/#{version}/#{module}.html"
def hexdocs_module_url(organization, package, version, module),
do: "https://#{organization}.hexdocs.pm/#{package}/#{version}/#{module}.html"
def package_retirement_reason(:RETIRED_OTHER), do: "other"
def package_retirement_reason(:RETIRED_INVALID), do: "invalid"
def package_retirement_reason(:RETIRED_SECURITY), do: "security"
def package_retirement_reason(:RETIRED_DEPRECATED), do: "deprecated"
def package_retirement_reason(:RETIRED_RENAMED), do: "renamed"
def package_retirement_reason(other), do: other
def package_retirement_message(%{reason: reason_code, message: message}) do
"(#{package_retirement_reason(reason_code)}) #{message}"
end
def package_retirement_message(%{reason: reason_code}) do
"(#{package_retirement_reason(reason_code)})"
end
  # From https://github.com/fishcakez/dialyze/blob/6698ae582c77940ee10b4babe4adeff22f1b7779/lib/mix/tasks/dialyze.ex#L168
  #
  # Returns the full OTP version string (e.g. "24.3.4") when the
  # OTP_VERSION file shipped with the current release can be read, falling
  # back to the major release number (e.g. "24") otherwise. Any read or
  # parse failure (MatchError from File.read, etc.) is deliberately
  # swallowed by the catch clause.
  def otp_version do
    major = :erlang.system_info(:otp_release) |> List.to_string()
    vsn_file = Path.join([:code.root_dir(), "releases", major, "OTP_VERSION"])

    try do
      {:ok, contents} = File.read(vsn_file)
      String.split(contents, "\n", trim: true)
    else
      # File contained exactly one non-empty line: that is the full version.
      [full] -> full
      _ -> major
    catch
      :error, _ -> major
    end
  end
def windows_repo_path_fix(path) do
case :os.type() do
{:win32, _name} -> String.replace(path, ":", "-")
{_family, _name} -> path
end
end
  # Converts a mix.lock entry tuple (`{:hex, name, version, ...}`) into a
  # map; non-hex entries yield nil. A tuple longer than 8 elements was
  # written by a newer Hex client, so the user is warned (once, gated by
  # Hex.Server) that saving the lock may lose data.
  def lock(tuple) when elem(tuple, 0) == :hex do
    if tuple_size(tuple) > 8 and Hex.Server.should_warn_lock_version?() do
      Hex.Shell.warn(
        "The mix.lock file was generated with a newer version of Hex. Update " <>
          "your client by running `mix local.hex` to avoid losing data."
      )
    end

    # destructure/2 pads missing trailing elements with nil, so older lock
    # tuples (fewer than 8 elements) still bind every name.
    destructure(
      [:hex, name, version, inner_checksum, managers, deps, repo, outer_checksum],
      Tuple.to_list(tuple)
    )

    %{
      name: to_string(name),
      version: version,
      inner_checksum: inner_checksum,
      outer_checksum: outer_checksum,
      managers: managers,
      deps: lock_deps(deps),
      repo: repo || "hexpm"
    }
  end

  def lock(_) do
    nil
  end
defp lock_deps(nil) do
nil
end
defp lock_deps(deps) do
Enum.map(deps, fn {app, req, opts} ->
opts =
opts
|> Keyword.put_new(:repo, "hexpm")
|> Keyword.update!(:hex, &to_string/1)
{app, req, opts}
end)
end
end
| 25.317901 | 121 | 0.643179 |
ff018eb541d66783b590dce23ab5c70c90d20e8e | 61 | ex | Elixir | backend/lib/bucoliq_web/views/page_view.ex | antogon/bucoliq | 0fe4727c4312322862d30014bdfae2530cc49de1 | [
"MIT"
] | null | null | null | backend/lib/bucoliq_web/views/page_view.ex | antogon/bucoliq | 0fe4727c4312322862d30014bdfae2530cc49de1 | [
"MIT"
] | 9 | 2019-12-01T18:31:31.000Z | 2021-03-10T00:38:48.000Z | backend/lib/bucoliq_web/views/page_view.ex | antogon/bucoliq | 0fe4727c4312322862d30014bdfae2530cc49de1 | [
"MIT"
] | null | null | null | defmodule BucoliqWeb.PageView do
use BucoliqWeb, :view
end
| 15.25 | 32 | 0.803279 |
ff0195c6b7388fd89b1183660f6b0b350c6bd0a4 | 3,633 | ex | Elixir | lib/vutuv_web/controllers/password_reset_controller.ex | vutuv/vutuv | 174706cdaf28cef24e1cc06bec0884c25f2412be | [
"MIT"
] | 309 | 2016-05-03T17:16:23.000Z | 2022-03-01T09:30:22.000Z | lib/vutuv_web/controllers/password_reset_controller.ex | vutuv/vutuv | 174706cdaf28cef24e1cc06bec0884c25f2412be | [
"MIT"
] | 662 | 2016-04-27T07:45:18.000Z | 2022-01-05T07:29:19.000Z | lib/vutuv_web/controllers/password_reset_controller.ex | vutuv/vutuv | 174706cdaf28cef24e1cc06bec0884c25f2412be | [
"MIT"
] | 40 | 2016-04-27T07:46:22.000Z | 2021-12-31T05:54:34.000Z | defmodule VutuvWeb.PasswordResetController do
use VutuvWeb, :controller
alias Vutuv.{Accounts, UserProfiles}
alias VutuvWeb.{Auth.Otp, Email}
plug VutuvWeb.RateLimiter, [type: :password_reset] when action in [:create]
  # GET: renders the form where a user enters their email address to
  # request a password reset.
  def new_request(conn, _params) do
    render(conn, "new_request.html")
  end
  # POST: starts a password reset. When the email matches a credential, a
  # one-time code is generated and mailed, and the credential is marked
  # resettable with a sent-at timestamp. The flash is identical whether or
  # not the email exists — this avoids leaking which accounts are registered.
  def create_request(conn, %{"password_reset" => %{"email" => email}}) do
    if user_credential = Accounts.get_user_credential(%{"email" => email}) do
      code = Otp.create(user_credential.otp_secret)
      user = UserProfiles.get_user(user_credential.user_id)
      Email.reset_request(email, code, user.locale)

      Accounts.set_password_reset_status(user_credential, %{
        password_reset_sent_at: DateTime.truncate(DateTime.utc_now(), :second),
        password_resettable: true
      })
    end

    conn
    |> put_flash(
      :info,
      gettext("Check your inbox for instructions on how to reset your password.")
    )
    |> redirect(to: Routes.password_reset_path(conn, :new, email: email))
  end
  # GET: renders the form where the user enters the one-time code they
  # received by email.
  def new(conn, %{"email" => email}) do
    render(conn, "new.html", email: email)
  end
  # POST: verifies the emailed one-time code (rate-limited by the
  # VutuvWeb.RateLimiter plug on this action). On success the rate-limit
  # counter is reset, a confirmation email is sent, and a session flag is
  # set that authorizes the subsequent edit/update steps.
  def create(conn, %{"password_reset" => %{"email" => email, "code" => code}}) do
    user_credential = Accounts.get_user_credential(%{"email" => email})

    # request_sent? guards against verifying a code when no reset was requested.
    if request_sent?(user_credential) && Otp.verify(code, user_credential.otp_secret) do
      VutuvWeb.RateLimiter.reset_count(conn)
      user = UserProfiles.get_user(user_credential.user_id)
      Email.verify_success(email, user.locale)
      Accounts.set_password_reset_status(user_credential, %{password_resettable: true})

      conn
      |> put_session(:password_reset, true)
      |> redirect(to: Routes.password_reset_path(conn, :edit, email: email))
    else
      conn
      |> put_flash(:error, gettext("Invalid code."))
      |> render("new.html", email: email)
    end
  end
  # GET: renders the new-password form. Requires both the session flag set
  # by create/2 and the credential's resettable status; otherwise the
  # request is rejected.
  def edit(conn, %{"email" => email}) do
    user_credential = Accounts.get_user_credential(%{"email" => email})
    changeset = Accounts.change_update_password(user_credential)

    if get_session(conn, :password_reset) && Accounts.can_reset_password?(user_credential) do
      render(conn, "edit.html", email: email, changeset: changeset)
    else
      unauthorized(conn)
    end
  end
  # PUT/PATCH: applies the new password, provided the credential is still
  # in the resettable state established by the code-verification step.
  def update(conn, %{"password_reset" => %{"email" => email} = params}) do
    user_credential = Accounts.get_user_credential(%{"email" => email})

    if Accounts.can_reset_password?(user_credential) do
      do_update(conn, user_credential, params)
    else
      unauthorized(conn)
    end
  end
  # Performs the actual password change. On success: confirmation email,
  # reset state cleared, the whole session dropped and renewed (logs the
  # user out everywhere this session was valid), then redirect to login.
  # On validation failure the edit form is re-rendered with errors.
  defp do_update(conn, user_credential, %{"email" => email} = params) do
    case Accounts.update_password(user_credential, params) do
      {:ok, _user_credential} ->
        user = UserProfiles.get_user(user_credential.user_id)
        Email.reset_success(email, user.locale)

        Accounts.set_password_reset_status(user_credential, %{
          password_reset_sent_at: nil,
          password_resettable: false
        })

        conn
        |> clear_session()
        |> configure_session(renew: true)
        |> put_flash(:info, gettext("Your password has been reset."))
        |> redirect(to: Routes.session_path(conn, :new))

      {:error, %Ecto.Changeset{} = changeset} ->
        render(conn, "edit.html", email: email, changeset: changeset)
    end
  end
  # Rejects the request with a flash message and redirect to login; halt/1
  # stops any further plugs/actions from running on this conn.
  defp unauthorized(conn) do
    conn
    |> put_flash(:error, gettext("You are not authorized to view this page."))
    |> redirect(to: Routes.session_path(conn, :new))
    |> halt()
  end
defp request_sent?(user_credential) do
user_credential && user_credential.password_reset_sent_at
end
end
| 33.027273 | 93 | 0.678778 |
ff01972e4fdbf98f8acae990dd1b231542f9e375 | 1,371 | ex | Elixir | apps/site/test/support/conn_case.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 42 | 2019-05-29T16:05:30.000Z | 2021-08-09T16:03:37.000Z | apps/site/test/support/conn_case.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 872 | 2019-05-29T17:55:50.000Z | 2022-03-30T09:28:43.000Z | apps/site/test/support/conn_case.ex | noisecapella/dotcom | d5ef869412102d2230fac3dcc216f01a29726227 | [
"MIT"
] | 12 | 2019-07-01T18:33:21.000Z | 2022-03-10T02:13:57.000Z | defmodule SiteWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
imports other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
  # Builds the base test connection used by every test, pinning the host
  # to "localhost" so URL generation is deterministic.
  def default_conn() do
    %Plug.Conn{
      Phoenix.ConnTest.build_conn()
      | host: "localhost"
    }
  end
  # Injected into every test module that does `use SiteWeb.ConnCase`.
  # Router helpers for CMS-backed resources are excluded here and replaced
  # by the CMS-aware helpers from SiteWeb.CmsRouterHelpers.
  using do
    quote do
      # Import conveniences for testing with connections
      use Phoenix.ConnTest

      import SiteWeb.Router.Helpers,
        except: [
          news_entry_path: 2,
          news_entry_path: 3,
          news_entry_path: 4,
          event_path: 2,
          event_path: 3,
          event_path: 4,
          project_path: 2,
          project_path: 3,
          project_update_path: 3,
          project_update_path: 4
        ]

      import SiteWeb.CmsRouterHelpers
      import CMS.Factory

      # The default endpoint for testing
      @endpoint SiteWeb.Endpoint

      import SiteWeb.ConnCase, only: [default_conn: 0]
    end
  end
  # Provides a fresh default connection as `conn` in every test's context.
  setup _tags do
    {:ok, conn: default_conn()}
  end
end
| 23.637931 | 56 | 0.651349 |
ff01b6bddb878a38c815de0c0f54ccf3740fb8b0 | 121 | exs | Elixir | test/test_helper.exs | jschoch/jsonstruct | 015106e4d17daa028ec97d55ebc987fee7628a85 | [
"MIT"
] | 3 | 2017-01-15T03:27:31.000Z | 2018-03-06T17:07:06.000Z | test/test_helper.exs | jschoch/jsonstruct | 015106e4d17daa028ec97d55ebc987fee7628a85 | [
"MIT"
] | null | null | null | test/test_helper.exs | jschoch/jsonstruct | 015106e4d17daa028ec97d55ebc987fee7628a85 | [
"MIT"
] | null | null | null | ExUnit.start()
defmodule JsstND do
  # Test fixture: a struct with one nested-map field (outer1) and one flat
  # field (outer2). ExConstructor generates a new/1 constructor that accepts
  # maps/keyword lists with string or atom keys.
  defstruct outer1: %{innerK1: nil,innerK2: nil},outer2: nil
  use ExConstructor
end
| 17.285714 | 60 | 0.752066 |
ff01c23542d9a70d2dd2c1af7a81ef8476682e55 | 914 | ex | Elixir | clients/policy_troubleshooter/lib/google_api/policy_troubleshooter/v1beta/metadata.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/policy_troubleshooter/lib/google_api/policy_troubleshooter/v1beta/metadata.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/policy_troubleshooter/lib/google_api/policy_troubleshooter/v1beta/metadata.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.PolicyTroubleshooter.V1beta do
  @moduledoc """
  API client metadata for GoogleApi.PolicyTroubleshooter.V1beta.
  """

  # Revision date (YYYYMMDD) of the discovery document this client was
  # generated from.
  @discovery_revision "20210227"

  def discovery_revision(), do: @discovery_revision
end
| 33.851852 | 74 | 0.766958 |
ff01ddf0a7af6ac197ac7c503fbdc59713254ca8 | 3,042 | ex | Elixir | lib/teiserver/chat/libs/room_message_lib.ex | Teifion/teiserver | 131e002160e1e948cb55e56d62370ba66a972cba | [
"MIT"
] | 6 | 2021-02-08T10:42:53.000Z | 2021-04-25T12:12:03.000Z | lib/teiserver/chat/libs/room_message_lib.ex | Teifion/teiserver | 131e002160e1e948cb55e56d62370ba66a972cba | [
"MIT"
] | null | null | null | lib/teiserver/chat/libs/room_message_lib.ex | Teifion/teiserver | 131e002160e1e948cb55e56d62370ba66a972cba | [
"MIT"
] | 2 | 2021-02-23T22:34:00.000Z | 2021-04-08T13:31:36.000Z | defmodule Teiserver.Chat.RoomMessageLib do
use CentralWeb, :library
alias Teiserver.Chat.RoomMessage
# Functions
  # NOTE(review): "far fa-???" looks like a placeholder icon class — confirm
  # an actual FontAwesome icon was intended here.
  @spec icon :: String.t()
  def icon, do: "far fa-???"

  @spec colours :: {String.t(), String.t(), String.t()}
  def colours, do: Central.Helpers.StylingHelper.colours(:default)

  # Builds the favourite/bookmark descriptor map for a room message, used
  # by the central favourites UI (type/item colours, icon, label and URL).
  @spec make_favourite(Map.t()) :: Map.t()
  def make_favourite(room_message) do
    %{
      type_colour: colours() |> elem(0),
      type_icon: icon(),

      item_id: room_message.id,
      item_type: "teiserver_chat_room_message",
      item_colour: colours() |> elem(0),
      item_icon: Teiserver.Chat.RoomMessageLib.icon(),
      item_label: "#{room_message.name}",

      url: "/chat/room_messages/#{room_message.id}"
    }
  end
  # Queries
  # Base queryable over all room messages.
  @spec query_room_messages() :: Ecto.Query.t
  def query_room_messages do
    from room_messages in RoomMessage
  end

  # Applies each search parameter in turn via _search/3; nil params leave
  # the query untouched.
  @spec search(Ecto.Query.t, Map.t | nil) :: Ecto.Query.t
  def search(query, nil), do: query
  def search(query, params) do
    params
    |> Enum.reduce(query, fn ({key, value}, query_acc) ->
      _search(query_acc, key, value)
    end)
  end

  # Individual filters. Empty-string and nil values are no-ops so callers
  # can pass form params straight through. "*" in search terms acts as a
  # wildcard (translated to SQL "%").
  @spec _search(Ecto.Query.t, Atom.t(), any()) :: Ecto.Query.t
  def _search(query, _, ""), do: query
  def _search(query, _, nil), do: query

  def _search(query, :id, id) do
    from room_messages in query,
      where: room_messages.id == ^id
  end

  def _search(query, :user_id, user_id) do
    from room_messages in query,
      where: room_messages.user_id == ^user_id
  end

  def _search(query, :id_list, id_list) do
    from room_messages in query,
      where: room_messages.id in ^id_list
  end

  # NOTE(review): :simple_search filters on room_messages.name while :term
  # filters on room_messages.content — confirm this asymmetry is intended.
  def _search(query, :simple_search, ref) do
    ref_like = "%" <> String.replace(ref, "*", "%") <> "%"

    from room_messages in query,
      where: (
        ilike(room_messages.name, ^ref_like)
      )
  end

  def _search(query, :term, ref) do
    ref_like = "%" <> String.replace(ref, "*", "%") <> "%"

    from room_messages in query,
      where: (
        ilike(room_messages.content, ^ref_like)
      )
  end
  # Applies a named sort order to the query; nil leaves it unsorted.
  @spec order_by(Ecto.Query.t, String.t | nil) :: Ecto.Query.t
  def order_by(query, nil), do: query
  def order_by(query, "Name (A-Z)") do
    from room_messages in query,
      order_by: [asc: room_messages.name]
  end

  def order_by(query, "Name (Z-A)") do
    from room_messages in query,
      order_by: [desc: room_messages.name]
  end

  def order_by(query, "Newest first") do
    from room_messages in query,
      order_by: [desc: room_messages.inserted_at]
  end

  def order_by(query, "Oldest first") do
    from room_messages in query,
      order_by: [asc: room_messages.inserted_at]
  end

  # Applies the requested association preloads; currently only :user is
  # supported.
  @spec preload(Ecto.Query.t, List.t | nil) :: Ecto.Query.t
  def preload(query, nil), do: query
  def preload(query, preloads) do
    query = if :user in preloads, do: _preload_users(query), else: query
    query
  end

  def _preload_users(query) do
    from room_messages in query,
      left_join: users in assoc(room_messages, :user),
      preload: [user: users]
  end
end
| 26.684211 | 72 | 0.647272 |
ff02151ef26126d053d0be9053c8d4e673f538fa | 597 | ex | Elixir | lib/slacker/web_api.ex | michaelherold/slacker | 11b4444af38f828256b3fd2018b2f95dcb4d450d | [
"MIT"
] | 94 | 2015-05-20T22:50:09.000Z | 2021-01-22T23:49:17.000Z | lib/slacker/web_api.ex | michaelherold/slacker | 11b4444af38f828256b3fd2018b2f95dcb4d450d | [
"MIT"
] | 5 | 2015-11-06T03:29:35.000Z | 2016-06-27T18:58:25.000Z | lib/slacker/web_api.ex | michaelherold/slacker | 11b4444af38f828256b3fd2018b2f95dcb4d450d | [
"MIT"
] | 22 | 2015-10-08T01:23:02.000Z | 2017-11-07T00:33:29.000Z | defmodule Slacker.WebAPI do
use HTTPoison.Base
@url_base Application.get_env(:slacker, :url_base) || "https://slack.com/api/"
  # Overrides HTTPoison.Base.post/2: performs the request and then
  # normalizes the Slack response into {:ok, body} | {:error, response}.
  def post(path, body) do
    path
    |> super(body)
    |> check_response
  end
  # HTTPoison.Base hook: prefixes every request path with the Slack API
  # base URL. NOTE(review): @url_base reads application config at compile
  # time, so changing :url_base at runtime has no effect without recompiling.
  defp process_url(path) do
    @url_base <> path
  end
defp process_response_body(body) do
body
|> Poison.decode!
|> Enum.reduce(%{}, fn {k, v}, map -> Dict.put(map, String.to_atom(k), v) end)
end
defp check_response({:ok, %{status_code: 200, body: %{ok: true} = body}}) do
{:ok, body}
end
defp check_response({_, response}), do: {:error, response}
end
| 22.111111 | 82 | 0.634841 |
ff023fe2521b47b1bf28ee646e3b97aeaafd35f5 | 1,756 | exs | Elixir | graphical/mix.exs | joshmcarthur/learning-elixir | 5ccfdd61dcfaba82c05559fb9c0e6f99cf4319e7 | [
"MIT"
] | null | null | null | graphical/mix.exs | joshmcarthur/learning-elixir | 5ccfdd61dcfaba82c05559fb9c0e6f99cf4319e7 | [
"MIT"
] | null | null | null | graphical/mix.exs | joshmcarthur/learning-elixir | 5ccfdd61dcfaba82c05559fb9c0e6f99cf4319e7 | [
"MIT"
] | null | null | null | defmodule Graphical.Mixfile do
use Mix.Project
  # Mix project definition; note the extra :phoenix and :gettext compilers
  # required by Phoenix 1.3-era projects.
  def project do
    [
      app: :graphical,
      version: "0.0.1",
      elixir: "~> 1.4",
      elixirc_paths: elixirc_paths(Mix.env),
      compilers: [:phoenix, :gettext] ++ Mix.compilers,
      start_permanent: Mix.env == :prod,
      aliases: aliases(),
      deps: deps()
    ]
  end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
  # OTP application callback module plus extra applications started before
  # this one (:runtime_tools enables remote observation/debugging).
  def application do
    [
      mod: {Graphical.Application, []},
      extra_applications: [:logger, :runtime_tools]
    ]
  end
# Specifies which paths to compile per environment.
  # Compile test support helpers only in the :test environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
  # Project dependencies: Phoenix web stack, Absinthe GraphQL stack,
  # Guardian/Comeonin for auth, and Faker for seed/test data.
  defp deps do
    [
      {:phoenix, "~> 1.3.2"},
      {:phoenix_pubsub, "~> 1.0"},
      {:phoenix_ecto, "~> 3.2"},
      {:postgrex, ">= 0.0.0"},
      {:gettext, "~> 0.11"},
      {:cowboy, "~> 1.0"},
      {:poison, "~> 3.1.0"},
      {:absinthe, "~> 1.4.6"},
      {:absinthe_plug, "~> 1.4.2"},
      {:absinthe_ecto, "~> 0.1.3"},
      {:faker, "~> 0.9"},
      {:comeonin, "~> 4.0"},
      {:bcrypt_elixir, "~> 1.0"},
      {:guardian, "~> 1.0"}
    ]
  end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
  # Convenience task aliases for database setup/reset and running tests
  # against a freshly migrated database.
  defp aliases do
    [
      "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      "test": ["ecto.create --quiet", "ecto.migrate", "test"]
    ]
  end
end
| 26.208955 | 79 | 0.559795 |
ff025568a4ed60ab7118e3649ef20a822a09545e | 3,362 | ex | Elixir | lib/central/account/libs/auth_lib.ex | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | null | null | null | lib/central/account/libs/auth_lib.ex | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | null | null | null | lib/central/account/libs/auth_lib.ex | icexuick/teiserver | 22f2e255e7e21f977e6b262acf439803626a506c | [
"MIT"
] | null | null | null | defmodule Central.Account.AuthLib do
require Logger
alias Central.Account.AuthGroups.Server
  @spec icon :: String.t()
  def icon(), do: "far fa-address-card"

  # All registered permission sets, keyed as stored in the AuthGroups server.
  @spec get_all_permission_sets() :: Map.t()
  def get_all_permission_sets do
    Server.get_all()
  end

  # Flattens every registered permission set into a single list, expanded
  # with the module/section prefixes via split_permissions/1.
  @spec get_all_permissions() :: [String.t()]
  def get_all_permissions do
    Server.get_all()
    |> Enum.map(fn {_, ps} -> ps end)
    |> List.flatten()
    |> split_permissions
  end
@spec split_permissions([String.t()]) :: [String.t()]
def split_permissions(permission_list) do
sections =
permission_list
|> Enum.map(fn p ->
p
|> String.split(".")
|> Enum.take(2)
|> Enum.join(".")
end)
|> Enum.uniq()
modules =
permission_list
|> Enum.map(fn p -> p |> String.split(".") |> hd end)
|> Enum.uniq()
permission_list ++ sections ++ modules
end
  # Registers a permission set: each auth in `auths` is expanded to the
  # fully-qualified "module.section.auth" form and stored in the
  # AuthGroups server.
  def add_permission_set(module, section, auths) do
    permissions =
      auths
      |> Enum.map(fn a ->
        "#{module}.#{section}.#{a}"
      end)

    Server.add(module, section, permissions)
  end
def allow_any?(conn, perms) do
Enum.any?(
perms
|> Enum.map(fn p -> allow?(conn, p) end)
)
end
  # If you don't need permissions then lets not bother checking
  # allow?/2 answers "does this subject hold this permission?". The subject
  # may be a conn, a LiveView socket, a user (anything with :permissions),
  # or a raw permission list; the requirement may be a single permission
  # string or a list (all of which must pass).
  @spec allow?(Map.t() | Plug.Conn.t() | [String.t()], String.t() | [String.t()]) :: boolean
  def allow?(_, nil), do: true
  def allow?(_, ""), do: true
  def allow?(_, []), do: true

  # Handle conn
  def allow?(%Plug.Conn{} = conn, permission_required) do
    allow?(conn.assigns[:current_user], permission_required)
  end

  # This allows us to use something with permissions in it
  def allow?(%{permissions: permissions}, permission_required) do
    allow?(permissions, permission_required)
  end

  # Socket
  def allow?(%Phoenix.LiveView.Socket{} = socket, permission_required) do
    allow?(socket.assigns[:current_user], permission_required)
  end

  # Handle users
  def allow?(%{} = user, permission_required) do
    allow?(user.permissions, permission_required)
  end

  # A list requirement means every listed permission must be held.
  def allow?(permissions_held, permission_required) when is_list(permission_required) do
    Enum.all?(
      permission_required,
      fn p ->
        allow?(permissions_held, p)
      end
    )
  end

  def allow?(permissions_held, permission_required) do
    Logger.debug("Permission test, has: #{Kernel.inspect permissions_held}, needs: #{Kernel.inspect permission_required}")
    cond do
      # Enum.member?(Application.get_env(:centaur, CentralWeb)[:universal_permissions], permission_required) -> true
      # No current user / no permission list at all -> denied.
      permissions_held == nil ->
        Logger.debug("AuthLib.allow?() -> No permissions held")
        false

      # Developers always have permission
      Enum.member?(permissions_held, "admin.dev.developer") && permission_required != "debug" ->
        true

      # Standard "do you have permission" response
      Enum.member?(permissions_held, permission_required) ->
        true

      # Default to not having permission
      true ->
        Logger.debug("AuthLib.allow?() -> Permission not found: #{permission_required}")
        false
    end
  end
# This is used as part of the permission system getting the current user
@spec current_user(Plug.Conn.t()) :: User.t() | nil
def current_user(conn) do
conn.assigns[:current_user]
end
end
| 27.112903 | 122 | 0.644557 |
ff0294d432b079623961697aec1633974c24ee57 | 10,603 | exs | Elixir | test/set_locale_test.exs | VinogradovAlexandr/set_locale | 2b02ffb14707f63344d8c5548a61a443b7cf7298 | [
"WTFPL"
] | null | null | null | test/set_locale_test.exs | VinogradovAlexandr/set_locale | 2b02ffb14707f63344d8c5548a61a443b7cf7298 | [
"WTFPL"
] | null | null | null | test/set_locale_test.exs | VinogradovAlexandr/set_locale | 2b02ffb14707f63344d8c5548a61a443b7cf7298 | [
"WTFPL"
] | null | null | null | defmodule SetLocaleTest do
use ExUnit.Case
doctest SetLocale
use Phoenix.ConnTest
  # Minimal Gettext backend used as the :gettext option in these tests;
  # the locales it knows determine which redirects are considered valid.
  defmodule MyGettext do
    use Gettext, otp_app: :set_locale
  end
@cookie_key "locale"
@default_options %SetLocale.Config{gettext: MyGettext, default_locale: "en-gb"}
@default_options_with_cookie %SetLocale.Config{gettext: MyGettext, default_locale: "en-gb", cookie_key: @cookie_key}
  # init/1 must normalize all supported option shapes into a
  # SetLocale.Config struct.
  describe "init" do
    test "it supports a legacy config" do
      assert SetLocale.init([MyGettext, "en-gb"]) == %SetLocale.Config{
               gettext: SetLocaleTest.MyGettext,
               default_locale: "en-gb",
               cookie_key: nil
             }
    end

    test "it sets cookie_key to nil if not given" do
      assert SetLocale.init(gettext: MyGettext, default_locale: "en-gb") == %SetLocale.Config{
               gettext: SetLocaleTest.MyGettext,
               default_locale: "en-gb",
               cookie_key: nil
             }
    end

    test "it forwards cookie_key option" do
      assert SetLocale.init(gettext: MyGettext, default_locale: "en-gb", cookie_key: "locale") == %SetLocale.Config{
               gettext: SetLocaleTest.MyGettext,
               default_locale: "en-gb",
               cookie_key: "locale"
             }
    end
  end
  # Without a URL locale or cookie, the plug must pick a locale from the
  # Accept-Language header (falling back to the configured default) and
  # redirect, preserving the rest of the path.
  describe "when no locale is given and there is no cookie" do
    test "when a root path is requested, it should redirect to default locale" do
      assert Gettext.get_locale(MyGettext) == "en"

      conn = Phoenix.ConnTest.build_conn(:get, "/", %{})
      |> Plug.Conn.fetch_cookies()
      |> SetLocale.call(@default_options)

      assert redirected_to(conn) == "/en-gb"
    end

    test "when headers contain accept-language, it should redirect to that locale if supported" do
      assert Gettext.get_locale(MyGettext) == "en"

      conn = Phoenix.ConnTest.build_conn(:get, "/", %{})
      |> Plug.Conn.fetch_cookies()
      |> Plug.Conn.put_req_header("accept-language", "de, en-gb;q=0.8, nl;q=0.9, en;q=0.7")
      |> SetLocale.call(@default_options)

      assert redirected_to(conn) == "/nl"
    end

    test "when headers contain accept-language with full language tags with country variants,
    it should redirect to the language if country variant is not supported" do
      assert Gettext.get_locale(MyGettext) == "en"

      conn = Phoenix.ConnTest.build_conn(:get, "/", %{})
      |> Plug.Conn.fetch_cookies()
      |> Plug.Conn.put_req_header("accept-language", "de, en-gb;q=0.8, nl-nl;q=0.9, en;q=0.7, *;q=0.5")
      |> SetLocale.call(@default_options)

      assert redirected_to(conn) == "/nl"
    end

    test "when headers contain accept-language but none is accepted, it should redirect to the default locale" do
      assert Gettext.get_locale(MyGettext) == "en"

      conn = Phoenix.ConnTest.build_conn(:get, "/", %{})
      |> Plug.Conn.fetch_cookies()
      |> Plug.Conn.put_req_header("accept-language", "de, fr;q=0.9")
      |> SetLocale.call(@default_options)

      assert redirected_to(conn) == "/en-gb"
    end

    test "when headers contain accept-language in incorrect format or language tags with larger range it does not fail" do
      assert Gettext.get_locale(MyGettext) == "en"

      conn = Phoenix.ConnTest.build_conn(:get, "/", %{})
      |> Plug.Conn.fetch_cookies()
      |> Plug.Conn.put_req_header("accept-language", ",, hell#foo-bar-baz-1234%, zh-Hans-CN;q=0.5")
      |> SetLocale.call(@default_options)

      assert redirected_to(conn) == "/en-gb"
    end

    test "it redirects to a prefix with default locale" do
      conn = Phoenix.ConnTest.build_conn(:get, "/foo/bar/baz", %{})
      |> Plug.Conn.fetch_cookies()
      |> SetLocale.call(@default_options)

      assert redirected_to(conn) == "/en-gb/foo/bar/baz"
    end
  end
  # The locale cookie takes precedence over both Accept-Language headers
  # and the configured default.
  # NOTE(review): "an cookie" in the describe names should read "a cookie"
  # (left unchanged here since describe names are runtime strings).
  describe "when no locale is given but there is an cookie" do
    test "when a root path is requested, it should redirect to cookie locale" do
      assert Gettext.get_locale(MyGettext) == "en"

      conn = Phoenix.ConnTest.build_conn(:get, "/", %{})
      |> Plug.Conn.put_resp_cookie(@cookie_key, "nl")
      |> Plug.Conn.fetch_cookies()
      |> SetLocale.call(@default_options_with_cookie)

      assert redirected_to(conn) == "/nl"
    end

    test "when headers contain accept-language, it should redirect to cookie locale" do
      assert Gettext.get_locale(MyGettext) == "en"

      conn = Phoenix.ConnTest.build_conn(:get, "/", %{})
      |> Plug.Conn.put_resp_cookie(@cookie_key, "nl")
      |> Plug.Conn.fetch_cookies()
      |> Plug.Conn.put_req_header("accept-language", "de, en-gb;q=0.8, en;q=0.7")
      |> SetLocale.call(@default_options_with_cookie)

      assert redirected_to(conn) == "/nl"
    end

    test "it redirects to a prefix with cookie locale" do
      conn = Phoenix.ConnTest.build_conn(:get, "/foo/bar/baz", %{})
      |> Plug.Conn.put_resp_cookie(@cookie_key, "nl")
      |> Plug.Conn.fetch_cookies()
      |> SetLocale.call(@default_options_with_cookie)

      assert redirected_to(conn) == "/nl/foo/bar/baz"
    end
  end

  # An unsupported URL locale is replaced by the default (or cookie) locale.
  describe "when an unsupported locale is given and there is no cookie" do
    test "it redirects to a prefix with default locale" do
      conn = Phoenix.ConnTest.build_conn(:get, "/de-at/foo/bar/baz", %{"locale" => "de-at"})
      |> Plug.Conn.fetch_cookies()
      |> SetLocale.call(@default_options)

      assert redirected_to(conn) == "/en-gb/foo/bar/baz"
    end
  end

  describe "when an unsupported locale is given but there is a cookie" do
    test "it redirects to a prefix with cookie locale" do
      conn = Phoenix.ConnTest.build_conn(:get, "/de-at/foo/bar/baz", %{"locale" => "de-at"})
      |> Plug.Conn.put_resp_cookie(@cookie_key, "nl")
      |> Plug.Conn.fetch_cookies()
      |> SetLocale.call(@default_options_with_cookie)

      assert redirected_to(conn) == "/nl/foo/bar/baz"
    end
  end
  # When the first path segment merely looks like a locale param but is
  # ordinary content ("/foo/bar"), the plug must prepend a locale chosen
  # from (in order): referer path, Accept-Language, cookie, or default.
  describe "when the locale is no locale, but a part of the url and there is no cookie" do
    test "it redirects to a prefix with default locale" do
      conn = Phoenix.ConnTest.build_conn(:get, "/foo/bar", %{"locale" => "foo"})
      |> Plug.Conn.fetch_cookies()
      |> SetLocale.call(@default_options)

      assert redirected_to(conn) == "/en-gb/foo/bar"
    end

    test "when headers contain referer with valid locale in the path, it should use redirect to that locale if supported" do
      conn = Phoenix.ConnTest.build_conn(:get, "/foo/bar", %{"locale" => "foo"})
      |> Plug.Conn.fetch_cookies()
      |> Plug.Conn.put_req_header("referer", "/nl/origin")
      |> SetLocale.call(@default_options)

      assert redirected_to(conn) == "/nl/foo/bar"
    end

    test "when headers contain referer without valid locale in the path, it should ignore it and use the default" do
      conn = Phoenix.ConnTest.build_conn(:get, "/foo/bar", %{"locale" => "foo"})
      |> Plug.Conn.fetch_cookies()
      |> Plug.Conn.put_req_header("referer", "/origin")
      |> SetLocale.call(@default_options)

      assert redirected_to(conn) == "/en-gb/foo/bar"
    end

    test "when headers contain accept-language, it should redirect to the header locale if supported" do
      conn = Phoenix.ConnTest.build_conn(:get, "/foo/bar", %{"locale" => "foo"})
      |> Plug.Conn.fetch_cookies()
      |> Plug.Conn.put_req_header("accept-language", "de, en-gb;q=0.8, nl;q=0.9, en;q=0.7")
      |> SetLocale.call(@default_options)

      assert redirected_to(conn) == "/nl/foo/bar"
    end

    test "when headers contain accept-language, but none is accepted, it should redirect to the default locale" do
      conn = Phoenix.ConnTest.build_conn(:get, "/foo/bar", %{"locale" => "foo"})
      |> Plug.Conn.fetch_cookies()
      |> Plug.Conn.put_req_header("accept-language", "de, fr;q=0.9")
      |> SetLocale.call(@default_options)

      assert redirected_to(conn) == "/en-gb/foo/bar"
    end
  end

  describe "when the locale is no locale, but a part of the url and there is a cookie" do
    test "it redirects to a prefix with cookie locale" do
      conn = Phoenix.ConnTest.build_conn(:get, "/foo/bar", %{"locale" => "foo"})
      |> Plug.Conn.put_resp_cookie(@cookie_key, "nl")
      |> Plug.Conn.fetch_cookies()
      |> SetLocale.call(@default_options_with_cookie)

      assert redirected_to(conn) == "/nl/foo/bar"
    end

    test "when headers contain accept-language, it should redirect to the cookie locale" do
      conn = Phoenix.ConnTest.build_conn(:get, "/foo/bar", %{"locale" => "foo"})
      |> Plug.Conn.put_resp_cookie(@cookie_key, "nl")
      |> Plug.Conn.fetch_cookies()
      |> Plug.Conn.put_req_header("accept-language", "de, en-gb;q=0.8, en;q=0.7")
      |> SetLocale.call(@default_options_with_cookie)

      assert redirected_to(conn) == "/nl/foo/bar"
    end
  end
  # A supported URL locale is accepted without redirect: it is assigned on
  # the conn and set as the Gettext locale. Country variants without an
  # exact match fall back to the parent language; query strings survive
  # redirects untouched.
  describe "when an existing locale is given" do
    test "with sibling: it should only assign it" do
      conn = Phoenix.ConnTest.build_conn(:get, "/en-gb/foo/bar/baz", %{"locale" => "en-gb"})
      |> Plug.Conn.fetch_cookies()
      |> SetLocale.call(@default_options)

      assert conn.status == nil
      assert conn.assigns == %{locale: "en-gb"}
      assert Gettext.get_locale(MyGettext) == "en-gb"
    end

    test "without sibling: it should only assign it" do
      conn = Phoenix.ConnTest.build_conn(:get, "/nl/foo/bar/baz", %{"locale" => "nl"})
      |> Plug.Conn.fetch_cookies()
      |> SetLocale.call(@default_options)

      assert conn.status == nil
      assert conn.assigns == %{locale: "nl"}
      assert Gettext.get_locale(MyGettext) == "nl"
    end

    test "it should fallback to parent language when sibling does not exist, ie. nl-be should use nl" do
      conn = Phoenix.ConnTest.build_conn(:get, "/nl-be/foo/bar/baz", %{"locale" => "nl-be"})
      |> Plug.Conn.fetch_cookies()
      |> SetLocale.call(@default_options)

      assert redirected_to(conn) == "/nl/foo/bar/baz"
    end

    test "should keep query strings as is" do
      conn = Phoenix.ConnTest.build_conn(:get, "/de-at/foo/bar?foo=bar&baz=true", %{"locale" => "de-at"})
      |> Plug.Conn.fetch_cookies()
      |> SetLocale.call(@default_options)

      assert redirected_to(conn) == "/en-gb/foo/bar?foo=bar&baz=true"
    end
  end
end
| 39.563433 | 124 | 0.620202 |
ff02fabf0d5d96efc74966ae5d4768787de913cc | 1,754 | ex | Elixir | clients/fusion_tables/lib/google_api/fusion_tables/v2/model/column_base_column.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/fusion_tables/lib/google_api/fusion_tables/v2/model/column_base_column.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/fusion_tables/lib/google_api/fusion_tables/v2/model/column_base_column.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.FusionTables.V2.Model.ColumnBaseColumn do
  @moduledoc """
  Identifier of the base column. If present, this column is derived from the specified base column.

  ## Attributes

  - columnId (integer()): The id of the column in the base table from which this column is derived. Defaults to: `null`.
  - tableIndex (integer()): Offset to the entry in the list of base tables in the table definition. Defaults to: `null`.
  """

  # Auto-generated API model; the field/1 macro and decode/2 come from
  # GoogleApi.Gax.ModelBase.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :columnId => any(),
          :tableIndex => any()
        }

  field(:columnId)
  field(:tableIndex)
end
defimpl Poison.Decoder, for: GoogleApi.FusionTables.V2.Model.ColumnBaseColumn do
def decode(value, options) do
GoogleApi.FusionTables.V2.Model.ColumnBaseColumn.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.FusionTables.V2.Model.ColumnBaseColumn do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.392157 | 120 | 0.740593 |
ff0314354c058ecf563392fc23e1f94be3523e9f | 2,218 | exs | Elixir | config/prod.exs | masato25/open_lambda | b8b51371126dc59539a71c80d7f81b08408c7ae1 | [
"Apache-2.0"
] | null | null | null | config/prod.exs | masato25/open_lambda | b8b51371126dc59539a71c80d7f81b08408c7ae1 | [
"Apache-2.0"
] | null | null | null | config/prod.exs | masato25/open_lambda | b8b51371126dc59539a71c80d7f81b08408c7ae1 | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# For production, we configure the host to read the PORT
# from the system environment. Therefore, you will need
# to set PORT=80 before running your server.
#
# You should also configure the url host to something
# meaningful, we use this information when generating URLs.
#
# Finally, we also include the path to a manifest
# containing the digested version of static files. This
# manifest is generated by the mix phoenix.digest task
# which you typically run after static files are built.
config :fastweb_lambda, OpenLambda.Endpoint,
http: [port: {:system, "PORT"}],
url: [host: "example.com", port: 80],
cache_static_manifest: "priv/static/manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :fastweb_lambda, OpenLambda.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
# config :fastweb_lambda, OpenLambda.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :fastweb_lambda, OpenLambda.Endpoint, server: true
#
# You will also need to set the application root to `.` in order
# for the new static assets to be served after a hot upgrade:
#
# config :fastweb_lambda, OpenLambda.Endpoint, root: "."
# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"
| 33.606061 | 67 | 0.718665 |
ff0316bbe51347a92abcdd53a6353c7c31de1248 | 4,599 | ex | Elixir | lib/livebook_web/live/session_live/input_cell_settings_component.ex | FE-box/livebook | 44ae4ecf941decb1b3b0b8b4a0811aa6f9aaf83d | [
"Apache-2.0"
] | null | null | null | lib/livebook_web/live/session_live/input_cell_settings_component.ex | FE-box/livebook | 44ae4ecf941decb1b3b0b8b4a0811aa6f9aaf83d | [
"Apache-2.0"
] | null | null | null | lib/livebook_web/live/session_live/input_cell_settings_component.ex | FE-box/livebook | 44ae4ecf941decb1b3b0b8b4a0811aa6f9aaf83d | [
"Apache-2.0"
] | null | null | null | defmodule LivebookWeb.SessionLive.InputCellSettingsComponent do
use LivebookWeb, :live_component
alias Livebook.Session
alias Livebook.Notebook.Cell
@impl true
def update(assigns, socket) do
cell = assigns.cell
socket =
socket
|> assign(assigns)
|> assign(:current_type, cell.type)
|> assign_new(:attrs, fn ->
Map.take(cell, [:name, :type, :reactive, :props])
end)
|> assign_new(:valid, fn -> true end)
{:ok, socket}
end
@impl true
def render(assigns) do
~H"""
<div class="p-6 pb-4 flex flex-col space-y-8">
<h3 class="text-2xl font-semibold text-gray-800">
Cell settings
</h3>
<form
phx-submit="save"
phx-change="validate"
phx-target={@myself}
spellcheck="false"
autocomplete="off">
<div class="flex flex-col space-y-6">
<div>
<div class="input-label">Type</div>
<.select name="attrs[type]" selected={@attrs.type} options={input_types()} />
</div>
<div>
<div class="input-label">Name</div>
<input type="text" class="input" name="attrs[name]" value={@attrs.name} autofocus />
</div>
<.extra_fields type={@attrs.type} props={@attrs.props} />
<.switch_checkbox
name="attrs[reactive]"
label="Reactive (reevaluates dependent cells on change)"
checked={@attrs.reactive} />
</div>
<div class="mt-8 flex justify-end space-x-2">
<%= live_patch "Cancel", to: @return_to, class: "button button-outlined-gray" %>
<button class="button button-blue" type="submit" disabled={not @valid}>
Save
</button>
</div>
</form>
</div>
"""
end
defp extra_fields(%{type: :range} = assigns) do
~H"""
<div class="flex space-x-4">
<div class="flex-grow">
<div class="input-label">Min</div>
<input type="number" class="input" name="attrs[props][min]" value={@props.min} />
</div>
<div class="flex-grow">
<div class="input-label">Max</div>
<input type="number" class="input" name="attrs[props][max]" value={@props.max} />
</div>
<div class="flex-grow">
<div class="input-label">Step</div>
<input type="number" class="input" name="attrs[props][step]" value={@props.step} />
</div>
</div>
"""
end
defp extra_fields(assigns), do: ~H""
@impl true
def handle_event("validate", params, socket) do
{valid?, attrs} = validate_attrs(params["attrs"], socket.assigns.attrs)
{:noreply, socket |> assign(attrs: attrs) |> assign(:valid, valid?)}
end
def handle_event("save", params, socket) do
{true, attrs} = validate_attrs(params["attrs"], socket.assigns.attrs)
attrs =
if attrs.type != socket.assigns.current_type do
Map.put(attrs, :value, default_value(attrs.type, attrs.props))
else
attrs
end
Session.set_cell_attributes(socket.assigns.session_id, socket.assigns.cell.id, attrs)
{:noreply, push_patch(socket, to: socket.assigns.return_to)}
end
defp validate_attrs(data, prev_attrs) do
name = data["name"]
type = data["type"] |> String.to_existing_atom()
reactive = Map.has_key?(data, "reactive")
{props_valid?, props} =
if type == prev_attrs.type do
data |> Map.get("props", %{}) |> validate_props(type)
else
{true, Cell.Input.default_props(type)}
end
valid? = name != "" and props_valid?
{valid?, %{name: name, type: type, reactive: reactive, props: props}}
end
defp validate_props(data, :range) do
min = parse_number(data["min"])
max = parse_number(data["max"])
step = parse_number(data["step"])
valid? = min != nil and max != nil and step != nil and min < max and step > 0
data = %{min: min, max: max, step: step}
{valid?, data}
end
defp validate_props(_data, _type) do
{true, %{}}
end
defp parse_number(string) do
case Float.parse(string) do
{number, _} ->
integer = round(number)
if integer == number, do: integer, else: number
:error ->
nil
end
end
defp default_value(:color, _props), do: "#3E64FF"
defp default_value(:range, %{min: min}), do: to_string(min)
defp default_value(_type, _props), do: ""
defp input_types do
[
color: "Color",
number: "Number",
password: "Password",
text: "Text",
textarea: "Textarea",
url: "URL",
range: "Range"
]
end
end
| 28.74375 | 96 | 0.584257 |
ff033c8a1d225b7070f4655b0526aaa720cf57e4 | 1,999 | ex | Elixir | lib/honeydew/please/projectors/list.ex | elixir-cqrs/honeydew | 888f86c829187eaca28ef1af69a40a337e46630a | [
"MIT"
] | null | null | null | lib/honeydew/please/projectors/list.ex | elixir-cqrs/honeydew | 888f86c829187eaca28ef1af69a40a337e46630a | [
"MIT"
] | null | null | null | lib/honeydew/please/projectors/list.ex | elixir-cqrs/honeydew | 888f86c829187eaca28ef1af69a40a337e46630a | [
"MIT"
] | null | null | null | defmodule Honeydew.Please.Projectors.List do
@moduledoc """
Projector for List read model.
"""
use Commanded.Projections.Ecto,
application: Honeydew.App,
name: "please_list_projection",
repo: Honeydew.Repo
alias Honeydew.Please.Events.{
ListMade,
ListCompleted,
ListDiscarded,
ListReactivated
}
alias Honeydew.Please.Projections.List
alias HoneydewWeb.Endpoint
project(%ListMade{list_id: list_id, name: name, notes: notes}, fn multi ->
Ecto.Multi.insert(multi, :please_list, %List{
list_id: list_id,
name: name,
notes: notes,
status: :active
})
end)
project(%ListCompleted{list_id: list_id, notes: notes}, fn multi ->
update_list(multi, list_id,
set: [
notes: notes,
status: :completed
]
)
end)
project(%ListDiscarded{list_id: list_id, notes: notes}, fn multi ->
update_list(multi, list_id,
set: [
notes: notes,
status: :discarded
]
)
end)
project(%ListReactivated{list_id: list_id, notes: notes}, fn multi ->
update_list(multi, list_id,
set: [
notes: notes,
status: :discarded
]
)
end)
def after_update(%ListMade{} = event, _metadata, _changes) do
list = %List{
list_id: event.list_id,
name: event.name,
notes: event.notes,
status: :active
}
list
|> broadcast("list_made")
:ok
end
def after_update(%ListDiscarded{} = event, _metadata, _changes) do
list = %List{
list_id: event.list_id,
notes: event.notes,
status: :discarded
}
list
|> broadcast("list_discarded")
:ok
end
defp update_list(multi, list_id, updates) do
Ecto.Multi.update_all(multi, :please_list, list_query(list_id), updates)
end
defp list_query(list_id) do
from(l in List, where: l.list_id == ^list_id)
end
defp broadcast(%List{} = list, event) do
Endpoint.broadcast("lists", event, list.list_id)
end
end
| 21.042105 | 76 | 0.629315 |
ff035606292640972be01c408d6105a7f3411d5d | 5,007 | ex | Elixir | clients/slides/lib/google_api/slides/v1/model/presentation.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/slides/lib/google_api/slides/v1/model/presentation.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/slides/lib/google_api/slides/v1/model/presentation.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Slides.V1.Model.Presentation do
  @moduledoc """
  A Google Slides presentation.

  ## Attributes

  *   `layouts` (*type:* `list(GoogleApi.Slides.V1.Model.Page.t)`, *default:* `nil`) - The layouts in the presentation. A layout is a template that determines how content is arranged and styled on the slides that inherit from that layout.
  *   `locale` (*type:* `String.t`, *default:* `nil`) - The locale of the presentation, as an IETF BCP 47 language tag.
  *   `masters` (*type:* `list(GoogleApi.Slides.V1.Model.Page.t)`, *default:* `nil`) - The slide masters in the presentation. A slide master contains all common page elements and the common properties for a set of layouts. They serve three purposes: - Placeholder shapes on a master contain the default text styles and shape properties of all placeholder shapes on pages that use that master. - The master page properties define the common page properties inherited by its layouts. - Any other shapes on the master slide appear on all slides using that master, regardless of their layout.
  *   `notesMaster` (*type:* `GoogleApi.Slides.V1.Model.Page.t`, *default:* `nil`) - The notes master in the presentation. It serves three purposes: - Placeholder shapes on a notes master contain the default text styles and shape properties of all placeholder shapes on notes pages. Specifically, a `SLIDE_IMAGE` placeholder shape contains the slide thumbnail, and a `BODY` placeholder shape contains the speaker notes. - The notes master page properties define the common page properties inherited by all notes pages. - Any other shapes on the notes master appear on all notes pages. The notes master is read-only.
  *   `pageSize` (*type:* `GoogleApi.Slides.V1.Model.Size.t`, *default:* `nil`) - The size of pages in the presentation.
  *   `presentationId` (*type:* `String.t`, *default:* `nil`) - The ID of the presentation.
  *   `revisionId` (*type:* `String.t`, *default:* `nil`) - Output only. The revision ID of the presentation. Can be used in update requests to assert the presentation revision hasn't changed since the last read operation. Only populated if the user has edit access to the presentation. The revision ID is not a sequential number but a nebulous string. The format of the revision ID may change over time, so it should be treated opaquely. A returned revision ID is only guaranteed to be valid for 24 hours after it has been returned and cannot be shared across users. If the revision ID is unchanged between calls, then the presentation has not changed. Conversely, a changed ID (for the same presentation and user) usually means the presentation has been updated. However, a changed ID can also be due to internal factors such as ID format changes.
  *   `slides` (*type:* `list(GoogleApi.Slides.V1.Model.Page.t)`, *default:* `nil`) - The slides in the presentation. A slide inherits properties from a slide layout.
  *   `title` (*type:* `String.t`, *default:* `nil`) - The title of the presentation.
  """

  # Auto-generated model: `field/1,2,3` comes from GoogleApi.Gax.ModelBase
  # and registers each attribute for JSON (de)serialization.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :layouts => list(GoogleApi.Slides.V1.Model.Page.t()) | nil,
          :locale => String.t() | nil,
          :masters => list(GoogleApi.Slides.V1.Model.Page.t()) | nil,
          :notesMaster => GoogleApi.Slides.V1.Model.Page.t() | nil,
          :pageSize => GoogleApi.Slides.V1.Model.Size.t() | nil,
          :presentationId => String.t() | nil,
          :revisionId => String.t() | nil,
          :slides => list(GoogleApi.Slides.V1.Model.Page.t()) | nil,
          :title => String.t() | nil
        }

  field(:layouts, as: GoogleApi.Slides.V1.Model.Page, type: :list)
  field(:locale)
  field(:masters, as: GoogleApi.Slides.V1.Model.Page, type: :list)
  field(:notesMaster, as: GoogleApi.Slides.V1.Model.Page)
  field(:pageSize, as: GoogleApi.Slides.V1.Model.Size)
  field(:presentationId)
  field(:revisionId)
  field(:slides, as: GoogleApi.Slides.V1.Model.Page, type: :list)
  field(:title)
end

# JSON decoding delegates to the decode/2 generated by ModelBase.
defimpl Poison.Decoder, for: GoogleApi.Slides.V1.Model.Presentation do
  def decode(value, options) do
    GoogleApi.Slides.V1.Model.Presentation.decode(value, options)
  end
end

# JSON encoding goes through the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.Slides.V1.Model.Presentation do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 70.521127 | 849 | 0.728181 |
ff03a6e26b833bac3f2792d4724cedf1f14fc96c | 1,266 | exs | Elixir | redis_cache/config/config.exs | cabol/nebulex_examples | 21bd2673eecfc0b5c7a2807a3a47488d99a7d725 | [
"MIT"
] | 25 | 2017-07-30T01:27:54.000Z | 2021-12-10T16:41:03.000Z | redis_cache/config/config.exs | cabol/nebulex_examples | 21bd2673eecfc0b5c7a2807a3a47488d99a7d725 | [
"MIT"
] | 2 | 2019-11-12T09:41:26.000Z | 2021-11-08T13:51:24.000Z | redis_cache/config/config.exs | cabol/nebulex_examples | 21bd2673eecfc0b5c7a2807a3a47488d99a7d725 | [
"MIT"
] | 1 | 2019-01-08T19:06:54.000Z | 2019-01-08T19:06:54.000Z | import Config
# Redis Standalone
config :redis_cache, RedisCache.Standalone,
conn_opts: [
host: "127.0.0.1",
port: 6379
]
# Redis in client-cluster mode
config :redis_cache, RedisCache.ClientCluster,
mode: :client_side_cluster,
nodes: [
node1: [
conn_opts: [
host: "127.0.0.1",
port: 9001
]
],
node2: [
pool_size: 2,
conn_opts: [
url: "redis://127.0.0.1:9002"
]
],
node3: [
conn_opts: [
host: "127.0.0.1",
port: 9003
]
]
]
# Redis Cluster mode
config :redis_cache, RedisCache.RedisCluster,
mode: :redis_cluster,
master_nodes: [
[
host: "127.0.0.1",
port: 7000
],
[
url: "redis://127.0.0.1:7001"
],
[
url: "redis://127.0.0.1:7002"
]
],
conn_opts: [
host: "127.0.0.1"
]
# Multilevel cache – L1 -> Local and L2 -> RedisCluster
config :redis_cache, RedisCache.Multilevel,
model: :inclusive,
levels: [
{
RedisCache.Multilevel.L1,
gc_interval: :timer.seconds(3600) * 12
},
{
# It can be also the Redis Cluster or the Client-Cluster
RedisCache.Multilevel.L2,
conn_opts: [
host: "127.0.0.1",
port: 6379
]
}
]
| 18.085714 | 62 | 0.541864 |
ff03b887e52cb1d86598f8fca4b4d0babb62f869 | 1,844 | ex | Elixir | api_server/test/support/model_case.ex | Spippolo/wheredafuckRU | 4ff399532ab4847a390bfb0ce193ec5732c04595 | [
"MIT"
] | null | null | null | api_server/test/support/model_case.ex | Spippolo/wheredafuckRU | 4ff399532ab4847a390bfb0ce193ec5732c04595 | [
"MIT"
] | null | null | null | api_server/test/support/model_case.ex | Spippolo/wheredafuckRU | 4ff399532ab4847a390bfb0ce193ec5732c04595 | [
"MIT"
] | null | null | null | defmodule WheredafuckruApi.ModelCase do
@moduledoc """
This module defines the test case to be used by
model tests.
You may define functions here to be used as helpers in
your model tests. See `errors_on/2`'s definition as reference.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
alias WheredafuckruApi.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import WheredafuckruApi.ModelCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(WheredafuckruApi.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(WheredafuckruApi.Repo, {:shared, self()})
end
:ok
end
@doc """
Helper for returning list of errors in a struct when given certain data.
## Examples
Given a User schema that lists `:name` as a required field and validates
`:password` to be safe, it would return:
iex> errors_on(%User{}, %{password: "password"})
[password: "is unsafe", name: "is blank"]
You could then write your assertion like:
assert {:password, "is unsafe"} in errors_on(%User{}, %{password: "password"})
You can also create the changeset manually and retrieve the errors
field directly:
iex> changeset = User.changeset(%User{}, password: "password")
iex> {:password, "is unsafe"} in changeset.errors
true
"""
def errors_on(struct, data) do
struct.__struct__.changeset(struct, data)
|> Ecto.Changeset.traverse_errors(&WheredafuckruApi.ErrorHelpers.translate_error/1)
|> Enum.flat_map(fn {key, errors} -> for msg <- errors, do: {key, msg} end)
end
end
| 27.939394 | 87 | 0.694143 |
ff03d3c4681e05e4ac11e2b43273caf6c159bd7d | 294 | ex | Elixir | lib/cardigan/random.ex | Nagasaki45/gigalixir | 8d0d96ddc2eb0d44a25651cfd28c07cc401139c8 | [
"MIT"
] | 2 | 2020-05-05T06:07:16.000Z | 2020-05-09T02:12:32.000Z | lib/cardigan/random.ex | Nagasaki45/gigalixir | 8d0d96ddc2eb0d44a25651cfd28c07cc401139c8 | [
"MIT"
] | 21 | 2020-05-05T16:06:57.000Z | 2020-07-07T17:25:46.000Z | lib/cardigan/random.ex | Nagasaki45/gigalixir | 8d0d96ddc2eb0d44a25651cfd28c07cc401139c8 | [
"MIT"
] | null | null | null | defmodule Cardigan.Random do
def id(size) do
alphabets = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
numbers = "0123456789"
vals = (alphabets <> String.downcase(alphabets) <> numbers) |> String.split("")
1..size
|> Enum.map(fn _ -> Enum.random(vals) end)
|> Enum.join("")
end
end
| 24.5 | 83 | 0.639456 |
ff03d8e31942a1c4e09ddd3369e4da5c892d0811 | 1,938 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/list_client_users_response.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/list_client_users_response.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/list_client_users_response.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AdExchangeBuyer.V2beta1.Model.ListClientUsersResponse do
  @moduledoc """
  ## Attributes

  - nextPageToken (String.t): A token to retrieve the next page of results. Pass this value in the ListClientUsersRequest.pageToken field in the subsequent call to the clients.invitations.list method to retrieve the next page of results. Defaults to: `null`.
  - users ([ClientUser]): The returned list of client users. Defaults to: `null`.
  """

  # Auto-generated model: `field/1,2,3` comes from GoogleApi.Gax.ModelBase
  # and registers each attribute for JSON (de)serialization.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :nextPageToken => any(),
          :users => list(GoogleApi.AdExchangeBuyer.V2beta1.Model.ClientUser.t())
        }

  field(:nextPageToken)
  field(:users, as: GoogleApi.AdExchangeBuyer.V2beta1.Model.ClientUser, type: :list)
end

# JSON decoding delegates to the decode/2 generated by ModelBase.
defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.ListClientUsersResponse do
  def decode(value, options) do
    GoogleApi.AdExchangeBuyer.V2beta1.Model.ListClientUsersResponse.decode(value, options)
  end
end

# JSON encoding goes through the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.ListClientUsersResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 38 | 258 | 0.759546 |
ff04021294ce9c75b9b6056fbe246a840feb0144 | 1,267 | exs | Elixir | test/cog/chat/slack/templates/embedded/permission_revoke_test.exs | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 1,003 | 2016-02-23T17:21:12.000Z | 2022-02-20T14:39:35.000Z | test/cog/chat/slack/templates/embedded/permission_revoke_test.exs | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 906 | 2016-02-22T22:54:19.000Z | 2022-03-11T15:19:43.000Z | test/cog/chat/slack/templates/embedded/permission_revoke_test.exs | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 95 | 2016-02-23T13:42:31.000Z | 2021-11-30T14:39:55.000Z | defmodule Cog.Chat.Slack.Templates.Embedded.PermissionRevokeTest do
use Cog.TemplateCase
test "permission-revoke template" do
data = %{"results" => [%{"permission" => %{"bundle" => "site",
"name" => "foo"},
"role" => %{"name" => "ops"}}]}
expected = "Revoked permission 'site:foo' from role 'ops'"
assert_rendered_template(:slack, :embedded, "permission-revoke", data, expected)
end
test "permission-revoke template with multiple inputs" do
data = %{"results" => [%{"permission" => %{"bundle" => "site", "name" => "foo"},
"role" => %{"name" => "ops"}},
%{"permission" => %{"bundle" => "site", "name" => "bar"},
"role" => %{"name" => "dev"}},
%{"permission" => %{"bundle" => "site", "name" => "baz"},
"role" => %{"name" => "sec"}}]}
expected = """
Revoked permission 'site:foo' from role 'ops'
Revoked permission 'site:bar' from role 'dev'
Revoked permission 'site:baz' from role 'sec'
""" |> String.strip
assert_rendered_template(:slack, :embedded, "permission-revoke", data, expected)
end
end
| 40.870968 | 84 | 0.501973 |
ff040505b5acd9759d8c9b32ec3fdcd49bd20c39 | 42,924 | ex | Elixir | lib/ash/filter/filter.ex | doawoo/ash | 9d59ae5611785bbd2668b0865c743116633afac1 | [
"MIT"
] | null | null | null | lib/ash/filter/filter.ex | doawoo/ash | 9d59ae5611785bbd2668b0865c743116633afac1 | [
"MIT"
] | null | null | null | lib/ash/filter/filter.ex | doawoo/ash | 9d59ae5611785bbd2668b0865c743116633afac1 | [
"MIT"
] | null | null | null | defmodule Ash.Filter do
alias Ash.Actions.SideLoad
alias Ash.Engine.Request
alias Ash.Error.Query.{
AggregatesNotSupported,
InvalidFilterValue,
NoSuchAttributeOrRelationship,
NoSuchFilterPredicate,
ReadActionRequired
}
alias Ash.Query.Function.IsNil
alias Ash.Query.Operator.{
Eq,
GreaterThan,
GreaterThanOrEqual,
In,
LessThan,
LessThanOrEqual
}
alias Ash.Query.{Expression, Not, Ref}
alias Ash.Query.{Aggregate, Function, Operator}
@functions [
IsNil
]
@operators [
Ash.Query.Operator.IsNil,
Eq,
In,
LessThan,
GreaterThan,
LessThanOrEqual,
GreaterThanOrEqual
]
@builtins @functions ++ @operators
@operator_aliases [
eq: Eq,
equals: Eq,
gt: GreaterThan,
greater_than: GreaterThan,
lt: LessThan,
less_than: LessThan,
gte: GreaterThanOrEqual,
greater_than_or_equal: GreaterThanOrEqual,
lte: LessThanOrEqual,
less_than_or_equal: LessThanOrEqual
]
@moduledoc """
The representation of a filter in Ash.
Ash filters are stored as nested `Ash.Query.Expression{}` and `%Ash.Query.Not{}` structs,
terminating in an operator or a function struct. An expression is simply a boolean operator
and the left and right hand side of that operator.
## Filter Templates
Filter templates are simplified fielter statements (they only support atom keys), that have substitutions in them.
Currently, the substitutions are `{:_actor, :field}` and `{:_actor, :_primary_key}`
You can pass a filter template to `build_filter_from_template/2` with an actor, and it will return the new result
Additionally, you can ask if the filter template contains an actor reference via `template_references_actor?/1`
## Writing a filter
### Built In Predicates
#{Enum.map_join(@operators, "\n", &"* `#{&1.operator()}`")}
#{
Enum.map_join(@operator_aliases, "\n", fn {key, val} ->
"* `#{key}` (alias for `#{val.operator()}`)"
end)
}
### Expression syntax
The expression syntax ultimately just builds the keyword list style filter, but with lots of conveniences that
would be very annoying to do manually.
Examples
```elixir
Ash.Query.filter(resource, name == "Zardoz")
Ash.Query.filter(resource, first_name == "Zar" and last_name == "Doz")
Ash.Query.filter(resource, first_name == "Zar" and last_name in ["Doz", "Daz"] and high_score > 10)
Ash.Query.filter(resource, first_name == "Zar" or last_name == "Doz" or (high_score > 10 and high_score < -10))
```
### Keyword list syntax
A filter is a nested keyword list (with some exceptions, like `true` for everything and `false` for nothing).
The key is the "predicate" (A.K.A condition) and the value is the parameter. You can use `and` and `or` to create
nested filters. Datalayers can expose custom predicates. Eventually, you will be able to define your own custom
predicates, which will be a mechanism for you to attach complex filters supported by the data layer to your queries.
** Important **
In a given keyword list, all predicates are considered to be "ands". So `[or: [first_name: "Tom", last_name: "Bombadil"]]` doesn't
mean 'First name == "tom" or last_name == "bombadil"'. To say that, you want to provide a list of filters,
like so: `[or: [[first_name: "Tom"], [last_name: "Bombadil"]]]`
Some example filters:
```elixir
Ash.Query.filter(resource, [name: "Zardoz"]))
Ash.Query.filter(resource, [first_name: "Zar", last_name: "Doz"])
Ash.Query.filter(resource, [first_name: "Zar", last_name: [in: ["Doz", "Daz"]], high_score: [greater_than: 10]])
Ash.Query.filter(resource, [or: [
[first_name: "Zar"],
[last_name: "Doz"],
[or: [
[high_score: [greater_than: 10]]],
[high_score: [less_than: -10]]
]
]])
```
"""
@builtin_operators Enum.map(@operators, &{&1.operator(), &1}) ++ @operator_aliases
@builtin_functions Enum.map(@functions, &{&1.name(), &1})
@string_builtin_operators Enum.into(@builtin_operators, %{}, fn {key, value} ->
{to_string(key), value}
end)
@string_builtin_functions Enum.into(@builtin_functions, %{}, fn {key, value} ->
{to_string(key), value}
end)
defstruct [:resource, :expression]
@type t :: %__MODULE__{}
  # Accessors for the compile-time predicate registries declared above.
  def builtins, do: @builtins
  def builtin_functions, do: @functions
  def builtin_operators, do: @operators

  defmodule Simple do
    @moduledoc "Represents a simplified filter, with a simple list of predicates"
    defstruct [:resource, :predicates]

    defmodule Not do
      @moduledoc "A negated predicate"
      defstruct [:predicate]
    end
  end

  @doc "Same as `parse/3`, but raises the error instead of returning it."
  def parse!(resource, statement, aggregates \\ %{}) do
    case parse(resource, statement, aggregates) do
      {:ok, filter} ->
        filter

      {:error, error} ->
        raise error
    end
  end

  @doc "Parses a filter statement for a resource into an `%Ash.Filter{}`."
  def parse(resource, statement, aggregates \\ %{}) do
    # Parsing context: starts at the resource root with an empty
    # relationship path; aggregates are referenced by name while parsing.
    context = %{
      resource: resource,
      relationship_path: [],
      aggregates: aggregates
    }

    case parse_expression(statement, context) do
      {:ok, expression} ->
        {:ok, %__MODULE__{expression: expression, resource: resource}}

      {:error, error} ->
        {:error, error}
    end
  end

  @doc "transform an expression based filter to a simple filter, which is just a list of predicates"
  def to_simple_filter(%{resource: resource, expression: expression}) do
    predicates = get_predicates(expression)

    %Simple{resource: resource, predicates: predicates}
  end

  @doc "Replace any actor value references in a template with the values from a given actor"
  def build_filter_from_template(template, actor) do
    walk_filter_template(template, fn
      {:_actor, :_primary_key} ->
        # With no actor, the reference collapses to `false` (matches nothing).
        if actor do
          Map.take(actor, Ash.Resource.primary_key(actor.__struct__))
        else
          false
        end

      {:_actor, field} ->
        Map.get(actor || %{}, field)

      other ->
        other
    end)
  end
@doc "Whether or not a given template contains an actor reference"
def template_references_actor?({:_actor, _}), do: true
def template_references_actor?(filter) when is_list(filter) do
Enum.any?(filter, &template_references_actor?/1)
end
def template_references_actor?(filter) when is_map(filter) do
Enum.any?(fn {key, value} ->
template_references_actor?(key) || template_references_actor?(value)
end)
end
def template_references_actor?(tuple) when is_tuple(tuple) do
Enum.any?(Tuple.to_list(tuple), &template_references_actor?/1)
end
def template_references_actor?(_), do: false
defp walk_filter_template(filter, mapper) when is_list(filter) do
case mapper.(filter) do
^filter ->
Enum.map(filter, &walk_filter_template(&1, mapper))
other ->
walk_filter_template(other, mapper)
end
end
defp walk_filter_template(filter, mapper) when is_map(filter) do
case mapper.(filter) do
^filter ->
Enum.into(filter, %{}, &walk_filter_template(&1, mapper))
other ->
walk_filter_template(other, mapper)
end
end
defp walk_filter_template(tuple, mapper) when is_tuple(tuple) do
case mapper.(tuple) do
^tuple ->
tuple
|> Tuple.to_list()
|> Enum.map(&walk_filter_template(&1, mapper))
|> List.to_tuple()
other ->
walk_filter_template(other, mapper)
end
end
defp walk_filter_template(value, mapper), do: mapper.(value)
defp get_predicates(expr, acc \\ [])
defp get_predicates(true, acc), do: acc
defp get_predicates(false, _), do: false
defp get_predicates(_, false), do: false
defp get_predicates(%Expression{op: :and, left: left, right: right}, acc) do
acc = get_predicates(left, acc)
get_predicates(right, acc)
end
defp get_predicates(%Not{expression: expression}, acc) do
expression
|> get_predicates()
|> Enum.reduce(acc, fn predicate, acc ->
[%Simple.Not{predicate: predicate} | acc]
end)
end
defp get_predicates(%{__predicate__?: true} = predicate, acc), do: [predicate | acc]
  # Returns the aggregates referenced by the filter's predicates, found by
  # scanning operator sides and function arguments for `%Ref{}`s whose
  # attribute is an `%Aggregate{}`.
  def used_aggregates(filter) do
    filter
    |> list_predicates()
    |> Enum.flat_map(fn
      %{__operator__?: true, left: left, right: right} ->
        [left, right]
        |> Enum.filter(fn
          %Ref{attribute: %Aggregate{}} ->
            true

          _ ->
            false
        end)
        |> Enum.map(& &1.attribute)

      %{__function__?: true, arguments: arguments} ->
        arguments
        |> Enum.filter(fn
          %Ash.Query.Ref{attribute: %Aggregate{}} ->
            true

          _ ->
            false
        end)
        |> Enum.map(& &1.attribute)

      # NOTE(review): a predicate that is neither an operator nor a function
      # raises FunctionClauseError here — confirm list_predicates/1 can only
      # yield those two shapes.
    end)
  end
def run_other_data_layer_filters(api, resource, %{expression: expression} = filter) do
case do_run_other_data_layer_filters(expression, api, resource) do
{:ok, new_expression} -> {:ok, %{filter | expression: new_expression}}
{:error, error} -> {:error, error}
end
end
def run_other_data_layer_filters(_, _, filter) when filter in [nil, true, false],
do: {:ok, filter}
  # Rewrites the parts of an expression that reference a *different* data
  # layer than `resource` into concrete predicates, by running those
  # sub-filters against their own data layer via `api`.

  # `or`: both sides are rewritten independently and re-joined.
  defp do_run_other_data_layer_filters(
         %Expression{op: :or, left: left, right: right},
         api,
         resource
       ) do
    with {:ok, left} <- do_run_other_data_layer_filters(left, api, resource),
         {:ok, right} <- do_run_other_data_layer_filters(right, api, resource) do
      {:ok, Expression.optimized_new(:or, left, right)}
    end
  end

  # `and`: find relationship paths that cross a data-layer boundary, replace
  # them with local predicates, then recurse into the rewritten result.
  defp do_run_other_data_layer_filters(%Expression{op: :and} = expression, api, resource) do
    expression
    |> relationship_paths(:ands_only)
    |> filter_paths_that_change_data_layers(resource)
    |> case do
      [] ->
        {:ok, expression}

      paths ->
        paths
        |> do_run_other_data_layer_filter_paths(expression, resource, api)
        |> case do
          {:ok, result} -> do_run_other_data_layer_filters(result, api, resource)
          {:error, error} -> {:error, error}
        end
    end
    |> case do
      # NOTE(review): this trailing `case` only matches `{:ok, %Expression{op:
      # :and}}` — an `{:error, _}` or an `{:ok, non-and-expression}` from the
      # pipeline above raises CaseClauseError. Verify those shapes cannot
      # reach this point.
      {:ok, %Expression{op: :and, left: left, right: right}} ->
        with {:ok, new_left} <- do_run_other_data_layer_filters(left, api, resource),
             {:ok, new_right} <- do_run_other_data_layer_filters(right, api, resource) do
          {:ok, Expression.optimized_new(:and, new_left, new_right)}
        end
    end
  end

  defp do_run_other_data_layer_filters(%Not{expression: expression}, api, resource) do
    case do_run_other_data_layer_filters(expression, api, resource) do
      {:ok, expr} -> {:ok, Not.new(expr)}
      {:error, error} -> {:error, error}
    end
  end

  # A single predicate: if it is wholly scoped to a boundary-crossing path,
  # fetch the related data and rewrite the predicate against it.
  defp do_run_other_data_layer_filters(%{__predicate__?: true} = predicate, api, resource) do
    predicate
    |> relationship_paths(:ands_only)
    |> filter_paths_that_change_data_layers(resource)
    |> Enum.find_value(fn path ->
      case split_expression_by_relationship_path(predicate, path) do
        {nil, _} ->
          nil

        {for_path, nil} ->
          {path, for_path}
      end
    end)
    |> case do
      nil ->
        {:ok, predicate}

      {path, new_predicate} ->
        relationship = Ash.Resource.relationship(resource, path)

        fetch_related_data(resource, path, new_predicate, api, relationship)
    end
  end

  # Anything else (booleans, nil) is already local.
  defp do_run_other_data_layer_filters(other, _api, _resource), do: {:ok, other}
  # For each boundary-crossing path: split the expression into the portion on
  # that path and the remainder, run the former as a query against the path's
  # destination resource, and `and` the resulting local predicate back into
  # the remainder. Halts on the first error.
  defp do_run_other_data_layer_filter_paths(paths, expression, resource, api) do
    Enum.reduce_while(paths, {:ok, expression}, fn path, {:ok, expression} ->
      {for_path, without_path} = split_expression_by_relationship_path(expression, path)

      relationship = Ash.Resource.relationship(resource, path)

      query =
        relationship.destination
        |> Ash.Query.new(api)
        |> Map.put(:filter, %__MODULE__{
          expression: for_path,
          resource: relationship.destination
        })

      # droplast: the predicate produced applies at the parent of `path`.
      case filter_related_in(query, relationship, :lists.droplast(path)) do
        {:ok, new_predicate} ->
          {:cont, {:ok, Expression.optimized_new(:and, without_path, new_predicate)}}

        {:error, error} ->
          {:halt, {:error, error}}
      end
    end)
  end
  # Runs `new_predicate` against the related resource at `path` on its own
  # data layer, then rewrites the result into predicates the current data
  # layer can evaluate.

  # many_to_many: when the join resource shares the source's data layer and
  # is joinable, filter through the join relationship directly; otherwise
  # read destination records first and filter join rows by their ids.
  defp fetch_related_data(
         resource,
         path,
         new_predicate,
         api,
         %{type: :many_to_many, join_relationship: join_relationship, through: through} =
           relationship
       ) do
    if Ash.Resource.data_layer(through) == Ash.Resource.data_layer(resource) &&
         Ash.Resource.data_layer_can?(resource, {:join, through}) do
      filter = %__MODULE__{
        resource: relationship.destination,
        expression: new_predicate
      }

      relationship.destination
      |> Ash.Query.new(api)
      |> Ash.Query.do_filter(filter)
      |> filter_related_in(
        relationship,
        :lists.droplast(path) ++ [join_relationship]
      )
    else
      # NOTE(review): this filter is tagged `resource: through`, yet it is
      # applied to a query on `relationship.destination` below — confirm the
      # resource tag is intentional and not a copy/paste slip.
      filter = %__MODULE__{
        resource: through,
        expression: new_predicate
      }

      relationship.destination
      |> Ash.Query.new(api)
      |> Ash.Query.do_filter(filter)
      |> api.read()
      |> case do
        {:ok, results} ->
          # Filter the join table rows down to those pointing at the matched
          # destination records.
          relationship.through
          |> Ash.Query.new(api)
          |> Ash.Query.do_filter([
            {relationship.destination_field_on_join_table,
             in: Enum.map(results, &Map.get(&1, relationship.destination_field))}
          ])
          |> filter_related_in(
            Ash.Resource.relationship(resource, join_relationship),
            :lists.droplast(path)
          )

        {:error, error} ->
          {:error, error}
      end
    end
  end

  # Any other relationship type: read the matching destination records and
  # convert them to predicates on the source field.
  defp fetch_related_data(
         _resource,
         path,
         new_predicate,
         api,
         relationship
       ) do
    filter = %__MODULE__{
      resource: relationship.destination,
      expression: new_predicate
    }

    relationship.destination
    |> Ash.Query.new(api)
    |> Ash.Query.do_filter(filter)
    |> filter_related_in(relationship, :lists.droplast(path))
  end
defp filter_related_in(query, relationship, path) do
case query.api.read(query) do
{:error, error} ->
{:error, error}
{:ok, records} ->
records_to_expression(
records,
relationship,
path
)
end
end
  # Converts fetched related records into a predicate over
  # `relationship.source_field`.

  # No related records can match: the filter is statically false.
  defp records_to_expression([], _, _), do: {:ok, false}

  defp records_to_expression([single_record], relationship, path) do
    Ash.Query.Operator.new(
      Eq,
      %Ref{
        relationship_path: path,
        resource: relationship.source,
        attribute: Ash.Resource.attribute(relationship.source, relationship.source_field)
      },
      Map.get(single_record, relationship.destination_field)
    )
  end

  defp records_to_expression(records, relationship, path) do
    # NOTE(review): multiple records are combined with `:and`, i.e.
    # "field == v1 AND field == v2", which is unsatisfiable for distinct
    # values — an `:or` (or an `in` operator) looks intended; confirm against
    # Expression.optimized_new/3's semantics.
    Enum.reduce_while(records, {:ok, nil}, fn record, {:ok, expression} ->
      case records_to_expression([record], relationship, path) do
        {:ok, operator} ->
          {:cont, {:ok, Expression.optimized_new(:and, expression, operator)}}

        {:error, error} ->
          {:halt, {:error, error}}
      end
    end)
  end
defp filter_paths_that_change_data_layers(paths, resource, acc \\ [])
defp filter_paths_that_change_data_layers([], _resource, acc), do: acc
defp filter_paths_that_change_data_layers([path | rest], resource, acc) do
case shortest_path_to_changed_data_layer(resource, path) do
{:ok, path} ->
new_rest = Enum.reject(rest, &List.starts_with?(&1, path))
filter_paths_that_change_data_layers(new_rest, resource, [path | acc])
:error ->
filter_paths_that_change_data_layers(rest, resource, acc)
end
end
defp shortest_path_to_changed_data_layer(resource, path, acc \\ [])
defp shortest_path_to_changed_data_layer(_resource, [], _acc), do: :error
defp shortest_path_to_changed_data_layer(resource, [relationship | rest], acc) do
relationship = Ash.Resource.relationship(resource, relationship)
if relationship.type == :many_to_many do
if Ash.Resource.data_layer_can?(resource, {:join, relationship.through}) do
shortest_path_to_changed_data_layer(relationship.destination, rest, [
relationship.name | acc
])
else
{:ok, Enum.reverse([relationship.name | acc])}
end
else
if Ash.Resource.data_layer_can?(resource, {:join, relationship.destination}) do
shortest_path_to_changed_data_layer(relationship.destination, rest, [
relationship.name | acc
])
else
{:ok, Enum.reverse([relationship.name | acc])}
end
end
end
def put_at_path(value, []), do: value
def put_at_path(value, [key | rest]), do: [{key, put_at_path(value, rest)}]
  # Returns the unique relationship paths referenced by a filter or bare
  # expression. `kind` is `:all` or `:ands_only` (the latter skips paths that
  # only occur under an `or` — see do_relationship_paths/2).
  def relationship_paths(filter_or_expression, kind \\ :all)
  def relationship_paths(nil, _), do: []
  def relationship_paths(%{expression: nil}, _), do: []

  def relationship_paths(%__MODULE__{expression: expression}, kind),
    do: relationship_paths(expression, kind)

  def relationship_paths(expression, kind) do
    expression
    |> do_relationship_paths(kind)
    |> List.wrap()
    |> List.flatten()
    |> Enum.uniq()
    # Paths are collected wrapped in 1-tuples so flattening can't merge them;
    # unwrap here.
    |> Enum.map(fn {path} -> path end)
  end
def add_to_filter!(base, addition, op \\ :and, aggregates \\ %{}) do
case add_to_filter(base, addition, op, aggregates) do
{:ok, value} ->
value
{:error, error} ->
raise Ash.Error.to_ash_error(error)
end
end
  # Combines `addition` into `base` with `op` (`:and`/`:or`). Non-filter
  # statements are parsed against the base filter's resource first.
  def add_to_filter(base, addition, op \\ :and, aggregates \\ %{})

  def add_to_filter(nil, %__MODULE__{} = addition, _, _), do: {:ok, addition}

  def add_to_filter(
        %__MODULE__{} = base,
        %__MODULE__{} = addition,
        op,
        _
      ) do
    {:ok,
     %{
       base
       | expression: Expression.optimized_new(op, base.expression, addition.expression)
     }}
  end

  # A raw statement is parsed into a filter, then combined recursively.
  def add_to_filter(%__MODULE__{} = base, statement, op, aggregates) do
    case parse(base.resource, statement, aggregates) do
      {:ok, filter} -> add_to_filter(base, filter, op, aggregates)
      {:error, error} -> {:error, error}
    end
  end
  @doc """
  Returns true if the second argument is a strict subset (always returns the same or less data) of the first
  """
  def strict_subset_of(nil, _), do: true

  def strict_subset_of(_, nil), do: false

  # Filters over different resources are never comparable.
  def strict_subset_of(%{resource: resource}, %{resource: other_resource})
      when resource != other_resource,
      do: false

  def strict_subset_of(filter, candidate) do
    Ash.SatSolver.strict_filter_subset(filter, candidate)
  end

  # Boolean convenience wrapper: collapses any non-`true` solver result to
  # `false`.
  def strict_subset_of?(filter, candidate) do
    strict_subset_of(filter, candidate) == true
  end
def relationship_filter_request_paths(filter) do
filter
|> relationship_paths()
|> Enum.map(&[:filter, &1])
end
  def read_requests(_, nil), do: {:ok, []}

  # Builds one strict-check authorization read request per relationship path
  # referenced by the filter, so each related resource's read authorization
  # can be verified. Halts on the first error.
  def read_requests(api, filter) do
    filter
    |> Ash.Filter.relationship_paths()
    |> Enum.map(fn path ->
      {path, filter_expression_by_relationship_path(filter, path, true)}
    end)
    |> Enum.reduce_while({:ok, []}, fn {path, scoped_filter}, {:ok, requests} ->
      %{resource: resource} = scoped_filter

      with %{errors: []} = query <- Ash.Query.new(resource, api),
           %{errors: []} = query <- Ash.Query.do_filter(query, scoped_filter),
           {:action, action} when not is_nil(action) <-
             {:action, Ash.Resource.primary_action(resource, :read)} do
        request =
          Request.new(
            resource: resource,
            api: api,
            query:
              Request.resolve(
                [[:data, :authorization_filter]],
                fn %{
                     data: %{
                       authorization_filter: authorization_filter
                     }
                   } ->
                  # If data resolution produced an authorization filter, apply
                  # it to this query along the reverse relationship path.
                  if authorization_filter do
                    relationship =
                      Ash.Resource.relationship(
                        resource,
                        List.first(path)
                      )

                    case SideLoad.reverse_relationship_path(
                           relationship,
                           tl(path)
                         ) do
                      :error ->
                        {:ok, query}

                      {:ok, reverse_relationship} ->
                        filter = put_at_path(authorization_filter, reverse_relationship)
                        {:ok, Ash.Query.do_filter(query, filter)}
                    end
                  else
                    {:ok, query}
                  end
                end
              ),
            async?: false,
            path: [:filter, path],
            strict_check_only?: true,
            action: action,
            name: "authorize filter #{Enum.join(path, ".")}",
            data: []
          )

        {:cont, {:ok, [request | requests]}}
      else
        {:error, error} -> {:halt, {:error, error}}
        %{errors: errors} -> {:halt, {:error, errors}}
        {:action, nil} -> {:halt, {:error, ReadActionRequired.exception(resource: resource)}}
      end
    end)
  end
  # Maps `func` over every node of a filter's expression tree (see do_map/2).

  # Nothing to map over.
  def map(%__MODULE__{expression: nil} = filter, _) do
    filter
  end

  def map(%__MODULE__{expression: expression} = filter, func) do
    %{filter | expression: do_map(func.(expression), func)}
  end

  # Bare expressions (not wrapped in a filter struct) are mapped directly.
  def map(expression, func) do
    do_map(func.(expression), func)
  end
def do_map(expression, func) do
case expression do
{:halt, expr} ->
expr
%Expression{left: left, right: right} = expr ->
%{expr | left: do_map(left, func), right: do_map(right, func)}
%Not{expression: not_expr} = expr ->
%{expr | expression: do_map(not_expr, func)}
%{__operator__?: true, left: left, right: right} = op ->
%{op | left: do_map(left, func), right: do_map(right, func)}
%{__function__?: true, arguments: arguments} = func ->
%{func | arguments: Enum.map(arguments, &do_map(&1, func))}
other ->
func.(other)
end
end
  # Flattens an expression tree into the list of predicate nodes it contains.
  # Negated predicates are returned without their negation wrapper.
  def list_predicates(%__MODULE__{expression: expression}) do
    list_predicates(expression)
  end

  def list_predicates(expression) do
    case expression do
      %Expression{left: left, right: right} ->
        list_predicates(left) ++ list_predicates(right)

      %Not{expression: not_expr} ->
        list_predicates(not_expr)

      %{__predicate__?: true} = pred ->
        [pred]

      _ ->
        []
    end
  end
def filter_expression_by_relationship_path(filter, path, scope? \\ false) do
%__MODULE__{
resource: Ash.Resource.related(filter.resource, path),
expression: do_filter_expression_by_relationship_path(filter.expression, path, scope?)
}
end
  # Splits an expression into `{part_on_path, part_not_on_path}` for a given
  # relationship path; either element may be nil.
  defp split_expression_by_relationship_path(%{expression: expression}, path) do
    split_expression_by_relationship_path(expression, path)
  end

  defp split_expression_by_relationship_path(
         %Expression{op: op, left: left, right: right},
         path
       ) do
    {new_for_path_left, new_without_path_left} = split_expression_by_relationship_path(left, path)

    {new_for_path_right, new_without_path_right} =
      split_expression_by_relationship_path(right, path)

    {Expression.optimized_new(op, new_for_path_left, new_for_path_right),
     Expression.optimized_new(op, new_without_path_left, new_without_path_right)}
  end

  defp split_expression_by_relationship_path(%Not{expression: expression}, path) do
    {new_for_path, new_without_path} = split_expression_by_relationship_path(expression, path)
    {Not.new(new_for_path), Not.new(new_without_path)}
  end

  # Operator whose two refs share the same relationship path (note the pinned
  # `predicate_path` on both sides): it belongs wholly to one partition.
  defp split_expression_by_relationship_path(
         %{
           __operator__?: true,
           left: %Ref{relationship_path: predicate_path} = left,
           right: %Ref{relationship_path: predicate_path}
         } = predicate,
         path
       ) do
    if List.starts_with?(predicate_path, path) do
      new_path = Enum.drop(predicate_path, length(path))

      # NOTE(review): only the left ref's path is rebased here; the right ref
      # keeps its original path — confirm that is intentional.
      {%{
         predicate
         | left: %{
             left
             | relationship_path: new_path
           }
       }, nil}
    else
      {nil, predicate}
    end
  end

  defp split_expression_by_relationship_path(
         %{__operator__?: true, right: %Ref{}},
         _path
       ) do
    raise "Refs not currently supported on the right side of operators with different relationship paths"
  end

  defp split_expression_by_relationship_path(
         %{__operator__?: true, left: %Ref{relationship_path: predicate_path} = ref} = predicate,
         path
       ) do
    if List.starts_with?(predicate_path, path) do
      new_path = Enum.drop(predicate_path, length(path))
      {%{predicate | left: %{ref | relationship_path: new_path}}, nil}
    else
      {nil, predicate}
    end
  end

  defp split_expression_by_relationship_path(
         %{__function__?: true, arguments: arguments} = func,
         path
       ) do
    arguments
    |> Enum.filter(&match?(%Ref{}, &1))
    |> Enum.map(& &1.relationship_path)
    |> Enum.uniq()
    |> case do
      [] ->
        # NOTE(review): a function with no refs ends up on *both* sides of
        # the split — verify the duplication is intentional.
        {func, func}

      [predicate_path] ->
        if List.starts_with?(predicate_path, path) do
          new_args =
            Enum.map(arguments, fn
              %Ref{relationship_path: predicate_path} = ref ->
                %{ref | relationship_path: Enum.drop(predicate_path, length(path))}

              arg ->
                arg
            end)

          {%{func | arguments: new_args}, nil}
        else
          {nil, func}
        end

      _ ->
        raise "Refs for multiple relationship paths not supported in a single function call"
    end
  end
  # With scope? = true: rebases every ref so its path becomes relative to
  # `path`. With scope? = false: drops (replaces with nil) any predicate that
  # references the path at all.
  defp do_filter_expression_by_relationship_path(
         %Expression{op: op, left: left, right: right},
         path,
         scope?
       ) do
    new_left = do_filter_expression_by_relationship_path(left, path, scope?)
    new_right = do_filter_expression_by_relationship_path(right, path, scope?)
    Expression.optimized_new(op, new_left, new_right)
  end

  defp do_filter_expression_by_relationship_path(%Not{expression: expression}, path, scope?) do
    new_expression = do_filter_expression_by_relationship_path(expression, path, scope?)
    Not.new(new_expression)
  end

  defp do_filter_expression_by_relationship_path(
         %{__operator__?: true, left: left, right: right} = op,
         path,
         scope?
       ) do
    if scope? do
      %{op | left: scope_ref(left, path), right: scope_ref(right, path)}
    else
      [left, right]
      |> Enum.filter(&match?(%Ref{}, &1))
      |> Enum.any?(&List.starts_with?(&1.relationship_path, path))
      |> case do
        true ->
          nil

        false ->
          op
      end
    end
  end

  defp do_filter_expression_by_relationship_path(
         %{__function__?: true, arguments: arguments} = func,
         path,
         scope?
       ) do
    if scope? do
      %{func | arguments: Enum.map(arguments, &scope_ref(&1, path))}
    else
      arguments
      |> Enum.filter(&match?(%Ref{}, &1))
      |> Enum.any?(&List.starts_with?(&1.relationship_path, path))
      |> case do
        true ->
          nil

        false ->
          func
      end
    end
  end

  # Anything else (booleans, nil, bare values) passes through unchanged.
  defp do_filter_expression_by_relationship_path(other, _path, _scope) do
    other
  end
defp scope_ref(%Ref{} = ref, path) do
if List.starts_with?(ref.relationship_path, path) do
%{ref | relationship_path: Enum.drop(ref.relationship_path, Enum.count(path))}
else
ref
end
end
defp scope_ref(other, _), do: other
defp do_relationship_paths(%Ref{relationship_path: path}, _) do
{path}
end
defp do_relationship_paths(%Expression{op: :or}, :ands_only) do
[]
end
defp do_relationship_paths(%Expression{left: left, right: right}, kind) do
[do_relationship_paths(left, kind), do_relationship_paths(right, kind)]
end
defp do_relationship_paths(%Not{expression: expression}, kind) do
do_relationship_paths(expression, kind)
end
defp do_relationship_paths(%{__operator__?: true, left: left, right: right}, kind) do
[do_relationship_paths(left, kind), do_relationship_paths(right, kind)]
end
defp do_relationship_paths(%{__operator__?: true, arguments: arguments}, kind) do
Enum.map(arguments, &do_relationship_paths(&1, kind))
end
defp do_relationship_paths(_, _), do: []
  # Parses a filter statement into an expression tree. An existing filter is
  # absorbed with its predicate paths prefixed by the context's path.
  defp parse_expression(%__MODULE__{expression: expression}, context),
    do: {:ok, add_to_predicate_path(expression, context)}

  defp parse_expression(statement, context) when is_map(statement) or is_list(statement) do
    Enum.reduce_while(statement, {:ok, nil}, fn expression_part, {:ok, expression} ->
      case add_expression_part(expression_part, context, expression) do
        {:ok, new_expression} ->
          {:cont, {:ok, new_expression}}

        {:error, error} ->
          {:halt, {:error, error}}
      end
    end)
  end

  # Any other single term is treated as a one-element statement list.
  defp parse_expression(statement, context) do
    parse_expression([statement], context)
  end
  # Folds one statement part into the accumulated expression (always `:and`ed
  # onto what came before).

  defp add_expression_part(boolean, _context, expression) when is_boolean(boolean),
    do: {:ok, Expression.optimized_new(:and, expression, boolean)}

  defp add_expression_part(%__MODULE__{expression: adding_expression}, context, expression) do
    {:ok,
     Expression.optimized_new(
       :and,
       expression,
       add_to_predicate_path(adding_expression, context)
     )}
  end

  # A resource record is shorthand for a primary-key filter on that record.
  defp add_expression_part(%resource{} = record, context, expression) do
    if resource == context.resource do
      pkey_filter = record |> Map.take(Ash.Resource.primary_key(resource)) |> Map.to_list()
      add_expression_part(pkey_filter, context, expression)
    else
      {:error,
       InvalidFilterValue.exception(
         value: record,
         message: "Records must match the resource being filtered"
       )}
    end
  end

  defp add_expression_part({not_key, nested_statement}, context, expression)
       when not_key in [:not, "not"] do
    case parse_expression(nested_statement, context) do
      {:ok, nested_expression} ->
        {:ok, Expression.optimized_new(:and, expression, Not.new(nested_expression))}

      {:error, error} ->
        {:error, error}
    end
  end

  defp add_expression_part({or_key, nested_statements}, context, expression)
       when or_key in [:or, "or"] do
    # `or` additionally requires every resource involved on both sides to
    # support boolean filters.
    with {:ok, nested_expression} <- parse_and_join(nested_statements, :or, context),
         :ok <- validate_data_layers_support_boolean_filters(nested_expression) do
      {:ok, Expression.optimized_new(:and, expression, nested_expression)}
    end
  end

  defp add_expression_part({and_key, nested_statements}, context, expression)
       when and_key in [:and, "and"] do
    case parse_and_join(nested_statements, :and, context) do
      {:ok, nested_expression} ->
        {:ok, Expression.optimized_new(:and, expression, nested_expression)}

      {:error, error} ->
        {:error, error}
    end
  end

  # A ref key re-targets the context at the ref's relationship path, then
  # delegates to the plain-field clause.
  defp add_expression_part({%Ref{} = ref, nested_statement}, context, expression) do
    new_context = %{
      relationship_path: ref.relationship_path,
      resource: Ash.Resource.related(context.resource, ref.relationship_path),
      aggregates: context.aggregates
    }

    add_expression_part({ref.attribute.name, nested_statement}, new_context, expression)
  end
  # A `{field, value}` pair: the field may name a data-layer function, a
  # relationship, an attribute, an aggregate, or an operator — tried in that
  # order.
  defp add_expression_part({field, nested_statement}, context, expression)
       when is_atom(field) or is_binary(field) do
    # Aggregates can be referenced by atom or string key.
    aggregates =
      Enum.flat_map(context.aggregates, fn {key, _} ->
        [key, to_string(key)]
      end)

    cond do
      function_module = get_function(field, Ash.Resource.data_layer_functions(context.resource)) ->
        with {:ok, args} <-
               hydrate_refs(List.wrap(nested_statement), context.resource, context.aggregates),
             {:ok, function} <-
               Function.new(
                 function_module,
                 args,
                 %Ref{
                   relationship_path: context.relationship_path,
                   resource: context.resource
                 }
               ) do
          {:ok, Expression.optimized_new(:and, expression, function)}
        end

      rel = Ash.Resource.relationship(context.resource, field) ->
        # Descend into the relationship: extend the path and re-target the
        # resource.
        context =
          context
          |> Map.update!(:relationship_path, fn path -> path ++ [rel.name] end)
          |> Map.put(:resource, rel.destination)

        if is_list(nested_statement) || is_map(nested_statement) do
          case parse_expression(nested_statement, context) do
            {:ok, nested_expression} ->
              {:ok, Expression.optimized_new(:and, expression, nested_expression)}

            {:error, error} ->
              {:error, error}
          end
        else
          # A scalar against a relationship filters on the destination's
          # (single-field) primary key.
          with [field] <- Ash.Resource.primary_key(context.resource),
               attribute <- Ash.Resource.attribute(context.resource, field),
               {:ok, casted} <-
                 Ash.Type.cast_input(attribute.type, nested_statement) do
            add_expression_part({field, casted}, context, expression)
          else
            _other ->
              {:error,
               InvalidFilterValue.exception(
                 value: inspect(nested_statement),
                 message:
                   "A single value must be castable to the primary key of the resource: #{
                     inspect(context.resource)
                   }"
               )}
          end
        end

      attr = Ash.Resource.attribute(context.resource, field) ->
        case parse_predicates(nested_statement, attr, context) do
          {:ok, nested_statement} ->
            {:ok, Expression.optimized_new(:and, expression, nested_statement)}

          {:error, error} ->
            {:error, error}
        end

      field in aggregates ->
        field =
          if is_binary(field) do
            String.to_existing_atom(field)
          else
            field
          end

        add_aggregate_expression(context, nested_statement, field, expression)

      # Operator form, e.g. `{:eq, [left, right]}` — requires at least two
      # elements in the statement.
      (op_module = get_operator(field, Ash.Resource.data_layer_operators(context.resource))) &&
          match?([_, _ | _], nested_statement) ->
        with {:ok, [left, right]} <-
               hydrate_refs(nested_statement, context.resource, context.aggregates),
             {:ok, operator} <- Operator.new(op_module, left, right) do
          {:ok, Expression.optimized_new(:and, expression, operator)}
        end

      true ->
        {:error,
         NoSuchAttributeOrRelationship.exception(
           attribute_or_relationship: field,
           resource: context.resource
         )}
    end
  end
  # A bare map statement: each key/value pair is folded in, then the combined
  # result is `:and`ed onto the accumulated expression.
  defp add_expression_part(value, context, expression) when is_map(value) do
    # Can't call `parse_expression/2` here because it will loop
    value
    |> Map.to_list()
    |> Enum.reduce_while({:ok, nil}, fn {key, value}, {:ok, expression} ->
      case add_expression_part({key, value}, context, expression) do
        {:ok, new_expression} ->
          {:cont, {:ok, new_expression}}

        {:error, error} ->
          {:halt, {:error, error}}
      end
    end)
    |> case do
      {:ok, new_expression} ->
        {:ok, Expression.optimized_new(:and, expression, new_expression)}

      {:error, error} ->
        {:error, error}
    end
  end

  defp add_expression_part(value, context, expression) when is_list(value) do
    Enum.reduce_while(value, {:ok, expression}, fn value, {:ok, expression} ->
      case add_expression_part(value, context, expression) do
        {:ok, expression} -> {:cont, {:ok, expression}}
        {:error, error} -> {:halt, {:error, error}}
      end
    end)
  end

  # Anything else is not a valid filter statement.
  defp add_expression_part(value, _, _) do
    {:error, InvalidFilterValue.exception(value: value)}
  end
  # Resolves each `%Ref{}` whose attribute is still a bare atom into a ref
  # with a concrete aggregate/attribute (see do_hydrate_ref/5); non-refs pass
  # through untouched. The list is returned in its original order.
  defp hydrate_refs(list, resource, aggregates) do
    list
    |> Enum.reduce_while({:ok, []}, fn
      %Ref{attribute: attribute} = ref, {:ok, acc} when is_atom(attribute) ->
        case Ash.Resource.related(resource, ref.relationship_path) do
          nil ->
            {:halt, {:error, "Invalid reference #{inspect(ref)}"}}

          related ->
            do_hydrate_ref(ref, attribute, related, aggregates, acc)
        end

      other, {:ok, acc} ->
        {:cont, {:ok, [other | acc]}}
    end)
    |> case do
      {:ok, refs} -> {:ok, Enum.reverse(refs)}
      {:error, error} -> {:error, error}
    end
  end
  # Resolves one ref's atom field against, in order: the known aggregates,
  # the related resource's attributes, then its relationships (a relationship
  # ref is rewritten to point at the destination's single primary key).
  # Returns a `reduce_while` instruction for hydrate_refs/3.
  defp do_hydrate_ref(ref, field, related, aggregates, acc) do
    cond do
      Map.has_key?(aggregates, field) ->
        {:cont, {:ok, [%{ref | attribute: Map.get(aggregates, field)} | acc]}}

      attribute = Ash.Resource.attribute(related, field) ->
        {:cont, {:ok, [%{ref | attribute: attribute} | acc]}}

      relationship = Ash.Resource.relationship(related, field) ->
        case Ash.Resource.primary_key(relationship.destination) do
          [key] ->
            new_ref = %{
              ref
              | relationship_path: ref.relationship_path ++ [relationship.name],
                attribute: Ash.Resource.attribute(relationship.destination, key)
            }

            {:cont, {:ok, [new_ref | acc]}}

          _ ->
            # Composite (or missing) primary keys can't be referenced this way.
            {:halt, {:error, "Invalid reference #{inspect(ref)}"}}
        end

      true ->
        {:halt, {:error, "Invalid reference #{inspect(ref)}"}}
    end
  end
defp add_aggregate_expression(context, nested_statement, field, expression) do
if Ash.Resource.data_layer_can?(context.resource, :aggregate_filter) do
case parse_predicates(nested_statement, Map.get(context.aggregates, field), context) do
{:ok, nested_statement} ->
{:ok, Expression.optimized_new(:and, expression, nested_statement)}
{:error, error} ->
{:error, error}
end
else
{:error, AggregatesNotSupported.exception(resource: context.resource, feature: "filtering")}
end
end
defp validate_data_layers_support_boolean_filters(%Expression{
op: :or,
left: left,
right: right
}) do
left_resources =
left
|> map(fn
%Ref{} = ref ->
[ref.resource]
_ ->
[]
end)
|> List.flatten()
|> Enum.uniq()
right_resources =
right
|> map(fn
%Ref{} = ref ->
[ref.resource]
_ ->
[]
end)
|> List.flatten()
|> Enum.uniq()
left_resources
|> Enum.filter(&(&1 in right_resources))
|> Enum.reduce_while(:ok, fn resource, :ok ->
if Ash.Resource.data_layer_can?(resource, :boolean_filter) do
{:cont, :ok}
else
{:halt, {:error, "Data layer for #{resource} does not support boolean filters"}}
end
end)
end
defp validate_data_layers_support_boolean_filters(_), do: :ok
  # Prefixes every ref in the expression with the context's relationship
  # path, recursing through boolean/negation nodes.
  defp add_to_predicate_path(expression, context) do
    case expression do
      %Not{expression: expression} = not_expr ->
        %{not_expr | expression: add_to_predicate_path(expression, context)}

      %Expression{left: left, right: right} = expression ->
        %{
          expression
          | left: add_to_predicate_path(left, context),
            right: add_to_predicate_path(right, context)
        }

      %{__operator__?: true, left: left, right: right} = op ->
        left = add_to_ref_path(left, context.relationship_path)
        right = add_to_ref_path(right, context.relationship_path)
        %{op | left: left, right: right}

      %{__function__?: true, arguments: args} = func ->
        %{func | arguments: Enum.map(args, &add_to_ref_path(&1, context.relationship_path))}

      # Leaves (booleans, plain values) are untouched.
      other ->
        other
    end
  end
defp add_to_ref_path(%Ref{relationship_path: relationship_path} = ref, to_add) do
%{ref | relationship_path: to_add ++ relationship_path}
end
defp add_to_ref_path(other, _), do: other
defp parse_and_join(statements, op, context) do
Enum.reduce_while(statements, {:ok, nil}, fn statement, {:ok, expression} ->
case parse_expression(statement, context) do
{:ok, nested_expression} ->
{:cont, {:ok, Expression.optimized_new(op, expression, nested_expression)}}
{:error, error} ->
{:halt, {:error, error}}
end
end)
end
  # Parses `{operator, value}` pairs against a single attribute/aggregate
  # into an `:and`-joined expression. A bare scalar is shorthand for `eq`.
  defp parse_predicates(value, field, context) when not is_list(value) and not is_map(value) do
    parse_predicates([eq: value], field, context)
  end

  defp parse_predicates(values, attr, context) do
    if is_map(values) || Keyword.keyword?(values) do
      Enum.reduce_while(values, {:ok, nil}, fn {key, value}, {:ok, expression} ->
        case get_operator(key, Ash.Resource.data_layer_operators(context.resource)) do
          nil ->
            error = NoSuchFilterPredicate.exception(key: key, resource: context.resource)
            {:halt, {:error, error}}

          operator_module ->
            left = %Ref{
              attribute: attr,
              relationship_path: context.relationship_path,
              resource: context.resource
            }

            with {:ok, [left, right]} <-
                   hydrate_refs([left, value], context.resource, context.aggregates),
                 {:ok, operator} <- Operator.new(operator_module, left, right) do
              # Operator.new/3 may return a plain boolean (handled below), in
              # which case it replaces the accumulated expression outright.
              if is_boolean(operator) do
                {:cont, {:ok, operator}}
              else
                if Ash.Resource.data_layer_can?(context.resource, {:filter_operator, operator}) do
                  {:cont, {:ok, Expression.optimized_new(:and, expression, operator)}}
                else
                  {:halt,
                   {:error, "data layer does not support the operator #{inspect(operator)}"}}
                end
              end
            else
              {:error, error} -> {:halt, {:error, error}}
            end
        end
      end)
    else
      error = InvalidFilterValue.exception(value: values)
      {:error, error}
    end
  end
  # Looks up a function module by name among builtins first, then the data
  # layer's custom functions.
  defp get_function(key, data_layer_functions) when is_atom(key) do
    @builtin_functions[key] || Enum.find(data_layer_functions, &(&1.name() == key))
  end

  # NOTE(review): judging from the atom clause above, `&1.name()` returns an
  # atom, so comparing it to a binary `key` here can never match a custom
  # data-layer function — confirm whether a `to_string/1` is missing.
  defp get_function(key, data_layer_functions) when is_binary(key) do
    Map.get(@string_builtin_functions, key) ||
      Enum.find(data_layer_functions, &(&1.name() == key))
  end
defp get_operator(key, data_layer_operators) when is_atom(key) do
@builtin_operators[key] || Enum.find(data_layer_operators, &(&1.operator() == key))
end
defp get_operator(key, data_layer_operators) when is_binary(key) do
Map.get(@string_builtin_operators, key) ||
Enum.find(data_layer_operators, &(&1.name() == key))
end
defp get_operator(_, _), do: nil
  defimpl Inspect do
    import Inspect.Algebra

    # Color overrides merged on top of the caller's syntax colors.
    @custom_colors [
      number: :cyan
    ]

    # Renders a filter as `#Ash.Filter<expression>`.
    def inspect(
          %{expression: expression},
          opts
        ) do
      opts = %{opts | syntax_colors: Keyword.merge(opts.syntax_colors, @custom_colors)}
      concat(["#Ash.Filter<", to_doc(expression, opts), ">"])
    end
  end
end
| 30.616262 | 132 | 0.620003 |
ff04073db2201e871a1015c83322064e1f900ab3 | 830 | ex | Elixir | lib/watcher/supervisor.ex | jtmoulia/spawndir | e1d34ff57a4de211cdf798e64a16205c5b9487d0 | [
"BSD-3-Clause"
] | 1 | 2015-06-16T20:06:22.000Z | 2015-06-16T20:06:22.000Z | lib/watcher/supervisor.ex | jtmoulia/spawndir | e1d34ff57a4de211cdf798e64a16205c5b9487d0 | [
"BSD-3-Clause"
] | null | null | null | lib/watcher/supervisor.ex | jtmoulia/spawndir | e1d34ff57a4de211cdf798e64a16205c5b9487d0 | [
"BSD-3-Clause"
] | null | null | null | defmodule Watcher.Supervisor do
use Supervisor
## Public interface
def add!(cmd, opts \\ [], watcher_supervisor \\ __MODULE__) do
case _add(watcher_supervisor, cmd, opts) do
{:error, error} -> raise "Was unable to properly start the watcher supervisor: #{inspect(error)}"
{:ok, pid} -> pid
end
end
def add(cmd, opts \\ [], watcher_supervisor \\ __MODULE__) do
_add(watcher_supervisor, cmd, opts)
end
def start_link(opts \\ []) do
Supervisor.start_link __MODULE__, :ok, opts
end
## Helper functions
defp _add(watcher_supervisor, cmd, opts) do
Supervisor.start_child watcher_supervisor, [cmd, opts]
end
## Supervisor callbacks
def init(:ok) do
children = [
worker(Watcher.Port, [])
]
supervise children, strategy: :simple_one_for_one
end
end
| 23.714286 | 103 | 0.668675 |
ff043aec3caae937d0c7727f022da2fae5725a53 | 1,658 | ex | Elixir | web/web.ex | bitriot/phoenix_base | 15ec83a9acf46202102f2b006d577972f5564b2f | [
"MIT"
] | null | null | null | web/web.ex | bitriot/phoenix_base | 15ec83a9acf46202102f2b006d577972f5564b2f | [
"MIT"
] | null | null | null | web/web.ex | bitriot/phoenix_base | 15ec83a9acf46202102f2b006d577972f5564b2f | [
"MIT"
] | null | null | null | defmodule PhoenixBase.Web do
@moduledoc """
A module that keeps using definitions for controllers,
views and so on.
This can be used in your application as:
use PhoenixBase.Web, :controller
use PhoenixBase.Web, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below.
"""
def model do
quote do
use Ecto.Schema
import Ecto
import Ecto.Changeset
import Ecto.Query
end
end
def controller do
quote do
use Phoenix.Controller
alias PhoenixBase.Repo
import Ecto
import Ecto.Query
import PhoenixBase.Router.Helpers
import PhoenixBase.Gettext
end
end
def view do
quote do
use Phoenix.View, root: "web/templates"
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import PhoenixBase.Router.Helpers
import PhoenixBase.ErrorHelpers
import PhoenixBase.Gettext
end
end
def router do
quote do
use Phoenix.Router
end
end
def channel do
quote do
use Phoenix.Channel
alias PhoenixBase.Repo
import Ecto
import Ecto.Query
import PhoenixBase.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 20.219512 | 88 | 0.67491 |
ff047461a648344ea7caf7e0df6eb0d7d1029814 | 861 | ex | Elixir | web/models/job.ex | mindsigns/soroban | c56962e1164a51cb5e383bbbfda880f098f181f1 | [
"MIT"
] | 1 | 2020-02-09T03:03:04.000Z | 2020-02-09T03:03:04.000Z | web/models/job.ex | mindsigns/soroban | c56962e1164a51cb5e383bbbfda880f098f181f1 | [
"MIT"
] | null | null | null | web/models/job.ex | mindsigns/soroban | c56962e1164a51cb5e383bbbfda880f098f181f1 | [
"MIT"
] | null | null | null | defmodule Soroban.Job do
@moduledoc """
Soroban.Job model
"""
use Soroban.Web, :model
alias Money
schema "jobs" do
field :date, :date
field :reference, :string
field :caller, :string
field :type, :string
field :description, :string
field :zone, :string
field :service, :string
field :details, :string
field :total, Money.Ecto.Type
field :fees_advanced, Money.Ecto.Type, default: 0
belongs_to :client, Soroban.Client
timestamps()
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:date, :reference, :caller, :type, :description, :zone, :service, :details, :fees_advanced, :total, :client_id])
|> validate_required([:date, :caller, :type, :description, :service, :total, :client_id])
end
end
| 23.27027 | 133 | 0.651568 |
ff048a593e0b87cb3f8e5224104731ff437a1229 | 710 | exs | Elixir | day16/test/day16_test.exs | the-shank/advent-of-code-2018 | 3be3958adad61e62e8a7ea6ec6a868f049c7a7e4 | [
"Apache-2.0"
] | 7 | 2018-12-12T01:08:24.000Z | 2019-12-09T19:50:36.000Z | day16/test/day16_test.exs | bjorng/advent-of-code-2018 | 5dd312b7473d7f2fe12f0de1fd771c3ee1931b97 | [
"Apache-2.0"
] | null | null | null | day16/test/day16_test.exs | bjorng/advent-of-code-2018 | 5dd312b7473d7f2fe12f0de1fd771c3ee1931b97 | [
"Apache-2.0"
defmodule Day16Test do
  @moduledoc false

  use ExUnit.Case
  doctest Machine

  test "part one example" do
    assert Day16.part1(example_part1()) == 1
  end

  test "part one real data" do
    assert Day16.part1(input_part1()) == 677
  end

  test "part two real data" do
    assert Day16.part2(input_part1(), input_part2()) == 540
  end

  defp example_part1() do
    """
    Before: [3, 2, 1, 1]
    9 2 1 2
    After: [3, 2, 2, 1]
    """
    |> String.trim
    |> String.split("\n")
  end

  # Both puzzle inputs are read and split the same way; share the logic.
  defp input_part1, do: read_input("input1.txt")

  defp input_part2, do: read_input("input2.txt")

  defp read_input(filename) do
    filename
    |> File.read!
    |> String.trim
    |> String.split("\n", trim: true)
  end
end
| 16.904762 | 59 | 0.595775 |
ff04aba77e7b25c296ec47812a9b1f1e39246593 | 1,835 | ex | Elixir | deps/makeup_elixir/lib/makeup/lexers/elixir_lexer/helper.ex | BandanaPandey/nary_tree | fb1eeb69e38e43c9f9ffb54297cef52dff5c928d | [
"MIT"
] | 13 | 2018-09-19T21:03:29.000Z | 2022-01-27T04:06:32.000Z | deps/makeup_elixir/lib/makeup/lexers/elixir_lexer/helper.ex | mwindholtz/limbo | 84da57c72ddb2f1dd78bf2992410268c5ad1b2d4 | [
"Apache-2.0"
] | 1 | 2020-05-26T04:16:57.000Z | 2020-05-26T04:16:57.000Z | deps/makeup_elixir/lib/makeup/lexers/elixir_lexer/helper.ex | mwindholtz/limbo | 84da57c72ddb2f1dd78bf2992410268c5ad1b2d4 | [
"Apache-2.0"
defmodule Makeup.Lexers.ElixirLexer.Helper do
  @moduledoc false
  import NimbleParsec
  alias Makeup.Lexer.Combinators

  # Repeats `combinator`, optionally separated by the literal `separator`.
  def with_optional_separator(combinator, separator) when is_binary(separator) do
    combinator |> repeat(string(separator) |> concat(combinator))
  end

  # Allows escaping of the first character of a right delimiter.
  # This is used in sigils that don't support interpolation or character escapes but
  # must support escaping of the right delimiter.
  def escape_delim(rdelim) do
    rdelim_first_char = String.slice(rdelim, 0..0)
    string("\\" <> rdelim_first_char)
  end

  # Builds a combinator for a sigil: `~` + sigil letter + left delimiter +
  # body (the `middle` combinators, falling back to any codepoint) + right
  # delimiter + optional trailing modifier letters. Emits one token of `ttype`.
  def sigil(ldelim, rdelim, ranges, middle, ttype, attrs \\ %{}) do
    left = string("~") |> utf8_string(ranges, 1) |> string(ldelim)
    right = string(rdelim)

    choices = middle ++ [utf8_char([])]

    left
    |> repeat_until(choice(choices), [right])
    |> concat(right)
    |> optional(utf8_string([?a..?z, ?A..?Z], min: 1))
    |> traverse({Combinators, :collect_raw_chars_and_binaries, [ttype, attrs]})
  end

  def escaped(literal) when is_binary(literal) do
    string("\\" <> literal)
  end

  # Generates quoted `def`/`defp` clauses for `fun_name` that rewrite `:name`
  # tokens whose text is one of the given keywords into the matching token
  # type. Clause heads must pattern-match on the *word* (not the token type),
  # otherwise every clause for a type is identical and cannot discriminate.
  # (Also removed a leftover `|> IO.inspect` that printed the quoted AST at
  # compile time.)
  def keyword_matcher(kind, fun_name, words) do
    heads = for {ttype, words} <- words do
      for word <- words do
        case kind do
          :defp ->
            quote do
              defp unquote(fun_name)([{:name, attrs, unquote(word)} | tokens]) do
                [{unquote(ttype), attrs, unquote(word)} | unquote(fun_name)(tokens)]
              end
            end

          :def ->
            quote do
              def unquote(fun_name)([{:name, attrs, unquote(word)} | tokens]) do
                [{unquote(ttype), attrs, unquote(word)} | unquote(fun_name)(tokens)]
              end
            end
        end
      end
    end

    quote do
      unquote_splicing(heads)
    end
  end
end
ff04be7f3fc378fe65d724bebfde25c116272669 | 874 | ex | Elixir | clients/chat/lib/google_api/chat/v1/metadata.ex | corp-momenti/elixir-google-api | fe1580e305789ab2ca0741791b8ffe924bd3240c | [
"Apache-2.0"
] | null | null | null | clients/chat/lib/google_api/chat/v1/metadata.ex | corp-momenti/elixir-google-api | fe1580e305789ab2ca0741791b8ffe924bd3240c | [
"Apache-2.0"
] | null | null | null | clients/chat/lib/google_api/chat/v1/metadata.ex | corp-momenti/elixir-google-api | fe1580e305789ab2ca0741791b8ffe924bd3240c | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Chat.V1 do
  @moduledoc """
  API client metadata for GoogleApi.Chat.V1.
  """

  # Revision of the Google discovery document this client was generated from.
  @discovery_revision "20211015"

  @doc "Returns the discovery-document revision this client was generated from."
  def discovery_revision, do: @discovery_revision
end
| 32.37037 | 74 | 0.756293 |
ff04c86b4840fd39ddf0932ff5966977a0b6c53d | 2,027 | ex | Elixir | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p1beta1__entity.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p1beta1__entity.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/video_intelligence/lib/google_api/video_intelligence/v1/model/google_cloud_videointelligence_v1p1beta1__entity.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p1beta1_Entity do
  @moduledoc """
  Detected entity from video analysis.

  ## Attributes

  *   `description` (*type:* `String.t`, *default:* `nil`) - Textual description, e.g., `Fixed-gear bicycle`.
  *   `entityId` (*type:* `String.t`, *default:* `nil`) - Opaque entity ID. Some IDs may be available in
      [Google Knowledge Graph Search
      API](https://developers.google.com/knowledge-graph/).
  *   `languageCode` (*type:* `String.t`, *default:* `nil`) - Language code for `description` in BCP-47 format.
  """

  # Generated model: GoogleApi.Gax.ModelBase supplies the struct plus the
  # JSON field registry used by the Poison defimpls that follow this module.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :description => String.t(),
          :entityId => String.t(),
          :languageCode => String.t()
        }

  field(:description)
  field(:entityId)
  field(:languageCode)
end
# Poison decoding delegates to the model's generated `decode/2`.
defimpl Poison.Decoder,
  for: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p1beta1_Entity do
  alias GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p1beta1_Entity

  def decode(json, opts), do: GoogleCloudVideointelligenceV1p1beta1_Entity.decode(json, opts)
end
# Poison encoding goes through the shared Gax model encoder.
defimpl Poison.Encoder,
  for: GoogleApi.VideoIntelligence.V1.Model.GoogleCloudVideointelligenceV1p1beta1_Entity do
  def encode(model, opts), do: GoogleApi.Gax.ModelBase.encode(model, opts)
end
| 33.783333 | 111 | 0.724223 |
ff04ec46ad48a6289b8eb39e3ab2f40517e876b3 | 2,769 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_session_entity_type.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_session_entity_type.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2_session_entity_type.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2SessionEntityType do
  @moduledoc """
  Represents a session entity type.

  Extends or replaces a developer entity type at the user session level (we
  refer to the entity types defined at the agent level as "developer entity
  types").

  Note: session entity types apply to all queries, regardless of the language.

  ## Attributes

  *   `entities` (*type:* `list(GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2EntityTypeEntity.t)`, *default:* `nil`) - Required. The collection of entities associated with this session entity
      type.
  *   `entityOverrideMode` (*type:* `String.t`, *default:* `nil`) - Required. Indicates whether the additional data should override or
      supplement the developer entity type definition.
  *   `name` (*type:* `String.t`, *default:* `nil`) - Required. The unique identifier of this session entity type. Format:
      `projects/<Project ID>/agent/sessions/<Session ID>/entityTypes/<Entity Type
      Display Name>`.
      `<Entity Type Display Name>` must be the display name of an existing entity
      type in the same agent that will be overridden or supplemented.
  """

  # Generated model: GoogleApi.Gax.ModelBase supplies the struct plus the
  # field registry used for JSON decode/encode.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :entities =>
            list(GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2EntityTypeEntity.t()),
          :entityOverrideMode => String.t(),
          :name => String.t()
        }

  # `entities` decodes into a list of nested EntityTypeEntity models.
  field(
    :entities,
    as: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2EntityTypeEntity,
    type: :list
  )

  field(:entityOverrideMode)
  field(:name)
end
# Poison decoding delegates to the model's generated `decode/2`.
defimpl Poison.Decoder,
  for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2SessionEntityType do
  alias GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2SessionEntityType

  def decode(json, opts), do: GoogleCloudDialogflowV2SessionEntityType.decode(json, opts)
end
# Poison encoding goes through the shared Gax model encoder.
defimpl Poison.Encoder,
  for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2SessionEntityType do
  def encode(model, opts), do: GoogleApi.Gax.ModelBase.encode(model, opts)
end
| 37.418919 | 199 | 0.739256 |
ff04ec704ea5d375ba4312292099d842f1bc20a9 | 2,076 | ex | Elixir | clients/sas_portal/lib/google_api/sas_portal/v1alpha1/model/sas_portal_nrqz_validation.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/sas_portal/lib/google_api/sas_portal/v1alpha1/model/sas_portal_nrqz_validation.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/sas_portal/lib/google_api/sas_portal/v1alpha1/model/sas_portal_nrqz_validation.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.SASPortal.V1alpha1.Model.SasPortalNrqzValidation do
  @moduledoc """
  Information about National Radio Quiet Zone validation.

  ## Attributes

  *   `caseId` (*type:* `String.t`, *default:* `nil`) - Validation case id.
  *   `cpiId` (*type:* `String.t`, *default:* `nil`) - CPI who signed the validation.
  *   `latitude` (*type:* `float()`, *default:* `nil`) - Device latitude associated with the validation.
  *   `longitude` (*type:* `float()`, *default:* `nil`) - Device longitude associated with the validation.
  *   `state` (*type:* `String.t`, *default:* `nil`) - State of the NRQZ validation info.
  """

  # Generated model: GoogleApi.Gax.ModelBase supplies the struct plus the
  # field registry used for JSON decode/encode.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :caseId => String.t() | nil,
          :cpiId => String.t() | nil,
          :latitude => float() | nil,
          :longitude => float() | nil,
          :state => String.t() | nil
        }

  field(:caseId)
  field(:cpiId)
  field(:latitude)
  field(:longitude)
  field(:state)
end
# Poison decoding delegates to the model's generated `decode/2`.
defimpl Poison.Decoder, for: GoogleApi.SASPortal.V1alpha1.Model.SasPortalNrqzValidation do
  alias GoogleApi.SASPortal.V1alpha1.Model.SasPortalNrqzValidation

  def decode(json, opts), do: SasPortalNrqzValidation.decode(json, opts)
end
# Poison encoding goes through the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.SASPortal.V1alpha1.Model.SasPortalNrqzValidation do
  def encode(model, opts), do: GoogleApi.Gax.ModelBase.encode(model, opts)
end
| 35.186441 | 106 | 0.69605 |
ff05033f33f9027f99cca554c10100a3961b21ab | 1,301 | ex | Elixir | socket_gallows/lib/socket_gallows_web/endpoint.ex | jeethridge/elixir-hangman | ff1202fd1f54cad887180c900670306a20fe4339 | [
"Unlicense"
] | null | null | null | socket_gallows/lib/socket_gallows_web/endpoint.ex | jeethridge/elixir-hangman | ff1202fd1f54cad887180c900670306a20fe4339 | [
"Unlicense"
] | null | null | null | socket_gallows/lib/socket_gallows_web/endpoint.ex | jeethridge/elixir-hangman | ff1202fd1f54cad887180c900670306a20fe4339 | [
"Unlicense"
defmodule SocketGallowsWeb.Endpoint do
  @moduledoc """
  HTTP/WebSocket endpoint for the SocketGallows application.

  Plug order below is the request pipeline order — do not reorder casually.
  """

  use Phoenix.Endpoint, otp_app: :socket_gallows

  socket "/socket", SocketGallowsWeb.UserSocket,
    websocket: true,
    longpoll: false

  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phx.digest
  # when deploying your static files in production.
  plug Plug.Static,
    at: "/",
    from: :socket_gallows,
    gzip: false,
    only: ~w(css fonts images js favicon.ico robots.txt)

  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
    plug Phoenix.LiveReloader
    plug Phoenix.CodeReloader
  end

  plug Plug.RequestId
  plug Plug.Logger

  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Phoenix.json_library()

  plug Plug.MethodOverride
  plug Plug.Head

  # The session will be stored in the cookie and signed,
  # this means its contents can be read but not tampered with.
  # Set :encryption_salt if you would also like to encrypt it.
  # NOTE(review): signing salt is committed to source control — consider
  # moving it to runtime configuration/secrets.
  plug Plug.Session,
    store: :cookie,
    key: "_socket_gallows_key",
    signing_salt: "TTFTc8do"

  plug SocketGallowsWeb.Router
end
| 27.680851 | 69 | 0.717909 |
ff051c43f27c7f63b2bdb2c8bcd19ba515b83a9d | 4,189 | ex | Elixir | clients/health_care/lib/google_api/health_care/v1beta1/model/message.ex | EVLedger/elixir-google-api | 61edef19a5e2c7c63848f7030c6d8d651e4593d4 | [
"Apache-2.0"
] | null | null | null | clients/health_care/lib/google_api/health_care/v1beta1/model/message.ex | EVLedger/elixir-google-api | 61edef19a5e2c7c63848f7030c6d8d651e4593d4 | [
"Apache-2.0"
] | null | null | null | clients/health_care/lib/google_api/health_care/v1beta1/model/message.ex | EVLedger/elixir-google-api | 61edef19a5e2c7c63848f7030c6d8d651e4593d4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.HealthCare.V1beta1.Model.Message do
  @moduledoc """
  A complete HL7v2 message.

  See http://www.hl7.org/implement/standards/index.cfm?ref=common for details
  on the standard.

  ## Attributes

  *   `createTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. The datetime when the message was created. Set by the server.
  *   `data` (*type:* `String.t`, *default:* `nil`) - Raw message bytes.
  *   `labels` (*type:* `map()`, *default:* `nil`) - User-supplied key-value pairs used to organize HL7v2 stores.
      Label keys must be between 1 and 63 characters long, have a UTF-8 encoding
      of maximum 128 bytes, and must conform to the
      following PCRE regular expression:
      \\p{Ll}\\p{Lo}{0,62}
      Label values are optional, must be between 1 and 63 characters long, have
      a UTF-8 encoding of maximum 128 bytes, and must conform to the
      following PCRE regular expression: [\\p{Ll}\\p{Lo}\\p{N}_-]{0,63}
      No more than 64 labels can be associated with a given store.
  *   `messageType` (*type:* `String.t`, *default:* `nil`) - The message type for this message. MSH-9.1.
  *   `name` (*type:* `String.t`, *default:* `nil`) - Resource name of the Message, of the form
      `projects/{project_id}/datasets/{dataset_id}/hl7V2Stores/{hl7_v2_store_id}/messages/{message_id}`.
      Assigned by the server.
  *   `parsedData` (*type:* `GoogleApi.HealthCare.V1beta1.Model.ParsedData.t`, *default:* `nil`) - Output only. The parsed version of the raw message data.
  *   `patientIds` (*type:* `list(GoogleApi.HealthCare.V1beta1.Model.PatientId.t)`, *default:* `nil`) - All patient IDs listed in the PID-2, PID-3, and PID-4 segments of this
      message.
  *   `schematizedData` (*type:* `GoogleApi.HealthCare.V1beta1.Model.SchematizedData.t`, *default:* `nil`) - The parsed version of the raw message data schematized according to this
      store's schemas and type definitions.
  *   `sendFacility` (*type:* `String.t`, *default:* `nil`) - The hospital that this message came from. MSH-4.
  *   `sendTime` (*type:* `DateTime.t`, *default:* `nil`) - The datetime the sending application sent this message. MSH-7.
  """

  # Generated model: GoogleApi.Gax.ModelBase supplies the struct plus the
  # field registry used for JSON decode/encode.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :createTime => DateTime.t(),
          :data => String.t(),
          :labels => map(),
          :messageType => String.t(),
          :name => String.t(),
          :parsedData => GoogleApi.HealthCare.V1beta1.Model.ParsedData.t(),
          :patientIds => list(GoogleApi.HealthCare.V1beta1.Model.PatientId.t()),
          :schematizedData => GoogleApi.HealthCare.V1beta1.Model.SchematizedData.t(),
          :sendFacility => String.t(),
          :sendTime => DateTime.t()
        }

  # Timestamp fields decode as DateTime; nested models decode via their own
  # generated modules.
  field(:createTime, as: DateTime)
  field(:data)
  field(:labels, type: :map)
  field(:messageType)
  field(:name)
  field(:parsedData, as: GoogleApi.HealthCare.V1beta1.Model.ParsedData)
  field(:patientIds, as: GoogleApi.HealthCare.V1beta1.Model.PatientId, type: :list)
  field(:schematizedData, as: GoogleApi.HealthCare.V1beta1.Model.SchematizedData)
  field(:sendFacility)
  field(:sendTime, as: DateTime)
end
# Poison decoding delegates to the model's generated `decode/2`.
defimpl Poison.Decoder, for: GoogleApi.HealthCare.V1beta1.Model.Message do
  alias GoogleApi.HealthCare.V1beta1.Model.Message

  def decode(json, opts), do: Message.decode(json, opts)
end
# Poison encoding goes through the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.HealthCare.V1beta1.Model.Message do
  def encode(model, opts), do: GoogleApi.Gax.ModelBase.encode(model, opts)
end
| 46.032967 | 181 | 0.693722 |
ff052be9e14c1658c9cc7f7eacc0de0233ea4e31 | 2,738 | ex | Elixir | lib/commissar/authorization.ex | binarypaladin/commissar | d0d0b54f059949481d95be13037957b62fe94cd7 | [
"MIT"
] | null | null | null | lib/commissar/authorization.ex | binarypaladin/commissar | d0d0b54f059949481d95be13037957b62fe94cd7 | [
"MIT"
] | null | null | null | lib/commissar/authorization.ex | binarypaladin/commissar | d0d0b54f059949481d95be13037957b62fe94cd7 | [
"MIT"
defmodule Commissar.Authorization do
  @moduledoc """
  Authorizers add a convenient way of laying out policies in a manner that makes
  it easy to read. Defining policies in a module that uses
  `Commissar.Authorization` also adds a catch-all policy that returns `:continue`,
  allowing your own policies to simply focus on `:ok` and `:error` responses.
  This carries over when exporting policies from other authorizers. Anonymous
  functions will need to explicitly return `:continue` if they do not `:ok`
  or `:error`.

  Keep in mind, if you just want to roll with anonymous functions or organize
  your policies in some other manner, it's completely possible to use
  `Commissar` without authorizers at all as long as the functions being
  passed in conform to `Commissar.policy` type.
  """

  defmacro __using__(_) do
    quote do
      @behaviour unquote(__MODULE__)
      @before_compile unquote(__MODULE__)

      @doc """
      Similar to `authorize/3` but returns a boolean response instead. This
      should be used when you have no use for any potential denial reasons.
      """
      @spec allow?(any(), any(), any()) :: boolean()
      def allow?(subject, action, context) do
        Commissar.allow?(authorize(subject, action, context))
      end

      @doc """
      Checks to see whether a subject attempting an action is allowed to do so
      on a context with a given set of policies.
      """
      @spec authorize(any(), any(), any()) :: :ok | {:error, any()}
      def authorize(subject, action, context) do
        Commissar.authorize(subject, action, context, export_policies())
      end

      @doc """
      Exports a single policy from this authorizer to be used as a policy in
      another.
      """
      def export_policy(policy_name),
        do: Commissar.export_policy(__MODULE__, policy_name)

      @doc """
      Exports _all_ policies from this authorizer for use in another.
      """
      def export_policies, do: Commissar.export_policies(__MODULE__)
    end
  end

  @doc """
  Adds a catch-all so you only need to define actual allows and denies.
  """
  defmacro __before_compile__(_) do
    # Injected last, so it only matches when no user-defined clause did.
    quote do
      def policy(_name, _action, _subject, _context), do: :continue
    end
  end

  @doc """
  A policy definition taking a name (an atom that can be used by
  `export_policy/1`, and `policies/0`), an action, a subject, and a context.
  """
  @callback policy(atom(), any(), any(), any()) :: Commissar.policy_result()

  @doc """
  A list of policies in the form of either atoms or functions that will be
  authorized in order by `authorize/3`.
  """
  @callback policies() :: [atom() | [atom()] | Commissar.policy() | [Commissar.policy()]]

  @optional_callbacks [policy: 4]
end
| 36.026316 | 89 | 0.677502 |
ff05349b3818cb5adfff4e655a2101c72ecc4e9b | 7,358 | ex | Elixir | lib/ex_rabbit_m_q/state.ex | StoiximanServices/exrabbitmq | 66b37555a5aa50a3b938c7bac2b8485df58f0639 | [
"MIT"
] | 14 | 2017-10-23T11:56:13.000Z | 2021-08-24T10:38:52.000Z | lib/ex_rabbit_m_q/state.ex | StoiximanServices/exrabbitmq | 66b37555a5aa50a3b938c7bac2b8485df58f0639 | [
"MIT"
] | 10 | 2017-09-18T15:32:10.000Z | 2020-03-20T07:25:47.000Z | lib/ex_rabbit_m_q/state.ex | StoiximanServices/exrabbitmq | 66b37555a5aa50a3b938c7bac2b8485df58f0639 | [
"MIT"
defmodule ExRabbitMQ.State do
  @moduledoc """
  Provides functions for saving or getting the state such as the configuration, connection and channel information of a
  `ExRabbitMQ.Consumer` or `ExRabbitMQ.Producer` process in its process dictionary.
  """

  @type connection_status :: :connected | :disconnected
  @type buffered_message ::
          {payload :: binary, exchange :: binary, routing_key :: binary, opts :: keyword}

  alias ExRabbitMQ.Config.Connection, as: ConnectionConfig
  alias ExRabbitMQ.Config.Environment, as: XRMQEnvironmentConfig
  alias ExRabbitMQ.Config.Session, as: XRMQSessionConfig
  alias ExRabbitMQ.Constants

  @doc """
  Get the `ExRabbitMQ.Config.Connection` struct from the process dictionary.
  """
  @spec get_connection_config :: ConnectionConfig.t() | nil
  def get_connection_config do
    Process.get(Constants.connection_config_key())
  end

  @doc """
  Set the `ExRabbitMQ.Config.Connection` struct in the process dictionary.
  """
  @spec set_connection_config(ConnectionConfig.t() | nil) :: term | nil
  def set_connection_config(config) do
    Process.put(Constants.connection_config_key(), config)
  end

  @doc """
  Get the `ExRabbitMQ.Connection` pid from the process dictionary.
  """
  @spec get_connection_pid :: pid | nil
  def get_connection_pid do
    Process.get(Constants.connection_pid_key())
  end

  @doc """
  Set the `ExRabbitMQ.Connection` pid in the process dictionary.
  """
  @spec set_connection_pid(pid | nil) :: term | nil
  def set_connection_pid(connection_pid) do
    Process.put(Constants.connection_pid_key(), connection_pid)
  end

  @doc """
  Get the `ExRabbitMQ.ChannelRipper` pid from the process dictionary.
  """
  @spec get_channel_ripper_pid :: pid | nil
  def get_channel_ripper_pid do
    Process.get(Constants.channel_ripper_pid_key())
  end

  @doc """
  Set the `ExRabbitMQ.ChannelRipper` pid in the process dictionary.
  """
  @spec set_channel_ripper_pid(pid | nil) :: term | nil
  def set_channel_ripper_pid(channel_ripper_pid) do
    Process.put(Constants.channel_ripper_pid_key(), channel_ripper_pid)
  end

  @doc """
  Get the `AMQP.Channel` struct and the channel pid from the process dictionary.
  """
  @spec get_channel_info :: {AMQP.Channel.t() | nil, pid | nil}
  def get_channel_info do
    {Process.get(Constants.channel_key()), Process.get(Constants.channel_monitor_key())}
  end

  @doc """
  Set the `AMQP.Channel` struct and the channel pid in the process dictionary.
  """
  @spec set_channel_info(AMQP.Channel.t() | nil, reference | nil) :: term | nil
  def set_channel_info(channel, channel_monitor) do
    Process.put(Constants.channel_key(), channel)
    Process.put(Constants.channel_monitor_key(), channel_monitor)
  end

  @doc """
  Get the `ExRabbitMQ.Config.Session` struct from the process dictionary.
  """
  @spec get_session_config :: XRMQSessionConfig.t() | nil
  def get_session_config do
    Process.get(Constants.session_config_key())
  end

  @doc """
  Set the `ExRabbitMQ.Config.Session` struct in the process dictionary.
  """
  @spec set_session_config(XRMQSessionConfig.t()) :: term | nil
  def set_session_config(config) do
    Process.put(Constants.session_config_key(), config)
  end

  @doc """
  Get the current connection status from the process dictionary.
  """
  @spec get_connection_status :: connection_status
  def get_connection_status do
    # Defaults to :disconnected until a connection is established.
    Process.get(Constants.connection_status_key(), :disconnected)
  end

  @doc """
  Set the current connection status in the process dictionary.
  """
  @spec set_connection_status(connection_status) :: connection_status
  def set_connection_status(status) when status in [:connected, :disconnected] do
    Process.put(Constants.connection_status_key(), status)
  end

  @doc """
  Get whether or not a consumer should automatically start consuming on connection from the process dictionary.
  """
  @spec get_auto_consume_on_connection :: boolean
  def get_auto_consume_on_connection do
    Process.get(Constants.auto_consume_on_connection_key(), true)
  end

  @doc """
  Set whether or not a consumer should automatically start consuming on connection in the process dictionary.
  """
  @spec set_auto_consume_on_connection(boolean) :: boolean
  def set_auto_consume_on_connection(auto_consume_on_connection)
      when is_boolean(auto_consume_on_connection) do
    Process.put(Constants.auto_consume_on_connection_key(), auto_consume_on_connection)
  end

  @doc """
  Get and clear the buffered messages of a producer from the process dictionary.

  The messages of a producer start being buffered when the connection to RabbitMQ is lost.
  """
  @spec get_clear_buffered_messages :: {non_neg_integer, [buffered_message]}
  def get_clear_buffered_messages do
    count = clear_buffered_messages_count()
    # Process.delete/1 returns nil when no messages were buffered.
    buffered_messages = Process.delete(Constants.buffered_messages_key()) || []

    {count, buffered_messages}
  end

  @doc """
  Add a message to the buffered messages of a producer in the process dictionary.
  """
  @spec add_buffered_message(buffered_message) :: [buffered_message]
  def add_buffered_message(message) do
    buffered_messages = Process.get(Constants.buffered_messages_key(), [])
    # Prepend (O(1)); consumers of this list must not rely on insertion order.
    buffered_messages = [message | buffered_messages]

    increase_buffered_messages_count()

    Process.put(Constants.buffered_messages_key(), buffered_messages)
  end

  @doc """
  Get the buffered messages count of a producer from the process dictionary.
  """
  @spec get_buffered_messages_count :: non_neg_integer
  def get_buffered_messages_count do
    Process.get(Constants.buffered_messages_count_key(), 0)
  end

  @doc """
  Set the buffered messages count of a producer to 0, in the process dictionary.
  """
  @spec clear_buffered_messages_count() :: term | nil
  def clear_buffered_messages_count() do
    # Returns the previous count, or 0 when the key was never set.
    Process.delete(Constants.buffered_messages_count_key()) || 0
  end

  @doc """
  Get the byte count of messages seen so far, in KBs, from the process dictionary.
  """
  @spec get_kb_of_messages_seen_so_far :: non_neg_integer | nil
  def get_kb_of_messages_seen_so_far do
    Process.get(Constants.kb_of_messages_seen_so_far_key(), 0)
  end

  @doc """
  Check whether the process must hibernate, based on the KBs of messages seen so far.
  """
  @spec hibernate? :: boolean
  def hibernate?() do
    kb_so_far = get_kb_of_messages_seen_so_far()

    hibernate? = kb_so_far >= XRMQEnvironmentConfig.kb_of_messages_seen_so_far_threshold()

    # Reset the counter once the threshold is crossed so the next hibernation
    # cycle starts from zero; otherwise keep accumulating.
    if hibernate?,
      do: Process.put(Constants.kb_of_messages_seen_so_far_key(), 0),
      else: Process.put(Constants.kb_of_messages_seen_so_far_key(), kb_so_far)

    hibernate?
  end

  @doc """
  Add to the byte count of messages seen so far, in KBs, in the process dictionary.
  """
  @spec add_kb_of_messages_seen_so_far(non_neg_integer) :: term | nil
  def add_kb_of_messages_seen_so_far(kb) when is_number(kb) and kb >= 0 do
    kb_so_far = get_kb_of_messages_seen_so_far()

    Process.put(Constants.kb_of_messages_seen_so_far_key(), kb_so_far + kb)
  end

  # Increase the buffered messages count of a producer by one, in the process dictionary.
  @spec increase_buffered_messages_count() :: non_neg_integer
  defp increase_buffered_messages_count() do
    count = get_buffered_messages_count()

    Process.put(Constants.buffered_messages_count_key(), count + 1)
  end
end
| 34.383178 | 119 | 0.746942 |
ff05521257016d0137d3781d1e65232518ac853b | 783 | ex | Elixir | lib/couchdb_adapter/http/urls/server.ex | sbezugliy/couchdb_adapter | 01304052185405a6420d502c8f25164ced77f493 | [
"Apache-2.0"
] | null | null | null | lib/couchdb_adapter/http/urls/server.ex | sbezugliy/couchdb_adapter | 01304052185405a6420d502c8f25164ced77f493 | [
"Apache-2.0"
] | null | null | null | lib/couchdb_adapter/http/urls/server.ex | sbezugliy/couchdb_adapter | 01304052185405a6420d502c8f25164ced77f493 | [
"Apache-2.0"
defmodule CouchDB.HTTP.Urls.Server do
  @moduledoc """
  Builds URL path strings for CouchDB server-level endpoints
  (`/_all_dbs`, `/_uuids`, `/_config`, etc.).
  """

  # Provides `admin_path/0`, used by the `/_config` routes below.
  import CouchDB.Environment

  def info, do: "/"
  def all_dbs, do: "/_all_dbs"
  def active_tasks, do: "/_active_tasks"
  def config, do: admin_path() <> "/_config"
  def config(section), do: config() <> "/" <> section
  def config(section, key), do: config(section) <> "/" <> key
  def db_updates, do: "/_db_updates"
  def log, do: "/_log"
  def membership, do: "/_membership"
  def stats, do: "/_stats"
  def utils, do: "/_utils/"
  def favicon, do: "/favicon.ico"
  def uuids(count \\ 1), do: "/_uuids?count=" <> to_string(count)
  def replicate, do: "/_replicate"
  def restart, do: "/_restart"
  def session, do: "/_session"
  def nodes, do: "/_nodes"
  def nodes(node_name), do: "/_nodes/" <> to_string(node_name)
end
| 18.642857 | 65 | 0.633461 |
ff05a1c2563b9425e73cb1932cf662753687ed35 | 83 | exs | Elixir | config/config.exs | Celeo/simple_irc_server | 090dfdff236c51216922733412339ef477fe43da | [
"MIT"
] | null | null | null | config/config.exs | Celeo/simple_irc_server | 090dfdff236c51216922733412339ef477fe43da | [
"MIT"
] | null | null | null | config/config.exs | Celeo/simple_irc_server | 090dfdff236c51216922733412339ef477fe43da | [
"MIT"
] | null | null | null | import Config
config :logger,
level: :info
import_config "#{config_env()}.exs"
| 11.857143 | 35 | 0.710843 |
ff05a52a35f03e0c955f066e4bd056f44b1b1cc4 | 599 | ex | Elixir | web/router.ex | tommcgurl/phoenix_elm_jukebox | de522b700f484e9b7f9f73a826695c0cf06472b5 | [
"MIT"
] | null | null | null | web/router.ex | tommcgurl/phoenix_elm_jukebox | de522b700f484e9b7f9f73a826695c0cf06472b5 | [
"MIT"
] | null | null | null | web/router.ex | tommcgurl/phoenix_elm_jukebox | de522b700f484e9b7f9f73a826695c0cf06472b5 | [
"MIT"
] | null | null | null | defmodule PhoenixElmJukebox.Router do
use PhoenixElmJukebox.Web, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", PhoenixElmJukebox do
pipe_through :browser # Use the default browser stack
get "/", PageController, :index
resources "/messages", MessageController
end
# Other scopes may use custom stacks.
# scope "/api", PhoenixElmJukebox do
# pipe_through :api
# end
end
| 20.655172 | 57 | 0.699499 |
ff05c69abed9c8b6f492f23f8b901d3a71741c27 | 522 | ex | Elixir | lib/brando/blueprint/villain/blocks/svg_block.ex | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | 4 | 2020-10-30T08:40:38.000Z | 2022-01-07T22:21:37.000Z | lib/brando/blueprint/villain/blocks/svg_block.ex | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | 1,162 | 2020-07-05T11:20:15.000Z | 2022-03-31T06:01:49.000Z | lib/brando/blueprint/villain/blocks/svg_block.ex | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | null | null | null | defmodule Brando.Blueprint.Villain.Blocks.SvgBlock do
defmodule Data do
use Brando.Blueprint,
application: "Brando",
domain: "Villain",
schema: "SvgBlockData",
singular: "svg_block_data",
plural: "svg_block_datas",
gettext_module: Brando.Gettext
@primary_key false
data_layer :embedded
identifier "{{ entry.type }}"
attributes do
attribute :class, :text
attribute :code, :text
end
end
use Brando.Blueprint.Villain.Block,
type: "svg"
end
| 21.75 | 53 | 0.659004 |
ff05ebde2f6b31460040c52ad1e26b320d4d3272 | 5,401 | ex | Elixir | web/controllers/auth_controller.ex | mntns/artus | 958380f42612ec0bc9d059037cf7b59dfbe1cfa9 | [
"MIT"
] | null | null | null | web/controllers/auth_controller.ex | mntns/artus | 958380f42612ec0bc9d059037cf7b59dfbe1cfa9 | [
"MIT"
] | null | null | null | web/controllers/auth_controller.ex | mntns/artus | 958380f42612ec0bc9d059037cf7b59dfbe1cfa9 | [
"MIT"
] | null | null | null | defmodule Artus.AuthController do
use Artus.Web, :controller
import Ecto.Query
alias Artus.{User, EventLogger}
alias Comeonin.Bcrypt
alias Artus.Auth.Guardian
@doc "Render login page"
def login(conn, _params) do
render conn, "login.html"
end
@doc "Logs user in"
def auth(conn, %{"mail" => mail, "password" => password}) do
mail
|> authenticate_user(password)
|> auth_reply(conn)
end
@doc "Sends authentication response to user"
defp auth_reply({:ok, user}, conn) do
conn
|> log_login(user)
|> put_flash(:success, "Welcome back!")
|> Guardian.Plug.sign_in(user)
|> redirect(to: "/")
end
defp auth_reply({:error, reason}, conn) do
conn
|> put_flash(:error, to_string(reason))
|> redirect(to: auth_path(conn, :login))
end
@doc "Signs user out"
def logout(conn, _params) do
if user = conn.assigns.user do
EventLogger.log(:auth_logout, "#{user.name} logged out")
end
conn
|> Guardian.Plug.sign_out()
|> put_flash(:info, "Successfully logged out!")
|> redirect(to: "/")
end
def activate(conn, %{"code" => code}) do
case check_activation_code(code) do
{:ok, user} ->
render conn, "activate.html", %{activation_code: code, activation_user: user}
:error ->
conn
|> put_flash(:error, "Invalid activation code.")
|> redirect(to: page_path(conn, :index))
end
end
@doc "Renders 'forgot password' page"
def forgot(conn, _params) do
render conn, "forgot.html"
end
def send_reset(conn, %{"mail" => mail}) do
case user = get_user_by_mail(mail) do
nil ->
conn
|> put_flash(:error, "User not found.")
|> redirect(to: auth_path(conn, :forgot))
x ->
# TODO: Fix this shit and add time checking
reset_code = UUID.uuid4()
changeset = User.changeset(user, %{activation_code: reset_code})
changeset
|> Repo.update!()
|> Artus.Email.password_reset_email()
|> Artus.Mailer.deliver_now()
conn
|> put_flash(:success, "Password reset link sent successfully.")
|> redirect(to: auth_path(conn, :forgot))
end
end
def set_pass(conn, %{"code" => code, "password" => pass, "password_c" => pass_c}) do
case check_activation_code(code) do
{:ok, user} ->
do_set_pass(conn, user, pass, pass_c, code)
:error ->
conn
|> put_flash(:error, "Expired activation code.")
|> redirect(to: page_path(conn, :index))
end
end
defp do_set_pass(conn, user, pass, pass_c, code \\ "") do
case pass do
^pass_c ->
hash = Comeonin.Bcrypt.hashpwsalt(pass)
user
|> User.changeset(%{activation_code: "", activated: true, hash: hash})
|> Repo.update!()
Artus.EventLogger.log(:auth_reset, "#{user.name} reset their password")
conn
|> put_flash(:info, "Successfully set password! Please log in now.")
|> redirect(to: auth_path(conn, :login))
_ ->
conn
|> put_flash(:error, "Passwords do not match!")
|> redirect(to: auth_path(conn, :activate, code))
end
end
def reset_pass(conn, %{"code" => code, "password" => pass, "password_c" => pass_c}) do
case check_reset_code(code) do
{:ok, user} ->
do_set_pass(conn, user, pass, pass_c, code)
:error ->
conn
|> put_flash(:error, "Expired reset code.")
|> redirect(to: page_path(conn, :index))
end
end
def reset(conn, %{"code" => code}) do
case check_reset_code(code) do
{:ok, user} ->
render conn, "reset.html", %{reset_code: code, reset_user: user}
:error ->
conn
|> put_flash(:error, "Invalid reset code.")
|> redirect(to: page_path(conn, :index))
end
end
defp log_login(conn, user) do
Artus.EventLogger.log(:auth_login, "#{user.name} logged in")
datetime_now = NaiveDateTime.utc_now()
datetime_last = case is_nil(user.last_login) do
true -> ~N[0000-01-01 00:00:00]
false -> user.last_login
end
threshold = (60 * 60 * 24 * 3)
changeset = Artus.User.changeset(user, %{last_login: datetime_now})
Repo.update!(changeset)
case (NaiveDateTime.diff(datetime_now, datetime_last, :seconds)) do
x when x > threshold ->
put_flash(conn, :tutorial, "#{user.name}")
_ ->
conn
end
end
defp check_activation_code(code) do
query = from u in User,
where: u.activated == false and u.activation_code == ^code
case Repo.one(query) do
nil -> :error
x -> {:ok, x}
end
end
defp check_reset_code(code) do
query = from u in User,
where: u.activation_code == ^code
case Repo.one(query) do
nil -> :error
x -> {:ok, x}
end
end
@doc "Gets user by email address"
defp get_user_by_mail(mail) do
query = from u in User, where: u.mail == ^mail
Repo.one(query)
end
@doc "Authenticates user via email adress and password"
defp authenticate_user(email, password) do
case get_user_by_mail(email) do
nil ->
Bcrypt.dummy_checkpw()
{:error, :invalid_credentials}
user ->
if Bcrypt.checkpw(password, user.hash) do
{:ok, user}
else
{:error, :invalid_credentials}
end
end
end
end
| 26.605911 | 88 | 0.598223 |
ff05ecf52987eda49d9aeb3718b7459d09a5a32a | 12,317 | ex | Elixir | apps/site/lib/site_web/controllers/customer_support_controller.ex | mbta/crispy-spoon | 7ef28a1a6adc73899b007e334b9220f7a48a60fa | [
"MIT"
] | null | null | null | apps/site/lib/site_web/controllers/customer_support_controller.ex | mbta/crispy-spoon | 7ef28a1a6adc73899b007e334b9220f7a48a60fa | [
"MIT"
] | null | null | null | apps/site/lib/site_web/controllers/customer_support_controller.ex | mbta/crispy-spoon | 7ef28a1a6adc73899b007e334b9220f7a48a60fa | [
"MIT"
] | null | null | null | defmodule SiteWeb.CustomerSupportController do
@moduledoc "Handles the customer support page and form submissions."
use SiteWeb, :controller
alias Routes.Route
require Logger
@allowed_attachment_types ~w(image/bmp image/gif image/jpeg image/png image/tiff image/webp)
@content_blocks [
%{
header: %{text: "Call Us", iconSvgText: nil},
id: "call_us",
initially_expanded: true
},
%{
header: %{text: "Lost and Found", iconSvgText: nil},
id: "lost_and_found",
initially_expanded: true
},
%{
header: %{text: "Get Service Updates", iconSvgText: nil},
id: "service_updates",
initially_expanded: true
},
%{
header: %{text: "Transit Police", iconSvgText: nil},
id: "transit_police",
initially_expanded: false
},
%{
header: %{text: "Request Public Records", iconSvgText: nil},
id: "request_public_records",
initially_expanded: false
},
%{
header: %{text: "Write to Us", iconSvgText: nil},
id: "write_to_us",
initially_expanded: false
},
%{
header: %{text: "Report Fraud, Waste, or Abuse", iconSvgText: nil},
id: "report",
initially_expanded: false
},
%{
header: %{text: "Report a Railroad Crossing Gate Issue", iconSvgText: nil},
id: "rail_road",
initially_expanded: false
}
]
@support_datetime_selector_fields %{
controls: "support-datepicker",
year: "support_date_time_year",
month: "support_date_time_month",
day: "support_date_time_day",
hour: "support_date_time_hour",
minute: "support_date_time_minute",
amPm: "support_date_time_am_pm",
dateEl: %{
container: "support-date",
input: "support-date-input",
select: "support-date-select",
label: "support-date-label"
},
timeEl: %{
container: "support-time",
select: "support-time-select",
label: "support-time-label"
}
}
plug(Turbolinks.Plug.NoCache)
plug(:set_service_options)
plug(:assign_ip)
plug(:meta_description)
plug(:assign_datetime_selector_fields)
plug(:assign_all_options_per_mode)
def index(conn, params) do
comments = Map.get(params, "comments", nil)
render_form(conn, %{comments: comments})
end
def thanks(conn, _params) do
render(
conn,
"index.html",
breadcrumbs: [Breadcrumb.build("Customer Support")],
show_form: false
)
end
def submit(conn, %{"support" => form_data, "g-recaptcha-response" => recaptcha_response}) do
now = Util.now() |> Util.to_local_time() |> DateTime.truncate(:second)
params =
form_data
|> Map.put("recaptcha_response", recaptcha_response)
# date and time is not mandatory so if it's blank or in the future, we set it to now
|> Map.update("date_time", now, &validate_incident_date_time/1)
case do_validation(params) do
[] ->
do_submit(conn, params)
errors ->
conn
|> put_status(400)
|> render_form(%{errors: errors, comments: Map.get(params, "comments")})
end
end
def submit(conn, params) do
Logger.warn("recaptcha validation missing")
comments =
case params do
%{"support" => form_data} ->
Map.get(form_data, "comments", nil)
_ ->
nil
end
conn
|> put_status(400)
|> render_form(%{errors: ["recaptcha"], comments: comments})
end
@spec render_expandable_blocks(Plug.Conn.t(), list) :: [Phoenix.HTML.safe()]
def render_expandable_blocks(assigns, content_blocks \\ @content_blocks) do
content_blocks
|> Enum.map(fn block ->
view_template = "_#{block.id}.html"
try do
SiteWeb.PartialView.render(
"_expandable_block.html",
Map.merge(assigns, %{
header: block.header,
id: block.id,
initially_expanded: block.initially_expanded,
view_template: view_template
})
)
rescue
# We still want to render the page, so we just return empty content:
_ -> ""
end
end)
end
@spec do_submit(Plug.Conn.t(), map) :: Plug.Conn.t()
defp do_submit(%Plug.Conn{assigns: %{ip_address: {:ok, ip}}} = conn, data) do
rate_limit_interval = Application.get_env(:feedback, :rate_limit, 60_000)
case Hammer.check_rate("submit-feedback:#{ip}", rate_limit_interval, 1) do
{:allow, _count} ->
{:ok, pid} = Task.start(__MODULE__, :send_ticket, [data])
conn = Plug.Conn.put_private(conn, :ticket_task, pid)
redirect(conn, to: customer_support_path(conn, :thanks))
{:deny, _limit} ->
_ = Logger.warn("ip=#{ip} Support form rate limit exceeded for IP address")
conn
|> put_status(:too_many_requests)
|> render_form(%{errors: ["rate limit"]})
end
end
defp render_form(conn, %{errors: errors, comments: comments}) do
render(
conn,
"index.html",
breadcrumbs: [Breadcrumb.build("Customer Support")],
errors: errors,
show_form: true,
comments: comments
)
end
defp render_form(conn, %{errors: errors}) do
render(
conn,
"index.html",
breadcrumbs: [Breadcrumb.build("Customer Support")],
errors: errors,
show_form: true
)
end
defp render_form(conn, %{comments: comments}) do
render(
conn,
"index.html",
breadcrumbs: [Breadcrumb.build("Customer Support")],
errors: [],
show_form: true,
comments: comments
)
end
@spec do_validation(map) :: [String.t()]
defp do_validation(params) do
validators =
if params["no_request_response"] == "off" do
[
&validate_comments/1,
&validate_service/1,
&validate_subject/1,
&validate_photos/1,
&validate_vehicle/1,
&validate_name/1,
&validate_email/1,
&validate_privacy/1,
&validate_recaptcha/1
]
else
[
&validate_comments/1,
&validate_service/1,
&validate_subject/1,
&validate_photos/1,
&validate_vehicle/1,
&validate_recaptcha/1
]
end
Site.Validation.validate(validators, params)
end
@spec validate_comments(map) :: :ok | String.t()
defp validate_comments(%{"comments" => ""}), do: "comments"
defp validate_comments(_), do: :ok
@spec validate_service(map) :: :ok | String.t()
defp validate_service(%{"service" => service}) do
if Feedback.Message.valid_service?(service) do
:ok
else
"service"
end
end
defp validate_service(_), do: "service"
@spec validate_subject(map) :: :ok | String.t()
defp validate_subject(%{"subject" => subject, "service" => service}) do
if Feedback.Message.valid_subject_for_service?(subject, service) do
:ok
else
"subject"
end
end
defp validate_subject(_), do: "subject"
@spec validate_name(map) :: :ok | String.t()
defp validate_name(%{"first_name" => ""}), do: "first_name"
defp validate_name(%{"last_name" => ""}), do: "last_name"
defp validate_name(_), do: :ok
@spec validate_email(map) :: :ok | String.t()
defp validate_email(%{"email" => email}) do
case Regex.run(~r/^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}$/, email) do
nil -> "email"
[_] -> :ok
end
end
defp validate_email(_), do: "email"
@spec validate_privacy(map) :: :ok | String.t()
defp validate_privacy(%{"privacy" => "on"}), do: :ok
defp validate_privacy(_), do: "privacy"
@spec validate_vehicle(map) :: :ok | String.t()
defp validate_vehicle(%{"vehicle" => vehicle_number}) do
if Regex.match?(~r/^[0-9]*$/, vehicle_number), do: :ok, else: "vehicle"
end
defp validate_vehicle(_), do: :ok
@spec validate_photos(map) :: :ok | String.t()
defp validate_photos(%{"photos" => photos}) when is_list(photos) do
if Enum.all?(photos, &valid_upload?/1), do: :ok, else: "photos"
end
defp validate_photos(_), do: :ok
defp valid_upload?(%Plug.Upload{filename: filename}) do
MIME.from_path(filename) in @allowed_attachment_types
end
# Errors we'd expect to see from reCAPTCHA assuming no bugs in the library.
# See: https://developers.google.com/recaptcha/docs/verify#error_code_reference
@expected_recaptcha_errors [
:challenge_failed,
:invalid_input_response,
:missing_input_response,
:timeout_or_duplicate,
# https://github.com/samueljseay/recaptcha/issues/58
:"invalid-input-response"
]
@spec validate_recaptcha(map) :: :ok | String.t()
defp validate_recaptcha(%{"recaptcha_response" => response}) do
case Recaptcha.verify(response) do
{:ok, _} ->
:ok
{:error, [error]} when error in @expected_recaptcha_errors ->
_ = Logger.warn("recaptcha failed_validation=#{error}")
"recaptcha"
end
end
@spec validate_incident_date_time(map) :: DateTime.t()
defp validate_incident_date_time(incident_date_time) do
now = Util.now() |> Util.to_local_time() |> DateTime.truncate(:second)
parsed_date_time =
case Util.parse(incident_date_time) do
{:error, :invalid_date} ->
now
parsed_dt ->
parsed_dt
end
local_parsed_date_time = Util.convert_using_timezone(parsed_date_time, "America/New_York")
# if date and time is in the future, set it to now
# otherwise leave as it is
if DateTime.compare(local_parsed_date_time, now) in [:lt, :eq] do
local_parsed_date_time
else
now
end
end
def send_ticket(params) do
Feedback.Repo.send_ticket(%Feedback.Message{
photos: params["photos"],
email: params["email"],
phone: params["phone"],
first_name: params["first_name"],
last_name: params["last_name"],
comments: params["comments"],
service: params["service"],
subject: params["subject"],
no_request_response: params["no_request_response"] == "on",
incident_date_time: params["date_time"],
mode: params["mode"],
line: params["route"],
vehicle: params["vehicle"],
ticket_number: params["ticket_number"]
})
end
@spec get_options_per_mode() :: map
def get_options_per_mode() do
bus_ferry_cr_options =
for route_type <- 2..4, into: %{} do
options =
Routes.Repo.by_type(route_type)
|> Enum.map(fn route ->
route.name
end)
mode = "#{Route.type_atom(route_type)}_options"
{mode, options}
end
subway_options =
Enum.map(SiteWeb.ViewHelpers.subway_lines() -- [:silver_line], fn mode ->
SiteWeb.ViewHelpers.mode_name(mode)
end)
bus_ferry_cr_options |> Map.put("subway_options", subway_options)
end
@spec get_routes_for_mode(Plug.Conn.t(), atom) :: list
def get_routes_for_mode(conn, mode) do
opts = conn.assigns[:all_options_per_mode]
str = Atom.to_string(mode)
opts["#{str}_options"] || []
end
defp set_service_options(conn, _) do
assign(conn, :service_options, Feedback.Message.service_options())
end
defp assign_ip(conn, _) do
assign(conn, :ip_address, get_ip(conn))
end
defp get_ip(conn) do
conn
|> Plug.Conn.get_req_header("x-forwarded-for")
|> do_get_ip(conn)
end
defp do_get_ip([<<ip::binary>>], %Plug.Conn{}) do
{:ok, ip}
end
defp do_get_ip([], %Plug.Conn{remote_ip: {a, b, c, d}}) do
{:ok, Enum.join([to_string(a), to_string(b), to_string(c), to_string(d)], ".")}
end
defp do_get_ip(_, %Plug.Conn{}) do
:error
end
defp meta_description(conn, _) do
conn
|> assign(
:meta_description,
"Contact the MBTA customer support team and view additional contact numbers for the Transit Police, " <>
"lost and found, and accessibility."
)
end
@spec assign_datetime_selector_fields(Plug.Conn.t(), Keyword.t()) :: Plug.Conn.t()
defp assign_datetime_selector_fields(conn, _) do
conn
|> assign(:support_datetime_selector_fields, @support_datetime_selector_fields)
end
@spec assign_all_options_per_mode(Plug.Conn.t(), Keyword.t()) :: Plug.Conn.t()
defp assign_all_options_per_mode(conn, _) do
assign(conn, :all_options_per_mode, get_options_per_mode())
end
end
| 27.993182 | 110 | 0.630105 |
ff0641b821101ffb593237826f8e180a90e93d67 | 5,554 | ex | Elixir | apps/nerves_hub_www/lib/nerves_hub_www_web/router.ex | valiot/nerves_hub_web | 01aef0b9b6b07d9f5c5440f7df0d2a3ec026daa8 | [
"Apache-2.0"
] | null | null | null | apps/nerves_hub_www/lib/nerves_hub_www_web/router.ex | valiot/nerves_hub_web | 01aef0b9b6b07d9f5c5440f7df0d2a3ec026daa8 | [
"Apache-2.0"
] | null | null | null | apps/nerves_hub_www/lib/nerves_hub_www_web/router.ex | valiot/nerves_hub_web | 01aef0b9b6b07d9f5c5440f7df0d2a3ec026daa8 | [
"Apache-2.0"
] | null | null | null | defmodule NervesHubWWWWeb.Router do
use NervesHubWWWWeb, :router
pipeline :browser do
plug(:accepts, ["html"])
plug(:fetch_session)
plug(:fetch_flash)
plug(:fetch_live_flash)
plug(:put_root_layout, {NervesHubWWWWeb.LayoutView, :root})
plug(:protect_from_forgery)
plug(:put_secure_browser_headers)
plug(NervesHubWWWWeb.Plugs.SetLocale)
end
pipeline :logged_in do
plug(NervesHubWWWWeb.Plugs.EnsureLoggedIn)
end
pipeline :org do
plug(NervesHubWWWWeb.Plugs.Org)
end
pipeline :product do
plug(NervesHubWWWWeb.Plugs.Product)
end
pipeline :device do
plug(NervesHubWWWWeb.Plugs.Device)
end
pipeline :deployment do
plug(NervesHubWWWWeb.Plugs.Deployment)
end
pipeline :firmware do
plug(NervesHubWWWWeb.Plugs.Firmware)
end
scope "/", NervesHubWWWWeb do
# Use the default browser stack
pipe_through(:browser)
get("/", HomeController, :index)
get("/login", SessionController, :new)
post("/login", SessionController, :create)
get("/logout", SessionController, :delete)
get("/register", AccountController, :new)
post("/register", AccountController, :create)
get("/password-reset", PasswordResetController, :new)
post("/password-reset", PasswordResetController, :create)
get("/password-reset/:token", PasswordResetController, :new_password_form)
put("/password-reset/:token", PasswordResetController, :reset)
get("/invite/:token", AccountController, :invite)
post("/invite/:token", AccountController, :accept_invite)
scope "/policy" do
get("/tos", PolicyController, :tos)
get("/privacy", PolicyController, :privacy)
get("/coc", PolicyController, :coc)
end
get("/sponsors", SponsorController, :index)
get("/nerves_key", NervesKeyController, :index)
end
scope "/", NervesHubWWWWeb do
pipe_through([:browser, :logged_in])
scope "/settings/:org_name" do
pipe_through(:org)
get("/", OrgController, :edit)
put("/", OrgController, :update)
get("/invite", OrgController, :invite)
post("/invite", OrgController, :send_invite)
get("/certificates", OrgCertificateController, :index)
post("/certificates", OrgCertificateController, :create)
get("/certificates/new", OrgCertificateController, :new)
delete("/certificates/:serial", OrgCertificateController, :delete)
get("/users", OrgUserController, :index)
get("/users/:user_id", OrgUserController, :edit)
put("/users/:user_id", OrgUserController, :update)
delete("/users/:user_id", OrgUserController, :delete)
resources("/keys", OrgKeyController)
end
scope "/account/:user_name" do
get("/", AccountController, :edit)
put("/", AccountController, :update)
scope "/certificates" do
get("/", AccountCertificateController, :index)
get("/new", AccountCertificateController, :new)
get("/:id", AccountCertificateController, :show)
delete("/:id", AccountCertificateController, :delete)
post("/create", AccountCertificateController, :create)
get("/:id/download", AccountCertificateController, :download)
end
get("/organizations", OrgController, :index)
end
get("/org/new", OrgController, :new)
post("/org", OrgController, :create)
scope "/org/:org_name" do
pipe_through(:org)
get("/", ProductController, :index)
put("/", ProductController, :update)
get("/new", ProductController, :new)
post("/", ProductController, :create)
scope "/:product_name" do
pipe_through(:product)
get("/", ProductController, :show)
get("/edit", ProductController, :edit)
delete("/", ProductController, :delete)
scope "/devices" do
get("/", DeviceController, :index)
post("/", DeviceController, :create)
get("/new", DeviceController, :new)
scope "/:device_identifier" do
pipe_through(:device)
get("/", DeviceController, :show)
get("/console", DeviceController, :console)
get("/edit", DeviceController, :edit)
patch("/", DeviceController, :update)
put("/", DeviceController, :update)
delete("/", DeviceController, :delete)
end
end
scope "/firmware" do
get("/", FirmwareController, :index)
get("/upload", FirmwareController, :upload)
post("/upload", FirmwareController, :do_upload)
scope "/:firmware_uuid" do
pipe_through(:firmware)
get("/", FirmwareController, :show)
get("/download", FirmwareController, :download)
delete("/", FirmwareController, :delete)
end
end
scope "/deployments" do
get("/", DeploymentController, :index)
post("/", DeploymentController, :create)
get("/new", DeploymentController, :new)
scope "/:deployment_name" do
pipe_through(:deployment)
get("/show", DeploymentController, :show)
get("/edit", DeploymentController, :edit)
patch("/", DeploymentController, :update)
put("/", DeploymentController, :update)
delete("/", DeploymentController, :delete)
end
end
end
end
end
if Mix.env() in [:dev] do
scope "/dev" do
pipe_through([:browser])
forward("/mailbox", Bamboo.SentEmailViewerPlug, base_path: "/dev/mailbox")
end
end
end
| 30.021622 | 80 | 0.631797 |
ff06479a087734376b0f7386ef0e6e1445c711d7 | 197 | ex | Elixir | lib/hxl/ast/body.ex | drowzy/hcl | afb65a3fe4c86d90ba02ab8402d99019bb6509bc | [
"Apache-2.0"
] | 18 | 2021-09-28T23:15:46.000Z | 2021-12-21T15:04:40.000Z | lib/hxl/ast/body.ex | drowzy/hxl | afb65a3fe4c86d90ba02ab8402d99019bb6509bc | [
"Apache-2.0"
] | 4 | 2021-09-28T14:50:16.000Z | 2022-03-29T13:22:49.000Z | lib/hxl/ast/body.ex | drowzy/hcl | afb65a3fe4c86d90ba02ab8402d99019bb6509bc | [
"Apache-2.0"
] | null | null | null | defmodule HXL.Ast.Body do
@moduledoc false
defstruct [:statements]
@type statement :: HXL.Ast.Attr | HXL.Ast.Block
@type t :: %__MODULE__{
statements: [statement]
}
end
| 17.909091 | 49 | 0.634518 |
ff065432bfaa401f083f0308778ff244793bbbc1 | 2,712 | ex | Elixir | apps/telepath/lib/telepath/data/seedbox.ex | julien-leclercq/telepath | b73e47ae0fabb18ab565a8c015099b1ba948810c | [
"MIT"
] | 1 | 2019-10-30T19:38:12.000Z | 2019-10-30T19:38:12.000Z | apps/telepath/lib/telepath/data/seedbox.ex | julien-leclercq/telepath | b73e47ae0fabb18ab565a8c015099b1ba948810c | [
"MIT"
] | 3 | 2018-05-23T11:53:24.000Z | 2022-02-10T15:53:08.000Z | apps/telepath/lib/telepath/data/seedbox.ex | julien-leclercq/telepath | b73e47ae0fabb18ab565a8c015099b1ba948810c | [
"MIT"
] | null | null | null | defmodule Telepath.Data.Seedbox do
@moduledoc """
This module provides data structure and functions to manipulate seedboxes
"""
alias Ecto.Changeset
alias Kaur.Result
alias Telepath.Seedbox
alias Telepath.Seedbox.{Auth, Impl, Repository}
import Changeset
use Ecto.Schema
require Logger
@max_port :math.pow(2, 16) - 1
@min_port 0
@params [:host, :id, :name, :port]
@required_params [:host, :port]
schema "seedboxes" do
field(:accessible, :boolean, default: false)
field(:host, :string)
field(:name, :string, default: "")
field(:port, :integer)
field(:remote, :boolean, default: true)
field(:session, {:map, :string})
field(:session_id, :string)
field(:torrents, {:array, :string}, default: [])
embeds_one(:auth, Auth, on_replace: :update)
end
@spec changeset(%__MODULE__{}, map) :: Ecto.Changeset.t()
def changeset(%__MODULE__{} = seedbox, params) do
seedbox
|> cast(params, @params)
|> validate_required(@required_params)
|> validate_number(:port, greater_than: @min_port)
|> validate_number(:port, less_than: @max_port)
|> cast_embed(:auth, with: &auth_changeset/2)
end
@spec create(map) :: {:ok, %__MODULE__{}} | {:error, term()}
def create(seedbox_params) do
seedbox_params
|> Impl.create()
|> Result.tap_error(fn _ -> Logger.error("SEEDBOX:CREATE invalid data") end)
|> Result.and_then(fn box ->
box
|> Repository.create()
|> Result.map(fn _ -> box end)
end)
end
def dispatch_call(call) do
Supervisor.which_children(Seedbox.Supervisor)
|> Enum.map(fn {_, pid, _, _} -> Task.async(call.(pid)) end)
|> Enum.map(&Task.await/1)
|> Result.sequence()
end
def get(pid) when is_pid(pid) do
pid
|> GenServer.call(:state, :infinity)
end
def get(id) when is_binary(id) do
Repository.find(id)
|> Result.and_then(fn {pid, _} -> get(pid) end)
end
def list() do
async_get = fn pid ->
fn -> get(pid) end
end
dispatch_call(async_get)
end
def update(id, params) do
id
|> Repository.find()
|> Result.either(fn _ -> Result.error(:not_found) end, fn {pid, _} ->
pid
|> GenServer.call({:update, params}, :infinity)
end)
end
def delete(id) do
id
|> Repository.find()
|> Result.either(fn _ -> Result.error(:not_found) end, fn {pid, _} ->
pid
|> GenServer.stop(:normal)
|> case do
:ok -> {:ok, id}
_ -> Result.error("impossible to stop process (this is not normal)")
end
end)
end
# ---- Private ----
defp auth_changeset(auth \\ %Auth{}, params) do
auth
|> cast(params, [:username, :password])
end
end
| 24.432432 | 80 | 0.6191 |
ff06870e125c3cd77212819bca9189a28e9fdb6f | 1,732 | ex | Elixir | clients/alert_center/lib/google_api/alert_center/v1beta1/model/batch_delete_alerts_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/alert_center/lib/google_api/alert_center/v1beta1/model/batch_delete_alerts_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/alert_center/lib/google_api/alert_center/v1beta1/model/batch_delete_alerts_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AlertCenter.V1beta1.Model.BatchDeleteAlertsRequest do
@moduledoc """
A request to perform batch delete on alerts.
## Attributes
* `alertId` (*type:* `list(String.t)`, *default:* `nil`) - Required. list of alert IDs.
* `customerId` (*type:* `String.t`, *default:* `nil`) - Optional. The unique identifier of the G Suite organization account of the
customer the alerts are associated with.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:alertId => list(String.t()),
:customerId => String.t()
}
field(:alertId, type: :list)
field(:customerId)
end
defimpl Poison.Decoder, for: GoogleApi.AlertCenter.V1beta1.Model.BatchDeleteAlertsRequest do
def decode(value, options) do
GoogleApi.AlertCenter.V1beta1.Model.BatchDeleteAlertsRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AlertCenter.V1beta1.Model.BatchDeleteAlertsRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.960784 | 134 | 0.732679 |
ff06a2326f42d5bb50ce93b5220a156ad74f6e8e | 1,999 | ex | Elixir | clients/content/lib/google_api/content/v21/model/account_business_information.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v21/model/account_business_information.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/content/lib/google_api/content/v21/model/account_business_information.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V21.Model.AccountBusinessInformation do
  @moduledoc """
  The business information of a Content API account.

  ## Attributes

  * `address` (*type:* `GoogleApi.Content.V21.Model.AccountAddress.t`, *default:* `nil`) - The address of the business.
  * `customerService` (*type:* `GoogleApi.Content.V21.Model.AccountCustomerService.t`, *default:* `nil`) - The customer service information of the business.
  * `phoneNumber` (*type:* `String.t`, *default:* `nil`) - The phone number of the business.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :address => GoogleApi.Content.V21.Model.AccountAddress.t(),
          :customerService => GoogleApi.Content.V21.Model.AccountCustomerService.t(),
          :phoneNumber => String.t()
        }

  # field/1-2 macros are provided by GoogleApi.Gax.ModelBase (via `use` above)
  # and register the decode/encode mapping for each attribute.
  field(:address, as: GoogleApi.Content.V21.Model.AccountAddress)
  field(:customerService, as: GoogleApi.Content.V21.Model.AccountCustomerService)
  field(:phoneNumber)
end
# Poison decode hook — hands the raw map to the model's own decode/2.
defimpl Poison.Decoder, for: GoogleApi.Content.V21.Model.AccountBusinessInformation do
  def decode(value, options),
    do: GoogleApi.Content.V21.Model.AccountBusinessInformation.decode(value, options)
end
# Poison encode hook — delegates to the shared Gax model-base encoder.
defimpl Poison.Encoder, for: GoogleApi.Content.V21.Model.AccountBusinessInformation do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 37.716981 | 158 | 0.74037 |
ff06a5e9755d979bf4ec29980e40e44ff08ce4b0 | 715 | ex | Elixir | lib/survey_api_web/gettext.ex | kamidev/survey_ap | 483314842cf2e8279e1224e83b57d61a5da143ad | [
"MIT"
] | 6 | 2020-02-04T16:18:30.000Z | 2020-10-31T06:00:03.000Z | lib/survey_api_web/gettext.ex | kamidev/survey_ap | 483314842cf2e8279e1224e83b57d61a5da143ad | [
"MIT"
] | 114 | 2019-11-14T03:48:17.000Z | 2022-03-17T12:38:14.000Z | lib/survey_api_web/gettext.ex | kamidev/survey_ap | 483314842cf2e8279e1224e83b57d61a5da143ad | [
"MIT"
] | 1 | 2021-07-08T01:48:56.000Z | 2021-07-08T01:48:56.000Z | defmodule SurveyAPIWeb.Gettext do
  @moduledoc """
  A module providing Internationalization with a gettext-based API.

  By using [Gettext](https://hexdocs.pm/gettext),
  your module gains a set of macros for translations, for example:

      import SurveyAPIWeb.Gettext

      # Simple translation
      gettext "Here is the string to translate"

      # Plural translation
      ngettext "Here is the string to translate",
               "Here are the strings to translate",
               3

      # Domain-based translation
      dgettext "errors", "Here is the error message to translate"

  See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
  """

  # Translations are resolved under the :survey_api OTP application.
  use Gettext, otp_app: :survey_api
end
| 28.6 | 72 | 0.683916 |
ff06bb05e6c34183507e7b2f538af6365ac55011 | 925 | ex | Elixir | lib/vroom/vehicle/step.ex | wesleimp/vroom-elixir | 5c0bc5a8c9ce02f6415eb06a568f3310dfd8977d | [
"MIT"
] | 5 | 2021-04-14T15:02:59.000Z | 2021-04-28T19:18:00.000Z | lib/vroom/vehicle/step.ex | sleipnir/vroom-elixir | 3e4963aa796a7ce84f21ef89744f054c8c6a6c34 | [
"MIT"
] | 5 | 2021-04-13T22:03:29.000Z | 2021-05-12T12:47:41.000Z | lib/vroom/vehicle/step.ex | sleipnir/vroom-elixir | 3e4963aa796a7ce84f21ef89744f054c8c6a6c34 | [
"MIT"
] | 2 | 2021-04-14T16:39:35.000Z | 2021-04-28T19:17:53.000Z | defmodule VROOM.Vehicle.Step do
  @moduledoc """
  Defines a Vehicle Step object.

  The Vehicle.Step struct has the following properties:

  - `type`: a string (either start, job, pickup, delivery, break or end)
  - `id`: id of the task to be performed at this step if type value is job, pickup, delivery or break;
  - `service_at`: hard constraint on service time;
  - `service_after`: hard constraint on service time lower bound;
  - `service_before`: hard constraint on service time upper bound;

  https://github.com/VROOM-Project/vroom/blob/master/docs/API.md#vehicles
  """

  @type t() :: %__MODULE__{
          type: binary(),
          id: integer() | nil,
          service_at: integer() | nil,
          service_after: integer() | nil,
          service_before: integer() | nil
        }

  # Derives Jason.Encoder so the struct serializes directly to JSON.
  @derive Jason.Encoder
  defstruct [
    :type,
    :id,
    :service_at,
    :service_after,
    :service_before
  ]
end
| 27.205882 | 102 | 0.64973 |
ff06e37e5a6bd76e8b69b98ab6fcd77a72c11d1a | 1,698 | ex | Elixir | coherence_demo_no_confirm/lib/coherence_demo_web/controllers/coherence/redirects.ex | hotpyn/coherence-setup | fc10bb15d993ae0dd13a19fd178bdfb4ee13d6b6 | [
"MIT"
] | null | null | null | coherence_demo_no_confirm/lib/coherence_demo_web/controllers/coherence/redirects.ex | hotpyn/coherence-setup | fc10bb15d993ae0dd13a19fd178bdfb4ee13d6b6 | [
"MIT"
] | null | null | null | coherence_demo_no_confirm/lib/coherence_demo_web/controllers/coherence/redirects.ex | hotpyn/coherence-setup | fc10bb15d993ae0dd13a19fd178bdfb4ee13d6b6 | [
"MIT"
] | null | null | null | defmodule Coherence.Redirects do
  @moduledoc """
  Define controller action redirection functions.

  This module contains default redirect functions for each of the controller
  actions that perform redirects. By using this Module you get the following
  functions:

  * session_create/2
  * session_delete/2
  * password_create/2
  * password_update/2,
  * unlock_create_not_locked/2
  * unlock_create_invalid/2
  * unlock_create/2
  * unlock_edit_not_locked/2
  * unlock_edit/2
  * unlock_edit_invalid/2
  * registration_create/2
  * invitation_create/2
  * confirmation_create/2
  * confirmation_edit_invalid/2
  * confirmation_edit_expired/2
  * confirmation_edit/2
  * confirmation_edit_error/2

  You can override any of the functions to customize the redirect path. Each
  function is passed the `conn` and `params` arguments from the controller.

  ## Examples

      import CoherenceDemoWeb.Router.Helpers

      # override the log out action back to the log in page
      def session_delete(conn, _), do: redirect(conn, to: session_path(conn, :new))

      # redirect the user to the login page after registering
      def registration_create(conn, _), do: redirect(conn, to: session_path(conn, :new))

      # disable the user_return_to feature on login
      def session_create(conn, _), do: redirect(conn, to: landing_path(conn, :index))
  """
  # Provides the default implementations listed above; any function defined
  # below this line overrides its default.
  use Redirects
  # Uncomment the import below if adding overrides
  # import CoherenceDemoWeb.Router.Helpers

  # Add function overrides below

  # Example usage
  # Uncomment the following line to return the user to the login form after logging out
  # def session_delete(conn, _), do: redirect(conn, to: session_path(conn, :new))
end
| 30.872727 | 88 | 0.743227 |
ff06f60059f56cb453a5fe9a4249618bb1a74455 | 3,759 | exs | Elixir | apps/bytepack/priv/repo/seeds.exs | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | 313 | 2020-12-03T17:26:24.000Z | 2022-03-18T09:05:14.000Z | apps/bytepack/priv/repo/seeds.exs | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | null | null | null | apps/bytepack/priv/repo/seeds.exs | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | 57 | 2020-12-03T17:41:53.000Z | 2022-03-17T17:28:16.000Z | Bytepack.Repo.query!("TRUNCATE TABLE users RESTART IDENTITY CASCADE")
# Development seed script: resets users/orgs (users are truncated just above),
# then builds a demo dataset — a staff user, an "Acme" seller org with hex
# packages, products, and a mix of completed and pending sales.
Bytepack.Repo.query!("TRUNCATE TABLE orgs RESTART IDENTITY CASCADE")

# The fixture modules live under test/support and may not be loaded outside
# the test env; require them on demand so the seeds can reuse them.
unless Code.ensure_loaded?(Bytepack.AccountsFixtures) do
  Code.require_file(Path.expand("../../test/support/fixtures/accounts_fixtures.ex", __DIR__))
end

unless Code.ensure_loaded?(Bytepack.PackagesFixtures) do
  Code.require_file(Path.expand("../../test/support/fixtures/packages_fixtures.ex", __DIR__))
end

unless Code.ensure_loaded?(Bytepack.SalesFixtures) do
  Code.require_file(Path.expand("../../test/support/fixtures/sales_fixtures.ex", __DIR__))
end

alias Bytepack.{
  AccountsFixtures,
  PackagesFixtures,
  SalesFixtures,
  Orgs
}

# system/0 supplies the audit-log actor used for every seeded mutation.
import Bytepack.AuditLog, only: [system: 0]

# Users: one staff member, two regular users, and one unconfirmed user.
alice = AccountsFixtures.staff_fixture(%{email: "alice@example.com", password: "secret123456"})
bob = AccountsFixtures.user_fixture(%{email: "bob@example.com", password: "secret123456"})
carol = AccountsFixtures.user_fixture(%{email: "carol@example.com", password: "secret123456"})

AccountsFixtures.user_fixture(%{
  email: "dave@example.com",
  password: "secret123456",
  confirmed: false
})

# Acme org: created by alice; bob accepts his invitation, carol's is left pending.
{:ok, acme} =
  Orgs.create_org(system(), alice, %{name: "Acme", slug: "acme", email: "acme@example.com"})

{:ok, bob_invitation} = Orgs.create_invitation(system(), acme, %{email: bob.email}, "/")
{:ok, _} = Orgs.create_invitation(system(), acme, %{email: carol.email}, "/")
Orgs.accept_invitation!(system(), bob, bob_invitation.id)

# Publish a few hex package tarballs under the Acme org.
tarball = PackagesFixtures.hex_package_tarball("foo-1.0.0/foo-1.0.0.tar")
{:ok, _} = Bytepack.Hex.publish(system(), acme, tarball)

tarball = PackagesFixtures.hex_package_tarball("foo-1.1.0/foo-1.1.0.tar")
{:ok, _} = Bytepack.Hex.publish(system(), acme, tarball)

tarball = PackagesFixtures.hex_package_tarball("bar-1.0.0/bar-1.0.0.tar")
{:ok, _} = Bytepack.Hex.publish(system(), acme, tarball)

tarball = PackagesFixtures.hex_package_tarball("baz-1.0.0/baz-1.0.0.tar")
{:ok, _} = Bytepack.Hex.publish(system(), acme, tarball)

# hardcode alice's tokens for automated tests against local dev server
Orgs.get_membership!(alice, acme.slug)
|> Ecto.Changeset.change(%{
  write_token: Base.url_decode64!("PIDRtu8F0Dax_HozFlcjFaICU1X3LKLC"),
  read_token: Base.url_decode64!("cdAMJ-dyrIzxTN9JoLG0Ub5zR9A43gCp")
})
|> Bytepack.Repo.update!()

# Promote Acme to a seller and fill in its billing profile.
Bytepack.Orgs.enable_as_seller(acme)
seller = Bytepack.Sales.get_seller!(acme)

{:ok, _} =
  Bytepack.Sales.update_seller(
    system(),
    seller,
    %{
      email: "acme@example.com",
      legal_name: "Acme Inc.",
      address_city: "Gothan",
      address_line1: "5th av",
      address_country: "BR"
    }
  )

# Products bundling the packages published above.
foo = Bytepack.Packages.get_available_package_by!(acme, type: "hex", name: "foo")
bar = Bytepack.Packages.get_available_package_by!(acme, type: "hex", name: "bar")

foo_product =
  SalesFixtures.product_fixture(
    acme,
    %{
      name: "Acme Foo",
      description: "Lorem ipsum.",
      url: "http://localhost:4000",
      package_ids: [foo.id]
    }
  )

foo_bar_product =
  SalesFixtures.product_fixture(
    acme,
    %{
      name: "Acme Foo & Bar",
      description: "Lorem ipsum.",
      url: "http://localhost:4000",
      package_ids: [foo.id, bar.id]
    }
  )

# A buyer org; one sale is emailed and completed for it, plus two further
# sales for bob and carol left in whatever state the fixture creates.
{:ok, los_pollos} =
  Orgs.create_org(system(), alice, %{
    name: "Los Pollos Hermanos",
    slug: "los-pollos",
    email: "los-pollos@example.com"
  })

sale = SalesFixtures.sale_fixture(seller, foo_bar_product, email: alice.email)

{:ok, _} =
  Bytepack.Sales.deliver_create_sale_email(
    sale,
    &"http://localhost:4000/purchases/claim/#{sale.id}/#{&1}"
  )

Bytepack.Sales.complete_sale!(system(), sale, los_pollos)

SalesFixtures.sale_fixture(seller, foo_product, email: bob.email)
SalesFixtures.sale_fixture(seller, foo_product, email: carol.email)
| 30.811475 | 95 | 0.708433 |
ff071f3e623dde0d2246f66cdded1b0a842c0603 | 2,375 | exs | Elixir | test/elixir/test/local_docs_test.exs | mtenrero/couchdb-vetcontrol | b7ede3ededdf0072c73f08d8f1217cb723b03f7a | [
"Apache-2.0"
] | null | null | null | test/elixir/test/local_docs_test.exs | mtenrero/couchdb-vetcontrol | b7ede3ededdf0072c73f08d8f1217cb723b03f7a | [
"Apache-2.0"
] | null | null | null | test/elixir/test/local_docs_test.exs | mtenrero/couchdb-vetcontrol | b7ede3ededdf0072c73f08d8f1217cb723b03f7a | [
"Apache-2.0"
] | null | null | null | defmodule LocalDocsTest do
  use CouchTestCase

  @moduletag :local_docs

  @moduledoc """
  Test CouchDB _local_docs
  """

  # Creates a throwaway database seeded with two local documents
  # (_local/foo and _local/foo2); every test below queries _local_docs on it.
  setup_all do
    db_name = random_db_name()
    {:ok, _} = create_db(db_name)
    on_exit(fn -> delete_db(db_name) end)

    resp1 = Couch.put(
      "/#{db_name}/_local/foo",
      body: %{
        _id: "foo",
        bar: "baz"
      }
    )
    assert resp1.status_code == 201

    resp2 = Couch.put(
      "/#{db_name}/_local/foo2",
      body: %{
        _id: "foo",
        bar: "baz2"
      }
    )
    assert resp2.status_code == 201

    # db_name is merged into each test's context.
    {:ok, [db_name: db_name]}
  end

  test "GET with no parameters", context do
    resp = Couch.get(
      "/#{context[:db_name]}/_local_docs"
    )
    assert resp.status_code == 200
    # Both seeded local docs come back.
    assert length(Map.get(resp, :body)["rows"]) == 2
  end

  test "GET with multiple keys", context do
    # keys is passed as a JSON-encoded array in the query string.
    resp = Couch.get(
      "/#{context[:db_name]}/_local_docs",
      query: %{
        :keys => "[\"_local/foo\", \"_local/foo2\"]",
      }
    )
    assert resp.status_code == 200
    assert length(Map.get(resp, :body)["rows"]) == 2
  end

  test "POST with empty body", context do
    # An empty JSON body behaves like a plain GET.
    resp = Couch.post(
      "/#{context[:db_name]}/_local_docs",
      body: %{}
    )
    assert resp.status_code == 200
    assert length(Map.get(resp, :body)["rows"]) == 2
  end

  test "POST with keys and limit", context do
    resp = Couch.post(
      "/#{context[:db_name]}/_local_docs",
      body: %{
        :keys => ["_local/foo", "_local/foo2"],
        :limit => 1
      }
    )
    assert resp.status_code == 200
    assert length(Map.get(resp, :body)["rows"]) == 1
  end

  test "POST with query parameter and JSON body", context do
    # limit may come from the query string while keys come from the body.
    resp = Couch.post(
      "/#{context[:db_name]}/_local_docs",
      query: %{
        :limit => 1
      },
      body: %{
        :keys => ["_local/foo", "_local/foo2"]
      }
    )
    assert resp.status_code == 200
    assert length(Map.get(resp, :body)["rows"]) == 1
  end

  test "POST edge case with colliding parameters - query takes precedence", context do
    resp = Couch.post(
      "/#{context[:db_name]}/_local_docs",
      query: %{
        :limit => 0
      },
      body: %{
        :keys => ["_local/foo", "_local/foo2"],
        :limit => 2
      }
    )
    assert resp.status_code == 200
    # limit=0 from the query string wins over limit=2 in the body.
    assert Enum.empty?(Map.get(resp, :body)["rows"])
  end
end
| 21.396396 | 86 | 0.546105 |
ff07439327011794b84118c246aaabf199765abe | 7,044 | ex | Elixir | lib/brando/json_ld/schema.ex | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | 4 | 2020-10-30T08:40:38.000Z | 2022-01-07T22:21:37.000Z | lib/brando/json_ld/schema.ex | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | 1,162 | 2020-07-05T11:20:15.000Z | 2022-03-31T06:01:49.000Z | lib/brando/json_ld/schema.ex | brandocms/brando | 4198e0c0920031bd909969055064e4e2b7230d21 | [
"MIT"
] | null | null | null | defmodule Brando.JSONLD.Schema do
  @deprecated "Move to blueprints"
  #! TODO: Delete when moving to Blueprints
  @moduledoc """
  Schema definitions for JSONLD schemas
  """

  @doc false
  defmacro __using__(_) do
    # Registers the accumulating :json_ld_fields attribute on the caller and
    # schedules __before_compile__/1 to inject extract_json_ld/2 at the end
    # of compilation.
    quote do
      import Brando.JSONLD.Schema, only: [json_ld_schema: 2]
      Module.register_attribute(__MODULE__, :json_ld_fields, accumulate: true)
      @before_compile unquote(__MODULE__)
    end
  end
  defmacro __before_compile__(env) do
    # Snapshot the field specs and target schema module that the field/2..4
    # and json_ld_schema/2 macros accumulated on the caller.
    json_ld_fields = Module.get_attribute(env.module, :json_ld_fields)
    json_ld_schema = Module.get_attribute(env.module, :json_ld_schema)

    quote do
      # Builds the JSON-LD schema struct for `data`, one field spec at a time.
      # `extra_fields` takes the same internal tuple format (see convert_format/1).
      def extract_json_ld(data, extra_fields \\ []) do
        fields = unquote(json_ld_fields) ++ extra_fields

        Enum.reduce(fields, struct(unquote(json_ld_schema)), fn
          # reference -> %{"@id": "<hostname>/#target"}
          {name, {:references, target}}, acc ->
            result = %{"@id": "#{Brando.Utils.hostname()}/##{target}"}
            Map.put(acc, name, result)

          # literal integer/binary stored directly
          {name, value}, acc
          when is_integer(value) or is_binary(value) ->
            Map.put(acc, name, value)

          # string extracted from `data` at `path`
          {name, {:string, path}}, acc
          when is_list(path) ->
            result = get_in(data, Enum.map(path, &Access.key/1))
            Map.put(acc, name, result)

          # string at `path`, passed through a mutator function
          {name, {{:string, path}, {:mutator, mutation_function}}}, acc
          when is_list(path) ->
            value = get_in(data, Enum.map(path, &Access.key/1))
            result = mutation_function.(value)
            Map.put(acc, name, result)

          # string computed from the whole `data`
          {name, {:string, mutation_function}}, acc
          when is_function(mutation_function) ->
            result = mutation_function.(data)
            Map.put(acc, name, result)

          # nested schema built from the value at `path`
          {name, {schema, path}}, acc
          when is_list(path) ->
            value = get_in(data, Enum.map(path, &Access.key/1))
            result = schema.build(value)
            Map.put(acc, name, result)

          # nested schema built from a populator function's result
          {name, {schema, populator_function}}, acc
          when is_function(populator_function) ->
            pf_result = populator_function.(data)
            result = schema.build(pf_result)
            Map.put(acc, name, result)

          # nested schema: value at `path`, mutated, then built
          {name, {{schema, path}, mutation_function}}, acc
          when not is_binary(schema) and
                 is_list(path) and
                 is_function(mutation_function) ->
            value = get_in(data, Enum.map(path, &Access.key/1))
            mf_result = mutation_function.(value)
            result = schema.build(mf_result)
            Map.put(acc, name, result)
        end)
      end
    end
  end
  # coveralls-ignore-start
  @doc """
  Declares the JSON-LD schema struct this module populates and opens the
  block in which `field/2..4` declarations are made.
  """
  defmacro json_ld_schema(schema_module, do: block) do
    do_json_ld_schema(schema_module, block)
  end

  # coveralls-ignore-stop

  defp do_json_ld_schema(schema_module, block) do
    # Records the target schema on the caller, then evaluates the block with
    # this module's macros imported so bare `field ...` calls resolve.
    quote do
      Module.put_attribute(__MODULE__, :json_ld_schema, unquote(schema_module))

      try do
        import Brando.JSONLD.Schema
        unquote(block)
      after
        :ok
      end
    end
  end
  @doc """
  Defines a JSON LD field.

  This macro defines

  * a field name
  * a path to extract the data from
  * a mutator/generator function
  """
  defmacro field(name, {:references, target}) do
    # Reference field: resolved to an @id pointing at `target` at build time.
    quote do
      Module.put_attribute(
        __MODULE__,
        :json_ld_fields,
        {unquote(name), {:references, unquote(target)}}
      )
    end
  end

  # populate a field with a path without mutator function
  defmacro field(name, :string, path) when is_list(path) do
    quote do
      Module.put_attribute(
        __MODULE__,
        :json_ld_fields,
        {unquote(name), {:string, unquote(path)}}
      )
    end
  end

  # populate a field with a path with mutator function
  defmacro field(name, :string, path, mutation_function) when is_list(path) do
    quote do
      Module.put_attribute(
        __MODULE__,
        :json_ld_fields,
        {unquote(name), {{:string, unquote(path)}, {:mutator, unquote(mutation_function)}}}
      )
    end
  end

  # populate a field without a path with mutator function
  defmacro field(name, :string, mutation_function) when is_function(mutation_function) do
    quote do
      Module.put_attribute(
        __MODULE__,
        :json_ld_fields,
        {unquote(name), {:string, unquote(mutation_function)}}
      )
    end
  end

  # populate a field as a schema with populator function
  # The next two clauses reject invalid combinations at compile time.
  defmacro field(name, schema, nil) do
    raise "=> JSONLD/Schema >> Populating a field as schema requires a populator function - #{
            name
          } - #{inspect(schema)}"
  end

  defmacro field(name, schema, _) when is_binary(schema) do
    raise "=> JSONLD/Schema >> Populating a field as schema requires a schema as second arg - #{
            name
          } - #{inspect(schema)}"
  end

  defmacro field(name, schema, path) when is_list(path) do
    quote do
      Module.put_attribute(
        __MODULE__,
        :json_ld_fields,
        {unquote(name), {unquote(schema), unquote(path)}}
      )
    end
  end

  defmacro field(name, schema, populator_function) do
    quote do
      Module.put_attribute(
        __MODULE__,
        :json_ld_fields,
        {unquote(name), {unquote(schema), unquote(populator_function)}}
      )
    end
  end

  # populate a field as a schema with a path and populator function
  defmacro field(name, schema, path, mutation_function)
           when not is_binary(schema) and is_list(path) do
    quote do
      Module.put_attribute(
        __MODULE__,
        :json_ld_fields,
        {unquote(name), {{unquote(schema), unquote(path)}, unquote(mutation_function)}}
      )
    end
  end
  @doc """
  Allows us to have same formatting when adding additional json_ld fields in a controller

  Accepts tuples shaped like the `field/2..4` macro arguments and normalizes
  them to the internal `{name, spec}` tuples consumed by `extract_json_ld/2`.
  """
  def convert_format(fields) do
    # NOTE(review): prepending without a final reverse yields the fields in
    # reverse input order — confirm callers are order-insensitive.
    Enum.reduce(fields, [], fn
      {name, {:references, target}}, acc ->
        [{name, {:references, target}} | acc]

      {name, :string, path}, acc when is_list(path) ->
        [{name, {:string, path}} | acc]

      {name, :string, path, mutation_function}, acc when is_list(path) ->
        [{name, {{:string, path}, {:mutator, mutation_function}}} | acc]

      {name, :string, mutation_function}, acc when is_function(mutation_function) ->
        [{name, {:string, mutation_function}} | acc]

      # Invalid combinations raise, mirroring the field/3 macro clauses.
      {name, schema, nil}, _acc ->
        raise "=> JSONLD/Schema >> Populating a field as schema requires a populator function - #{
                name
              } - #{inspect(schema)}"

      {name, schema, _}, _acc when is_binary(schema) ->
        raise "=> JSONLD/Schema >> Populating a field as schema requires a schema as second arg - #{
                name
              } - #{inspect(schema)}"

      {name, schema, path}, acc when is_list(path) ->
        [{name, {schema, path}} | acc]

      {name, schema, populator_function}, acc ->
        [{name, {schema, populator_function}} | acc]

      {name, schema, path, mutation_function}, acc
      when not is_binary(schema) and is_list(path) ->
        [{name, {{schema, path}, mutation_function}} | acc]
    end)
  end
end
| 30.493506 | 100 | 0.611442 |
ff0748e53e7609757f3b07f6019ac23be64430b9 | 650 | ex | Elixir | scaffolds_actor_model/elixir/lib/actor_model/application.ex | lodykas/code_retreat | d22b3377fa655ef9064df7796c44a34b7dc33a58 | [
"MIT"
] | 7 | 2016-06-22T18:08:26.000Z | 2021-03-15T11:50:16.000Z | scaffolds_actor_model/elixir/lib/actor_model/application.ex | lodykas/code_retreat | d22b3377fa655ef9064df7796c44a34b7dc33a58 | [
"MIT"
] | 27 | 2016-06-07T12:45:44.000Z | 2021-03-24T08:26:29.000Z | scaffolds_actor_model/elixir/lib/actor_model/application.ex | lodykas/code_retreat | d22b3377fa655ef9064df7796c44a34b7dc33a58 | [
"MIT"
] | 11 | 2016-06-09T13:12:38.000Z | 2022-01-20T15:15:05.000Z | defmodule ActorModel.Application do
  # See https://hexdocs.pm/elixir/Application.html
  # for more information on OTP Applications
  @moduledoc false

  use Application

  @impl true
  def start(_type, _args) do
    # Two Worker processes are started as mutual friends, each referencing
    # the other by registered name.
    children = [
      # Starts a worker by calling: ActorModel.Worker.start_link(arg)
      {ActorModel.Worker, name: :Quick, friend_name: :Flupke},
      {ActorModel.Worker, name: :Flupke, friend_name: :Quick}
    ]

    # See https://hexdocs.pm/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one, name: ActorModel.Supervisor]
    Supervisor.start_link(children, opts)
  end
end
| 29.545455 | 69 | 0.712308 |
ff0759fbb3013c248f4a97e2c74ebf574ec8ece5 | 2,304 | ex | Elixir | lib/reddex/auth.ex | christopher-dG/reddex | 9357a93a2dbe175dfb8ab593c3422c9bb2cccd9d | [
"MIT"
] | null | null | null | lib/reddex/auth.ex | christopher-dG/reddex | 9357a93a2dbe175dfb8ab593c3422c9bb2cccd9d | [
"MIT"
] | null | null | null | lib/reddex/auth.ex | christopher-dG/reddex | 9357a93a2dbe175dfb8ab593c3422c9bb2cccd9d | [
"MIT"
] | null | null | null | defmodule Reddex.Auth do
  @moduledoc false

  alias Tesla.Middleware

  import Reddex.Utils

  require Logger

  use GenServer

  # Reddit's password-grant OAuth token endpoint.
  @url "https://www.reddit.com/api/v1/access_token"
def start_link(_opts) do
GenServer.start_link(__MODULE__, "", name: __MODULE__)
end
  @impl true
  def init(_state) do
    # Refuse to start unless every required credential is configured.
    if Enum.any?([:username, :password, :client_id, :client_secret, :user_agent], fn k ->
         is_nil(Application.get_env(:reddex, k))
       end) do
      {:stop, :missing_credentials}
    else
      # Kick off the first token fetch. The registered name is available
      # before init/1 runs, so this message reaches this process.
      send(__MODULE__, :refresh)
      {:ok, ""}
    end
  end
@impl true
def handle_call(:token, _from, state) do
{:reply, state, state}
end
@impl true
def handle_info(:refresh, _state) do
Logger.debug("Refreshing OAuth token")
%{token: token, expiry: expiry} = get_token()
Logger.debug("Refreshed OAuth token")
Process.send_after(__MODULE__, :refresh, expiry - 120_000)
{:noreply, token}
end
  # Gets an OAuth token.
  # Retries forever on failure; `n` counts attempts and is passed to
  # exp_wait/1 (imported from Reddex.Utils — presumably a backoff sleep;
  # confirm in that module).
  @spec get_token(non_neg_integer) :: %{token: String.t(), expiry: integer}
  defp get_token(n \\ 0) do
    exp_wait(n)

    # Basic-auth'd, form-encoded Tesla client; responses decode to atom keys.
    client =
      Tesla.client([
        {Middleware.Headers, [{"user-agent", Application.get_env(:reddex, :user_agent)}]},
        {Middleware.BasicAuth,
         username: Application.get_env(:reddex, :client_id),
         password: Application.get_env(:reddex, :client_secret)},
        Middleware.FormUrlencoded,
        {Middleware.DecodeJson, engine_opts: [keys: :atoms]}
      ])

    body = %{
      grant_type: "password",
      username: Application.get_env(:reddex, :username),
      password: Application.get_env(:reddex, :password)
    }

    case Tesla.post(client, @url, body) do
      {:ok, %{body: %{access_token: token, expires_in: expiry}}} ->
        # expires_in arrives in seconds; callers expect milliseconds.
        %{token: "bearer " <> token, expiry: expiry * 1000}

      {:ok, %{status: status, body: body}} ->
        Logger.warn("""
        OAuth token refresh failed (retrying)
        status code = #{status}
        body = #{inspect(body)}
        """)

        get_token(n + 1)

      {:error, reason} ->
        Logger.warn("OAuth token refresh failed (retrying): #{inspect(reason)}")
        get_token(n + 1)
    end
  end
@doc "Retrieves an OAuth token."
@spec token :: String.t()
def token do
GenServer.call(__MODULE__, :token)
end
end
| 26.482759 | 90 | 0.623264 |
ff07640e54e3438f03d81c997734e2741bda7e7e | 3,890 | exs | Elixir | test/oauth_xyz/model/key_request_test.exs | ritou/elixir-oauth-xyz | 110d4eadb16fa5c106ae0f6fad49c0424bdbf477 | [
"MIT"
] | 2 | 2020-04-22T13:22:25.000Z | 2020-12-01T12:01:30.000Z | test/oauth_xyz/model/key_request_test.exs | ritou/elixir-oauth-xyz | 110d4eadb16fa5c106ae0f6fad49c0424bdbf477 | [
"MIT"
] | 3 | 2019-12-05T01:32:09.000Z | 2019-12-09T01:15:32.000Z | test/oauth_xyz/model/key_request_test.exs | ritou/elixir-oauth-xyz-web | 110d4eadb16fa5c106ae0f6fad49c0424bdbf477 | [
"MIT"
] | null | null | null | defmodule OAuthXYZ.Model.KeyRequestTest do
use OAuthXYZ.DataCase
alias OAuthXYZ.Model.KeyRequest
  # Exercises KeyRequest.parse/1 over each supported key presentation:
  # opaque handle, JWKS-backed proofs (jwsd, httpsig), mTLS certificate and
  # certificate thumbprint, and a DID reference.
  test "parse" do
    # NOTE: https://oauth.xyz/transactionrequest/
    # A bare string is treated as an opaque key handle.
    key = KeyRequest.parse("7C7C4AZ9KHRS6X63AJAO")
    assert key.handle
    refute key.proof

    jwk_keys = [
      %{
        "kty" => "RSA",
        "e" => "AQAB",
        "kid" => "xyz-1",
        "alg" => "RS256",
        "n" =>
          "kOB5rR4Jv0GMeLaY6_It_r3ORwdf8ci_JtffXyaSx8xYJCCNaOKNJn_Oz0YhdHbXTeWO5AoyspDWJbN5w_7bdWDxgpD-y6jnD1u9YhBOCWObNPFvpkTM8LC7SdXGRKx2k8Me2r_GssYlyRpqvpBlY5-ejCywKRBfctRcnhTTGNztbbDBUyDSWmFMVCHe5mXT4cL0BwrZC6S-uu-LAx06aKwQOPwYOGOslK8WPm1yGdkaA1uF_FpS6LS63WYPHi_Ap2B7_8Wbw4ttzbMS_doJvuDagW8A1Ip3fXFAHtRAcKw7rdI4_Xln66hJxFekpdfWdiPQddQ6Y1cK2U3obvUg7w"
      }
    ]

    # JWKS with a jwsd proof: jwks is retained, no handle is set.
    key =
      KeyRequest.parse(%{
        "proof" => "jwsd",
        "jwks" => %{
          "keys" => jwk_keys
        }
      })

    assert key.jwks == %{"keys" => jwk_keys}
    assert key.proof == "jwsd"
    refute key.handle

    # The same JWKS passed inline, this time with an httpsig proof.
    key =
      KeyRequest.parse(%{
        "proof" => "httpsig",
        "jwks" => %{
          "keys" => [
            %{
              "kty" => "RSA",
              "e" => "AQAB",
              "kid" => "xyz-1",
              "alg" => "RS256",
              "n" =>
                "kOB5rR4Jv0GMeLaY6_It_r3ORwdf8ci_JtffXyaSx8xYJCCNaOKNJn_Oz0YhdHbXTeWO5AoyspDWJbN5w_7bdWDxgpD-y6jnD1u9YhBOCWObNPFvpkTM8LC7SdXGRKx2k8Me2r_GssYlyRpqvpBlY5-ejCywKRBfctRcnhTTGNztbbDBUyDSWmFMVCHe5mXT4cL0BwrZC6S-uu-LAx06aKwQOPwYOGOslK8WPm1yGdkaA1uF_FpS6LS63WYPHi_Ap2B7_8Wbw4ttzbMS_doJvuDagW8A1Ip3fXFAHtRAcKw7rdI4_Xln66hJxFekpdfWdiPQddQ6Y1cK2U3obvUg7w"
            }
          ]
        }
      })

    assert key.jwks == %{"keys" => jwk_keys}
    assert key.proof == "httpsig"
    refute key.handle

    # mTLS proof carrying the full base64 certificate body.
    cert =
      "MIIEHDCCAwSgAwIBAgIBATANBgkqhkiG9w0BAQsFADCBmjE3MDUGA1UEAwwuQmVzcG9rZSBFbmdpbmVlcmluZyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eTELMAkGA1UECAwCTUExCzAJBgNVBAYTAlVTMRkwFwYJKoZIhvcNAQkBFgpjYUBic3BrLmlvMRwwGgYDVQQKDBNCZXNwb2tlIEVuZ2luZWVyaW5nMQwwCgYDVQQLDANNVEkwHhcNMTkwNDEwMjE0MDI5WhcNMjQwNDA4MjE0MDI5WjB8MRIwEAYDVQQDDAlsb2NhbGhvc3QxCzAJBgNVBAgMAk1BMQswCQYDVQQGEwJVUzEgMB4GCSqGSIb3DQEJARYRdGxzY2xpZW50QGJzcGsuaW8xHDAaBgNVBAoME0Jlc3Bva2UgRW5naW5lZXJpbmcxDDAKBgNVBAsMA01USTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMmaXQHbs/wc1RpsQ6Orzf6rN+q2ijaZbQxD8oi+XaaN0P/gnE13JqQduvdq77OmJ4bQLokqsd0BexnI07Njsl8nkDDYpe8rNve5TjyUDCfbwgS7U1CluYenXmNQbaYNDOmCdHwwUjV4kKREg6DGAx22Oq7+VHPTeeFgyw4kQgWRSfDENWY3KUXJlb/vKR6lQ+aOJytkvj8kVZQtWupPbvwoJe0na/ISNAOhL74w20DWWoDKoNltXsEtflNljVoi5nqsmZQcjfjt6LO0T7O1OX3Cwu2xWx8KZ3n/2ocuRqKEJHqUGfeDtuQNt6Jz79v/OTr8puLWaD+uyk6NbtGjoQsCAwEAAaOBiTCBhjAJBgNVHRMEAjAAMAsGA1UdDwQEAwIF4DBsBgNVHREEZTBjgglsb2NhbGhvc3SCD3Rsc2NsaWVudC5sb2NhbIcEwKgBBIERdGxzY2xpZW50QGJzcGsuaW+GF2h0dHA6Ly90bHNjbGllbnQubG9jYWwvhhNzc2g6dGxzY2xpZW50LmxvY2FsMA0GCSqGSIb3DQEBCwUAA4IBAQCKKv8WlLrT4Z5NazaUrYtlTF+2v0tvZBQ7qzJQjlOqAcvxry/d2zyhiRCRS/v318YCJBEv4Iq2W3I3JMMyAYEe2573HzT7rH3xQP12yZyRQnetdiVM1Z1KaXwfrPDLs72hUeELtxIcfZ0M085jLboXhufHI6kqm3NCyCCTihe2ck5RmCc5l2KBO/vAHF0ihhFOOOby1v6qbPHQcxAU6rEb907/p6BW/LV1NCgYB1QtFSfGxowqb9FRIMD2kvMSmO0EMxgwZ6k6spa+jk0IsI3klwLW9b+Tfn/daUbIDctxeJneq2anQyU2znBgQl6KILDSF4eaOqlBut/KNZHHazJh"

    key =
      KeyRequest.parse(%{
        "proof" => "mtls",
        "cert" => cert
      })

    assert key.proof == "mtls"
    assert key.cert == cert
    refute key.handle

    # mTLS proof identified by a certificate SHA-256 thumbprint ("cert#256").
    cert_256 = "bwcK0esc3ACC3DB2Y5_lESsXE8o9ltc05O89jdN-dg2"

    key =
      KeyRequest.parse(%{
        "proof" => "mtls",
        "cert#256" => cert_256
      })

    assert key.proof == "mtls"
    assert key.cert_256 == cert_256
    refute key.handle

    # httpsig proof where the key is referenced by a DID URL.
    did = "did:example:CV3BVVXK2PWWLCRQLRFU#xyz-1"

    key =
      KeyRequest.parse(%{
        "proof" => "httpsig",
        "did" => did
      })

    assert key.proof == "httpsig"
    assert key.did == did
    refute key.handle
  end
end
| 40.947368 | 1,416 | 0.743188 |
ff0764a9e09a1ae4ccb12fa50a1cbefae125d5c9 | 211 | ex | Elixir | lib/shoegaze_bot.ex | codebundleio/shoegaze_bot | 97db73e66471a96e581bff14ef60713c70a297fa | [
"MIT"
] | null | null | null | lib/shoegaze_bot.ex | codebundleio/shoegaze_bot | 97db73e66471a96e581bff14ef60713c70a297fa | [
"MIT"
] | null | null | null | lib/shoegaze_bot.ex | codebundleio/shoegaze_bot | 97db73e66471a96e581bff14ef60713c70a297fa | [
"MIT"
] | null | null | null | defmodule ShoegazeBot do
@moduledoc """
Documentation for ShoegazeBot.
"""
@doc """
Hello world.
## Examples
iex> ShoegazeBot.hello()
:world
"""
def hello do
:world
end
end
| 11.105263 | 32 | 0.587678 |
ff07831942f4bfb00f6e88585c6a84dac2e15197 | 221 | ex | Elixir | lib/rfx/ops/filesys/mv_dir.ex | pcorey/rfx | db5be95d93b7aba0cf9799db273d8583c21bfc26 | [
"MIT"
] | 31 | 2021-05-29T22:57:04.000Z | 2022-03-13T16:24:57.000Z | lib/rfx/ops/filesys/mv_dir.ex | pcorey/rfx | db5be95d93b7aba0cf9799db273d8583c21bfc26 | [
"MIT"
] | 4 | 2021-06-04T23:34:38.000Z | 2021-07-16T16:01:20.000Z | lib/rfx/ops/filesys/mv_dir.ex | pcorey/rfx | db5be95d93b7aba0cf9799db273d8583c21bfc26 | [
"MIT"
] | 4 | 2021-06-11T13:10:04.000Z | 2022-02-11T13:33:16.000Z | defmodule Rfx.Ops.Filesys.MvDir do
  @moduledoc false

  # The module body is currently empty; the notes below sketch the intended
  # Change Requests this operation should generate:
  # - rename file
  # - rename associated file (test or source)
  # - project-wide edit text: Old Module Name to New Module Name
end
| 20.090909 | 64 | 0.710407 |
ff07b3c8543fdaafdb5d08a156d632eabcf29da2 | 195 | ex | Elixir | apps/rtc/lib/rtc_web/controllers/page_controller.ex | michaeljguarino/forge | 50ee583ecb4aad5dee4ef08fce29a8eaed1a0824 | [
"Apache-2.0"
] | 59 | 2021-09-16T19:29:39.000Z | 2022-03-31T20:44:24.000Z | apps/rtc/lib/rtc_web/controllers/page_controller.ex | svilenkov/plural | ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026 | [
"Apache-2.0"
] | 111 | 2021-08-15T09:56:37.000Z | 2022-03-31T23:59:32.000Z | apps/rtc/lib/rtc_web/controllers/page_controller.ex | svilenkov/plural | ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026 | [
"Apache-2.0"
] | 4 | 2021-12-13T09:43:01.000Z | 2022-03-29T18:08:44.000Z | defmodule RtcWeb.PageController do
use RtcWeb, :controller
def index(conn, _params) do
render(conn, "index.html")
end
def health(conn, _) do
json(conn, %{pong: true})
end
end
| 16.25 | 34 | 0.671795 |
ff07d019c42e7a585529cbf207d920a4742fbdca | 1,024 | exs | Elixir | mix.exs | kianmeng/uuid | 965c76b7343530cf940a808f497eef37d0a332e6 | [
"MIT"
] | null | null | null | mix.exs | kianmeng/uuid | 965c76b7343530cf940a808f497eef37d0a332e6 | [
"MIT"
] | null | null | null | mix.exs | kianmeng/uuid | 965c76b7343530cf940a808f497eef37d0a332e6 | [
"MIT"
] | null | null | null | #-*-Mode:elixir;coding:utf-8;tab-width:2;c-basic-offset:2;indent-tabs-mode:()-*-
# ex: set ft=elixir fenc=utf-8 sts=2 ts=2 sw=2 et nomod:
defmodule Uuid.Mixfile do
  use Mix.Project

  # Mix project definition for the Erlang uuid library (published to Hex as
  # :uuid_erl). The sources are Erlang, so `language: :erlang` and
  # erlc_options control compilation.
  def project do
    [app: :uuid,
     version: "2.0.4",
     language: :erlang,
     erlc_options: [
       # Embeds the running OTP release as a compile-time define, e.g.
       # ERLANG_OTP_VERSION_24, so the sources can branch on OTP version.
       {:d, :erlang.list_to_atom('ERLANG_OTP_VERSION_' ++ :erlang.system_info(:otp_release))},
       :debug_info,
       :warn_export_vars,
       :warn_unused_import,
       #:warn_missing_spec,
       :warnings_as_errors],
     description: description(),
     package: package(),
     deps: deps()]
  end

  # Runtime applications: only the :quickrand random-number library.
  def application do
    [applications: [
      :quickrand]]
  end

  defp deps do
    [{:quickrand, ">= 2.0.4"}]
  end

  defp description do
    "Erlang UUID Implementation"
  end

  # Hex package metadata; note the package name (:uuid_erl) differs from the
  # OTP application name (:uuid).
  defp package do
    [name: :uuid_erl,
     files: ~w(src include doc rebar.config README.markdown LICENSE),
     maintainers: ["Michael Truog"],
     licenses: ["MIT"],
     links: %{"GitHub" => "https://github.com/okeuday/uuid"}]
  end
end
| 23.272727 | 94 | 0.62207 |
ff07e1f52cdab8748497282aa2d65bdcc0898ed3 | 547 | exs | Elixir | config/test.exs | niku/nigiwaiki | 52c37d2abb2d19084d6bda69c10773ed87701135 | [
"MIT"
] | 2 | 2017-12-07T10:57:47.000Z | 2018-02-04T09:01:05.000Z | config/test.exs | niku/nigiwaiki | 52c37d2abb2d19084d6bda69c10773ed87701135 | [
"MIT"
] | null | null | null | config/test.exs | niku/nigiwaiki | 52c37d2abb2d19084d6bda69c10773ed87701135 | [
"MIT"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :nigiwiki, NigiwikiWeb.Endpoint,
  http: [port: 4001],
  server: false

# Print only warnings and errors during test
config :logger, level: :warn

# Configure your database. The password falls back to "postgres" unless the
# CI environment supplies TRAVIS_POSTGRESQL_PASSWORD; the SQL Sandbox pool
# isolates database state between tests.
config :nigiwiki, Nigiwiki.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: "postgres",
  password: System.get_env("TRAVIS_POSTGRESQL_PASSWORD") || "postgres",
  database: "nigiwiki_test",
  hostname: "localhost",
  pool: Ecto.Adapters.SQL.Sandbox
| 27.35 | 71 | 0.744059 |
ff08090a8fe987b1f62594371924042c8cc3085c | 899 | ex | Elixir | lib/glimesh/api/scopes.ex | MemoryLeakDeath/glimesh.tv | 1462c4b939da899f5e3f67c3f28850025d59a10f | [
"MIT"
] | null | null | null | lib/glimesh/api/scopes.ex | MemoryLeakDeath/glimesh.tv | 1462c4b939da899f5e3f67c3f28850025d59a10f | [
"MIT"
] | null | null | null | lib/glimesh/api/scopes.ex | MemoryLeakDeath/glimesh.tv | 1462c4b939da899f5e3f67c3f28850025d59a10f | [
"MIT"
] | null | null | null | defmodule Glimesh.Api.Scopes do
  @moduledoc """
  Glimesh Scopes Policy
  """

  @behaviour Bodyguard.Policy

  alias Glimesh.Accounts.User
  alias Glimesh.Api.Access

  # Bodyguard.Policy callback: decides whether an API `Access` struct is
  # permitted to use the given scope.
  def authorize(:public, %Access{} = ua, _params), do: scope_check(ua, :public)

  # :email additionally requires that the token's owner is the same user
  # whose email is being accessed.
  def authorize(:email, %Access{} = ua, %User{} = accessing_user) do
    scope_check(ua, :email) && ua.user.id == accessing_user.id
  end

  def authorize(:chat, %Access{} = ua, _params), do: scope_check(ua, :chat)

  def authorize(:streamkey, %Access{} = ua, _params), do: scope_check(ua, :streamkey)

  def authorize(:follow, %Access{} = ua, _params), do: scope_check(ua, :follow)

  # Stream mutations are reserved for admin access tokens.
  def authorize(:stream_mutations, %Access{is_admin: true}, _params) do
    true
  end

  # Default deny for anything unmatched.
  def authorize(_, _, _), do: false

  defp scope_check(%Access{} = ua, scope) do
    # Verifies the key exists AND is true
    Map.get(ua.scopes, scope, false) == true
  end
end
| 28.09375 | 85 | 0.677419 |
ff0818ef33f350a08c04bec5f08fa191d6e801fe | 22,581 | exs | Elixir | test/io/sauce_binary_test.exs | nocursor/saucexages | 33e986a652306b2c54ad4891db7a27d78ed0d7cf | [
"MIT"
] | 7 | 2018-11-01T15:47:05.000Z | 2021-05-19T10:07:23.000Z | test/io/sauce_binary_test.exs | nocursor/saucexages | 33e986a652306b2c54ad4891db7a27d78ed0d7cf | [
"MIT"
] | null | null | null | test/io/sauce_binary_test.exs | nocursor/saucexages | 33e986a652306b2c54ad4891db7a27d78ed0d7cf | [
"MIT"
] | null | null | null | Code.require_file("test/support/sauce_helpers.exs")
defmodule Saucexages.SauceBinaryTest do
use ExUnit.Case, async: true
require Saucexages.IO.SauceBinary
require Saucexages.Sauce
alias Saucexages.IO.SauceBinary
alias Saucexages.Sauce
  # NOTE(review): these tests rely on binary fixtures resolved via
  # SaucePack.path/1 (test/support/sauce_helpers.exs). The :ansi fixture
  # presumably carries a SAUCE record plus a 5-line comment block, and
  # :ansi_nocomments a SAUCE record only -- confirm against the fixtures.
  # -- SauceBinary.split_all/1 --------------------------------------------
  test "split_all/1 splits a binary containing a SAUCE into its 3 parts" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    {contents_bin, sauce_bin, comment_bin} = SauceBinary.split_all(ansi_bin)
    assert is_binary(contents_bin)
    assert is_binary(sauce_bin)
    assert is_binary(comment_bin)
    refute contents_bin == <<>>
    refute sauce_bin == <<>>
    refute comment_bin == <<>>
    assert :binary.part(sauce_bin, 0, 5) == Sauce.sauce_id()
    assert :binary.part(comment_bin, 0, 5) == Sauce.comment_id()
    assert byte_size(sauce_bin) == Sauce.sauce_record_byte_size()
    assert byte_size(comment_bin) >= Sauce.minimum_comment_block_byte_size()
    assert byte_size(contents_bin) < byte_size(ansi_bin)
    # The three parts must account for every byte of the input.
    assert byte_size(contents_bin) + byte_size(sauce_bin) + byte_size(comment_bin) == byte_size(ansi_bin)
  end
  test "split_all/1 splits a binary without comments" do
    ansi_bin = SaucePack.path(:ansi_nocomments) |> File.read!()
    {contents_bin, sauce_bin, comment_bin} = SauceBinary.split_all(ansi_bin)
    assert is_binary(contents_bin)
    assert is_binary(sauce_bin)
    assert is_binary(comment_bin)
    refute contents_bin == <<>>
    refute sauce_bin == <<>>
    assert comment_bin == <<>>
    assert :binary.part(sauce_bin, 0, 5) == Sauce.sauce_id()
    assert byte_size(sauce_bin) == Sauce.sauce_record_byte_size()
    assert byte_size(contents_bin) + byte_size(sauce_bin) + byte_size(comment_bin) == byte_size(ansi_bin)
  end
  test "split_all/1 splits a binary without a SAUCE" do
    bin = SaucePack.path(:no_sauce) |> File.read!()
    {contents_bin, sauce_bin, comment_bin} = SauceBinary.split_all(bin)
    assert is_binary(contents_bin)
    assert is_binary(sauce_bin)
    assert is_binary(comment_bin)
    refute contents_bin == <<>>
    assert sauce_bin == <<>>
    assert comment_bin == <<>>
    assert byte_size(contents_bin) + byte_size(sauce_bin) + byte_size(comment_bin) == byte_size(bin)
  end
  # -- SauceBinary.split_sauce/1 ------------------------------------------
  test "split_sauce/1 splits a binary into only its sauce components" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    {sauce_bin, comment_bin} = SauceBinary.split_sauce(ansi_bin)
    assert is_binary(sauce_bin)
    assert is_binary(comment_bin)
    refute sauce_bin == <<>>
    refute comment_bin == <<>>
    assert :binary.part(sauce_bin, 0, 5) == Sauce.sauce_id()
    assert :binary.part(comment_bin, 0, 5) == Sauce.comment_id()
    assert byte_size(sauce_bin) == Sauce.sauce_record_byte_size()
    assert byte_size(comment_bin) >= Sauce.minimum_comment_block_byte_size()
  end
  test "split_sauce/1 splits a binary without SAUCE comments" do
    bin = SaucePack.path(:ansi_nocomments) |> File.read!()
    {sauce_bin, comment_bin} = SauceBinary.split_sauce(bin)
    assert is_binary(sauce_bin)
    assert is_binary(comment_bin)
    refute sauce_bin == <<>>
    assert comment_bin == <<>>
    assert :binary.part(sauce_bin, 0, 5) == Sauce.sauce_id()
    assert byte_size(sauce_bin) == Sauce.sauce_record_byte_size()
  end
  test "split_sauce/1 splits a binary without a SAUCE" do
    bin = SaucePack.path(:no_sauce) |> File.read!()
    {sauce_bin, comment_bin} = SauceBinary.split_sauce(bin)
    assert is_binary(sauce_bin)
    assert is_binary(comment_bin)
    assert sauce_bin == <<>>
    assert comment_bin == <<>>
  end
  # -- SauceBinary.split_record/1 -----------------------------------------
  test "split_record/1 splits a binary containing a SAUCE into its sauce record and contents" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    {contents_bin, sauce_bin} = SauceBinary.split_record(ansi_bin)
    assert is_binary(contents_bin)
    assert is_binary(sauce_bin)
    refute contents_bin == <<>>
    refute sauce_bin == <<>>
    assert :binary.part(sauce_bin, 0, 5) == Sauce.sauce_id()
    assert byte_size(sauce_bin) == Sauce.sauce_record_byte_size()
    assert byte_size(contents_bin) < byte_size(ansi_bin)
  end
  test "split_record/1 splits a binary without comments" do
    ansi_bin = SaucePack.path(:ansi_nocomments) |> File.read!()
    {contents_bin, sauce_bin} = SauceBinary.split_record(ansi_bin)
    assert is_binary(contents_bin)
    assert is_binary(sauce_bin)
    refute contents_bin == <<>>
    refute sauce_bin == <<>>
    assert :binary.part(sauce_bin, 0, 5) == Sauce.sauce_id()
    assert byte_size(sauce_bin) == Sauce.sauce_record_byte_size()
    assert byte_size(contents_bin) < byte_size(ansi_bin)
  end
  test "split_record/1 splits a binary without a SAUCE" do
    bin = SaucePack.path(:no_sauce) |> File.read!()
    {contents_bin, sauce_bin} = SauceBinary.split_record(bin)
    assert is_binary(contents_bin)
    assert is_binary(sauce_bin)
    refute contents_bin == <<>>
    assert sauce_bin == <<>>
    # Without a SAUCE record, the whole input is contents.
    assert byte_size(contents_bin) == byte_size(bin)
  end
  # -- SauceBinary.split_with/2 -------------------------------------------
  test "split_with/1 splits a binary containing a SAUCE into its 3 parts, using a pre-determined comment line count to split comments" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    {contents_bin, sauce_bin, comment_bin} = SauceBinary.split_with(ansi_bin, 5)
    assert is_binary(contents_bin)
    assert is_binary(sauce_bin)
    assert is_binary(comment_bin)
    refute contents_bin == <<>>
    refute sauce_bin == <<>>
    refute comment_bin == <<>>
    assert :binary.part(sauce_bin, 0, 5) == Sauce.sauce_id()
    assert :binary.part(comment_bin, 0, 5) == Sauce.comment_id()
    assert byte_size(sauce_bin) == Sauce.sauce_record_byte_size()
    assert byte_size(comment_bin) >= Sauce.minimum_comment_block_byte_size()
    assert byte_size(contents_bin) < byte_size(ansi_bin)
    assert byte_size(contents_bin) + byte_size(sauce_bin) + byte_size(comment_bin) == byte_size(ansi_bin)
  end
  test "split_with/1 will not return comments if the comment lines count does not match the actual comment lines" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    # there are 5 lines, not 2
    {contents_bin, sauce_bin, comment_bin} = SauceBinary.split_with(ansi_bin, 2)
    assert is_binary(contents_bin)
    assert is_binary(sauce_bin)
    assert is_binary(comment_bin)
    refute contents_bin == <<>>
    refute sauce_bin == <<>>
    assert comment_bin == <<>>
    # ensure we still got the SAUCE despite the mistake
    assert :binary.part(sauce_bin, 0, 5) == Sauce.sauce_id()
    assert byte_size(sauce_bin) == Sauce.sauce_record_byte_size()
  end
  # -- contents accessors and predicate helpers ---------------------------
  test "clean_contents/1 returns contents before any EOF character, if one exists" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    contents_bin = SauceBinary.clean_contents(ansi_bin)
    assert is_binary(contents_bin)
    refute contents_bin == <<>>
    assert byte_size(contents_bin) < byte_size(ansi_bin)
    # clean_contents/1 strips the EOF marker entirely.
    assert :binary.match(contents_bin, <<Sauce.eof_character()>>) == :nomatch
    assert SauceBinary.clean_contents(<<1, 2, 3>>) == <<1, 2, 3>>
  end
  test "contents/1 returns the contents in a SAUCE file" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    {:ok, contents_bin} = SauceBinary.contents(ansi_bin)
    assert is_binary(contents_bin)
    refute contents_bin == <<>>
    assert byte_size(contents_bin) < byte_size(ansi_bin)
    # maintains the EOF character as part of the contents
    refute :binary.match(contents_bin, <<Sauce.eof_character()>>) == :nomatch
    assert SauceBinary.contents(<<1, 2, 3>>) == {:ok, <<1, 2, 3>>}
    # Force eof termination, for example when we want to re-write a binary to disk or treat EOF uniformly elsewhere
    assert SauceBinary.contents(<<1, 2, 3>>, true) == {:ok, <<1, 2, 3, Sauce.eof_character()>>}
  end
  test "contents_size/1 returns the contents size in a SAUCE file" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    contents_size = SauceBinary.contents_size(ansi_bin)
    assert contents_size > 0
    assert contents_size < byte_size(ansi_bin)
  end
  # maybe_* helpers return the matched binary, or <<>> when not a match.
  test "maybe_sauce_record/1 returns data if the given binary is a SAUCE record" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    {:ok, sauce_bin} = SauceBinary.sauce_record(ansi_bin)
    assert SauceBinary.maybe_sauce_record(sauce_bin)
    assert SauceBinary.maybe_sauce_record(<<1, 2, 3>>) == <<>>
  end
  test "maybe_comments/1 returns data if the given binary is a SAUCE comments block" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    {:ok, {comments_bin, _}} = SauceBinary.comments(ansi_bin)
    assert SauceBinary.maybe_comments(comments_bin)
    assert SauceBinary.maybe_comments(<<1, 2, 3>>) == <<>>
  end
  # matches_* helpers return a plain boolean.
  test "matches_sauce?/1 returns if the given binary is a SAUCE record" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    {:ok, sauce_bin} = SauceBinary.sauce_record(ansi_bin)
    assert SauceBinary.matches_sauce?(sauce_bin) == true
    assert SauceBinary.matches_sauce?(<<1, 2, 3>>) == false
  end
  test "matches_comment_block?/1 returns if the given binary is a SAUCE comments block" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    {:ok, {comments_bin, _}} = SauceBinary.comments(ansi_bin)
    assert SauceBinary.matches_comment_block?(comments_bin) == true
    assert SauceBinary.matches_comment_block?(<<1, 2, 3>>) == false
  end
  # verify_* helpers return :ok or a tagged error.
  test "verify_sauce_record/1 checks if the given binary is a SAUCE record" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    {:ok, sauce_bin} = SauceBinary.sauce_record(ansi_bin)
    assert SauceBinary.verify_sauce_record(sauce_bin) == :ok
    assert SauceBinary.verify_sauce_record(<<1, 2, 3>>) == {:error, :no_sauce}
  end
  test "verify_comment_block/1 returns if the given binary is a SAUCE comments block" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    {:ok, {comments_bin, _}} = SauceBinary.comments(ansi_bin)
    assert SauceBinary.verify_comment_block(comments_bin) == :ok
    assert SauceBinary.verify_comment_block(<<1, 2, 3>>) == {:error, :no_comments}
  end
  # -- raw field access (read_field!/read_field/write_field) --------------
  # Expected values below mirror the raw bytes of the :ansi fixture's SAUCE
  # record; string fields are space-padded to their fixed SAUCE widths.
  test "read_field!/2 dynamically reads and returns raw SAUCE fields" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    {:ok, sauce_bin} = SauceBinary.sauce_record(ansi_bin)
    assert SauceBinary.read_field!(sauce_bin, :sauce_id) == "SAUCE"
    assert SauceBinary.read_field!(sauce_bin, :version) == "00"
    assert SauceBinary.read_field!(sauce_bin, :title) == "ACiD 1994 Member/Board Listing "
    # NOTE(review): duplicate of the :version assertion above; kept as-is.
    assert SauceBinary.read_field!(sauce_bin, :version) == "00"
    assert SauceBinary.read_field!(sauce_bin, :author) == " "
    assert SauceBinary.read_field!(sauce_bin, :group) == "ACiD Productions "
    assert SauceBinary.read_field!(sauce_bin, :date) == "19940831"
    # Numeric fields come back as raw little-endian bytes, not integers.
    assert SauceBinary.read_field!(sauce_bin, :file_size) == <<196, 34, 0, 0>>
    assert SauceBinary.read_field!(sauce_bin, :data_type) == <<1>>
    assert SauceBinary.read_field!(sauce_bin, :file_type) == <<1>>
    assert SauceBinary.read_field!(sauce_bin, :t_info_1) == <<80, 0>>
    assert SauceBinary.read_field!(sauce_bin, :t_info_2) == <<97, 0>>
    assert SauceBinary.read_field!(sauce_bin, :t_info_3) == <<16, 0>>
    assert SauceBinary.read_field!(sauce_bin, :t_info_4) == <<0, 0>>
    assert SauceBinary.read_field!(sauce_bin, :comment_lines) == <<5>>
    assert SauceBinary.read_field!(sauce_bin, :t_flags) == <<0>>
    assert SauceBinary.read_field!(sauce_bin, :t_info_s) == <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>
    # The bang variant raises on a non-SAUCE binary.
    assert_raise ArgumentError, fn ->
      SauceBinary.read_field!(<<1, 2, 3>>, :sauce_id) end
  end
  test "read_field/2 dynamically reads and returns raw SAUCE fields" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    {:ok, sauce_bin} = SauceBinary.sauce_record(ansi_bin)
    assert SauceBinary.read_field(sauce_bin, :sauce_id) == {:ok, "SAUCE"}
    assert SauceBinary.read_field(sauce_bin, :version) == {:ok, "00"}
    assert SauceBinary.read_field(sauce_bin, :title) == {:ok, "ACiD 1994 Member/Board Listing "}
    assert SauceBinary.read_field(sauce_bin, :version) == {:ok, "00"}
    assert SauceBinary.read_field(sauce_bin, :author) == {:ok, " "}
    assert SauceBinary.read_field(sauce_bin, :group) == {:ok, "ACiD Productions "}
    assert SauceBinary.read_field(sauce_bin, :date) == {:ok, "19940831"}
    assert SauceBinary.read_field(sauce_bin, :file_size) == {:ok, <<196, 34, 0, 0>>}
    assert SauceBinary.read_field(sauce_bin, :data_type) == {:ok, <<1>>}
    assert SauceBinary.read_field(sauce_bin, :file_type) == {:ok, <<1>>}
    assert SauceBinary.read_field(sauce_bin, :t_info_1) == {:ok, <<80, 0>>}
    assert SauceBinary.read_field(sauce_bin, :t_info_2) == {:ok, <<97, 0>>}
    assert SauceBinary.read_field(sauce_bin, :t_info_3) == {:ok, <<16, 0>>}
    assert SauceBinary.read_field(sauce_bin, :t_info_4) == {:ok, <<0, 0>>}
    assert SauceBinary.read_field(sauce_bin, :comment_lines) == {:ok, <<5>>}
    assert SauceBinary.read_field(sauce_bin, :t_flags) == {:ok, <<0>>}
    assert SauceBinary.read_field(sauce_bin, :t_info_s) == {:ok, <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>}
    # Truncated or invalid records yield a tagged error instead of raising.
    assert SauceBinary.read_field(<<"SAUCE">>, :sauce_id) == {:error, :no_sauce}
    assert SauceBinary.read_field(<<"SAUCE00">>, :version) == {:error, :no_sauce}
    assert SauceBinary.read_field(<<"SAUCE">>, :title) == {:error, :no_sauce}
    assert SauceBinary.read_field(<<1, 2, 3>>, :author) == {:error, :no_sauce}
    assert SauceBinary.read_field(<<>>, :group) == {:error, :no_sauce}
  end
  test "write_field/3 dynamically writes raw SAUCE fields" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    {:ok, sauce_bin} = SauceBinary.sauce_record(ansi_bin)
    # Over-long values are truncated to the field's fixed width (35 bytes).
    assert {:ok, edited_bin} = SauceBinary.write_field(sauce_bin, :title, "ACiD 1994 Member/Board Listing Modified!")
    assert SauceBinary.read_field(edited_bin, :title) == {:ok, "ACiD 1994 Member/Board Listing Modi"}
    author = String.pad_trailing("TASManiac", Sauce.field_size(:author), <<32>>)
    assert {:ok, edited_bin2} = SauceBinary.write_field(sauce_bin, :author, author)
    assert SauceBinary.read_field(edited_bin2, :author) == {:ok, author}
    assert {:error, :no_sauce} = SauceBinary.write_field(<<0, 1>>, :title, "ACiD 1994 Member/Board Listing Modified!")
    # fields data must be of the proper size as this is a raw interface to the binary
    assert_raise ArgumentError, fn -> SauceBinary.write_field(sauce_bin, :title, "Jed, ruler of Ansimation!!!!!!!!") end
  end
  # -- SAUCE block retrieval ----------------------------------------------
  test "sauce_handle/1 retrieves a SAUCE record binary and a line count used as a pointer to retrieve comments" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    assert {:ok, {sauce_bin, comment_lines}} = SauceBinary.sauce_handle(ansi_bin)
    assert comment_lines == 5
    refute sauce_bin == <<>>
    assert :binary.part(sauce_bin, 0, 5) == Sauce.sauce_id()
    assert byte_size(sauce_bin) == Sauce.sauce_record_byte_size()
  end
  test "sauce/1 retrieves a SAUCE block binary and returns it as a binary of sauce record and comment block" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    assert {:ok, {sauce_bin, comment_bin}} = SauceBinary.sauce(ansi_bin)
    refute comment_bin == <<>>
    refute sauce_bin == <<>>
    assert :binary.part(sauce_bin, 0, 5) == Sauce.sauce_id()
    assert :binary.part(comment_bin, 0, 5) == Sauce.comment_id()
    assert byte_size(sauce_bin) == Sauce.sauce_record_byte_size()
    assert byte_size(comment_bin) == Sauce.comment_block_byte_size(5)
  end
  test "sauce_record/1 returns a SAUCE record binary" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    assert {:ok, sauce_bin} = SauceBinary.sauce_record(ansi_bin)
    refute sauce_bin == <<>>
    assert :binary.part(sauce_bin, 0, 5) == Sauce.sauce_id()
    assert byte_size(sauce_bin) == Sauce.sauce_record_byte_size()
    assert SauceBinary.sauce_record(<<1, 2, 3>>) == {:error, :no_sauce}
  end
  test "sauce_record!/1 returns a SAUCE record binary with no error info" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    assert sauce_bin = SauceBinary.sauce_record!(ansi_bin)
    refute sauce_bin == <<>>
    assert :binary.part(sauce_bin, 0, 5) == Sauce.sauce_id()
    assert byte_size(sauce_bin) == Sauce.sauce_record_byte_size()
    # The bang variant returns an empty binary rather than an error tuple.
    assert SauceBinary.sauce_record!(<<1, 2, 3>>) == <<>>
  end
  test "sauce?/1 check if a binary has a SAUCE record" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    sauce_bin = SauceBinary.sauce_record!(ansi_bin)
    # It is a SAUCE if it either *is* a SAUCE or *has* a SAUCE
    assert SauceBinary.sauce?(sauce_bin) == true
    assert SauceBinary.sauce?(ansi_bin) == true
    # not SAUCE
    assert SauceBinary.sauce?(<<>>) == false
    assert SauceBinary.sauce?(<<1, 2, 3>>) == false
    assert SauceBinary.sauce?(<<"SAUCE00">>) == false
    # length check
    assert SauceBinary.sauce?(:binary.copy(<<0>>, Sauce.sauce_record_byte_size())) == false
    # SAUCE must be at the end of a binary
    assert SauceBinary.sauce?(<<sauce_bin::binary-size(Sauce.sauce_record_byte_size()), 0>>) == false
  end
  test "comments/1 returns a SAUCE comment block with the number of lines that should be present" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    assert {:ok, {comments_bin, lines}} = SauceBinary.comments(ansi_bin)
    assert lines == 5
    refute comments_bin == <<>>
    assert :binary.part(comments_bin, 0, 5) == Sauce.comment_id()
    assert byte_size(comments_bin) == Sauce.comment_block_byte_size(lines)
    assert SauceBinary.comments(<<1, 2, 3>>) == {:error, :no_sauce}
  end
  test "comments/1 returns an error if no comments are present in a valid SAUCE block" do
    ansi_bin = SaucePack.path(:ansi_nocomments) |> File.read!()
    assert {:error, :no_comments} = SauceBinary.comments(ansi_bin)
  end
  test "comments?/1 returns if a SAUCE block has a comment block" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    assert SauceBinary.comments?(ansi_bin) == true
    filler = :binary.copy(<<"A">>, 64)
    # a comments fragment is still not a valid comments block because it can't exist in isolation without a SAUCE
    assert SauceBinary.comments?(<<Sauce.comment_id(), filler::binary-size(64)>>) == false
    ansi_bin2 = SaucePack.path(:ansi_nocomments) |> File.read!()
    assert SauceBinary.comments?(ansi_bin2) == false
    assert SauceBinary.comments?(<<>>) == false
    # too small
    filler = :binary.copy(<<0>>, 63)
    assert SauceBinary.comments?(<<Sauce.comment_id(), filler::binary-size(63)>>) == false
    assert SauceBinary.comments?(:binary.copy(<<0>>, Sauce.minimum_comment_block_byte_size())) == false
    # not a valid SAUCE anymore
    assert SauceBinary.comments?(<<ansi_bin::binary, 0>>) == false
  end
test "comments_fragment/1 returns a SAUCE comment block with the number of lines that should be present" do
ansi_bin = SaucePack.path(:ansi) |> File.read!()
assert {:ok, comments_bin} = SauceBinary.comments_fragment(ansi_bin)
refute comments_bin == <<>>
assert :binary.part(comments_bin, 0, 5) == Sauce.comment_id()
assert byte_size(comments_bin) == Sauce.comment_block_byte_size(5)
assert SauceBinary.comments_fragment(<<1, 2, 3>>) == {:error, :no_sauce}
filler = :binary.copy(<<"A">>, 64)
frag = <<Sauce.comment_id(), filler::binary-size(64)>>
assert {:ok, comments_frag_bin} = SauceBinary.comments_fragment(frag)
assert {:ok, _} = SauceBinary.comments_fragment(<<frag::binary, 0>>)
assert {:error, :no_sauce} = SauceBinary.comments_fragment(filler)
ansi_bin2 = SaucePack.path(:ansi_nocomments) |> File.read!()
assert {:error, :no_comments} = SauceBinary.comments_fragment(ansi_bin2)
end
  test "comments_fragment?/1 returns if a binary is a comment block fragment" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    assert SauceBinary.comments_fragment?(ansi_bin) == true
    assert SauceBinary.comments_fragment?(<<1, 2, 3>>) == false
    filler = :binary.copy(<<"A">>, 64)
    frag = <<Sauce.comment_id(), filler::binary-size(64)>>
    assert SauceBinary.comments_fragment?(frag) == true
    # Trailing bytes after a fragment do not invalidate it.
    assert SauceBinary.comments_fragment?(<<frag::binary, 0>>) == true
    assert SauceBinary.comments_fragment?(filler) == false
    ansi_bin2 = SaucePack.path(:ansi_nocomments) |> File.read!()
    assert SauceBinary.comments_fragment?(ansi_bin2) == false
  end
  # count_comment_lines/1 scans the comment block itself rather than
  # trusting the record's comment_lines field.
  test "count_comment_lines/1 returns the number of lines that should be present in a comments block dynamically" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    assert {:ok, lines} = SauceBinary.count_comment_lines(ansi_bin)
    assert lines == 5
  end
  test "count_comment_lines/1 returns an error if no comments are present in a valid SAUCE block" do
    ansi_bin = SaucePack.path(:ansi_nocomments) |> File.read!()
    assert {:error, :no_comments} = SauceBinary.count_comment_lines(ansi_bin)
  end
  test "count_comment_lines/1 returns an error if the source binary does not contain a SAUCE" do
    assert {:error, :no_sauce} = SauceBinary.count_comment_lines(<<>>)
    assert {:error, :no_sauce} = SauceBinary.count_comment_lines(<<"SAUCE">>)
    assert {:error, :no_sauce} = SauceBinary.count_comment_lines(<<1, 2, 3>>)
  end
test "comment_lines/1 returns the number of comment lines according to the SAUCE record" do
ansi_bin = SaucePack.path(:ansi) |> File.read!()
{:ok, lines} = assert SauceBinary.comment_lines(ansi_bin)
assert lines == 5
end
  test "comment_lines/1 returns if no comments are present in a valid SAUCE block" do
    ansi_bin = SaucePack.path(:ansi_nocomments) |> File.read!()
    # A valid SAUCE with no comments reports zero lines, not an error.
    assert {:ok, 0} = SauceBinary.comment_lines(ansi_bin)
  end
  test "comment_lines/1 returns an error if the source binary does not contain a SAUCE" do
    assert {:error, :no_sauce} = SauceBinary.comment_lines(<<>>)
    assert {:error, :no_sauce} = SauceBinary.comment_lines(<<"SAUCE">>)
    assert {:error, :no_sauce} = SauceBinary.comment_lines(<<1, 2, 3>>)
  end
  test "comment_block_lines/1 returns a list of comment line binaries" do
    ansi_bin = SaucePack.path(:ansi) |> File.read!()
    assert {:ok, line_bins} = SauceBinary.comment_block_lines(ansi_bin)
    assert Enum.count(line_bins) == 5
    assert SauceBinary.comment_block_lines(<<1, 2, 3>>) == {:error, :no_sauce}
  end
  test "eof_terminate/1 adds an EOF character to the end of a binary" do
    eof_bin = SauceBinary.eof_terminate(<<1, 2>>)
    assert byte_size(eof_bin) == 3
    assert :binary.last(eof_bin) == Sauce.eof_character()
    assert SauceBinary.eof_terminated?(eof_bin)
  end
end | 43.592664 | 137 | 0.695098 |
ff0892e1fe491546dc6df4a861bce2c8cd6865bf | 4,820 | ex | Elixir | lib/dark_ecto/sql_formatter.ex | dark-elixir/dark_ecto | 006a52e6d1a807e8f3c0f00a29780dc2967e57d9 | [
"Apache-2.0"
] | null | null | null | lib/dark_ecto/sql_formatter.ex | dark-elixir/dark_ecto | 006a52e6d1a807e8f3c0f00a29780dc2967e57d9 | [
"Apache-2.0"
] | null | null | null | lib/dark_ecto/sql_formatter.ex | dark-elixir/dark_ecto | 006a52e6d1a807e8f3c0f00a29780dc2967e57d9 | [
"Apache-2.0"
] | null | null | null | defmodule DarkEcto.SQLFormatter do
@moduledoc """
SQL formatter.
See (Original Source)[https://github.com/mrdziuban/sql-formatter/blob/master/elixirscript/src/SQLFormatter.exjs]
"""
defmodule T do
@moduledoc false
defstruct str: nil,
shift_arr: [],
tab: nil,
arr: [],
parens_level: 0,
deep: 0
end
  # Marker injected into the SQL text to denote forced line breaks; chosen
  # to be unlikely to appear in real SQL.
  @sep "~::~"
  # Spaces per indentation level.
  @num_spaces 2
@doc """
Format an SQL `binary`.
"""
@spec format(String.t()) :: String.t()
def format(binary) when is_binary(binary) do
tab = String.duplicate(" ", @num_spaces)
split_by_quotes =
binary
|> String.replace(~r/\s+/, " ")
|> String.replace(~r/'/, "#{@sep}'")
|> String.split(@sep)
input = %T{
str: "",
shift_arr: create_shift_arr(tab),
tab: tab,
arr:
split_by_quotes
|> length()
|> (fn l -> upto(l - 1) end).()
|> Enum.map(fn i -> split_if_even(i, Enum.at(split_by_quotes, i), tab) end)
|> Enum.reduce([], fn x, acc -> Enum.concat(acc, x) end)
}
formatted_query =
input
|> gen_output(0, length(input.arr))
|> Map.get(:str)
|> String.replace(~r/\s+\n/, "\n")
|> String.replace(~r/\n+/, "\n")
|> String.trim()
"""
#{formatted_query}
"""
end
  # Walks the fragment list from index `i` up to `max`, appending each
  # fragment to the accumulated output with indentation matching the
  # current nesting depth.
  defp gen_output(acc, i, max) when i < max do
    original_el = Enum.at(acc.arr, i)
    # Update the parenthesis balance so we know when a subquery closes.
    parens_level = subquery_level(original_el, acc.parens_level)
    # SELECT/SET column lists: put each comma-separated item on its own
    # double-indented line.
    arr =
      if Regex.match?(~r/SELECT|SET/, original_el) do
        List.replace_at(
          acc.arr,
          i,
          Regex.replace(~r/,\s+/, original_el, ",\n#{acc.tab}#{acc.tab}")
        )
      else
        acc.arr
      end
    el = Enum.at(arr, i)
    {str, deep} =
      if Regex.match?(~r/\(\s*SELECT/, el) do
        # Entering a subquery: emit one level deeper and remember it.
        {"#{acc.str}#{Enum.at(acc.shift_arr, acc.deep + 1)}#{el}", acc.deep + 1}
      else
        # Quoted literals are appended verbatim; everything else starts a
        # fresh line at the current depth. Depth unwinds once the
        # parenthesis balance drops below 1.
        {if(Regex.match?(~r/'/, el),
           do: "#{acc.str}#{el}",
           else: "#{acc.str}#{Enum.at(acc.shift_arr, acc.deep)}#{el}"
         ), if(parens_level < 1 && acc.deep != 0, do: acc.deep - 1, else: acc.deep)}
      end
    gen_output(%{acc | str: str, arr: arr, parens_level: parens_level, deep: deep}, i + 1, max)
  end
  # Base case: every fragment has been consumed.
  defp gen_output(acc, _, _), do: acc
defp upto(i), do: upto([], i)
defp upto(arr, i) when i < 0, do: arr
defp upto(arr, i), do: upto([i | arr], i - 1)
defp create_shift_arr(tab), do: Enum.map(upto(99), &"\n#{String.duplicate(tab, &1)}")
defp subquery_level(str, level) do
level -
(String.length(Regex.replace(~r/\(/, str, "")) -
String.length(Regex.replace(~r/\)/, str, "")))
end
  # Ordered {regex, replacement} pairs applied by do_replace/2. The @sep
  # marker injected here is later used by split_sql/2 to break the SQL into
  # one fragment per clause; replacements that prepend `tab` produce an
  # extra indentation level. Order matters: e.g. the multi-word JOIN rules
  # match the @sep left behind by the bare JOIN rule, and the final rule
  # collapses runs of repeated markers into one.
  defp all_replacements(tab) do
    [
      {~r/ AND /i, @sep <> tab <> "AND "},
      {~r/ BETWEEN /i, @sep <> tab <> "BETWEEN "},
      {~r/ CASE /i, @sep <> tab <> "CASE "},
      {~r/ ELSE /i, @sep <> tab <> "ELSE "},
      {~r/ END /i, @sep <> tab <> "END "},
      {~r/ FROM /i, @sep <> "FROM "},
      {~r/ GROUP\s+BY /i, @sep <> "GROUP BY "},
      {~r/ HAVING /i, @sep <> "HAVING "},
      {~r/ IN /i, " IN "},
      {~r/ JOIN /i, @sep <> "JOIN "},
      {~r/ CROSS(~::~)+JOIN /i, @sep <> "CROSS JOIN "},
      {~r/ INNER(~::~)+JOIN /i, @sep <> "INNER JOIN "},
      {~r/ LEFT(~::~)+JOIN /i, @sep <> "LEFT JOIN "},
      {~r/ RIGHT(~::~)+JOIN /i, @sep <> "RIGHT JOIN "},
      {~r/ ON /i, @sep <> tab <> "ON "},
      {~r/ OR /i, @sep <> tab <> "OR "},
      {~r/ ORDER\s+BY /i, @sep <> "ORDER BY "},
      {~r/ OVER /i, @sep <> tab <> "OVER "},
      {~r/\(\s*SELECT /i, @sep <> "(SELECT "},
      {~r/\)\s*SELECT /i, ")" <> @sep <> "SELECT "},
      {~r/ THEN /i, " THEN" <> @sep <> tab},
      {~r/ UNION /i, @sep <> "UNION" <> @sep},
      {~r/ USING /i, @sep <> "USING "},
      {~r/ WHEN /i, @sep <> tab <> "WHEN "},
      {~r/ WHERE /i, @sep <> "WHERE "},
      {~r/ WITH /i, @sep <> "WITH "},
      {~r/ SET /i, @sep <> "SET "},
      {~r/ ALL /i, " ALL "},
      {~r/ AS /i, " AS "},
      {~r/ ASC /i, " ASC "},
      {~r/ DESC /i, " DESC "},
      {~r/ DISTINCT /i, " DISTINCT "},
      {~r/ EXISTS /i, " EXISTS "},
      {~r/ NOT /i, " NOT "},
      {~r/ NULL /i, " NULL "},
      {~r/ LIKE /i, " LIKE "},
      {~r/\s*SELECT /i, "SELECT "},
      {~r/\s*UPDATE /i, "UPDATE "},
      {~r/\s*DELETE /i, "DELETE "},
      {~r/(~::~)+/, @sep}
    ]
  end
defp do_replace([], str), do: str
defp do_replace([h], str), do: Regex.replace(elem(h, 0), str, elem(h, 1))
defp do_replace([h | t], str), do: do_replace(t, Regex.replace(elem(h, 0), str, elem(h, 1)))
defp split_sql(str, tab) do
tab
|> all_replacements()
|> do_replace(Regex.replace(~r/\s+/, str, " "))
|> String.split(@sep)
end
defp split_if_even(i, str, tab) when rem(i, 2) == 0, do: split_sql(str, tab)
defp split_if_even(_, str, _), do: [str]
end
| 29.570552 | 114 | 0.479876 |
ff08a42bafd675434dc72bbbf12a9f5ad18f9d9b | 2,393 | exs | Elixir | test/ex_bitmex/rest/orders/amend_test.exs | trexnix/ex_bitmex | 7669b6bb6dadaf5de57b49c3a571e36d6fa712b9 | [
"MIT"
] | null | null | null | test/ex_bitmex/rest/orders/amend_test.exs | trexnix/ex_bitmex | 7669b6bb6dadaf5de57b49c3a571e36d6fa712b9 | [
"MIT"
] | null | null | null | test/ex_bitmex/rest/orders/amend_test.exs | trexnix/ex_bitmex | 7669b6bb6dadaf5de57b49c3a571e36d6fa712b9 | [
"MIT"
] | null | null | null | defmodule ExBitmex.Rest.Orders.AmendTest do
use ExUnit.Case, async: false
use ExVCR.Mock, adapter: ExVCR.Adapter.Hackney
setup_all do
HTTPoison.start()
:ok
end
@credentials %ExBitmex.Credentials{
api_key: System.get_env("BITMEX_API_KEY"),
api_secret: System.get_env("BITMEX_SECRET")
}
test ".amend returns the order response" do
use_cassette "rest/orders/amend_ok" do
assert {:ok, order, _} =
ExBitmex.Rest.Orders.amend(
@credentials,
%{
orderID: "8d6f2649-7477-4db5-e32a-d8d5bf99dd9b",
leavesQty: 3
}
)
assert order == %ExBitmex.Order{
side: "Buy",
transact_time: "2018-11-30T06:06:28.444Z",
ord_type: "Limit",
display_qty: nil,
stop_px: nil,
settl_currency: "XBt",
triggered: "",
order_id: "2e10cc61-f94d-4be2-97e0-14669dda2938",
currency: "USD",
peg_offset_value: nil,
price: 1,
peg_price_type: "",
text: "Amended leavesQty: Amended via API.\nSubmitted via API.",
working_indicator: true,
multi_leg_reporting_type: "SingleSecurity",
timestamp: "2018-11-30T06:06:28.444Z",
cum_qty: 0,
ord_rej_reason: "",
avg_px: nil,
order_qty: 3,
simple_order_qty: nil,
ord_status: "New",
time_in_force: "GoodTillCancel",
cl_ord_link_id: "",
simple_leaves_qty: nil,
leaves_qty: 3,
ex_destination: "XBME",
symbol: "XBTUSD",
account: 10000,
cl_ord_id: "",
simple_cum_qty: nil,
exec_inst: "",
contingency_type: ""
}
end
end
test ".amend returns an error tuple when there is a timeout" do
use_cassette "rest/orders/amend_timeout" do
assert {:error, :timeout, nil} =
ExBitmex.Rest.Orders.amend(
@credentials,
%{
orderID: "8d6f2649-7477-4db5-e32a-d8d5bf99dd9b",
leavesQty: 3
}
)
end
end
end
| 31.077922 | 79 | 0.49603 |
ff08b5ac506c715e3539ba46710a88599d77c1c9 | 1,731 | ex | Elixir | lib/sanbase_web/graphql/middlewares/jwt_auth.ex | santiment/sanbase2 | 9ef6e2dd1e377744a6d2bba570ea6bd477a1db31 | [
"MIT"
] | 81 | 2017-11-20T01:20:22.000Z | 2022-03-05T12:04:25.000Z | lib/sanbase_web/graphql/middlewares/jwt_auth.ex | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 359 | 2017-10-15T14:40:53.000Z | 2022-01-25T13:34:20.000Z | lib/sanbase_web/graphql/middlewares/jwt_auth.ex | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 16 | 2017-11-19T13:57:40.000Z | 2022-02-07T08:13:02.000Z | defmodule SanbaseWeb.Graphql.Middlewares.JWTAuth do
@moduledoc """
Authenticate that the request contains a valid JWT token and that the user has
enough san tokens to access the data. If the san tokens are not specified
it is assumed that 0 tokens are required.
Example:
query do
field :project, :project do
arg :id, non_null(:id)
middleware SanbaseWeb.Graphql.Middlewares.JWTAuth, san_tokens: 200
resolve &ProjectResolver.project/3
end
end
This is going to require 200 SAN tokens to access the project query.
"""
@behaviour Absinthe.Middleware
alias Absinthe.Resolution
alias SanbaseWeb.Graphql.Middlewares.Helpers
@doc ~s"""
Decides whether the user has access or not.
The user must have accepted the privacy policy in order to access resources
that require JWT authentication. There are some mutations (the mutation for
accepting the privacy policy) that should not fail if the privacy policy
is not accepted - they provide a special configuration to achieve this
behaviour
The user also must have the required number of SAN tokens to access some resources.
The queries and mutations that require such SAN balance check provide a special
configuration.
"""
def call(
%Resolution{
context: %{
auth: %{
auth_method: :user_token,
current_user: current_user,
san_balance: san_balance
}
}
} = resolution,
opts
) do
Helpers.handle_user_access(current_user, san_balance, opts, resolution)
end
def call(resolution, _) do
resolution
|> Resolution.put_result({:error, :unauthorized})
end
end
| 29.338983 | 85 | 0.687464 |
ff08b865f773659b91bca5cfbe1c35d8b3dd4349 | 742 | ex | Elixir | lib/foundation_phoenix_web/gettext.ex | PanyPy/foundation_phoenix | 571aaa0b274c1428b4cf8e370777ae82c9167eb2 | [
"MIT"
] | null | null | null | lib/foundation_phoenix_web/gettext.ex | PanyPy/foundation_phoenix | 571aaa0b274c1428b4cf8e370777ae82c9167eb2 | [
"MIT"
] | 2 | 2021-03-10T11:28:36.000Z | 2021-05-11T07:00:39.000Z | lib/foundation_phoenix_web/gettext.ex | PanyPy/foundation_phoenix | 571aaa0b274c1428b4cf8e370777ae82c9167eb2 | [
"MIT"
] | null | null | null | defmodule FoundationPhoenixWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import FoundationPhoenixWeb.Gettext
# Simple translation
gettext("Here is the string to translate")
# Plural translation
ngettext("Here is the string to translate",
"Here are the strings to translate",
3)
# Domain-based translation
dgettext("errors", "Here is the error message to translate")
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :foundation_phoenix
end
| 29.68 | 72 | 0.691375 |
ff08e0e5685a594512e1384368c3028cac53707c | 7,209 | ex | Elixir | lib/logbook.ex | VoiSmart/logbook | e8166c2f4ee20e5849947b3d0c1cf3a7915fc42a | [
"Apache-2.0"
] | 2 | 2021-09-02T13:44:48.000Z | 2021-09-08T12:28:55.000Z | lib/logbook.ex | VoiSmart/logbook | e8166c2f4ee20e5849947b3d0c1cf3a7915fc42a | [
"Apache-2.0"
] | 2 | 2020-06-24T20:45:45.000Z | 2021-09-02T13:02:50.000Z | lib/logbook.ex | VoiSmart/logbook | e8166c2f4ee20e5849947b3d0c1cf3a7915fc42a | [
"Apache-2.0"
] | 1 | 2019-06-07T16:32:53.000Z | 2019-06-07T16:32:53.000Z | defmodule Logbook do
@moduledoc ~S"""
A category (or tags) based logger for Elixir.
Logbook is a wrapper aroud Elixir Logger that enables a to specify one or more tags
for each invocation in order to be able to set different log levels for each tag.
Tagging logs is useful to track log informations around different modules and
enable only one (or more) specific tags at different log levels than the default
Logger instead of having (for example) all debug logs enabled.
In the following example when calling `delete_all` and having the `:audit` tag level
set to at least `:info`, both "Deleting user..." and "Deleting domain" logs will be produced.
If only `:domain` or `:user` tags have log level set to `:info` only the corresponding logs
will be produced.
## Example
require Logbook
def delete_user(user) do
Logbook.info([:audit, :user], "Deleting user #{inspect(user)}")
# ...
end
def delete_domain(domain) do
Logbook.info([:audit, :domain], "Deleting domain #{inspect(domain)}")
# ...
end
def delete_all(user, domain) do
delete_domain(domain)
delete_user(user)
end
Log levels for each tag can be set using `Logbook.set_level/2`:
# For a single tag
Logbook.set_level(:audit, :info)
# or for many tags at once
Logbook.set_level([:audit, :user, :domain], :info)
Is possible to set the default level for all tags, by setting the `:default_tag_level`
config option for `:logbook` app (defaults to `:warning`):
import Config
config :logbook, :default_tag_level, :warning
The `:default_tag_level` option is used when Logbook sees tags for the first time
during runtime and set them internally with the above level.
As a bonus, Logbook also creates a module-level tag automatically, in order to
be able to enable log statements at once in a single module:
defmodule Foo
require Logbook
def delete_user(user) do
Logbook.info([:audit, :user], "Deleting user #{inspect(user)}")
# ...
end
def delete_domain(domain) do
Logbook.info([:audit, :domain], "Deleting domain #{inspect(domain)}")
# ...
end
end
With the above example is possible to `Logbook.set_level(Foo, :info)` to enable
all Logbook calls inside the module `Foo`.
As with `:default_tag_level` is possible to set also default module-level logging
with:
import Config
config :logbook, :default_module_level, :warning
By default `:default_module_level` is set to `:none` (no module-level logging).
`Logbook` supports all `Logger` levels, along with the additional `:none` level
that disables it for the specified tag/module.
Being a wrapper for `Logger`, if the `Logger` log level is less that `Logbook`
log level, the logs will not be produced, because are filtered by `Logger` log levels.
Example:
Logger.configure(level: :warning)
Logbook.set_level(:foo, :debug)
Logbook.debug(:foo, "This will not be emitted")
"""
alias Logbook.LogTags
# The standard Logger severities, ordered most to least severe.
@logger_levels [:emergency, :alert, :critical, :error, :warning, :notice, :info, :debug]
# Logbook additionally accepts :none, which disables logging for a tag/module.
@logbook_levels @logger_levels ++ [:none]
# :warn is accepted for backward compatibility and normalized to :warning.
@type level ::
:emergency
| :alert
| :critical
| :error
| :warn
| :warning
| :notice
| :info
| :debug
| :none
@type tag_or_tags :: atom | [atom]
@doc """
Sets the log level for the given tag or list of tags.
"""
@spec set_level(tag_or_tags, level) :: :ok
# Legacy :warn is normalized to :warning before storing.
def set_level(tag_or_tags, :warn), do: set_level(tag_or_tags, :warning)

def set_level(tag, level) when is_atom(tag) and level in @logbook_levels,
  do: LogTags.set_level([tag], level)

def set_level(tags, level) when is_list(tags) and level in @logbook_levels,
  do: LogTags.set_level(tags, level)
@doc """
Checks whether the tag has the specified log level equal or higher than the configured one.
iex> Logbook.set_level(:foo, :info)
:ok
iex> Logbook.enabled?(:foo, :debug)
false
iex> Logbook.enabled?(:foo, :warning)
true
If a list of tags is passed, returns `true` if any of the tag log level is equal or lower than
the passed one.
"""
@spec enabled?(tag_or_tags, level) :: boolean
# Normalize the legacy :warn level to :warning before checking.
def enabled?(tag_or_tags, :warn) do
enabled?(tag_or_tags, :warning)
end
def enabled?(tag_or_tags, level) when level in @logbook_levels do
LogTags.enabled?(tag_or_tags, level)
end
@doc """
Like `enabled?/2` checks if the given module has a configured log level equal
or lower than the given level.
"""
@spec module_enabled?(module(), level) :: boolean
# Legacy :warn is normalized to :warning.
def module_enabled?(module, :warn) when is_atom(module),
  do: module_enabled?(module, :warning)

def module_enabled?(module, level) when is_atom(module) and level in @logbook_levels,
  do: LogTags.module_enabled?(module, level)
@spec reset() :: :ok
# Delegates to LogTags.reset/0 — presumably clears all stored tag/module
# levels back to defaults; confirm against Logbook.LogTags.
def reset do
LogTags.reset()
end
@doc """
Returns a map containing the tags/modules seen at runtime with the corresponding
configured log level. This list is built at runtime, so if a `Logbook` logging function
has never been called, the corresponding tag will not be shown here.
"""
@spec tags() :: map()
def tags do
LogTags.tags()
end
# Compile-time loop: generates one logging macro per Logger severity
# (Logbook.debug/3, Logbook.info/3, ... with optional metadata), plus a
# private macro_logger/1 clause returning a quoted capture of the matching
# Logger.<level>/2 function.
for level <- @logger_levels do
@doc since: "2.0.0"
defmacro unquote(level)(tag_or_tags, chardata_or_fun, metadata \\ []) do
do_log(unquote(level), tag_or_tags, chardata_or_fun, metadata, __CALLER__)
end
defp macro_logger(unquote(level)) do
level = unquote(level)
quote do
require Logger
&(unquote(Logger).unquote(level) / unquote(2))
end
end
end
# Deprecated alias kept for backward compatibility; forwards to :warning.
@deprecated "Use warning/2 instead"
defmacro warn(tag_or_tags, chardata_or_fun, metadata \\ []) do
do_log(:warning, tag_or_tags, chardata_or_fun, metadata, __CALLER__)
end
# Builds the quoted logging expression injected at the call site:
# enriches metadata with a %Logbook.Tags{} entry, checks at runtime whether
# the tag(s) OR the caller's module are enabled at `level`, and only then
# invokes the captured Logger function.
defp do_log(level, tag_or_tags, chardata_or_fun, metadata, caller) do
logger = macro_logger(level)
{module, tags, tag_or_tags} = macro_preprocess(tag_or_tags, caller)
quote do
level = unquote(level)
logger = unquote(logger)
# enrich metadata
md = Keyword.put(unquote(metadata), :tags, %Logbook.Tags{tags: unquote(tags)})
should_log =
Logbook.enabled?(unquote(tag_or_tags), level) ||
Logbook.module_enabled?(unquote(module), level)
case should_log do
false ->
:ok
true ->
logger.(unquote(chardata_or_fun), md)
end
end
end
# Single atom tag: wrap in a list and remember the caller's module.
defp macro_preprocess(tag, caller) when is_atom(tag) do
%{module: module, function: _fun, file: _file, line: _line} = caller
{module, [tag], tag}
end
# List of tags: Enum.each's fn head asserts every element is an atom at
# compile time (raises FunctionClauseError otherwise).
defp macro_preprocess(tags, caller) when is_list(tags) do
%{module: module, function: _fun, file: _file, line: _line} = caller
Enum.each(tags, fn tag when is_atom(tag) -> tag end)
{module, tags, tags}
end
# Module-attribute AST (e.g. @my_tags): expand once, then reprocess.
defp macro_preprocess({:@, _, _} = module_attr, caller) do
module_attr
|> Macro.expand_once(caller)
|> macro_preprocess(caller)
end
end
| 29.304878 | 96 | 0.661395 |
ff0977609542455175022e8ddc1ac0e2e1c755d2 | 8,237 | ex | Elixir | lib/absinthe/phase/document/validation/fields_on_correct_type.ex | Rabbet/absinthe | 0764d7eb6ea9bdf9ccd957fa27bf1e6b26968f89 | [
"MIT"
] | 2 | 2021-04-22T23:45:04.000Z | 2021-05-07T01:01:15.000Z | lib/absinthe/phase/document/validation/fields_on_correct_type.ex | Rabbet/absinthe | 0764d7eb6ea9bdf9ccd957fa27bf1e6b26968f89 | [
"MIT"
] | null | null | null | lib/absinthe/phase/document/validation/fields_on_correct_type.ex | Rabbet/absinthe | 0764d7eb6ea9bdf9ccd957fa27bf1e6b26968f89 | [
"MIT"
] | null | null | null | defmodule Absinthe.Phase.Document.Validation.FieldsOnCorrectType do
@moduledoc false
# Validates document to ensure that all fields are provided on the correct type.
alias Absinthe.{Blueprint, Phase, Schema, Type}
use Absinthe.Phase
@doc """
Run the validation.
"""
@spec run(Blueprint.t(), Keyword.t()) :: Phase.result_t()
def run(input, _options \\ []) do
  # Walk every node of the document, validating field/fragment placement.
  {:ok, Blueprint.prewalk(input, &handle_node(&1, input))}
end
@spec handle_node(Blueprint.node_t(), Schema.t()) :: Blueprint.node_t()
# An operation with no schema node means the schema doesn't support that
# operation type (e.g. no subscription root) — flag and attach an error.
defp handle_node(%Blueprint.Document.Operation{schema_node: nil} = node, _) do
error = %Phase.Error{
phase: __MODULE__,
message: "Operation \"#{node.type}\" not supported",
locations: [node.source_location]
}
node
|> flag_invalid(:unknown_operation)
|> put_error(error)
end
# Node with a resolved schema parent: validate each child selection.
# - Fields with no schema node get an "unknown field" error plus type/field
#   name suggestions.
# - Fragment spreads/inline fragments must have at least one possible
#   concrete type in common with the parent.
defp handle_node(
%{selections: selections, schema_node: parent_schema_node} = node,
%{schema: schema} = input
)
when not is_nil(parent_schema_node) do
possible_parent_types = possible_types(parent_schema_node, schema)
selections =
Enum.map(selections, fn
%Blueprint.Document.Field{schema_node: nil} = field ->
type = named_type(parent_schema_node, schema)
field
|> flag_invalid(:unknown_field)
|> put_error(
error(
field,
type.name,
suggested_type_names(field.name, type, input),
suggested_field_names(field.name, type, input)
)
)
%Blueprint.Document.Fragment.Spread{errors: []} = spread ->
fragment = Enum.find(input.fragments, &(&1.name == spread.name))
possible_child_types = possible_types(fragment.schema_node, schema)
if Enum.any?(possible_child_types, &(&1 in possible_parent_types)) do
spread
else
spread_error(spread, possible_parent_types, possible_child_types, schema)
end
%Blueprint.Document.Fragment.Inline{} = fragment ->
possible_child_types = possible_types(fragment.schema_node, schema)
if Enum.any?(possible_child_types, &(&1 in possible_parent_types)) do
fragment
else
spread_error(fragment, possible_parent_types, possible_child_types, schema)
end
other ->
other
end)
%{node | selections: selections}
end
# Anything else passes through unchanged.
defp handle_node(node, _) do
node
end
# Maps type identifiers (atoms) to their schema type names.
defp idents_to_names(idents, schema) do
for ident <- idents do
Absinthe.Schema.lookup_type(schema, ident).name
end
end
# Flags a fragment spread/inline fragment whose possible types share nothing
# with the parent's possible types, attaching a descriptive error.
defp spread_error(spread, parent_types_idents, child_types_idents, schema) do
parent_types = idents_to_names(parent_types_idents, schema)
child_types = idents_to_names(child_types_idents, schema)
msg = """
Fragment spread has no type overlap with parent.
Parent possible types: #{inspect(parent_types)}
Spread possible types: #{inspect(child_types)}
"""
error = %Phase.Error{
phase: __MODULE__,
message: msg,
locations: [spread.source_location]
}
spread
|> flag_invalid(:invalid_spread)
|> put_error(error)
end
# Unwrap a node's type reference first, then resolve its concrete types.
defp possible_types(%{type: type}, schema) do
possible_types(type, schema)
end
# Returns the list of concrete object type identifiers a type can resolve
# to: itself for objects, its implementors for interfaces, its members for
# unions, and [] otherwise.
defp possible_types(type, schema) do
schema
|> Absinthe.Schema.lookup_type(type)
|> case do
%Type.Object{identifier: identifier} ->
[identifier]
%Type.Interface{identifier: identifier} ->
schema.__absinthe_interface_implementors__
|> Map.fetch!(identifier)
%Type.Union{types: types} ->
types
_ ->
[]
end
end
@spec named_type(Type.t(), Schema.t()) :: Type.named_t()
# For a field, resolve its declared type; named types pass through.
defp named_type(%Type.Field{} = node, schema) do
Schema.lookup_type(schema, node.type)
end
defp named_type(%{name: _} = node, _) do
node
end
# Maximum number of suggestions to include in an error message.
@suggest 5

@doc """
Generate an error for a field
"""
@spec error_message(String.t(), String.t(), [String.t()], [String.t()]) :: String.t()
def error_message(field_name, type_name, type_suggestions \\ [], field_suggestions \\ [])

# No suggestions available: bare "cannot query" message.
def error_message(field_name, type_name, [], []),
  do: ~s(Cannot query field "#{field_name}" on type "#{type_name}".)

# Only field-name suggestions.
def error_message(field_name, type_name, [], field_suggestions) do
  suggestions = field_suggestions |> Enum.take(@suggest) |> to_quoted_or_list()
  error_message(field_name, type_name) <> " Did you mean " <> suggestions <> "?"
end

# Only type suggestions: recommend an inline fragment.
def error_message(field_name, type_name, type_suggestions, []) do
  suggestions = type_suggestions |> Enum.take(@suggest) |> to_quoted_or_list()

  error_message(field_name, type_name) <>
    " Did you mean to use an inline fragment on " <> suggestions <> "?"
end

# Both kinds present: type suggestions take precedence.
def error_message(field_name, type_name, type_suggestions, _field_suggestions),
  do: error_message(field_name, type_name, type_suggestions)
# Suggests alternative type names (objects + interfaces) that DO define the
# unknown field, using the adapter to translate the external field name.
defp suggested_type_names(external_field_name, type, blueprint) do
internal_field_name =
case blueprint.adapter.to_internal_name(external_field_name, :field) do
nil -> external_field_name
internal_field_name -> internal_field_name
end
possible_types = find_possible_types(internal_field_name, type, blueprint.schema)
possible_interfaces =
find_possible_interfaces(internal_field_name, possible_types, blueprint.schema)
possible_interfaces
|> Enum.map(& &1.name)
|> Enum.concat(Enum.map(possible_types, & &1.name))
|> Enum.sort()
end
# Suggests similarly-named fields on the same type, sorted by similarity
# first (Suggestion.sort_list) and then alphabetically.
defp suggested_field_names(external_field_name, %{fields: _} = type, blueprint) do
internal_field_name =
case blueprint.adapter.to_internal_name(external_field_name, :field) do
nil -> external_field_name
internal_field_name -> internal_field_name
end
Map.values(type.fields)
|> Enum.map(& &1.name)
|> Absinthe.Utils.Suggestion.sort_list(internal_field_name)
|> Enum.map(&blueprint.adapter.to_external_name(&1, :field))
|> Enum.sort()
end
# Types without a fields map can't offer field suggestions.
defp suggested_field_names(_, _, _) do
[]
end
# Interfaces (implemented by the candidate types) that also define the field,
# most-implemented interfaces first.
defp find_possible_interfaces(field_name, possible_types, schema) do
possible_types
|> types_to_interface_idents
|> Enum.uniq()
|> sort_by_implementation_count(possible_types)
|> Enum.map(&Schema.lookup_type(schema, &1))
|> types_with_field(field_name)
end
# Sorts interface identifiers by how many of `types` implement them,
# descending (sort ascending, then reverse).
defp sort_by_implementation_count(iface_idents, types) do
Enum.sort_by(iface_idents, fn iface ->
count =
Enum.count(types, fn
%{interfaces: ifaces} ->
Enum.member?(ifaces, iface)
_ ->
false
end)
count
end)
|> Enum.reverse()
end
# Collects the interface identifiers declared by each type (skipping types
# that declare none).
defp types_to_interface_idents(types) do
Enum.flat_map(types, fn
%{interfaces: ifaces} ->
ifaces
_ ->
[]
end)
end
# Concrete types of `type` that define a field with the given name.
defp find_possible_types(field_name, type, schema) do
schema
|> Schema.concrete_types(Type.unwrap(type))
|> types_with_field(field_name)
end
# Keeps only the types that define a field with the given name.
defp types_with_field(types, field_name) do
  Enum.filter(types, fn type -> type_with_field?(type, field_name) end)
end

# Truthy check: returns the matching field struct (truthy) or nil when the
# type defines fields; false for nodes without a fields map.
defp type_with_field?(%{fields: fields}, field_name) do
  fields
  |> Map.values()
  |> Enum.find(fn field -> field.name == field_name end)
end

defp type_with_field?(_type, _field_name), do: false
# Renders names as a quoted, comma-separated English "or" list:
# one name   -> "a"
# two names  -> "a" or "b"
# three plus -> "a", "b", or "c"
# (each name is wrapped in double quotes in the output)
defp to_quoted_or_list([only]), do: ~s("#{only}")
defp to_quoted_or_list([first, second]), do: ~s("#{first}" or "#{second}")

defp to_quoted_or_list([_, _, _ | _] = names) do
  {last, leading} = List.pop_at(names, -1)
  Enum.map_join(leading, ", ", &~s("#{&1}")) <> ~s(, or "#{last}")
end
end
| 28.50173 | 94 | 0.658614 |
ff099b803db03e58522da7a58e7c12359da5614c | 2,136 | exs | Elixir | test/validations/acceptance_test.exs | emjrdev/vex | c4a863ed39d4723ccf45231252d81c0f0df45de1 | [
"MIT"
] | 560 | 2015-01-12T00:07:27.000Z | 2022-02-07T03:21:44.000Z | test/validations/acceptance_test.exs | emjrdev/vex | c4a863ed39d4723ccf45231252d81c0f0df45de1 | [
"MIT"
] | 55 | 2015-02-16T18:59:57.000Z | 2021-12-23T12:34:25.000Z | test/validations/acceptance_test.exs | emjrdev/vex | c4a863ed39d4723ccf45231252d81c0f0df45de1 | [
"MIT"
] | 63 | 2015-02-12T03:49:50.000Z | 2021-12-12T00:11:01.000Z | defmodule AcceptanceTestRecord do
# Single boolean-ish field exercised by the acceptance validation below.
defstruct accepts_terms: false
use Vex.Struct
# Default acceptance rule: any truthy value passes.
validates(:accepts_terms, acceptance: true)
end
# Same shape as AcceptanceTestRecord, but with a custom acceptance value.
defmodule CustomAcceptanceTestRecord do
defstruct accepts_terms: false
use Vex.Struct
# Custom acceptance rule: only the exact string "yes" passes.
validates(:accepts_terms, acceptance: [as: "yes"])
end
defmodule AcceptanceTest do
use ExUnit.Case
# Validation settings passed inline as the second argument.
test "keyword list, provided basic acceptance validation" do
assert Vex.valid?([accepts_terms: true], accepts_terms: [acceptance: true])
assert Vex.valid?([accepts_terms: "anything"], accepts_terms: [acceptance: true])
assert !Vex.valid?([accepts_terms: nil], accepts_terms: [acceptance: true])
end
# Validation settings embedded in the data under the _vex key.
test "keyword list, included presence validation" do
assert Vex.valid?(accepts_terms: true, _vex: [accepts_terms: [acceptance: true]])
assert Vex.valid?(accepts_terms: "anything", _vex: [accepts_terms: [acceptance: true]])
assert !Vex.valid?(accepts_terms: false, _vex: [accepts_terms: [acceptance: true]])
end
# Custom acceptance (as: "yes") rejects everything but the exact string.
test "keyword list, provided custom acceptance validation" do
assert Vex.valid?([accepts_terms: "yes"], accepts_terms: [acceptance: [as: "yes"]])
assert !Vex.valid?([accepts_terms: false], accepts_terms: [acceptance: [as: "yes"]])
assert !Vex.valid?([accepts_terms: true], accepts_terms: [acceptance: [as: "yes"]])
end
test "keyword list, included custom validation" do
assert Vex.valid?(accepts_terms: "yes", _vex: [accepts_terms: [acceptance: [as: "yes"]]])
assert !Vex.valid?(accepts_terms: false, _vex: [accepts_terms: [acceptance: [as: "yes"]]])
assert !Vex.valid?(accepts_terms: true, _vex: [accepts_terms: [acceptance: [as: "yes"]]])
end
# Struct-based validations declared via use Vex.Struct in the record modules.
test "record, included basic presence validation" do
assert Vex.valid?(%AcceptanceTestRecord{accepts_terms: "yes"})
assert Vex.valid?(%AcceptanceTestRecord{accepts_terms: true})
end
test "record, included custom presence validation" do
assert Vex.valid?(%CustomAcceptanceTestRecord{accepts_terms: "yes"})
assert !Vex.valid?(%CustomAcceptanceTestRecord{accepts_terms: true})
assert !Vex.valid?(%CustomAcceptanceTestRecord{accepts_terms: false})
end
end
| 40.301887 | 94 | 0.731273 |
ff099cc995745dcb891952c0ba0c1bad0b27afe5 | 1,274 | exs | Elixir | test/changelog_web/controllers/podcast_controller_test.exs | wojtekmach/changelog.com | d4a8a7703c5f07a3da63bffd770f4642488cf8fd | [
"MIT"
] | null | null | null | test/changelog_web/controllers/podcast_controller_test.exs | wojtekmach/changelog.com | d4a8a7703c5f07a3da63bffd770f4642488cf8fd | [
"MIT"
] | null | null | null | test/changelog_web/controllers/podcast_controller_test.exs | wojtekmach/changelog.com | d4a8a7703c5f07a3da63bffd770f4642488cf8fd | [
"MIT"
] | null | null | null | defmodule ChangelogWeb.PodcastControllerTest do
use ChangelogWeb.ConnCase
# Index lists every podcast by name.
test "getting the podcasts index", %{conn: conn} do
p1 = insert(:podcast)
p2 = insert(:podcast)
conn = get(conn, podcast_path(conn, :index))
assert conn.status == 200
assert conn.resp_body =~ p1.name
assert conn.resp_body =~ p2.name
end
# Draft podcasts are not publicly routable (show query excludes them).
test "getting a draft podcast page", %{conn: conn} do
p = insert(:podcast, status: :draft)
assert_raise Ecto.NoResultsError, fn ->
get(conn, podcast_path(conn, :show, p.slug))
end
end
test "getting a podcast page", %{conn: conn} do
p = insert(:podcast)
conn = get(conn, podcast_path(conn, :show, p.slug))
assert html_response(conn, 200) =~ p.name
end
# A published episode's news item headline should appear on the show page.
test "getting a podcast page with a published episode", %{conn: conn} do
p = insert(:podcast)
e = insert(:published_episode, podcast: p)
i = episode_news_item(e) |> insert()
conn = get(conn, podcast_path(conn, :show, p.slug))
assert html_response(conn, 200) =~ p.name
assert String.contains?(conn.resp_body, i.headline)
end
# Unknown slugs surface as Ecto.NoResultsError (rendered as 404 upstream).
test "getting a podcast page that doesn't exist", %{conn: conn} do
assert_raise Ecto.NoResultsError, fn ->
get conn, podcast_path(conn, :show, "bad-show")
end
end
end
| 29.627907 | 74 | 0.66562 |
ff09b8220410a44484998cf5bd6c5c6298799daf | 1,803 | ex | Elixir | clients/vision/lib/google_api/vision/v1/model/operation_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/vision/lib/google_api/vision/v1/model/operation_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/vision/lib/google_api/vision/v1/model/operation_metadata.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Vision.V1.Model.OperationMetadata do
@moduledoc """
Contains metadata for the BatchAnnotateImages operation.
## Attributes
* `createTime` (*type:* `DateTime.t`, *default:* `nil`) - The time when the batch request was received.
* `state` (*type:* `String.t`, *default:* `nil`) - Current state of the batch operation.
* `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - The time when the operation result was last updated.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:createTime => DateTime.t() | nil,
:state => String.t() | nil,
:updateTime => DateTime.t() | nil
}
# field/2 macro presumably provided by GoogleApi.Gax.ModelBase; maps JSON
# keys to struct fields with optional decoding (as: DateTime) — confirm.
field(:createTime, as: DateTime)
field(:state)
field(:updateTime, as: DateTime)
end
# Poison protocol implementations delegating to the generated model codecs.
defimpl Poison.Decoder, for: GoogleApi.Vision.V1.Model.OperationMetadata do
def decode(value, options) do
GoogleApi.Vision.V1.Model.OperationMetadata.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Vision.V1.Model.OperationMetadata do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.018868 | 114 | 0.713256 |
ff09cd32567a55a6fcfd308a437f5d4e8b9e66df | 93 | exs | Elixir | test/marshal_decode_helper_test.exs | barruumrex/marshal | 5d2532ecf5428e75096b2e6b5bdfe9d476a603a0 | [
"MIT"
] | 1 | 2016-06-21T00:10:10.000Z | 2016-06-21T00:10:10.000Z | test/marshal_decode_helper_test.exs | barruumrex/marshal | 5d2532ecf5428e75096b2e6b5bdfe9d476a603a0 | [
"MIT"
] | null | null | null | test/marshal_decode_helper_test.exs | barruumrex/marshal | 5d2532ecf5428e75096b2e6b5bdfe9d476a603a0 | [
"MIT"
] | null | null | null | defmodule Marshal.Decode.HelperTest do
use ExUnit.Case
# Runs the doctest examples embedded in Marshal.Decode.Helper's @doc blocks.
doctest Marshal.Decode.Helper
end
| 18.6 | 38 | 0.817204 |
ff09e47311ff6328bd258acdf6c488b22d237f85 | 10,307 | ex | Elixir | host_core/lib/host_core/actors/actor_module.ex | sleipnir/wasmcloud-otp | aef10f2f07257e65c527be6030006aaed2b85ff9 | [
"Apache-2.0"
] | null | null | null | host_core/lib/host_core/actors/actor_module.ex | sleipnir/wasmcloud-otp | aef10f2f07257e65c527be6030006aaed2b85ff9 | [
"Apache-2.0"
] | null | null | null | host_core/lib/host_core/actors/actor_module.ex | sleipnir/wasmcloud-otp | aef10f2f07257e65c527be6030006aaed2b85ff9 | [
"Apache-2.0"
] | null | null | null | defmodule HostCore.Actors.ActorModule do
# Do not automatically restart this process
use GenServer, restart: :transient
alias HostCore.CloudEvent
# waPC operation name used for periodic health checks.
@op_health_check "HealthRequest"
# Health-check interval and live-update call timeout, in milliseconds.
@thirty_seconds 30_000
require Logger
alias HostCore.WebAssembly.Imports
# Mutable actor state held in an Agent: waPC request/response/error slots,
# the Wasmex instance, identity (claims, instance_id, ociref), the current
# invocation, and the NATS subscription handle.
defmodule State do
defstruct [
:guest_request,
:guest_response,
:host_response,
:guest_error,
:host_error,
:instance,
:instance_id,
:api_version,
:invocation,
:claims,
:subscription,
:ociref
]
end
# The operation + payload currently being dispatched into the guest module.
defmodule Invocation do
defstruct [:operation, :payload]
end
@doc """
Starts the Actor module
"""
def start_link(opts), do: GenServer.start_link(__MODULE__, opts)

# --- Client API: thin synchronous wrappers over GenServer calls ---

def current_invocation(pid), do: GenServer.call(pid, :get_invocation)

def api_version(pid), do: GenServer.call(pid, :get_api_ver)

def claims(pid), do: GenServer.call(pid, :get_claims)

def instance_id(pid), do: GenServer.call(pid, :get_instance_id)

def ociref(pid), do: GenServer.call(pid, :get_ociref)

def halt(pid), do: GenServer.call(pid, :halt_and_cleanup)

def health_check(pid), do: GenServer.call(pid, :health_check)

# Live updates reload the WebAssembly module, so allow a generous timeout.
def live_update(pid, bytes, claims),
  do: GenServer.call(pid, {:live_update, bytes, claims}, @thirty_seconds)
@impl true
def init({claims, bytes, oci}) do
start_actor(claims, bytes, oci)
end
# Hot-swaps the running Wasmex instance with freshly supplied module bytes:
# stops the old instance, starts a new one with the same host imports,
# re-runs start/wapc_init, and announces the update on the lattice.
def handle_call({:live_update, bytes, claims}, _from, agent) do
Logger.debug("Actor #{claims.public_key} performing live update")
imports = %{
wapc: Imports.wapc_imports(agent),
wasmbus: Imports.wasmbus_imports(agent)
}
# shut down the previous Wasmex instance to avoid orphaning it
old_instance = Agent.get(agent, fn content -> content.instance end)
GenServer.stop(old_instance, :normal)
instance_id = Agent.get(agent, fn content -> content.instance_id end)
{:ok, instance} = Wasmex.start_link(%{bytes: bytes, imports: imports})
api_version =
case Wasmex.call_function(instance, :__wasmbus_rpc_version, []) do
{:ok, [v]} -> v
_ -> 0
end
Agent.update(agent, fn state ->
%State{state | claims: claims, api_version: api_version, instance: instance}
end)
Wasmex.call_function(instance, :start, [])
Wasmex.call_function(instance, :wapc_init, [])
publish_actor_updated(claims.public_key, claims.revision, instance_id)
Logger.debug("Actor #{claims.public_key} updated")
{:reply, :ok, agent}
end
# Simple read-only queries served straight from the Agent-held state.
def handle_call(:get_api_ver, _from, agent) do
  {:reply, Agent.get(agent, & &1.api_version), agent}
end

def handle_call(:get_claims, _from, agent) do
  {:reply, Agent.get(agent, & &1.claims), agent}
end

def handle_call(:get_instance_id, _from, agent) do
  {:reply, Agent.get(agent, & &1.instance_id), agent}
end

def handle_call(:get_ociref, _from, agent) do
  {:reply, Agent.get(agent, & &1.ociref), agent}
end

@impl true
def handle_call(:get_invocation, _from, agent) do
  Logger.info("Getting invocation")
  invocation = Agent.get(agent, & &1.invocation)
  {:reply, invocation, agent}
end

@impl true
def handle_call(:health_check, _from, agent) do
  {:reply, perform_health_check(agent), agent}
end
# Tears down the actor: unsubscribes from its RPC topic, announces the stop
# event, and halts the GenServer normally.
@impl true
def handle_call(:halt_and_cleanup, _from, agent) do
# Add cleanup if necessary here...
Logger.info("Actor instance termination requested")
subscription = Agent.get(agent, fn content -> content.subscription end)
public_key = Agent.get(agent, fn content -> content.claims.public_key end)
instance_id = Agent.get(agent, fn content -> content.instance_id end)
Gnat.unsub(:lattice_nats, subscription)
publish_actor_stopped(public_key, instance_id)
{:stop, :normal, :ok, agent}
end
# Periodic self-scheduled health check (every @thirty_seconds ms).
@impl true
def handle_info(:do_health, agent) do
perform_health_check(agent)
Process.send_after(self(), :do_health, @thirty_seconds)
{:noreply, agent}
end
# Inbound RPC over NATS: msgpack-decode the invocation, run it through the
# guest module, and publish an invocation result (success or error) to the
# reply subject.
@impl true
def handle_info(
{:msg,
%{
body: body,
reply_to: reply_to,
topic: topic
}},
agent
) do
Logger.info("Received invocation on #{topic}")
iid = Agent.get(agent, fn content -> content.instance_id end)
# TODO - handle failure
{:ok, inv} = Msgpax.unpack(body)
# TODO - perform antiforgery check
ir =
case perform_invocation(agent, inv["operation"], inv["msg"]) do
{:ok, response} ->
%{
msg: response,
invocation_id: inv["id"],
instance_id: iid
}
{:error, error} ->
%{
msg: nil,
error: error,
invocation_id: inv["id"],
instance_id: iid
}
end
Gnat.pub(:lattice_nats, reply_to, ir |> Msgpax.pack!() |> IO.iodata_to_binary())
{:noreply, agent}
end
# Boots the actor: registers it, stores its claims, subscribes to its
# lattice RPC topic (queue group = topic for load balancing across
# instances), schedules health checks, and starts the Wasmex instance with
# the host-provided wapc/wasmbus imports.
defp start_actor(claims, bytes, oci) do
Logger.info("Actor module starting")
Registry.register(Registry.ActorRegistry, claims.public_key, claims)
HostCore.Claims.Manager.put_claims(claims)
{:ok, agent} = Agent.start_link(fn -> %State{claims: claims, instance_id: UUID.uuid4()} end)
prefix = HostCore.Host.lattice_prefix()
topic = "wasmbus.rpc.#{prefix}.#{claims.public_key}"
Logger.info("Subscribing to #{topic}")
{:ok, subscription} = Gnat.sub(:lattice_nats, self(), topic, queue_group: topic)
Agent.update(agent, fn state -> %State{state | subscription: subscription, ociref: oci} end)
Process.send_after(self(), :do_health, @thirty_seconds)
imports = %{
wapc: Imports.wapc_imports(agent),
wasmbus: Imports.wasmbus_imports(agent)
}
publish_oci_map(oci, claims.public_key)
Wasmex.start_link(%{bytes: bytes, imports: imports})
|> prepare_module(agent)
end
# Dispatches one operation+payload into the guest wasm module: clears the
# waPC request/response/error slots, stores the pending Invocation so host
# imports can read it, then calls the guest's __guest_call export.
defp perform_invocation(agent, operation, payload) do
Logger.info("performing invocation #{operation}")
raw_state = Agent.get(agent, fn content -> content end)
raw_state = %State{
raw_state
| guest_response: nil,
guest_request: nil,
guest_error: nil,
host_response: nil,
host_error: nil,
invocation: %Invocation{operation: operation, payload: payload}
}
Agent.update(agent, fn _content -> raw_state end)
Logger.info("Agent state updated")
# invoke __guest_call
# per to_guest_call_result below: returns 1 on success (guest_response was
# set via the host imports), 0 on failure (guest_error was set)
Wasmex.call_function(raw_state.instance, :__guest_call, [
byte_size(operation),
byte_size(payload)
])
|> to_guest_call_result(agent)
end
# Maps the guest's numeric return code to a tagged tuple, reading the
# response/error the guest wrote into the agent state: 1 = success, 0 = error.
defp to_guest_call_result({:ok, [res]}, agent) do
Logger.info("OK result")
state = Agent.get(agent, fn content -> content end)
case res do
1 -> {:ok, state.guest_response}
0 -> {:error, state.guest_error}
end
end
# Wasmex itself failed to invoke the export.
defp to_guest_call_result({:error, err}, _agent) do
{:error, err}
end
# Invokes the HealthRequest operation with a placeholder payload; any raise
# is treated as a failed check. Publishes a passed/failed lattice event and
# returns the raw result to the caller.
defp perform_health_check(agent) do
payload = %{placeholder: true} |> Msgpax.pack!() |> IO.iodata_to_binary()
res =
try do
perform_invocation(agent, @op_health_check, payload)
rescue
_e -> {:error, "Failed to invoke actor module"}
end
case res do
{:ok, _payload} -> publish_check_passed(agent)
{:error, reason} -> publish_check_failed(agent, reason)
end
res
end
# Finalizes a successful Wasmex start: reads the module's RPC version
# (0 when the export is missing), runs start/wapc_init, stores the instance,
# and announces actor_started on the lattice.
defp prepare_module({:ok, instance}, agent) do
api_version =
case Wasmex.call_function(instance, :__wasmbus_rpc_version, []) do
{:ok, [v]} -> v
_ -> 0
end
claims = Agent.get(agent, fn content -> content.claims end)
instance_id = Agent.get(agent, fn content -> content.instance_id end)
Wasmex.call_function(instance, :start, [])
Wasmex.call_function(instance, :wapc_init, [])
Agent.update(agent, fn content ->
%State{content | api_version: api_version, instance: instance}
end)
publish_actor_started(claims.public_key, api_version, instance_id)
{:ok, agent}
end
# No-op when the actor was not started from an OCI reference.
def publish_oci_map(oci, _pk) when oci in ["", nil], do: nil

# Records the OCI reference -> public key mapping with the refmaps manager.
def publish_oci_map(oci, pk), do: HostCore.Refmaps.Manager.put_refmap(oci, pk)
# Emits an `actor_started` cloud event on the lattice control topic.
def publish_actor_started(actor_pk, api_version, instance_id) do
  topic = "wasmbus.evt.#{HostCore.Host.lattice_prefix()}"

  msg =
    CloudEvent.new(
      %{public_key: actor_pk, api_version: api_version, instance_id: instance_id},
      "actor_started"
    )

  Gnat.pub(:control_nats, topic, msg)
end

# Emits an `actor_updated` cloud event after a successful live update.
def publish_actor_updated(actor_pk, revision, instance_id) do
  topic = "wasmbus.evt.#{HostCore.Host.lattice_prefix()}"

  msg =
    CloudEvent.new(
      %{public_key: actor_pk, revision: revision, instance_id: instance_id},
      "actor_updated"
    )

  Gnat.pub(:control_nats, topic, msg)
end

# Emits an `actor_stopped` cloud event when an instance terminates.
def publish_actor_stopped(actor_pk, instance_id) do
  topic = "wasmbus.evt.#{HostCore.Host.lattice_prefix()}"

  msg = CloudEvent.new(%{public_key: actor_pk, instance_id: instance_id}, "actor_stopped")

  Gnat.pub(:control_nats, topic, msg)
end
# Publishes a health_check_passed cloud event for this instance.
# Returns nil explicitly so callers don't rely on Gnat.pub's return.
defp publish_check_passed(agent) do
prefix = HostCore.Host.lattice_prefix()
claims = Agent.get(agent, fn content -> content.claims end)
iid = Agent.get(agent, fn content -> content.instance_id end)
msg =
%{
public_key: claims.public_key,
instance_id: iid
}
|> CloudEvent.new("health_check_passed")
topic = "wasmbus.evt.#{prefix}"
Gnat.pub(:control_nats, topic, msg)
nil
end
# Same as above but carries the failure reason in the event payload.
defp publish_check_failed(agent, reason) do
prefix = HostCore.Host.lattice_prefix()
claims = Agent.get(agent, fn content -> content.claims end)
iid = Agent.get(agent, fn content -> content.instance_id end)
msg =
%{
public_key: claims.public_key,
instance_id: iid,
reason: reason
}
|> CloudEvent.new("health_check_failed")
topic = "wasmbus.evt.#{prefix}"
Gnat.pub(:control_nats, topic, msg)
nil
end
end
| 25.83208 | 96 | 0.650626 |
ff0a58c84412bfab4cceb9aa3ecebb247943009c | 1,843 | exs | Elixir | config/prod.exs | pzingg/phoenix_16_example | 7f4160de837229101e18a4e1cc97a00f4af5aaa7 | [
"MIT"
] | null | null | null | config/prod.exs | pzingg/phoenix_16_example | 7f4160de837229101e18a4e1cc97a00f4af5aaa7 | [
"MIT"
] | null | null | null | config/prod.exs | pzingg/phoenix_16_example | 7f4160de837229101e18a4e1cc97a00f4af5aaa7 | [
"MIT"
] | null | null | null | import Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
# NOTE(review): values here are fixed at compile time; runtime secrets
# belong in runtime.exs / environment variables.
config :example_16, Example16Web.Endpoint,
url: [host: "example.com", port: 80],
cache_static_manifest: "priv/static/cache_manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :example_16, Example16Web.Endpoint,
# ...,
# url: [host: "example.com", port: 443],
# https: [
# ...,
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :example_16, Example16Web.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
| 35.442308 | 66 | 0.706457 |
ff0a64ad8de33223c0a7af38204d23a29a3a1136 | 340 | ex | Elixir | lib/supabase_surface/components/icons/icon_divide.ex | treebee/supabase-surface | 5a184ca92323c085dd81e2fc8aa8c10367f2382e | [
"Apache-2.0"
] | 5 | 2021-06-08T08:02:43.000Z | 2022-02-09T23:13:46.000Z | lib/supabase_surface/components/icons/icon_divide.ex | treebee/supabase-surface | 5a184ca92323c085dd81e2fc8aa8c10367f2382e | [
"Apache-2.0"
] | null | null | null | lib/supabase_surface/components/icons/icon_divide.ex | treebee/supabase-surface | 5a184ca92323c085dd81e2fc8aa8c10367f2382e | [
"Apache-2.0"
defmodule SupabaseSurface.Components.Icons.IconDivide do
  @moduledoc false

  use SupabaseSurface.Components.Icon

  # Renders the Feather "divide" glyph inside the shared IconContainer.
  # The pixel dimensions are resolved from the `size` assign by
  # IconContainer.get_size/1 and applied to both width and height.
  @impl true
  def render(assigns) do
    icon_size = IconContainer.get_size(assigns.size)

    ~F"""
    <IconContainer assigns={assigns}>
      {Feathericons.divide(width: icon_size, height: icon_size)}
    </IconContainer>
    """
  end
end
| 22.666667 | 64 | 0.717647 |
ff0a6acf3663e7b3762e210f4fafa9d346ce416b | 1,349 | exs | Elixir | test/xdr/ledger_entries/data_value_test.exs | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 3 | 2021-08-17T20:32:45.000Z | 2022-03-13T20:26:02.000Z | test/xdr/ledger_entries/data_value_test.exs | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 45 | 2021-08-12T20:19:41.000Z | 2022-03-27T21:00:10.000Z | test/xdr/ledger_entries/data_value_test.exs | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
defmodule StellarBase.XDR.DataValueTest do
  use ExUnit.Case

  alias StellarBase.XDR.DataValue

  describe "DataValue" do
    # Shared fixtures: a short data value and its XDR wire encoding
    # (4-byte length prefix, payload, zero padding to a 4-byte boundary).
    setup do
      value = DataValue.new("GCIZ3GSM5")
      encoded = <<0, 0, 0, 9, 71, 67, 73, 90, 51, 71, 83, 77, 53, 0, 0, 0>>

      %{data_value: value, binary: encoded}
    end

    test "new/1" do
      %DataValue{value: stored} =
        DataValue.new(<<0, 0, 0, 9, 71, 67, 73, 90, 51, 71, 83, 77, 53, 0, 0, 0>>)

      # The raw 16-byte input is kept verbatim (length prefix and padding included).
      16 = String.length(stored)
    end

    test "encode_xdr/1", %{data_value: dv, binary: bin} do
      {:ok, ^bin} = DataValue.encode_xdr(dv)
    end

    test "encode_xdr!/1", %{data_value: dv, binary: bin} do
      ^bin = DataValue.encode_xdr!(dv)
    end

    test "decode_xdr/2", %{data_value: dv, binary: bin} do
      # Decoding consumes the whole binary, leaving an empty rest.
      {:ok, {^dv, ""}} = DataValue.decode_xdr(bin)
    end

    test "decode_xdr/2 with an invalid binary" do
      {:error, :not_binary} = DataValue.decode_xdr(123)
    end

    test "decode_xdr!/2", %{data_value: dv, binary: bin} do
      {^dv, ""} = DataValue.decode_xdr!(bin)
    end

    test "invalid length" do
      # Values longer than the XDR-declared maximum are rejected on encode.
      {:error, :invalid_length} =
        DataValue.encode_xdr(%DataValue{
          value: "GCIZ3GSM5XL7OUS4UP64THMDZ7CZ3ZWNTMGKKUPT4V3UMXIKUS4UP64THMDZ7CZ3Z"
        })
    end
  end
end
| 27.530612 | 84 | 0.623425 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.