hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0845a163c04ff84d097148eef1d5bd7d40b9d0be | 3,020 | ex | Elixir | lib/game/gossip.ex | jgsmith/ex_venture | 546adaa8fe80d45a72fde6de8d8d6906902c12d4 | [
"MIT"
] | 2 | 2019-05-14T11:36:44.000Z | 2020-07-01T08:54:04.000Z | lib/game/gossip.ex | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | null | null | null | lib/game/gossip.ex | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | 1 | 2021-01-29T14:12:40.000Z | 2021-01-29T14:12:40.000Z | defmodule Game.Gossip do
@moduledoc """
Callback module for Gossip
"""
require Logger
alias Game.Channel
alias Game.Channels
alias Game.Character
alias Game.Events.PlayerSignedIn
alias Game.Events.PlayerSignedOut
alias Game.Message
alias Game.Session
@behaviour Gossip.Client.Core
@behaviour Gossip.Client.Players
@behaviour Gossip.Client.Tells
@behaviour Gossip.Client.Games
@impl true
def user_agent() do
ExVenture.version()
end
@impl true
def channels() do
Enum.map(Channels.gossip_channels(), & &1.gossip_channel)
end
@impl true
def players() do
Session.Registry.connected_players()
|> Enum.map(& &1.player.name)
end
@impl true
def authenticated(), do: :ok
@impl true
def message_broadcast(message) do
with {:ok, channel} <- Channels.gossip_channel(message.channel),
true <- Squabble.node_is_leader?() do
message = Message.gossip_broadcast(channel, message)
Channel.broadcast(channel.name, message)
:ok
else
_ ->
:ok
end
end
@impl true
def player_sign_in(game_name, player_name) do
Logger.info(fn ->
"Gossip - new player sign in #{player_name}@#{game_name}"
end)
case Squabble.node_is_leader?() do
true ->
name = "#{player_name}@#{game_name}"
event = %PlayerSignedIn{character: Character.simple_gossip(name)}
Session.Registry.connected_players()
|> Enum.each(fn %{player: player} ->
Character.notify(player, event)
end)
false ->
:ok
end
end
@impl true
def player_sign_out(game_name, player_name) do
Logger.info(fn ->
"Gossip - new player sign out #{player_name}@#{game_name}"
end)
case Squabble.node_is_leader?() do
true ->
name = "#{player_name}@#{game_name}"
event = %PlayerSignedOut{character: Character.simple_gossip(name)}
Session.Registry.connected_players()
|> Enum.each(fn %{player: player} ->
Character.notify(player, event)
end)
false ->
:ok
end
end
@impl true
def player_update(game_name, player_names) do
Logger.debug(fn ->
"Received update for game #{game_name} - #{inspect(player_names)}"
end)
end
@impl true
def tell_receive(from_game, from_player, to_player, message) do
Logger.info(fn ->
"Received a new tell from #{from_player}@#{from_game} to #{to_player}"
end)
with true <- Squabble.node_is_leader?(),
{:ok, player} <- Session.Registry.find_player(to_player) do
player_name = "#{from_player}@#{from_game}"
Channel.tell(
Character.to_simple(player),
Character.simple_gossip(player_name),
Message.tell(Character.simple_gossip(player_name), message)
)
:ok
else
_ ->
:ok
end
end
@impl true
def game_update(_game), do: :ok
@impl true
def game_connect(_game), do: :ok
@impl true
def game_disconnect(_game), do: :ok
end
| 22.37037 | 76 | 0.64106 |
0845f1b5fafc0cf0bf0492642a0ce57fca8fd05d | 213 | ex | Elixir | lib/edgedb/protocol/enums/transaction_state.ex | nsidnev/edgedb-elixir | bade2b9daba2e83bfaa5915b2addb74f41610968 | [
"MIT"
] | 30 | 2021-05-19T08:54:44.000Z | 2022-03-11T22:52:25.000Z | lib/edgedb/protocol/enums/transaction_state.ex | nsidnev/edgedb-elixir | bade2b9daba2e83bfaa5915b2addb74f41610968 | [
"MIT"
] | 3 | 2021-11-17T21:26:01.000Z | 2022-03-12T09:49:25.000Z | lib/edgedb/protocol/enums/transaction_state.ex | nsidnev/edgedb-elixir | bade2b9daba2e83bfaa5915b2addb74f41610968 | [
"MIT"
] | 3 | 2021-08-29T14:55:41.000Z | 2022-03-12T01:30:35.000Z | defmodule EdgeDB.Protocol.Enums.TransactionState do
use EdgeDB.Protocol.Enum
defenum(
values: [
not_in_transaction: 0x49,
in_transaction: 0x54,
in_failed_transaction: 0x45
]
)
end
| 17.75 | 51 | 0.694836 |
08461c9168f53511391477fe82ee649fd31aa575 | 968 | ex | Elixir | spec/support/element/trivial_sink.ex | mkaput/membrane-core | f65ae3d847f2c10f3ab20d0c7aa75b0faa274ec7 | [
"Apache-2.0"
] | null | null | null | spec/support/element/trivial_sink.ex | mkaput/membrane-core | f65ae3d847f2c10f3ab20d0c7aa75b0faa274ec7 | [
"Apache-2.0"
] | null | null | null | spec/support/element/trivial_sink.ex | mkaput/membrane-core | f65ae3d847f2c10f3ab20d0c7aa75b0faa274ec7 | [
"Apache-2.0"
] | null | null | null | defmodule Membrane.Support.Element.TrivialSink do
@moduledoc """
This is minimal sample sink element for use in specs.
Modify with caution as many specs may depend on its shape.
"""
use Membrane.Element.Base.Sink
def_known_sink_pads sink: {:always, {:pull, demand_in: :buffers}, :any}
@impl true
def handle_init(_options) do
{:ok, %{timer: nil}}
end
@impl true
def handle_prepare(:stopped, %Ctx.Prepare{}, state), do: {:ok, state}
def handle_prepare(:playing, %Ctx.Prepare{}, %{timer: timer}) do
if timer do
:timer.cancel(timer)
end
{:ok, %{timer: nil}}
end
@impl true
def handle_play(%Ctx.Play{}, state) do
{:ok, timer} = :timer.send_interval(500, :tick)
{:ok, %{state | timer: timer}}
end
@impl true
def handle_other(:tick, %Ctx.Other{}, state) do
{{:ok, demand: {:sink, 2}}, state}
end
@impl true
def handle_write1(:sink, _buf, %Ctx.Write{}, state) do
{:ok, state}
end
end
| 22 | 73 | 0.640496 |
084626ebc5a9871d52c8e725f278a27d9dde3f03 | 898 | ex | Elixir | lib/groupher_server/cms/models/community_subscriber.ex | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 240 | 2018-11-06T09:36:54.000Z | 2022-02-20T07:12:36.000Z | lib/groupher_server/cms/models/community_subscriber.ex | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 363 | 2018-07-11T03:38:14.000Z | 2021-12-14T01:42:40.000Z | lib/groupher_server/cms/models/community_subscriber.ex | mydearxym/mastani_server | f24034a4a5449200165cf4a547964a0961793eab | [
"Apache-2.0"
] | 22 | 2019-01-27T11:47:56.000Z | 2021-02-28T13:17:52.000Z | defmodule GroupherServer.CMS.Model.CommunitySubscriber do
@moduledoc false
alias __MODULE__
use Ecto.Schema
import Ecto.Changeset
alias GroupherServer.{Accounts, CMS}
alias Accounts.Model.User
alias CMS.Model.Community
@required_fields ~w(user_id community_id)a
@type t :: %CommunitySubscriber{}
schema "communities_subscribers" do
belongs_to(:user, User, foreign_key: :user_id)
belongs_to(:community, Community, foreign_key: :community_id)
timestamps(type: :utc_datetime)
end
@doc false
def changeset(%CommunitySubscriber{} = community_subscriber, attrs) do
community_subscriber
|> cast(attrs, @required_fields)
|> validate_required(@required_fields)
|> foreign_key_constraint(:community_id)
|> foreign_key_constraint(:user_id)
|> unique_constraint(:user_id, name: :communities_subscribers_user_id_community_id_index)
end
end
| 27.212121 | 93 | 0.760579 |
08463b609ce7f2bc11f3b805ddfa24cb72740c43 | 2,005 | ex | Elixir | clients/apigee/lib/google_api/apigee/v1/model/google_iam_v1_audit_log_config.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/apigee/lib/google_api/apigee/v1/model/google_iam_v1_audit_log_config.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/apigee/lib/google_api/apigee/v1/model/google_iam_v1_audit_log_config.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Apigee.V1.Model.GoogleIamV1AuditLogConfig do
  @moduledoc """
  Provides the configuration for logging a type of permissions. Example: { "audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [ "user:jose@example.com" ] }, { "log_type": "DATA_WRITE" } ] } This enables 'DATA_READ' and 'DATA_WRITE' logging, while exempting jose@example.com from DATA_READ logging.

  ## Attributes

  *   `exemptedMembers` (*type:* `list(String.t)`, *default:* `nil`) - Specifies the identities that do not cause logging for this type of permission. Follows the same format of Binding.members.
  *   `logType` (*type:* `String.t`, *default:* `nil`) - The log type that this config enables.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :exemptedMembers => list(String.t()) | nil,
          :logType => String.t() | nil
        }

  # JSON field mappings consumed by GoogleApi.Gax.ModelBase for (de)serialization.
  field(:exemptedMembers, type: :list)
  field(:logType)
end
# Poison protocol implementations: decoding delegates to the generated model's
# decode/2, encoding goes through the shared Gax ModelBase encoder.
defimpl Poison.Decoder, for: GoogleApi.Apigee.V1.Model.GoogleIamV1AuditLogConfig do
  def decode(value, options) do
    GoogleApi.Apigee.V1.Model.GoogleIamV1AuditLogConfig.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.Apigee.V1.Model.GoogleIamV1AuditLogConfig do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 40.1 | 316 | 0.731172 |
08466689fba361041982077e44d158d6509db4f6 | 114 | ex | Elixir | lib/extripe/country_spec.ex | NarrativeApp/extripe | fc0c475e79fbf925c673ac4a67c27814c92839b6 | [
"MIT"
] | 28 | 2016-02-17T06:13:20.000Z | 2022-01-31T20:25:26.000Z | lib/extripe/country_spec.ex | NarrativeApp/extripe | fc0c475e79fbf925c673ac4a67c27814c92839b6 | [
"MIT"
] | 17 | 2016-02-16T19:08:54.000Z | 2021-07-23T04:10:15.000Z | lib/extripe/country_spec.ex | NarrativeApp/extripe | fc0c475e79fbf925c673ac4a67c27814c92839b6 | [
"MIT"
] | 6 | 2016-02-27T18:05:29.000Z | 2019-08-01T20:27:35.000Z | defmodule Extripe.CountrySpec do
use Extripe.Actions.CRUD, only: [:index, :show], resource: "country_specs"
end
| 28.5 | 76 | 0.763158 |
0846bfe836d7d20fea8fa023e474040216f49449 | 474 | ex | Elixir | lib/k6/template/grpc.ex | dallagi/elixir-k6 | f6b6db55e766019e97378ea28a3ea2e9070c2e08 | [
"MIT"
] | 2 | 2021-11-28T13:13:39.000Z | 2022-01-12T15:36:44.000Z | lib/k6/template/grpc.ex | dallagi/elixir-k6 | f6b6db55e766019e97378ea28a3ea2e9070c2e08 | [
"MIT"
] | 2 | 2022-01-28T19:02:55.000Z | 2022-02-09T08:48:02.000Z | lib/k6/template/grpc.ex | dallagi/elixir-k6 | f6b6db55e766019e97378ea28a3ea2e9070c2e08 | [
"MIT"
] | 1 | 2022-01-28T09:24:45.000Z | 2022-01-28T09:24:45.000Z | defmodule K6.Template.Grpc do
@moduledoc """
Generates a grpc template
"""
use K6.Template
@impl true
def create(filename, opts) do
url = Keyword.get(opts, :url, "localhost:9001")
copy_template(template_path("grpc.js"), filename, url: url)
copy_template(template_path("definitions/hello.proto"), proto_path(filename), [])
end
defp proto_path(filename) do
filename
|> Path.dirname()
|> Path.join("definitions/hello.proto")
end
end
| 23.7 | 85 | 0.685654 |
0846c1031e0772858438c4128895ffee04566197 | 2,070 | ex | Elixir | clients/domains/lib/google_api/domains/v1alpha2/model/ds_record.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/domains/lib/google_api/domains/v1alpha2/model/ds_record.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/domains/lib/google_api/domains/v1alpha2/model/ds_record.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Domains.V1alpha2.Model.DsRecord do
  @moduledoc """
  Defines a Delegation Signer (DS) record, which is needed to enable DNSSEC for a domain. It contains a digest (hash) of a DNSKEY record that must be present in the domain's DNS zone.

  ## Attributes

  *   `algorithm` (*type:* `String.t`, *default:* `nil`) - The algorithm used to generate the referenced DNSKEY.
  *   `digest` (*type:* `String.t`, *default:* `nil`) - The digest generated from the referenced DNSKEY.
  *   `digestType` (*type:* `String.t`, *default:* `nil`) - The hash function used to generate the digest of the referenced DNSKEY.
  *   `keyTag` (*type:* `integer()`, *default:* `nil`) - The key tag of the record. Must be set in range 0 -- 65535.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :algorithm => String.t(),
          :digest => String.t(),
          :digestType => String.t(),
          :keyTag => integer()
        }

  # JSON field mappings consumed by GoogleApi.Gax.ModelBase for (de)serialization.
  field(:algorithm)
  field(:digest)
  field(:digestType)
  field(:keyTag)
end
# Poison protocol implementations: decoding delegates to the generated model's
# decode/2, encoding goes through the shared Gax ModelBase encoder.
defimpl Poison.Decoder, for: GoogleApi.Domains.V1alpha2.Model.DsRecord do
  def decode(value, options) do
    GoogleApi.Domains.V1alpha2.Model.DsRecord.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.Domains.V1alpha2.Model.DsRecord do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 36.964286 | 183 | 0.704831 |
0846ee9208d9b96ad68f57fa9c7b36f36421ccbe | 1,919 | ex | Elixir | examples/typed_gen_server/lib/typed_gen_server/stage1.ex | Fl4m3Ph03n1x/gradient | 60d7d3fe2ebdf68747325c1e852959f8b92fdcee | [
"Apache-2.0"
] | null | null | null | examples/typed_gen_server/lib/typed_gen_server/stage1.ex | Fl4m3Ph03n1x/gradient | 60d7d3fe2ebdf68747325c1e852959f8b92fdcee | [
"Apache-2.0"
] | 1 | 2022-03-06T09:43:52.000Z | 2022-03-06T09:43:52.000Z | examples/typed_gen_server/lib/typed_gen_server/stage1.ex | Fl4m3Ph03n1x/gradient | 60d7d3fe2ebdf68747325c1e852959f8b92fdcee | [
"Apache-2.0"
] | null | null | null | defmodule TypedGenServer.Stage1.Server do
use GenServer
use GradualizerEx.TypeAnnotation
## Start IEx with:
## iex -S mix run --no-start
##
## Then use the following to recheck the file on any change:
## recompile(); GradualizerEx.type_check_file(:code.which( TypedGenServer.Stage1.Server ), [:infer])
## Try switching between the definitions and see what happens
@type message :: Contract.Echo.req() | Contract.Hello.req()
#@type message :: Contract.Echo.req()
#@type message :: {:echo_req, String.t()} | {:hello, String.t()}
@type state :: map()
def start_link() do
GenServer.start_link(__MODULE__, %{})
end
@spec echo(pid(), String.t()) :: String.t()
# @spec echo(pid(), String.t()) :: {:echo_req, String.t()}
def echo(pid, message) do
case annotate_type( GenServer.call(pid, {:echo_req, message}), Contract.Echo.res() ) do
#case call_echo(pid, message) do
## Try changing the pattern or the returned response
{:echo_res, response} -> response
end
end
#@spec call_echo(pid(), String.t()) :: Contract.Echo.res()
#defp call_echo(pid, message) do
# GenServer.call(pid, {:echo_req, message})
#end
@spec hello(pid(), String.t()) :: :ok
def hello(pid, name) do
case GenServer.call(pid, {:hello, name}) |> annotate_type(Contract.Hello.res()) do
:ok -> :ok
end
end
@impl true
def init(state) do
{:ok, state}
end
@impl true
def handle_call(m, from, state) do
{:noreply, handle(m, from, state)}
end
@spec handle(message(), any, any) :: state()
## Try breaking the pattern match, e.g. by changing 'echo_req'
def handle({:echo_req, payload}, from, state) do
GenServer.reply(from, {:echo_res, payload})
state
end
## Try commenting out the following clause
def handle({:hello, name}, from, state) do
IO.puts("Hello, #{name}!")
GenServer.reply(from, :ok)
state
end
end
| 28.220588 | 104 | 0.646691 |
0846f35e7bbabc1fcf3f7d8e6a34b488a7bb142f | 16,555 | exs | Elixir | test/graph/processing_test.exs | gridgentoo/cuda | e758c05674605e4601c568160904bcd5e2e7be94 | [
"MIT"
] | 4 | 2019-09-18T14:25:02.000Z | 2021-12-16T12:09:10.000Z | test/graph/processing_test.exs | gridgentoo/cuda | e758c05674605e4601c568160904bcd5e2e7be94 | [
"MIT"
] | null | null | null | test/graph/processing_test.exs | gridgentoo/cuda | e758c05674605e4601c568160904bcd5e2e7be94 | [
"MIT"
] | 1 | 2021-11-08T11:50:08.000Z | 2021-11-08T11:50:08.000Z | defmodule Cuda.Graph.ProcessingTest do
use ExUnit.Case
alias Cuda.Graph.Processing
alias Cuda.Graph.GraphProto, as: GProto
# alias Cuda.Graph.NodeProto, as: NProto
import Cuda.Test.GraphHelpers
import Processing
# alias Cuda.Test.GraphHelpers.Single
# alias Cuda.Test.GraphHelpers.Double
def traverser(:move, {{%{id: node}, %{id: pin}}, {%{id: to_node}, %{id: to_pin}}}, st) do
{:ok, st ++ [{:move, {node, pin}, {to_node, to_pin}}]}
end
def traverser(action, {%{id: node}, %{id: pin}}, st) do
{:ok, st ++ [{action, {node, pin}}]}
end
describe "dfs/2" do
test "traverses graph" do
# [i]──▶[input (a) output]──▶[o]
{:ok, result} = dfs(graph(:i1_single1_o1), &traverser/3, [])
assert [{:enter, {:g, :i}}, {:move, {:g, :i}, {:a, :input}},
{:enter, {:a, :input}}, {:move, {:a, :output}, {:g, :o}},
{:enter, {:g, :o}}, {:leave, {:g, :o}},
{:leave, {:a, :input}},
{:leave, {:g, :i}}] = result
# [i]─┬─▶[input (a) output]──▶[o1]
# └─▶[input (b) output]──▶[o2]
{:ok, result} = dfs(graph(:i1_single2_o2), &traverser/3, [])
assert [{:enter, {:g, :i}}, {:move, {:g, :i}, {:b, :input}},
{:enter, {:b, :input}}, {:move, {:b, :output}, {:g, :o2}},
{:enter, {:g, :o2}}, {:leave, {:g, :o2}},
{:leave, {:b, :input}},
{:move, {:g, :i}, {:a, :input}},
{:enter, {:a, :input}}, {:move, {:a, :output}, {:g, :o1}},
{:enter, {:g, :o1}}, {:leave, {:g, :o1}},
{:leave, {:a, :input}},
{:leave, {:g, :i}}] = result
# [i1]──▶⎡input1 (a) output1⎤──▶[o1]
# [i2]──▶⎣input2 output2⎦──▶[o2]
{:ok, result} = dfs(graph(:i2_double1_o2), &traverser/3, [])
assert [{:enter, {:g, :i1}}, {:move, {:g, :i1}, {:a, :input1}},
{:enter, {:a, :input1}}, {:move, {:a, :output1}, {:g, :o1}},
{:enter, {:g, :o1}}, {:leave, {:g, :o1}},
{:move, {:a, :output2}, {:g, :o2}},
{:enter, {:g, :o2}}, {:leave, {:g, :o2}},
{:leave, {:a, :input1}},
{:leave, {:g, :i1}},
{:enter, {:g, :i2}}, {:move, {:g, :i2}, {:a, :input2}},
{:enter, {:a, :input2}}, {:leave, {:a, :input2}},
{:leave, {:g, :i2}}] = result
# [i]──▶⎡input1 (a) output1⎤──▶[o]
# ┌─▶⎣input2 output2⎦─┐
# └───────────────────────┘
{:ok, result} = dfs(graph(:i1_double1_o1), &traverser/3, [])
assert [{:enter, {:g, :i}}, {:move, {:g, :i}, {:a, :input1}},
{:enter, {:a, :input1}}, {:move, {:a, :output1}, {:g, :o}},
{:enter, {:g, :o}}, {:leave, {:g, :o}},
{:move, {:a, :output2}, {:a, :input2}},
{:enter, {:a, :input2}}, {:leave, {:a, :input2}},
{:leave, {:a, :input1}},
{:leave, {:g, :i}}] = result
end
test "raises on unconnected pins" do
# [i]──▶[input (a) output]─x─▶[o]
assert_raise(CompileError, fn ->
dfs(graph(:unconnected), &traverser/3, [])
end)
end
end
describe "dfs_reverse" do
test "traverses graph" do
cb = fn
action, {{node1, _pin1}, {node2, _pin2}}, state -> {:ok, state ++ [{action, {node1.id, node2.id}}]}
action, {node, _pin}, state -> {:ok, state ++ [{action, node.id}]}
end
{:ok, result} = dfs_reverse(graph(:longest_chain_test), cb, [])
assert [enter: :graph, move: {:graph, :n}, enter: :n, move: {:n, :m},
enter: :m, move: {:m, :j}, enter: :j, move: {:j, :e}, enter: :e,
move: {:e, :c}, enter: :c, move: {:c, :a}, enter: :a,
move: {:a, :graph}, enter: :graph, leave: :graph, leave: :a,
leave: :c, leave: :e, move: {:j, :f}, enter: :f, move: {:f, :c},
enter: :c, leave: :c, leave: :f, leave: :j, leave: :m,
move: {:n, :k}, enter: :k, move: {:k, :g}, enter: :g, move: {:g, :c},
enter: :c, leave: :c, leave: :g, move: {:k, :h}, enter: :h,
move: {:h, :d}, enter: :d, move: {:d, :a}, enter: :a, leave: :a,
move: {:d, :b}, enter: :b, move: {:b, :graph}, enter: :graph,
leave: :graph, leave: :b, leave: :d, leave: :h, leave: :k, leave: :n,
leave: :graph, enter: :graph, move: {:graph, :o}, enter: :o,
move: {:o, :l}, enter: :l, move: {:l, :i}, enter: :i, move: {:i, :d},
enter: :d, leave: :d, leave: :i, leave: :l, leave: :o,
leave: :graph] = result
# [i]──▶[input (a) output]──▶[o]
{:ok, result} = dfs_reverse(graph(:i1_single1_o1), cb, [])
assert [enter: :g, move: {:g, :a}, enter: :a, move: {:a, :g},
enter: :g, leave: :g, leave: :a, leave: :g] = result
# [i]─┬─▶[input (a) output]──▶[o1]
# └─▶[input (b) output]──▶[o2]
{:ok, result} = dfs_reverse(graph(:i1_single2_o2), cb, [])
assert [enter: :g, move: {:g, :a}, enter: :a, move: {:a, :g}, enter: :g,
leave: :g, leave: :a, leave: :g, enter: :g, move: {:g, :b},
enter: :b, leave: :b, leave: :g] = result
# [i1]──▶⎡input1 (a) output1⎤──▶[o1]
# [i2]──▶⎣input2 output2⎦──▶[o2]
{:ok, result} = dfs_reverse(graph(:i2_double1_o2), cb, [])
assert [enter: :g, move: {:g, :a}, enter: :a, move: {:a, :g}, enter: :g,
leave: :g, move: {:a, :g}, enter: :g, leave: :g, leave: :a,
leave: :g, enter: :g, move: {:g, :a}, enter: :a, leave: :a,
leave: :g] = result
# [i]──▶⎡input1 (a) output1⎤──▶[o]
# ┌─▶⎣input2 output2⎦─┐
# └───────────────────────┘
{:ok, result} = dfs_reverse(graph(:i1_double1_o1), cb, [])
assert [enter: :g, move: {:g, :a}, enter: :a, move: {:a, :g}, enter: :g,
leave: :g, move: {:a, :a}, enter: :a, leave: :a, leave: :a,
leave: :g] = result
end
test "raises on unconnected pins" do
# [i]──▶[input (a) output]─x─▶[o]
assert_raise(CompileError, fn ->
dfs_reverse(graph(:unconnected), fn _,_,st -> {:ok, st} end, [])
end)
end
end
describe "topology_sort/1" do
test "sorts nodes in topology order" do
# [i]──▶[input (a) output]─┬──────────────────────▶[o1]
# └─▶[input (b) output]──▶[o2]
graph = graph(:i1_single1_single1_o2)
assert [%{id: :b}, %{id: :a}] = graph.nodes
{:ok, result} = topology_sort(graph)
assert [{:g, :i}, {:a, :input}, {:g, :o1}, {:b, :input}, {:g, :o2}] = result
end
test "detects loops" do
# [i]──▶⎡input1 (a) output1⎤──▶[o]
# ┌─▶⎣input2 output2⎦─┐
# └───────────────────────┘
assert topology_sort(graph(:i1_double1_o1)) == {:error, :loop}
end
test "raises on unconnected pins" do
# [i]──▶[input (a) output]─x─▶[o]
assert_raise(CompileError, fn -> topology_sort(graph(:unconnected)) end)
end
end
describe "loop?/1" do
test "detects loops" do
# [i]──▶⎡input1 (a) output1⎤──▶[o]
# ┌─▶⎣input2 output2⎦─┐
# └───────────────────────┘
assert loop?(graph(:i1_double1_o1)) == true
end
test "raises on unconnected pins" do
# [i]──▶[input (a) output]─x─▶[o]
assert_raise(CompileError, fn -> loop?(graph(:unconnected)) end)
end
test "returns false for non-loop graphs" do
# [i]──▶[input (a) output]──▶[o]
assert loop?(graph(:i1_single1_o1)) == false
# [i1]──▶⎡input1 (a) output1⎤──▶[o1]
# [i2]──▶⎣input2 output2⎦──▶[o2]
assert loop?(graph(:i2_double1_o2)) == false
end
end
describe "expand/1" do
test "expands graph nodes" do
graph = expand(graph(:i1_graph1_o1))
assert [%{id: {:x, :a}}] = graph.nodes
assert [{{{:x, :a}, :output}, {:__self__, :o}},
{{:__self__, :i}, {{:x, :a}, :input}}] = graph.links
end
end
describe "longest_chain/2" do
defp normalize([]), do: []
defp normalize(result) do
result
|> nodes2ids()
|> Enum.map(&(length(&1)))
|> Enum.sort()
end
test "finds the longest chain in graph" do
assert :longest_chain_test
|> graph()
|> longest_chain(:gpu)
# NOTE: We are temporary disable 2-inputs rule in longest chain
|> normalize() == [2, 2, 3] # [1, 1, 1, 2, 2]
# [i1]──▶⎡input1 (a) output1⎤──▶[o1]
# [i2]──▶⎣input2 output2⎦──▶[o2]
assert :i2_double1_o2
|> graph()
|> longest_chain(:gpu)
|> normalize() == []
# [i]──▶[input (a) output]─┬──────────────────────▶[o1]
# └─▶[input (b) output]──▶[o2]
assert :i1_single1_single1_o2
|> graph()
|> longest_chain(:virtual)
|> normalize() == [2]
# [i1]──▶[input (a) output]──┬──[input (b) output]──▶[input (d) output]──▶[o1]
# └─▶[input (c) output]───────────────────────▶[o2]
assert :i1_single4_o2
|> graph()
|> longest_chain(:virtual)
|> normalize() == [1, 3]
# [i1]──▶[input (a) output]──┬──[input (b) output]───────────────────────▶[o1]
# └─▶[input (c) output]──▶[input (d) output]──▶[o2]
assert :i1_single4_o2_inverse
|> graph()
|> longest_chain(:virtual)
|> normalize() == [1, 3]
# ┌───▶[input (a) output]───▶[input (c) output]───▶[o1]
# [i1]─│
# └───▶[input (b) output]─────────────────────────▶[o2]
assert :i1_single3_o2
|> graph()
|> longest_chain(:virtual)
|> normalize() == [1, 2]
# ┌───▶[input (a) output]─────────────────────────▶[o1]
# [i1]─│
# └───▶[input (b) output]───▶[input (c) output]───▶[o2]
assert :i1_single3_o2_inverse
|> graph()
|> longest_chain(:virtual)
|> normalize() == [1, 2]
# [i1]─────▶[input (a) output]─────────────────────────▶[o1]
# [ (b) producer]───▶[input (c) output]───▶[o2]
assert :i1_producer1_single2_o2
|> graph()
|> longest_chain(:virtual)
|> normalize() == [1, 1]
end
test "detects loops" do
# [i]──▶⎡input1 (a) output1⎤──▶[o]
# ┌─▶⎣input2 output2⎦─┐
# └───────────────────────┘
assert_raise(CompileError, fn -> longest_chain(graph(:i1_double1_o1), :virtual) end)
end
test "raises on unconnected pins" do
# [i]──▶[input (a) output]─x─▶[o]
assert_raise(CompileError, fn -> longest_chain(graph(:unconnected), :virtual) end)
end
end
describe "move/3" do
test "node moved into empty nested graph" do
# [i1]──▶[input (a) output]──┬──[input (b) output]──▶[input (d) output]──▶[o1]
# └─▶[input (c) output]───────────────────────▶[o2]
graph = nested_graph(:i1_single4_o2)
graph = move(graph, :nested, :b)
assert graph |> GProto.node(:nested) |> GProto.node(:b) != nil
end
test "pins of the moved node are copied into nested graph" do
# [i1]──▶[input (a) output]──┬──[input (b) output]──▶[input (d) output]──▶[o1]
# └─▶[input (c) output]───────────────────────▶[o2]
graph = nested_graph(:i1_single4_o2)
b = GProto.node(graph, :b)
graph = move(graph, :nested, :b)
nested = GProto.node(graph, :nested)
assert length(b.pins) == length(nested.pins)
end
test "checks connection between moved node and its neighbours" do
# [i1]──▶[input (a) output]──┬──[input (b) output]──▶[input (d) output]──▶[o1]
# └─▶[input (c) output]───────────────────────▶[o2]
graph = nested_graph(:i1_single4_o2)
graph = move(graph, :nested, :b)
assert connected?(graph, :a, :b)
assert connected?(graph, :b, :d)
end
test "when two nodes have shared link, the first already in nested graph, and the second moves there, shared pin of nested graph will be removed" do
# [i1]──▶[input (a) output]──┬──[input (b) output]──▶[input (d) output]──▶[o1]
# └─▶[input (c) output]───────────────────────▶[o2]
graph = nested_graph(:i1_single4_o2)
graph = move(graph, :nested, :d)
old_pin = graph |> GProto.node(:nested)
old_pin = old_pin.pins |> Enum.find(&(&1.type == :input))
graph = move(graph, :nested, :b)
new_pin = graph |> GProto.node(:nested)
new_pin = new_pin.pins |> Enum.find(&(&1.type == :input))
assert old_pin.id != new_pin.id
graph = nested_graph(:i1_single4_o2)
graph = move(graph, :nested, :b)
old_pin = graph |> GProto.node(:nested)
old_pin = old_pin.pins |> Enum.find(&(&1.type == :output))
graph = move(graph, :nested, :d)
new_pin = graph |> GProto.node(:nested)
new_pin = new_pin.pins |> Enum.find(&(&1.type == :output))
assert old_pin.id != new_pin.id
end
test "when two nodes have shared link, the first already in nested graph, and the second moves there, they have direct shared link into nested graph" do
# [i1]──▶[input (a) output]──┬──[input (b) output]──▶[input (d) output]──▶[o1]
# └─▶[input (c) output]───────────────────────▶[o2]
graph = nested_graph(:i1_single4_o2)
graph = graph
|> move(:nested, :d)
|> move(:nested, :b)
n = GProto.node(graph, :nested)
assert Enum.any?(n.links, fn
{{:b, _}, {:d, _}} -> true
_ -> false
end)
end
test "save nested graph pin when more than one node connected to it, and one of it moved to nested graph" do
# [i1]──▶[input (a) output]──┬──[input (b) output]──▶[input (d) output]──▶[o1]
# └─▶[input (c) output]───────────────────────▶[o2]
graph = nested_graph(:i1_single4_o2)
graph = move(graph, :nested, :a)
pin = graph |> GProto.node(:nested)
pin = pin.pins |> Enum.find(&(&1.type == :output))
graph = move(graph, :nested, :b)
n = graph |> GProto.node(:nested)
n = n.pins
assert Enum.any?(n, &(&1.id == pin.id))
end
test "Node and nested graph have links from the same output, when node moved to the nested graph no pins created for this link, but redirected to existing one" do
graph = nested_graph(:network_test)
graph = move(graph, :nested, [:conv, :fc, :error, :back_fc])
pin = Enum.reduce(graph.links, nil, fn
{{:__self__, :input}, {:nested, pin}}, _ -> pin
_, acc -> acc
end)
graph = move(graph, :nested, :back_conv)
nested = Enum.find(graph.nodes, nil, & &1.id == :nested)
assert not is_nil(Enum.find(nested.links, fn
{{:__self__, ^pin}, {:back_conv, :input1}} -> true
_ -> false
end))
end
test "general test" do
# [i1]──▶[input (a) output]──▶[input (b) output]──▶[input (c) output]──▶[o1]
graph = nested_graph(:i1_single3_o1)
graph = move(graph, :nested, [:a, :b, :c])
n = GProto.node(graph, :nested)
assert Enum.any?(n.nodes, &(&1.id == :a))
assert Enum.any?(n.nodes, &(&1.id == :b))
assert Enum.any?(n.nodes, &(&1.id == :c))
end
end
describe "precompile_wrap/2" do
test "wrap graph gpu nodes into computation_graph" do
graph = graph(:longest_chain_test)
chains = graph
|> longest_chain()
|> nodes2ids()
|> sort_node_ids()
graph = precompile_wrap(graph)
nodes = graph.nodes
|> Enum.reduce([], fn
%{type: :computation_graph, nodes: nodes}, acc ->
[nodes2ids(nodes) | acc]
_, acc ->
acc
end)
|> sort_node_ids()
assert nodes == chains
end
end
describe "flat/1" do
test "Flattens graph with one graph node" do
g = graph(:i1_single3_o1)
flatten = g
|> precompile_wrap(:virtual)
|> Processing.flat()
assert g.id == flatten.id
assert g.links == flatten.links
assert g.nodes == flatten.nodes
end
test "Graph consists of more than one node" do
g = graph(:i1_single3_o1)
g = %{g | nodes: List.update_at(g.nodes, 0, &(%{&1 | type: :gpu}))}
|> precompile_wrap(:virtual)
flatten = Processing.flat(g)
assert g == flatten
end
end
end
| 39.51074 | 166 | 0.47756 |
0847007171c1947be54a684495903136a1cdc5fa | 3,989 | ex | Elixir | lib/hello/cms.ex | noahjames404/elixir-pheonix-framework | c1587709d67ef7c9bad247d4fe4ec80e23e4041b | [
"MIT"
] | null | null | null | lib/hello/cms.ex | noahjames404/elixir-pheonix-framework | c1587709d67ef7c9bad247d4fe4ec80e23e4041b | [
"MIT"
] | null | null | null | lib/hello/cms.ex | noahjames404/elixir-pheonix-framework | c1587709d67ef7c9bad247d4fe4ec80e23e4041b | [
"MIT"
] | null | null | null | defmodule Hello.CMS do
@moduledoc """
The CMS context.
"""
import Ecto.Query, warn: false
alias Hello.Repo
alias Hello.CMS.{Page, Author}
alias Hello.Accounts
@doc """
Returns the list of pages with each page's author (and the author's
user and credential) preloaded.

## Examples

    iex> list_pages()
    [%Page{}, ...]

"""
def list_pages do
  # A stray `Repo.all(Page)` used to run here as a dead statement, issuing
  # a redundant database query whose result was discarded; it was removed.
  Page
  |> Repo.all()
  |> Repo.preload(author: [user: :credential])
end
@doc """
Fetches a single page by id with its author chain
(author -> user -> credential) preloaded.

Raises `Ecto.NoResultsError` if the Page does not exist.

## Examples

    iex> get_page!(123)
    %Page{}

    iex> get_page!(456)
    ** (Ecto.NoResultsError)

"""
def get_page!(id) do
  page = Repo.get!(Page, id)
  Repo.preload(page, author: [user: :credential])
end
@doc """
Creates a page owned by the given author.

## Examples

    iex> create_page(author, %{field: value})
    {:ok, %Page{}}

    iex> create_page(author, %{field: bad_value})
    {:error, %Ecto.Changeset{}}

"""
def create_page(%Author{} = author, attrs \\ %{}) do
  %Page{}
  |> Page.changeset(attrs)
  # Attach the page to the given author, overriding any author_id change
  # the cast might have produced from attrs.
  |> Ecto.Changeset.put_change(:author_id, author.id)
  |> Repo.insert()
end
# Idempotently ensures an Author row exists for the given user and returns it.
#
# The insert carries a unique constraint on :user_id: it either succeeds
# (new author) or fails the constraint, in which case the pre-existing
# author is looked up instead. This makes the call safe to run repeatedly
# and under concurrent invocations for the same user.
def ensure_author_exist(%Accounts.User{} = user) do
  %Author{user_id: user.id}
  |> Ecto.Changeset.change()
  |> Ecto.Changeset.unique_constraint(:user_id)
  |> Repo.insert()
  |> handle_existing_author()
end

# Insert succeeded — unwrap and return the freshly created author.
defp handle_existing_author({:ok, author}), do: author

# Insert failed (the only error source here is the unique constraint), so an
# author already exists for this user_id; fetch it via the changeset data.
defp handle_existing_author({:error, changeset}) do
  Repo.get_by!(Author, user_id: changeset.data.user_id)
end
@doc """
Updates a page with the given attributes.

## Examples

    iex> update_page(page, %{field: new_value})
    {:ok, %Page{}}

    iex> update_page(page, %{field: bad_value})
    {:error, %Ecto.Changeset{}}

"""
def update_page(%Page{} = page, attrs) do
  Repo.update(Page.changeset(page, attrs))
end

@doc """
Deletes a page.

## Examples

    iex> delete_page(page)
    {:ok, %Page{}}

    iex> delete_page(page)
    {:error, %Ecto.Changeset{}}

"""
def delete_page(%Page{} = page), do: Repo.delete(page)

@doc """
Returns an `%Ecto.Changeset{}` for tracking page changes.

## Examples

    iex> change_page(page)
    %Ecto.Changeset{data: %Page{}}

"""
def change_page(%Page{} = page, attrs \\ %{}), do: Page.changeset(page, attrs)
# Redundant: `Author` is already aliased above via `alias Hello.CMS.{Page, Author}`.
alias Hello.CMS.Author
@doc """
Returns every author. Associations are not preloaded here; use
`get_author!/1` when the user/credential chain is needed.

## Examples

    iex> list_authors()
    [%Author{}, ...]

"""
def list_authors, do: Repo.all(Author)

@doc """
Fetches a single author with their user and credential preloaded.

Raises `Ecto.NoResultsError` if the Author does not exist.

## Examples

    iex> get_author!(123)
    %Author{}

    iex> get_author!(456)
    ** (Ecto.NoResultsError)

"""
def get_author!(id) do
  author = Repo.get!(Author, id)
  Repo.preload(author, user: :credential)
end
@doc """
Creates an author from the given attributes.

## Examples

    iex> create_author(%{field: value})
    {:ok, %Author{}}

    iex> create_author(%{field: bad_value})
    {:error, %Ecto.Changeset{}}

"""
def create_author(attrs \\ %{}) do
  Repo.insert(Author.changeset(%Author{}, attrs))
end

@doc """
Updates an author with the given attributes.

## Examples

    iex> update_author(author, %{field: new_value})
    {:ok, %Author{}}

    iex> update_author(author, %{field: bad_value})
    {:error, %Ecto.Changeset{}}

"""
def update_author(%Author{} = author, attrs) do
  Repo.update(Author.changeset(author, attrs))
end

@doc """
Deletes an author.

## Examples

    iex> delete_author(author)
    {:ok, %Author{}}

    iex> delete_author(author)
    {:error, %Ecto.Changeset{}}

"""
def delete_author(%Author{} = author), do: Repo.delete(author)

@doc """
Returns an `%Ecto.Changeset{}` for tracking author changes.

## Examples

    iex> change_author(author)
    %Ecto.Changeset{data: %Author{}}

"""
def change_author(%Author{} = author, attrs \\ %{}), do: Author.changeset(author, attrs)
end
| 17.650442 | 61 | 0.585109 |
084722c0b2d253b1ced1a44c53c460ab8b1d71a9 | 1,404 | ex | Elixir | apps/bytepack_web/lib/bytepack_web/views/error_helpers.ex | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | 313 | 2020-12-03T17:26:24.000Z | 2022-03-18T09:05:14.000Z | apps/bytepack_web/lib/bytepack_web/views/error_helpers.ex | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | null | null | null | apps/bytepack_web/lib/bytepack_web/views/error_helpers.ex | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | 57 | 2020-12-03T17:41:53.000Z | 2022-03-17T17:28:16.000Z | defmodule BytepackWeb.ErrorHelpers do
@moduledoc """
Functions for generating error related things
"""
use Phoenix.HTML
@doc """
Traverses a changeset and translates every error within.

Returns a map keyed by field; each value is a list of translated messages,
or a nested map for errors coming from associations.
"""
def error_map(changeset) do
  Ecto.Changeset.traverse_errors(changeset, &translate_error/1)
end
@doc """
Like `error_map/1`, but keeps only the first error message per field.

Keys are stringified field names. Errors from associations and similar
nested structures are discarded.
"""
def error_map_unwrapped(changeset) do
  changeset |> error_map() |> unpack_messages()
end

# Keep just the head message of each field's error list; entries whose value
# is not a non-empty list (e.g. nested association maps) are dropped.
defp unpack_messages(errors) do
  errors
  |> Enum.flat_map(fn
    {field, [first_message | _rest]} -> [{Atom.to_string(field), first_message}]
    _other -> []
  end)
  |> Map.new()
end
@doc """
Generates `<span>` tags for inlined form input errors on the given field.
"""
def error_tag(form, field, opts \\ []) do
  # Caller-supplied attrs override the default class; computed once since it
  # is identical for every error span.
  span_opts = Keyword.merge([class: "invalid-feedback"], opts)

  form.errors
  |> Keyword.get_values(field)
  |> Enum.map(fn error -> content_tag(:span, translate_error(error), span_opts) end)
end
# Translates an error message using gettext; picks the pluralised form when
# the error options carry a :count.
defp translate_error({msg, opts}) do
  count = opts[:count]

  if count do
    Gettext.dngettext(BytepackWeb.Gettext, "errors", msg, msg, count, opts)
  else
    Gettext.dgettext(BytepackWeb.Gettext, "errors", msg, opts)
  end
end
end
| 27 | 98 | 0.688034 |
08473d67f5f7a0bd90a25225db9422b35552f9de | 3,484 | exs | Elixir | apps/fz_http/mix.exs | bhardwajRahul/firezone | 836bfda9e28350443f2093f810872f2bee7c6cdc | [
"Apache-2.0"
] | null | null | null | apps/fz_http/mix.exs | bhardwajRahul/firezone | 836bfda9e28350443f2093f810872f2bee7c6cdc | [
"Apache-2.0"
] | null | null | null | apps/fz_http/mix.exs | bhardwajRahul/firezone | 836bfda9e28350443f2093f810872f2bee7c6cdc | [
"Apache-2.0"
] | null | null | null | defmodule FzHttp.MixProject do
use Mix.Project
# Resolves the release version from the environment, falling back to a
# placeholder when VERSION is not set.
def version do
  # Use dummy version for dev and test
  System.get_env("VERSION", "0.0.0+git.0.deadbeef")
end

def project do
  [
    app: :fz_http,
    version: version(),
    # Umbrella layout: build artifacts, config, deps and lockfile live at
    # the repository root.
    build_path: "../../_build",
    config_path: "../../config/config.exs",
    deps_path: "../../deps",
    lockfile: "../../mix.lock",
    elixir: "~> 1.12",
    elixirc_paths: elixirc_paths(Mix.env()),
    compilers: [:phoenix, :gettext] ++ Mix.compilers(),
    start_permanent: Mix.env() == :prod,
    # Coverage is collected with ExCoveralls; the coveralls.* tasks are
    # pinned to the :test environment below.
    test_coverage: [tool: ExCoveralls],
    preferred_cli_env: [
      coveralls: :test,
      "coveralls.detail": :test,
      "coveralls.post": :test,
      "coveralls.html": :test
    ],
    aliases: aliases(),
    deps: deps()
  ]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
  [
    mod: {FzHttp.Application, []},
    # Applications started before this one; the ueberauth strategy apps are
    # listed explicitly so they are booted alongside :logger/:runtime_tools.
    extra_applications: [
      :logger,
      :runtime_tools,
      :ueberauth_okta,
      :ueberauth_identity
    ],
    registered: [:fz_http_server]
  ]
end

# Specifies which paths to compile per environment.
# Test builds additionally compile the shared helpers in test/support.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
  [
    # Shared code from the sibling umbrella app.
    {:fz_common, in_umbrella: true},
    {:decimal, "~> 2.0"},
    {:phoenix, "~> 1.6"},
    {:cloak, "~> 1.1"},
    {:cloak_ecto, "~> 1.2"},
    # Test-only tooling.
    {:excoveralls, "~> 0.14", only: :test},
    {:floki, ">= 0.0.0", only: :test},
    {:mox, "~> 1.0.1", only: :test},
    {:guardian, "~> 2.0"},
    {:guardian_db, "~> 2.0"},
    # Authentication: OIDC plus ueberauth strategies.
    {:openid_connect, "~> 0.2.2"},
    {:ueberauth, "~> 0.7"},
    {:ueberauth_google, "~> 0.10"},
    {:ueberauth_okta, "~> 0.2"},
    {:ueberauth_identity, "~> 0.4"},
    {:httpoison, "~> 1.8"},
    {:argon2_elixir, "~> 2.0"},
    {:phoenix_pubsub, "~> 2.0"},
    {:phoenix_ecto, "~> 4.4"},
    {:ecto_sql, "~> 3.7"},
    {:ecto_network, "~> 1.3"},
    # Rate limiting.
    {:hammer, "~> 6.0"},
    {:hammer_plug, "~> 2.1"},
    {:inflex, "~> 2.1"},
    {:plug, "~> 1.13"},
    {:postgrex, "~> 0.15.10"},
    {:phoenix_html, "~> 3.1.0"},
    {:phoenix_live_reload, "~> 1.3", only: :dev},
    {:phoenix_live_view, "~> 0.17.5"},
    {:gettext, "~> 0.18"},
    {:jason, "~> 1.2"},
    {:phoenix_swoosh, "~> 1.0"},
    {:gen_smtp, "~> 1.0"},
    # XXX: Change this when hex package is updated
    {:cidr, github: "firezone/cidr-elixir"},
    {:telemetry, "~> 1.0"},
    {:plug_cowboy, "~> 2.5"},
    {:credo, "~> 1.5", only: [:dev, :test], runtime: false}
  ]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
#     $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
  [
    "ecto.seed": "run priv/repo/seeds.exs",
    "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
    "ecto.reset": ["ecto.drop", "ecto.setup"],
    # `mix test` prepares a created, migrated database before the suite runs.
    test: [
      "ecto.create --quiet",
      "ecto.migrate",
      "test"
    ],
    "assets.compile": &compile_assets/1
  ]
end

# Runs webpack in development mode over the assets directory.
# NOTE(review): the command's exit status is discarded — a failed asset
# build will not fail the mix task. Confirm whether that is intended.
defp compile_assets(_) do
  Mix.shell().cmd("cd assets && ./node_modules/.bin/webpack --mode development", quiet: false)
end
end
| 28.325203 | 96 | 0.53186 |
08474df6652301baac42790d4e5030826381474f | 2,209 | ex | Elixir | kousa/lib/beef/room_permissions.ex | LeonardSSH/dogehouse | 584055ad407bc37fa35cdf36ebb271622e29d436 | [
"MIT"
] | 9 | 2021-03-17T03:56:18.000Z | 2021-09-24T22:45:14.000Z | kousa/lib/beef/room_permissions.ex | ActuallyTomas/dogehouse | 8c3d2cd1d7e99e173f0658759467a391c4a90c4e | [
"MIT"
] | 12 | 2021-07-06T12:51:13.000Z | 2022-03-16T12:38:18.000Z | kousa/lib/beef/room_permissions.ex | ActuallyTomas/dogehouse | 8c3d2cd1d7e99e173f0658759467a391c4a90c4e | [
"MIT"
] | 4 | 2021-07-15T20:33:50.000Z | 2022-03-27T12:46:47.000Z | defmodule Beef.RoomPermissions do
import Ecto.Query
# Inserts a new room-permission row; on conflict with an existing row it is
# a silent no-op.
def insert(data) do
  changeset = Beef.Schemas.RoomPermission.insert_changeset(%Beef.Schemas.RoomPermission{}, data)
  Beef.Repo.insert(changeset, on_conflict: :nothing)
end

# Inserts a row or, on (userId, roomId) conflict, applies the `set` updates.
# `returning` controls whether the database sends the resulting row back.
def upsert(data, set, returning \\ true) do
  changeset = Beef.Schemas.RoomPermission.insert_changeset(%Beef.Schemas.RoomPermission{}, data)

  Beef.Repo.insert(changeset,
    on_conflict: [set: set],
    conflict_target: [:userId, :roomId],
    returning: returning
  )
end
# True when the user's permission row in the room has the given boolean
# column set to true. Issues the same single-row query as before, just with
# the flag column selected dynamically via field/2.
defp flag_set?(user_id, room_id, flag) do
  query =
    from(rp in Beef.Schemas.RoomPermission,
      where: rp.roomId == ^room_id and rp.userId == ^user_id and field(rp, ^flag) == true
    )

  not is_nil(Beef.Repo.one(query))
end

# True if the user has been granted speaker status in the room.
def speaker?(user_id, room_id), do: flag_set?(user_id, room_id, :isSpeaker)

# A listener is simply anyone who is not a speaker.
def listener?(user_id, room_id), do: not speaker?(user_id, room_id)

# True if the user is a moderator of the room.
def mod?(user_id, room_id), do: flag_set?(user_id, room_id, :isMod)

# True if the user has a pending ask-to-speak request in the room.
def asked_to_speak?(user_id, room_id), do: flag_set?(user_id, room_id, :askedToSpeak)
# Fetches the permission row for a user in a room, or nil when none exists.
def get(user_id, room_id) do
  query =
    from(rp in Beef.Schemas.RoomPermission,
      where: rp.userId == ^user_id and rp.roomId == ^room_id,
      limit: 1
    )

  Beef.Repo.one(query)
end
# Marks the user as having requested speaking rights in the room.
def ask_to_speak(user_id, room_id) do
  upsert(%{roomId: room_id, userId: user_id, askedToSpeak: true}, askedToSpeak: true)
end

# Grants or revokes speaker status; `returning` (default false) controls
# whether the updated row is sent back.
def set_speaker(user_id, room_id, speaker?, returning \\ false) do
  upsert(
    %{roomId: room_id, userId: user_id, isSpeaker: speaker?},
    [isSpeaker: speaker?],
    returning
  )
end

# Grants or revokes moderator status; the updated row is never returned.
def set_is_mod(user_id, room_id, is_mod) do
  upsert(
    %{roomId: room_id, userId: user_id, isMod: is_mod},
    [isMod: is_mod],
    false
  )
end

# Demotes the user to a plain listener: clears both the speaker flag and any
# pending ask-to-speak request.
def make_listener(user_id, room_id) do
  upsert(
    %{roomId: room_id, userId: user_id, isSpeaker: false, askedToSpeak: false},
    [isSpeaker: false, askedToSpeak: false],
    false
  )
end
end
| 24.544444 | 92 | 0.627886 |
084769f3322c2835ecded5055ccba2aa2a84a691 | 2,205 | ex | Elixir | lib/scenic/primitive/group.ex | mikeover/scenic | 4b61c4996ed2d06b8cdf94f88c8a0522160e10b5 | [
"Apache-2.0"
] | null | null | null | lib/scenic/primitive/group.ex | mikeover/scenic | 4b61c4996ed2d06b8cdf94f88c8a0522160e10b5 | [
"Apache-2.0"
] | null | null | null | lib/scenic/primitive/group.ex | mikeover/scenic | 4b61c4996ed2d06b8cdf94f88c8a0522160e10b5 | [
"Apache-2.0"
] | null | null | null | #
# Created by Boyd Multerer on 5/6/17.
# Copyright © 2017 Kry10 Industries. All rights reserved.
#
defmodule Scenic.Primitive.Group do
  @moduledoc """
  A primitive whose data is a list of uids referring to other elements in
  the graph, allowing them to be addressed as a single unit.
  """

  use Scenic.Primitive
  alias Scenic.Primitive
  # alias Scenic.Graph

  # import IEx

  # ============================================================================
  # data verification and serialization

  # --------------------------------------------------------
  # nil data is treated as an empty group.
  def build(nil, opts), do: build([], opts)

  def build(ids, opts) do
    verify!(ids)
    Primitive.build(__MODULE__, ids, opts)
  end

  # --------------------------------------------------------
  # Human-readable, ANSI-colored error text used when verification fails.
  def info(data),
    do: """
      #{IO.ANSI.red()}#{__MODULE__} data must be a list of valid uids of other elements in the graph.
      #{IO.ANSI.yellow()}Received: #{inspect(data)}
      #{IO.ANSI.default_color()}
    """

  # --------------------------------------------------------
  # Valid data is a list of integers; anything else yields :invalid_data.
  # Note: only the integer-ness of each uid is checked here, not whether the
  # uid actually refers to an element present in the graph.
  def verify(ids) when is_list(ids) do
    case Enum.all?(ids, fn id -> is_integer(id) end) do
      true -> {:ok, ids}
      false -> :invalid_data
    end
  end

  def verify(_), do: :invalid_data

  # ============================================================================
  # filter and gather styles

  # A group accepts any style (:all) and passes style maps through untouched.
  def valid_styles(), do: [:all]

  def filter_styles(styles) when is_map(styles), do: styles

  # ============================================================================
  # apis to manipulate the list of child ids

  # ----------------------------------------------------------------------------
  # Inserts `uid` into the child uid list at `index`.
  def insert_at(%Primitive{module: __MODULE__, data: uid_list} = p, index, uid) do
    Map.put(
      p,
      :data,
      List.insert_at(uid_list, index, uid)
    )
  end

  # ----------------------------------------------------------------------------
  # Removes every occurrence of `uid` from the child uid list.
  def delete(%Primitive{module: __MODULE__, data: uid_list} = p, uid) do
    Map.put(
      p,
      :data,
      Enum.reject(uid_list, fn xid -> xid == uid end)
    )
  end

  # ----------------------------------------------------------------------------
  # Shifts every child uid by `offset` — presumably used when splicing graphs
  # together; confirm against callers.
  def increment(%Primitive{module: __MODULE__, data: uid_list} = p, offset) do
    Map.put(
      p,
      :data,
      Enum.map(uid_list, fn xid -> xid + offset end)
    )
  end
end
| 28.269231 | 101 | 0.437188 |
084775cbcc20aa69d47d2aa963d4b10892878130 | 69 | ex | Elixir | web/views/api_view.ex | kentcdodds/changelog.com | e1c0d7ee5d47dc83dd443d623adb0f07e4acb28d | [
"MIT"
] | null | null | null | web/views/api_view.ex | kentcdodds/changelog.com | e1c0d7ee5d47dc83dd443d623adb0f07e4acb28d | [
"MIT"
] | null | null | null | web/views/api_view.ex | kentcdodds/changelog.com | e1c0d7ee5d47dc83dd443d623adb0f07e4acb28d | [
"MIT"
] | null | null | null | defmodule Changelog.ApiView do
# No view logic of its own here: everything comes from the :public_view
# definition inside Changelog.Web.
use Changelog.Web, :public_view
end
| 17.25 | 33 | 0.811594 |
08477d3d43cda28ff619f987fc740dd8004dc9ea | 3,266 | exs | Elixir | test/integration/integration_test.exs | sevenmind/KaufmannEx | 44225125946921850316c272db53175bb1658fb7 | [
"MIT"
] | 84 | 2018-03-20T08:19:10.000Z | 2022-01-30T07:40:56.000Z | test/integration/integration_test.exs | sevenmind/KaufmannEx | 44225125946921850316c272db53175bb1658fb7 | [
"MIT"
] | 23 | 2018-03-29T15:15:56.000Z | 2019-12-04T14:53:57.000Z | test/integration/integration_test.exs | sevenmind/KaufmannEx | 44225125946921850316c272db53175bb1658fb7 | [
"MIT"
] | 8 | 2018-07-03T18:18:27.000Z | 2022-03-08T14:04:09.000Z | defmodule IntegrationTest.SubscriberListener do
use KaufmannEx.EventHandler
require Logger
# Publishes a :"command.test" event whose payload carries the caller's pid
# (serialized via pid_to_binary/1) joined with "::" to an optional `noise`
# suffix; given_event/1 below decodes the pid and messages it back.
def publish(pid, noise \\ "") do
  message_body = %{
    payload: %{message: pid_to_binary(pid) <> "::" <> noise},
    meta: %{
      message_id: Nanoid.generate(),
      emitter_service: KaufmannEx.Config.service_name(),
      emitter_service_id: KaufmannEx.Config.service_id(),
      callback_id: nil,
      message_name: "command.test",
      timestamp: DateTime.to_string(DateTime.utc_now()),
      callback_topic: nil
    }
  }

  # The match asserts that publishing succeeded.
  :ok = KaufmannEx.Publisher.publish(:"command.test", message_body)
end
# Handles the round-trip test event: decodes the caller's pid from the
# payload and sends it a {:hello, pid_string} message. Returns [] — no
# follow-up events to emit.
#
# Fixes: the `= event` binding was unused (compiler warning) and has been
# dropped; the split head is taken with a pattern match instead of
# Enum.at/2; "Uhandled" typo in the catch-all log message corrected.
def given_event(%{name: :"command.test", payload: %{message: encoded}}) do
  # Payload has the shape "#PID<...>::noise" — keep only the pid part.
  [pid_string | _noise] = String.split(encoded, "::")

  pid_string
  |> pid_from_string()
  |> send({:hello, pid_string})

  []
end

# Catch-all: log and ignore anything else.
# NOTE(review): this clause returns the Logger result (:ok) rather than []
# like the clause above — confirm the KaufmannEx.EventHandler contract
# tolerates that before relying on it.
def given_event(other) do
  Logger.debug("Unhandled event: " <> inspect(other))
end
# Publishes a test event addressed to self() and blocks until the consumer
# echoes it back, proving the Kafka round trip works end to end.
# Returns :ok on success or {:error, :timeout} after 2 seconds.
#
# Fixes: the received `pid` binding was unused (compiler warning); the
# success branch now returns :ok explicitly (Logger.debug/1 also returns
# :ok, so the value is unchanged).
def publish_and_wait do
  :ok = publish(self())

  receive do
    {:hello, _pid} ->
      Logger.debug("Publish Callback received")
      :ok
  after
    2000 ->
      {:error, :timeout}
  end
end
# Thanks to https://github.com/koudelka/visualixir/blob/master/lib/visualixir/tracer.ex
# Renders a pid as the conventional "#PID<x.y.z>" string form.
defp pid_to_binary(pid) when is_pid(pid) do
  charlist = :erlang.pid_to_list(pid)
  "#PID" <> :erlang.list_to_binary(charlist)
end

# Inverse of pid_to_binary/1: strips the "#PID" prefix via the head match
# and rebuilds the pid from the remaining "<x.y.z>" characters.
def pid_from_string("#PID" <> rest) do
  rest
  |> :erlang.binary_to_list()
  |> :erlang.list_to_pid()
end
end
defmodule IntegrationTest do
  use ExUnit.Case

  # Requires a reachable Kafka broker and schema registry; excluded unless
  # integration tests are explicitly enabled.
  @moduletag :integration

  setup_all do
    # Point the schema-registry client at the host from the environment.
    Application.put_env(
      :kaufmann_ex,
      :schema_registry_uri,
      System.get_env("SCHEMA_REGISTRY_PATH")
    )

    # Register the sample application's event schemas.
    KaufmannEx.ReleaseTasks.migrate_schemas("sample_application/priv/schemas/")

    # Route consumed events to the test listener module.
    Application.put_env(
      :kaufmann_ex,
      :event_handler_mod,
      IntegrationTest.SubscriberListener
    )

    # Ensure topic is defined, raise error if not
    KafkaEx.metadata(topic: "rapids")

    # Start supervision tree
    {:ok, kaufmann_supervisor} = start_supervised(KaufmannEx.Supervisor)

    # Ensure subscriber is working
    IntegrationTest.SubscriberListener.publish_and_wait()

    [kaufmann_supervisor: kaufmann_supervisor]
  end

  # this test fails b/c starting a kafka consumergroup can take >20 seconds
  # i.e. we have to wait for kafka to trigger a consumer reballance
  test "publish and consume" do
    assert :ok = IntegrationTest.SubscriberListener.publish(self())
    # NOTE(review): assert_receive uses its default (short) timeout here —
    # see the flakiness comment above; an explicit longer timeout may help.
    assert_receive {:hello, _}
  end

  describe "GenConsumer handles timeout" do
    test "inspection of supervision tree", %{kaufmann_supervisor: kaufmann_supervisor} do
      # Locate the KafkaEx.ConsumerGroup supervisor among our children.
      {KafkaEx.ConsumerGroup, k_consumer_group, :supervisor, [KafkaEx.ConsumerGroup]} =
        kaufmann_supervisor
        |> Supervisor.which_children()
        |> Enum.find(fn
          {KafkaEx.ConsumerGroup, _, _, _} -> true
          _ -> false
        end)

      # The consumer group must supervise exactly its Manager worker.
      assert [
               {KafkaEx.ConsumerGroup.Manager, _, :worker, [KafkaEx.ConsumerGroup.Manager]}
             ] = Supervisor.which_children(k_consumer_group)

      assert %{active: _, specs: _, supervisors: _, workers: _} =
               Supervisor.count_children(kaufmann_supervisor)
    end
  end
end
| 26.770492 | 91 | 0.659522 |
0847b185b3c87ca3c533db0523594c460ad8932b | 2,316 | exs | Elixir | test/bbb_lti_web/controllers/api/clients_controller_test.exs | IBM/bbb_l | 01d2a2ad9a7056b02f6f0d85f17949126256c549 | [
"Apache-2.0"
] | null | null | null | test/bbb_lti_web/controllers/api/clients_controller_test.exs | IBM/bbb_l | 01d2a2ad9a7056b02f6f0d85f17949126256c549 | [
"Apache-2.0"
] | null | null | null | test/bbb_lti_web/controllers/api/clients_controller_test.exs | IBM/bbb_l | 01d2a2ad9a7056b02f6f0d85f17949126256c549 | [
"Apache-2.0"
] | null | null | null | defmodule BbbLtiWeb.Api.ClientsControllerTest do
use BbbLtiWeb.ConnCase
alias BbbLti.Clients
# Well-formed request parameters reused across the tests below.
@client_params %{
  clientId: "@portal/some-id",
  clientSecret: "some-random-string"
}

describe "basic auth:" do
  test "fails when missing basic auth", %{conn: conn} do
    conn = post(conn, Routes.api_clients_path(conn, :get_or_create))

    assert response(conn, 401) =~ "Unauthorized"
  end

  # NOTE(review): the :authenticate_api tags are presumably consumed by the
  # ConnCase setup to attach basic-auth headers — confirm there.
  @tag authenticate_api: %{username: "foo", password: "bar"}
  test "fails when basic auth invalid", %{conn: conn} do
    conn = post(conn, Routes.api_clients_path(conn, :get_or_create))

    assert response(conn, 401) =~ "Unauthorized"
  end

  # Authenticated but no params: expect a 400, not an auth error.
  @tag :authenticate_api
  test "basic auth succeeds but bad params", %{conn: conn} do
    conn = post(conn, Routes.api_clients_path(conn, :get_or_create))

    assert json_response(conn, 400)
  end

  @tag :authenticate_api
  test "basic request succeeds", %{conn: conn} do
    conn = post(conn, Routes.api_clients_path(conn, :get_or_create, @client_params))

    assert json_response(conn, 200)
  end
end
describe "#get_or_create:" do
  @tag :authenticate_api
  test "not found so create", %{conn: conn} do
    # Precondition: empty table, so the endpoint must create the credential.
    assert [] == Clients.list_credentials()

    conn = post(conn, Routes.api_clients_path(conn, :get_or_create, @client_params))

    [item] = Clients.list_credentials()
    assert item.client_id == @client_params.clientId
    assert json_response(conn, 200)
  end

  @tag :authenticate_api
  test "found credentials", %{conn: conn} do
    # Seed an existing credential so the endpoint takes the "found" path.
    Clients.create_credential(%{
      "client_id" => @client_params.clientId,
      "client_secret" => @client_params.clientSecret
    })

    assert [item] = Clients.list_credentials()
    assert item.client_id == @client_params.clientId

    conn = post(conn, Routes.api_clients_path(conn, :get_or_create, @client_params))

    # NOTE(review): the message still says "created" on the found path —
    # confirm this wording against the controller's intent.
    assert json_response(conn, 200) == %{
             "error" => false,
             "message" => "client with client id: #{@client_params.clientId} created"
           }
  end

  @tag :authenticate_api
  test "bad params", %{conn: conn} do
    # Wrong key names/types must be rejected with a 400.
    conn =
      post(conn, Routes.api_clients_path(conn, :get_or_create, %{client_id: 12, secret: 32}))

    assert json_response(conn, 400)
  end
end
end
| 32.166667 | 95 | 0.658031 |
0847c8f397cdd775dc52f6d818c571b9d8c99bb3 | 2,483 | ex | Elixir | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2beta1_intent_message_list_select_item.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2019-01-03T22:30:36.000Z | 2019-01-03T22:30:36.000Z | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2beta1_intent_message_list_select_item.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/dialogflow/lib/google_api/dialogflow/v2/model/google_cloud_dialogflow_v2beta1_intent_message_list_select_item.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessageListSelectItem do
@moduledoc """
An item in the list.
## Attributes
- info (GoogleCloudDialogflowV2beta1IntentMessageSelectItemInfo): Required. Additional information about this option. Defaults to: `null`.
- description (String.t): Optional. The main text describing the item. Defaults to: `null`.
- image (GoogleCloudDialogflowV2beta1IntentMessageImage): Optional. The image to display. Defaults to: `null`.
- title (String.t): Required. The title of the list item. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:info =>
GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessageSelectItemInfo.t(),
:description => any(),
:image =>
GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessageImage.t(),
:title => any()
}
field(
:info,
as: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessageSelectItemInfo
)
field(:description)
field(:image, as: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessageImage)
field(:title)
end
defimpl Poison.Decoder,
for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessageListSelectItem do
def decode(value, options) do
GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessageListSelectItem.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Dialogflow.V2.Model.GoogleCloudDialogflowV2beta1IntentMessageListSelectItem do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.514706 | 140 | 0.754732 |
0847dd3702af68a2b3a2008ab1811395dec065fe | 182 | exs | Elixir | test/controllers/page_controller_test.exs | RobertDober/Ashboard | c17218614b515f8f3db85d3975bde8e878457019 | [
"Apache-2.0"
] | null | null | null | test/controllers/page_controller_test.exs | RobertDober/Ashboard | c17218614b515f8f3db85d3975bde8e878457019 | [
"Apache-2.0"
] | null | null | null | test/controllers/page_controller_test.exs | RobertDober/Ashboard | c17218614b515f8f3db85d3975bde8e878457019 | [
"Apache-2.0"
] | null | null | null | defmodule Ashboard.PageControllerTest do
use Ashboard.ConnCase
test "GET /" do
conn = get conn(), "/"
assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end
| 20.222222 | 60 | 0.681319 |
0848422f2c9252ef23f07616971827c4a55f7ab4 | 373 | ex | Elixir | lib/ex_rss_web/views/error_view.ex | cruessler/exrss | 6ac17b7533d78460a1c34cabaae86fec317f460a | [
"MIT"
] | 4 | 2020-02-16T07:18:35.000Z | 2021-12-09T14:43:10.000Z | lib/ex_rss_web/views/error_view.ex | cruessler/exrss | 6ac17b7533d78460a1c34cabaae86fec317f460a | [
"MIT"
] | 27 | 2019-10-16T18:35:19.000Z | 2022-03-13T16:39:57.000Z | lib/ex_rss_web/views/error_view.ex | cruessler/exrss | 6ac17b7533d78460a1c34cabaae86fec317f460a | [
"MIT"
] | null | null | null | defmodule ExRssWeb.ErrorView do
use ExRssWeb, :view
def render("404.html", _assigns) do
"Page not found"
end
def render("500.html", _assigns) do
"Internal server error"
end
# In case no render clause matches or no
# template is found, let's render it as 500
def template_not_found(_template, assigns) do
render("500.html", assigns)
end
end
| 20.722222 | 47 | 0.697051 |
08484e12e4fb4c2a13acd582bbd7da71bdc4c6cb | 381 | ex | Elixir | dingen/lib/dingen_web/tenant/tenant_enforcement_plug.ex | rmoorman/dingen-2018011-tenants | 02f3fa618b9a266340d4a4993420dc5641cec08e | [
"MIT"
] | null | null | null | dingen/lib/dingen_web/tenant/tenant_enforcement_plug.ex | rmoorman/dingen-2018011-tenants | 02f3fa618b9a266340d4a4993420dc5641cec08e | [
"MIT"
] | null | null | null | dingen/lib/dingen_web/tenant/tenant_enforcement_plug.ex | rmoorman/dingen-2018011-tenants | 02f3fa618b9a266340d4a4993420dc5641cec08e | [
"MIT"
] | null | null | null | defmodule DingenWeb.Tenant.EnforcementPlug do
import Plug.Conn
alias Plug.Conn
alias DingenWeb.Tenant.LookupPlug
###
### Plug interface
###
def init(opts), do: opts
def call(%Conn{} = conn, _opts) do
case LookupPlug.get_tenant(conn) do
nil ->
conn
|> resp(404, "Not found")
|> halt()
_ ->
conn
end
end
end
| 15.24 | 45 | 0.577428 |
08485dd6d68277238a581fb56f415602c832c4f7 | 102 | exs | Elixir | test/strichliste_elixir_web/views/layout_view_test.exs | DEvil0000/strichliste_elixir | 33efe808ced1dd3e3650212a506e8c3322277b2b | [
"MIT"
] | null | null | null | test/strichliste_elixir_web/views/layout_view_test.exs | DEvil0000/strichliste_elixir | 33efe808ced1dd3e3650212a506e8c3322277b2b | [
"MIT"
] | null | null | null | test/strichliste_elixir_web/views/layout_view_test.exs | DEvil0000/strichliste_elixir | 33efe808ced1dd3e3650212a506e8c3322277b2b | [
"MIT"
] | 1 | 2019-05-24T18:18:24.000Z | 2019-05-24T18:18:24.000Z | defmodule StrichlisteElixirWeb.LayoutViewTest do
use StrichlisteElixirWeb.ConnCase, async: true
end
| 25.5 | 48 | 0.862745 |
08487e7c26401ffda2835dc642cd8e7f48205db4 | 65 | ex | Elixir | web/views/design_group_view.ex | houshuang/survey | 948acaf20840af82af1d9af3147acca94cb4fcf8 | [
"Apache-2.0"
] | 48 | 2015-06-29T21:20:25.000Z | 2021-05-09T04:27:41.000Z | web/views/design_group_view.ex | houshuang/survey | 948acaf20840af82af1d9af3147acca94cb4fcf8 | [
"Apache-2.0"
] | null | null | null | web/views/design_group_view.ex | houshuang/survey | 948acaf20840af82af1d9af3147acca94cb4fcf8 | [
"Apache-2.0"
] | 15 | 2015-06-29T21:13:57.000Z | 2021-07-27T10:02:40.000Z | defmodule Survey.DesignGroupView do
use Survey.Web, :view
end
| 13 | 35 | 0.784615 |
08487e88068a2ac7827a7ac393aae25c2de23024 | 1,988 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/route_list_warning_data.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/compute/lib/google_api/compute/v1/model/route_list_warning_data.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/route_list_warning_data.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.RouteListWarningData do
@moduledoc """
## Attributes
* `key` (*type:* `String.t`, *default:* `nil`) - [Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).
* `value` (*type:* `String.t`, *default:* `nil`) - [Output Only] A warning data value corresponding to the key.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:key => String.t(),
:value => String.t()
}
field(:key)
field(:value)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.RouteListWarningData do
def decode(value, options) do
GoogleApi.Compute.V1.Model.RouteListWarningData.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.RouteListWarningData do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.76 | 527 | 0.737928 |
08488325fbb020c1fbbda01d66b22c09134a1d09 | 1,890 | ex | Elixir | lib/rig_kafka/config.ex | steveoliver/reactive-interaction-gateway | 59b6dc994fd0f098bed19b7bf1e699513ac87167 | [
"Apache-2.0"
] | 518 | 2017-11-09T13:10:49.000Z | 2022-03-28T14:29:50.000Z | lib/rig_kafka/config.ex | steveoliver/reactive-interaction-gateway | 59b6dc994fd0f098bed19b7bf1e699513ac87167 | [
"Apache-2.0"
] | 270 | 2017-11-10T00:11:34.000Z | 2022-02-27T13:08:16.000Z | lib/rig_kafka/config.ex | steveoliver/reactive-interaction-gateway | 59b6dc994fd0f098bed19b7bf1e699513ac87167 | [
"Apache-2.0"
defmodule RigKafka.Config do
  @moduledoc """
  Kafka connection configuration.

  `new/1` builds a fully populated struct from a plain map; any missing
  `server_id`/`client_id`/`group_id` is derived from a freshly generated UUID
  so each connection gets unique process and client names.
  """

  @type broker :: {
          host :: String.t(),
          port :: pos_integer()
        }
  @type topic :: String.t()
  @type ssl_config :: %{
          path_to_key_pem: String.t(),
          key_password: String.t(),
          path_to_cert_pem: String.t(),
          path_to_ca_cert_pem: String.t()
        }
  @type sasl_config :: {
          :plain,
          username :: String.t(),
          password :: String.t()
        }
  # FIX: the type now also lists the serializer/schema-registry fields that the
  # struct actually carries (they were missing from the original type).
  @type t :: %{
          brokers: [broker],
          serializer: nil | String.t(),
          schema_registry_host: nil | String.t(),
          consumer_topics: [topic],
          server_id: atom,
          client_id: atom,
          group_id: String.t(),
          ssl: nil | ssl_config,
          sasl: nil | sasl_config
        }

  @enforce_keys [
    :brokers,
    :serializer,
    :schema_registry_host,
    :consumer_topics,
    :client_id,
    :group_id
  ]
  defstruct brokers: [],
            serializer: nil,
            schema_registry_host: nil,
            consumer_topics: [],
            server_id: nil,
            client_id: nil,
            group_id: nil,
            ssl: nil,
            sasl: nil

  # ---

  @doc """
  Creates a new configuration from `config`.

  Unset `server_id`/`client_id`/`group_id` values are filled with
  UUID-derived defaults so concurrent connections do not clash.
  """
  def new(config) do
    uuid = UUID.uuid4()

    %__MODULE__{
      brokers: Map.get(config, :brokers, []),
      serializer: Map.get(config, :serializer),
      schema_registry_host: Map.get(config, :schema_registry_host),
      consumer_topics: Map.get(config, :consumer_topics, []),
      # NOTE: String.to_atom/1 creates a new atom per connection; acceptable
      # here only because the number of connections is small and bounded.
      server_id: Map.get(config, :server_id) || String.to_atom("rig_kafka_#{uuid}"),
      client_id: Map.get(config, :client_id) || String.to_atom("brod_client_#{uuid}"),
      group_id: Map.get(config, :group_id) || "brod_group_#{uuid}",
      ssl: Map.get(config, :ssl),
      sasl: Map.get(config, :sasl)
    }
  end

  # ---

  @doc """
  Checks whether a configuration is usable; currently this only requires at
  least one broker to be present.
  """
  def valid?(%{brokers: brokers}) do
    # TODO we could do a lot more here
    # FIX: was `length(brokers) > 0` (O(n) list traversal); comparing against
    # the empty list is the idiomatic O(1) emptiness check.
    brokers != []
  end
end
| 24.230769 | 86 | 0.547619 |
08489ae4105b057ad785497e981ff916619a5bf3 | 622 | ex | Elixir | integration_test/support/factory.ex | fartek/barna | cdcc7a89fa3e66459568863cf7713651abb0c688 | [
"MIT"
] | null | null | null | integration_test/support/factory.ex | fartek/barna | cdcc7a89fa3e66459568863cf7713651abb0c688 | [
"MIT"
] | null | null | null | integration_test/support/factory.ex | fartek/barna | cdcc7a89fa3e66459568863cf7713651abb0c688 | [
"MIT"
defmodule Barna.Integration.Factory do
  @moduledoc false

  alias Barna.Integration.{Address, Comment, TestRepo, User}

  # Builds the named factory struct, applies overrides, and persists it.
  def insert!(factory_name, attributes \\ []) do
    factory_name
    |> build(attributes)
    |> TestRepo.insert!()
  end

  # Builds the named factory struct and merges the given attribute overrides.
  def build(factory_name, attributes) do
    struct(build(factory_name), attributes)
  end

  def build(:user) do
    %User{name: "John", email: "john@doe.com", address: nil, comments: []}
  end

  def build(:address), do: %Address{street_name: "Test street"}

  def build(:comment), do: %Comment{title: "comment title", message: "comment message"}
end
| 21.448276 | 64 | 0.649518 |
0848bed0ce91e8cbdb98e225eeea091f6c728943 | 1,318 | ex | Elixir | apps/blunt/lib/blunt/dispatch_strategy/pipeline_resolver.ex | blunt-elixir/blunt | a88b88984022db7ba2110204248fdb541121e3a0 | [
"MIT"
] | 1 | 2022-03-07T11:54:47.000Z | 2022-03-07T11:54:47.000Z | apps/blunt/lib/blunt/dispatch_strategy/pipeline_resolver.ex | elixir-cqrs/cqrs_tools | afbf82da522a10d2413547a46f316ed3aadebba5 | [
"MIT"
] | null | null | null | apps/blunt/lib/blunt/dispatch_strategy/pipeline_resolver.ex | elixir-cqrs/cqrs_tools | afbf82da522a10d2413547a46f316ed3aadebba5 | [
"MIT"
defmodule Blunt.DispatchStrategy.PipelineResolver do
  @moduledoc false

  alias Blunt.{Behaviour, Config, DispatchContext}

  defmodule Error do
    defexception [:message]
  end

  @type message_type :: atom()
  @type message_module :: atom()
  @type pipeline_module :: atom()
  @type behaviour_module :: atom()
  @type context :: DispatchContext.command_context() | DispatchContext.query_context()

  @callback resolve(message_type, message_module) :: {:ok, pipeline_module} | :error

  @doc false
  @spec get_pipeline(context, behaviour_module) :: {:ok, pipeline_module} | {:error, String.t()} | :error
  # Resolves the pipeline for the context's message via the configured
  # resolver, then checks it implements the expected behaviour. Any
  # non-{:ok, _} resolver result is passed through unchanged.
  def get_pipeline(%{message_type: type, message_module: message_module}, behaviour_module) do
    resolver = Config.pipeline_resolver!()

    case resolver.resolve(type, message_module) do
      {:ok, pipeline} -> Behaviour.validate(pipeline, behaviour_module)
      other -> other
    end
  end

  @doc false
  @spec get_pipeline!(context, behaviour_module) :: pipeline_module
  # Same as get_pipeline/2 but raises `Error` on failure.
  def get_pipeline!(%{message_type: type, message_module: message_module} = context, behaviour_module) do
    case get_pipeline(context, behaviour_module) do
      {:ok, pipeline} ->
        pipeline

      {:error, reason} ->
        raise Error, message: reason

      :error ->
        raise Error, message: "No #{inspect(behaviour_module)} found for #{type}: #{inspect(message_module)}"
    end
  end
end
| 37.657143 | 117 | 0.726859 |
0849261d916829c6b6cd2ed48b470834b8d6bc97 | 7,652 | ex | Elixir | lib/parse/posix/parser.ex | Adzz/timex | a9c97e2dd9cb5bed286d5a1d688f3eea8e73e6ea | [
"MIT"
] | null | null | null | lib/parse/posix/parser.ex | Adzz/timex | a9c97e2dd9cb5bed286d5a1d688f3eea8e73e6ea | [
"MIT"
] | null | null | null | lib/parse/posix/parser.ex | Adzz/timex | a9c97e2dd9cb5bed286d5a1d688f3eea8e73e6ea | [
"MIT"
defmodule Timex.Parse.Timezones.Posix do
  @moduledoc """
  Parses POSIX-style timezones:

  ## Format

  POSIX-style timezones are of the format: `local_timezone,date/time,date/time`
  Where `date` is in the `Mm.n.d` format, and where:

  - `Mm` (1-12) for 12 months
  - `n` (1-5) 1 for the first week and 5 for the last week in the month
  - `d` (0-6) 0 for Sunday and 6 for Saturday

  ## Example

  TZ = `CST6CDT,M3.2.0/2:00:00,M11.1.0/2:00:00`

  This would represents a change to daylight saving time at 2:00 AM on the second Sunday
  in March and change back at 2:00 AM on the first Sunday in November, and keep 6 hours time
  offset from GMT every year. The breakdown of the string is:

  - `CST6CDT` is the timezone name
  - `CST` is the standard abbreviation
  - `6` is the hours of time difference from GMT
  - `CDT` is the DST abbreviation
  - `,M3` is the third month
  - `.2` is the second occurrence of the day in the month
  - `.0` is Sunday
  - `/2:00:00` is the time
  - `,M11` is the eleventh month
  - `.1` is the first occurrence of the day in the month
  - `.0` is Sunday
  - `/2:00:00` is the time
  """

  defmodule PosixTimezone do
    # FIX: this documentation was attached with `@doc`, which has no effect on
    # a defstruct and triggers a compiler warning; `@moduledoc` attaches it.
    @moduledoc """
    Result of parsing a POSIX TZ string.

    ## dst_start/dst_end

    - `month`: 1-12
    - `week`: week of the month
    - `day_of_week`: 0-6, 0 is Sunday
    - `time`: {hour, minute, second}
    """

    @type t :: %__MODULE__{}

    defstruct name: "",
              std_name: "",
              dst_name: "",
              diff: 0,
              dst_start: nil,
              dst_end: nil
  end

  alias PosixTimezone, as: TZ

  @doc """
  Parses a POSIX TZ string into a `PosixTimezone` struct.

  Returns `{:ok, %PosixTimezone{}}` on success, `{:error, :not_posix}` for
  malformed input, or a more specific `{:error, :invalid_*}` atom naming the
  offending component.
  """
  @spec parse(binary) :: {:ok, PosixTimezone.t()} | {:error, atom}
  # Start parsing provided zone name
  def parse(tz) when is_binary(tz) do
    # :diff accumulates digit characters as a binary and is converted to an
    # integer once the DST abbreviation begins.
    case parse_posix(tz, :std_name, %TZ{:diff => "0"}) do
      {:ok, %TZ{:std_name => std, :dst_name => dst, :diff => diff} = res} ->
        {:ok, %{res | :name => "#{std}#{diff}#{dst}"}}

      {:error, _} = err ->
        err

      {:error, :invalid_time, :dst_start} ->
        {:error, :invalid_dst_start_time}

      {:error, :invalid_time, :dst_end} ->
        {:error, :invalid_dst_end_time}
    end
  end

  # Alpha character for standard name
  defp parse_posix(<<c::utf8, rest::binary>>, :std_name, %TZ{:std_name => acc} = result)
       when c in ?A..?Z do
    parse_posix(rest, :std_name, %{result | :std_name => <<acc::binary, c::utf8>>})
  end

  # Transition from standard name to diff from UTC
  defp parse_posix(<<c::utf8, rest::binary>>, :std_name, %TZ{:diff => acc} = result)
       when c in ?0..?9 do
    parse_posix(rest, :diff, %{result | :diff => <<acc::binary, c::utf8>>})
  end

  # Digit for diff from UTC
  defp parse_posix(<<c::utf8, rest::binary>>, :diff, %TZ{:diff => acc} = result)
       when c in ?0..?9 do
    parse_posix(rest, :diff, %{result | :diff => <<acc::binary, c::utf8>>})
  end

  # Transition from diff to DST name
  defp parse_posix(
         <<c::utf8, rest::binary>>,
         :diff,
         %TZ{:diff => diff, :dst_name => acc} = result
       )
       when c in ?A..?Z do
    # Convert diff to integer value
    parse_posix(rest, :dst_name, %{
      result
      | :diff => String.to_integer(diff),
        :dst_name => <<acc::binary, c::utf8>>
    })
  end

  # Alpha character for DST name
  defp parse_posix(<<c::utf8, rest::binary>>, :dst_name, %{:dst_name => acc} = result)
       when c in ?A..?Z do
    parse_posix(rest, :dst_name, %{result | :dst_name => <<acc::binary, c::utf8>>})
  end

  # Times: the ",M10"-",M12" months need a two-character lookahead, the
  # single-digit months a one-character lookahead.
  defp parse_posix(<<?,, ?M, ?1, c::utf8, rest::binary>>, :dst_name, result) when c in ?0..?2 do
    start = %{month: String.to_integer(<<?1, c::utf8>>), week: nil, day_of_week: nil, time: nil}
    parse_week(rest, :dst_start, %{result | :dst_start => start})
  end

  defp parse_posix(<<?,, ?M, ?1, c::utf8, rest::binary>>, :dst_start, result) when c in ?0..?2 do
    new_end = %{month: String.to_integer(<<?1, c::utf8>>), week: nil, day_of_week: nil, time: nil}
    parse_week(rest, :dst_end, %{result | :dst_end => new_end})
  end

  defp parse_posix(<<?,, ?M, c::utf8, rest::binary>>, :dst_name, result) when c in ?1..?9 do
    start = %{month: String.to_integer(<<c::utf8>>), week: nil, day_of_week: nil, time: nil}
    parse_week(rest, :dst_start, %{result | :dst_start => start})
  end

  defp parse_posix(<<?,, ?M, c::utf8, rest::binary>>, :dst_start, result) when c in ?1..?9 do
    new_end = %{month: String.to_integer(<<c::utf8>>), week: nil, day_of_week: nil, time: nil}
    parse_week(rest, :dst_end, %{result | :dst_end => new_end})
  end

  # Reached end of input with all parts parsed
  defp parse_posix(<<>>, :dst_name, result), do: {:ok, result}
  defp parse_posix(<<>>, :dst_end, result), do: {:ok, result}
  # Invalid character for current state
  defp parse_posix(<<_c::utf8, _rest::binary>>, _, _result), do: {:error, :not_posix}
  # Empty before all parts are processed
  defp parse_posix(<<>>, _, _result), do: {:error, :not_posix}

  defp parse_week(<<?., c::utf8, rest::binary>>, :dst_start, %{:dst_start => start} = result)
       when c in ?1..?5 do
    new_start = %{start | :week => String.to_integer(<<c::utf8>>)}
    parse_weekday(rest, :dst_start, %{result | :dst_start => new_start})
  end

  defp parse_week(<<?., c::utf8, rest::binary>>, :dst_end, %{:dst_end => dst_end} = result)
       when c in ?1..?5 do
    new_end = %{dst_end | :week => String.to_integer(<<c::utf8>>)}
    parse_weekday(rest, :dst_end, %{result | :dst_end => new_end})
  end

  defp parse_week(_rest, state, _result), do: {:error, :"invalid_#{state}_week"}

  defp parse_weekday(<<?., c::utf8, rest::binary>>, :dst_start, %{:dst_start => start} = result)
       when c in ?0..?6 do
    new_start = %{start | :day_of_week => String.to_integer(<<c::utf8>>)}
    parse_time(rest, :dst_start, %{result | :dst_start => new_start})
  end

  defp parse_weekday(<<?., c::utf8, rest::binary>>, :dst_end, %{:dst_end => dst_end} = result)
       when c in ?0..?6 do
    new_end = %{dst_end | :day_of_week => String.to_integer(<<c::utf8>>)}
    parse_time(rest, :dst_end, %{result | :dst_end => new_end})
  end

  defp parse_weekday(_rest, state, _result), do: {:error, :"invalid_#{state}_weekday"}

  # Two-digit hour form: /HH:MM:SS
  # BUGFIX: the guard read `m2 in ?0..9`, comparing the minute character against
  # the *integer* 9 instead of the codepoint ?9 (57). `?0..9` is the decreasing
  # range 48..9, so only minutes whose second digit was "0" parsed; any other
  # minute (e.g. "2:15:00") fell through to {:error, :not_posix}.
  defp parse_time(
         <<?/, h1::utf8, h2::utf8, ?:, m1::utf8, m2::utf8, ?:, s1::utf8, s2::utf8, rest::binary>>,
         state,
         result
       )
       when h1 in ?0..?9 and h2 in ?0..?9 and m1 in ?0..?9 and m2 in ?0..?9 and s1 in ?0..?9 and
              s2 in ?0..?9 do
    parse_time(
      <<h1::utf8, h2::utf8>>,
      <<m1::utf8, m2::utf8>>,
      <<s1::utf8, s2::utf8>>,
      rest,
      state,
      result
    )
  end

  # Single-digit hour form: /H:MM:SS (same ?0..9 -> ?0..?9 bugfix as above)
  defp parse_time(
         <<?/, h::utf8, ?:, m1::utf8, m2::utf8, ?:, s1::utf8, s2::utf8, rest::binary>>,
         state,
         result
       )
       when h in ?1..?9 and m1 in ?0..?9 and m2 in ?0..?9 and s1 in ?0..?9 and s2 in ?0..?9 do
    parse_time(<<h::utf8>>, <<m1::utf8, m2::utf8>>, <<s1::utf8, s2::utf8>>, rest, state, result)
  end

  defp parse_time(_rest, _state, _result), do: {:error, :not_posix}

  # Validates the numeric time components and stores them on the current
  # dst_start/dst_end entry before resuming the main parse loop.
  defp parse_time(hs, ms, ss, rest, state, result) do
    hour = String.to_integer(hs)
    mins = String.to_integer(ms)
    secs = String.to_integer(ss)

    case {hour, mins, secs} do
      # NOTE(review): hour 0 is rejected (h > 0) while 24 is accepted (h < 25);
      # POSIX permits 00:00:00 — confirm whether excluding hour 0 is intentional.
      {h, m, s} when h > 0 and h < 25 and m >= 0 and m < 60 and s >= 0 and s < 60 ->
        case state do
          :dst_start ->
            new_start = %{result.dst_start | :time => {h, m, s}}
            parse_posix(rest, :dst_start, %{result | :dst_start => new_start})

          :dst_end ->
            new_end = %{result.dst_end | :time => {h, m, s}}
            parse_posix(rest, :dst_end, %{result | :dst_end => new_end})
        end

      _ ->
        {:error, :invalid_time, state}
    end
  end
end
| 34.624434 | 98 | 0.581417 |
08493f901a93449004fdd95140727773ccb306d5 | 614 | ex | Elixir | backend/apps/students_crm_v2/lib/students_crm_v2/interactions/academic_group/create.ex | KyivKrishnaAcademy/students_crm_v2 | e0ad9b3c5e52dfef5ab8f9179f3c593f935786e6 | [
"MIT"
] | null | null | null | backend/apps/students_crm_v2/lib/students_crm_v2/interactions/academic_group/create.ex | KyivKrishnaAcademy/students_crm_v2 | e0ad9b3c5e52dfef5ab8f9179f3c593f935786e6 | [
"MIT"
] | 50 | 2018-07-29T09:17:35.000Z | 2019-02-26T05:23:34.000Z | backend/apps/students_crm_v2/lib/students_crm_v2/interactions/academic_group/create.ex | KyivKrishnaAcademy/students_crm_v2 | e0ad9b3c5e52dfef5ab8f9179f3c593f935786e6 | [
"MIT"
] | null | null | null | defmodule StudentsCrmV2.Interactions.AcademicGroup.Create do
@moduledoc false
alias StudentsCrmV2.Interactions.AcademicGroup.CreateOrUpdate
alias StudentsCrmV2.Repo
alias StudentsCrmV2.Models.{
AcademicGroup,
User
}
@fields ~w[name established_on tenant_id description]a
@required_fields ~w[name established_on tenant_id]a
@spec execute(params :: map(), author :: User.t()) :: {:ok, AcademicGroup.t()} | {:error, :unauthorized}
def execute(params, author) do
%AcademicGroup{}
|> CreateOrUpdate.execute(params, author, @fields, @required_fields)
|> Repo.insert()
end
end
| 27.909091 | 106 | 0.732899 |
08494297ecd38213aa0fcbac1dd5b0465e4db591 | 2,021 | ex | Elixir | lib/runner.ex | Imtiyaaz1234/elixir-koans | e1a8a55e3acbb88bb598becf3f40cb52f912e60a | [
"MIT"
] | 1 | 2021-08-02T08:02:33.000Z | 2021-08-02T08:02:33.000Z | lib/runner.ex | Imtiyaaz1234/elixir-koans | e1a8a55e3acbb88bb598becf3f40cb52f912e60a | [
"MIT"
] | null | null | null | lib/runner.ex | Imtiyaaz1234/elixir-koans | e1a8a55e3acbb88bb598becf3f40cb52f912e60a | [
"MIT"
] | null | null | null | defmodule Runner do
  use GenServer

  # Predicate: true when `koan` is a loadable module exporting `all_koans`
  # (the marker function injected by the koan DSL); false for anything else,
  # including modules that fail to load.
  def koan?(koan) do
    case Code.ensure_loaded(koan) do
      {:module, _} -> Keyword.has_key?(koan.__info__(:functions), :all_koans)
      _ -> false
    end
  end

  # All koan modules of the :elixir_koans application, ordered by the numeric
  # prefix of their source file name (lib/koans/NN_name.ex).
  def modules do
    {:ok, modules} = :application.get_key(:elixir_koans, :modules)

    modules
    |> Stream.map(&(&1.module_info |> get_in([:compile, :source])))
    # Paths are charlists
    |> Stream.map(&to_string/1)
    |> Stream.zip(modules)
    |> Stream.filter(fn {_path, mod} -> koan?(mod) end)
    |> Stream.map(fn {path, mod} -> {path_to_number(path), mod} end)
    |> Enum.sort_by(fn {number, _mod} -> number end)
    |> Enum.map(fn {_number, mod} -> mod end)
  end

  @koan_path_pattern ~r/lib\/koans\/(\d+)_\w+.ex$/

  # Extracts the numeric ordering prefix from a koan source path.
  # Raises MatchError if the path does not match @koan_path_pattern.
  def path_to_number(path) do
    [_path, number] = Regex.run(@koan_path_pattern, path)
    String.to_integer(number)
  end

  # Koan modules from `start_module` onward (inclusive), in koan order.
  def modules_to_run(start_module), do: Enum.drop_while(modules(), &(&1 != start_module))

  def init(args) do
    {:ok, args}
  end

  def start_link do
    GenServer.start_link(__MODULE__, [], name: __MODULE__)
  end

  # Replaces any in-flight run: drain queued messages first, then kick off
  # the run loop with the new module list as state.
  def handle_cast({:run, modules}, _) do
    flush()
    send(self(), :run_modules)
    {:noreply, modules}
  end

  # Nothing left to run.
  def handle_info(:run_modules, []) do
    {:noreply, []}
  end

  # Run one module; continue with the rest only if it passed, otherwise stop
  # (state cleared) so the user can fix the failing koan first.
  def handle_info(:run_modules, [module | rest]) do
    Display.clear_screen()

    case run_module(module) do
      :passed ->
        send(self(), :run_modules)
        {:noreply, rest}

      _ ->
        {:noreply, []}
    end
  end

  # Client API: asynchronously start running the given list of koan modules.
  def run(modules) do
    GenServer.cast(__MODULE__, {:run, modules})
  end

  defp run_module(module) do
    module
    |> Execute.run_module(&track/3)
    |> display
  end

  # Record progress for passed koans; ignore other outcomes.
  defp track(:passed, module, koan), do: Tracker.completed(module, koan)
  defp track(_, _, _), do: nil

  defp display({:failed, error, module, name}) do
    Display.show_failure(error, module, name)
    :failed
  end

  defp display(_), do: :passed

  # Drains the process mailbox so a new :run supersedes any queued work.
  defp flush do
    receive do
      _ -> flush()
    after
      0 -> :ok
    end
  end
end
| 21.731183 | 89 | 0.616032 |
0849433bcf6deaf4170982850893c01c3ee678be | 3,899 | exs | Elixir | test/unit/ntriples_encoder_test.exs | marcelotto/rdf-ex | 12adce69eb2dbff027cbc83aaaf912067aea1b02 | [
"MIT"
] | 53 | 2017-06-25T22:20:44.000Z | 2020-04-27T17:27:51.000Z | test/unit/ntriples_encoder_test.exs | marcelotto/rdf-ex | 12adce69eb2dbff027cbc83aaaf912067aea1b02 | [
"MIT"
] | 7 | 2017-06-25T00:29:11.000Z | 2020-03-11T00:23:47.000Z | test/unit/ntriples_encoder_test.exs | rdf-elixir/rdf-ex | 7d4280ec9a912ef6ee9fc96ecdfdf26647016d6a | [
"MIT"
defmodule RDF.NTriples.EncoderTest do
  use ExUnit.Case, async: false

  alias RDF.NTriples

  doctest NTriples.Encoder

  alias RDF.Graph
  alias RDF.NS.XSD

  import RDF.Sigils
  import RDF.Test.Case, only: [stream_to_string: 1]

  use RDF.Vocabulary.Namespace
  defvocab EX, base_iri: "http://example.org/#", terms: [], strict: false

  test "stream_support?/0" do
    assert NTriples.Encoder.stream_support?()
  end

  describe "serializing a graph" do
    test "an empty graph is serialized to an empty string" do
      assert NTriples.Encoder.encode!(Graph.new()) == ""
    end

    test "statements with IRIs only" do
      assert NTriples.Encoder.encode!(
               Graph.new([
                 {EX.S1, EX.p1(), EX.O1},
                 {EX.S1, EX.p1(), EX.O2},
                 {EX.S1, EX.p2(), EX.O3},
                 {EX.S2, EX.p3(), EX.O4}
               ])
             ) ==
               """
               <http://example.org/#S1> <http://example.org/#p1> <http://example.org/#O1> .
               <http://example.org/#S1> <http://example.org/#p1> <http://example.org/#O2> .
               <http://example.org/#S1> <http://example.org/#p2> <http://example.org/#O3> .
               <http://example.org/#S2> <http://example.org/#p3> <http://example.org/#O4> .
               """
    end

    test "statements with literals" do
      assert NTriples.Encoder.encode!(
               Graph.new([
                 {EX.S1, EX.p1(), ~L"foo"},
                 {EX.S1, EX.p1(), ~L"foo"en},
                 {EX.S1, EX.p2(), 42},
                 {EX.S2, EX.p3(), RDF.literal("strange things", datatype: EX.custom())}
               ])
             ) ==
               """
               <http://example.org/#S1> <http://example.org/#p1> "foo"@en .
               <http://example.org/#S1> <http://example.org/#p1> "foo" .
               <http://example.org/#S1> <http://example.org/#p2> "42"^^<#{XSD.integer()}> .
               <http://example.org/#S2> <http://example.org/#p3> "strange things"^^<#{EX.custom()}> .
               """
    end

    test "statements with blank nodes" do
      assert NTriples.Encoder.encode!(
               Graph.new([
                 {EX.S1, EX.p1(), RDF.bnode(1)},
                 {EX.S1, EX.p1(), RDF.bnode("foo")},
                 {EX.S1, EX.p1(), RDF.bnode(:bar)}
               ])
             ) ==
               """
               <http://example.org/#S1> <http://example.org/#p1> _:b1 .
               <http://example.org/#S1> <http://example.org/#p1> _:bar .
               <http://example.org/#S1> <http://example.org/#p1> _:foo .
               """
    end

    test "string escaping" do
      assert NTriples.Encoder.encode!(
               Graph.new([
                 {EX.S, EX.p(), ~s["foo"\n\r"bar"]},
                 {EX.S, EX.p(), RDF.literal(~s["foo"\n\r"bar"], language: "en")}
               ])
             ) ==
               """
               <http://example.org/#S> <http://example.org/#p> "\\"foo\\"\\n\\r\\"bar\\""@en .
               <http://example.org/#S> <http://example.org/#p> "\\"foo\\"\\n\\r\\"bar\\"" .
               """
    end
  end

  describe "stream/2" do
    # BUGFIX: these assertions previously sat directly in the describe body,
    # outside any `test` — so they executed once at module compile time and
    # were never registered as a test case. They are now a proper test.
    test "supports :string and :iodata modes" do
      graph =
        Graph.new([
          {EX.S1, EX.p1(), EX.O1},
          {EX.S2, EX.p2(), RDF.bnode("foo")},
          {EX.S3, EX.p3(), ~L"foo"},
          {EX.S3, EX.p3(), ~L"foo"en}
        ])

      expected_result = """
      <http://example.org/#S1> <http://example.org/#p1> <http://example.org/#O1> .
      <http://example.org/#S2> <http://example.org/#p2> _:foo .
      <http://example.org/#S3> <http://example.org/#p3> "foo"@en .
      <http://example.org/#S3> <http://example.org/#p3> "foo" .
      """

      assert NTriples.Encoder.stream(graph, mode: :string)
             |> stream_to_string() ==
               expected_result

      assert NTriples.Encoder.stream(graph, mode: :iodata)
             |> stream_to_string() ==
               expected_result
    end
  end
end
| 33.904348 | 101 | 0.479097 |
08495f07f5617baf89bfb3e375c04f4d3f76aa6a | 327 | ex | Elixir | lib/server_router.ex | adz/cowboy_sockets_with_elm_experiment | 39370908233df8ad64b6347cd5c78c85e2cb6d71 | [
"MIT"
] | null | null | null | lib/server_router.ex | adz/cowboy_sockets_with_elm_experiment | 39370908233df8ad64b6347cd5c78c85e2cb6d71 | [
"MIT"
] | null | null | null | lib/server_router.ex | adz/cowboy_sockets_with_elm_experiment | 39370908233df8ad64b6347cd5c78c85e2cb6d71 | [
"MIT"
defmodule Server.Router do
  use Plug.Router

  # Serve files from ./static at the root path before route matching.
  plug Plug.Static, at: "/", from: "static"
  plug :match
  plug :dispatch

  # HTML returned for any request Plug.Static did not satisfy: a teaser plus
  # a client-side redirect to the static index page.
  @fallback_page """
  <h1>PREPARE</h1>
  <h2>FOR WEIRD SQUARE</h2>
  <script>
  document.location = "/index.html"
  </script>
  """

  match _ do
    send_resp(conn, 200, @fallback_page)
  end
end
08496bd3efead37ced254efd514f7e3373896ed2 | 368 | ex | Elixir | test/support/split_generator.ex | montebrown/money | 4a4dcccb37e00f8cfbf5094b8e089a1c13e923d7 | [
"Apache-2.0"
] | 426 | 2016-10-10T08:53:20.000Z | 2022-03-17T04:28:00.000Z | test/support/split_generator.ex | montebrown/money | 4a4dcccb37e00f8cfbf5094b8e089a1c13e923d7 | [
"Apache-2.0"
] | 131 | 2016-12-03T22:43:52.000Z | 2022-02-13T22:35:57.000Z | test/support/split_generator.ex | montebrown/money | 4a4dcccb37e00f8cfbf5094b8e089a1c13e923d7 | [
"Apache-2.0"
] | 47 | 2017-02-18T08:11:26.000Z | 2022-01-26T19:31:34.000Z | defmodule GenerateSplits do
  require ExUnitProperties

  # StreamData generator producing `{money, split_count}` tuples for property
  # tests: a :USD Money amount built from a random float in
  # [0.0, 999_999_999_999_999.9] and an integer split count in 1..101.
  # The float is stringified before Money.new/2 to avoid float Money values.
  def generate_money do
    ExUnitProperties.gen all(
                           value <- StreamData.float(min: 0.0, max: 999_999_999_999_999.9),
                           split <- StreamData.integer(1..101)
                         ) do
      {Money.new(:USD, Float.to_string(value)), split}
    end
  end
end
| 28.307692 | 91 | 0.567935 |
0849763839f7ad9570b21ee538c256a26ede2ac4 | 1,888 | exs | Elixir | mix.exs | langens-jonathan/mu-authorization | 3b411460b81b87581af7c7f302b1d3bec4610608 | [
"MIT"
] | 1 | 2019-09-05T23:00:48.000Z | 2019-09-05T23:00:48.000Z | mix.exs | langens-jonathan/mu-authorization | 3b411460b81b87581af7c7f302b1d3bec4610608 | [
"MIT"
] | 7 | 2020-10-27T20:42:06.000Z | 2021-11-15T07:41:15.000Z | mix.exs | langens-jonathan/mu-authorization | 3b411460b81b87581af7c7f302b1d3bec4610608 | [
"MIT"
] | 6 | 2016-04-06T09:28:43.000Z | 2021-08-09T12:29:16.000Z | defmodule MuAuthorization.MixProject do
  use Mix.Project

  # Canonical project URL, reused for :source_url/:homepage_url and the hex
  # package links below.
  @github_url "https://github.com/mu-semtech/mu-authorization"

  # Mix project configuration. Note :erlc_paths — the Erlang sources of the
  # SPARQL parser generator in parser-generator/ are compiled alongside the
  # Elixir code.
  def project do
    [
      app: :"mu-authorization",
      version: "0.5.0",
      elixir: "~> 1.9",
      start_permanent: Mix.env() == :prod,
      erlc_paths: ["parser-generator"],
      deps: deps(),
      docs: [
        main: "readme",
        extras: ["README.md", "CHANGELOG.md"]
      ],
      test_coverage: [tool: ExCoveralls],
      preferred_cli_env: [
        coveralls: :test,
        "coveralls.detail": :test,
        "coveralls.post": :test,
        "coveralls.html": :test
      ],
      name: "mu-authorization",
      description:
        "A proxy server that offers a authorization/delta wrapper for a SPARQL endpoint.",
      source_url: @github_url,
      homepage_url: @github_url,
      files: ~w(mix.exs lib LICENSE.md README.md CHANGELOG.md),
      package: [
        maintainers: ["Versteden Aad", "Langens Jonathan"],
        licenses: ["MIT"],
        links: %{
          "GitHub" => @github_url
        }
      ]
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  # SparqlServer is the OTP application entry point.
  def application do
    [
      extra_applications: [:logger, :httpoison, :poison, :plug, :cowboy],
      mod: {SparqlServer, []},
      env: []
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:accessible, "~> 0.2.1"},
      {:ex_doc, "~> 0.18", only: :dev, runtime: false},
      {:exprof, "~> 0.2.0"},
      {:junit_formatter, "~> 2.1", only: :test},
      {:credo, "~> 1.4", only: [:dev, :test]},
      {:excoveralls, "~> 0.8", only: :test},
      {:dialyxir, "~> 1.0.0-rc.6", only: [:dev], runtime: false},
      {:httpoison, "~> 1.1"},
      {:poison, "~> 3.1"},
      {:poolboy, "~> 1.5.1"},
      {:plug, "~> 1.5"},
      {:cowboy, "~> 2.4"},
      {:observer_cli, "~> 1.5"}
    ]
  end
end
| 27.362319 | 90 | 0.53072 |
0849c863b6d9baf4ae92b48cb36035d2dce00295 | 424 | ex | Elixir | lib/requestbx_web/views/error_view.ex | semlabs/requestbx | b9dfb33efd04f067ede1a8e2659f84c92abbed67 | [
"MIT"
] | null | null | null | lib/requestbx_web/views/error_view.ex | semlabs/requestbx | b9dfb33efd04f067ede1a8e2659f84c92abbed67 | [
"MIT"
] | 1 | 2018-01-19T15:23:36.000Z | 2018-01-19T15:23:36.000Z | lib/requestbx_web/views/error_view.ex | semlabs/requestbx | b9dfb33efd04f067ede1a8e2659f84c92abbed67 | [
"MIT"
defmodule RequestbxWeb.ErrorView do
  use RequestbxWeb, :view

  # JSON error payloads, dispatched on the template name.
  def render("404.json", _assigns), do: %{errors: %{detail: "Page not found"}}
  def render("500.json", _assigns), do: %{errors: %{detail: "Internal server error"}}

  # In case no render clause matches or no template is found,
  # render it as a generic 500.
  def template_not_found(_template, assigns), do: render("500.json", assigns)
end
| 23.555556 | 49 | 0.688679 |
0849f45a17413f93ae49b0e46f6a1c297a7b48c3 | 2,145 | ex | Elixir | lib/realtime_signs.ex | mbta/realtime_signs | 3fd8cbc26ce2b0820e608e60fe12135dab5def69 | [
"MIT"
] | 1 | 2022-01-24T12:39:05.000Z | 2022-01-24T12:39:05.000Z | lib/realtime_signs.ex | mbta/realtime_signs | 3fd8cbc26ce2b0820e608e60fe12135dab5def69 | [
"MIT"
] | 40 | 2021-05-05T10:14:25.000Z | 2022-03-31T18:34:15.000Z | lib/realtime_signs.ex | mbta/realtime_signs | 3fd8cbc26ce2b0820e608e60fe12135dab5def69 | [
"MIT"
] | 1 | 2022-03-20T21:08:12.000Z | 2022-03-20T21:08:12.000Z | defmodule RealtimeSigns do
  require Logger

  alias RealtimeSignsConfig, as: Config

  # OTP application callback: loads runtime config from the environment and
  # starts the supervision tree (hackney pools, engines, the message queue,
  # a configurable number of HTTP updaters, and the signs supervisor).
  # NOTE(review): uses the deprecated Supervisor.Spec worker/supervisor
  # helpers — consider migrating to child_spec tuples; confirm OTP version.
  def start(_type, _args) do
    import Supervisor.Spec, warn: false

    Logger.info(
      "Starting realtime_signs version #{inspect(Application.spec(:realtime_signs, :vsn))}"
    )

    runtime_config()

    children =
      [
        :hackney_pool.child_spec(:default, []),
        :hackney_pool.child_spec(:arinc_pool, []),
        worker(Engine.Health, []),
        worker(Engine.Config, []),
        worker(Engine.Predictions, []),
        worker(Engine.ScheduledHeadways, []),
        worker(Engine.Departures, []),
        worker(Engine.Static, []),
        worker(Engine.Alerts, []),
        worker(MessageQueue, [])
      ] ++
        http_updater_children() ++
        [
          supervisor(Signs.Supervisor, [])
        ]

    opts = [strategy: :one_for_one, name: __MODULE__]
    {:ok, _} = Logger.add_backend(Sentry.LoggerBackend)
    Supervisor.start_link(children, opts)
  end

  # Copies settings from OS environment variables into the application env.
  # Each update is matched against :ok so a bad value fails start-up loudly.
  @spec runtime_config() :: :ok
  def runtime_config do
    env = System.get_env()
    :ok = Config.update_env(env, :sign_head_end_host, "SIGN_HEAD_END_HOST")
    :ok = Config.update_env(env, :sign_ui_url, "SIGN_UI_URL")
    :ok = Config.update_env(env, :sign_ui_api_key, "SIGN_UI_API_KEY", private?: true)
    :ok = Config.update_env(env, :trip_update_url, "TRIP_UPDATE_URL")
    :ok = Config.update_env(env, :vehicle_positions_url, "VEHICLE_POSITIONS_URL")
    :ok = Config.update_env(env, :s3_bucket, "SIGNS_S3_BUCKET")
    :ok = Config.update_env(env, :s3_path, "SIGNS_S3_PATH")
    :ok = Config.update_env(env, :api_v3_key, "API_V3_KEY", private?: true)
    :ok = Config.update_env(env, :api_v3_url, "API_V3_URL")

    :ok =
      Config.update_env(env, :filter_uncertain_predictions?, "FILTER_UNCERTAIN_PREDICTIONS",
        type: :boolean
      )

    :ok =
      Config.update_env(env, :number_of_http_updaters, "NUMBER_OF_HTTP_UPDATERS", type: :integer)
  end

  # One {PaEss.HttpUpdater, index} child spec per configured updater.
  def http_updater_children do
    num_children = Application.get_env(:realtime_signs, :number_of_http_updaters)

    for i <- 1..num_children do
      {PaEss.HttpUpdater, i}
    end
  end
end
| 32.014925 | 97 | 0.660606 |
0849fceab4fe72d94d86cb3dfbf1b73492014f15 | 3,108 | ex | Elixir | lib/erlnote_web/schema/accounts_types.ex | alchexmist/erlnote | e1f164e63616316e1d3869ebfae5ed2ae96c3ccd | [
"Apache-2.0"
] | null | null | null | lib/erlnote_web/schema/accounts_types.ex | alchexmist/erlnote | e1f164e63616316e1d3869ebfae5ed2ae96c3ccd | [
"Apache-2.0"
] | 1 | 2019-11-02T13:46:12.000Z | 2019-11-02T13:46:12.000Z | lib/erlnote_web/schema/accounts_types.ex | alchexmist/erlnote | e1f164e63616316e1d3869ebfae5ed2ae96c3ccd | [
"Apache-2.0"
] | null | null | null | defmodule ErlnoteWeb.Schema.AccountsTypes do
  use Absinthe.Schema.Notation

  alias ErlnoteWeb.Resolvers

  # GraphQL output type for a user credential.
  object :credential do
    field :email, :string
    field :password_hash, :string
  end

  # object :user do
  #   field :id, :id
  #   field :name, :string
  #   field :username, :string
  #   field :credentials, list_of(:credential)
  # end

  # Discriminator for the get-user filter below: look up by ID or by username.
  enum :get_user_filter_type do
    value :id #, as: "id" # With `as:` strings are received instead of atoms.
    value :username #, as: "username"
  end

  @desc "Filtering options for get user"
  input_object :get_user_filter do
    @desc "ID or USERNAME"
    field :type, non_null(:get_user_filter_type)
    @desc "String value"
    field :value, non_null(:string)
  end

  input_object :user_credential_input do
    field :email, non_null(:string)
    field :password, non_null(:string)
  end

  # You can't use object type for user input; you need to create input object type.
  # userAccount is an alias for createUserAccount in the response.
  # mutation CreateUserAccount($accountData: UserAccountInput!) {
  #   userAccount: createUserAccount(input: $accountData) {
  #     id
  #     name
  #     username
  #     credentials {
  #       email
  #       password_hash
  #     }
  #   }
  # }
  # QUERY VARIABLES
  # {
  #   "accountData": {
  #     "username": "whitehat",
  #     "name": "White Hat",
  #     "credentials": [
  #       {
  #         "password": "12345678910",
  #         "email": "whitehat@example.com"
  #       }
  #     ]
  #   }
  # }
  input_object :user_account_input do
    field :name, non_null(:string)
    field :username, non_null(:string)
    field :credentials, non_null(list_of(:user_credential_input))
    # field :email, non_null(:string)
    # field :password, non_null(:string)
  end

  # Authenticated session: the token plus the user it belongs to.
  object :session do
    field :token, :string
    field :user, :user
  end

  object :accounts_queries do
    # query UserList {
    #   users {
    #     id
    #     name
    #     username
    #   }
    # }
    @desc "The list of available users in the system"
    field :users, list_of(:user) do
      resolve &Resolvers.Accounts.users/3
    end

    # query UserByUsername ($term: String) { //BROKEN!!!
    #   user(username: $term) {
    #     name
    #     id
    #     username
    #   }
    # }
    # POST Request Body
    # Variables => JSON Encoded
    # “{
    #   "query": "query UserByUsername ($term: String) { user(username: $term) { name id username} }",
    #   "variables": "{\"term\": \"asm\"}"
    # }”
    # query UserById{
    #   user (filter: {type: ID, value: "1"}) {
    #     name
    #     id
    #     username
    #   }
    # }
    # query UserByUsername {
    #   user (filter: {type: USERNAME, value: "asm"}) {
    #     name
    #     id
    #     username
    #   }
    # }
    @desc "Get a user of the system"
    field :user, :user do
      arg :filter, non_null(:get_user_filter)
      resolve &Resolvers.Accounts.user/3
    end
    # End :account_queries
  end
end | 25.064516 | 108 | 0.562098 |
084a1a5ef78d0f642386f90d4850c4398badfb48 | 2,200 | exs | Elixir | config/prod.exs | lukkor/exlog | f2812d25e7712c41db5a9bf5bf3977255916c8e1 | [
"MIT"
] | null | null | null | config/prod.exs | lukkor/exlog | f2812d25e7712c41db5a9bf5bf3977255916c8e1 | [
"MIT"
] | null | null | null | config/prod.exs | lukkor/exlog | f2812d25e7712c41db5a9bf5bf3977255916c8e1 | [
"MIT"
] | null | null | null | use Mix.Config
# NOTE(review): this file starts with `use Mix.Config`, which is deprecated
# since Elixir 1.9 in favour of `import Config` — confirm the target Elixir
# version before changing.
# For production, we often load configuration from external
# sources, such as your system environment. For this reason,
# you won't find the :http configuration below, but set inside
# Exlog.Web.Endpoint.load_from_system_env/1 dynamically.
# Any dynamic configuration should be moved to such function.
#
# Don't forget to configure the url host to something meaningful,
# Phoenix uses this information when generating URLs.
#
# Finally, we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the mix phoenix.digest task
# which you typically run after static files are built.
config :exlog, Exlog.Web.Endpoint,
  on_init: {Exlog.Web.Endpoint, :load_from_system_env, []},
  url: [host: "example.com", port: 80],
  cache_static_manifest: "priv/static/cache_manifest.json"

# Do not print debug messages in production
config :logger, level: :info

# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
#     config :exlog, Exlog.Web.Endpoint,
#       ...
#       url: [host: "example.com", port: 443],
#       https: [:inet6,
#               port: 443,
#               keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
#               certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
#     config :exlog, Exlog.Web.Endpoint,
#       force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.

# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
#     config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
#     config :exlog, Exlog.Web.Endpoint, server: true
#
# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"
| 33.846154 | 67 | 0.718182 |
084a2029a664a75706585a5bb23b9d622c39a6b8 | 264 | ex | Elixir | lib/nostalgic_games_web/controllers/fallback_controller.ex | rafaelcorazzi/nostalgic_games | 34734aa3a89194730d0cc5e137f3db5f597979d4 | [
"MIT"
] | null | null | null | lib/nostalgic_games_web/controllers/fallback_controller.ex | rafaelcorazzi/nostalgic_games | 34734aa3a89194730d0cc5e137f3db5f597979d4 | [
"MIT"
] | null | null | null | lib/nostalgic_games_web/controllers/fallback_controller.ex | rafaelcorazzi/nostalgic_games | 34734aa3a89194730d0cc5e137f3db5f597979d4 | [
"MIT"
defmodule NostalgicGamesWeb.FallbackController do
  use NostalgicGamesWeb, :controller

  # Translates an `{:error, result}` returned by a controller action into a
  # 400 response rendered through the shared ErrorView.
  def call(conn, {:error, result}) do
    conn = put_status(conn, :bad_request)
    conn = put_view(conn, NostalgicGamesWeb.ErrorView)
    render(conn, "400.json", result: result)
  end
end
| 24 | 49 | 0.719697 |
084a278d2e35a24d351abc728e3e6c7c4645d512 | 2,452 | ex | Elixir | web/controllers/html_cachegroup_parameter_controller.ex | rob05c/tox | f54847ca058ad24b909341ad65d595a4069d2471 | [
"Apache-2.0"
] | 2 | 2016-11-16T17:24:21.000Z | 2019-02-15T05:38:27.000Z | web/controllers/html_cachegroup_parameter_controller.ex | rob05c/tox | f54847ca058ad24b909341ad65d595a4069d2471 | [
"Apache-2.0"
] | null | null | null | web/controllers/html_cachegroup_parameter_controller.ex | rob05c/tox | f54847ca058ad24b909341ad65d595a4069d2471 | [
"Apache-2.0"
] | null | null | null | defmodule Tox.HtmlCachegroupParameterController do
  use Tox.Web, :controller

  alias Tox.CachegroupParameter

  # GET index: lists all cachegroup/parameter associations.
  def index(conn, _params) do
    cachegroupparameters = Repo.all(CachegroupParameter)
    render(conn, "index.html", cachegroupparameters: cachegroupparameters)
  end

  # GET new: renders an empty creation form.
  def new(conn, _params) do
    changeset = CachegroupParameter.changeset(%CachegroupParameter{})
    render(conn, "new.html", changeset: changeset)
  end

  # POST create: inserts the record, redirecting to the index on success or
  # re-rendering the form with validation errors on failure.
  def create(conn, %{"cachegroup_parameter" => cachegroup_parameter_params}) do
    changeset = CachegroupParameter.changeset(%CachegroupParameter{}, cachegroup_parameter_params)

    case Repo.insert(changeset) do
      {:ok, _cachegroup_parameter} ->
        conn
        |> put_flash(:info, "Cachegroup parameter created successfully.")
        |> redirect(to: html_cachegroup_parameter_path(conn, :index))
      {:error, changeset} ->
        render(conn, "new.html", changeset: changeset)
    end
  end

  # GET show: displays a single record; 404s (via Repo.get!) if missing.
  def show(conn, %{"id" => id}) do
    cachegroup_parameter = Repo.get!(CachegroupParameter, id)
    render(conn, "show.html", cachegroup_parameter: cachegroup_parameter)
  end

  # GET edit: renders the edit form for an existing record.
  def edit(conn, %{"id" => id}) do
    cachegroup_parameter = Repo.get!(CachegroupParameter, id)
    changeset = CachegroupParameter.changeset(cachegroup_parameter)
    render(conn, "edit.html", cachegroup_parameter: cachegroup_parameter, changeset: changeset)
  end

  # PUT/PATCH update: applies changes, redirecting to show on success or
  # re-rendering the edit form with errors on failure.
  def update(conn, %{"id" => id, "cachegroup_parameter" => cachegroup_parameter_params}) do
    cachegroup_parameter = Repo.get!(CachegroupParameter, id)
    changeset = CachegroupParameter.changeset(cachegroup_parameter, cachegroup_parameter_params)

    case Repo.update(changeset) do
      {:ok, cachegroup_parameter} ->
        conn
        |> put_flash(:info, "Cachegroup parameter updated successfully.")
        |> redirect(to: html_cachegroup_parameter_path(conn, :show, cachegroup_parameter))
      {:error, changeset} ->
        render(conn, "edit.html", cachegroup_parameter: cachegroup_parameter, changeset: changeset)
    end
  end

  # DELETE delete: removes the record and redirects to the index.
  def delete(conn, %{"id" => id}) do
    cachegroup_parameter = Repo.get!(CachegroupParameter, id)

    # Here we use delete! (with a bang) because we expect
    # it to always work (and if it does not, it will raise).
    Repo.delete!(cachegroup_parameter)

    conn
    |> put_flash(:info, "Cachegroup parameter deleted successfully.")
    |> redirect(to: html_cachegroup_parameter_path(conn, :index))
  end
end
| 37.151515 | 99 | 0.719005 |
084a515aa745486af7cbb322ba5865e10f2e2caa | 197 | exs | Elixir | priv/repo/migrations/20170924205210_add_ectology_to_rooms.exs | stevegrossi/ex_venture | e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa | [
"MIT"
] | 2 | 2019-05-14T11:36:44.000Z | 2020-07-01T08:54:04.000Z | priv/repo/migrations/20170924205210_add_ectology_to_rooms.exs | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | null | null | null | priv/repo/migrations/20170924205210_add_ectology_to_rooms.exs | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | 1 | 2021-01-29T14:12:40.000Z | 2021-01-29T14:12:40.000Z | defmodule Data.Repo.Migrations.AddEctologyToRooms do
use Ecto.Migration
def change do
alter table(:rooms) do
add :ecology, :string, default: "default", null: false
end
end
end
| 19.7 | 60 | 0.705584 |
084a52536b560e0a25f900f8abf77084eb58d0b4 | 1,402 | ex | Elixir | lib/romeo/connection/features.ex | crewstad/romeo | 94ebe08c9de92c21a5d10018df0512d05bc1fa7a | [
"MIT"
] | 78 | 2015-11-17T14:24:27.000Z | 2022-01-18T05:24:23.000Z | lib/romeo/connection/features.ex | crewstad/romeo | 94ebe08c9de92c21a5d10018df0512d05bc1fa7a | [
"MIT"
] | 38 | 2015-12-16T07:35:28.000Z | 2021-04-06T08:52:01.000Z | lib/romeo/connection/features.ex | crewstad/romeo | 94ebe08c9de92c21a5d10018df0512d05bc1fa7a | [
"MIT"
] | 56 | 2015-11-23T17:57:52.000Z | 2022-01-20T16:17:29.000Z | defmodule Romeo.Connection.Features do
@moduledoc """
Parses XMPP Stream features.
"""
use Romeo.XML
@type t :: %__MODULE__{}
defstruct [
amp?: false,
compression?: false,
registration?: false,
stream_management?: false,
tls?: false,
mechanisms: []
]
def parse_stream_features(features) do
%__MODULE__{
amp?: supports?(features, "amp"),
compression?: supports?(features, "compression"),
registration?: supports?(features, "register"),
stream_management?: supports?(features, "sm"),
tls?: supports?(features, "starttls"),
mechanisms: supported_auth_mechanisms(features)
}
end
def supported_auth_mechanisms(features) do
case Romeo.XML.subelement(features, "mechanisms") do
xml when Record.is_record(xml, :xmlel) ->
mechanisms = xmlel(xml, :children)
for mechanism <- mechanisms, into: [], do: Romeo.XML.cdata(mechanism)
nil -> []
end
end
def supports?(features, "compression") do
case Romeo.XML.subelement(features, "compression") do
xml when Record.is_record(xml, :xmlel) ->
methods = xmlel(xml, :children)
for method <- methods, into: [], do: Romeo.XML.cdata(method)
_ -> false
end
end
def supports?(features, feature) do
case Romeo.XML.subelement(features, feature) do
nil -> false
_ -> true
end
end
end
| 26.45283 | 77 | 0.639087 |
084a5a7988d72dd5e29b91ed6027edce087b7432 | 2,617 | ex | Elixir | lib/tasks/mfa.ex | kianmeng/elixir_git_hooks | 3abff7e651214cdd54e04dd6e3221fa97ceefbcb | [
"MIT"
] | null | null | null | lib/tasks/mfa.ex | kianmeng/elixir_git_hooks | 3abff7e651214cdd54e04dd6e3221fa97ceefbcb | [
"MIT"
] | null | null | null | lib/tasks/mfa.ex | kianmeng/elixir_git_hooks | 3abff7e651214cdd54e04dd6e3221fa97ceefbcb | [
"MIT"
] | null | null | null | defmodule GitHooks.Tasks.MFA do
@moduledoc """
Represents a `{module, function, arity}` (a.k.a. `mfa`) that will be evaluated
by the Kernel module.
An `mfa` should be configured as `{module, function, arity}`. The function of
the module **will always receive the hook arguments** and the arity is
expected to match the same number to avoid any unexpected behaviour.
See [Elixir documentation](https://hexdocs.pm/elixir/typespecs.html#types-and-their-syntax) for more information.
For example:
```elixir
config :git_hooks,
hooks: [
pre_commit: [
{MyModule, :my_function, 1}
]
]
```
"""
@typedoc """
Represents an `mfa` to be executed.
"""
@type t :: %__MODULE__{
module: atom,
function: atom,
args: [any],
result: term
}
defstruct [:module, :function, args: [], result: nil]
@doc """
Creates a new `mfa` struct.
### Example
iex> #{__MODULE__}.new({MyModule, :my_function, 1}, :pre_commit, ["commit message"])
%#{__MODULE__}{module: MyModule, function: :my_function, args: ["commit message"]}
"""
@spec new(mfa(), GitHooks.git_hook_type(), GitHooks.git_hook_args()) :: __MODULE__.t()
def new({module, function, arity}, git_hook_type, git_hook_args) do
expected_arity = length(git_hook_args)
if arity != expected_arity do
raise """
Invalid #{module}.#{function} arity for #{git_hook_type}, expected #{expected_arity} but got #{
arity
}. Check the Git hooks documentation to fix the expected parameters.
"""
end
%__MODULE__{
module: module,
function: function,
args: git_hook_args
}
end
end
defimpl GitHooks.Task, for: GitHooks.Tasks.MFA do
alias GitHooks.Tasks.MFA
alias GitHooks.Printer
# Kernel.apply will throw a error if something fails
def run(
%MFA{
module: module,
function: function,
args: args
} = mfa,
_opts
) do
result = Kernel.apply(module, function, args)
Map.put(mfa, :result, result)
rescue
error ->
IO.warn(inspect(error))
Map.put(mfa, :result, error)
end
def success?(%MFA{result: :ok}), do: true
def success?(%MFA{result: _}), do: false
def print_result(%MFA{module: module, function: function, result: :ok} = mix_task) do
Printer.success("`#{module}.#{function}` was successful")
mix_task
end
def print_result(%MFA{module: module, function: function, result: _} = mix_task) do
Printer.error("`#{module}.#{function}` execution failed")
mix_task
end
end
| 25.910891 | 115 | 0.631639 |
084a6ecdf4ad4984aae3f8618740ded23d2fb45e | 609 | ex | Elixir | lib/news/user_password.ex | randomlabs/news | 6aa200858bac69613af1de91420c6425f4517853 | [
"MIT"
] | 3 | 2015-08-12T20:45:57.000Z | 2015-10-26T09:20:10.000Z | lib/news/user_password.ex | randomlabs/news | 6aa200858bac69613af1de91420c6425f4517853 | [
"MIT"
] | 15 | 2015-08-12T16:20:42.000Z | 2015-10-12T16:12:15.000Z | lib/news/user_password.ex | randomlabs/news | 6aa200858bac69613af1de91420c6425f4517853 | [
"MIT"
] | 1 | 2015-08-20T17:43:10.000Z | 2015-08-20T17:43:10.000Z | defmodule News.UserPassword do
alias News.Repo
import Ecto.Changeset, only: [put_change: 3]
import Comeonin.Bcrypt, only: [hashpwsalt: 1]
@doc """
Generates a password for the user changeset and stores it to the changeset as encrypted_password.
"""
def generate_password(changeset) do
put_change(changeset, :hash, hashpwsalt(changeset.params["password"]))
end
@doc """
Generates the password for the changeset and then stores it to the database.
"""
def generate_password_and_store_user(changeset) do
changeset
|> generate_password
|> Repo.insert!
end
end
| 26.478261 | 101 | 0.719212 |
084a7a30568461b9d06ef6e104fac81883885bfa | 1,178 | exs | Elixir | mix.exs | c4710n/scrivener_html_semi | dbf647508d07e98a5c484f36c0f0ebc2dae777ea | [
"MIT"
] | null | null | null | mix.exs | c4710n/scrivener_html_semi | dbf647508d07e98a5c484f36c0f0ebc2dae777ea | [
"MIT"
] | null | null | null | mix.exs | c4710n/scrivener_html_semi | dbf647508d07e98a5c484f36c0f0ebc2dae777ea | [
"MIT"
] | null | null | null | defmodule Scrivener.HTML.MixProject do
use Mix.Project
@version "3.1.1"
@github_url "https://github.com/c4710n/scrivener_html_semi"
def project do
[
app: :scrivener_html_semi,
description: "HTML helpers for Scrivener.",
version: @version,
elixir: "~> 1.11",
start_permanent: Mix.env() == :prod,
deps: deps(),
package: package(),
aliases: aliases(),
# Docs
source_url: @github_url,
homepage_url: @github_url,
docs: [
main: "readme",
extras: ["README.md"]
]
]
end
def application do
[
extra_applications: [:logger]
]
end
defp deps do
[
{:scrivener, "~> 1.2 or ~> 2.0"},
{:phoenix_html, ">= 0.0.0"},
{:ex_doc, "~> 0.23", only: :dev, runtime: false}
]
end
defp package do
[
licenses: ["MIT"],
links: %{GitHub: @github_url}
]
end
defp aliases do
[publish: ["hex.publish", "tag"], tag: &tag_release/1]
end
defp tag_release(_) do
Mix.shell().info("Tagging release as #{@version}")
System.cmd("git", ["tag", "#{@version}"])
System.cmd("git", ["push", "--tags"])
end
end
| 19.966102 | 61 | 0.550934 |
084a86d22a4312864222056c74b13fcae8898a54 | 1,016 | exs | Elixir | config/prod.exs | jrieger/cforum_ex | 61f6ce84708cb55bd0feedf69853dae64146a7a0 | [
"MIT"
] | 16 | 2019-04-04T06:33:33.000Z | 2021-08-16T19:34:31.000Z | config/prod.exs | jrieger/cforum_ex | 61f6ce84708cb55bd0feedf69853dae64146a7a0 | [
"MIT"
] | 294 | 2019-02-10T11:10:27.000Z | 2022-03-30T04:52:53.000Z | config/prod.exs | jrieger/cforum_ex | 61f6ce84708cb55bd0feedf69853dae64146a7a0 | [
"MIT"
] | 10 | 2019-02-10T10:39:24.000Z | 2021-07-06T11:46:05.000Z | use Mix.Config
# For production, we configure the host to read the PORT
# from the system environment. Therefore, you will need
# to set PORT=80 before running your server.
#
# You should also configure the url host to something
# meaningful, we use this information when generating URLs.
#
# Finally, we also include the path to a manifest
# containing the digested version of static files. This
# manifest is generated by the mix phoenix.digest task
# which you typically run after static files are built.
config :cforum, CforumWeb.Endpoint,
cache_static_manifest: "priv/static/cache_manifest.json",
server: true,
root: ".",
version: Mix.Project.config()[:version]
# Do not print debug messages in production
config :logger, level: :info
config :cforum,
paginator: [
per_page: 50,
distance: 3
],
environment: Mix.env(),
search_dict: "german"
config :appsignal, :config,
otp_app: :cforum,
active: true
config :cforum, Cforum.Repo,
adapter: Ecto.Adapters.Postgres,
pool_size: 20
| 26.736842 | 59 | 0.740157 |
084a90f4669c95b05859ba28855cf792b4a4a9f8 | 395 | exs | Elixir | 30daysofcode/day27_testing.exs | nathanchere/HackerRank_Elixir | 607e4ae10d94edb20296c8979179648af4af3ca6 | [
"MIT"
] | 2 | 2016-09-20T11:56:33.000Z | 2016-10-20T20:11:04.000Z | 30daysofcode/day27_testing.exs | nathanchere/HackerRank_Elixir | 607e4ae10d94edb20296c8979179648af4af3ca6 | [
"MIT"
] | null | null | null | 30daysofcode/day27_testing.exs | nathanchere/HackerRank_Elixir | 607e4ae10d94edb20296c8979179648af4af3ca6 | [
"MIT"
] | null | null | null | # HackerRank "30 Days of Code" - Day 27 - Testing
# This 'excercise' is a complete waste of space and the
# imbecile who put it together should be taken out into
# a remote field and shot.
IO.puts("5")
IO.puts("5 3")
IO.puts("-1 90 999 100 0")
IO.puts("4 2")
IO.puts("0 -1 2 1")
IO.puts("3 3")
IO.puts("-1 0 1")
IO.puts("6 1")
IO.puts("-1 0 1 -1 2 3")
IO.puts("7 3")
IO.puts("-1 0 1 2 3 4 5")
| 21.944444 | 55 | 0.622785 |
084aa20e47188117e8f9967dab70bfbcb2c7bc19 | 154 | exs | Elixir | .formatter.exs | geometerio/euclid | 9a9e059ec77d87858ae7878df8d4d75dc01c57f8 | [
"MIT-0"
] | 4 | 2021-06-14T13:54:05.000Z | 2021-10-22T02:55:16.000Z | .formatter.exs | geometerio/euclid | 9a9e059ec77d87858ae7878df8d4d75dc01c57f8 | [
"MIT-0"
] | 3 | 2021-06-15T21:45:51.000Z | 2022-01-14T20:08:32.000Z | .formatter.exs | geometerio/euclid | 9a9e059ec77d87858ae7878df8d4d75dc01c57f8 | [
"MIT-0"
] | null | null | null | [
import_deps: [],
inputs: ["*.{ex,exs}", "{config,lib,test}/**/*.{ex,exs}"],
line_length: 150,
locals_without_parens: [],
subdirectories: []
]
| 19.25 | 60 | 0.577922 |
084ac039c85bbe7dc7499b96989d0993c823ef40 | 1,214 | ex | Elixir | clients/memcache/lib/google_api/memcache/v1beta2/model/zone_metadata.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/memcache/lib/google_api/memcache/v1beta2/model/zone_metadata.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/memcache/lib/google_api/memcache/v1beta2/model/zone_metadata.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Memcache.V1beta2.Model.ZoneMetadata do
@moduledoc """
## Attributes
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{}
end
defimpl Poison.Decoder, for: GoogleApi.Memcache.V1beta2.Model.ZoneMetadata do
def decode(value, options) do
GoogleApi.Memcache.V1beta2.Model.ZoneMetadata.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Memcache.V1beta2.Model.ZoneMetadata do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 28.904762 | 77 | 0.759473 |
084adeac8751cfc42886a1ca4c35e84201e4de67 | 1,868 | ex | Elixir | clients/web_risk/lib/google_api/web_risk/v1/model/google_cloud_webrisk_v1_search_uris_response_threat_uri.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/web_risk/lib/google_api/web_risk/v1/model/google_cloud_webrisk_v1_search_uris_response_threat_uri.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/web_risk/lib/google_api/web_risk/v1/model/google_cloud_webrisk_v1_search_uris_response_threat_uri.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.WebRisk.V1.Model.GoogleCloudWebriskV1SearchUrisResponseThreatUri do
@moduledoc """
Contains threat information on a matching uri.
## Attributes
* `expireTime` (*type:* `DateTime.t`, *default:* `nil`) - The cache lifetime for the returned match. Clients must not cache this response past this timestamp to avoid false positives.
* `threatTypes` (*type:* `list(String.t)`, *default:* `nil`) - The ThreatList this threat belongs to.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:expireTime => DateTime.t() | nil,
:threatTypes => list(String.t()) | nil
}
field(:expireTime, as: DateTime)
field(:threatTypes, type: :list)
end
defimpl Poison.Decoder,
for: GoogleApi.WebRisk.V1.Model.GoogleCloudWebriskV1SearchUrisResponseThreatUri do
def decode(value, options) do
GoogleApi.WebRisk.V1.Model.GoogleCloudWebriskV1SearchUrisResponseThreatUri.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.WebRisk.V1.Model.GoogleCloudWebriskV1SearchUrisResponseThreatUri do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.963636 | 187 | 0.737687 |
084b04bac1fcb89c35b9f217298233aa5cdbc383 | 696 | exs | Elixir | mix.exs | leomindez/Elixir-Cards | f6cd96c2d7872055fa683ccfd344d40929c56a16 | [
"MIT"
] | 1 | 2020-09-03T18:06:25.000Z | 2020-09-03T18:06:25.000Z | mix.exs | leomindez/Elixir-Cards | f6cd96c2d7872055fa683ccfd344d40929c56a16 | [
"MIT"
] | null | null | null | mix.exs | leomindez/Elixir-Cards | f6cd96c2d7872055fa683ccfd344d40929c56a16 | [
"MIT"
] | null | null | null | defmodule Cards.Mixfile do
use Mix.Project
def project do
[app: :cards,
version: "0.1.0",
elixir: "~> 1.3",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps()]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[applications: [:logger]]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[
{:ex_doc,"~> 0.12"}
]
end
end
| 19.885714 | 77 | 0.590517 |
084b3a8d0eae5d06b37690cf6c98ed4f059f0bd5 | 3,050 | exs | Elixir | lib/mix/test/mix/tasks/profile.cprof_test.exs | andrewtimberlake/elixir | a1c4ffc897f9407fe7e739e20e697805fbbff810 | [
"Apache-2.0"
] | 1 | 2019-10-11T01:36:26.000Z | 2019-10-11T01:36:26.000Z | lib/mix/test/mix/tasks/profile.cprof_test.exs | andrewtimberlake/elixir | a1c4ffc897f9407fe7e739e20e697805fbbff810 | [
"Apache-2.0"
] | 1 | 2019-04-25T12:52:49.000Z | 2019-04-25T13:27:31.000Z | lib/mix/test/mix/tasks/profile.cprof_test.exs | andrewtimberlake/elixir | a1c4ffc897f9407fe7e739e20e697805fbbff810 | [
"Apache-2.0"
] | null | null | null | Code.require_file("../../test_helper.exs", __DIR__)
defmodule Mix.Tasks.Profile.CprofTest do
use MixTest.Case
import ExUnit.CaptureIO
alias Mix.Tasks.Profile.Cprof
@expr "Enum.each(1..5, &String.Chars.Integer.to_string/1)"
test "profiles evaluated expression", context do
in_tmp(context.test, fn ->
assert capture_io(fn ->
Cprof.run(["-e", @expr])
end) =~ ~r(String\.Chars\.Integer\.to_string\/1 *\d)
end)
end
test "profiles the script", context do
in_tmp(context.test, fn ->
profile_script_name = "profile_script.ex"
File.write!(profile_script_name, @expr)
assert capture_io(fn ->
Cprof.run([profile_script_name])
end) =~ ~r(String\.Chars\.Integer\.to_string\/1 *\d)
end)
end
test "filters based on limit", context do
in_tmp(context.test, fn ->
refute capture_io(fn ->
Cprof.run(["--limit", "5", "-e", @expr])
end) =~ ~r(:erlang\.trace_pattern\/3 *\d)
end)
end
test "filters based on module", context do
in_tmp(context.test, fn ->
refute capture_io(fn ->
Cprof.run(["--module", "Enum", "-e", @expr])
end) =~ ~r(String\.Chars\.Integer\.to_string\/1 *\d)
end)
end
test "Module matching", context do
in_tmp(context.test, fn ->
refute capture_io(fn ->
Cprof.run(["--matching", "Enum", "-e", @expr])
end) =~ ~r(String\.Chars\.Integer\.to_string\/1 *\d)
end)
end
test "Module.function matching", context do
in_tmp(context.test, fn ->
refute capture_io(fn ->
Cprof.run(["--matching", "Enum.each", "-e", @expr])
end) =~ ~r(anonymous fn\/3 in Enum\.each\/2 *\d)
end)
end
test "Module.function/arity matching", context do
in_tmp(context.test, fn ->
assert capture_io(fn ->
Cprof.run(["--matching", "Enum.each/8", "-e", @expr])
end) =~ ~r(Profile done over 0 matching functions)
end)
end
test "errors on missing files", context do
in_tmp(context.test, fn ->
msg = "No files matched pattern \"non-existent\" given to --require"
assert_raise Mix.Error, msg, fn ->
capture_io(fn -> Cprof.run(["-r", "non-existent"]) end)
end
assert_raise Mix.Error, msg, fn ->
capture_io(fn -> Cprof.run(["-pr", "non-existent"]) end)
end
assert_raise Mix.Error, "No such file: non-existent", fn ->
capture_io(fn -> Cprof.run(["non-existent"]) end)
end
File.mkdir_p!("lib")
assert_raise Mix.Error, "No such file: lib", fn ->
capture_io(fn -> Cprof.run(["lib"]) end)
end
end)
end
test "warmup", context do
in_tmp(context.test, fn ->
assert capture_io(fn ->
Cprof.run(["-e", @expr])
end) =~ "Warmup..."
refute capture_io(fn ->
Cprof.run(["-e", "Enum.each(1..5, fn(_) -> MapSet.new end)", "--no-warmup"])
end) =~ "Warmup..."
end)
end
end
| 28.504673 | 91 | 0.562295 |
084b50902b1d19f85f9d399238810deb76e37627 | 3,170 | ex | Elixir | lib/codes/codes_c18.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_c18.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_c18.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_C18 do
alias IcdCode.ICDCode
def _C180 do
%ICDCode{full_code: "C180",
category_code: "C18",
short_code: "0",
full_name: "Malignant neoplasm of cecum",
short_name: "Malignant neoplasm of cecum",
category_name: "Malignant neoplasm of cecum"
}
end
def _C181 do
%ICDCode{full_code: "C181",
category_code: "C18",
short_code: "1",
full_name: "Malignant neoplasm of appendix",
short_name: "Malignant neoplasm of appendix",
category_name: "Malignant neoplasm of appendix"
}
end
def _C182 do
%ICDCode{full_code: "C182",
category_code: "C18",
short_code: "2",
full_name: "Malignant neoplasm of ascending colon",
short_name: "Malignant neoplasm of ascending colon",
category_name: "Malignant neoplasm of ascending colon"
}
end
def _C183 do
%ICDCode{full_code: "C183",
category_code: "C18",
short_code: "3",
full_name: "Malignant neoplasm of hepatic flexure",
short_name: "Malignant neoplasm of hepatic flexure",
category_name: "Malignant neoplasm of hepatic flexure"
}
end
def _C184 do
%ICDCode{full_code: "C184",
category_code: "C18",
short_code: "4",
full_name: "Malignant neoplasm of transverse colon",
short_name: "Malignant neoplasm of transverse colon",
category_name: "Malignant neoplasm of transverse colon"
}
end
def _C185 do
%ICDCode{full_code: "C185",
category_code: "C18",
short_code: "5",
full_name: "Malignant neoplasm of splenic flexure",
short_name: "Malignant neoplasm of splenic flexure",
category_name: "Malignant neoplasm of splenic flexure"
}
end
def _C186 do
%ICDCode{full_code: "C186",
category_code: "C18",
short_code: "6",
full_name: "Malignant neoplasm of descending colon",
short_name: "Malignant neoplasm of descending colon",
category_name: "Malignant neoplasm of descending colon"
}
end
def _C187 do
%ICDCode{full_code: "C187",
category_code: "C18",
short_code: "7",
full_name: "Malignant neoplasm of sigmoid colon",
short_name: "Malignant neoplasm of sigmoid colon",
category_name: "Malignant neoplasm of sigmoid colon"
}
end
def _C188 do
%ICDCode{full_code: "C188",
category_code: "C18",
short_code: "8",
full_name: "Malignant neoplasm of overlapping sites of colon",
short_name: "Malignant neoplasm of overlapping sites of colon",
category_name: "Malignant neoplasm of overlapping sites of colon"
}
end
def _C189 do
%ICDCode{full_code: "C189",
category_code: "C18",
short_code: "9",
full_name: "Malignant neoplasm of colon, unspecified",
short_name: "Malignant neoplasm of colon, unspecified",
category_name: "Malignant neoplasm of colon, unspecified"
}
end
end
| 32.680412 | 75 | 0.616404 |
084b97636976517f7d9a5bff1a8dd0e886cb777b | 797 | exs | Elixir | implements/series/series.exs | MickeyOoh/Exercises | 3b34e7fdab4a09e0269d20c68531b4fb75bb7f16 | [
"MIT"
] | null | null | null | implements/series/series.exs | MickeyOoh/Exercises | 3b34e7fdab4a09e0269d20c68531b4fb75bb7f16 | [
"MIT"
] | 1 | 2018-06-19T18:59:41.000Z | 2018-06-19T18:59:41.000Z | implements/series/series.exs | MickeyOoh/Exercises | 3b34e7fdab4a09e0269d20c68531b4fb75bb7f16 | [
"MIT"
] | null | null | null | defmodule StringSeries do
@doc """
Given a string `s` and a positive integer `size`, return all substrings
of that size. If `size` is greater than the length of `s`, or less than 1,
return an empty list.
"""
@spec slices(s :: String.t(), size :: integer) :: list(String.t())
def slices(_, size) when size < 1, do: []
def slices(s, size) do
#pickup(s, String.length(s), size, [])
s
|> String.graphemes()
|> Enum.chunk_every(size, 1, :discard)
|> Enum.map( &Enum.join/1)
end
def pickup(_,len,size, results)
when len < size, do: Enum.reverse(results)
def pickup(s, _, size, results) do
items = String.slice(s, 0, size)
results = [items | results]
str = String.slice(s, 1..-1)
pickup(str,String.length(str), size, results)
end
end
| 31.88 | 76 | 0.621079 |
084bed83cc02f7b26c1acb7f039ff4a8bdee9006 | 19,730 | ex | Elixir | testData/org/elixir_lang/parser_definition/unmatched_expression_parsing_test_case/UnqualifiedNoArgumentsStabElseStabBlock.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/unmatched_expression_parsing_test_case/UnqualifiedNoArgumentsStabElseStabBlock.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/unmatched_expression_parsing_test_case/UnqualifiedNoArgumentsStabElseStabBlock.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | #
# identifier do expression else * end
#
identifier do one else two end
identifier do one; else; two; end
identifier do
one else two
end
identifier do
one
else
two
end
identifier do one else -> end
identifier do one else -> ; end
identifier do
one else ->
end
identifier do
one
else
->
end
identifier do one else -> two end
identifier do one; else -> two; end
identifier do
one else -> two
end
identifier do
one
else
->
two
end
identifier do one else () -> two end
identifier do one; else () -> two; end
identifier do
one else () -> two
end
identifier do
one
else
()
->
two
end
identifier do one else two -> end
identifier do one else two ->; end
identifier do
one
else
two
->
end
identifier do one else (key: value) -> end
identifier do one else (key: value) ->; end
identifier do
one
else
(key: value)
->
end
identifier do one else (two, three) -> end
identifier do one else (two, three) ->; end
identifier do
one
else
(two, three)
->
end
identifier do one else (two, key: value) -> end
identifier do one else (two, key: value) ->; end
identifier do
one
else
(two, key: value)
->
end
identifier do one else (two, three: 3) when four -> end
identifier do one else (two, three: 3) when four ->; end
identifier do
one
else
(two, three: 3) when four
->
end
identifier do one else (two, three: 3) when four -> five end
identifier do one else (two, three: 3) when four -> five; end
identifier do
one else (two, three: 3) when four
->
five
end
identifier do one else key: value -> end
identifier do one else key: value ->; end
identifier do
one else key: value
->
end
identifier do one else two, three -> end
identifier do one else two, three ->; end
identifier do
one
else
two, three
->
end
identifier do one else two, key: value -> end
identifier do one else two, key: value ->; end
identifier do
one
else
two, key: value
->
end
identifier do one else two, three: 3 when four -> end
identifier do one else two, three: 3 when four ->; end
identifier do
one
else two, three: 3 when four
->
end
identifier do one else two, three: 3 when four -> five end
identifier do one else two, three: 3 when four -> five; end
identifier do
one else two, three: 3 when four
->
five
end
identifier do
one
else
(two_a, three_a: 3) when four_a
->
five_a
else
(two_b, three_b: -3) when four_b
->
five_b
end
#
# identifier do -> else * end
#
identifier do -> else two end
identifier do ->; else; two; end
identifier do
-> else two
end
identifier do
->
else
two
end
identifier do -> else -> end
identifier do -> else -> ; end
identifier do
-> else ->
end
identifier do
->
else
->
end
identifier do -> else -> two end
identifier do ->; else -> two; end
identifier do
-> else -> two
end
identifier do
->
else
->
two
end
identifier do -> else () -> two end
identifier do ->; else () -> two; end
identifier do
-> else () -> two
end
identifier do
->
else
()
->
two
end
identifier do -> else two -> end
identifier do -> else two ->; end
identifier do
->
else
two
->
end
identifier do -> else (key: value) -> end
identifier do -> else (key: value) ->; end
identifier do
->
else
(key: value)
->
end
identifier do -> else (two, three) -> end
identifier do -> else (two, three) ->; end
identifier do
->
else
(two, three)
->
end
identifier do -> else (two, key: value) -> end
identifier do -> else (two, key: value) ->; end
identifier do
->
else
(two, key: value)
->
end
identifier do -> else (two, three: 3) when four -> end
identifier do -> else (two, three: 3) when four ->; end
identifier do
->
else
(two, three: 3) when four
->
end
identifier do -> else (two, three: 3) when four -> five end
identifier do -> else (two, three: 3) when four -> five; end
identifier do
-> else (two, three: 3) when four
->
five
end
identifier do -> else key: value -> end
identifier do -> else key: value ->; end
identifier do
-> else key: value
->
end
identifier do -> else two, three -> end
identifier do -> else two, three ->; end
identifier do
->
else
two, three
->
end
identifier do -> else two, key: value -> end
identifier do -> else two, key: value ->; end
identifier do
->
else
two, key: value
->
end
identifier do -> else two, three: 3 when four -> end
identifier do -> else two, three: 3 when four ->; end
identifier do
->
else two, three: 3 when four
->
end
identifier do -> else two, three: 3 when four -> five end
identifier do -> else two, three: 3 when four -> five; end
identifier do
-> else two, three: 3 when four
->
five
end
identifier do
->
else
(two_a, three_a: 3) when four_a
->
five_a
else
(two_b, three_b: -3) when four_b
->
five_b
end
#
# identifier do -> expression else * end
#
identifier do -> one else two end
identifier do -> one; else; two; end
identifier do
-> one else two
end
identifier do
->
one
else
two
end
identifier do -> one else -> end
identifier do -> one else -> ; end
identifier do
-> one else ->
end
identifier do
->
one
else
->
end
identifier do -> one else -> two end
identifier do -> one; else -> two; end
identifier do
-> one else -> two
end
identifier do
->
one
else
->
two
end
identifier do -> one else () -> two end
identifier do -> one; else () -> two; end
identifier do
-> one else () -> two
end
identifier do
->
one
else
()
->
two
end
identifier do -> one else two -> end
identifier do -> one else two ->; end
identifier do
->
one
else
two
->
end
identifier do -> one else (key: value) -> end
identifier do -> one else (key: value) ->; end
identifier do
->
one
else
(key: value)
->
end
identifier do -> one else (two, three) -> end
identifier do -> one else (two, three) ->; end
identifier do
->
one
else
(two, three)
->
end
identifier do -> one else (two, key: value) -> end
identifier do -> one else (two, key: value) ->; end
identifier do
->
one
else
(two, key: value)
->
end
identifier do -> one else (two, three: 3) when four -> end
identifier do -> one else (two, three: 3) when four ->; end
identifier do
->
one
else
(two, three: 3) when four
->
end
identifier do -> one else (two, three: 3) when four -> five end
identifier do -> one else (two, three: 3) when four -> five; end
identifier do
->
one
else (two, three: 3) when four
->
five
end
identifier do -> one else key: value -> end
identifier do -> one else key: value ->; end
identifier do
->
one
else key: value
->
end
identifier do -> one else two, three -> end
identifier do -> one else two, three ->; end
identifier do
->
one
else
two, three
->
end
identifier do -> one else two, key: value -> end
identifier do -> one else two, key: value ->; end
identifier do
->
one
else
two, key: value
->
end
identifier do -> one else two, three: 3 when four -> end
identifier do -> one else two, three: 3 when four ->; end
identifier do
->
one
else two, three: 3 when four
->
end
identifier do -> one else two, three: 3 when four -> five end
identifier do -> one else two, three: 3 when four -> five; end
identifier do
->
one
else two, three: 3 when four
->
five
end
identifier do
->
one
else
(two_a, three_a: 3) when four_a
->
five_a
else
(two_b, three_b: -3) when four_b
->
five_b
end
#
# identifier do () -> expression else * end
#
identifier do () -> one else two end
identifier do () -> one; else; two; end
identifier do
() -> one else two
end
identifier do
()
->
one
else
two
end
identifier do () -> one else -> end
identifier do () -> one else -> ; end
identifier do
() -> one else ->
end
identifier do
()
->
one
else
->
end
identifier do () -> one else -> two end
identifier do () -> one; else -> two; end
identifier do
() -> one else -> two
end
identifier do
()
->
one
else
->
two
end
identifier do () -> one else () -> two end
identifier do () -> one; else () -> two; end
identifier do
() -> one else () -> two
end
identifier do
()
->
one
else
()
->
two
end
identifier do () -> one else two -> end
identifier do () -> one else two ->; end
identifier do
()
->
one
else
two
->
end
identifier do () -> one else (key: value) -> end
identifier do () -> one else (key: value) ->; end
identifier do
()
->
one
else
(key: value)
->
end
identifier do () -> one else (two, three) -> end
identifier do () -> one else (two, three) ->; end
identifier do
()
->
one
else
(two, three)
->
end
identifier do () -> one else (two, key: value) -> end
identifier do () -> one else (two, key: value) ->; end
identifier do
()
->
one
else
(two, key: value)
->
end
identifier do () -> one else (two, three: 3) when four -> end
identifier do () -> one else (two, three: 3) when four ->; end
identifier do
()
->
one
else
(two, three: 3) when four
->
end
identifier do () -> one else (two, three: 3) when four -> five end
identifier do () -> one else (two, three: 3) when four -> five; end
identifier do
()
->
one
else (two, three: 3) when four
->
five
end
identifier do () -> one else key: value -> end
identifier do () -> one else key: value ->; end
identifier do
()
->
one
else key: value
->
end
identifier do () -> one else two, three -> end
identifier do () -> one else two, three ->; end
identifier do
()
->
one
else
two, three
->
end
identifier do () -> one else two, key: value -> end
identifier do () -> one else two, key: value ->; end
identifier do
()
->
one
else
two, key: value
->
end
identifier do () -> one else two, three: 3 when four -> end
identifier do () -> one else two, three: 3 when four ->; end
identifier do
()
->
one
else two, three: 3 when four
->
end
identifier do () -> one else two, three: 3 when four -> five end
identifier do () -> one else two, three: 3 when four -> five; end
identifier do
()
->
one
else two, three: 3 when four
->
five
end
identifier do
()
->
one
else
(two_a, three_a: 3) when four_a
->
five_a
else
(two_b, three_b: -3) when four_b
->
five_b
end
#
# identifier do expression -> else * end
#
identifier do one -> else two end
identifier do one ->; else; two; end
identifier do
one -> else two
end
identifier do
one
->
else
two
end
identifier do one -> else -> end
identifier do one -> else -> ; end
identifier do
one -> else ->
end
identifier do
one
->
else
->
end
identifier do one -> else -> two end
identifier do one ->; else -> two; end
identifier do
one -> else -> two
end
identifier do
one
->
else
->
two
end
identifier do one -> else () -> two end
identifier do one ->; else () -> two; end
identifier do
one -> else () -> two
end
identifier do
one
->
else
()
->
two
end
identifier do one -> else two -> end
identifier do one -> else two ->; end
identifier do
one
->
else
two
->
end
identifier do one -> else (key: value) -> end
identifier do one -> else (key: value) ->; end
identifier do
one
->
else
(key: value)
->
end
identifier do one -> else (two, three) -> end
identifier do one -> else (two, three) ->; end
identifier do
one
->
else
(two, three)
->
end
identifier do one -> else (two, key: value) -> end
identifier do one -> else (two, key: value) ->; end
identifier do
one
->
else
(two, key: value)
->
end
identifier do one -> else (two, three: 3) when four -> end
identifier do one -> else (two, three: 3) when four ->; end
identifier do
one
->
else
(two, three: 3) when four
->
end
identifier do one -> else (two, three: 3) when four -> five end
identifier do one -> else (two, three: 3) when four -> five; end
identifier do
one
->
else (two, three: 3) when four
->
five
end
identifier do one -> else key: value -> end
identifier do one -> else key: value ->; end
identifier do
one
->
else key: value
->
end
identifier do one -> else two, three -> end
identifier do one -> else two, three ->; end
identifier do
one
->
else
two, three
->
end
identifier do one -> else two, key: value -> end
identifier do one -> else two, key: value ->; end
identifier do
one
->
else
two, key: value
->
end
identifier do one -> else two, three: 3 when four -> end
identifier do one -> else two, three: 3 when four ->; end
identifier do
one
->
else two, three: 3 when four
->
end
identifier do one -> else two, three: 3 when four -> five end
identifier do one -> else two, three: 3 when four -> five; end
identifier do
one
->
else two, three: 3 when four
->
five
end
identifier do
one
->
else
(two_a, three_a: 3) when four_a
->
five_a
else
(two_b, three_b: -3) when four_b
->
five_b
end
#
# identifier do (key: value) -> else * end
#
identifier do (key: value) -> else two end
identifier do (key: value) ->; else; two; end
identifier do
(key: value) -> else two
end
identifier do
(key: value)
->
else
two
end
identifier do (key: value) -> else -> end
identifier do (key: value) -> else -> ; end
identifier do
(key: value) -> else ->
end
identifier do
(key: value)
->
else
->
end
identifier do (key: value) -> else -> two end
identifier do (key: value) ->; else -> two; end
identifier do
(key: value) -> else -> two
end
identifier do
(key: value)
->
else
->
two
end
identifier do (key: value) -> else () -> two end
identifier do (key: value) ->; else () -> two; end
identifier do
(key: value) -> else () -> two
end
identifier do
(key: value)
->
else
()
->
two
end
identifier do (key: value) -> else two -> end
identifier do (key: value) -> else two ->; end
identifier do
(key: value)
->
else
two
->
end
identifier do (key: value) -> else (key: value) -> end
identifier do (key: value) -> else (key: value) ->; end
identifier do
(key: value)
->
else
(key: value)
->
end
identifier do (key: value) -> else (two, three) -> end
identifier do (key: value) -> else (two, three) ->; end
identifier do
(key: value)
->
else
(two, three)
->
end
identifier do (key: value) -> else (two, key: value) -> end
identifier do (key: value) -> else (two, key: value) ->; end
identifier do
(key: value)
->
else
(two, key: value)
->
end
identifier do (key: value) -> else (two, three: 3) when four -> end
identifier do (key: value) -> else (two, three: 3) when four ->; end
identifier do
(key: value)
->
else
(two, three: 3) when four
->
end
identifier do (key: value) -> else (two, three: 3) when four -> five end
identifier do (key: value) -> else (two, three: 3) when four -> five; end
identifier do
(key: value)
->
else (two, three: 3) when four
->
five
end
identifier do (key: value) -> else key: value -> end
identifier do (key: value) -> else key: value ->; end
identifier do
(key: value)
->
else key: value
->
end
identifier do (key: value) -> else two, three -> end
identifier do (key: value) -> else two, three ->; end
identifier do
(key: value)
->
else
two, three
->
end
identifier do (key: value) -> else two, key: value -> end
identifier do (key: value) -> else two, key: value ->; end
identifier do
(key: value)
->
else
two, key: value
->
end
identifier do (key: value) -> else two, three: 3 when four -> end
identifier do (key: value) -> else two, three: 3 when four ->; end
identifier do
(key: value)
->
else two, three: 3 when four
->
end
identifier do (key: value) -> else two, three: 3 when four -> five end
identifier do (key: value) -> else two, three: 3 when four -> five; end
identifier do
(key: value)
->
else two, three: 3 when four
->
five
end
identifier do
(key: value)
->
else
(two_a, three_a: 3) when four_a
->
five_a
else
(two_b, three_b: -3) when four_b
->
five_b
end
#
# identifier do (one, two) -> else * end
#
identifier do (one, two) -> else three end
identifier do (one, two) ->; else; three; end
identifier do
(one, two) -> else three
end
identifier do
(one, two)
->
else
three
end
identifier do (one, two) -> else -> end
identifier do (one, two) -> else -> ; end
identifier do
(one, two) -> else ->
end
identifier do
(one, two)
->
else
->
end
identifier do (one, two) -> else -> three end
identifier do (one, two) ->; else -> three; end
identifier do
  (one, two) -> else -> three
end
identifier do
(one, two)
->
else
->
three
end
identifier do (one, two) -> else () -> three end
identifier do (one, two) ->; else () -> three; end
identifier do
(one, two) -> else () -> three
end
identifier do
(one, two)
->
else
()
->
three
end
identifier do (one, two) -> else three -> end
identifier do (one, two) -> else three ->; end
identifier do
(one, two)
->
else
three
->
end
identifier do (one, two) -> else (three, four) -> end
identifier do (one, two) -> else (three, four) ->; end
identifier do
(one, two)
->
else
(three, four)
->
end
identifier do (one, two) -> else (three, four) -> end
identifier do (one, two) -> else (three, four) ->; end
identifier do
(one, two)
->
else
(three, four)
->
end
identifier do (one, two) -> else (three, key: value) -> end
identifier do (one, two) -> else (three, key: value) ->; end
identifier do
(one, two)
->
else
(three, key: value)
->
end
identifier do (one, two) -> else (three, four: 4) when five -> end
identifier do (one, two) -> else (three, four: 4) when five ->; end
identifier do
(one, two)
->
else
(three, four: 4) when five
->
end
identifier do (one, two) -> else (three, four: 4) when five -> six end
identifier do (one, two) -> else (three, four: 4) when five -> six; end
identifier do
(one, two)
->
else (three, four: 4) when five
->
six
end
identifier do (one, two) -> else key: value -> end
identifier do (one, two) -> else key: value ->; end
identifier do
(one, two)
->
else key: value
->
end
identifier do (one, two) -> else three, four -> end
identifier do (one, two) -> else three, four ->; end
identifier do
(one, two)
->
else
three, four
->
end
identifier do (one, two) -> else three, key: value -> end
identifier do (one, two) -> else three, key: value ->; end
identifier do
(one, two)
->
else
three, key: value
->
end
identifier do (one, two) -> else three, four: 4 when five -> end
identifier do (one, two) -> else three, four: 4 when five ->; end
identifier do
(one, two)
->
else three, four: 4 when five
->
end
identifier do (one, two) -> else three, four: 4 when five -> six end
identifier do (one, two) -> else three, four: 4 when five -> six; end
identifier do
(one, two)
->
else three, four: 4 when five
->
six
end
identifier do
(one, two)
->
else
(three_a, four_a: 4) when five_a
->
six_a
else
(three_b, four_b: -4) when five_b
->
six_b
end
#
# 8 more variants left to the reader
#
| 16.252059 | 73 | 0.592955 |
084c02b0e23a21511a719f5169f03e23fba571fe | 82,695 | exs | Elixir | test/ecto/changeset_test.exs | MalmarPadecain/ecto | 78ff739c44cf4c53f4dbd1a510a5778e6e7d757b | [
"Apache-2.0"
] | null | null | null | test/ecto/changeset_test.exs | MalmarPadecain/ecto | 78ff739c44cf4c53f4dbd1a510a5778e6e7d757b | [
"Apache-2.0"
] | null | null | null | test/ecto/changeset_test.exs | MalmarPadecain/ecto | 78ff739c44cf4c53f4dbd1a510a5778e6e7d757b | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.ChangesetTest do
use ExUnit.Case, async: true
import Ecto.Changeset
require Ecto.Query
  # Embedded schema (no primary key) used to exercise `cast_embed/3` with a
  # dynamically supplied `on_cast` function (see "cast/4: with dynamic embed").
  defmodule SocialSource do
    use Ecto.Schema
    @primary_key false
    embedded_schema do
      field :origin
      field :url
    end
    # Default changeset used as the embed's `on_cast` callback.
    def changeset(schema \\ %SocialSource{}, params) do
      cast(schema, params, ~w(origin url)a)
    end
  end
  # Schema with a custom primary key (:category_id) so association tests can
  # reference a non-default PK name (see Post's belongs_to :category).
  defmodule Category do
    use Ecto.Schema
    @primary_key {:category_id, :id, autogenerate: true}
    schema "categories" do
      field :name, :string
      has_many :posts, Ecto.ChangesetTest.Post
    end
  end
  # Minimal schema used as the target of Post's has_many/has_one associations.
  defmodule Comment do
    use Ecto.Schema
    schema "comments" do
      belongs_to :post, Ecto.ChangesetTest.Post
    end
  end
defmodule Email do
use Ecto.Type
def type, do: :string
def cast(val) when is_binary(val), do: {:ok, val}
def cast(_), do: :error
def load(val) when is_binary(val), do: {:ok, val}
def load(_), do: :error
def dump(val) when is_binary(val), do: {:ok, val}
def dump(_), do: :error
def equal?(email_a, email_b) when is_binary(email_a) and is_binary(email_b) do
[username_a, domain_a] = String.split(email_a, "@")
[username_b, domain_b] = String.split(email_b, "@")
[significant_a | _] = String.split(username_a, "+")
[significant_b | _] = String.split(username_b, "+")
significant_a == significant_b && domain_a == domain_b
end
def equal?(a, b), do: a == b
end
  # Pass-through custom type over {:array, :string}: cast/load/dump accept
  # any value unchanged, so array-type plumbing can be tested in isolation.
  defmodule StringArray do
    use Ecto.Type
    def type, do: {:array, :string}
    def cast(val), do: {:ok, val}
    def load(val), do: {:ok, val}
    def dump(val), do: {:ok, val}
  end
  # Primary fixture schema: composite primary key (default :id plus :token),
  # field defaults, a custom type (Email), an array field, a virtual field,
  # a source-mapped column, and all three association kinds.
  defmodule Post do
    use Ecto.Schema
    schema "posts" do
      field :token, :integer, primary_key: true
      field :title, :string, default: ""
      field :author_email, Email
      field :body
      field :uuid, :binary_id
      field :color, :binary
      field :decimal, :decimal
      field :upvotes, :integer, default: 0
      field :topics, {:array, :string}
      # Not persisted; lives only on the struct/changeset.
      field :virtual, :string, virtual: true
      field :published_at, :naive_datetime
      field :source, :map
      # Stored in the :url database column.
      field :permalink, :string, source: :url
      belongs_to :category, Ecto.ChangesetTest.Category, references: :category_id, source: :cat_id
      has_many :comments, Ecto.ChangesetTest.Comment, on_replace: :delete
      has_one :comment, Ecto.ChangesetTest.Comment
    end
  end
  # Bare struct (no Ecto schema) for casting with an explicit {data, types} tuple.
  defmodule NoSchemaPost do
    defstruct [:title, :upvotes]
  end
  # Schema with only the default :id primary key, for tests needing a
  # single-PK struct (Post deliberately has a composite PK).
  defmodule SinglePkSchema do
    use Ecto.Schema
    schema "posts" do
      field :body
      field :published_at, :naive_datetime
    end
  end
  # Shared test helper: casts the common Post fields on the given struct
  # (defaults to a fresh %Post{}).
  defp changeset(schema \\ %Post{}, params) do
    cast(schema, params, ~w(id token title author_email body upvotes decimal color topics virtual)a)
  end
  # Type whose cast/1 always fails with a custom :message plus extra metadata
  # (:reason) — exercises propagation of custom cast errors into changesets.
  defmodule CustomError do
    use Ecto.Type
    def type, do: :any
    def cast(_), do: {:error, message: "custom error message", reason: :foobar}
    def load(_), do: :error
    def dump(_), do: :error
  end
  # Like CustomError, but also returns a :type key in the error metadata;
  # cast/4 is expected to discard it in favor of the field's real type.
  defmodule CustomErrorWithType do
    use Ecto.Type
    def type, do: :any
    def cast(_), do: {:error, message: "custom error message", reason: :foobar, type: :some_type}
    def load(_), do: :error
    def dump(_), do: :error
  end
  # Fails cast with metadata but no :message — the default "is invalid"
  # message should then be used.
  defmodule CustomErrorWithoutMessage do
    use Ecto.Type
    def type, do: :any
    def cast(_), do: {:error, reason: :foobar}
    def load(_), do: :error
    def dump(_), do: :error
  end
  # Schema embedding the always-failing custom types directly and inside
  # {:array, _} and {:map, _} containers.
  defmodule CustomErrorTest do
    use Ecto.Schema
    schema "custom_error" do
      field :custom_error, CustomError
      field :custom_error_without_message, CustomErrorWithoutMessage
      field :custom_error_with_type, CustomErrorWithType
      field :array_custom_error, {:array, CustomError}
      field :map_custom_error, {:map, CustomError}
    end
  end
## cast/4
test "cast/4: with valid string keys" do
params = %{"title" => "hello", "body" => "world"}
struct = %Post{}
changeset = cast(struct, params, ~w(title body)a)
assert changeset.params == params
assert changeset.data == struct
assert changeset.changes == %{title: "hello", body: "world"}
assert changeset.errors == []
assert validations(changeset) == []
assert changeset.required == []
assert changeset.valid?
end
  # Atom-keyed params are accepted but normalized to string keys in .params.
  test "cast/4: with valid atom keys" do
    params = %{title: "hello", body: "world"}
    struct = %Post{}
    changeset = cast(struct, params, ~w(title body)a)
    assert changeset.params == %{"title" => "hello", "body" => "world"}
    assert changeset.data == struct
    assert changeset.changes == %{title: "hello", body: "world"}
    assert changeset.errors == []
    assert validations(changeset) == []
    assert changeset.required == []
    assert changeset.valid?
  end
  # Empty-string/nil params still register as changes against non-empty data.
  test "cast/4: with empty values" do
    params = %{"title" => "", "body" => nil}
    struct = %Post{title: "foo", body: "bar"}
    changeset = cast(struct, params, ~w(title body)a)
    assert changeset.changes == %{title: "", body: nil}
  end
  # Empty values nested inside arrays are filtered out of the cast result.
  test "cast/4: with nested empty values" do
    params = %{"topics" => ["", "bar", ""]}
    struct = %Post{topics: ["foo"]}
    changeset = cast(struct, params, ~w(topics)a)
    assert changeset.changes == %{topics: ["bar"]}
  end
  # The :empty_values option replaces the default [""] list.
  test "cast/4: with custom empty values" do
    params = %{"title" => "empty", "body" => nil}
    struct = %Post{title: "foo", body: "bar"}
    changeset = cast(struct, params, ~w(title body)a, empty_values: ["empty"])
    assert changeset.changes == %{title: "", body: nil}
    assert changeset.empty_values == [""]
  end
  # No change is produced when params match the data exactly.
  test "cast/4: with matching empty values" do
    params = %{"title" => "", "body" => nil}
    struct = %Post{title: "", body: nil}
    changeset = cast(struct, params, ~w(title body)a)
    assert changeset.changes == %{}
  end
  # Schemaless cast: a plain {data_map, types_map} tuple works like a schema.
  test "cast/4: with data and types" do
    data = {%{title: "hello"}, %{title: :string, upvotes: :integer}}
    params = %{"title" => "world", "upvotes" => "0"}
    changeset = cast(data, params, ~w(title upvotes)a)
    assert changeset.params == params
    assert changeset.data == %{title: "hello"}
    assert changeset.changes == %{title: "world", upvotes: 0}
    assert changeset.errors == []
    assert changeset.valid?
    assert apply_changes(changeset) == %{title: "world", upvotes: 0}
  end
  # Same as above but the data is a bare (non-schema) struct.
  test "cast/4: with data struct and types" do
    data = {%NoSchemaPost{title: "hello"}, %{title: :string, upvotes: :integer}}
    params = %{"title" => "world", "upvotes" => "0"}
    changeset = cast(data, params, ~w(title upvotes)a)
    assert changeset.params == params
    assert changeset.data == %NoSchemaPost{title: "hello"}
    assert changeset.changes == %{title: "world", upvotes: 0}
    assert changeset.errors == []
    assert changeset.valid?
    assert apply_changes(changeset) == %NoSchemaPost{title: "world", upvotes: 0}
  end
  # Schemaless cast with a hand-built {:embed, %Ecto.Embedded{}} type,
  # driving cast_embed/2 through the SocialSource changeset as on_cast.
  test "cast/4: with dynamic embed" do
    data = {
      %{
        title: "hello"
      },
      %{
        title: :string,
        source: {
          :embed,
          %Ecto.Embedded{
            cardinality: :one,
            field: :source,
            on_cast: &SocialSource.changeset(&1, &2),
            on_replace: :raise,
            owner: nil,
            related: SocialSource,
            unique: true
          }
        }
      }
    }
    params = %{"title" => "world", "source" => %{"origin" => "facebook", "url" => "http://example.com/social"}}
    changeset =
      data
      |> cast(params, ~w(title)a)
      |> cast_embed(:source, required: true)
    assert changeset.params == params
    assert changeset.data == %{title: "hello"}
    assert %{title: "world", source: %Ecto.Changeset{}} = changeset.changes
    assert changeset.errors == []
    assert changeset.valid?
    assert apply_changes(changeset) ==
           %{title: "world", source: %Ecto.ChangesetTest.SocialSource{origin: "facebook", url: "http://example.com/social"}}
  end
  # Casting on top of an existing changeset preserves its required fields,
  # validations, and constraints while merging new changes.
  test "cast/4: with changeset" do
    base_changeset = cast(%Post{title: "valid"}, %{}, ~w(title)a)
                     |> validate_required(:title)
                     |> validate_length(:title, min: 3)
                     |> unique_constraint(:title)
    # No changes
    changeset = cast(base_changeset, %{}, ~w())
    assert changeset.valid?
    assert changeset.changes == %{}
    assert changeset.required == [:title]
    assert length(validations(changeset)) == 1
    assert length(constraints(changeset)) == 1
    # Value changes
    changeset = cast(changeset, %{body: "new body"}, ~w(body)a)
    assert changeset.valid?
    assert changeset.changes == %{body: "new body"}
    assert changeset.required == [:title]
    assert length(validations(changeset)) == 1
    assert length(constraints(changeset)) == 1
    # Nil changes
    changeset = cast(changeset, %{body: nil}, ~w(body)a)
    assert changeset.valid?
    assert changeset.changes == %{body: nil}
    assert changeset.required == [:title]
    assert length(validations(changeset)) == 1
    assert length(constraints(changeset)) == 1
  end
  # :invalid params mark the changeset invalid with nil params and no errors.
  test "cast/4: struct with :invalid parameters" do
    changeset = cast(%Post{}, :invalid, ~w(title body)a)
    assert changeset.data == %Post{}
    assert changeset.params == nil
    assert changeset.changes == %{}
    assert changeset.errors == []
    assert validations(changeset) == []
    refute changeset.valid?
  end
  # :invalid on a second cast invalidates while keeping prior params/changes.
  test "cast/4: changeset with :invalid parameters" do
    changeset = cast(%Post{}, %{"title" => "sample"}, ~w(title)a)
    changeset = cast(changeset, :invalid, ~w(body)a)
    assert changeset.data == %Post{}
    assert changeset.params == %{"title" => "sample"}
    assert changeset.changes == %{title: "sample"}
    assert changeset.errors == []
    assert validations(changeset) == []
    refute changeset.valid?
  end
  # A param that fails type casting produces no change and a :cast error.
  test "cast/4: field is marked as invalid" do
    params = %{"body" => :world}
    struct = %Post{}
    changeset = cast(struct, params, ~w(body)a)
    assert changeset.changes == %{}
    assert changeset.errors == [body: {"is invalid", [type: :string, validation: :cast]}]
    refute changeset.valid?
  end
  # Custom types may override the default message and attach extra metadata.
  test "cast/4: field has a custom invalid error message" do
    params = %{"custom_error" => :error}
    struct = %CustomErrorTest{}
    changeset = cast(struct, params, ~w(custom_error)a)
    assert changeset.errors == [custom_error: {"custom error message", [type: Ecto.ChangesetTest.CustomError, validation: :cast, reason: :foobar]}]
    refute changeset.valid?
  end
  # A :type key returned by the custom type is replaced with the real type.
  test "cast/4: ignores the :type parameter in custom errors" do
    params = %{"custom_error_with_type" => :error}
    struct = %CustomErrorTest{}
    changeset = cast(struct, params, ~w(custom_error_with_type)a)
    assert changeset.errors == [custom_error_with_type: {"custom error message", [type: Ecto.ChangesetTest.CustomErrorWithType, validation: :cast, reason: :foobar]}]
    refute changeset.valid?
  end
  # Without a :message key the default "is invalid" text is used.
  test "cast/4: field has a custom invalid error message without message" do
    params = %{"custom_error_without_message" => :error}
    struct = %CustomErrorTest{}
    changeset = cast(struct, params, ~w(custom_error_without_message)a)
    assert changeset.errors == [custom_error_without_message: {"is invalid", [type: Ecto.ChangesetTest.CustomErrorWithoutMessage, validation: :cast, reason: :foobar]}]
    refute changeset.valid?
  end
  # Inside {:array, _} the custom message/metadata is not propagated.
  test "cast/4: field has a custom invalid error message on an array field" do
    params = %{"array_custom_error" => [:error]}
    struct = %CustomErrorTest{}
    changeset = cast(struct, params, ~w(array_custom_error)a)
    assert changeset.errors == [array_custom_error: {"is invalid", [type: {:array, Ecto.ChangesetTest.CustomError}, validation: :cast]}]
    refute changeset.valid?
  end
  # Same for {:map, _} containers.
  test "cast/4: field has a custom invalid error message on a map field" do
    params = %{"map_custom_error" => %{foo: :error}}
    struct = %CustomErrorTest{}
    changeset = cast(struct, params, ~w(map_custom_error)a)
    assert changeset.errors == [map_custom_error: {"is invalid", [type: {:map, Ecto.ChangesetTest.CustomError}, validation: :cast]}]
    refute changeset.valid?
  end
  # Unknown permitted fields raise at cast time.
  test "cast/4: fails on invalid field" do
    assert_raise ArgumentError, ~r"unknown field `:unknown`", fn ->
      cast(%Post{}, %{}, ~w(unknown)a)
    end
  end
  # Non-map params, mixed atom/string keys, and non-list permitted args raise.
  test "cast/4: fails on bad arguments" do
    assert_raise Ecto.CastError, ~r"expected params to be a :map, got:", fn ->
      cast(%Post{}, %Post{}, ~w(unknown)a)
    end
    assert_raise Ecto.CastError, ~r"expected params to be a :map, got:", fn ->
      cast(%Post{}, "foo", ~w(unknown)a)
    end
    assert_raise Ecto.CastError, ~r"mixed keys", fn ->
      cast(%Post{}, %{"title" => "foo", title: "foo"}, ~w())
    end
    assert_raise FunctionClauseError, fn ->
      cast(%Post{}, %{}, %{})
    end
    assert_raise FunctionClauseError, fn ->
      cast(%Post{}, %{"title" => "foo"}, nil)
    end
  end
  # Permitted fields must be existing atoms; cast never creates new ones.
  test "cast/4: protects against atom injection" do
    assert_raise ArgumentError, fn ->
      cast(%Post{}, %{}, ~w(surely_never_saw_this_atom_before)a)
    end
  end
  # A cast failure yields :cast (not :required) even for required fields.
  test "cast/4: required field (via validate_required/2) of wrong type is marked as invalid" do
    params = %{"body" => :world}
    struct = %Post{}
    changeset = cast(struct, params, [:body])
    |> validate_required([:body])
    assert changeset.changes == %{}
    assert changeset.errors == [body: {"is invalid", [type: :string, validation: :cast]}]
    refute changeset.valid?
  end
  # Only params are cast; pre-existing (even ill-typed) data is left alone.
  test "cast/4: does not validate types in data" do
    params = %{}
    struct = %Post{title: 100, decimal: "string"}
    changeset = cast(struct, params, ~w(title decimal)a)
    assert changeset.params == %{}
    assert changeset.data == struct
    assert changeset.changes == %{}
    assert changeset.errors == []
    assert validations(changeset) == []
    assert changeset.required == []
    assert changeset.valid?
  end
  # Equality uses the type's equal?/2 (e.g. Decimal 1 == "1.0"), including
  # inside {:array, _} and {:map, _} containers and for nil values.
  test "cast/4: semantic comparison" do
    changeset = cast(%Post{decimal: Decimal.new(1)}, %{decimal: "1.0"}, ~w(decimal)a)
    assert changeset.changes == %{}
    changeset = cast(%Post{decimal: Decimal.new(1)}, %{decimal: "1.1"}, ~w(decimal)a)
    assert changeset.changes == %{decimal: Decimal.new("1.1")}
    changeset = cast(%Post{decimal: nil}, %{decimal: nil}, ~w(decimal)a)
    assert changeset.changes == %{}
    {data, types} = {%{x: [Decimal.new(1)]}, %{x: {:array, :decimal}}}
    changeset = cast({data, types}, %{x: [Decimal.new("1.0")]}, ~w(x)a)
    assert changeset.changes == %{}
    changeset = cast({data, types}, %{x: [Decimal.new("1.1")]}, ~w(x)a)
    assert changeset.changes == %{x: [Decimal.new("1.1")]}
    changeset = cast({%{x: [nil]}, types}, %{x: [nil]}, ~w(x)a)
    assert changeset.changes == %{}
    {data, types} = {%{x: %{decimal: nil}}, %{x: {:map, :decimal}}}
    changeset = cast({data, types}, data, ~w(x)a)
    assert changeset.changes == %{}
  end
## Changeset functions

# merge/2 combines two changesets over the same data; on conflicting
# keys the second changeset's changes and params win.
test "merge/2: merges changes" do
  cs1 = cast(%Post{}, %{title: "foo"}, ~w(title)a)
  cs2 = cast(%Post{}, %{body: "bar"}, ~w(body)a)
  assert merge(cs1, cs2).changes == %{body: "bar", title: "foo"}

  cs1 = cast(%Post{}, %{title: "foo"}, ~w(title)a)
  cs2 = cast(%Post{}, %{title: "bar"}, ~w(title)a)
  changeset = merge(cs1, cs2)
  assert changeset.valid?
  assert changeset.params == %{"title" => "bar"}
  assert changeset.changes == %{title: "bar"}
end

# Errors from both sides are kept, and the merged changeset is invalid.
test "merge/2: merges errors" do
  cs1 = cast(%Post{}, %{}, ~w(title)a) |> validate_required(:title)
  cs2 = cast(%Post{}, %{}, ~w(title body)a) |> validate_required([:title, :body])
  changeset = merge(cs1, cs2)
  refute changeset.valid?
  assert changeset.errors ==
         [title: {"can't be blank", [validation: :required]}, body: {"can't be blank", [validation: :required]}]
end

# Validation metadata from both changesets is concatenated.
test "merge/2: merges validations" do
  cs1 =
    cast(%Post{}, %{title: "Title"}, ~w(title)a)
    |> validate_length(:title, min: 1, max: 10)

  cs2 =
    cast(%Post{}, %{body: "Body"}, ~w(body)a)
    |> validate_format(:body, ~r/B/)

  changeset = merge(cs1, cs2)
  assert changeset.valid?
  assert length(validations(changeset)) == 2
  assert Enum.find(validations(changeset), &match?({:body, {:format, _}}, &1))
  assert Enum.find(validations(changeset), &match?({:title, {:length, _}}, &1))
end

# Keyword repo_opts are merged, with the second changeset's values
# overriding shared keys.
test "merge/2: repo opts" do
  cs1 = %Post{} |> change() |> Map.put(:repo_opts, [a: 1, b: 2])
  cs2 = %Post{} |> change() |> Map.put(:repo_opts, [b: 3, c: 4])
  changeset = merge(cs1, cs2)

  assert changeset.repo_opts == [a: 1, b: 3, c: 4]
end

# Constraints from both changesets are kept.
test "merge/2: merges constraints" do
  cs1 =
    cast(%Post{}, %{title: "Title"}, ~w(title)a)
    |> unique_constraint(:title)

  cs2 =
    cast(%Post{}, %{body: "Body"}, ~w(body)a)
    |> unique_constraint(:body)

  changeset = merge(cs1, cs2)
  assert changeset.valid?
  assert length(constraints(changeset)) == 2
end

# Params are merged with the second changeset overriding; two empty
# param sets merge to an empty map.
test "merge/2: merges parameters" do
  empty = cast(%Post{}, %{}, ~w(title)a)
  cs1 = cast(%Post{}, %{body: "foo"}, ~w(body)a)
  cs2 = cast(%Post{}, %{body: "bar"}, ~w(body)a)
  assert merge(cs1, cs2).params == %{"body" => "bar"}

  assert merge(cs1, empty).params == %{"body" => "foo"}
  assert merge(empty, cs2).params == %{"body" => "bar"}
  assert merge(empty, empty).params == %{}
end

# A field required on either side stays required after merging.
test "merge/2: gives required fields precedence over optional ones" do
  cs1 = cast(%Post{}, %{}, ~w(title)a) |> validate_required(:title)
  cs2 = cast(%Post{}, %{}, ~w(title)a)
  changeset = merge(cs1, cs2)
  assert changeset.required == [:title]
end

# Required field lists are deduplicated on merge.
test "merge/2: doesn't duplicate required or optional fields" do
  cs1 = cast(%Post{}, %{}, ~w(title body)a) |> validate_required([:title, :body])
  cs2 = cast(%Post{}, %{}, ~w(body title)a) |> validate_required([:body, :title])
  changeset = merge(cs1, cs2)
  assert Enum.sort(changeset.required) == [:body, :title]
end

# A nil :repo on one side takes the other side's value.
test "merge/2: merges the :repo field when either one is nil" do
  changeset = merge(%Ecto.Changeset{repo: :foo}, %Ecto.Changeset{repo: nil})
  assert changeset.repo == :foo

  changeset = merge(%Ecto.Changeset{repo: nil}, %Ecto.Changeset{repo: :bar})
  assert changeset.repo == :bar
end

# A nil :action on one side takes the other side's value.
test "merge/2: merges the :action field when either one is nil" do
  changeset = merge(%Ecto.Changeset{action: :insert}, %Ecto.Changeset{repo: nil})
  assert changeset.action == :insert

  changeset = merge(%Ecto.Changeset{action: nil}, %Ecto.Changeset{action: :update})
  assert changeset.action == :update
end

# Merging changesets with conflicting data, repos, or actions raises.
test "merge/2: fails when the :data, :repo or :action field are not equal" do
  cs1 = cast(%Post{title: "foo"}, %{}, ~w(title)a)
  cs2 = cast(%Post{title: "bar"}, %{}, ~w(title)a)

  assert_raise ArgumentError, "different :data when merging changesets", fn ->
    merge(cs1, cs2)
  end

  assert_raise ArgumentError, "different repos (`:foo` and `:bar`) when merging changesets", fn ->
    merge(%Ecto.Changeset{repo: :foo}, %Ecto.Changeset{repo: :bar})
  end

  assert_raise ArgumentError, "different actions (`:insert` and `:update`) when merging changesets", fn ->
    merge(%Ecto.Changeset{action: :insert}, %Ecto.Changeset{action: :update})
  end
end
# change/2 with a struct accepts keyword lists or maps and only records
# values that differ from the data.
test "change/2 with a struct" do
  changeset = change(%Post{})
  assert changeset.valid?
  assert changeset.data == %Post{}
  assert changeset.changes == %{}

  changeset = change(%Post{body: "bar"}, body: "bar")
  assert changeset.valid?
  assert changeset.data == %Post{body: "bar"}
  assert changeset.changes == %{}

  changeset = change(%Post{body: "bar"}, %{body: "bar", title: "foo"})
  assert changeset.valid?
  assert changeset.data == %Post{body: "bar"}
  assert changeset.changes == %{title: "foo"}

  changeset = change(%Post{}, body: "bar")
  assert changeset.valid?
  assert changeset.data == %Post{}
  assert changeset.changes == %{body: "bar"}

  changeset = change(%Post{}, %{body: "bar"})
  assert changeset.valid?
  assert changeset.data == %Post{}
  assert changeset.changes == %{body: "bar"}
end

# change/2 also accepts a schemaless {data, types} tuple.
test "change/2 with data and types" do
  datatypes = {%{title: "hello"}, %{title: :string}}
  changeset = change(datatypes)
  assert changeset.valid?
  assert changeset.data == %{title: "hello"}
  assert changeset.changes == %{}

  changeset = change(datatypes, title: "world")
  assert changeset.valid?
  assert changeset.data == %{title: "hello"}
  assert changeset.changes == %{title: "world"}
  assert apply_changes(changeset) == %{title: "world"}

  # Same value as the data: no change is recorded.
  changeset = change(datatypes, title: "hello")
  assert changeset.valid?
  assert changeset.data == %{title: "hello"}
  assert changeset.changes == %{}
  assert apply_changes(changeset) == %{title: "hello"}
end

# change/2 on an existing changeset layers new changes on top, dropping
# any that equal the underlying data (e.g. upvotes: 5).
test "change/2 with a changeset" do
  base_changeset = cast(%Post{upvotes: 5}, %{title: "title"}, ~w(title)a)

  assert change(base_changeset) == base_changeset

  changeset = change(base_changeset, %{body: "body"})
  assert changeset.changes == %{title: "title", body: "body"}

  changeset = change(base_changeset, %{title: "new title"})
  assert changeset.changes == %{title: "new title"}

  changeset = change(base_changeset, title: "new title")
  assert changeset.changes == %{title: "new title"}

  # nil equals the data's nil body, so body produces no change.
  changeset = change(base_changeset, body: nil)
  assert changeset.changes == %{title: "title"}

  # nil differs from the data's upvotes of 5, so it IS a change.
  changeset = change(base_changeset, %{upvotes: nil})
  assert changeset.changes == %{title: "title", upvotes: nil}

  changeset = change(base_changeset, %{upvotes: 5})
  assert changeset.changes == %{title: "title"}

  changeset = change(base_changeset, %{upvotes: 10})
  assert changeset.changes == %{title: "title", upvotes: 10}

  changeset = change(base_changeset, %{title: "new title", upvotes: 5})
  assert changeset.changes == %{title: "new title"}
end

# Decimals equal by value (1 vs "1.0") do not register as a change.
test "change/2 semantic comparison" do
  post = %Post{decimal: Decimal.new("1.0")}
  changeset = change(post, decimal: Decimal.new(1))
  assert changeset.changes == %{}
end

# Unknown fields raise rather than being silently stored.
test "change/2 with unknown field" do
  post = %Post{}

  assert_raise ArgumentError, ~r"unknown field `:unknown`", fn ->
    change(post, unknown: Decimal.new(1))
  end
end

# Field names must be atoms; string keys raise.
test "change/2 with non-atom field" do
  post = %Post{}

  assert_raise ArgumentError, ~r"must be atoms, got: `\"bad\"`", fn ->
    change(post, %{"bad" => 42})
  end
end
# fetch_field/2 reports where a value came from: changes first, then the
# underlying data, and :error for unknown fields.
test "fetch_field/2" do
  cs = %Post{body: "bar"} |> changeset(%{"title" => "foo"})

  # :title was cast from params, so it is read from the changes.
  assert {:changes, "foo"} == fetch_field(cs, :title)
  # :body was not in params, so it falls back to the data struct.
  assert {:data, "bar"} == fetch_field(cs, :body)
  # Unknown fields yield :error rather than raising.
  assert :error == fetch_field(cs, :other)
end
# fetch_field!/2 is the raising variant: returns the value from changes
# or data, and raises KeyError for unknown fields.
test "fetch_field!/2" do
  changeset = changeset(%Post{body: "bar"}, %{"title" => "foo"})

  assert fetch_field!(changeset, :title) == "foo"
  assert fetch_field!(changeset, :body) == "bar"

  assert_raise KeyError, ~r/key :other not found in/, fn ->
    fetch_field!(changeset, :other)
  end
end
# get_field/3 returns changes first, then data, then the given default.
test "get_field/3" do
  changeset = changeset(%Post{body: "bar"}, %{"title" => "foo"})

  assert get_field(changeset, :title) == "foo"
  assert get_field(changeset, :body) == "bar"
  assert get_field(changeset, :body, "other") == "bar"
  assert get_field(changeset, :other) == nil
  assert get_field(changeset, :other, "other") == "other"
end

# For associations, get_field/3 reflects what was set via put_assoc/3,
# not the data's original association value.
test "get_field/3 with associations" do
  post = %Post{comments: [%Comment{}]}
  changeset = change(post) |> put_assoc(:comments, [])

  assert get_field(changeset, :comments) == []
end
# fetch_change/2 looks only at changes: an explicit nil change returns
# {:ok, nil}, while a field with no change returns :error.
test "fetch_change/2" do
  changeset = changeset(%{"title" => "foo", "body" => nil, "upvotes" => nil})

  assert fetch_change(changeset, :title) == {:ok, "foo"}
  assert fetch_change(changeset, :body) == :error
  assert fetch_change(changeset, :upvotes) == {:ok, nil}
end

# fetch_change!/2 raises KeyError when the field is not in the changes.
test "fetch_change!/2" do
  changeset = changeset(%{"title" => "foo", "body" => nil, "upvotes" => nil})

  assert fetch_change!(changeset, :title) == "foo"

  assert_raise KeyError, "key :body not found in: %{title: \"foo\", upvotes: nil}", fn ->
    fetch_change!(changeset, :body)
  end

  assert fetch_change!(changeset, :upvotes) == nil
end

# get_change/3 returns the default only when no change exists; an
# explicit nil change still returns nil, not the default.
test "get_change/3" do
  changeset = changeset(%{"title" => "foo", "body" => nil, "upvotes" => nil})

  assert get_change(changeset, :title) == "foo"
  assert get_change(changeset, :body) == nil
  assert get_change(changeset, :body, "other") == "other"
  assert get_change(changeset, :upvotes) == nil
  assert get_change(changeset, :upvotes, "other") == nil
end
# update_change/3 applies the function only when a change is present;
# a change that becomes equal to the data is pruned entirely.
test "update_change/3" do
  changeset =
    changeset(%{"title" => "foo"})
    |> update_change(:title, & &1 <> "bar")
  assert changeset.changes.title == "foobar"

  changeset =
    changeset(%{"upvotes" => nil})
    |> update_change(:upvotes, & &1 || 10)
  assert changeset.changes.upvotes == 10

  # No :title change, so the function is never invoked.
  changeset =
    changeset(%{})
    |> update_change(:title, & &1 || "bar")
  assert changeset.changes == %{}

  # Downcasing makes the change equal to the data, so it is removed.
  changeset =
    changeset(%Post{title: "mytitle"}, %{title: "MyTitle"})
    |> update_change(:title, &String.downcase/1)
  assert changeset.changes == %{}
end
# put_change/3 skips values equal to the underlying data, while
# delete_change/2 removes a previously recorded change.
test "put_change/3 and delete_change/2" do
  base_changeset = change(%Post{upvotes: 5})

  changeset = put_change(base_changeset, :title, "foo")
  assert changeset.changes.title == "foo"

  changeset = delete_change(changeset, :title)
  assert changeset.changes == %{}

  changeset = put_change(base_changeset, :title, "bar")
  assert changeset.changes.title == "bar"

  # nil equals the data's nil body, so no change is recorded.
  changeset = put_change(base_changeset, :body, nil)
  assert changeset.changes == %{}

  # Same value as the data's upvotes, so no change is recorded.
  changeset = put_change(base_changeset, :upvotes, 5)
  assert changeset.changes == %{}

  changeset = put_change(changeset, :upvotes, 10)
  assert changeset.changes.upvotes == 10

  # nil differs from the data's 5, so it is tracked as a change.
  changeset = put_change(base_changeset, :upvotes, nil)
  assert changeset.changes.upvotes == nil
end
# force_change/3 records a change even when the new value equals the
# value already present in the data (unlike put_change/3).
test "force_change/3" do
  base = change(%Post{upvotes: 5})

  forced_once = force_change(base, :title, "foo")
  assert forced_once.changes.title == "foo"

  forced_twice = force_change(forced_once, :title, "bar")
  assert forced_twice.changes.title == "bar"

  # Same value as the underlying data, yet still tracked as a change.
  forced_same = force_change(forced_twice, :upvotes, 5)
  assert forced_same.changes.upvotes == 5
end
# apply_changes/1 folds changes (including put_assoc'd associations and
# their foreign keys) back into the data struct.
test "apply_changes/1" do
  post = %Post{}
  category = %Category{name: "bar"}

  assert post.title == ""

  changeset =
    post
    |> changeset(%{"title" => "foo"})
    |> put_assoc(:category, category)

  changed_post = apply_changes(changeset)
  assert changed_post.__struct__ == post.__struct__
  assert changed_post.title == "foo"
  # NOTE(review): `category.category_id` looks suspicious — the owner's FK
  # is normally compared against the association's primary key (`id`);
  # confirm %Category{} really defines a :category_id field.
  assert changed_post.category_id == category.category_id

  # Setting the association to nil clears both the FK and the assoc.
  changeset =
    post
    |> changeset(%{"title" => "foo"})
    |> put_assoc(:category, nil)

  changed_post = apply_changes(changeset)
  assert changed_post.__struct__ == post.__struct__
  assert changed_post.title == "foo"
  assert changed_post.category_id == nil
  assert changed_post.category == nil
end
describe "apply_action/2" do
  # A valid changeset yields {:ok, struct} with the changes applied.
  test "valid changeset" do
    post = %Post{}
    assert post.title == ""

    changeset = changeset(post, %{"title" => "foo"})
    assert changeset.valid?

    assert {:ok, changed_post} = apply_action(changeset, :update)

    assert changed_post.__struct__ == post.__struct__
    assert changed_post.title == "foo"
  end

  # An invalid changeset is returned in an :error tuple with the given
  # action stamped onto it.
  test "invalid changeset" do
    changeset =
      %Post{}
      |> changeset(%{"title" => "foo"})
      |> validate_length(:title, min: 10)

    refute changeset.valid?
    changeset_new_action = %Ecto.Changeset{changeset | action: :update}
    assert {:error, ^changeset_new_action} = apply_action(changeset, :update)
  end

  # The action argument must be an atom.
  test "invalid action" do
    assert_raise ArgumentError, ~r/expected action to be an atom/, fn ->
      %Post{}
      |> changeset(%{})
      |> apply_action("invalid_action")
    end
  end
end
describe "apply_action!/2" do
  # Raising variant: returns the struct directly on success.
  test "valid changeset" do
    changeset = changeset(%Post{}, %{"title" => "foo"})
    post = apply_action!(changeset, :update)
    assert post.title == "foo"
  end

  # ...and raises Ecto.InvalidChangesetError on failure.
  test "invalid changeset" do
    changeset =
      %Post{}
      |> changeset(%{"title" => "foo"})
      |> validate_length(:title, min: 10)

    assert_raise Ecto.InvalidChangesetError, fn ->
      apply_action!(changeset, :update)
    end
  end
end
## Validations

# add_error appends an error tuple, with an optional metadata keyword list.
test "add_error/3" do
  changeset =
    changeset(%{})
    |> add_error(:foo, "bar")
  assert changeset.errors == [foo: {"bar", []}]

  changeset =
    changeset(%{})
    |> add_error(:foo, "bar", additional: "information")
  assert changeset.errors == [foo: {"bar", [additional: "information"]}]
end

# validate_change/3 invokes the validator only for present, non-nil
# changes; the validator's return (strings or {msg, opts} tuples) becomes
# the error list. Unknown fields raise.
test "validate_change/3" do
  # When valid
  changeset =
    changeset(%{"title" => "hello"})
    |> validate_change(:title, fn :title, "hello" -> [] end)

  assert changeset.valid?
  assert changeset.errors == []

  # When invalid with binary
  changeset =
    changeset(%{"title" => "hello"})
    |> validate_change(:title, fn :title, "hello" -> [title: "oops"] end)

  refute changeset.valid?
  assert changeset.errors == [title: {"oops", []}]

  # When invalid with tuple
  changeset =
    changeset(%{"title" => "hello"})
    |> validate_change(:title, fn :title, "hello" -> [title: {"oops", type: "bar"}] end)

  refute changeset.valid?
  assert changeset.errors == [title: {"oops", type: "bar"}]

  # When missing: the validator is never invoked.
  changeset =
    changeset(%{})
    |> validate_change(:title, fn :title, "hello" -> [title: "oops"] end)

  assert changeset.valid?
  assert changeset.errors == []

  # When nil: also skipped.
  changeset =
    changeset(%{"title" => nil})
    |> validate_change(:title, fn :title, "hello" -> [title: "oops"] end)

  assert changeset.valid?
  assert changeset.errors == []

  # When virtual: virtual fields are validated like any other change.
  changeset =
    changeset(%{"virtual" => "hello"})
    |> validate_change(:virtual, fn :virtual, "hello" -> [] end)

  assert changeset.valid?
  assert changeset.errors == []

  # When unknown field
  assert_raise ArgumentError, ~r/unknown field :bad in/, fn ->
    changeset(%{"title" => "hello"})
    |> validate_change(:bad, fn _, _ -> [] end)
  end
end

# validate_change/4 additionally records validation metadata under the
# given key — even when the field has no change and the fun is skipped.
test "validate_change/4" do
  changeset =
    changeset(%{"title" => "hello"})
    |> validate_change(:title, :oops, fn :title, "hello" -> [title: "oops"] end)

  refute changeset.valid?
  assert changeset.errors == [title: {"oops", []}]
  assert validations(changeset) == [title: :oops]

  changeset =
    changeset(%{})
    |> validate_change(:title, :oops, fn :title, "hello" -> [title: "oops"] end)

  assert changeset.valid?
  assert changeset.errors == []
  assert validations(changeset) == [title: :oops]
end
# validate_required flags missing or blank fields (trimming whitespace
# by default), supports custom messages, and validates its own arguments.
test "validate_required/2" do
  # When valid
  changeset =
    changeset(%{"title" => "hello", "body" => "something"})
    |> validate_required(:title)
  assert changeset.valid?
  assert changeset.errors == []

  # When missing
  changeset = changeset(%{}) |> validate_required(:title)
  refute changeset.valid?
  assert changeset.required == [:title]
  assert changeset.errors == [title: {"can't be blank", [validation: :required]}]

  # When nil
  changeset =
    changeset(%{title: nil, body: "\n"})
    |> validate_required([:title, :body], message: "is blank")
  refute changeset.valid?
  assert changeset.required == [:title, :body]
  assert changeset.changes == %{}
  assert changeset.errors == [title: {"is blank", [validation: :required]}, body: {"is blank", [validation: :required]}]

  # When :trim option is false — a whitespace-only string counts as present.
  changeset = changeset(%{title: " "}) |> validate_required(:title, trim: false)
  assert changeset.valid?
  assert changeset.errors == []

  changeset = changeset(%{color: <<12, 12, 12>>}) |> validate_required(:color, trim: false)
  assert changeset.valid?
  assert changeset.errors == []

  # When unknown field
  assert_raise ArgumentError, ~r/unknown field :bad in/, fn ->
    changeset(%{"title" => "hello", "body" => "something"})
    |> validate_required(:bad)
  end

  # When field is not an atom
  assert_raise ArgumentError, ~r/expects field names to be atoms, got: `"title"`/, fn ->
    changeset(%{"title" => "hello"})
    |> validate_required("title")
  end

  # When field is nil
  assert_raise FunctionClauseError, fn ->
    changeset(%{"title" => "hello"})
    |> validate_required(nil)
  end
end
# validate_format checks a string change against a regex, records the
# regex in the validation metadata, and supports a custom :message.
test "validate_format/3" do
  changeset =
    changeset(%{"title" => "foo@bar"})
    |> validate_format(:title, ~r/@/)
  assert changeset.valid?
  assert changeset.errors == []
  assert validations(changeset) == [title: {:format, ~r/@/}]

  changeset =
    changeset(%{"title" => "foobar"})
    |> validate_format(:title, ~r/@/)
  refute changeset.valid?
  assert changeset.errors == [title: {"has invalid format", [validation: :format]}]
  assert validations(changeset) == [title: {:format, ~r/@/}]

  changeset =
    changeset(%{"title" => "foobar"})
    |> validate_format(:title, ~r/@/, message: "yada")
  assert changeset.errors == [title: {"yada", [validation: :format]}]
end
# validate_inclusion requires the change to be a member of the enum and
# records the enum in both the error and validation metadata.
test "validate_inclusion/3" do
  changeset =
    changeset(%{"title" => "hello"})
    |> validate_inclusion(:title, ~w(hello))
  assert changeset.valid?
  assert changeset.errors == []
  assert validations(changeset) == [title: {:inclusion, ~w(hello)}]

  changeset =
    changeset(%{"title" => "hello"})
    |> validate_inclusion(:title, ~w(world))
  refute changeset.valid?
  assert changeset.errors == [title: {"is invalid", [validation: :inclusion, enum: ~w(world)]}]
  assert validations(changeset) == [title: {:inclusion, ~w(world)}]

  changeset =
    changeset(%{"title" => "hello"})
    |> validate_inclusion(:title, ~w(world), message: "yada")
  assert changeset.errors == [title: {"yada", [validation: :inclusion, enum: ~w(world)]}]
end

# Membership uses the type's semantic equality, so integer 0 matches
# Decimal 0.0 in the enum.
test "validate_inclusion/3 with decimal does semantic comparison" do
  changeset =
    {%{}, %{value: :decimal}}
    |> Ecto.Changeset.cast(%{value: 0}, [:value])
    |> validate_inclusion(:value, Enum.map([0.0, 0.2], &Decimal.from_float/1))

  assert changeset.valid?
end

# A custom field type may supply its own equality for membership checks;
# here "carl+1@example.com" is treated as equal to "carl@example.com".
test "validate_inclusion/3 with custom type and custom equal function" do
  changeset =
    changeset(%{"author_email" => "carl+1@example.com"})
    |> validate_inclusion(:author_email, ["carl@example.com"])

  assert changeset.valid?
  assert changeset.errors == []
  assert validations(changeset) == [author_email: {:inclusion, ["carl@example.com"]}]
end
# validate_subset requires every element of a list change to belong to
# the given enum.
test "validate_subset/3" do
  changeset =
    changeset(%{"topics" => ["cat", "dog"]})
    |> validate_subset(:topics, ~w(cat dog))
  assert changeset.valid?
  assert changeset.errors == []
  assert validations(changeset) == [topics: {:subset, ~w(cat dog)}]

  changeset =
    changeset(%{"topics" => ["cat", "laptop"]})
    |> validate_subset(:topics, ~w(cat dog))
  refute changeset.valid?
  assert changeset.errors == [topics: {"has an invalid entry", [validation: :subset, enum: ~w(cat dog)]}]
  assert validations(changeset) == [topics: {:subset, ~w(cat dog)}]

  changeset =
    changeset(%{"topics" => ["laptop"]})
    |> validate_subset(:topics, ~w(cat dog), message: "yada")
  assert changeset.errors == [topics: {"yada", [validation: :subset, enum: ~w(cat dog)]}]
end

# Element membership uses semantic equality for decimals.
test "validate_subset/3 with decimal does semantic comparison" do
  changeset =
    {%{}, %{value: {:array, :decimal}}}
    |> Ecto.Changeset.cast(%{value: [0, 0.2]}, [:value])
    |> validate_subset(:value, Enum.map([0.0, 0.2], &Decimal.from_float/1))

  assert changeset.valid?
end

# Custom array-like types are validated via their underlying type.
test "validate_subset/3 with custom type uses underlying type" do
  # backwards compatibility test
  changeset =
    {%{}, %{value: StringArray}}
    |> Ecto.Changeset.cast(%{value: ["a", "b"]}, [:value])
    |> validate_subset(:value, ["a", "b"])

  assert changeset.valid?
end
# validate_exclusion rejects changes contained in the reserved enum.
test "validate_exclusion/3" do
  changeset =
    changeset(%{"title" => "world"})
    |> validate_exclusion(:title, ~w(hello))
  assert changeset.valid?
  assert changeset.errors == []
  assert validations(changeset) == [title: {:exclusion, ~w(hello)}]

  changeset =
    changeset(%{"title" => "world"})
    |> validate_exclusion(:title, ~w(world))
  refute changeset.valid?
  assert changeset.errors == [title: {"is reserved", [validation: :exclusion, enum: ~w(world)]}]
  assert validations(changeset) == [title: {:exclusion, ~w(world)}]

  changeset =
    changeset(%{"title" => "world"})
    |> validate_exclusion(:title, ~w(world), message: "yada")
  assert changeset.errors == [title: {"yada", [validation: :exclusion, enum: ~w(world)]}]
end

# Exclusion also compares decimals semantically: integer 0 hits 0.0.
test "validate_exclusion/3 with decimal does semantic comparison" do
  decimals = Enum.map([0.0, 0.2], &Decimal.from_float/1)

  changeset =
    {%{}, %{value: :decimal}}
    |> Ecto.Changeset.cast(%{value: 0}, [:value])
    |> validate_exclusion(:value, decimals)

  assert changeset.errors == [value: {"is reserved", [validation: :exclusion, enum: decimals]}]
end
# For strings, validate_length counts graphemes by default; the :count
# option can switch to codepoint counting.
test "validate_length/3 with string" do
  changeset = changeset(%{"title" => "world"}) |> validate_length(:title, min: 3, max: 7)
  assert changeset.valid?
  assert changeset.errors == []
  assert validations(changeset) == [title: {:length, [min: 3, max: 7]}]

  changeset = changeset(%{"title" => "world"}) |> validate_length(:title, min: 5, max: 5)
  assert changeset.valid?

  changeset = changeset(%{"title" => "world"}) |> validate_length(:title, is: 5)
  assert changeset.valid?

  changeset = changeset(%{"title" => "world"}) |> validate_length(:title, min: 6)
  refute changeset.valid?
  assert changeset.errors == [title: {"should be at least %{count} character(s)", count: 6, validation: :length, kind: :min, type: :string}]

  changeset = changeset(%{"title" => "world"}) |> validate_length(:title, max: 4)
  refute changeset.valid?
  assert changeset.errors == [title: {"should be at most %{count} character(s)", count: 4, validation: :length, kind: :max, type: :string}]

  changeset = changeset(%{"title" => "world"}) |> validate_length(:title, is: 10)
  refute changeset.valid?
  assert changeset.errors == [title: {"should be %{count} character(s)", count: 10, validation: :length, kind: :is, type: :string}]

  changeset = changeset(%{"title" => "world"}) |> validate_length(:title, is: 10, message: "yada")
  assert changeset.errors == [title: {"yada", count: 10, validation: :length, kind: :is, type: :string}]

  # "e" + combining acute accent: one grapheme but two codepoints.
  changeset = changeset(%{"title" => "\u0065\u0301"}) |> validate_length(:title, max: 1)
  assert changeset.valid?

  changeset = changeset(%{"title" => "\u0065\u0301"}) |> validate_length(:title, max: 1, count: :codepoints)
  refute changeset.valid?
  assert changeset.errors == [title: {"should be at most %{count} character(s)", count: 1, validation: :length, kind: :max, type: :string}]
end
# With count: :bytes, validate_length measures raw byte size and emits
# byte-oriented error messages (type: :binary).
test "validate_length/3 with binary" do
  changeset =
    changeset(%{"body" => <<0, 1, 2, 3>>})
    |> validate_length(:body, count: :bytes, min: 3, max: 7)

  assert changeset.valid?
  assert changeset.errors == []
  assert validations(changeset) == [body: {:length, [count: :bytes, min: 3, max: 7]}]

  changeset =
    changeset(%{"body" => <<0, 1, 2, 3, 4>>})
    |> validate_length(:body, count: :bytes, min: 5, max: 5)

  assert changeset.valid?

  changeset =
    changeset(%{"body" => <<0, 1, 2, 3, 4>>}) |> validate_length(:body, count: :bytes, is: 5)

  assert changeset.valid?

  changeset =
    changeset(%{"body" => <<0, 1, 2, 3, 4>>}) |> validate_length(:body, count: :bytes, min: 6)

  refute changeset.valid?

  assert changeset.errors == [
           body:
             {"should be at least %{count} byte(s)", count: 6, validation: :length, kind: :min, type: :binary}
         ]

  changeset =
    changeset(%{"body" => <<0, 1, 2, 3, 4>>}) |> validate_length(:body, count: :bytes, max: 4)

  refute changeset.valid?

  assert changeset.errors == [
           body: {"should be at most %{count} byte(s)", count: 4, validation: :length, kind: :max, type: :binary}
         ]

  changeset =
    changeset(%{"body" => <<0, 1, 2, 3, 4>>}) |> validate_length(:body, count: :bytes, is: 10)

  refute changeset.valid?

  assert changeset.errors == [
           body: {"should be %{count} byte(s)", count: 10, validation: :length, kind: :is, type: :binary}
         ]

  changeset =
    changeset(%{"body" => <<0, 1, 2, 3, 4>>})
    |> validate_length(:body, count: :bytes, is: 10, message: "yada")

  assert changeset.errors == [body: {"yada", count: 10, validation: :length, kind: :is, type: :binary}]
end
# For lists, validate_length counts items and uses item-oriented
# messages; an unrecognized option (foo: true) does not affect the result.
test "validate_length/3 with list" do
  changeset = changeset(%{"topics" => ["Politics", "Security", "Economy", "Elections"]}) |> validate_length(:topics, min: 3, max: 7)
  assert changeset.valid?
  assert changeset.errors == []
  assert validations(changeset) == [topics: {:length, [min: 3, max: 7]}]

  changeset = changeset(%{"topics" => ["Politics", "Security"]}) |> validate_length(:topics, min: 2, max: 2)
  assert changeset.valid?

  changeset = changeset(%{"topics" => ["Politics", "Security", "Economy"]}) |> validate_length(:topics, is: 3)
  assert changeset.valid?

  changeset = changeset(%{"topics" => ["Politics", "Security"]}) |> validate_length(:topics, min: 6, foo: true)
  refute changeset.valid?
  assert changeset.errors == [topics: {"should have at least %{count} item(s)", count: 6, validation: :length, kind: :min, type: :list}]

  changeset = changeset(%{"topics" => ["Politics", "Security", "Economy"]}) |> validate_length(:topics, max: 2)
  refute changeset.valid?
  assert changeset.errors == [topics: {"should have at most %{count} item(s)", count: 2, validation: :length, kind: :max, type: :list}]

  changeset = changeset(%{"topics" => ["Politics", "Security"]}) |> validate_length(:topics, is: 10)
  refute changeset.valid?
  assert changeset.errors == [topics: {"should have %{count} item(s)", count: 10, validation: :length, kind: :is, type: :list}]

  changeset = changeset(%{"topics" => ["Politics", "Security"]}) |> validate_length(:topics, is: 10, message: "yada")
  assert changeset.errors == [topics: {"yada", count: 10, validation: :length, kind: :is, type: :list}]
end

# Associations set via put_assoc/3 are length-checked like lists.
test "validate_length/3 with associations" do
  post = %Post{comments: [%Comment{id: 1}]}
  changeset = change(post) |> put_assoc(:comments, []) |> validate_length(:comments, min: 1)
  assert changeset.errors == [comments: {"should have at least %{count} item(s)", count: 1, validation: :length, kind: :min, type: :list}]

  changeset = change(post) |> put_assoc(:comments, [%Comment{id: 2}, %Comment{id: 3}]) |> validate_length(:comments, max: 2)
  assert changeset.valid?
end
# validate_number supports comparison options; when several options are
# given and more than one fails, only the first failure is reported.
test "validate_number/3" do
  changeset =
    changeset(%{"upvotes" => 3})
    |> validate_number(:upvotes, greater_than: 0)
  assert changeset.valid?
  assert changeset.errors == []
  assert validations(changeset) == [upvotes: {:number, [greater_than: 0]}]

  # Single error
  changeset =
    changeset(%{"upvotes" => -1})
    |> validate_number(:upvotes, greater_than: 0)
  refute changeset.valid?
  assert changeset.errors == [upvotes: {"must be greater than %{number}", validation: :number, kind: :greater_than, number: 0}]
  assert validations(changeset) == [upvotes: {:number, [greater_than: 0]}]

  # Non equality error
  changeset =
    changeset(%{"upvotes" => 1})
    |> validate_number(:upvotes, not_equal_to: 1)
  refute changeset.valid?
  assert changeset.errors == [upvotes: {"must be not equal to %{number}", validation: :number, kind: :not_equal_to, number: 1}]
  assert validations(changeset) == [upvotes: {:number, [not_equal_to: 1]}]

  # Multiple validations
  changeset =
    changeset(%{"upvotes" => 3})
    |> validate_number(:upvotes, greater_than: 0, less_than: 100)
  assert changeset.valid?
  assert changeset.errors == []
  assert validations(changeset) == [upvotes: {:number, [greater_than: 0, less_than: 100]}]

  # Multiple validations with multiple errors
  changeset =
    changeset(%{"upvotes" => 3})
    |> validate_number(:upvotes, greater_than: 100, less_than: 0)
  refute changeset.valid?
  assert changeset.errors == [upvotes: {"must be greater than %{number}", validation: :number, kind: :greater_than, number: 100}]

  # Multiple validations with custom message errors
  changeset =
    changeset(%{"upvotes" => 3})
    |> validate_number(:upvotes, greater_than: 100, less_than: 0, message: "yada")
  assert changeset.errors == [upvotes: {"yada", validation: :number, kind: :greater_than, number: 100}]
end
# Decimal fields compare correctly against Decimal, integer, and float
# comparison targets.
test "validate_number/3 with decimal" do
  changeset =
    changeset(%{"decimal" => Decimal.new(1)})
    |> validate_number(:decimal, greater_than: Decimal.new(-3))
  assert changeset.valid?

  changeset =
    changeset(%{"decimal" => Decimal.new(-3)})
    |> validate_number(:decimal, less_than: Decimal.new(1))
  assert changeset.valid?

  changeset =
    changeset(%{"decimal" => Decimal.new(-1)})
    |> validate_number(:decimal, equal_to: Decimal.new(-1))
  assert changeset.valid?

  changeset =
    changeset(%{"decimal" => Decimal.new(0)})
    |> validate_number(:decimal, not_equal_to: Decimal.new(-1))
  assert changeset.valid?

  changeset =
    changeset(%{"decimal" => Decimal.new(-3)})
    |> validate_number(:decimal, less_than_or_equal_to: Decimal.new(-1))
  assert changeset.valid?

  changeset =
    changeset(%{"decimal" => Decimal.new(-3)})
    |> validate_number(:decimal, less_than_or_equal_to: Decimal.new(-3))
  assert changeset.valid?

  changeset =
    changeset(%{"decimal" => Decimal.new(-1)})
    |> validate_number(:decimal, greater_than_or_equal_to: Decimal.new("-1.5"))
  assert changeset.valid?

  changeset =
    changeset(%{"decimal" => Decimal.new("1.5")})
    |> validate_number(:decimal, greater_than_or_equal_to: Decimal.new("1.5"))
  assert changeset.valid?

  # Comparison against a plain float target.
  changeset =
    changeset(%{"decimal" => Decimal.new("4.9")})
    |> validate_number(:decimal, greater_than_or_equal_to: 4.9)
  assert changeset.valid?

  # Comparison against a plain integer target, failing case.
  changeset =
    changeset(%{"decimal" => Decimal.new(5)})
    |> validate_number(:decimal, less_than: 4)
  refute changeset.valid?
end
# Unknown comparison options raise an ArgumentError.
test "validate_number/3 with bad options" do
  assert_raise ArgumentError, ~r"unknown option :min given to validate_number/3", fn ->
    validate_number(changeset(%{"upvotes" => 1}), :upvotes, min: Decimal.new("1.5"))
  end
end

# Non-numeric field values raise with a descriptive message.
test "validate_number/3 with bad value" do
  assert_raise ArgumentError, "expected field `virtual` to be a decimal, integer, or float, got: \"Oops\"", fn ->
    validate_number(changeset(%{"virtual" => "Oops"}), :virtual, greater_than: 0)
  end
end

# Non-numeric comparison targets raise, for both integer and decimal fields.
test "validate_number/3 with bad target" do
  # Number value
  assert_raise ArgumentError, "expected option `greater_than` to be a decimal, integer, or float, got: 0..10", fn ->
    validate_number(changeset(%{"upvotes" => 11}), :upvotes, greater_than: 0..10)
  end

  # Decimal value
  assert_raise ArgumentError, "expected option `greater_than` to be a decimal, integer, or float, got: 0..10", fn ->
    validate_number(changeset(%{"decimal" => Decimal.new(11)}), :decimal, greater_than: 0..10)
  end
end
# Exercises validate_confirmation/3: the "<field>_confirmation" param must
# match the field's value. Covers matching pairs, missing confirmation
# (optional by default, mandatory with `required: true`), mismatches,
# custom messages, cast coercion, and invalid params.
test "validate_confirmation/3" do
  # Matching confirmation passes and the validation is recorded.
  changeset = changeset(%{"title" => "title", "title_confirmation" => "title"})
              |> validate_confirmation(:title)
  assert changeset.valid?
  assert changeset.errors == []
  assert validations(changeset) == [{:title, {:confirmation, []}}]

  # Absent confirmation param is skipped by default.
  changeset = changeset(%{"title" => "title"})
              |> validate_confirmation(:title)
  assert changeset.valid?
  assert changeset.errors == []
  assert validations(changeset) == [{:title, {:confirmation, []}}]

  # `required: false` behaves like the default when the param is absent.
  changeset = changeset(%{"title" => "title"})
              |> validate_confirmation(:title, required: false)
  assert changeset.valid?
  assert changeset.errors == []
  assert validations(changeset) == [{:title, {:confirmation, [required: false]}}]

  # `required: true` turns a missing confirmation into a :required error.
  changeset = changeset(%{"title" => "title"})
              |> validate_confirmation(:title, required: true)
  refute changeset.valid?
  assert changeset.errors == [title_confirmation: {"can't be blank", [validation: :required]}]
  assert validations(changeset) == [{:title, {:confirmation, [required: true]}}]

  # An explicit nil confirmation is a mismatch, not a skip.
  changeset = changeset(%{"title" => "title", "title_confirmation" => nil})
              |> validate_confirmation(:title)
  refute changeset.valid?
  assert changeset.errors == [title_confirmation: {"does not match confirmation", [validation: :confirmation]}]
  assert validations(changeset) == [{:title, {:confirmation, []}}]

  # A differing confirmation produces the default mismatch error.
  changeset = changeset(%{"title" => "title", "title_confirmation" => "not title"})
              |> validate_confirmation(:title)
  refute changeset.valid?
  assert changeset.errors == [title_confirmation: {"does not match confirmation", [validation: :confirmation]}]
  assert validations(changeset) == [{:title, {:confirmation, []}}]

  # The error message is customizable via :message.
  changeset = changeset(%{"title" => "title", "title_confirmation" => "not title"})
              |> validate_confirmation(:title, message: "doesn't match field below")
  refute changeset.valid?
  assert changeset.errors == [title_confirmation: {"doesn't match field below", [validation: :confirmation]}]
  assert validations(changeset) == [{:title, {:confirmation, [message: "doesn't match field below"]}}]

  # Skip when no parameter
  changeset = changeset(%{"title" => "title"})
              |> validate_confirmation(:title, message: "password doesn't match")
  assert changeset.valid?
  assert changeset.errors == []
  assert validations(changeset) == [{:title, {:confirmation, [message: "password doesn't match"]}}]

  # With casting
  changeset = changeset(%{"upvotes" => "1", "upvotes_confirmation" => "1"})
              |> validate_confirmation(:upvotes)
  assert changeset.valid?
  assert changeset.errors == []
  assert validations(changeset) == [{:upvotes, {:confirmation, []}}]

  # With blank change
  changeset = changeset(%{"password" => "", "password_confirmation" => "password"})
              |> validate_confirmation(:password)
  refute changeset.valid?
  assert changeset.errors == [password_confirmation: {"does not match confirmation", [validation: :confirmation]}]

  # With missing change
  changeset = changeset(%{"password_confirmation" => "password"})
              |> validate_confirmation(:password)
  refute changeset.valid?
  assert changeset.errors == [password_confirmation: {"does not match confirmation", [validation: :confirmation]}]

  # invalid params
  changeset = changeset(:invalid)
              |> validate_confirmation(:password)
  refute changeset.valid?
  assert changeset.errors == []
  assert validations(changeset) == []
end
# Exercises validate_acceptance/3: the param must cast to true. "false",
# any other string, and a missing param all fail; invalid params record the
# validation without adding an error; :message customizes the error text.
test "validate_acceptance/3" do
  # accepted
  changeset = changeset(%{"terms_of_service" => "true"})
              |> validate_acceptance(:terms_of_service)
  assert changeset.valid?
  assert changeset.errors == []
  assert validations(changeset) == [terms_of_service: {:acceptance, []}]

  # not accepted
  changeset = changeset(%{"terms_of_service" => "false"})
              |> validate_acceptance(:terms_of_service)
  refute changeset.valid?
  assert changeset.errors == [terms_of_service: {"must be accepted", [validation: :acceptance]}]
  assert validations(changeset) == [terms_of_service: {:acceptance, []}]

  # a non-boolean string also counts as "not accepted"
  changeset = changeset(%{"terms_of_service" => "other"})
              |> validate_acceptance(:terms_of_service)
  refute changeset.valid?
  assert changeset.errors == [terms_of_service: {"must be accepted", [validation: :acceptance]}]
  assert validations(changeset) == [terms_of_service: {:acceptance, []}]

  # empty params
  changeset = changeset(%{})
              |> validate_acceptance(:terms_of_service)
  refute changeset.valid?
  assert changeset.errors == [terms_of_service: {"must be accepted", [validation: :acceptance]}]
  assert validations(changeset) == [terms_of_service: {:acceptance, []}]

  # invalid params
  changeset = changeset(:invalid)
              |> validate_acceptance(:terms_of_service)
  refute changeset.valid?
  assert changeset.errors == []
  assert validations(changeset) == [terms_of_service: {:acceptance, []}]

  # custom message
  changeset = changeset(%{})
              |> validate_acceptance(:terms_of_service, message: "must be abided")
  refute changeset.valid?
  assert changeset.errors == [terms_of_service: {"must be abided", [validation: :acceptance]}]
  assert validations(changeset) == [terms_of_service: {:acceptance, [message: "must be abided"]}]
end
alias Ecto.TestRepo
describe "unsafe_validate_unique/4" do
  # The stubbed TestRepo reads its query result from the process dictionary
  # (:test_repo_all_results); {1, [true]} simulates an existing duplicate
  # row, {0, []} simulates none.
  setup do
    dup_result = {1, [true]}
    no_dup_result = {0, []}
    base_changeset = changeset(%Post{}, %{"title" => "Hello World", "body" => "hi"})
    [dup_result: dup_result, no_dup_result: no_dup_result, base_changeset: base_changeset]
  end

  defmodule MockRepo do
    @moduledoc """
    Allows tests to verify or refute that a query was run.
    """

    # Instead of querying, message the calling test process with the query
    # and options it would have executed.
    def one(query, opts \\ []) do
      send(self(), [__MODULE__, function: :one, query: query, opts: opts])
    end
  end

  test "validates the uniqueness of a single field", context do
    # Duplicate found -> :unsafe_unique error on the field.
    Process.put(:test_repo_all_results, context.dup_result)
    changeset = unsafe_validate_unique(context.base_changeset, :title, TestRepo)
    assert changeset.errors ==
           [title: {"has already been taken", validation: :unsafe_unique, fields: [:title]}]
    assert changeset.validations == [title: {:unsafe_unique, fields: [:title]}]

    # No duplicate -> changeset stays valid.
    Process.put(:test_repo_all_results, context.no_dup_result)
    changeset = unsafe_validate_unique(context.base_changeset, :title, TestRepo)
    assert changeset.valid?
  end

  test "validates the uniqueness of a combination of fields", context do
    Process.put(:test_repo_all_results, context.dup_result)
    changeset = unsafe_validate_unique(context.base_changeset, [:title, :body], TestRepo)
    # The error is attached to the first field but lists all fields involved.
    assert changeset.errors ==
           [
             title:
               {"has already been taken", validation: :unsafe_unique, fields: [:title, :body]}
           ]
    assert changeset.validations == [title: {:unsafe_unique, fields: [:title, :body]}]

    Process.put(:test_repo_all_results, context.no_dup_result)
    changeset = unsafe_validate_unique(context.base_changeset, [:title, :body], TestRepo)
    assert changeset.valid?
  end

  test "does not validate uniqueness if there is any prior error on a field", context do
    Process.put(:test_repo_all_results, context.dup_result)
    changeset =
      context.base_changeset
      |> validate_length(:title, max: 3)
      |> unsafe_validate_unique(:title, TestRepo)

    # Only the pre-existing :length error is present; no DB check happened.
    refute changeset.valid?
    assert changeset.errors == [title: {"should be at most %{count} character(s)", [count: 3, validation: :length, kind: :max, type: :string]}]
  end

  test "does not validate uniqueness if there is any prior error on a combination of fields", context do
    Process.put(:test_repo_all_results, context.dup_result)
    changeset =
      context.base_changeset
      |> validate_length(:title, max: 3)
      |> unsafe_validate_unique([:title, :body], TestRepo)

    refute changeset.valid?
    assert changeset.errors == [title: {"should be at most %{count} character(s)", [count: 3, validation: :length, kind: :max, type: :string]}]
  end

  test "allows setting a custom error message", context do
    Process.put(:test_repo_all_results, context.dup_result)
    changeset =
      unsafe_validate_unique(context.base_changeset, [:title], TestRepo, message: "is taken")
    assert changeset.errors ==
           [title: {"is taken", validation: :unsafe_unique, fields: [:title]}]
  end

  test "allows setting a custom error key", context do
    Process.put(:test_repo_all_results, context.dup_result)
    changeset =
      unsafe_validate_unique(context.base_changeset, [:title], TestRepo, message: "is taken", error_key: :foo)
    # :error_key moves the error from the field name to the given key.
    assert changeset.errors ==
           [foo: {"is taken", validation: :unsafe_unique, fields: [:title]}]
  end

  test "accepts a prefix option" do
    body_change = changeset(%Post{title: "Hello World", body: "hi"}, %{body: "ho"})
    unsafe_validate_unique(body_change, :body, MockRepo, prefix: "my_prefix")
    # MockRepo echoes the query back so we can assert the prefix was set.
    assert_receive [MockRepo, function: :one, query: %Ecto.Query{prefix: "my_prefix"}, opts: []]
  end

  test "accepts repo options" do
    body_change = changeset(%Post{title: "Hello World", body: "hi"}, %{body: "ho"})
    unsafe_validate_unique(body_change, :body, MockRepo, repo_opts: [tenant_id: 1])
    # :repo_opts are forwarded verbatim to the repo call.
    assert_receive [MockRepo, function: :one, query: %Ecto.Query{}, opts: [tenant_id: 1]]
  end

  test "accepts query options" do
    body_change = changeset(%Post{title: "Hello World", body: "hi"}, %{body: "ho"})
    unsafe_validate_unique(body_change, :body, MockRepo, query: Ecto.Query.from(p in Post, where: is_nil(p.published_at)))
    assert_receive [MockRepo, function: :one, query: %Ecto.Query{wheres: wheres}, opts: []]
    # The custom query's where comes first, then the uniqueness check.
    assert [%{expr: query_expr}, %{expr: check_expr}] = wheres
    assert Macro.to_string(query_expr) == "is_nil(&0.published_at())"
    assert Macro.to_string(check_expr) == "&0.body() == ^0"
  end

  # TODO: AST is represented as string differently on versions pre 1.13
  if Version.match?(System.version(), ">= 1.13.0-dev") do
    test "generates correct where clause for single primary key without query option" do
      body_change = cast(%SinglePkSchema{id: 0, body: "hi"}, %{body: "ho"}, [:body])
      unsafe_validate_unique(body_change, :body, MockRepo)
      assert_receive [MockRepo, function: :one, query: %Ecto.Query{wheres: wheres}, opts: []]
      # Loaded records are excluded from the check via their primary key.
      assert [%{expr: pk_expr}, %{expr: check_expr}] = wheres
      assert Macro.to_string(pk_expr) == "not (&0.id() == ^0)"
      assert Macro.to_string(check_expr) == "&0.body() == ^0"
    end

    test "generates correct where clause for composite primary keys without query option" do
      body_change = changeset(%Post{id: 0, token: 1, body: "hi"}, %{body: "ho"})
      unsafe_validate_unique(body_change, :body, MockRepo)
      assert_receive [MockRepo, function: :one, query: %Ecto.Query{wheres: wheres}, opts: []]
      assert [%{expr: pk_expr}, %{expr: check_expr}] = wheres
      assert Macro.to_string(pk_expr) == "not (&0.id() == ^0 and &0.token() == ^1)"
      assert Macro.to_string(check_expr) == "&0.body() == ^0"
    end

    test "generates correct where clause for single primary key with query option" do
      body_change = cast(%SinglePkSchema{id: 0, body: "hi"}, %{body: "ho"}, [:body])
      unsafe_validate_unique(body_change, :body, MockRepo, query: Ecto.Query.from(p in SinglePkSchema, where: is_nil(p.published_at)))
      assert_receive [MockRepo, function: :one, query: %Ecto.Query{wheres: wheres}, opts: []]
      assert [%{expr: query_expr}, %{expr: pk_expr}, %{expr: check_expr}] = wheres
      assert Macro.to_string(query_expr) == "is_nil(&0.published_at())"
      assert Macro.to_string(pk_expr) == "not (&0.id() == ^0)"
      assert Macro.to_string(check_expr) == "&0.body() == ^0"
    end

    test "generates correct where clause for composite primary keys with query option" do
      body_change = changeset(%Post{id: 0, token: 1, body: "hi"}, %{body: "ho"})
      unsafe_validate_unique(body_change, :body, MockRepo, query: Ecto.Query.from(p in Post, where: is_nil(p.published_at)))
      assert_receive [MockRepo, function: :one, query: %Ecto.Query{wheres: wheres}, opts: []]
      assert [%{expr: query_expr}, %{expr: pk_expr}, %{expr: check_expr}] = wheres
      assert Macro.to_string(query_expr) == "is_nil(&0.published_at())"
      assert Macro.to_string(pk_expr) == "not (&0.id() == ^0 and &0.token() == ^1)"
      assert Macro.to_string(check_expr) == "&0.body() == ^0"
    end
  else
    # Same four scenarios; Elixir < 1.13 prints `not(...)` without a space.
    test "generates correct where clause for single primary key without query option" do
      body_change = cast(%SinglePkSchema{id: 0, body: "hi"}, %{body: "ho"}, [:body])
      unsafe_validate_unique(body_change, :body, MockRepo)
      assert_receive [MockRepo, function: :one, query: %Ecto.Query{wheres: wheres}, opts: []]
      assert [%{expr: pk_expr}, %{expr: check_expr}] = wheres
      assert Macro.to_string(pk_expr) == "not(&0.id() == ^0)"
      assert Macro.to_string(check_expr) == "&0.body() == ^0"
    end

    test "generates correct where clause for composite primary keys without query option" do
      body_change = changeset(%Post{id: 0, token: 1, body: "hi"}, %{body: "ho"})
      unsafe_validate_unique(body_change, :body, MockRepo)
      assert_receive [MockRepo, function: :one, query: %Ecto.Query{wheres: wheres}, opts: []]
      assert [%{expr: pk_expr}, %{expr: check_expr}] = wheres
      assert Macro.to_string(pk_expr) == "not(&0.id() == ^0 and &0.token() == ^1)"
      assert Macro.to_string(check_expr) == "&0.body() == ^0"
    end

    test "generates correct where clause for single primary key with query option" do
      body_change = cast(%SinglePkSchema{id: 0, body: "hi"}, %{body: "ho"}, [:body])
      unsafe_validate_unique(body_change, :body, MockRepo, query: Ecto.Query.from(p in SinglePkSchema, where: is_nil(p.published_at)))
      assert_receive [MockRepo, function: :one, query: %Ecto.Query{wheres: wheres}, opts: []]
      assert [%{expr: query_expr}, %{expr: pk_expr}, %{expr: check_expr}] = wheres
      assert Macro.to_string(query_expr) == "is_nil(&0.published_at())"
      assert Macro.to_string(pk_expr) == "not(&0.id() == ^0)"
      assert Macro.to_string(check_expr) == "&0.body() == ^0"
    end

    test "generates correct where clause for composite primary keys with query option" do
      body_change = changeset(%Post{id: 0, token: 1, body: "hi"}, %{body: "ho"})
      unsafe_validate_unique(body_change, :body, MockRepo, query: Ecto.Query.from(p in Post, where: is_nil(p.published_at)))
      assert_receive [MockRepo, function: :one, query: %Ecto.Query{wheres: wheres}, opts: []]
      assert [%{expr: query_expr}, %{expr: pk_expr}, %{expr: check_expr}] = wheres
      assert Macro.to_string(query_expr) == "is_nil(&0.published_at())"
      assert Macro.to_string(pk_expr) == "not(&0.id() == ^0 and &0.token() == ^1)"
      assert Macro.to_string(check_expr) == "&0.body() == ^0"
    end
  end

  test "only queries the db when necessary" do
    body_change = changeset(%Post{title: "Hello World", body: "hi"}, %{body: "ho"})

    # :body changed -> query runs.
    unsafe_validate_unique(body_change, :body, MockRepo)
    assert_receive [MockRepo, function: :one, query: %Ecto.Query{}, opts: []]

    # Any overlap between changed and unique fields -> query runs.
    unsafe_validate_unique(body_change, [:body, :title], MockRepo)
    assert_receive [MockRepo, function: :one, query: %Ecto.Query{}, opts: []]

    unsafe_validate_unique(body_change, :title, MockRepo)
    # no overlap between changed fields and those required to be unique
    refute_receive [MockRepo, function: :one, query: %Ecto.Query{}, opts: []]
  end
end
## Locks
# Applies every queued `prepare` callback to the changeset, in order, and
# returns the resulting changes map (as Repo would before an operation).
defp prepared_changes(changeset) do
  changeset.prepare
  |> Enum.reduce(changeset, fn prepare_fun, acc -> prepare_fun.(acc) end)
  |> Map.fetch!(:changes)
end
# The default incrementer adds 1 to the lock field; the current value
# becomes a filter for the UPDATE, and the increment happens in `prepare`.
test "optimistic_lock/3 with changeset with default incrementer" do
  changeset = changeset(%{}) |> optimistic_lock(:upvotes)
  assert changeset.filters == %{upvotes: 0}
  assert changeset.changes == %{}
  assert prepared_changes(changeset) == %{upvotes: 1}

  # A param change to the lock field is used as the filter base.
  changeset = changeset(%Post{upvotes: 2}, %{upvotes: 1}) |> optimistic_lock(:upvotes)
  assert changeset.filters == %{upvotes: 1}
  assert changeset.changes == %{upvotes: 1}
  assert prepared_changes(changeset) == %{upvotes: 2}

  # Assert default increment will rollover to 1 when the current one is equal or greater than 2_147_483_647
  changeset = changeset(%Post{upvotes: 2_147_483_647}, %{}) |> optimistic_lock(:upvotes)
  assert changeset.filters == %{upvotes: 2_147_483_647}
  assert changeset.changes == %{}
  assert prepared_changes(changeset) == %{upvotes: 1}

  changeset = changeset(%Post{upvotes: 3_147_483_647}, %{}) |> optimistic_lock(:upvotes)
  assert changeset.filters == %{upvotes: 3_147_483_647}
  assert changeset.changes == %{}
  assert prepared_changes(changeset) == %{upvotes: 1}

  # Rollover also applies when the over-limit value arrives via params.
  changeset = changeset(%Post{upvotes: 2_147_483_647}, %{upvotes: 2_147_483_648}) |> optimistic_lock(:upvotes)
  assert changeset.filters == %{upvotes: 2_147_483_648}
  assert changeset.changes == %{upvotes: 2_147_483_648}
  assert prepared_changes(changeset) == %{upvotes: 1}
end
# optimistic_lock/3 also accepts a bare struct, wrapping it in a changeset.
test "optimistic_lock/3 with struct" do
  changeset = %Post{} |> optimistic_lock(:upvotes)
  assert changeset.filters == %{upvotes: 0}
  assert changeset.changes == %{}
  assert prepared_changes(changeset) == %{upvotes: 1}
end

# A custom incrementer function replaces the default `&(&1 + 1)`.
test "optimistic_lock/3 with custom incrementer" do
  changeset = %Post{} |> optimistic_lock(:upvotes, &(&1 - 1))
  assert changeset.filters == %{upvotes: 0}
  assert changeset.changes == %{}
  assert prepared_changes(changeset) == %{upvotes: -1}
end
## Constraints
# check_constraint/3 registers a CHECK-constraint mapping; :name is
# mandatory, :message customizes the error, and :match must be valid.
test "check_constraint/3" do
  changeset = change(%Post{}) |> check_constraint(:title, name: :title_must_be_short)
  assert constraints(changeset) ==
         [%{type: :check, field: :title, constraint: "title_must_be_short", match: :exact,
            error_message: "is invalid", error_type: :check}]

  changeset = change(%Post{}) |> check_constraint(:title, name: :title_must_be_short, message: "cannot be more than 15 characters")
  assert constraints(changeset) ==
         [%{type: :check, field: :title, constraint: "title_must_be_short", match: :exact,
            error_message: "cannot be more than 15 characters", error_type: :check}]

  assert_raise ArgumentError, ~r/invalid match type: :invalid/, fn ->
    change(%Post{}) |> check_constraint(:title, name: :whatever, match: :invalid, message: "match is invalid")
  end

  # Missing :name raises before the changeset is even inspected.
  assert_raise ArgumentError, ~r/supply the name/, fn ->
    check_constraint(:title, message: "cannot be more than 15 characters")
  end
end
# unique_constraint/3: default name derives from table + fields; :name,
# :message and :match (:exact/:suffix/:prefix) override the defaults.
test "unique_constraint/3" do
  changeset = change(%Post{}) |> unique_constraint(:title)
  assert constraints(changeset) ==
         [%{type: :unique, field: :title, constraint: "posts_title_index", match: :exact,
            error_message: "has already been taken", error_type: :unique}]

  changeset = change(%Post{}) |> unique_constraint(:title, name: :whatever, message: "is taken")
  assert constraints(changeset) ==
         [%{type: :unique, field: :title, constraint: "whatever", match: :exact, error_message: "is taken", error_type: :unique}]

  changeset = change(%Post{}) |> unique_constraint(:title, name: :whatever, match: :suffix, message: "is taken")
  assert constraints(changeset) ==
         [%{type: :unique, field: :title, constraint: "whatever", match: :suffix, error_message: "is taken", error_type: :unique}]

  changeset = change(%Post{}) |> unique_constraint(:title, name: :whatever, match: :prefix, message: "is taken")
  assert constraints(changeset) ==
         [%{type: :unique, field: :title, constraint: "whatever", match: :prefix, error_message: "is taken", error_type: :unique}]

  assert_raise ArgumentError, ~r/invalid match type: :invalid/, fn ->
    change(%Post{}) |> unique_constraint(:title, name: :whatever, match: :invalid, message: "is taken")
  end
end

# When the field has a :source option, the constraint name uses the source
# column ("url"), not the field name ("permalink").
test "unique_constraint/3 on field with :source" do
  changeset = change(%Post{}) |> unique_constraint(:permalink)
  assert constraints(changeset) ==
         [%{type: :unique, field: :permalink, constraint: "posts_url_index", match: :exact,
            error_message: "has already been taken", error_type: :unique}]

  changeset = change(%Post{}) |> unique_constraint(:permalink, name: :whatever, message: "is taken")
  assert constraints(changeset) ==
         [%{type: :unique, field: :permalink, constraint: "whatever", match: :exact, error_message: "is taken", error_type: :unique}]

  changeset = change(%Post{}) |> unique_constraint(:permalink, name: :whatever, match: :suffix, message: "is taken")
  assert constraints(changeset) ==
         [%{type: :unique, field: :permalink, constraint: "whatever", match: :suffix, error_message: "is taken", error_type: :unique}]

  assert_raise ArgumentError, ~r/invalid match type: :invalid/, fn ->
    change(%Post{}) |> unique_constraint(:permalink, name: :whatever, match: :invalid, message: "is taken")
  end
end

# Multiple fields join into one constraint name; the error attaches to the
# first field unless :error_key selects another.
test "unique_constraint/3 with multiple fields" do
  changeset = change(%Post{}) |> unique_constraint([:permalink, :color])
  assert constraints(changeset) ==
         [%{type: :unique, field: :permalink, constraint: "posts_url_color_index", match: :exact,
            error_message: "has already been taken", error_type: :unique}]

  changeset = change(%Post{}) |> unique_constraint([:permalink, :color], error_key: :color)
  assert constraints(changeset) ==
         [%{type: :unique, field: :color, constraint: "posts_url_color_index", match: :exact,
            error_message: "has already been taken", error_type: :unique}]
end
# foreign_key_constraint/3 registers an FK mapping keyed by field name.
test "foreign_key_constraint/3" do
  changeset = change(%Comment{}) |> foreign_key_constraint(:post_id)
  assert constraints(changeset) ==
         [%{type: :foreign_key, field: :post_id, constraint: "comments_post_id_fkey", match: :exact,
            error_message: "does not exist", error_type: :foreign}]

  changeset = change(%Comment{}) |> foreign_key_constraint(:post_id, name: :whatever, message: "is not available")
  assert constraints(changeset) ==
         [%{type: :foreign_key, field: :post_id, constraint: "whatever", match: :exact, error_message: "is not available", error_type: :foreign}]
end

# With :source, the default constraint name uses the source column.
test "foreign_key_constraint/3 on field with :source" do
  changeset = change(%Post{}) |> foreign_key_constraint(:permalink)
  assert constraints(changeset) ==
         [%{type: :foreign_key, field: :permalink, constraint: "posts_url_fkey", match: :exact,
            error_message: "does not exist", error_type: :foreign}]

  changeset = change(%Post{}) |> foreign_key_constraint(:permalink, name: :whatever, message: "is not available")
  assert constraints(changeset) ==
         [%{type: :foreign_key, field: :permalink, constraint: "whatever", match: :exact, error_message: "is not available", error_type: :foreign}]
end

# assoc_constraint/3 derives the FK name from the belongs_to association
# and attaches the error to the association field (:post), not the FK.
test "assoc_constraint/3" do
  changeset = change(%Comment{}) |> assoc_constraint(:post)
  assert constraints(changeset) ==
         [%{type: :foreign_key, field: :post, constraint: "comments_post_id_fkey", match: :exact,
            error_message: "does not exist", error_type: :assoc}]

  changeset = change(%Comment{}) |> assoc_constraint(:post, name: :whatever, message: "is not available")
  assert constraints(changeset) ==
         [%{type: :foreign_key, field: :post, constraint: "whatever", match: :exact, error_message: "is not available", error_type: :assoc}]
end

test "assoc_constraint/3 on field with :source" do
  changeset = change(%Post{}) |> assoc_constraint(:category)
  assert constraints(changeset) ==
         [%{type: :foreign_key, field: :category, constraint: "posts_category_id_fkey", match: :exact,
            error_message: "does not exist", error_type: :assoc}]

  changeset = change(%Post{}) |> assoc_constraint(:category, name: :whatever, message: "is not available")
  assert constraints(changeset) ==
         [%{type: :foreign_key, field: :category, constraint: "whatever", match: :exact, error_message: "is not available", error_type: :assoc}]
end
# Unknown or wrong-kind associations raise with helpful messages
# (including "did you mean" suggestions).
test "assoc_constraint/3 with errors" do
  message = ~r"cannot add constraint to changeset because association `unknown` does not exist. Did you mean one of `category`, `comment`, `comments`?"
  assert_raise ArgumentError, message, fn ->
    change(%Post{}) |> assoc_constraint(:unknown)
  end

  message = ~r"assoc_constraint can only be added to belongs to associations"
  assert_raise ArgumentError, message, fn ->
    change(%Post{}) |> assoc_constraint(:comments)
  end
end

# no_assoc_constraint/3 guards deletes; has_many uses a plural message.
test "no_assoc_constraint/3 with has_many" do
  changeset = change(%Post{}) |> no_assoc_constraint(:comments)
  assert constraints(changeset) ==
         [%{type: :foreign_key, field: :comments, constraint: "comments_post_id_fkey", match: :exact,
            error_message: "are still associated with this entry", error_type: :no_assoc}]

  changeset = change(%Post{}) |> no_assoc_constraint(:comments, name: :whatever, message: "exists")
  assert constraints(changeset) ==
         [%{type: :foreign_key, field: :comments, constraint: "whatever", match: :exact,
            error_message: "exists", error_type: :no_assoc}]
end

# has_one uses the singular message variant.
test "no_assoc_constraint/3 with has_one" do
  changeset = change(%Post{}) |> no_assoc_constraint(:comment)
  assert constraints(changeset) ==
         [%{type: :foreign_key, field: :comment, constraint: "comments_post_id_fkey", match: :exact,
            error_message: "is still associated with this entry", error_type: :no_assoc}]

  changeset = change(%Post{}) |> no_assoc_constraint(:comment, name: :whatever, message: "exists")
  assert constraints(changeset) ==
         [%{type: :foreign_key, field: :comment, constraint: "whatever", match: :exact,
            error_message: "exists", error_type: :no_assoc}]
end

test "no_assoc_constraint/3 with errors" do
  message = ~r"cannot add constraint to changeset because association `unknown` does not exist"
  assert_raise ArgumentError, message, fn ->
    change(%Post{}) |> no_assoc_constraint(:unknown)
  end

  message = ~r"no_assoc_constraint can only be added to has one/many associations"
  assert_raise ArgumentError, message, fn ->
    change(%Comment{}) |> no_assoc_constraint(:post)
  end
end

# exclusion_constraint/3 maps EXCLUDE constraints (e.g. Postgres ranges).
test "exclusion_constraint/3" do
  changeset = change(%Post{}) |> exclusion_constraint(:title)
  assert constraints(changeset) ==
         [%{type: :exclusion, field: :title, constraint: "posts_title_exclusion", match: :exact,
            error_message: "violates an exclusion constraint", error_type: :exclusion}]

  changeset = change(%Post{}) |> exclusion_constraint(:title, name: :whatever, message: "is invalid")
  assert constraints(changeset) ==
         [%{type: :exclusion, field: :title, constraint: "whatever", match: :exact,
            error_message: "is invalid", error_type: :exclusion}]

  assert_raise ArgumentError, ~r/invalid match type: :invalid/, fn ->
    change(%Post{}) |> exclusion_constraint(:title, name: :whatever, match: :invalid, message: "match is invalid")
  end
end
## traverse_errors
# traverse_errors/2 with a 1-arity mapper: each {message, keys} tuple can be
# pattern-matched to produce a custom rendering per error kind.
test "traverses changeset errors" do
  changeset =
    changeset(%{"title" => "title", "body" => "hi", "upvotes" => :bad})
    |> validate_length(:body, min: 3)
    |> validate_format(:body, ~r/888/)
    |> add_error(:title, "is taken", name: "your title")

  errors = traverse_errors(changeset, fn
    {"is invalid", [type: type, validation: :cast]} ->
      "expected to be #{inspect(type)}"
    {"is taken", keys} ->
      String.upcase("#{keys[:name]} is taken")
    {msg, keys} ->
      # Fallback: interpolate %{count} manually, then upcase.
      msg
      |> String.replace("%{count}", to_string(keys[:count]))
      |> String.upcase()
  end)

  assert errors == %{
    body: ["HAS INVALID FORMAT", "SHOULD BE AT LEAST 3 CHARACTER(S)"],
    title: ["YOUR TITLE IS TAKEN"],
    upvotes: ["expected to be :integer"],
  }
end
# traverse_errors/2 with a 3-arity mapper: the changeset and the field are
# also passed in, so the mapper can consult the recorded validations.
test "traverses changeset errors with field" do
  changeset =
    changeset(%{"title" => "title", "body" => "hi", "upvotes" => :bad})
    |> validate_length(:body, min: 3)
    |> validate_format(:body, ~r/888/)
    |> validate_inclusion(:body, ["hola", "bonjour", "hallo"])
    |> add_error(:title, "is taken", name: "your title")

  errors = traverse_errors(changeset, fn
    %Ecto.Changeset{}, field, {_, [type: type, validation: :cast]} ->
      "expected #{field} to be #{inspect(type)}"
    %Ecto.Changeset{}, field, {_, [name: "your title"]} ->
      "value in #{field} is taken"
      |> String.upcase()
    %Ecto.Changeset{}, field, {_, [count: 3, validation: :length, kind: :min, type: :string] = keys} ->
      "should be at least #{keys[:count]} character(s) in field #{field}"
      |> String.upcase()
    %Ecto.Changeset{validations: validations}, field, {_, [validation: :format]} ->
      # Recovers the original regex from the changeset's validations.
      validation = Keyword.get_values(validations, field)
      "field #{field} should match format #{inspect validation[:format]}"
    %Ecto.Changeset{validations: validations}, field, {_, [validation: :inclusion, enum: _]} ->
      validation = Keyword.get_values(validations, field)
      values = Enum.join(validation[:inclusion], ", ")
      "#{field} value should be in #{values}"
  end)

  assert errors == %{
    body: ["body value should be in hola, bonjour, hallo",
           "field body should match format ~r/888/",
           "SHOULD BE AT LEAST 3 CHARACTER(S) IN FIELD BODY"],
    title: ["VALUE IN TITLE IS TAKEN"],
    upvotes: ["expected upvotes to be :integer"],
  }
end
## traverse_validations
# traverse_validations/2 with a 1-arity mapper: maps each recorded
# {kind, opts} validation tuple to a custom representation per field.
test "traverses changeset validations" do
  changeset =
    changeset(%{"title" => "title", "body" => "hi", "upvotes" => :bad})
    |> validate_length(:body, min: 3)
    |> validate_format(:body, ~r/888/)
    |> validate_inclusion(:upvotes, [:good, :bad])

  validations = traverse_validations(changeset, fn
    {:length, opts} -> {:length, "#{Keyword.get(opts, :min, 0)}-#{Keyword.get(opts, :max, 32)}"}
    {:format, %Regex{source: source}} -> {:format, "/#{source}/"}
    {:inclusion, enum} -> {:inclusion, Enum.join(enum, ", ")}
    {other, opts} -> {other, inspect(opts)}
  end)

  assert validations == %{
    body: [format: "/888/", length: "3-32"],
    upvotes: [inclusion: "good, bad"],
  }
end

# The 3-arity variant also receives the changeset and the field name.
test "traverses changeset validations with field" do
  changeset =
    changeset(%{"title" => "title", "body" => "hi", "upvotes" => :bad})
    |> validate_length(:body, min: 3)
    |> validate_format(:body, ~r/888/)
    |> validate_inclusion(:upvotes, [:good, :bad])

  validations = traverse_validations(changeset, fn
    %Ecto.Changeset{}, field, {:length, opts} ->
      "#{field} must be #{Keyword.get(opts, :min, 0)}-#{Keyword.get(opts, :max, 32)} long"
    %Ecto.Changeset{}, field, {:format, %Regex{source: source}} ->
      "#{field} must match /#{source}/"
    %Ecto.Changeset{}, field, {:inclusion, enum} ->
      "#{field} must be one of: #{Enum.join(enum, ", ")}"
  end)

  assert validations == %{
    body: ["body must match /888/", "body must be 3-32 long"],
    upvotes: ["upvotes must be one of: good, bad"],
  }
end
## inspect
# Fixture schema exercising the `redact: true` field option in the
# Inspect implementation tests below; covers explicit true/false/default
# and a redacted virtual field.
defmodule RedactedSchema do
  use Ecto.Schema

  schema "redacted_schema" do
    field :password, :string, redact: true
    field :username, :string
    field :display_name, :string, redact: false
    field :virtual_pass, :string, redact: true, virtual: true
  end
end

# Same fields as RedactedSchema, but as an embedded (table-less) schema.
defmodule RedactedEmbeddedSchema do
  use Ecto.Schema

  embedded_schema do
    field :password, :string, redact: true
    field :username, :string
    field :display_name, :string, redact: false
    field :virtual_pass, :string, redact: true, virtual: true
  end
end
describe "inspect" do
  # The Inspect implementation shows action/changes/errors/data/valid? and
  # abbreviates the underlying data struct.
  test "reveals relevant data" do
    assert inspect(%Ecto.Changeset{}) ==
           "#Ecto.Changeset<action: nil, changes: %{}, errors: [], data: nil, valid?: false>"

    assert inspect(changeset(%{"title" => "title", "body" => "hi"})) ==
           "#Ecto.Changeset<action: nil, changes: %{body: \"hi\", title: \"title\"}, " <>
             "errors: [], data: #Ecto.ChangesetTest.Post<>, valid?: true>"

    # Schemaless changesets ({data, types} tuples) render the same way.
    data = {%NoSchemaPost{title: "hello"}, %{title: :string, upvotes: :integer}}
    params = %{"title" => "world", "upvotes" => "0"}

    assert inspect(cast(data, params, ~w(title upvotes)a)) ==
           "#Ecto.Changeset<action: nil, changes: %{title: \"world\", upvotes: 0}, " <>
             "errors: [], data: #Ecto.ChangesetTest.NoSchemaPost<>, valid?: true>"
  end

  test "redacts fields marked redact: true" do
    changeset = Ecto.Changeset.cast(%RedactedSchema{}, %{password: "hunter2"}, [:password])
    refute inspect(changeset) =~ "hunter2"
    assert inspect(changeset) =~ "**redacted**"

    # Same behavior for embedded schemas.
    changeset = Ecto.Changeset.cast(%RedactedEmbeddedSchema{}, %{password: "hunter2"}, [:password])
    refute inspect(changeset) =~ "hunter2"
    assert inspect(changeset) =~ "**redacted**"
  end

  test "redacts virtual fields marked redact: true" do
    changeset = Ecto.Changeset.cast(%RedactedSchema{}, %{virtual_pass: "hunter2"}, [:virtual_pass])
    refute inspect(changeset) =~ "hunter2"
    assert inspect(changeset) =~ "**redacted**"
  end

  test "doesn't redact fields without redacted (defaults to false)" do
    changeset = Ecto.Changeset.cast(%RedactedSchema{}, %{username: "hunter2"}, [:username])
    assert inspect(changeset) =~ "hunter2"
    refute inspect(changeset) =~ "**redacted**"
  end

  test "doesn't redact fields marked redact: false" do
    changeset = Ecto.Changeset.cast(%RedactedSchema{}, %{display_name: "hunter2"}, [:display_name])
    assert inspect(changeset) =~ "hunter2"
    refute inspect(changeset) =~ "**redacted**"
  end
end
end
| 38.860432 | 167 | 0.633932 |
084c1e0e13417548218fedf2514d3661eff8754e | 846 | ex | Elixir | apps/performance_3/lib/metrics_collector/web/endpoint.ex | WhiteRookPL/elixir-fire-brigade-workshop | 1c6183339fc623842a09f4d10be75bcecf2c37e7 | [
"MIT"
] | 14 | 2017-08-09T14:21:47.000Z | 2022-03-11T04:10:49.000Z | apps/performance_3/lib/metrics_collector/web/endpoint.ex | nicholasjhenry/elixir-fire-brigade-workshop | 1c6183339fc623842a09f4d10be75bcecf2c37e7 | [
"MIT"
] | null | null | null | apps/performance_3/lib/metrics_collector/web/endpoint.ex | nicholasjhenry/elixir-fire-brigade-workshop | 1c6183339fc623842a09f4d10be75bcecf2c37e7 | [
"MIT"
] | 15 | 2017-09-05T15:43:53.000Z | 2020-04-13T16:20:18.000Z | defmodule MetricsCollector.Web.Endpoint do
use Phoenix.Endpoint, otp_app: :metrics_collector
plug Plug.RequestId
plug Plug.Logger
plug Plug.Parsers,
parsers: [ :urlencoded, :multipart, :json ],
pass: [ "*/*" ],
json_decoder: Poison
plug Plug.MethodOverride
plug Plug.Head
plug Plug.Session,
store: :cookie,
key: "_metrics_collector_key",
signing_salt: "l+YpBwbw"
plug MetricsCollector.Web.Router
@doc """
Dynamically loads configuration from the system environment
on startup.
It receives the endpoint configuration from the config files
and must return the updated configuration.
"""
def load_from_system_env(config) do
port = System.get_env("PORT") || raise "expected the PORT environment variable to be set"
{:ok, Keyword.put(config, :http, [:inet6, port: port])}
end
end
| 24.882353 | 93 | 0.712766 |
084c1fecf603fc84f6017c9f7e091bbeda1eec7c | 5,107 | ex | Elixir | apps/publishing/lib/publishing/manage.ex | Lgdev07/branchpage | 302731d35446292ca7b9ec67c46db67cd0393849 | [
"MIT"
] | null | null | null | apps/publishing/lib/publishing/manage.ex | Lgdev07/branchpage | 302731d35446292ca7b9ec67c46db67cd0393849 | [
"MIT"
] | null | null | null | apps/publishing/lib/publishing/manage.ex | Lgdev07/branchpage | 302731d35446292ca7b9ec67c46db67cd0393849 | [
"MIT"
defmodule Publishing.Manage do
  @moduledoc """
  Manage's public API.

  Functions for listing, loading, building and persisting blogs and
  articles sourced from integrated external platforms.
  """

  alias Publishing.Integration
  alias Publishing.Manage.{Article, Blog, Platform}
  alias Publishing.Markdown
  alias Publishing.Repo

  import Ecto.Query

  # Returns `{end_cursor, articles}`: a page of articles, newest first,
  # keyset-paginated on `inserted_at`.
  #
  # Options:
  #   * `:cursor` - only articles inserted strictly before this datetime
  #     are returned (defaults to the current UTC time)
  #   * `:limit`  - maximum page size (defaults to 10)
  #
  # `end_cursor` is the `inserted_at` of the last article in the page (pass
  # it back as `:cursor` to fetch the next page), or `nil` when no articles
  # were found.
  def list_articles(opts \\ []) do
    start_cursor = opts[:cursor] || DateTime.utc_now()
    limit = opts[:limit] || 10

    articles =
      Article
      |> from()
      |> order_by([a], desc: a.inserted_at)
      |> limit(^limit)
      |> where([a], a.inserted_at < ^start_cursor)
      |> preload(:blog)
      |> Repo.all()

    case articles do
      [] ->
        {nil, []}

      articles ->
        end_cursor =
          articles
          |> List.last()
          |> Map.get(:inserted_at)

        {end_cursor, articles}
    end
  end

  # Loads the blog for `username`, refreshes its profile fields (fullname,
  # bio, avatar URL) from the integrated platform and persists them.
  #
  # Raises `Publishing.PageNotFound` when the blog does not exist or the
  # remote profile data cannot be fetched/saved.
  def load_blog!(username) do
    db_blog =
      Blog
      |> Repo.get_by!(username: username)
      |> Repo.preload([:articles, :platform])

    blog = build_blog(db_blog)

    content = %{
      fullname: blog.fullname,
      bio: blog.bio,
      avatar_url: blog.avatar_url
    }

    {:ok, _} =
      db_blog
      |> Blog.changeset(content)
      |> Repo.update()

    Map.merge(db_blog, content)
  rescue
    # Any failure in the chain above is surfaced uniformly as a 404-style
    # page error.
    _error ->
      reraise Publishing.PageNotFound, __STACKTRACE__
  end

  # Fetches fresh profile data for the blog from its platform's integration
  # service and returns it merged into an (unpersisted) `%Blog{}`.
  defp build_blog(%Blog{} = blog) do
    %{username: username, platform: %{name: platform}} = blog

    {:ok, integration} = Integration.service(platform)
    {:ok, content} = integration.get_blog_data(username)

    %Blog{}
    |> Map.merge(content)
  end

  @doc """
  Loads an article from database.

  Re-fetches the article body from its source URL; if the source is gone,
  the stale record is deleted. Raises `Publishing.PageNotFound` on any
  failure (missing record, wrong owner, unreachable source).
  """
  @spec load_article!(String.t(), any) :: Article.t()
  def load_article!(username, id) do
    db_article =
      Article
      |> Repo.get!(id)
      |> Repo.preload(:blog)

    # Assert the article actually belongs to the blog named in the URL.
    ^username = db_article.blog.username

    # `build_article/1` returns `{:ok, article}` on success, which passes
    # straight through the `with`. On `{:error, _}` the stale record is
    # deleted and `:fail` is returned, which breaks the `{:ok, article}`
    # match — the resulting MatchError is turned into PageNotFound by the
    # rescue below.
    {:ok, article} =
      with {:error, _} <- build_article(db_article.url) do
        Repo.delete(db_article)
        :fail
      end

    %{db_article | body: article.body}
  rescue
    _error ->
      reraise Publishing.PageNotFound, __STACKTRACE__
  end

  @doc """
  Saves an article struct to the database.

  Creates the owning blog (and its platform) first when necessary.
  """
  @spec save_article(Article.t()) :: {:ok, Article.t()} | {:error, String.t()}
  def save_article(%Article{} = article) do
    {:ok, blog} = upsert_blog(article)

    attrs =
      article
      |> Map.from_struct()
      |> Map.merge(%{blog_id: blog.id})

    %Article{}
    |> Article.changeset(attrs)
    |> Repo.insert()
    |> case do
      # Collapse changeset errors into a single human-readable message.
      {:error, changeset} ->
        {:error, Article.get_error(changeset)}

      {:ok, %Article{}} = success ->
        success
    end
  end

  @doc """
  Build an article struct from the given `url`.
  """
  @spec build_article(String.t()) :: {:ok, Article.t()} | {:error, String.t()}
  def build_article(url) do
    with url <- String.trim(url),
         {:ok, _url} <- validate_url(url),
         {:ok, integration} <- Integration.service(url),
         {:ok, username} <- integration.get_username(url),
         {:ok, content} <- integration.get_content(url) do
      # Derive display metadata from the raw markdown content.
      title = Markdown.get_title(content)
      description = Markdown.get_description(content)
      cover = Markdown.get_cover(content)
      html = Markdown.parse(content)
      blog = %Blog{username: username}

      {:ok,
       %Article{
         body: html,
         title: title,
         description: description,
         cover: cover,
         url: url,
         blog: blog
       }}
    else
      # Map each failure tag from the steps above to a user-facing message.
      {:error, :scheme} ->
        {:error, "Invalid scheme. Use http or https"}

      {:error, :extension} ->
        {:error, "Invalid extension. Must be .md"}

      {:error, :integration} ->
        {:error, "Not integrated with #{host(url)} yet"}

      {:error, :username} ->
        {:error, "Invalid #{host(url)} resource"}

      {:error, 404} ->
        {:error, "Page not found"}

      {:error, status} when is_integer(status) ->
        {:error, "Failed to retrieve page content. (error #{status})"}
    end
  end

  # Derives the platform record for `url` from its origin (the URL with the
  # path reset to "/"), creating the record when it does not exist yet.
  defp get_platform(url) do
    url
    |> URI.parse()
    |> Map.merge(%{path: "/"})
    |> URI.to_string()
    |> upsert_platform!()
  end

  # Fetches the platform by name or inserts it; raises on invalid data.
  defp upsert_platform!(name) do
    platform = Repo.get_by(Platform, name: name)

    case platform do
      nil ->
        %Platform{}
        |> Platform.changeset(%{name: name})
        |> Repo.insert!()

      item ->
        item
    end
  end

  # Fetches the blog owning `article` by username, inserting it (linked to
  # its platform) when missing. Returns `{:ok, blog}` or an insert error.
  defp upsert_blog(%Article{} = article) do
    %{url: url, blog: %{username: username}} = article
    platform = get_platform(url)

    case Repo.one(from Blog, where: [username: ^username]) do
      nil ->
        attrs = %{username: username, platform_id: platform.id}

        %Blog{}
        |> Blog.changeset(attrs)
        |> Repo.insert()

      blog ->
        {:ok, blog}
    end
  end

  # Host component of `url`, used in error messages.
  defp host(url), do: URI.parse(url).host

  # Accepts only http(s) URLs whose path looks like a markdown file.
  defp validate_url(url) do
    case URI.parse(url) do
      %URI{scheme: scheme} when scheme not in ["http", "https"] ->
        {:error, :scheme}

      %URI{path: path} ->
        # MIME.from_path/1 maps a ".md" extension to "text/markdown"; a nil
        # path is treated as "/" (no extension) and therefore rejected.
        if MIME.from_path(path || "/") == "text/markdown",
          do: {:ok, url},
          else: {:error, :extension}
    end
  end
end
| 22.901345 | 78 | 0.566477 |
084c2f29d86f918644db15466aa7d7a0244bbdc7 | 148 | exs | Elixir | test/auth/uberauth/dummy_strategy_test.exs | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 806 | 2018-04-07T20:40:33.000Z | 2022-03-30T01:39:57.000Z | test/auth/uberauth/dummy_strategy_test.exs | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 194 | 2018-04-07T13:49:37.000Z | 2022-03-30T19:58:45.000Z | test/auth/uberauth/dummy_strategy_test.exs | doc-ai/accent | e337e16f3658cc0728364f952c0d9c13710ebb06 | [
"BSD-3-Clause"
] | 89 | 2018-04-09T13:55:49.000Z | 2022-03-24T07:09:31.000Z | defmodule AccentTest.Auth.Uberauth.DummyStrategy do
use ExUnit.Case, async: true
doctest(Accent.Auth.Ueberauth.DummyStrategy, import: true)
end
| 29.6 | 60 | 0.810811 |
084c2faa75232955591ec5e152fd49a738109b14 | 2,238 | ex | Elixir | apps/api/lib/api_web/router.ex | asamoal/plural | 5b336f27cb2d775560e35e5323192c42d62e72f5 | [
"Apache-2.0"
] | 59 | 2021-09-16T19:29:39.000Z | 2022-03-31T20:44:24.000Z | apps/api/lib/api_web/router.ex | svilenkov/plural | ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026 | [
"Apache-2.0"
] | 111 | 2021-08-15T09:56:37.000Z | 2022-03-31T23:59:32.000Z | apps/api/lib/api_web/router.ex | svilenkov/plural | ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026 | [
"Apache-2.0"
] | 4 | 2021-12-13T09:43:01.000Z | 2022-03-29T18:08:44.000Z | defmodule ApiWeb.Router do
use ApiWeb, :router
pipeline :api do
plug :accepts, ["json"]
end
pipeline :auth do
plug ApiWeb.GuardianPipeline
plug ApiWeb.Plugs.AbsintheContext
end
forward "/graphiql", Absinthe.Plug.GraphiQL,
schema: GraphQl,
interface: :playground
scope "/gql" do
pipe_through [:api, :auth]
forward "/", Absinthe.Plug,
schema: GraphQl,
document_providers: [GraphQl.APQ, Absinthe.Plug.DocumentProvider.Default]
end
scope "/", ApiWeb do
pipe_through :api
get "/auth/token", AuthController, :token
post "/auth/token", AuthController, :post_token
post "/auth/license", AuthController, :refresh_license
get "/health", HealthController, :ping
post "/signup", UserController, :create
post "/login", UserController, :login
post "/dkr/callback", DockerController, :events
get "/artifacts/:repository/:name", ArtifactController, :show
end
scope "/api", ApiWeb do
pipe_through :api
post "/email", EmailController, :email
post "/usage", PaymentsController, :usage_record
get "/license/:key", LicenseController, :get
end
scope "/mart", ApiWeb do
pipe_through [:api, :auth]
get "/me", UserController, :me
post "/publishers", UserController, :create_publisher
resources "/charts", ChartController, only: [:create] do
post "/version", ChartController, :version
get "/token", ChartController, :token
end
resources "/installations", InstallationController, only: [:create] do
get "/token", InstallationController, :token
end
end
scope "/cm", ApiWeb do
pipe_through [:auth]
get "/:repo/index.yaml", ChartMuseumController, :index_db
get "/:repo/charts/:chart", ChartMuseumController, :get
scope "/api" do
post "/:repo/charts", ChartMuseumController, :create_chart
post "/:repo/prov", ChartMuseumController, :create_prov
delete "/:repo/charts/:name/:version", ChartMuseumController, :delete
get "/:repo/charts", ChartMuseumController, :list
get "/:repo/charts/:chart", ChartMuseumController, :list_versions
get "/:repo/charts/:chart/:version", ChartMuseumController, :get_version
end
end
end
| 26.329412 | 79 | 0.678731 |
084c43865f27d4af5028158a1d330d3cb391f0e1 | 8,878 | ex | Elixir | lib/livebook/live_markdown/import.ex | FE-box/livebook | 44ae4ecf941decb1b3b0b8b4a0811aa6f9aaf83d | [
"Apache-2.0"
] | null | null | null | lib/livebook/live_markdown/import.ex | FE-box/livebook | 44ae4ecf941decb1b3b0b8b4a0811aa6f9aaf83d | [
"Apache-2.0"
] | null | null | null | lib/livebook/live_markdown/import.ex | FE-box/livebook | 44ae4ecf941decb1b3b0b8b4a0811aa6f9aaf83d | [
"Apache-2.0"
defmodule Livebook.LiveMarkdown.Import do
  @moduledoc """
  Converts Live Markdown documents into `Livebook.Notebook` structures.
  """

  alias Livebook.Notebook
  alias Livebook.LiveMarkdown.MarkdownHelpers

  @doc """
  Converts the given Markdown document into a notebook data structure.
  Returns the notebook structure and a list of informative messages/warnings
  related to the imported input.
  """
  @spec notebook_from_markdown(String.t()) :: {Notebook.t(), list(String.t())}
  def notebook_from_markdown(markdown) do
    {_, ast, earmark_messages} = EarmarkParser.as_ast(markdown)
    earmark_messages = Enum.map(earmark_messages, &earmark_message_to_string/1)

    {ast, rewrite_messages} = rewrite_ast(ast)

    notebook =
      ast
      |> group_elements()
      |> build_notebook()

    {notebook, earmark_messages ++ rewrite_messages}
  end

  # Formats an Earmark {severity, line, message} tuple as a warning string.
  defp earmark_message_to_string({_severity, line_number, message}) do
    "Line #{line_number}: #{message}"
  end

  # Does initial pre-processing of the AST, so that it conforms to the expected form.
  # Returns {altered_ast, messages}.
  defp rewrite_ast(ast) do
    {ast, messages1} = rewrite_multiple_primary_headings(ast)
    {ast, messages2} = move_primary_heading_top(ast)
    ast = trim_comments(ast)

    {ast, messages1 ++ messages2}
  end

  # There should be only one h1 tag indicating notebook name,
  # if there are many we downgrade all headings.
  # This doesn't apply to documents exported from Livebook,
  # but may be the case for an arbitrary markdown file,
  # so we do our best to preserve the intent.
  defp rewrite_multiple_primary_headings(ast) do
    primary_headings = Enum.count(ast, &(tag(&1) == "h1"))

    if primary_headings > 1 do
      ast = Enum.map(ast, &downgrade_heading/1)

      message =
        "Downgrading all headings, because #{primary_headings} instances of heading 1 were found"

      {ast, [message]}
    else
      {ast, []}
    end
  end

  # Shifts a heading one level down; h6 becomes bold text.
  defp downgrade_heading({"h1", attrs, content, meta}), do: {"h2", attrs, content, meta}
  defp downgrade_heading({"h2", attrs, content, meta}), do: {"h3", attrs, content, meta}
  defp downgrade_heading({"h3", attrs, content, meta}), do: {"h4", attrs, content, meta}
  defp downgrade_heading({"h4", attrs, content, meta}), do: {"h5", attrs, content, meta}
  defp downgrade_heading({"h5", attrs, content, meta}), do: {"h6", attrs, content, meta}
  defp downgrade_heading({"h6", attrs, content, meta}), do: {"strong", attrs, content, meta}
  defp downgrade_heading(ast_node), do: ast_node

  # This moves h1 together with any preceding comments to the top.
  defp move_primary_heading_top(ast) do
    case Enum.split_while(ast, &(tag(&1) != "h1")) do
      {_ast, []} ->
        {ast, []}

      {leading, [heading | rest]} ->
        # Comments directly above the heading belong to it, so they move too.
        {leading, comments} = split_while_right(leading, &(tag(&1) == :comment))

        if leading == [] do
          {ast, []}
        else
          ast = comments ++ [heading] ++ leading ++ rest
          message = "Moving heading 1 to the top of the notebook"
          {ast, [message]}
        end
    end
  end

  # Tag name of an AST node, or nil for nodes without one (e.g. plain text).
  defp tag(ast_node)
  defp tag({tag, _, _, _}), do: tag
  defp tag(_), do: nil

  # Like Enum.split_while/2, but splits off the longest *suffix* whose
  # elements satisfy `fun`. Returns {prefix, suffix}.
  defp split_while_right(list, fun) do
    {right_rev, left_rev} = list |> Enum.reverse() |> Enum.split_while(fun)
    {Enum.reverse(left_rev), Enum.reverse(right_rev)}
  end

  # Trims one-line comments to allow nice pattern matching
  # on Livebook-specific annotations with no regard to surrounding whitespace.
  defp trim_comments(ast) do
    Enum.map(ast, fn
      {:comment, attrs, [line], %{comment: true}} ->
        {:comment, attrs, [String.trim(line)], %{comment: true}}

      ast_node ->
        ast_node
    end)
  end

  # Builds a list of classified elements from the AST.
  defp group_elements(ast, elems \\ [])

  defp group_elements([], elems), do: elems

  # h1 carries the notebook name; h2 starts a new section.
  defp group_elements([{"h1", _, content, %{}} | ast], elems) do
    group_elements(ast, [{:notebook_name, content} | elems])
  end

  defp group_elements([{"h2", _, content, %{}} | ast], elems) do
    group_elements(ast, [{:section_name, content} | elems])
  end

  # The <!-- livebook:{"force_markdown":true} --> annotation forces the next node
  # to be interpreted as Markdown cell content.
  defp group_elements(
         [
           {:comment, _, [~s/livebook:{"force_markdown":true}/], %{comment: true}},
           ast_node | ast
         ],
         [{:cell, :markdown, md_ast} | rest]
       ) do
    group_elements(ast, [{:cell, :markdown, [ast_node | md_ast]} | rest])
  end

  defp group_elements(
         [
           {:comment, _, [~s/livebook:{"force_markdown":true}/], %{comment: true}},
           ast_node | ast
         ],
         elems
       ) do
    group_elements(ast, [{:cell, :markdown, [ast_node]} | elems])
  end

  # Other livebook:{...} comments carry JSON — either an input cell
  # definition or plain metadata (see livebook_json_to_element/1).
  defp group_elements(
         [{:comment, _, ["livebook:" <> json], %{comment: true}} | ast],
         elems
       ) do
    group_elements(ast, [livebook_json_to_element(json) | elems])
  end

  # Fenced Elixir code blocks become Elixir cells.
  defp group_elements(
         [{"pre", _, [{"code", [{"class", "elixir"}], [source], %{}}], %{}} | ast],
         elems
       ) do
    group_elements(ast, [{:cell, :elixir, source} | elems])
  end

  # Any other node is appended to the current Markdown cell, or starts one.
  defp group_elements([ast_node | ast], [{:cell, :markdown, md_ast} | rest]) do
    group_elements(ast, [{:cell, :markdown, [ast_node | md_ast]} | rest])
  end

  defp group_elements([ast_node | ast], elems) do
    group_elements(ast, [{:cell, :markdown, [ast_node]} | elems])
  end

  # Decodes a livebook JSON annotation into either an input cell element
  # or a metadata element for the following cell/section/notebook.
  defp livebook_json_to_element(json) do
    data = Jason.decode!(json)

    case data do
      %{"livebook_object" => "cell_input"} ->
        {:cell, :input, data}

      _ ->
        {:metadata, data}
    end
  end

  # Builds a notebook from the list of elements obtained in the previous step.
  # Note that the list of elements is reversed:
  # first we group elements by traversing Earmark AST top-down
  # and then aggregate elements into data strictures going bottom-up.
  defp build_notebook(elems, cells \\ [], sections \\ [])

  defp build_notebook([{:cell, :elixir, source} | elems], cells, sections) do
    {metadata, elems} = grab_metadata(elems)
    cell = %{Notebook.Cell.new(:elixir) | source: source, metadata: metadata}
    build_notebook(elems, [cell | cells], sections)
  end

  defp build_notebook([{:cell, :markdown, md_ast} | elems], cells, sections) do
    {metadata, elems} = grab_metadata(elems)
    # md_ast was accumulated in reverse while grouping.
    source = md_ast |> Enum.reverse() |> MarkdownHelpers.markdown_from_ast()
    cell = %{Notebook.Cell.new(:markdown) | source: source, metadata: metadata}
    build_notebook(elems, [cell | cells], sections)
  end

  defp build_notebook([{:cell, :input, data} | elems], cells, sections) do
    {metadata, elems} = grab_metadata(elems)
    attrs = parse_input_attrs(data)
    cell = %{Notebook.Cell.new(:input) | metadata: metadata} |> Map.merge(attrs)
    build_notebook(elems, [cell | cells], sections)
  end

  defp build_notebook([{:section_name, content} | elems], cells, sections) do
    name = MarkdownHelpers.text_from_ast(content)
    {metadata, elems} = grab_metadata(elems)
    section = %{Notebook.Section.new() | name: name, cells: cells, metadata: metadata}
    build_notebook(elems, [], [section | sections])
  end

  # If there are section-less cells, put them in a default one.
  defp build_notebook([{:notebook_name, _content} | _] = elems, cells, sections)
       when cells != [] do
    section = %{Notebook.Section.new() | cells: cells}
    build_notebook(elems, [], [section | sections])
  end

  # If there are section-less cells, put them in a default one.
  defp build_notebook([] = elems, cells, sections) when cells != [] do
    section = %{Notebook.Section.new() | cells: cells}
    build_notebook(elems, [], [section | sections])
  end

  defp build_notebook([{:notebook_name, content} | elems], [], sections) do
    name = MarkdownHelpers.text_from_ast(content)
    {metadata, []} = grab_metadata(elems)
    %{Notebook.new() | name: name, sections: sections, metadata: metadata}
  end

  # If there's no explicit notebook heading, use the defaults.
  defp build_notebook([], [], sections) do
    %{Notebook.new() | sections: sections}
  end

  # Takes optional leading metadata JSON object and returns {metadata, rest}.
  defp grab_metadata([{:metadata, metadata} | elems]) do
    {metadata, elems}
  end

  defp grab_metadata(elems), do: {%{}, elems}

  # Converts decoded input-cell JSON into input cell attributes.
  defp parse_input_attrs(data) do
    # to_existing_atom: input types form a closed set, so no new atoms are
    # created from document content.
    type = data["type"] |> String.to_existing_atom()

    %{
      type: type,
      name: data["name"],
      value: data["value"],
      # Fields with implicit value
      reactive: Map.get(data, "reactive", false),
      props: data |> Map.get("props", %{}) |> parse_input_props(type)
    }
  end

  # Fills in defaults for any props missing from the serialized data.
  defp parse_input_props(data, type) do
    default_props = Notebook.Cell.Input.default_props(type)

    Map.new(default_props, fn {key, default_value} ->
      value = Map.get(data, to_string(key), default_value)
      {key, value}
    end)
  end
end
| 33.756654 | 97 | 0.649133 |
084c806a15a70b8da07483142cf5f9553a29fbdd | 1,193 | exs | Elixir | advanced/errorHandling-try-catch.exs | MaraniMatias/elixir-hola-mundo | 325a6ba623378521ec6f79bd4627a0eb7c6cd1fa | [
"MIT"
] | 1 | 2016-12-25T09:53:53.000Z | 2016-12-25T09:53:53.000Z | advanced/errorHandling-try-catch.exs | MaraniMatias/elixir-hola-mundo | 325a6ba623378521ec6f79bd4627a0eb7c6cd1fa | [
"MIT"
] | null | null | null | advanced/errorHandling-try-catch.exs | MaraniMatias/elixir-hola-mundo | 325a6ba623378521ec6f79bd4627a0eb7c6cd1fa | [
"MIT"
# Demonstrates Elixir error handling: raise/rescue, after, custom
# exceptions, throw/catch and exits. The expected console output of each
# snippet is shown in the trailing comments.

# IO.puts raise ArgumentError, message: "the argument value is invalid"

# Raise a RuntimeError and rescue it, printing its message.
try do
  raise "Oh no!"
rescue
  e in RuntimeError -> IO.puts("An error occurred: " <> e.message)
end
# An error occurred: Oh no!
# :ok

# NOTE(review): exact duplicate of the block above — presumably kept
# intentionally as tutorial repetition.
try do
  raise "Oh no!"
rescue
  e in RuntimeError -> IO.puts("An error occurred: " <> e.message)
end
# An error occurred: Oh no!
# :ok

# After (finally)
# The `after` block always runs, like `finally` in other languages.
try do
  raise "Oh no!"
rescue
  e in RuntimeError -> IO.puts("An error occurred: " <> e.message)
after
  IO.puts "The end!"
end
# An error occurred: Oh no!
# The end!
# :ok

# Typical use of `after`: releasing a resource.
# NOTE(review): the match below crashes with MatchError when example.json
# does not exist — the file must be present when running this script.
{:ok, file} = File.open "example.json"
try do
  # Do hazardous work
after
  File.close(file)
end

# New Errors
# Custom exceptions are structs defined with defexception.
defmodule ExampleError do
  defexception message: "an example error has occurred"
end

try do
  raise ExampleError
rescue
  e in ExampleError -> e
end
# %ExampleError{message: "an example error has occurred"}

# Throws
# throw/catch gives a non-local early exit from a computation.
try do
  for x <- 0..10 do
    if x == 5, do: throw(x)
    IO.puts(x)
  end
catch
  x -> "Caught: #{x}"
end
# 0
# 1
# 2
# 3
# 4
# "Caught: 5"

# Exiting
# The linked process exits, which propagates to the caller via the link.
spawn_link fn -> exit("oh no") end # ** (EXIT from #PID<0.101.0>) "oh no"

# Exits in the current process can be caught with `catch :exit`.
try do
  exit "oh no!"
catch
  :exit, _ -> "exit blocked"
end
# "exit blocked"
| 15.906667 | 73 | 0.636211 |
084c8713a1905f7061d372b5c8c79d1b6198d5a6 | 2,410 | exs | Elixir | mix.exs | tizpuppi/cloak | cdeae97c69f7c54aeadf0921db30dc411d7a9938 | [
"MIT"
] | null | null | null | mix.exs | tizpuppi/cloak | cdeae97c69f7c54aeadf0921db30dc411d7a9938 | [
"MIT"
] | null | null | null | mix.exs | tizpuppi/cloak | cdeae97c69f7c54aeadf0921db30dc411d7a9938 | [
"MIT"
defmodule Cloak.Mixfile do
  @moduledoc false

  use Mix.Project

  # Mix project definition for the `cloak` Hex package (v0.7.0).
  def project do
    [
      app: :cloak,
      version: "0.7.0",
      elixir: "~> 1.0",
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      # Coverage via ExCoveralls; the coveralls tasks run in :test env.
      test_coverage: [tool: ExCoveralls],
      preferred_cli_env: [
        coveralls: :test,
        "coveralls.detail": :test,
        "coveralls.post": :test,
        "coveralls.html": :test
      ],
      source_url: "https://github.com/danielberkompas/cloak",
      description: "Encrypted fields for Ecto.",
      package: package(),
      deps: deps(),
      docs: docs(),
      elixirc_paths: elixirc_paths(Mix.env()),
      aliases: aliases()
    ]
  end

  def application do
    [extra_applications: [:logger]]
  end

  # Runtime deps (ecto, flow), optional integrations (pbkdf2, poison) and
  # dev/test/docs tooling.
  defp deps do
    [
      {:ecto, ">= 1.0.0"},
      {:flow, "~> 0.13"},
      {:pbkdf2, "~> 2.0", optional: true},
      {:poison, ">= 1.5.0", optional: true},
      {:excoveralls, "~> 0.8", only: :test},
      {:postgrex, ">= 0.0.0", only: [:dev, :test]},
      {:ex_doc, ">= 0.0.0", only: [:dev, :docs]},
      {:inch_ex, ">= 0.0.0", only: :docs}
    ]
  end

  # ExDoc configuration: guide pages and module grouping for the docs site.
  defp docs do
    [
      main: "readme",
      extras: [
        "README.md",
        "guides/how_to/install.md": [title: "Install Cloak"],
        "guides/how_to/generate_keys.md": [title: "Generate Encryption Keys"],
        "guides/how_to/encrypt_existing_data.md": [title: "Encrypt Existing Data"],
        "guides/how_to/rotate_keys.md": [title: "Rotate Keys"],
        "guides/upgrading/0.6.x_to_0.7.x.md": [title: "0.6.x to 0.7.x"]
      ],
      extra_section: "GUIDES",
      groups_for_extras: [
        "How To": ~r/how_to/,
        Upgrading: ~r/upgrading/
      ],
      groups_for_modules: [
        Behaviours: [
          Cloak.Cipher,
          Cloak.Vault
        ],
        Ciphers: ~r/Ciphers.AES/,
        "Deprecated Ciphers": ~r/Ciphers.Deprecated/,
        "Ecto Types": ~r/Fields/
      ]
    ]
  end

  # Hex package metadata.
  defp package do
    [
      files: ["lib", "mix.exs", "README.md", "CHANGELOG.md", "LICENSE"],
      maintainers: ["Daniel Berkompas"],
      licenses: ["MIT"],
      links: %{
        "Github" => "https://github.com/danielberkompas/cloak"
      }
    ]
  end

  # Compile test support files only in the :test environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # `mix test` prepares the database before running the suite.
  defp aliases do
    [
      test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"]
    ]
  end
end
| 25.913978 | 83 | 0.535685 |
084c8b8514a15a7591545d3727405c6884ae4989 | 54,480 | ex | Elixir | lib/ecto_adapters_dynamodb.ex | maxig/ecto_adapters_dynamodb | 11e4dd48b53f2a51d7cc8e203e217c1ad8aecd72 | [
"Apache-2.0"
] | null | null | null | lib/ecto_adapters_dynamodb.ex | maxig/ecto_adapters_dynamodb | 11e4dd48b53f2a51d7cc8e203e217c1ad8aecd72 | [
"Apache-2.0"
] | null | null | null | lib/ecto_adapters_dynamodb.ex | maxig/ecto_adapters_dynamodb | 11e4dd48b53f2a51d7cc8e203e217c1ad8aecd72 | [
"Apache-2.0"
] | null | null | null | defmodule Ecto.Adapters.DynamoDB do
@moduledoc """
Ecto adapter for Amazon DynamoDB
Currently for a fairly limited subset of Ecto, enough for basic operations.
"""
#NOTE: in ecto, Repo.get[!] ends up calling:
#-> querable.get
#-> queryable.one
#-> queryable.all
#-> queryable.execute
#-> adapter.execute (possibly prepare somewhere in their too? trace.)
@behaviour Ecto.Adapter
#@behaviour Ecto.Adapter.Storage
#@behaviour Ecto.Adapter.Migration
  # Ecto.Adapter macro callback invoked before each repo module is
  # compiled; this adapter injects no compile-time code.
  defmacro __before_compile__(_env) do
    # Nothing to see here, yet...
  end
use Bitwise, only_operators: true
alias ExAws.Dynamo
alias Ecto.Query.BooleanExpr
# I don't think this is necessary: Probably under child_spec and ensure_all_started
def start_link(repo, opts) do
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.start_link", %{"#{inspect __MODULE__}.start_link-params" => %{repo: repo, opts: opts}})
Agent.start_link fn -> [] end
end
## Adapter behaviour - defined in lib/ecto/adapter.ex (in the ecto github repository)
@doc """
Returns the childspec that starts the adapter process.
"""
def child_spec(repo, opts) do
# TODO: need something here...
# * Pull dynamo db connection options from config
# * Start dynamo connector/aws libraries
# we'll return our own start_link for now, but I don't think we actually need
# an app here, we only need to ensure that our dependencies such as aws libs are started.
#
[:debug_requests, :access_key_id, :secret_access_key, :region, :dynamodb] |> Enum.map(fn key ->
if opts[key] != nil, do: Application.put_env(:ex_aws, key, opts[key])
end)
import Supervisor.Spec
child_spec = worker(__MODULE__, [repo, opts])
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.child_spec", %{"#{inspect __MODULE__}.child_spec-params" => %{repo: repo, child_spec: child_spec, opts: opts}})
child_spec
end
@doc """
Ensure all applications necessary to run the adapter are started.
"""
def ensure_all_started(repo, type) do
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.ensure_all_started", %{"#{inspect __MODULE__}.ensure_all_started-params" => %{type: type, repo: repo}})
with {:ok, _} = Application.ensure_all_started(:ecto_adapters_dynamodb)
do
{:ok, [repo]}
end
end
  # DynamoDB offers no transactional DDL, so migrations are not wrapped in
  # a transaction.
  def supports_ddl_transaction?, do: false

  # Delegates DDL commands issued by Ecto migrations (e.g. table creation)
  # to the adapter's migration module.
  def execute_ddl(repo, command, options) do
    Ecto.Adapters.DynamoDB.Migration.execute_ddl(repo, command, options)
  end
# moved to transaction.ex in ecto 2.1.4
# def in_transaction?(_repo), do: false
#
# def rollback(_repo, _value), do:
# raise BadFunctionError, message: "#{inspect __MODULE__} does not support transactions."
@doc """
Called to autogenerate a value for id/embed_id/binary_id.
Returns the autogenerated value, or nil if it must be
autogenerated inside the storage or raise if not supported.
For the Ecto type, `:id`, the adapter autogenerates a 128-bit integer
For the Ecto type, `:embed_id`, the adapter autogenerates a string, using `Ecto.UUID.generate()`
For the Ecto type, `:binary_id`, the adapter autogenerates a string, using `Ecto.UUID.generate()`
"""
@max_id ((1 <<< 128) - 1) # biggest possible int in 128 bits
def autogenerate(:id), do: Enum.random(1..@max_id)
def autogenerate(:embed_id), do: Ecto.UUID.generate()
def autogenerate(:binary_id), do: Ecto.UUID.generate()
@doc """
Returns the loaders for a given type.
Rather than use the Ecto adapter loaders callback, the adapter builds on ExAws' decoding functionality, please see ExAws's `ExAws.Dynamo.decode_item` and the private function, `custom_decode`, in this module, which at this time only loads :utc_datetime and :naive_datetime.
"""
def loaders(_primitive, type), do: [type]
@doc """
Returns the dumpers for a given type.
We rely on ExAws encoding functionality during insertion and update to properly format types for DynamoDB. Please see ExAws `ExAws.Dynamo.update_item` and `ExAws.Dynamo.put_item` for specifics. Currently, we only modify :utc_datetime and :naive_datetime, appending the UTC offset, "Z", to the datetime string before passing to ExAws.
"""
def dumpers(:utc_datetime, datetime), do: [datetime, &to_iso_string/1]
def dumpers(:naive_datetime, datetime), do: [datetime, &to_iso_string/1]
def dumpers(_primitive, type), do: [type]
  # Add UTC offset
  # We are adding the offset here also for the :naive_datetime, this
  # assumes we are getting a UTC date (which does correspond with the
  # timestamps() macro but not necessarily with :naive_datetime in general)
  #
  # Renders the datetime as an ISO-8601 string with a trailing "Z" and
  # wraps it in an {:ok, _} dumper result.
  # NOTE(review): relies on Ecto.DateTime, which is deprecated in later
  # Ecto versions — confirm against the Ecto version this project pins.
  defp to_iso_string(datetime) do
    {:ok, (datetime |> Ecto.DateTime.cast! |> Ecto.DateTime.to_iso8601) <> "Z"}
  end
@doc """
Commands invoked to prepare a query for `all`, `update_all` and `delete_all`.
The returned result is given to `execute/6`.
"""
#@callback prepare(atom :: :all | :update_all | :delete_all, query :: Ecto.Query.t) ::
# {:cache, prepared} | {:nocache, prepared}
def prepare(:all, query) do
# 'preparing' is more a SQL concept - Do we really need to do anything here or just pass the params through?
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.prepare: :all", %{"#{inspect __MODULE__}.prepare-params" => %{query: inspect(query, structs: false)}})
{:nocache, {:all, query}}
end
def prepare(:update_all, query) do
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.prepare: :update_all", %{"#{inspect __MODULE__}.prepare-params" => %{query: inspect(query, structs: false)}})
{:nocache, {:update_all, query}}
end
# do: {:cache, {System.unique_integer([:positive]), @conn.update_all(query)}}
def prepare(:delete_all, query) do
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.prepare: :delete_all", %{"#{inspect __MODULE__}.prepare-params" => %{query: inspect(query, structs: false)}})
{:nocache, {:delete_all, query}}
end
# do: {:cache, {System.unique_integer([:positive]), @conn.delete_all(query)}}
@doc """
Executes a previously prepared query.
It must return a tuple containing the number of entries and
the result set as a list of lists. The result set may also be
`nil` if a particular operation does not support them.
The `meta` field is a map containing some of the fields found
in the `Ecto.Query` struct.
It receives a process function that should be invoked for each
selected field in the query result in order to convert them to the
expected Ecto type. The `process` function will be nil if no
result set is expected from the query.
"""
#@callback execute(repo, query_meta, query, params :: list(), process | nil, options) :: result when
#          result: {integer, [[term]] | nil} | no_return,
#          query: {:nocache, prepared} |
#                 {:cached, (prepared -> :ok), cached} |
#                 {:cache, (cached -> :ok), prepared}
# NOTE: leftover debug output (IO.inspect of the lookup fields and IO.puts
# timing lines) has been removed — it wrote to stdout on every query.
def execute(repo, meta, {:nocache, {func, prepared}}, params, process, opts) do
  ecto_dynamo_log(:debug, "#{inspect __MODULE__}.execute", %{"#{inspect __MODULE__}.execute-params" => %{repo: repo, meta: meta, prepared: prepared, params: params, process: process, opts: opts}})

  {table, model} = prepared.from
  # Reject where-clause operators the adapter cannot translate.
  validate_where_clauses!(prepared)
  lookup_fields = extract_lookup_fields(prepared.wheres, params, [])

  # :scan_limit is the adapter's own option; translate it to Dynamo's :limit.
  limit_option = opts[:scan_limit]
  scan_limit = if is_integer(limit_option), do: [limit: limit_option], else: []

  # Ecto migration does not know to specify 'scan: true' to retrieve the persisted migration versions
  # from line 34, file "deps/ecto/lib/ecto/migration/schema_migration.ex"
  migration_source = Keyword.get(repo.config, :migration_source, "schema_migrations")

  updated_opts =
    if table == migration_source do
      ecto_dynamo_log(:debug, "#{inspect __MODULE__}.execute: table name corresponds with migration source: #{inspect migration_source}. Setting options for recursive scan.", %{})
      Keyword.drop(opts, [:timeout, :log]) ++ [recursive: true]
    else
      Keyword.drop(opts, [:scan_limit, :limit]) ++ scan_limit
    end

  ecto_dynamo_log(:debug, "#{inspect __MODULE__}.execute: local variables", %{"#{inspect __MODULE__}.execute-vars" => %{table: table, lookup_fields: lookup_fields, scan_limit: scan_limit}})

  case func do
    :delete_all ->
      delete_all(table, lookup_fields, updated_opts)

    :update_all ->
      update_all(table, lookup_fields, updated_opts, prepared.updates, params)

    :all ->
      ecto_dynamo_log(:info, "#{inspect __MODULE__}.execute: :all", %{"#{inspect __MODULE__}.execute-all-vars" => %{table: table, lookup_fields: lookup_fields, updated_opts: updated_opts}})

      result = Ecto.Adapters.DynamoDB.Query.get_item(table, lookup_fields, updated_opts)
      ecto_dynamo_log(:debug, "#{inspect __MODULE__}.execute: all: result", %{"#{inspect __MODULE__}.execute-all-result" => inspect result})

      if opts[:query_info_key], do: Ecto.Adapters.DynamoDB.QueryInfo.put(opts[:query_info_key], extract_query_info(result))

      if result == %{} do
        # Empty map means "not found"
        {0, []}
      else
        # Map each field's DynamoDB source attribute name back to the schema
        # field so decode_item can translate raw items into schema fields.
        sources =
          model.__schema__(:fields)
          |> Enum.into(%{}, fn f ->
            {model.__schema__(:field_source, f), f}
          end)

        cond do
          !result["Count"] and !result["Responses"] ->
            # Plain get_item response: a single "Item".
            decoded = decode_item(result["Item"], model, sources, prepared.select)
            {1, [decoded]}

          true ->
            # batch_get_item returns "Responses" rather than "Items"
            results_to_decode = if result["Items"], do: result["Items"], else: result["Responses"][table]
            decoded = Enum.map(results_to_decode, &(decode_item(&1, model, sources, prepared.select)))
            {length(decoded), decoded}
        end
      end
  end
end
# delete_all allows for the recursive option, scanning through multiple pages
# Entry point for Repo.delete_all: pages through matching records and
# batch-deletes them. Only the primary-key attributes are projected on the
# fetch, since that is all a delete request needs.
defp delete_all(table, lookup_fields, opts) do
ecto_dynamo_log(:info, "#{inspect __MODULE__}.delete_all", %{"#{inspect __MODULE__}.delete_all-params" => %{table: table, lookup_fields: lookup_fields, opts: opts}})
# select only the key
{:primary, key_list} = Ecto.Adapters.DynamoDB.Info.primary_key!(table)
# Whether this lookup is a scan or an indexed query changes how the
# recursion option is interpreted by parse_recursive_option.
scan_or_query = Ecto.Adapters.DynamoDB.Query.scan_or_query?(table, lookup_fields)
recursive = Ecto.Adapters.DynamoDB.Query.parse_recursive_option(scan_or_query, opts)
# Disable get_item-level recursion; paging is driven by delete_all_recursive
# so deletes happen between pages instead of loading all pages into memory.
updated_opts = prepare_recursive_opts(opts ++ [projection_expression: Enum.join(key_list, ", ")])
delete_all_recursive(table, lookup_fields, updated_opts, recursive, %{}, 0)
end
# Fetches one page of matching keys, batch-deletes it, and recurses while
# DynamoDB returns a "LastEvaluatedKey" and the recursion option allows.
# Accumulates Count/ScannedCount/UnprocessedItems metadata in `query_info`
# and the running deleted-record count in `total_processed`.
defp delete_all_recursive(table, lookup_fields, opts, recursive, query_info, total_processed) do
# query the table for which records to delete
fetch_result = Ecto.Adapters.DynamoDB.Query.get_item(table, lookup_fields, opts)
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.delete_all_recursive: fetch_result", %{"#{inspect __MODULE__}.delete_all_recursive-fetch_result" => inspect fetch_result})
# Normalize the possible response shapes (scan/query, get_item,
# batch_get_item) into a plain list of raw items.
items = case fetch_result do
%{"Items" => fetch_items} -> fetch_items
%{"Item" => item} -> [item]
%{"Responses" => table_map} -> table_map[table]
_ -> []
end
# Build one batch_write_item delete request per fetched item, decoding the
# projected key attributes from their DynamoDB type maps.
prepared_data = for key_list <- Enum.map(items, &Map.to_list/1) do
key_map = for {key, val_map} <- key_list, into: %{}, do: {key, Dynamo.Decoder.decode(val_map)}
[delete_request: [key: key_map]]
end
unprocessed_items = if prepared_data != [] do
batch_delete(table, prepared_data)
else
%{}
end
# Items DynamoDB declined to process do not count as deleted.
num_processed =
length(prepared_data) - if !unprocessed_items[table], do: 0, else: length(unprocessed_items[table])
# Fold this page's metadata into the running totals; LastEvaluatedKey is
# overwritten (not summed) so the final value is the last page's cursor.
updated_query_info = Enum.reduce(fetch_result, query_info, fn({key, val}, acc) ->
case key do
"Count" -> Map.update(acc, key, val, fn x -> x + val end)
"ScannedCount" -> Map.update(acc, key, val, fn x -> x + val end)
"LastEvaluatedKey" -> Map.update(acc, key, val, fn _ -> fetch_result["LastEvaluatedKey"] end)
_ -> acc
end
end) |> Map.update("UnprocessedItems", unprocessed_items, fn map -> if map == %{}, do: %{}, else: %{table => map[table] ++ unprocessed_items[table]} end)
updated_recursive = Ecto.Adapters.DynamoDB.Query.update_recursive_option(recursive)
if fetch_result["LastEvaluatedKey"] != nil and updated_recursive.continue do
opts_with_offset = opts ++ [exclusive_start_key: fetch_result["LastEvaluatedKey"]]
delete_all_recursive(table, lookup_fields, opts_with_offset, updated_recursive.new_value, updated_query_info, total_processed + num_processed)
else
# We're not retrying unprocessed items yet, but we are providing the relevant info in the QueryInfo agent if :query_info_key is supplied
if opts[:query_info_key], do: Ecto.Adapters.DynamoDB.QueryInfo.put(opts[:query_info_key], updated_query_info)
{num_processed + total_processed, nil}
end
end
# Issues a single BatchWriteItem containing delete requests and returns the
# "UnprocessedItems" portion of the response — the requests DynamoDB
# declined to process in this call.
defp batch_delete(table, prepared_data) do
  response =
    %{table => prepared_data}
    |> Dynamo.batch_write_item()
    |> ExAws.request()
    |> handle_error!(%{table: table, records: []})

  response["UnprocessedItems"]
end
# Entry point for Repo.update_all: builds the ADD/DELETE portion of the
# update expression up front, then pages through matching records applying
# the update. SET/REMOVE (including :push/:pull emulation) may have to be
# built per item, so they are deferred to the batch step when needed.
defp update_all(table, lookup_fields, opts, updates, params) do
ecto_dynamo_log(:info, "#{inspect __MODULE__}.update_all", %{"#{inspect __MODULE__}.update_all-params" => %{table: table, lookup_fields: lookup_fields, opts: opts}})
scan_or_query = Ecto.Adapters.DynamoDB.Query.scan_or_query?(table, lookup_fields)
recursive = Ecto.Adapters.DynamoDB.Query.parse_recursive_option(scan_or_query, opts)
key_list = Ecto.Adapters.DynamoDB.Info.primary_key!(table)
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.update_all: key_list", %{"#{inspect __MODULE__}.update_all-key_list" => inspect key_list})
# The remove statement must be constructed after finding pull-indexes, but it
# also includes possibly removing nil fields, and since we have one handler for
# both set and remove, we call it during the batch update process
{update_expression, update_fields_sans_set_remove, set_remove_fields} = construct_update_expression(updates, params, opts)
ecto_dynamo_log(:info, "#{inspect __MODULE__}.update_all: update fields", %{"#{inspect __MODULE__}.update_all-update_fields" => %{update_fields_sans_set_remove: inspect(update_fields_sans_set_remove), set_remove_fields: inspect(set_remove_fields)}})
attribute_names = construct_expression_attribute_names(update_fields_sans_set_remove)
attribute_values = construct_expression_attribute_values(update_fields_sans_set_remove, opts)
base_update_options = [expression_attribute_names: attribute_names,
update_expression: update_expression,
return_values: :all_new]
# Paging is driven by update_all_recursive, so get_item recursion is disabled.
updated_opts = prepare_recursive_opts(opts)
update_options = maybe_add_attribute_values(base_update_options, attribute_values)
# A :pull action whose element index is not given in opts[:pull_indexes]
# must be resolved per item (by locating the element in each record's list).
pull_actions_without_index =
Keyword.keys(set_remove_fields[:pull])
|> Enum.any?(fn x -> !Enum.member?(Keyword.keys(maybe_list(opts[:pull_indexes])), x) end)
# If all pull indexes are known up front, fold SET/REMOVE into the options
# once and pass an empty set_remove_fields so batch_update skips per-item work.
{new_update_options, new_set_remove_fields} =
if pull_actions_without_index do
{update_options, set_remove_fields}
else
merged_pull_indexes = Keyword.merge(set_remove_fields[:pull], maybe_list(opts[:pull_indexes]))
opts_with_pull_indexes = Keyword.update(opts, :pull_indexes, merged_pull_indexes, fn _ -> merged_pull_indexes end)
{update_batch_update_options(update_options, set_remove_fields, opts_with_pull_indexes), []}
end
update_all_recursive(table, lookup_fields, updated_opts, new_update_options, key_list, new_set_remove_fields, recursive, %{}, 0)
end
# Fetches one page of matching records, updates each via batch_update/6, and
# recurses while a "LastEvaluatedKey" cursor is present and recursion is
# allowed. `query_info` accumulates Count/ScannedCount metadata; the total
# number of updated records is threaded through `total_updated`.
defp update_all_recursive(table, lookup_fields, opts, update_options, key_list, set_remove_fields, recursive, query_info, total_updated) do
fetch_result = Ecto.Adapters.DynamoDB.Query.get_item(table, lookup_fields, opts)
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.update_all_recursive: fetch_result", %{"#{inspect __MODULE__}.update_all_recursive-fetch_result" => inspect(fetch_result)})
# Only scan/query responses carry counts; other shapes leave query_info as-is.
updated_query_info = case fetch_result do
%{"Count" => last_count, "ScannedCount" => last_scanned_count} ->
%{"Count" => last_count + Map.get(query_info, "Count", 0),
"ScannedCount" => last_scanned_count + Map.get(query_info, "ScannedCount", 0),
"LastEvaluatedKey" => Map.get(fetch_result, "LastEvaluatedKey")}
_ -> query_info
end
# Normalize the possible response shapes to a plain list of raw items.
items = case fetch_result do
%{"Items" => fetch_items} -> fetch_items
%{"Item" => item} -> [item]
%{"Responses" => table_map} -> table_map[table]
_ -> []
end
num_updated = if items != [] do
batch_update(table, items, key_list, update_options, set_remove_fields, opts)
else
0
end
updated_recursive = Ecto.Adapters.DynamoDB.Query.update_recursive_option(recursive)
if fetch_result["LastEvaluatedKey"] != nil and updated_recursive.continue do
opts_with_offset = opts ++ [exclusive_start_key: fetch_result["LastEvaluatedKey"]]
update_all_recursive(table, lookup_fields, opts_with_offset, update_options, key_list, set_remove_fields, updated_recursive.new_value, updated_query_info, total_updated + num_updated)
else
if opts[:query_info_key], do: Ecto.Adapters.DynamoDB.QueryInfo.put(opts[:query_info_key], updated_query_info)
{total_updated + num_updated, []}
end
end
# Applies the update expression to each fetched item with an individual
# update_item call (DynamoDB has no batch update), keyed by the item's
# primary key. Returns the number of items actually updated.
defp batch_update(table, items, key_list, update_options, set_remove_fields, opts) do
Enum.reduce(items, 0, fn(result_to_update, acc) ->
filters = get_key_values_dynamo_map(result_to_update, key_list)
# we only update this on a case-by-case basis if pull actions
# without specific indexes are specified
options_with_set_and_remove = case set_remove_fields do
[] -> update_options
_ ->
# Resolve each :pull target to the matching element indexes in THIS
# item's list attribute, then rebuild SET/REMOVE for this item.
pull_fields_with_indexes =
Enum.map(set_remove_fields[:pull], fn {field_atom, val} ->
list = result_to_update[to_string(field_atom)]
{field_atom, find_all_indexes_in_dynamodb_list(list, val)}
end)
merged_pull_indexes = Keyword.merge(pull_fields_with_indexes, maybe_list(opts[:pull_indexes]))
opts_with_pull_indexes = Keyword.update(opts, :pull_indexes, merged_pull_indexes, fn _ -> merged_pull_indexes end)
update_batch_update_options(update_options, set_remove_fields, opts_with_pull_indexes)
end
# 'options_with_set_and_remove' might not have the key, ':expression_attribute_values',
# when there are only removal statements.
record = if options_with_set_and_remove[:expression_attribute_values],
do: [options_with_set_and_remove[:expression_attribute_values] |> Enum.into(%{})],
else: []
# An all-whitespace expression means there is nothing to change for this
# item (e.g. only pulls that matched no elements); skip the request.
if options_with_set_and_remove[:update_expression] |> String.trim != "" do
Dynamo.update_item(table, filters, options_with_set_and_remove) |> ExAws.request |> handle_error!(%{table: table, records: record ++ []})
acc + 1
else
acc
end
end)
end
# Merges the SET/REMOVE clauses derived from set_remove_fields (including
# :push list_append and :pull index removal) into the base ADD/DELETE
# update options, combining attribute names/values from both sources.
defp update_batch_update_options(update_options, set_remove_fields, opts) do
attribute_names = construct_expression_attribute_names(Keyword.values(set_remove_fields) |> List.flatten)
# Both plain :set fields and :push values need expression attribute values.
set_and_push_fields = maybe_list(set_remove_fields[:set]) ++ maybe_list(set_remove_fields[:push])
opts_with_push = opts ++ Keyword.take(set_remove_fields, [:push])
attribute_values = construct_expression_attribute_values(set_and_push_fields, opts_with_push)
set_statement = construct_set_statement(set_remove_fields[:set], opts_with_push)
opts_for_construct_remove = Keyword.take(set_remove_fields, [:pull]) ++ Keyword.take(opts, [:pull_indexes, :remove_nil_fields])
remove_statement = construct_remove_statement(set_remove_fields[:set], opts_for_construct_remove)
# Concatenate SET, REMOVE and the pre-built (ADD/DELETE) expression;
# String.trim drops padding when any part is empty.
base_update_options =
[expression_attribute_names: Map.merge(attribute_names, update_options[:expression_attribute_names]),
update_expression: set_statement <> " " <> remove_statement <> " " <> update_options[:update_expression] |> String.trim,
return_values: :all_new]
maybe_add_attribute_values(base_update_options, attribute_values ++ maybe_list(update_options[:expression_attribute_values]))
end
# Finds the zero-based indexes of every element equal to `target` in an
# encoded DynamoDB list attribute; used to emulate Ecto's :pull update
# (DynamoDB REMOVE works by element index, not by value).
defp find_all_indexes_in_dynamodb_list(dynamodb_list, target) do
  decoded = Dynamo.Decoder.decode(dynamodb_list)

  for {element, index} <- Enum.with_index(decoded), element == target do
    index
  end
end
# During delete_all's and update_all's recursive procedure we keep the
# paging loop at the top level (acting between pages) rather than letting
# get_item gather every page into memory first — so page_limit is dropped
# and get_item-level recursion is forced off.
defp prepare_recursive_opts(opts) do
  trimmed = Keyword.delete(opts, :page_limit)
  Keyword.update(trimmed, :recursive, false, fn _current -> false end)
end
@doc """
Inserts a single new struct in the data store.
## Autogenerate
The primary key will be automatically included in `returning` if the
field has type `:id` or `:binary_id` and no value was set by the
developer or none was autogenerated by the adapter.
"""
#@callback insert(repo, schema_meta, fields, on_conflict, returning, options) ::
#            {:ok, fields} | {:invalid, constraints} | no_return
# def insert(_,_,_,_,_) do
def insert(repo, schema_meta, fields, on_conflict, returning, opts) do
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.insert", %{"#{inspect __MODULE__}.insert-params" => %{repo: repo, schema_meta: schema_meta, fields: fields, on_conflict: on_conflict, returning: returning, opts: opts}})
# Nil fields are written as explicit NULL attributes unless disabled per-call
# (:insert_nil_fields) or globally via application config.
insert_nil_field_option = Keyword.get(opts, :insert_nil_fields, true)
do_not_insert_nil_fields = insert_nil_field_option == false || Application.get_env(:ecto_adapters_dynamodb, :insert_nil_fields) == false
{_, table} = schema_meta.source
model = schema_meta.schema
fields_map = Enum.into(fields, %{})
record = if do_not_insert_nil_fields, do: fields_map, else: build_record_map(model, fields_map)
ecto_dynamo_log(:info, "#{inspect __MODULE__}.insert: local variables", %{"#{inspect __MODULE__}.insert-vars" => %{table: table, record: record}})
{:primary, key_list} = Ecto.Adapters.DynamoDB.Info.primary_key!(table)
hash_key = hd(key_list)
# DynamoDB's put_item overwrites by default (the :replace_all behavior);
# any other on_conflict choice requires a not-exists condition on the key.
on_conflict_action = elem(on_conflict, 0)
options = case on_conflict_action do
:replace_all -> []
_ ->
attribute_names = for k <- key_list, into: %{}, do: {"##{k}", k}
conditions = for k <- key_list, do: "attribute_not_exists(##{k})"
condition_expression = Enum.join(conditions, " and ")
[expression_attribute_names: attribute_names,
condition_expression: condition_expression]
end
# A failed condition check means the record already exists (conflict).
case Dynamo.put_item(table, record, options) |> ExAws.request |> handle_error!(%{table: table, records: [record]}) do
{:error, "ConditionalCheckFailedException"} ->
case on_conflict_action do
# Per discussion with Jose Valim (https://github.com/elixir-ecto/ecto/issues/2378)
# clarifying the adapter should return nothing if there is no `:returning` specified,
# and what we thought was to be returned as a `nil` id, is only for cases where
# "the field is autogenerated by the database" (https://hexdocs.pm/ecto/Ecto.Repo.html)
:nothing -> {:ok, []}
:raise ->
# This constraint name yields the correct behavior in the case the user
# has specified a unique constraint on the primary key in their schema:
constraint_name = "#{table}_#{hash_key}_index"
{:invalid, [unique: constraint_name]}
end
%{} ->
{:ok, []}
end
end
# Inserts multiple records with BatchWriteItem put requests (chunked by
# batch_write/3). Returns {total_inserted, nil}; DynamoDB batch writes do
# not honor on_conflict conditions, so conflicts silently overwrite.
def insert_all(repo, schema_meta, field_list, fields, on_conflict, returning, opts) do
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.insert_all", %{"#{inspect __MODULE__}.insert_all-params" => %{repo: repo, schema_meta: schema_meta, field_list: field_list, fields: fields, on_conflict: on_conflict, returning: returning, opts: opts}})
# Same nil-field policy as insert/6: per-call option overrides app config.
insert_nil_field_option = Keyword.get(opts, :insert_nil_fields, true)
do_not_insert_nil_fields = insert_nil_field_option == false || Application.get_env(:ecto_adapters_dynamodb, :insert_nil_fields) == false
{_, table} = schema_meta.source
model = schema_meta.schema
# Shape each record as a batch_write_item put request.
prepared_fields = Enum.map(fields, fn(field_set) ->
mapped_fields = Enum.into(field_set, %{})
record = if do_not_insert_nil_fields, do: mapped_fields, else: build_record_map(model, mapped_fields)
[put_request: [item: record]]
end)
ecto_dynamo_log(:info, "#{inspect __MODULE__}.insert_all: local variables", %{"#{inspect __MODULE__}.insert_all-vars" => %{table: table, records: get_records_from_fields(prepared_fields)}})
batch_write(table, prepared_fields, opts)
end
# DynamoDB will reject an entire batch of insert_all() records if there are more than 25 requests.
# https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html
# batch_write/3 will break the list into chunks of 25 items and insert each separately.
# Returns {total_successfully_processed, nil}.
defp batch_write(table, prepared_fields, opts) do
batch_write_limit = 25
response_element = "UnprocessedItems"
grouped_records = Enum.chunk_every(prepared_fields, batch_write_limit)
num_batches = length grouped_records
# Break the prepared_fields into chunks of at most 25 elements to be batch inserted, accumulating
# the total count of records and appropriate results as it loops through the reduce.
{total_processed, results} = grouped_records
|> Stream.with_index
|> Enum.reduce({0, []}, fn({field_group, i}, {running_total_processed, batch_write_results}) ->
{total_batch_processed, batch_write_attempt} = handle_batch_write(field_group, table, response_element)
# Log depth of 11 will capture the full data structure returned in any UnprocessedItems - https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html
ecto_dynamo_log(:debug, "#{inspect __MODULE__}.batch_write #{i + 1} of #{num_batches}: local variables", %{"#{inspect __MODULE__}.insert_all-batch_write" => %{table: table, field_group: field_group, results: batch_write_attempt}}, [depth: 11])
# We're not retrying unprocessed items yet, but we are providing the relevant info in the QueryInfo agent if :query_info_key is supplied
if opts[:query_info_key] do
query_info = extract_query_info(batch_write_attempt)
Ecto.Adapters.DynamoDB.QueryInfo.update(opts[:query_info_key], [query_info], fn(list) -> list ++ [query_info] end)
end
{running_total_processed + total_batch_processed, batch_write_results ++ [batch_write_attempt]}
end)
# Flatten all batches' unprocessed items into one structure for the log.
result_body_for_log = %{table => Enum.flat_map(results, fn(res) -> res[response_element][table] || [] end)}
ecto_dynamo_log(:info, "#{inspect __MODULE__}.batch_write: batch_write_attempt result", %{"#{inspect __MODULE__}.insert_all-batch_write" => inspect %{response_element => (if result_body_for_log[table] == [], do: %{}, else: result_body_for_log)}})
{total_processed, nil}
end
# Sends one <=25-item BatchWriteItem request and returns
# {number_actually_processed, raw_response}; items reported under
# `response_element` ("UnprocessedItems") are subtracted from the count.
defp handle_batch_write(field_group, table, response_element) do
  results =
    %{table => field_group}
    |> Dynamo.batch_write_item()
    |> ExAws.request()
    |> handle_error!(%{table: table, records: get_records_from_fields(field_group)})

  requested = length(field_group)

  case results[response_element] do
    unprocessed when unprocessed == %{} -> {requested, results}
    unprocessed -> {requested - length(unprocessed[table]), results}
  end
end
# Pulls the raw record maps back out of a list of batch-write put requests.
defp get_records_from_fields(fields) do
  Enum.map(fields, fn field ->
    [put_request: [item: record]] = field
    record
  end)
end
# Builds the full record to insert: every schema field (keyed by its
# DynamoDB source attribute name) defaulted from the struct, overlaid with
# the supplied values, with empty strings normalized to nil.
defp build_record_map(model, fields_to_insert) do
# Ecto does not convert empty strings to nil before passing them
# to Repo.insert_all, and ExAws will remove empty strings (as well as empty lists)
# when building the insertion query but not nil values. We don't mind the removal
# of empty lists since those cannot be inserted to indexed fields, but we'd like to
# catch the removal of fields with empty strings by ExAws to support our option, :remove_nil_fields,
# so we convert these to nil.
fields = model.__schema__(:fields)
# Map each schema field to its source attribute name (they can differ).
sources = fields |> Enum.into(%{}, fn f -> {f, model.__schema__(:field_source, f)} end)
empty_strings_to_nil = fields_to_insert
|> Enum.map(fn {field, val} -> {field, (if val == "", do: nil, else: val)} end)
|> Enum.into(%{})
# Start from the struct defaults, rekey by source attribute name, then
# overlay the (nil-normalized) values supplied for this insert.
model.__struct__
|> Map.delete(:__meta__)
|> Map.from_struct
|> Enum.reduce(%{}, fn {k, v}, acc ->
Map.put(acc, Map.get(sources, k), v)
end)
|> Map.merge(empty_strings_to_nil)
end
# In testing, 'filters' contained only the primary key and value
# TODO: handle cases of more than one tuple in 'filters'?
@doc """
Deletes a single struct identified by its primary-key `filters`.

Returns `{:ok, []}` on success, or `{:error, :stale}` when the
`attribute_exists` condition fails (the record is not present).
"""
def delete(repo, schema_meta, filters, opts) do
  ecto_dynamo_log(:debug, "#{inspect __MODULE__}.delete", %{"#{inspect __MODULE__}.delete-params" => %{repo: repo, schema_meta: schema_meta, filters: filters, opts: opts}})
  {_, table} = schema_meta.source

  # We offer the :range_key option for tables with composite primary key
  # since Ecto will not provide the range_key value needed for the query.
  # If :range_key is not provided, check if the table has a composite
  # primary key and query for all the key values
  updated_filters =
    case opts[:range_key] do
      nil ->
        {:primary, key_list} = Ecto.Adapters.DynamoDB.Info.primary_key!(table)

        if (length key_list) > 1 do
          updated_opts = opts ++ [projection_expression: Enum.join(key_list, ", ")]
          filters_as_strings = for {field, val} <- filters, do: {Atom.to_string(field), {val, :==}}
          fetch_result = Ecto.Adapters.DynamoDB.Query.get_item(table, filters_as_strings, updated_opts)

          items = case fetch_result do
            %{"Items" => fetch_items} -> fetch_items
            %{"Item" => item} -> [item]
            _ -> []
          end

          # Fixed: these messages previously printed the literal text
          # "__MODULE__" (no interpolation) and claimed to come from "update".
          if items == [], do: raise "#{inspect __MODULE__}.delete error: no results found for record: #{inspect filters}"
          if (length items) > 1, do: raise "#{inspect __MODULE__}.delete error: more than one result found for record: #{inspect filters}"

          # NOTE(review): unlike update/6, key values here are taken from the
          # raw DynamoDB type map (e.g. %{"N" => "5"} -> "5") without
          # Decoder.decode, so numeric keys stay strings — confirm intended.
          for {field, key_map} <- Map.to_list(hd items) do
            [{_field_type, val}] = Map.to_list(key_map)
            {field, val}
          end
        else
          filters
        end

      range_key ->
        [range_key | filters]
    end

  attribute_names = construct_expression_attribute_names(keys_to_atoms(filters))
  base_options = [expression_attribute_names: attribute_names]

  # Fixed: construct_condition_expression/1 matches a single {field, value}
  # tuple; previously the whole filters *list* was passed, which raised a
  # FunctionClauseError. Build one attribute_exists clause per filter and
  # join them, mirroring update/6.
  condition_expression =
    filters
    |> Enum.map(&construct_condition_expression/1)
    |> Enum.join(" AND ")

  options = base_options ++ [condition_expression: condition_expression]

  # 'options' might not have the key, ':expression_attribute_values', when there are only removal statements
  record = if options[:expression_attribute_values], do: [options[:expression_attribute_values] |> Enum.into(%{})], else: []

  case Dynamo.delete_item(table, updated_filters, options) |> ExAws.request |> handle_error!(%{table: table, records: record ++ []}) do
    %{} -> {:ok, []}
    {:error, "ConditionalCheckFailedException"} -> {:error, :stale}
  end
end
@doc """
Updates a single struct identified by its primary-key `filters`, applying
`fields` via a DynamoDB SET/REMOVE update expression.

Returns `{:ok, []}` on success, or `{:error, :stale}` when the
`attribute_exists` condition fails (the record is not present).
"""
def update(repo, schema_meta, fields, filters, returning, opts) do
  ecto_dynamo_log(:debug, "#{inspect __MODULE__}.update", %{"#{inspect __MODULE__}.update-params" => %{repo: repo, schema_meta: schema_meta, fields: fields, filters: filters, returning: returning, opts: opts}})
  {_, table} = schema_meta.source

  # We offer the :range_key option for tables with composite primary key
  # since Ecto will not provide the range_key value needed for the query.
  # If :range_key is not provided, check if the table has a composite
  # primary key and query for all the key values
  updated_filters =
    case opts[:range_key] do
      nil ->
        {:primary, key_list} = Ecto.Adapters.DynamoDB.Info.primary_key!(table)

        if (length key_list) > 1 do
          updated_opts = opts ++ [projection_expression: Enum.join(key_list, ", ")]
          filters_as_strings = for {field, val} <- filters, do: {Atom.to_string(field), {val, :==}}
          fetch_result = Ecto.Adapters.DynamoDB.Query.get_item(table, filters_as_strings, updated_opts)

          items = case fetch_result do
            %{"Items" => fetch_items} -> fetch_items
            %{"Item" => item} -> [item]
            _ -> []
          end

          # Fixed: these messages previously printed the literal text
          # "__MODULE__" instead of interpolating the module name.
          if items == [], do: raise "#{inspect __MODULE__}.update error: no results found for record: #{inspect filters}"
          if (length items) > 1, do: raise "#{inspect __MODULE__}.update error: more than one result found for record: #{inspect filters}"

          for {field, key_map} <- Map.to_list(hd items) do
            {field, ExAws.Dynamo.Decoder.decode(key_map)}
          end
        else
          filters
        end

      range_key ->
        [range_key | filters]
    end

  update_expression = construct_update_expression(fields, opts)
  # add updated_filters to attribute_ names and values for condition_expression
  attribute_names = construct_expression_attribute_names(fields ++ keys_to_atoms(filters))
  attribute_values = construct_expression_attribute_values(fields, opts)

  base_options = [expression_attribute_names: attribute_names,
                  update_expression: update_expression]

  # Require every filter attribute to exist so updating a missing record
  # fails the conditional check instead of creating a new item.
  condition_expression =
    filters
    |> Enum.map(fn(tuple) -> construct_condition_expression(tuple) end)
    |> Enum.join(" AND ")

  options = maybe_add_attribute_values(base_options, attribute_values)
            ++ [condition_expression: condition_expression]

  # 'options' might not have the key, ':expression_attribute_values', when there are only removal statements
  record = if options[:expression_attribute_values], do: [options[:expression_attribute_values] |> Enum.into(%{})], else: []

  case Dynamo.update_item(table, updated_filters, options) |> ExAws.request |> handle_error!(%{table: table, records: record ++ []}) do
    %{} -> {:ok, []}
    {:error, "ConditionalCheckFailedException"} -> {:error, :stale}
  end
end
# Converts binary keys in a {key, value} list to atoms, leaving atom keys
# untouched. NOTE(review): String.to_atom/1 on unbounded external input can
# exhaust the atom table; keys here come from Ecto schema fields/filters.
defp keys_to_atoms(list) do
  Enum.map(list, fn {key, value} -> {maybe_string_to_atom(key), value} end)
end

defp maybe_string_to_atom(value) when is_binary(value), do: String.to_atom(value)
defp maybe_string_to_atom(value), do: value
# Builds an `attribute_exists` condition for one {field, value} filter,
# using the "#field" expression-attribute-name placeholder form.
defp construct_condition_expression({field, _val}) do
  "attribute_exists(##{field})"
end
# Narrows a raw DynamoDB response to its pagination/processing metadata,
# dropping item payloads.
defp extract_query_info(result) do
  keys = ["Count", "ScannedCount", "LastEvaluatedKey", "UnprocessedItems", "UnprocessedKeys"]
  Map.take(result, keys)
end
# Used in update_all
# Extracts the {field, value} pairs for one update action (:set, :push,
# :pull, :inc) from Ecto's updates AST. Each value is expected to be a
# `^` pin placeholder whose index points into `params`.
# NOTE(review): a literal (non-pinned) value in the update AST would raise
# a MatchError here — confirm whether that case can reach this adapter.
defp extract_update_params([], _action_atom, _params), do: []
defp extract_update_params([%{expr: key_list}], action_atom, params) do
case key_list[action_atom] do
nil ->
[]
action_list ->
for s <- action_list do
{field_atom, {:^, _, [idx]}} = s
{field_atom, Enum.at(params,idx)}
end
end
end
# Anything other than a single map with an :expr key is unsupported.
defp extract_update_params([a], _action_atom, _params), do: error "#{inspect __MODULE__}.extract_update_params: Updates is either missing the :expr key or does not contain a struct or map: #{inspect a}"
defp extract_update_params(unsupported, _action_atom, _params), do: error "#{inspect __MODULE__}.extract_update_params: unsupported parameter construction. #{inspect unsupported}"
# Ecto does not support push/pull for types other than array, so DynamoDB
# set-type :add and :delete actions are supplied through opts instead;
# a missing action yields an empty list.
defp extract_update_params(key_list, action_atom) do
  # `with` returns key_list[action_atom] unchanged unless it is nil.
  with nil <- key_list[action_atom], do: []
end
# used in :update_all
# Extracts and decodes the primary-key attribute(s) from a raw DynamoDB
# item map, atomizing key names for use as update_item filters.
defp get_key_values_dynamo_map(dynamo_map, {:primary, keys}) do
  Enum.map(keys, fn key ->
    {String.to_atom(key), Dynamo.Decoder.decode(dynamo_map[key])}
  end)
end
# Maps each field to its DynamoDB expression-attribute-name placeholder,
# e.g. [name: _] -> %{"#name" => "name"}; placeholders avoid collisions
# with DynamoDB reserved words.
defp construct_expression_attribute_names(fields) do
  Map.new(fields, fn {field, _value} ->
    name = Atom.to_string(field)
    {"#" <> name, name}
  end)
end
# Builds the {field, value} pairs for expression_attribute_values:
# nil values are either skipped (they become REMOVE clauses when
# :remove_nil_fields is set) or encoded as explicit NULLs; fields being
# :pull-ed are filtered out since REMOVE-by-index needs no value.
defp construct_expression_attribute_values(fields, opts) do
remove_rather_than_set_to_null = opts[:remove_nil_fields] || Application.get_env(:ecto_adapters_dynamodb, :remove_nil_fields_on_update) == true
# If the value is nil and the :remove_nil_fields option is set,
# we're removing this attribute, not updating it, so filter out any such fields:
if remove_rather_than_set_to_null do
for {k, v} <- fields, !is_nil(v), do: {k, format_val(k, v, opts)}
else
for {k, v} <- fields, do: {k, format_nil(k, v, opts)}
end |> Enum.filter(fn {x, _} -> not Keyword.has_key?(maybe_list(opts[:pull]), x) end)
end
# Normalizes a possibly-nil (or non-list) value to a list so callers can
# apply Keyword functions without nil checks.
defp maybe_list(value) do
  if is_list(value), do: value, else: []
end
# Encodes nil values as explicit DynamoDB NULL attributes; anything else
# falls through to format_val/3.
defp format_nil(_key, nil, _opts), do: %{"NULL" => "true"}
defp format_nil(key, value, opts), do: format_val(key, value, opts)

# A value destined for a :push (list_append) must itself be wrapped in a
# list; plain values pass through untouched.
defp format_val(key, value, opts) do
  if is_nil(opts[:push][key]), do: value, else: [value]
end
# DynamoDB rejects requests carrying an empty expression_attribute_values
# map, so the key is only prepended when at least one value exists.
defp maybe_add_attribute_values(options, []), do: options

defp maybe_add_attribute_values(options, attribute_values) do
  [{:expression_attribute_values, attribute_values} | options]
end
# update_all variant: builds the ADD/DELETE part of the update expression
# immediately and returns {expression, add_and_delete_fields,
# deferred_set_push_pull_fields}. SET/REMOVE (and :push/:pull emulation)
# are deferred because :pull element indexes may have to be resolved per
# item in batch_update/6.
defp construct_update_expression(updates, params, opts) do
to_set = extract_update_params(updates, :set, params)
to_push = extract_update_params(updates, :push, params)
to_pull = extract_update_params(updates, :pull, params)
# :add comes from opts (DynamoDB set type) and also covers Ecto's :inc.
to_add = extract_update_params(opts, :add) ++ extract_update_params(updates, :inc, params)
to_delete = extract_update_params(opts, :delete)
{construct_add_statement(to_add, opts) <> " " <>
construct_delete_statement(to_delete, opts) |> String.trim(),
to_add ++ to_delete,
[set: to_set, push: to_push, pull: to_pull]}
end
# The update/6 callback supplies a flat fields list (unlike update_all's
# richer updates structure), so only SET and REMOVE clauses are needed;
# trailing/leading padding from an empty clause is trimmed away.
defp construct_update_expression(fields, opts) do
  [construct_set_statement(fields, opts), construct_remove_statement(fields, opts)]
  |> Enum.join(" ")
  |> String.trim()
end
# fields::[{:field, val}]
# Builds the "SET ..." clause: one "#field=:field" assignment per non-nil
# field (nil fields are excluded when they will be REMOVEd instead), plus a
# list_append for each :push field. Returns "" when there is nothing to set.
defp construct_set_statement(fields, opts) do
remove_rather_than_set_to_null = opts[:remove_nil_fields] || Application.get_env(:ecto_adapters_dynamodb, :remove_nil_fields_on_update) == true
set_clauses = for {key, val} <- fields, not (is_nil(val) and remove_rather_than_set_to_null) do
key_str = Atom.to_string(key)
"##{key_str}=:#{key_str}"
end
# (line-leading ++ continues the comprehension above: append push clauses)
++ case opts[:push] do
nil -> []
push_list ->
for {key, _val} <- push_list do
key_str = Atom.to_string(key)
# :prepend_to_list flips the list_append argument order so the new
# element lands at the head rather than the tail.
if Enum.member?(maybe_list(opts[:prepend_to_list]), key),
do: "##{key_str} = list_append(:#{key_str}, ##{key_str})",
else: "##{key_str} = list_append(##{key_str}, :#{key_str})"
end
end
case set_clauses do
[] ->
""
_ ->
"SET " <> Enum.join(set_clauses, ", ")
end
end
# Builds the "REMOVE ..." clause: removes nil-valued attributes when the
# :remove_nil_fields behavior is enabled, and removes list elements by
# index ("#field[i]") to emulate Ecto's :pull. Returns "" when empty.
defp construct_remove_statement(fields, opts) do
remove_rather_than_set_to_null = opts[:remove_nil_fields] || Application.get_env(:ecto_adapters_dynamodb, :remove_nil_fields_on_update) == true
remove_clauses =
if remove_rather_than_set_to_null do
for {key, val} <- fields, is_nil(val), do: "##{Atom.to_string(key)}"
else
[]
end
# Ecto :pull update can be emulated provided
# we are given an index to remove in opts[:pull_indexes]
# (line-leading ++ continues the expression above: append pull clauses)
++ cond do
!opts[:pull_indexes] or (Keyword.values(opts[:pull_indexes]) |> List.flatten) == [] ->
[]
opts[:pull] == nil ->
[]
true ->
for {key, _val} <- opts[:pull] do
key_str = Atom.to_string(key)
Enum.map(opts[:pull_indexes][key], fn index -> "##{key_str}[#{index}]" end) |> Enum.join(", ")
end
end
case remove_clauses do
[] ->
""
_ ->
"REMOVE " <> Enum.join(remove_clauses, ", ")
end
end
# fields::[{:field, val}]
# Builds the "ADD ..." clause ("#field :field" per entry) used for numeric
# increments and DynamoDB set additions; returns "" when there is nothing
# to add.
defp construct_add_statement([], _opts), do: ""

defp construct_add_statement(fields, _opts) do
  "ADD " <> Enum.map_join(fields, ", ", fn {key, _val} -> "##{key} :#{key}" end)
end
# Builds the "DELETE ..." clause ("#field :field" per entry) used to remove
# members from DynamoDB set attributes; returns "" when there is nothing
# to delete.
defp construct_delete_statement([], _opts), do: ""

defp construct_delete_statement(fields, _opts) do
  "DELETE " <> Enum.map_join(fields, ", ", fn {key, _val} -> "##{key} :#{key}" end)
end
# Raises (via validate_where_clause!/1) if any where clause uses a
# construct the adapter cannot translate to a DynamoDB expression.
defp validate_where_clauses!(query) do
  Enum.map(query.wheres, &validate_where_clause!/1)
end
# Accepts comparison operators, logical and/or, is_nil checks and raw
# fragments; anything else (e.g. like, not) raises via error/1.
defp validate_where_clause!(%BooleanExpr{expr: {op, _, _}}) when op in [:==, :<, :>, :<=, :>=, :in], do: :ok
defp validate_where_clause!(%BooleanExpr{expr: {logical_op, _, _}}) when logical_op in [:and, :or], do: :ok
defp validate_where_clause!(%BooleanExpr{expr: {:is_nil, _, _}}), do: :ok
defp validate_where_clause!(%BooleanExpr{expr: {:fragment, _, _}}), do: :ok
defp validate_where_clause!(unsupported), do: error "unsupported where clause: #{inspect unsupported}"
# We are parsing a nested, recursive structure of the general type:
# %{:logical_op, list_of_clauses} | %{:conditional_op, field_and_value}
#
# Walks the list of where-clause ASTs and accumulates
# {field_name, {value, op}} tokens (plus {:and | :or, nested_tokens}
# groups and fragment tokens) into `lookup_fields`.
defp extract_lookup_fields([], _params, lookup_fields), do: lookup_fields
defp extract_lookup_fields([query | queries], params, lookup_fields) do
  # A logical operator tuple does not always have a parent 'expr' key.
  maybe_extract_from_expr = case query do
    %BooleanExpr{expr: expr} -> expr
    # TODO: could there be other cases?
    _ -> query
  end
  case maybe_extract_from_expr do
    # A logical operator points to a list of conditionals
    # Simple comparison: resolve {field, value} and merge it into the
    # accumulator, stacking a second op onto a field that already has one.
    {op, _, [left, right]} when op in [:==, :<, :>, :<=, :>=, :in] ->
      {field, value} = get_op_clause(left, right, params)
      updated_lookup_fields =
        case List.keyfind(lookup_fields, field, 0) do
          # we assume the most ops we can apply to one field is two, otherwise this might throw an error
          {field, {old_val, old_op}} ->
            List.keyreplace(lookup_fields, field, 0, {field, {[value, old_val], [op, old_op]}})
          _ -> [{field, {value, op}} | lookup_fields]
        end
      extract_lookup_fields(queries, params, updated_lookup_fields)
    # Logical operator expressions have more than one op clause
    # We are matching queries of the type: 'from(p in Person, where: p.email == "g@email.com" and p.first_name == "George")'
    # But not of the type: 'from(p in Person, where: [email: "g@email.com", first_name: "George"])'
    #
    # A logical operator is a member of a list
    {logical_op, _, clauses} when logical_op in [:and, :or] ->
      deeper_lookup_fields = extract_lookup_fields(clauses, params, [])
      extract_lookup_fields(queries, params, [{logical_op, deeper_lookup_fields} | lookup_fields])
    # Fragments ('between' / 'begins_with') are handled by
    # parse_raw_expr_mixed_list/2, which raises on unsupported shapes.
    {:fragment, _, raw_expr_mixed_list} ->
      parsed_fragment = parse_raw_expr_mixed_list(raw_expr_mixed_list, params)
      extract_lookup_fields(queries, params, [parsed_fragment | lookup_fields])
    # We perform a post-query is_nil filter on indexed fields and have DynamoDB filter
    # for nil non-indexed fields (although post-query nil-filters on (missing) indexed
    # attributes could only find matches when the attributes are not the range part of
    # a queried partition key (hash part) since those would not return the sought records).
    {:is_nil, _, [arg]} ->
      {{:., _, [_, field_name]}, _, _} = arg
      # We give the nil value a string, "null", since it will be mapped as a DynamoDB attribute_expression_value
      extract_lookup_fields(queries, params, [{to_string(field_name), {"null", :is_nil}} | lookup_fields])
    # Anything unrecognized is skipped silently.
    _ -> extract_lookup_fields(queries, params, lookup_fields)
  end
end
# Specific (as opposed to generalized) parsing for Ecto :fragment - the only use for it
# so far is 'between' which is the only way to query 'between' on an indexed field since
# those accept only single conditions.
#
# Example with values as strings: [raw: "", expr: {{:., [], [{:&, [], [0]}, :person_id]}, [], []}, raw: " between ", expr: "person:a", raw: " and ", expr: "person:f", raw: ""]
#
# Example with values as part of the string itself: [raw: "", expr: {{:., [], [{:&, [], [0]}, :person_id]}, [], []}, raw: " between person:a and person:f"]
#
# Example with values in params: [raw: "", expr: {{:., [], [{:&, [], [0]}, :person_id]}, [], []}, raw: " between ", expr: {:^, [], [0]}, raw: " and ", expr: {:^, [], [1]}, raw: ""]
#
# Returns a {field_name, {value_or_values, :between | :begins_with}}
# token, or raises (via parse_raw_expr_mixed_list_error/1) for any
# other fragment shape.
defp parse_raw_expr_mixed_list(raw_expr_mixed_list, params) do
  # group the expression into fields, values, and operators,
  # only supporting the example with values in params
  case raw_expr_mixed_list do
    # between
    [raw: _, expr: {{:., [], [{:&, [], [0]}, field_atom]}, [], []}, raw: between_str, expr: {:^, [], [idx1]}, raw: and_str, expr: {:^, [], [idx2]}, raw: _] ->
      # The raw text around the placeholders must spell "between ... and".
      if not (Regex.match?(~r/^\s*between\s*and\s*$/i, between_str <> and_str)), do:
        parse_raw_expr_mixed_list_error(raw_expr_mixed_list)
      {to_string(field_atom), {[Enum.at(params, idx1), Enum.at(params, idx2)], :between}}
    # begins_with
    [raw: begins_with_str, expr: {{:., [], [{:&, [], [0]}, field_atom]}, [], []}, raw: comma_str, expr: {:^, [], [idx]}, raw: closing_parenthesis_str] ->
      # The raw text must spell "begins_with( , )" around the placeholders.
      if not (Regex.match?(~r/^\s*begins_with\(\s*,\s*\)\s*$/i, begins_with_str <> comma_str <> closing_parenthesis_str)), do:
        parse_raw_expr_mixed_list_error(raw_expr_mixed_list)
      {to_string(field_atom), {Enum.at(params, idx), :begins_with}}
    _ -> parse_raw_expr_mixed_list_error(raw_expr_mixed_list)
  end
end
# Shared failure path for fragments that do not match one of the two
# supported shapes ('between' and 'begins_with' with pinned values).
defp parse_raw_expr_mixed_list_error(raw_expr_mixed_list), do:
  raise "#{inspect __MODULE__}.parse_raw_expr_mixed_list parse error. We currently only support the Ecto fragments of the form, 'where: fragment(\"? between ? and ?\", FIELD_AS_VARIABLE, VALUE_AS_VARIABLE, VALUE_AS_VARIABLE)'; and 'where: fragment(\"begins_with(?, ?)\", FIELD_AS_VARIABLE, VALUE_AS_VARIABLE)'. Received: #{inspect raw_expr_mixed_list}"
# Splits a binary-operator where clause into its {field_name, value}
# parts: the left side must be a schema field reference and the right
# side is resolved against the bound query params.
defp get_op_clause(left, right, params) do
  {left |> get_field() |> Atom.to_string(), get_value(right, params)}
end
# Extracts the field atom from an Ecto field-access AST node, e.g. the
# AST for `p.email` yields :email.
defp get_field({{:., _, [{:&, _, [0]}, field]}, _, []}), do: field
# Bug fix: the unsupported clause is an AST tuple, and tuples do not
# implement String.Chars, so interpolating it directly
# ("#{other_clause}") raised Protocol.UndefinedError instead of the
# intended ArgumentError. `inspect/1` renders any term safely.
defp get_field(other_clause) do
  error "Unsupported where clause, left hand side: #{inspect other_clause}"
end
# Resolves the right-hand side of a where clause to a concrete value.
# A single pinned index looks the value up in the bound params.
defp get_value({:^, _, [idx]}, params), do: Enum.at(params, idx)

# Pinned pair [start, count]: take that slice of the bound params.
# NOTE(review): Enum.slice/3 treats the second element as a *count*,
# not an end index — confirm that is the intended semantics here.
defp get_value({:^, _, [start, count]}, params), do: Enum.slice(params, start, count)

# Handle queries with interpolated values
# ex. Repo.all from i in Item, where: i.id in ^item_ids
defp get_value({:^, _, _}, params), do: params

# Handle .all(query) QUERIES: literal values pass through unchanged.
defp get_value(other_clause, _params), do: other_clause
# Raises an ArgumentError with the given message; shared failure path
# for unsupported query constructs.
defp error(msg), do: raise(ArgumentError, message: msg)
# Pulls the list of selected field atoms out of an Ecto select
# expression: [] when the whole source struct is selected, a
# single-element list for `select: p.field`, and one atom per element
# for tuple selects such as `select: {p.a, p.b}`.
defp extract_select_fields(%Ecto.Query.SelectExpr{expr: expr} = _) do
  case expr do
    {_, _, [0]} ->
      []
    {{:., _, [{_, _, _}, field]}, _, _} ->
      [field]
    {:{}, _, clauses} ->
      for {{_, _, [{_, _, _}, field]}, _, _} <- clauses, do: field
  end
end
# Decodes maps and datetime, seemingly unhandled by ExAws Dynamo decoder
# (timestamps() corresponds with :naive_datetime)
#
# With no explicit select, every schema field of `item` is decoded in
# place and the result wrapped in a one-element list; with a field
# select, returns just the decoded values of those fields, in select
# order.
defp custom_decode(item, model, select) do
  selected_fields = extract_select_fields(select)
  case selected_fields do
    [] ->
      [Enum.reduce(model.__schema__(:fields), item, fn (field, acc) ->
        Map.update!(acc, field, fn val -> decode_type(model.__schema__(:type, field), val) end)
      end)]
    fields ->
      for field <- fields, do: decode_type(model.__schema__(:type, field), Map.get(item, field))
  end
end
# Re-keys a raw DynamoDB item through the `sources` field mapping, then
# decodes it into `model` via ExAws and applies our custom decoding.
# NOTE(review): String.to_atom/1 on item keys creates atoms from data
# coming back from DynamoDB — assumes keys are a bounded set of schema
# field names; confirm.
defp decode_item(item, model, sources, select) do
  renamed =
    Map.new(item, fn {k, v} ->
      {to_string(Map.get(sources, String.to_atom(k))), v}
    end)

  %{"Item" => renamed}
  |> Dynamo.decode_item(as: model)
  |> custom_decode(model, select)
end
# This is used slightly differently
# when handling select in custom_decode/2
#
# Decodes a single stored value according to its Ecto schema type.
# nil passes through untouched.
defp decode_type(_type, nil), do: nil

defp decode_type(type, val) do
  case type do
    :utc_datetime ->
      # Stored as an ISO8601 string with offset information.
      {:ok, dt, _offset} = DateTime.from_iso8601(val)
      dt

    :naive_datetime ->
      NaiveDateTime.from_iso8601!(val)

    {:embed, _} ->
      decode_embed(type, val)

    t when t in [Ecto.Adapters.DynamoDB.DynamoDBSet, MapSet] ->
      MapSet.new(val)

    _ ->
      val
  end
end
# Decodes an embedded-schema value via Ecto's SQL adapter helper.
# Returns nil (after logging) when the value cannot be loaded, so a
# single bad embed does not fail the whole result set.
defp decode_embed(type, val) do
  case Ecto.Adapters.SQL.load_embed(type, val) do
    {:ok, decoded_value} ->
      decoded_value
    :error ->
      ecto_dynamo_log(:info, "#{inspect __MODULE__}.decode_embed: failed to decode embedded value: #{inspect val}")
      nil
  end
end
# We found one instance where DynamoDB's error message could
# be more instructive - when trying to set an indexed field to something
# other than a string or number - so we're adding a more helpful message.
# The parameter, 'params', has the type %{table: :string, records: [:map]}
#
# Unwraps an ExAws request result: returns the payload on success; on
# failure returns {:error, name} for conditional-check failures (used
# upstream to detect a missing update/delete record) and raises for
# everything else.
defp handle_error!(ex_aws_request_result, params) do
  case ex_aws_request_result do
    {:ok, result} -> result

    {:error, {error_name, _} = error} ->
      # Check for inappropriate insert into indexed field
      indexed_fields = Ecto.Adapters.DynamoDB.Info.indexed_attributes(params.table)

      # Repo.insert_all can present multiple records at once
      forbidden_insert_on_indexed_field = Enum.reduce(params.records, false, fn (record, acc) ->
        acc || Enum.any?(record, fn {field, val} ->
          [type] = ExAws.Dynamo.Encoder.encode(val) |> Map.keys

          # Ecto does not convert Empty strings to nil before passing them to Repo.update_all or
          # Repo.insert_all DynamoDB provides an instructive message during an update (forwarded by ExAws),
          # but less so for batch_write_item, so we catch the empty string as well.
          # Dynamo does not allow insertion of empty strings in any case.
          #
          # `type not in [...]` replaces the deprecated `not type in [...]`
          # spelling (same parse, but the old form warns on modern Elixir).
          (Enum.member?(indexed_fields, to_string(field)) and type not in ["S", "N"]) || val == ""
        end)
      end)

      cond do
        # we use this error to check if an update or delete record does not exist
        error_name == "ConditionalCheckFailedException" ->
          {:error, error_name}

        forbidden_insert_on_indexed_field ->
          raise "The following request error could be related to attempting to insert an empty string or attempting to insert a type other than a string or number on an indexed field. Indexed fields: #{inspect indexed_fields}. Records: #{inspect params.records}.\n\nExAws Request Error! #{inspect error}"

        true ->
          raise ExAws.Error, message: "ExAws Request Error! #{inspect error}"
      end
  end
end
@doc """
Logs message to console and optionally to file. Log levels, colours and file path may be set in configuration (details in README.md).
"""
def ecto_dynamo_log(level, message, attributes \\ %{}, opts \\ []) do
  # :depth caps how far chisel/2 recurses into `attributes`.
  depth = opts[:depth] || 4
  colours = Application.get_env(:ecto_adapters_dynamodb, :log_colours)
  # NOTE(review): the timestamp components are not zero-padded, so e.g.
  # "2018-1-5 3:7:9" can be emitted — confirm downstream log parsers
  # tolerate that before changing the format.
  d = DateTime.utc_now
  formatted_message = "#{d.year}-#{d.month}-#{d.day} #{d.hour}:#{d.minute}:#{d.second} UTC [Ecto dynamo #{level}] #{message}"
  {:ok, log_message} = Poison.encode(%{message: formatted_message, attributes: chisel(attributes, depth)})
  log_path = Application.get_env(:ecto_adapters_dynamodb, :log_path)
  # Only :info is logged unless the :log_levels config overrides it.
  log_levels = Application.get_env(:ecto_adapters_dynamodb, :log_levels) || [:info]
  if level in log_levels do
    if Application.get_env(:ecto_adapters_dynamodb, :log_in_colour) do
      IO.ANSI.format([colours[level] || :normal, log_message], true) |> IO.puts
    else
      log_message |> IO.puts
    end
    # Bug fix: String.valid?/1 only accepts binaries, so an unset
    # :log_path (nil) raised FunctionClauseError here. Guarding with
    # is_binary/1 first makes a missing path simply skip file logging;
    # behavior for configured string paths is unchanged.
    if is_binary(log_path) and String.valid?(log_path) and Regex.match?(~r/\S/, log_path), do: log_pipe(log_path, log_message)
  end
end
# Trims an arbitrary nested term for JSON logging: strings and numbers
# pass through, other scalars are `inspect`ed, structs are `inspect`ed
# whole, and maps/lists recurse until `depth` levels have been consumed,
# after which "beyond_log_depth" is substituted.
defp chisel(str, _depth) when is_binary(str), do: str
defp chisel(num, _depth) when is_number(num), do: num
defp chisel(any, _depth) when not is_map(any) and not is_list(any), do: inspect(any)
defp chisel(_term, 0), do: "beyond_log_depth"
defp chisel(%{__struct__: _} = struct, _depth), do: inspect(struct)

defp chisel(map, depth) when is_map(map),
  do: Map.new(map, fn {k, v} -> {k, chisel(v, depth - 1)} end)

defp chisel(list, depth) when is_list(list),
  do: Enum.map(list, &chisel(&1, depth - 1))
# Appends `str` to the log file at `path`.
# `File.write/3` with [:append] opens, writes, and closes in one call,
# so the file descriptor can no longer be leaked when the write fails
# (the previous open/binwrite/close sequence left the fd open if
# IO.binwrite/2 raised). A failed write raises MatchError, matching the
# crash-on-failure behavior of the original `{:ok, file} = File.open`.
defp log_pipe(path, str) do
  :ok = File.write(path, str, [:append])
end
end
| 44.655738 | 354 | 0.67221 |
084c900672060871d97a0f0e0362cbce6432cf09 | 6,767 | ex | Elixir | lib/astarte_data_updater_plant/data_updater/payloads_decoder.ex | rbino/astarte_data_updater_plant | b1769207636fad9f91fdcad4ed768757af09683f | [
"Apache-2.0"
] | 5 | 2018-01-30T15:08:23.000Z | 2019-12-20T15:06:56.000Z | lib/astarte_data_updater_plant/data_updater/payloads_decoder.ex | rbino/astarte_data_updater_plant | b1769207636fad9f91fdcad4ed768757af09683f | [
"Apache-2.0"
] | 17 | 2018-01-31T15:50:22.000Z | 2019-12-05T17:26:06.000Z | lib/astarte_data_updater_plant/data_updater/payloads_decoder.ex | rbino/astarte_data_updater_plant | b1769207636fad9f91fdcad4ed768757af09683f | [
"Apache-2.0"
] | 2 | 2018-01-31T15:43:04.000Z | 2019-11-15T12:00:05.000Z | #
# This file is part of Astarte.
#
# Copyright 2018 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule Astarte.DataUpdaterPlant.DataUpdater.PayloadsDecoder do
  alias Astarte.Core.Interface

  # Hard cap on inflated payload size, guarding against zip bombs.
  @max_uncompressed_payload_size 10_485_760

  @doc """
  Decode a BSON payload and returns a tuple containing the decoded value, the timestamp and metadata.
  reception_timestamp is used if no timestamp has been sent with the payload.
  """
  # Spec corrected: the function also returns {nil, nil, nil} for empty
  # payloads / empty binary values and {:error, :undecodable_bson_payload}
  # for undecodable input; the previous spec ({map, integer, map})
  # covered neither, and "v" need not be a map.
  @spec decode_bson_payload(binary, integer) ::
          {term, integer | nil, map | nil} | {:error, :undecodable_bson_payload}
  def decode_bson_payload(payload, reception_timestamp) do
    if byte_size(payload) != 0 do
      case Cyanide.decode(payload) do
        {:ok, %{"v" => bson_value, "t" => %DateTime{} = timestamp, "m" => %{} = metadata}} ->
          bson_timestamp = DateTime.to_unix(timestamp, :millisecond)
          {bson_value, bson_timestamp, metadata}

        {:ok, %{"v" => bson_value, "m" => %{} = metadata}} ->
          # NOTE(review): reception_timestamp is divided by 10_000 here
          # and below, which implies it arrives in tenth-of-microsecond
          # units — confirm against the callers.
          {bson_value, div(reception_timestamp, 10000), metadata}

        {:ok, %{"v" => bson_value, "t" => %DateTime{} = timestamp}} ->
          bson_timestamp = DateTime.to_unix(timestamp, :millisecond)
          {bson_value, bson_timestamp, %{}}

        {:ok, %{"v" => {0 = _subtype, <<>> = _bin}}} ->
          # An empty BSON binary value decodes to "no value".
          {nil, nil, nil}

        {:ok, %{"v" => bson_value}} ->
          {bson_value, div(reception_timestamp, 10000), %{}}

        {:ok, %{} = bson_value} ->
          # Handling old format object aggregation
          {bson_value, div(reception_timestamp, 10000), %{}}

        {:error, _reason} ->
          {:error, :undecodable_bson_payload}

        _ ->
          {:error, :undecodable_bson_payload}
      end
    else
      {nil, nil, nil}
    end
  end

  @doc """
  Safely decodes a zlib deflated binary and inflates it.
  This function avoids zip bomb vulnerabilities, and it decodes up to 10_485_760 bytes.
  """
  # Spec corrected: :error is returned when the inflated size would reach
  # @max_uncompressed_payload_size; the previous spec claimed `binary` only.
  @spec safe_inflate(binary) :: binary | :error
  def safe_inflate(zlib_payload) do
    z = :zlib.open()
    :ok = :zlib.inflateInit(z)

    {continue_flag, output_list} = :zlib.safeInflate(z, zlib_payload)

    uncompressed_size =
      List.foldl(output_list, 0, fn output_block, acc ->
        acc + byte_size(output_block)
      end)

    deflated_payload =
      if uncompressed_size < @max_uncompressed_payload_size do
        output_acc =
          List.foldl(output_list, <<>>, fn output_block, acc ->
            acc <> output_block
          end)

        safe_inflate_loop(z, output_acc, uncompressed_size, continue_flag)
      else
        :error
      end

    :zlib.inflateEnd(z)
    :zlib.close(z)

    deflated_payload
  end

  # Keeps pulling inflated chunks while the running size stays under the
  # cap; bails out with :error as soon as the cap would be crossed.
  defp safe_inflate_loop(z, output_acc, size_acc, :continue) do
    {continue_flag, output_list} = :zlib.safeInflate(z, [])

    uncompressed_size =
      List.foldl(output_list, size_acc, fn output_block, acc ->
        acc + byte_size(output_block)
      end)

    if uncompressed_size < @max_uncompressed_payload_size do
      output_acc =
        List.foldl(output_list, output_acc, fn output_block, acc ->
          acc <> output_block
        end)

      safe_inflate_loop(z, output_acc, uncompressed_size, continue_flag)
    else
      :error
    end
  end

  defp safe_inflate_loop(_z, output_acc, _size_acc, :finished) do
    output_acc
  end

  @doc """
  Decodes a properties paths list and returning a MapSet with them.
  """
  # Spec corrected: the MapSet holds {interface, path} tuples (see
  # parse_device_properties_string/2), not bare strings.
  @spec parse_device_properties_payload(String.t(), map) ::
          {:ok, MapSet.t({String.t(), String.t()})} | {:error, :invalid_properties}
  def parse_device_properties_payload("", _introspection) do
    {:ok, MapSet.new()}
  end

  def parse_device_properties_payload(decoded_payload, introspection) do
    if String.valid?(decoded_payload) do
      parse_device_properties_string(decoded_payload, introspection)
    else
      {:error, :invalid_properties}
    end
  end

  # Splits "iface/some/path;iface2/other" into {interface, "/some/path"}
  # tuples, keeping only interfaces present in the introspection map.
  def parse_device_properties_string(decoded_payload, introspection) do
    paths_list =
      decoded_payload
      |> String.split(";")
      |> List.foldl(MapSet.new(), fn property_full_path, paths_acc ->
        with [interface, path] <- String.split(property_full_path, "/", parts: 2) do
          if Map.has_key?(introspection, interface) do
            MapSet.put(paths_acc, {interface, "/" <> path})
          else
            paths_acc
          end
        else
          _ ->
            # TODO: we should print a warning, or return a :issues_found status
            paths_acc
        end
      end)

    {:ok, paths_list}
  end

  @doc """
  Decodes introspection string into a list of tuples
  """
  @spec parse_introspection(String.t()) ::
          {:ok, list({String.t(), integer, integer})} | {:error, :invalid_introspection}
  def parse_introspection("") do
    {:ok, []}
  end

  def parse_introspection(introspection_payload) do
    if String.valid?(introspection_payload) do
      parse_introspection_string(introspection_payload)
    else
      {:error, :invalid_introspection}
    end
  end

  # Validates every "name:major:minor" token first; only when all tokens
  # are well formed is the payload re-parsed into tuples.
  defp parse_introspection_string(introspection_payload) do
    introspection_tokens = String.split(introspection_payload, ";")

    all_tokens_are_good =
      Enum.all?(introspection_tokens, fn token ->
        with [interface_name, major_version_string, minor_version_string] <-
               String.split(token, ":"),
             {major_version, ""} <- Integer.parse(major_version_string),
             {minor_version, ""} <- Integer.parse(minor_version_string) do
          cond do
            String.match?(interface_name, Interface.interface_name_regex()) == false ->
              false

            major_version < 0 ->
              false

            minor_version < 0 ->
              false

            true ->
              true
          end
        else
          _not_expected ->
            false
        end
      end)

    if all_tokens_are_good do
      parsed_introspection =
        for token <- introspection_tokens do
          [interface_name, major_version_string, minor_version_string] = String.split(token, ":")

          {major_version, ""} = Integer.parse(major_version_string)
          {minor_version, ""} = Integer.parse(minor_version_string)

          {interface_name, major_version, minor_version}
        end

      {:ok, parsed_introspection}
    else
      {:error, :invalid_introspection}
    end
  end
end
| 30.345291 | 99 | 0.640757 |
084c9ff3752171aa114ebf1939078e079b928dfc | 1,833 | exs | Elixir | mix.exs | wingyplus/money | 18d8eb3581886badb9b89adf455844517c10f599 | [
"MIT"
] | null | null | null | mix.exs | wingyplus/money | 18d8eb3581886badb9b89adf455844517c10f599 | [
"MIT"
] | null | null | null | mix.exs | wingyplus/money | 18d8eb3581886badb9b89adf455844517c10f599 | [
"MIT"
] | null | null | null | defmodule Money.Mixfile do
use Mix.Project
@version "1.7.0"
@github_url "https://github.com/elixirmoney/money"
def project do
[
app: :money,
aliases: aliases(),
name: "Money",
version: @version,
elixir: "~> 1.0",
deps: deps(),
source_url: "https://github.com/elixirmoney/money",
docs: fn ->
[
source_ref: "v#{@version}",
canonical: "http://hexdocs.pm/money",
main: "Money",
source_url: @github_url,
extras: ["README.md", "CONTRIBUTING.md"]
]
end,
description: description(),
package: package(),
preferred_cli_env: [check: :test],
dialyzer: [plt_add_apps: [:ecto, :phoenix_html]]
]
end
def application do
[]
end
defp deps do
[
# Soft dependencies
{:ecto, "~> 1.0 or ~> 2.0 or ~> 2.1 or ~> 3.0", optional: true},
{:phoenix_html, "~> 2.0", optional: true},
{:decimal, "~> 1.0", optional: true},
# Code style and analyzers
{:credo, "~> 1.1", only: [:dev, :test], runtime: false, optional: true},
{:dialyxir, "~> 1.0.0-rc.6", only: [:dev, :test], runtime: false, optional: true},
# Docs
{:ex_doc, "~> 0.21", only: [:dev, :docs]}
]
end
defp description do
"""
Elixir library for working with Money safer, easier, and fun, is an interpretation of the Fowler's Money pattern in fun.prog.
"""
end
defp package do
[
maintainers: ["Petr Stepchenko", "Giulio De Donato", "Andrew Timberlake"],
contributors: ["Petr Stepchenko", "Giulio De Donato", "Andrew Timberlake"],
licenses: ["MIT"],
links: %{"GitHub" => @github_url}
]
end
defp aliases do
[
check: ["format --check-formatted", "credo --strict", "test", "dialyzer"]
]
end
end
| 25.109589 | 129 | 0.555374 |
084ca9821529331b56c66665a65abfb157a3a93c | 156 | exs | Elixir | .formatter.exs | ORBAT/mulix | 75503f79eb1832329afe96540cfd0e2dc9c01575 | [
"MIT"
] | null | null | null | .formatter.exs | ORBAT/mulix | 75503f79eb1832329afe96540cfd0e2dc9c01575 | [
"MIT"
] | null | null | null | .formatter.exs | ORBAT/mulix | 75503f79eb1832329afe96540cfd0e2dc9c01575 | [
"MIT"
] | null | null | null | # Used by "mix format"
[
inputs: ["mix.exs", "{config,lib,test}/**/*.{ex,exs}"],
locals_without_parens: [defmultiop: :*, defop: :*, defgenericop: :*]
]
| 26 | 70 | 0.596154 |
084cdc7869276b25503a70c3c86713e82c69da52 | 4,167 | ex | Elixir | lib/ravix_ecto/parsers/query_params.ex | YgorCastor/ravix-ecto | 63badc62e9ea2b38c7667d4ee1bfa8cb7c1cf371 | [
"Apache-2.0"
] | 1 | 2022-03-30T14:56:00.000Z | 2022-03-30T14:56:00.000Z | lib/ravix_ecto/parsers/query_params.ex | YgorCastor/ravix-ecto | 63badc62e9ea2b38c7667d4ee1bfa8cb7c1cf371 | [
"Apache-2.0"
] | null | null | null | lib/ravix_ecto/parsers/query_params.ex | YgorCastor/ravix-ecto | 63badc62e9ea2b38c7667d4ee1bfa8cb7c1cf371 | [
"Apache-2.0"
] | null | null | null | defmodule Ravix.Ecto.Parser.QueryParams do
import Ravix.Ecto.Parser.{ConditionalTokens, Shared}
alias Ecto.Query, as: EctoQuery
def parse(%EctoQuery{wheres: wheres} = query, params, pk) do
wheres
|> Enum.map(fn %EctoQuery.BooleanExpr{expr: expr} ->
pair(expr, params, pk, query, "where clause")
end)
|> :lists.flatten()
|> merge_keys(query, "where clause")
|> map_unless_empty
end
def parse([{_, _} | _] = fields, keys, pk) do
fields
|> Keyword.take(keys)
|> parse(pk)
end
def parse(filter, pk) do
filter |> value(pk, "where clause") |> map_unless_empty
end
def parse_update(%EctoQuery{updates: updates} = query, params, pk) do
updates
|> Enum.flat_map(fn %EctoQuery.QueryExpr{expr: expr} ->
pair(expr, query, params, pk)
end)
|> :lists.flatten()
|> merge_keys(query, "update clause")
end
defp merge_keys(keyword, query, place) do
Enum.reduce(keyword, %{}, fn {key, value}, acc ->
Map.update(acc, key, value, fn
old when is_list(old) -> old ++ value
_ -> error(query, place)
end)
end)
end
defp mapped_pair_or_value({op, _, _} = tuple, params, pk, query, place) when is_op(op) do
List.wrap(pair(tuple, params, pk, query, place))
end
defp mapped_pair_or_value(value, params, pk, query, place) do
value(value, params, pk, query, place)
end
defp pair(expr, query, params, pk) do
Enum.map(expr, fn {key, value} ->
{update_op!(key, query), value(value, params, pk, query, "update clause")}
end)
end
defp pair({:not, _, [{:in, _, [left, right]}]}, params, pk, query, place) do
{field(left, pk, query, place), [{binary_op(:nin), value(right, params, pk, query, place)}]}
end
defp pair({:is_nil, _, [expr]}, _, pk, query, place) do
{field(expr, pk, query, place), nil}
end
defp pair({:in, _, [left, {:^, _, [0, 0]}]}, _params, pk, query, place) do
{field(left, pk, query, place), [{binary_op(:in), []}]}
end
defp pair({:in, _, [left, {:^, _, [ix, len]}]}, params, pk, query, place) do
args =
ix..(ix + len - 1)
|> Enum.map(&elem(params, &1))
|> Enum.map(&value(&1, params, pk, query, place))
{field(left, pk, query, place), [{binary_op(:in), args}]}
end
defp pair({:in, _, [lhs, {{:., _, _}, _, _} = rhs]}, params, pk, query, place) do
{field(rhs, pk, query, place), [{binary_op(:in), [value(lhs, params, pk, query, place)]}]}
end
defp pair({:not, _, [{:in, _, [left, {:^, _, [ix, len]}]}]}, params, pk, query, place) do
args =
ix..(ix + len - 1)
|> Enum.map(&elem(params, &1))
|> Enum.map(&value(&1, params, pk, query, place))
{field(left, pk, query, place), [{binary_op(:nin), args}]}
end
defp pair({:not, _, [{:in, _, [left, right]}]}, params, pk, query, place) do
{field(left, pk, query, place), [{binary_op(:nin), value(right, params, pk, query, place)}]}
end
defp pair({:not, _, [{:is_nil, _, [expr]}]}, _, pk, query, place) do
{field(expr, pk, query, place), [{binary_op(:ne), nil}]}
end
defp pair({:not, _, [{:==, _, [left, right]}]}, params, pk, query, place) do
{field(left, pk, query, place), [{binary_op(:ne), value(right, params, pk, query, place)}]}
end
defp pair({:not, _, [expr]}, params, pk, query, place) do
{bool_op(:not), [pair(expr, params, pk, query, place)]}
end
defp pair({:fragment, _, [args]}, params, pk, query, place)
when is_list(args) or tuple_size(args) == 3 do
value(args, params, pk, query, place)
end
defp pair({op, _, [left, right]}, params, pk, query, place) when op in ecto_binary_tokens() do
case value(right, params, pk, query, place) do
value when is_list(value) -> {field(left, pk, query, place), [{binary_op(:in), value}]}
value -> {field(left, pk, query, place), [{binary_op(op), value}]}
end
end
defp pair({op, _, args}, params, pk, query, place) when op in ecto_boolean_tokens() do
args = Enum.map(args, &mapped_pair_or_value(&1, params, pk, query, place))
{bool_op(op), args}
end
defp pair(_expr, _params, _pk, query, place) do
error(query, place)
end
end
| 32.302326 | 96 | 0.595872 |
084cdd0f911e58c02d114adddfd88af4de6f32dd | 956 | exs | Elixir | test/multipoint_polygon_test.exs | otherchris/topo | 228c2c371c5e89cd1297662da7701c52c5d8b167 | [
"MIT"
] | 110 | 2016-05-05T21:09:19.000Z | 2022-03-08T05:22:16.000Z | test/multipoint_polygon_test.exs | otherchris/topo | 228c2c371c5e89cd1297662da7701c52c5d8b167 | [
"MIT"
] | 15 | 2016-12-01T00:32:11.000Z | 2022-01-18T13:56:37.000Z | test/multipoint_polygon_test.exs | otherchris/topo | 228c2c371c5e89cd1297662da7701c52c5d8b167 | [
"MIT"
] | 24 | 2016-09-19T20:06:50.000Z | 2021-06-16T06:41:10.000Z | defmodule MultiPointPolygonTest do
use ExUnit.Case
@polygon %Geo.MultiPolygon{
coordinates: [
[
[{60, 120}, {60, 40}, {160, 40}, {160, 120}, {60, 120}],
[{140, 100}, {80, 100}, {80, 60}, {140, 60}, {140, 100}]
]
]
}
test "One Point in a Polygon" do
b = %Geo.MultiPoint{coordinates: [{70, 50}, {100, 80}]}
assert Topo.intersects?(@polygon, b)
assert Topo.intersects?(b, @polygon)
refute Topo.contains?(@polygon, b)
end
test "Both Points outside of a Polygon" do
b = %Geo.MultiPoint{coordinates: [{70, 35}, {100, 80}]}
refute Topo.intersects?(@polygon, b)
refute Topo.intersects?(b, @polygon)
refute Topo.contains?(@polygon, b)
end
test "Both Points inside of a Polygon" do
b = %Geo.MultiPoint{coordinates: [{70, 50}, {71, 55}]}
assert Topo.intersects?(@polygon, b)
assert Topo.intersects?(b, @polygon)
assert Topo.contains?(@polygon, b)
end
end
| 23.9 | 64 | 0.603556 |
084ceb5b78c7a5ac9cda2eb9f5126a46e2e506dd | 908 | ex | Elixir | lib/leather_web/channels/user_socket.ex | nicksergeant/leather | 15b1c9403999737f7a6ee9a1c0349e047805bbe6 | [
"MIT"
] | 67 | 2016-10-24T04:11:40.000Z | 2021-11-25T16:46:51.000Z | lib/leather_web/channels/user_socket.ex | nicksergeant/leather | 15b1c9403999737f7a6ee9a1c0349e047805bbe6 | [
"MIT"
] | 6 | 2017-08-17T21:43:50.000Z | 2021-11-03T13:13:49.000Z | lib/leather_web/channels/user_socket.ex | nicksergeant/leather | 15b1c9403999737f7a6ee9a1c0349e047805bbe6 | [
"MIT"
] | 7 | 2017-08-13T01:43:37.000Z | 2022-01-11T04:38:27.000Z | defmodule LeatherWeb.UserSocket do
@moduledoc false
use Phoenix.Socket
channel("accounts:*", LeatherWeb.AccountsChannel)
channel("budgets:*", LeatherWeb.BudgetsChannel)
channel("forecast:*", LeatherWeb.ForecastChannel)
channel("link:*", LeatherWeb.LinkChannel)
channel("stashes:*", LeatherWeb.StashesChannel)
channel("transactions:*", LeatherWeb.TransactionsChannel)
transport(:websocket, Phoenix.Transports.WebSocket)
@max_age 2 * 7 * 24 * 60 * 60
def connect(%{"token" => token}, socket) do
case Phoenix.Token.verify(socket, "user socket", token, max_age: @max_age) do
{:ok, user_id} ->
user = Leather.Repo.get(Leather.User, user_id)
{:ok, assign(socket, :user, user)}
{:error, _reason} ->
:error
end
end
def connect(_params, _socket) do
:error
end
def id(socket) do
"users_socket:#{socket.assigns.user.id}"
end
end
| 26.705882 | 81 | 0.679515 |
084cff557736cd6ca774838ba0af4439e7189b0e | 572 | exs | Elixir | speedtest/test/speedtest/server_test.exs | robsonrod/elixir-projs | 0e6ff1cc1e2bd9d25fcdc9414f9312fbb3ae9018 | [
"MIT"
] | null | null | null | speedtest/test/speedtest/server_test.exs | robsonrod/elixir-projs | 0e6ff1cc1e2bd9d25fcdc9414f9312fbb3ae9018 | [
"MIT"
] | null | null | null | speedtest/test/speedtest/server_test.exs | robsonrod/elixir-projs | 0e6ff1cc1e2bd9d25fcdc9414f9312fbb3ae9018 | [
"MIT"
] | null | null | null | defmodule Speedtest.ServerTest do
use ExUnit.Case
doctest Speedtest.Server
test "fetch servers from url" do
expected = %Speedtest.ServerInfo{
cc: "BR",
country: "Brazil",
host: "velocimetro-bsb.virtua.com.br:8080",
id: 15014,
lat: -15.781,
long: -47.9196,
name: "Brasilia",
sponsor: "Claro net vírtua",
url: "http://velocimetro-bsb.virtua.com.br:8080/speedtest/upload.php"
}
first = Enum.at(Speedtest.Server.fetch_servers(Speedtest.Server.default_url()), 0)
assert(first == expected)
end
end
| 26 | 86 | 0.646853 |
084d01caf31b67798dee6d40fa504fa2a239f0fe | 2,452 | exs | Elixir | test/red_potion/web/controllers/counter_controller_test.exs | mvdwg/red_potion | 3026b13d80d9157a598b08e959beb5e56c6f9ff9 | [
"MIT"
] | null | null | null | test/red_potion/web/controllers/counter_controller_test.exs | mvdwg/red_potion | 3026b13d80d9157a598b08e959beb5e56c6f9ff9 | [
"MIT"
] | null | null | null | test/red_potion/web/controllers/counter_controller_test.exs | mvdwg/red_potion | 3026b13d80d9157a598b08e959beb5e56c6f9ff9 | [
"MIT"
] | null | null | null | defmodule RedPotion.Web.CounterControllerTest do
use RedPotion.Web.ConnCase
alias RedPotion.Artifacts
@create_attrs %{name: "some name"}
@update_attrs %{name: "some updated name"}
@invalid_attrs %{name: nil}
def fixture(:counter) do
{:ok, counter} = Artifacts.create_counter(@create_attrs)
counter
end
test "lists all entries on index", %{conn: conn} do
conn = get conn, counter_path(conn, :index)
assert html_response(conn, 200) =~ "Listing Counters"
end
test "renders form for new counters", %{conn: conn} do
conn = get conn, counter_path(conn, :new)
assert html_response(conn, 200) =~ "New Counter"
end
test "creates counter and redirects to show when data is valid", %{conn: conn} do
conn = post conn, counter_path(conn, :create), counter: @create_attrs
assert %{id: id} = redirected_params(conn)
assert redirected_to(conn) == counter_path(conn, :show, id)
conn = get conn, counter_path(conn, :show, id)
assert html_response(conn, 200) =~ "Show Counter"
end
test "does not create counter and renders errors when data is invalid", %{conn: conn} do
conn = post conn, counter_path(conn, :create), counter: @invalid_attrs
assert html_response(conn, 200) =~ "New Counter"
end
test "renders form for editing chosen counter", %{conn: conn} do
counter = fixture(:counter)
conn = get conn, counter_path(conn, :edit, counter)
assert html_response(conn, 200) =~ "Edit Counter"
end
test "updates chosen counter and redirects when data is valid", %{conn: conn} do
counter = fixture(:counter)
conn = put conn, counter_path(conn, :update, counter), counter: @update_attrs
assert redirected_to(conn) == counter_path(conn, :show, counter)
conn = get conn, counter_path(conn, :show, counter)
assert html_response(conn, 200) =~ "some updated name"
end
test "does not update chosen counter and renders errors when data is invalid", %{conn: conn} do
counter = fixture(:counter)
conn = put conn, counter_path(conn, :update, counter), counter: @invalid_attrs
assert html_response(conn, 200) =~ "Edit Counter"
end
test "deletes chosen counter", %{conn: conn} do
counter = fixture(:counter)
conn = delete conn, counter_path(conn, :delete, counter)
assert redirected_to(conn) == counter_path(conn, :index)
assert_error_sent 404, fn ->
get conn, counter_path(conn, :show, counter)
end
end
end
| 35.028571 | 97 | 0.694127 |
084d028320ca331ee93535f1bae5a42c110c70a3 | 371 | ex | Elixir | lib/delivery_web/controllers/items_controller.ex | gjlbro/testdelivery | 3d667cca67b3a561d7395673b6b968523ed3270a | [
"MIT"
] | null | null | null | lib/delivery_web/controllers/items_controller.ex | gjlbro/testdelivery | 3d667cca67b3a561d7395673b6b968523ed3270a | [
"MIT"
] | null | null | null | lib/delivery_web/controllers/items_controller.ex | gjlbro/testdelivery | 3d667cca67b3a561d7395673b6b968523ed3270a | [
"MIT"
] | null | null | null | defmodule DeliveryWeb.ItemsController do
use DeliveryWeb, :controller
alias Delivery.Item
alias DeliveryWeb.FallbackController
action_fallback FallbackController
def create(conn, params) do
with {:ok, %Item{} = item} <- Delivery.create_item(params) do
conn
|> put_status(:created)
|> render("create.json", item: item)
end
end
end
| 21.823529 | 65 | 0.703504 |
084d4fd5f4f27a107af9e6fbfe44b50c026592df | 1,047 | ex | Elixir | apps/fw/lib/fw/application.ex | paulanthonywilson/mcam | df9c5aaae00b568749dff22613636f5cb92f905a | [
"MIT"
] | null | null | null | apps/fw/lib/fw/application.ex | paulanthonywilson/mcam | df9c5aaae00b568749dff22613636f5cb92f905a | [
"MIT"
] | 8 | 2020-11-16T09:59:12.000Z | 2020-11-16T10:13:07.000Z | apps/fw/lib/fw/application.ex | paulanthonywilson/mcam | df9c5aaae00b568749dff22613636f5cb92f905a | [
"MIT"
] | null | null | null | defmodule Fw.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
def start(_type, _args) do
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Fw.Supervisor]
children =
[
# Children for all targets
# Starts a worker by calling: Fw.Worker.start_link(arg)
# {Fw.Worker, arg},
] ++ children(target())
Supervisor.start_link(children, opts)
end
# List all child processes to be supervised
def children(:host) do
[
# Children that only run on the host
# Starts a worker by calling: Fw.Worker.start_link(arg)
# {Fw.Worker, arg},
]
end
def children(_target) do
[
# Children for all targets except host
# Starts a worker by calling: Fw.Worker.start_link(arg)
# {Fw.Worker, arg},
]
end
def target() do
Application.get_env(:fw, :target)
end
end
| 23.795455 | 63 | 0.647564 |
084d6acba96b35c42a45679a168684a4532c5617 | 175 | exs | Elixir | priv/test/migrations/20171031174537_create_users.exs | mtdurling/guardian_trackable | 9f9bac8ff389a90738b50a135a78d96d9deb6281 | [
"MIT"
] | 21 | 2017-11-06T11:20:24.000Z | 2021-10-03T11:44:58.000Z | priv/test/migrations/20171031174537_create_users.exs | mtdurling/guardian_trackable | 9f9bac8ff389a90738b50a135a78d96d9deb6281 | [
"MIT"
] | 4 | 2018-01-10T02:32:15.000Z | 2019-04-25T17:33:24.000Z | priv/test/migrations/20171031174537_create_users.exs | mtdurling/guardian_trackable | 9f9bac8ff389a90738b50a135a78d96d9deb6281 | [
"MIT"
] | 4 | 2018-01-12T01:16:37.000Z | 2020-04-08T14:23:24.000Z | defmodule GuardianTrackable.Dummy.Repo.Migrations.CreateUsers do
use Ecto.Migration
  # Creates a minimal `users` table with a single string `email` column.
  # `change/0` is reversible: migrating runs CREATE TABLE, rolling back
  # drops it.
  def change do
    create table(:users) do
      add :email, :string
    end
  end
| 17.5 | 64 | 0.72 |
084d780cb07e59177c6ac97b3a4631bb7a7255e5 | 5,469 | exs | Elixir | test/swoosh/adapters/amazonses_test.exs | Deepwalker/swoosh | 5970e1a20e5d787347ce825f4803e972ddc30095 | [
"MIT"
] | 1 | 2020-12-22T19:28:30.000Z | 2020-12-22T19:28:30.000Z | test/swoosh/adapters/amazonses_test.exs | Deepwalker/swoosh | 5970e1a20e5d787347ce825f4803e972ddc30095 | [
"MIT"
] | 21 | 2021-03-08T10:04:20.000Z | 2022-03-23T10:20:17.000Z | test/swoosh/adapters/amazonses_test.exs | nash-io/swoosh | 05c8676890da07403225c302f9a069fc7d221330 | [
"MIT"
] | 1 | 2019-11-05T19:06:55.000Z | 2019-11-05T19:06:55.000Z | defmodule Swoosh.Adapters.AmazonSESTest do
use Swoosh.AdapterCase, async: true
import Swoosh.Email
alias Swoosh.Adapters.AmazonSES
  # Canned SES success XML served by the Bypass endpoints below; the
  # adapter is expected to parse out MessageId and RequestId.
  @success_response """
  <SendEmailResponse>
    <SendEmailResult>
      <MessageId>messageId</MessageId>
    </SendEmailResult>
    <ResponseMetadata>
      <RequestId>requestId</RequestId>
    </ResponseMetadata>
  </SendEmailResponse>
  """
  # Canned SES error XML for the failure-path test; the adapter is
  # expected to surface Code and Message.
  @error_response """
  <ErrorResponse>
    <Error>
      <Type>ErrorType</Type>
      <Code>ErrorCode</Code>
      <Message>Error Message</Message>
    </Error>
    <RequestId>a97266f7-b062-11e7-b126-6b0f7a9b3379</RequestId>
  </ErrorResponse>
  """
  # Shared fixtures for the whole module: a baseline adapter config (the
  # :host key is injected per-test in `setup`) and a fully-populated email
  # reused by several tests.
  setup_all do
    config = [
      region: "us-east-1",
      access_key: "test_access",
      secret: "test_secret"
    ]
    valid_email =
      new()
      |> from("guybrush.threepwood@pirates.grog")
      |> to("elaine.marley@triisland.gov")
      |> subject("Mighty Pirate Newsletter")
      |> text_body("Hello")
      |> html_body("<h1>Hello</h1>")
    {:ok, valid_email: valid_email, config: config}
  end
  # Per-test: start a fresh Bypass server (a fake SES HTTP endpoint) and
  # point the adapter config's :host at it.
  setup context do
    bypass = Bypass.open
    config = Keyword.put(context[:config], :host, "http://localhost:#{bypass.port}")
    %{bypass: bypass, config: config}
  end
  # Happy path: deliver/2 POSTs a SendRawEmail action to "/" and maps the
  # success XML into {:ok, %{id, request_id}}.
  test "a sent email results in :ok", %{bypass: bypass, config: config, valid_email: email} do
    Bypass.expect bypass, fn conn ->
      conn = parse(conn)
      expected_path = "/"
      body_params = %{
        "Action" => "SendRawEmail",
        "Version" => "2010-12-01",
      }
      assert body_params["Action"] == conn.body_params["Action"]
      assert body_params["Version"] == conn.body_params["Version"]
      assert expected_path == conn.request_path
      assert "POST" == conn.method
      Plug.Conn.resp(conn, 200, @success_response)
    end
    assert AmazonSES.deliver(email, config) == {:ok, %{id: "messageId", request_id: "requestId"}}
  end
  # Provider options (:tags and :configuration_set_name) must be encoded
  # into the SES request body using the Tags.member.N.* naming scheme.
  test "a sent email with tags results in :ok", %{bypass: bypass, config: config} do
    email =
      new()
      |> from("guybrush.threepwood@pirates.grog")
      |> to("elaine.marley@triisland.gov")
      |> subject("Mighty Pirate Newsletter")
      |> text_body("Hello")
      |> html_body("<h1>Hello</h1>")
      |> put_provider_option(:tags, [%{name: "name1", value: "test1"}])
      |> put_provider_option(:configuration_set_name, "configuration_set_name1")
    Bypass.expect bypass, fn conn ->
      conn = parse(conn)
      expected_path = "/"
      body_params = %{
        "Action" => "SendRawEmail",
        "Version" => "2010-12-01",
        "ConfigurationSetName" => "configuration_set_name1",
        "Tags.member.1.Name" => "name1",
        "Tags.member.1.Value" => "test1"
      }
      assert body_params["Action"] == conn.body_params["Action"]
      assert body_params["Version"] == conn.body_params["Version"]
      assert body_params["ConfigurationSetName"] == conn.body_params["ConfigurationSetName"]
      assert body_params["Tags.member.1.Name"] == conn.body_params["Tags.member.1.Name"]
      assert body_params["Tags.member.1.Value"] == conn.body_params["Tags.member.1.Value"]
      assert expected_path == conn.request_path
      assert "POST" == conn.method
      Plug.Conn.resp(conn, 200, @success_response)
    end
    assert AmazonSES.deliver(email, config) == {:ok, %{id: "messageId", request_id: "requestId"}}
  end
  # An email using every recipient field (named/bare to, cc, bcc) still
  # produces a single SendRawEmail request and a success result.
  test "delivery/1 with all fields returns :ok", %{bypass: bypass, config: config} do
    email =
      new()
      |> from({"G Threepwood", "guybrush.threepwood@pirates.grog"})
      |> to({"Murry The Skull", "murry@lechucksship.gov"})
      |> to("elaine.marley@triisland.gov")
      |> cc({"Cannibals", "canni723@monkeyisland.com"})
      |> cc("carla@sworddojo.org")
      |> bcc({"LeChuck", "lechuck@underworld.com"})
      |> bcc("stan@coolshirt.com")
      |> subject("Mighty Pirate Newsletter")
      |> text_body("Hello")
      |> html_body("<h1>Hello</h1>")
    Bypass.expect bypass, fn conn ->
      conn = parse(conn)
      expected_path = "/"
      body_params = %{
        "Action" => "SendRawEmail",
        "Version" => "2010-12-01",
      }
      assert body_params["Action"] == conn.body_params["Action"]
      assert body_params["Version"] == conn.body_params["Version"]
      assert expected_path == conn.request_path
      assert "POST" == conn.method
      Plug.Conn.resp(conn, 200, @success_response)
    end
    assert AmazonSES.deliver(email, config) == {:ok, %{id: "messageId", request_id: "requestId"}}
  end
  # A 500 response carrying SES error XML is parsed into
  # {:error, %{code, message}}.
  test "a sent email that returns a api error parses correctly", %{bypass: bypass, config: config, valid_email: email} do
    Bypass.expect bypass, fn conn ->
      conn = parse(conn)
      expected_path = "/"
      assert expected_path == conn.request_path
      assert "POST" == conn.method
      Plug.Conn.resp(conn, 500, @error_response)
    end
    assert AmazonSES.deliver(email, config) == {:error, %{code: "ErrorCode", message: "Error Message"}}
  end
  # A config containing :secret, :access_key and :region validates cleanly.
  test "validate_config/1 with valid config", %{config: config} do
    assert AmazonSES.validate_config(config) == :ok
  end
  # Missing required keys raise an ArgumentError listing all of them.
  test "validate_config/1 with invalid config" do
    assert_raise ArgumentError, """
    expected [:secret, :access_key, :region] to be set, got: []
    """, fn ->
      AmazonSES.validate_config([])
    end
  end
end
| 32.360947 | 122 | 0.610715 |
084d9b527b794decfe8bdcfc6da4f3d9540319ac | 532 | exs | Elixir | priv/repo/migrations/20181009210537_create_files.exs | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 928 | 2018-04-03T16:18:11.000Z | 2019-09-09T17:59:55.000Z | priv/repo/migrations/20181009210537_create_files.exs | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 74 | 2018-04-03T00:46:50.000Z | 2019-03-10T18:57:27.000Z | priv/repo/migrations/20181009210537_create_files.exs | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 89 | 2018-04-03T17:33:20.000Z | 2019-08-19T03:40:20.000Z | defmodule Level.Repo.Migrations.CreateFiles do
use Ecto.Migration
  # Creates the `files` table with a binary_id primary key and required
  # foreign keys to spaces and space_users. `content_type` is the only
  # nullable data column. Reversible via `change/0`.
  def change do
    create table(:files, primary_key: false) do
      add :id, :binary_id, primary_key: true
      add :space_id, references(:spaces, on_delete: :nothing, type: :binary_id), null: false
      add :space_user_id, references(:space_users, on_delete: :nothing, type: :binary_id),
        null: false
      add :filename, :text, null: false
      add :content_type, :text
      add :size, :integer, null: false
      timestamps()
    end
  end
end
| 26.6 | 92 | 0.669173 |
084da4ec6abddd493f03abff967a55df1d067dae | 733 | exs | Elixir | test/remove_tags_test.exs | fmcgeough/ex_aws_cloud_trail | 30346e5c11acf9dc5d133635b8025fe659e9f957 | [
"MIT"
] | null | null | null | test/remove_tags_test.exs | fmcgeough/ex_aws_cloud_trail | 30346e5c11acf9dc5d133635b8025fe659e9f957 | [
"MIT"
] | null | null | null | test/remove_tags_test.exs | fmcgeough/ex_aws_cloud_trail | 30346e5c11acf9dc5d133635b8025fe659e9f957 | [
"MIT"
] | null | null | null | defmodule RemoveTagsTest do
use ExUnit.Case
  # remove_tags/2 builds a CloudTrail RemoveTags operation: JSON 1.1
  # headers plus a payload with the trail ARN and capitalized tag keys.
  test "remove tags from a trail" do
    arn = "arn:aws:cloudtrail:us-east-2:123456789012:trail/MyTrail"
    tags = [%{key: "Key", value: "Value"}, %{key: "Key2", value: "Value2"}]
    op = ExAws.CloudTrail.remove_tags(arn, tags)
    assert op.headers == [
             {"x-amz-target", "CloudTrail_20131101.RemoveTags"},
             {"content-type", "application/x-amz-json-1.1"}
           ]
    assert op.data == %{
             "ResourceId" => "arn:aws:cloudtrail:us-east-2:123456789012:trail/MyTrail",
             "TagsList" => [
               %{"Key" => "Key", "Value" => "Value"},
               %{"Key" => "Key2", "Value" => "Value2"}
             ]
           }
  end
| 31.869565 | 87 | 0.53206 |
084dbfb6417b8b55e0ff00860a02dfdc900bd612 | 790 | ex | Elixir | web/router.ex | kexoth/nlb-pipeline | 77d2c79b58e03f0326608162e9cee768362e2076 | [
"MIT"
] | 6 | 2017-06-13T19:35:05.000Z | 2020-05-05T06:50:34.000Z | web/router.ex | kexoth/nlb-pipeline | 77d2c79b58e03f0326608162e9cee768362e2076 | [
"MIT"
] | null | null | null | web/router.ex | kexoth/nlb-pipeline | 77d2c79b58e03f0326608162e9cee768362e2076 | [
"MIT"
] | 1 | 2021-09-27T11:58:11.000Z | 2021-09-27T11:58:11.000Z | defmodule NlbPipeline.Router do
use NlbPipeline.Web, :router
  # Standard browser pipeline: session, flash, CSRF protection and
  # security headers for HTML requests.
  pipeline :browser do
    plug :accepts, ["html"]
    plug :fetch_session
    plug :fetch_flash
    plug :protect_from_forgery
    plug :put_secure_browser_headers
  end
  # JSON-only pipeline; currently unused by any scope below.
  pipeline :api do
    plug :accepts, ["json"]
  end
  # All routes are served through the browser pipeline. "/" accepts both
  # GET and POST into the same index action.
  scope "/", NlbPipeline do
    pipe_through :browser # Use the default browser stack
    post "/", PageController, :index
    get "/", PageController, :index
    get "/logout", PageController, :logout
    get "/support/:support_id", PageController, :support
    get "/risk_management/:guardian_id", PageController, :risk_management
    get "/dashboard", PageController, :dashboard
    resources "/events", EventController
  end
# Other scopes may use custom stacks.
# scope "/api", NlbPipeline do
# pipe_through :api
# end
end
| 23.939394 | 71 | 0.73038 |
084de6bb5f5264599fbfef2a3087a3761042af8b | 610 | ex | Elixir | lib/graph.ex | enter-haken/brain | 828a86a383595791229fbde1564a170324f6ff8f | [
"MIT"
] | 5 | 2021-04-14T08:52:45.000Z | 2021-12-26T13:55:50.000Z | lib/graph.ex | enter-haken/brain | 828a86a383595791229fbde1564a170324f6ff8f | [
"MIT"
] | null | null | null | lib/graph.ex | enter-haken/brain | 828a86a383595791229fbde1564a170324f6ff8f | [
"MIT"
] | null | null | null | defmodule Brain.Graph do
alias Brain.{Memory, Link}
def get(memories, links) do
dot_memories =
memories
|> Enum.map(fn %Memory{dot_node: dot} ->
dot
end)
|> Enum.join("\n")
dot_links =
links
|> Enum.map(fn %Link{dot: dot} ->
dot
end)
|> Enum.join("\n")
~s(
graph {
node [fontname="helvetica" shape=none];
graph [fontname="helvetica"];
edge [fontname="helvetica"];
splines=curved;
style=filled;
K=1.5;
#{dot_links}
#{dot_memories}
}
)
end
end
| 16.944444 | 47 | 0.498361 |
084e1e5ebd5a831dfb3b4d101c8cf7a5cdacdce7 | 2,991 | ex | Elixir | clients/service_usage/lib/google_api/service_usage/v1/model/usage_rule.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/service_usage/lib/google_api/service_usage/v1/model/usage_rule.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/service_usage/lib/google_api/service_usage/v1/model/usage_rule.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceUsage.V1.Model.UsageRule do
@moduledoc """
Usage configuration rules for the service.
NOTE: Under development.
Use this rule to configure unregistered calls for the service. Unregistered
calls are calls that do not contain consumer project identity.
(Example: calls that do not contain an API key).
By default, API methods do not allow unregistered calls, and each method call
must be identified by a consumer project identity. Use this rule to
allow/disallow unregistered calls.
Example of an API that wants to allow unregistered calls for entire service.
usage:
rules:
- selector: "*"
allow_unregistered_calls: true
Example of a method that wants to allow unregistered calls.
usage:
rules:
- selector: "google.example.library.v1.LibraryService.CreateBook"
allow_unregistered_calls: true
## Attributes
* `allowUnregisteredCalls` (*type:* `boolean()`, *default:* `nil`) - If true, the selected method allows unregistered calls, e.g. calls
that don't identify any user or application.
* `selector` (*type:* `String.t`, *default:* `nil`) - Selects the methods to which this rule applies. Use '*' to indicate all
methods in all APIs.
Refer to selector for syntax details.
* `skipServiceControl` (*type:* `boolean()`, *default:* `nil`) - If true, the selected method should skip service control and the control
plane features, such as quota and billing, will not be available.
This flag is used by Google Cloud Endpoints to bypass checks for internal
methods, such as service health check methods.
"""
use GoogleApi.Gax.ModelBase
  # Struct type for a service usage rule; all attributes default to nil.
  @type t :: %__MODULE__{
          :allowUnregisteredCalls => boolean(),
          :selector => String.t(),
          :skipServiceControl => boolean()
        }

  # Field declarations consumed by GoogleApi.Gax.ModelBase for
  # encode/decode support.
  field(:allowUnregisteredCalls)
  field(:selector)
  field(:skipServiceControl)
end
# Delegates JSON decoding to the generated model's decode/2 so embedded
# fields are materialized correctly.
defimpl Poison.Decoder, for: GoogleApi.ServiceUsage.V1.Model.UsageRule do
  def decode(value, options) do
    GoogleApi.ServiceUsage.V1.Model.UsageRule.decode(value, options)
  end
end
# Encodes the model via the shared Gax base encoder.
defimpl Poison.Encoder, for: GoogleApi.ServiceUsage.V1.Model.UsageRule do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 35.607143 | 141 | 0.721163 |
084e4559f423725233224cad675c458a25178da6 | 267 | exs | Elixir | config/test.exs | PavelTyk/bepaid_ex | f734ce9f0ba5473bfdd3b793e34a8ce21975fb0b | [
"MIT"
] | 1 | 2020-12-17T03:51:27.000Z | 2020-12-17T03:51:27.000Z | config/test.exs | PavelTyk/bepaid_ex | f734ce9f0ba5473bfdd3b793e34a8ce21975fb0b | [
"MIT"
] | null | null | null | config/test.exs | PavelTyk/bepaid_ex | f734ce9f0ba5473bfdd3b793e34a8ce21975fb0b | [
"MIT"
] | 1 | 2019-05-03T13:59:08.000Z | 2019-05-03T13:59:08.000Z | use Mix.Config
# Test credentials for the bePaid client; not real secrets.
config :bepaid_ex,
  shop_id: "1",
  key_secret: "BEPAID_KEY_SECRET"

# ExVCR cassette locations; Authorization headers are stripped from
# recorded requests.
config :exvcr,
  vcr_cassette_library_dir: "test/fixture/vcr_cassettes",
  custom_cassette_library_dir: "test/fixture/custom_cassettes",
  filter_request_headers: ["Authorization"]
| 24.272727 | 63 | 0.786517 |
084e5db8bd9e5e7b403f97402c66bf161b3d4801 | 291 | ex | Elixir | web/channels/result_channel.ex | Namuraid/backend | 04a10248bfeb156eb291207931621b40585d8f7e | [
"MIT"
] | null | null | null | web/channels/result_channel.ex | Namuraid/backend | 04a10248bfeb156eb291207931621b40585d8f7e | [
"MIT"
] | null | null | null | web/channels/result_channel.ex | Namuraid/backend | 04a10248bfeb156eb291207931621b40585d8f7e | [
"MIT"
] | null | null | null | defmodule Namuraid.ResultChannel do
use Phoenix.Channel
  # On join, reply with the current result CSV held in Namuraid.State
  # (empty list when nothing has been stored yet).
  def join("result", _params, socket) do
    {:ok, %{
      "result" => Namuraid.State.get(:resultcsv, [])
    }, socket}
  end
  # Pushes a new result CSV to every subscriber of the "result" topic.
  def update(csv) do
    Namuraid.Endpoint.broadcast("result", "result", %{"result" => csv})
  end
end
| 20.785714 | 71 | 0.635739 |
084e9841872a804922e21e343ac7c81253fd083f | 542 | exs | Elixir | server/cardinal/priv/repo/migrations/20210317195951_create_anime_table.exs | llucasreis/cardinal | 714d89d37ef0fa305d78622ff7228864bf382035 | [
"MIT"
] | null | null | null | server/cardinal/priv/repo/migrations/20210317195951_create_anime_table.exs | llucasreis/cardinal | 714d89d37ef0fa305d78622ff7228864bf382035 | [
"MIT"
] | null | null | null | server/cardinal/priv/repo/migrations/20210317195951_create_anime_table.exs | llucasreis/cardinal | 714d89d37ef0fa305d78622ff7228864bf382035 | [
"MIT"
] | null | null | null | defmodule Cardinal.Repo.Migrations.CreateAnimeTable do
use Ecto.Migration
  # Creates the `animes` table with a UUID primary key; `kitsu_id` and
  # `slug` are uniquely indexed so each Kitsu record maps to one row.
  def change do
    create table(:animes, primary_key: false) do
      add :id, :uuid, primary_key: true
      add :kitsu_id, :string
      add :title, :string
      add :slug, :string
      add :description, :text
      add :episodes, :integer
      add :genres, {:array, :string}
      add :image_url, :string
      add :status, :string
      timestamps()
    end
    create unique_index(:animes, [:kitsu_id])
    create unique_index(:animes, [:slug])
  end
end
| 24.636364 | 54 | 0.640221 |
084ec220cf9092c5f2e3caa01d2cba0c8a2c816c | 875 | exs | Elixir | apps/core/test/pubsub/cache/repositories_test.exs | michaeljguarino/forge | 50ee583ecb4aad5dee4ef08fce29a8eaed1a0824 | [
"Apache-2.0"
] | 59 | 2021-09-16T19:29:39.000Z | 2022-03-31T20:44:24.000Z | apps/core/test/pubsub/cache/repositories_test.exs | svilenkov/plural | ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026 | [
"Apache-2.0"
] | 111 | 2021-08-15T09:56:37.000Z | 2022-03-31T23:59:32.000Z | apps/core/test/pubsub/cache/repositories_test.exs | svilenkov/plural | ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026 | [
"Apache-2.0"
] | 4 | 2021-12-13T09:43:01.000Z | 2022-03-29T18:08:44.000Z | defmodule Core.PubSub.Consumers.Cache.RepositoriesTest do
use Core.SchemaCase, async: true
alias Core.PubSub
alias Core.PubSub.Consumers.Cache
  # Creating an installation must invalidate the user's cached
  # {:has_installations, user_id} flag.
  describe "InstallationCreated" do
    test "it will wipe a has_installations record" do
      %{user: user} = inst = insert(:installation)
      Core.Cache.put({:has_installations, user.id}, true)
      event = %PubSub.InstallationCreated{item: inst}
      Cache.handle_event(event)
      refute Core.Cache.get({:has_installations, user.id})
    end
  end
  # Deleting an installation invalidates the same cache entry.
  describe "InstallationDeleted" do
    test "it will wipe a has_installations record" do
      %{user: user} = inst = insert(:installation)
      Core.Cache.put({:has_installations, user.id}, true)
      event = %PubSub.InstallationDeleted{item: inst}
      Cache.handle_event(event)
      refute Core.Cache.get({:has_installations, user.id})
    end
  end
end
| 29.166667 | 58 | 0.701714 |
084ecdeca44d4b1a0a9191afadeb9517223d2647 | 31 | ex | Elixir | lib/history/job.ex | fremantle-industries/history | a8a33744279ff4ca62620785f9a2e9c0c99e4de7 | [
"MIT"
] | 20 | 2021-08-06T01:09:48.000Z | 2022-03-28T18:44:56.000Z | lib/history/job.ex | fremantle-industries/history | a8a33744279ff4ca62620785f9a2e9c0c99e4de7 | [
"MIT"
] | 13 | 2021-08-21T21:17:02.000Z | 2022-03-27T06:33:51.000Z | lib/history/job.ex | fremantle-industries/history | a8a33744279ff4ca62620785f9a2e9c0c99e4de7 | [
"MIT"
] | 2 | 2021-09-23T11:31:59.000Z | 2022-01-09T16:19:35.000Z | defprotocol History.Job do
end
| 10.333333 | 26 | 0.83871 |
084efb1fb868e0c0e68f826bf231e38cdd2fbf93 | 27 | ex | Elixir | machine_learning_toolkit/apps/utilities/lib/utilities.ex | gguimond/elixir | 415a7ed10fb44d84089ff89fb651b765b5f5e53f | [
"MIT"
] | 1 | 2019-03-28T09:08:16.000Z | 2019-03-28T09:08:16.000Z | machine_learning_toolkit/apps/utilities/lib/utilities.ex | gguimond/elixir | 415a7ed10fb44d84089ff89fb651b765b5f5e53f | [
"MIT"
] | null | null | null | machine_learning_toolkit/apps/utilities/lib/utilities.ex | gguimond/elixir | 415a7ed10fb44d84089ff89fb651b765b5f5e53f | [
"MIT"
] | null | null | null | defmodule Utilities do
end
| 9 | 22 | 0.851852 |
084f1e61106a526fc0c6ca802292e3ee00ade7b8 | 941 | ex | Elixir | test/support/channel_case.ex | richeterre/jumubase-phoenix | 7584f890af117d496971b5284bf9de798e22266f | [
"MIT"
] | 2 | 2019-01-20T07:03:30.000Z | 2019-04-11T10:20:14.000Z | test/support/channel_case.ex | richeterre/jumubase-phoenix | 7584f890af117d496971b5284bf9de798e22266f | [
"MIT"
] | 6 | 2018-09-20T05:52:14.000Z | 2019-04-23T19:27:39.000Z | test/support/channel_case.ex | richeterre/jumubase-phoenix | 7584f890af117d496971b5284bf9de798e22266f | [
"MIT"
] | null | null | null | defmodule JumubaseWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common datastructures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
  # Injected into every test module that does `use JumubaseWeb.ChannelCase`.
  using do
    quote do
      # Import conveniences for testing with channels
      use Phoenix.ChannelTest

      # The default endpoint for testing
      @endpoint JumubaseWeb.Endpoint
    end
  end

  # Checks out a sandboxed DB connection per test; non-async tests share
  # the connection with spawned processes via :shared mode.
  setup tags do
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Jumubase.Repo)

    unless tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(Jumubase.Repo, {:shared, self()})
    end

    :ok
  end
end
| 24.763158 | 70 | 0.717322 |
084f506365e8948b8fca5f08157dd51ac5da639d | 681 | ex | Elixir | lib/elixero/core_api/models/prepayments.ex | philals/elixero | fd75fe4a6f0a93b1d2ff94adbb307d20f014d458 | [
"MIT"
] | 84 | 2016-11-09T01:15:17.000Z | 2022-01-06T02:55:35.000Z | lib/elixero/core_api/models/prepayments.ex | philals/elixero | fd75fe4a6f0a93b1d2ff94adbb307d20f014d458 | [
"MIT"
] | 14 | 2017-03-10T04:16:07.000Z | 2021-11-10T16:39:19.000Z | lib/elixero/core_api/models/prepayments.ex | philals/elixero | fd75fe4a6f0a93b1d2ff94adbb307d20f014d458 | [
"MIT"
] | 18 | 2017-03-11T21:12:15.000Z | 2022-02-22T20:07:10.000Z | defmodule EliXero.CoreApi.Models.Prepayments do
use Ecto.Schema
import Ecto.Changeset
    # Ecto metadata and the autogenerated id are excluded from JSON output.
    @derive {Poison.Encoder, except: [:__meta__, :id]}

    # Wrapper schema holding the embedded list of prepayments returned by
    # the Xero API.
    schema "prepayments" do
        embeds_many :Prepayments, EliXero.CoreApi.Models.Prepayments.Prepayment
    end

    # Builds a struct from a decoded API response map, casting only the
    # embedded :Prepayments list.
    def from_map(data) do
        %__MODULE__{}
        |> cast(data, [])
        |> cast_embed(:Prepayments)
        |> apply_changes
    end

    # Validation-exception payloads nest the records under "Elements";
    # remap them to :Prepayments before casting.
    def from_validation_exception(data) do
        remapped_data = %{:Prepayments => data."Elements"}

        %__MODULE__{}
        |> cast(remapped_data, [])
        |> cast_embed(:Prepayments)
        |> apply_changes
    end
end | 26.192308 | 80 | 0.591777 |
084f53915b115cf577d083f35909d36c0bf22b07 | 766 | ex | Elixir | lib/lightbridge/mqtt_supervisor.ex | jamesduncombe/lightbridge | c6b5fd54f5495ae12fefc0174ca95ebe2f69a1ce | [
"MIT"
] | null | null | null | lib/lightbridge/mqtt_supervisor.ex | jamesduncombe/lightbridge | c6b5fd54f5495ae12fefc0174ca95ebe2f69a1ce | [
"MIT"
] | null | null | null | lib/lightbridge/mqtt_supervisor.ex | jamesduncombe/lightbridge | c6b5fd54f5495ae12fefc0174ca95ebe2f69a1ce | [
"MIT"
] | null | null | null | defmodule Lightbridge.MqttSupervisor do
@moduledoc """
Handles connection to MQTT broker.
"""
use Supervisor
alias Lightbridge.MqttHandler
import Lightbridge.Config, only: [fetch: 1]
  # Starts this supervisor, registered under the module name.
  def start_link(opts) do
    Supervisor.start_link(__MODULE__, opts, name: __MODULE__)
  end

  # Supervises a single Tortoise MQTT connection. All broker settings are
  # pulled from Lightbridge.Config at init time; incoming messages on the
  # configured topic (QoS 0) are dispatched to MqttHandler.
  def init(_args) do
    children = [
      {Tortoise.Connection,
       [
         client_id: fetch(:mqtt_client_id),
         user_name: fetch(:mqtt_username),
         password: fetch(:mqtt_password),
         server: {Tortoise.Transport.Tcp, host: fetch(:mqtt_host), port: fetch(:mqtt_port)},
         handler: {MqttHandler, []},
         subscriptions: [{fetch(:mqtt_topic), _qos = 0}]
       ]}
    ]

    Supervisor.init(children, strategy: :one_for_one)
  end
end
| 23.9375 | 92 | 0.650131 |
084f8fbcc72f8d36895dc22e741c0f86e213162d | 6,692 | ex | Elixir | lib/core.ex | andersonmcook/prom_ex | 4913b15be186db29ee9fe800bf6baf6807e1902d | [
"MIT"
] | null | null | null | lib/core.ex | andersonmcook/prom_ex | 4913b15be186db29ee9fe800bf6baf6807e1902d | [
"MIT"
] | null | null | null | lib/core.ex | andersonmcook/prom_ex | 4913b15be186db29ee9fe800bf6baf6807e1902d | [
"MIT"
] | null | null | null | defmodule PromEx.TelemetryMetricsPrometheus.Core do
@moduledoc """
Prometheus Reporter for [`Telemetry.Metrics`](https://github.com/beam-telemetry/telemetry_metrics) definitions.
Provide a list of metric definitions to the `child_spec/1` function. It's recommended to
add this to your supervision tree.
def start(_type, _args) do
# List all child processes to be supervised
children = [
{PromEx.TelemetryMetricsPrometheus.Core, [
metrics: [
counter("http.request.count"),
sum("http.request.payload_size", unit: :byte),
last_value("vm.memory.total", unit: :byte)
]
]}
]
opts = [strategy: :one_for_one, name: ExampleApp.Supervisor]
Supervisor.start_link(children, opts)
end
Note that aggregations for distributions (histogram) only occur at scrape time.
These aggregations only have to process events that have occurred since the last
scrape, so it's recommended at this time to keep an eye on scrape durations if
you're reporting a large number of distributions or you have a high tag cardinality.
## Telemetry.Metrics to Prometheus Equivalents
Metric types:
* Counter - Counter
* Distribution - Histogram
* LastValue - Gauge
* Sum - Counter
* Summary - Summary (Not supported)
### Units
Prometheus recommends the usage of base units for compatibility - [Base Units](https://prometheus.io/docs/practices/naming/#base-units).
This is simple to do with `:telemetry` and `Telemetry.Metrics` as all memory
related measurements in the BEAM are reported in bytes and Metrics provides
automatic time unit conversions.
Note that measurement unit should used as part of the reported name in the case of
histograms and gauges to Prometheus. As such, it is important to explicitly define
the unit of measure for these types when the unit is time or memory related.
It is suggested to not mix units, e.g. seconds with milliseconds.
It is required to define your buckets according to the end unit translation
since this measurements are converted at the time of handling the event, prior
to bucketing.
#### Memory
Report memory as `:byte`.
#### Time
Report durations as `:second`. The BEAM and `:telemetry` events use `:native` time
units. Converting to seconds is as simple as adding the conversion tuple for
the unit - `{:native, :second}`
### Naming
`Telemetry.Metrics` definition names do not translate easily to Prometheus naming
conventions. By default, the name provided when creating your definition uses parts
of the provided name to determine what event to listen to and which event measurement
to use.
For example, `"http.request.duration"` results in listening for `[:http, :request]`
events and use `:duration` from the event measurements. Prometheus would recommend
a name of `http_request_duration_seconds` as a good name.
It is therefore recommended to use the name in your definition to reflect the name
you wish to see reported, e.g. `http.request.duration.seconds` or `[:http, :request, :duration, :seconds]` and use the `:event_name` override and `:measurement` options in your definition.
Example:
Metrics.distribution(
"http.request.duration.seconds",
event_name: [:http, :request, :complete],
measurement: :duration,
unit: {:native, :second},
reporter_options: [
buckets: [0.01, 0.025, 0.05, 0.1, 0.2, 0.5, 1]
]
)
The exporter sanitizes names to Prometheus' requirements ([Metric Naming](https://prometheus.io/docs/instrumenting/writing_exporters/#naming)) and joins the event name parts with an underscore.
### Labels
Labels in Prometheus are referred to as `:tags` in `Telemetry.Metrics` - see the docs
for more information on tag usage.
**Important: Each tag + value results in a separate time series. For distributions, this
is further complicated as a time series is created for each bucket plus one for measurements
exceeding the limit of the last bucket - `+Inf`.**
It is recommended, but not required, to abide by Prometheus' best practices regarding labels -
[Label Best Practices](https://prometheus.io/docs/practices/naming/#labels)
### Missing or Invalid Measurements and Tags
If a measurement value is missing or non-numeric, the error is logged at the `debug` level
and the event is not recorded. Events with missing tags are also logged and skipped.
"""
alias Telemetry.Metrics
alias PromEx.TelemetryMetricsPrometheus.Core.{Aggregator, Exporter, Registry}
require Logger
  @type metrics :: [Metrics.t()]

  @type prometheus_option ::
          {:metrics, metrics()}
          | {:name, atom()}

  @type prometheus_options :: [prometheus_option()]

  @doc """
  Reporter's child spec.

  This function allows you to start the reporter under a supervisor like this:

      children = [
        {PromEx.TelemetryMetricsPrometheus.Core, options}
      ]

  See `start_link/1` for options.
  """
  @spec child_spec(prometheus_options()) :: Supervisor.child_spec()
  def child_spec(options) do
    opts = ensure_options(options)

    # Derive a plain atom id from the (possibly :global / :via) name so the
    # supervisor child id is always an atom.
    id =
      case Keyword.get(opts, :name, :prometheus_metrics) do
        name when is_atom(name) -> name
        {:global, name} -> name
        {:via, _, name} -> name
      end

    spec = %{
      id: id,
      start: {Registry, :start_link, [opts]}
    }

    Supervisor.child_spec(spec, [])
  end
  @doc """
  Start the `PromEx.TelemetryMetricsPrometheus.Core.Supervisor`

  Available options:
  * `:name` - name of the reporter instance. Defaults to `:prometheus_metrics`
  * `:metrics` - a list of metrics to track.
  """
  @spec start_link(prometheus_options()) :: GenServer.on_start()
  def start_link(options) do
    # Merge in defaults before handing the options to the Registry.
    opts = ensure_options(options)
    Registry.start_link(opts)
  end
  @doc """
  Returns a metrics scrape in Prometheus exposition format for the given reporter
  name - defaults to `:prometheus_metrics`.
  """
  @spec scrape(name :: atom()) :: String.t()
  def scrape(name \\ :prometheus_metrics) do
    config = Registry.config(name)
    metrics = Registry.metrics(name)

    # Distributions are aggregated lazily, at scrape time, before export.
    :ok = Aggregator.aggregate(metrics, config.aggregates_table_id, config.dist_table_id)

    Aggregator.get_time_series(config.aggregates_table_id)
    |> Exporter.export(metrics)
  end
@spec ensure_options(prometheus_options()) :: prometheus_options()
defp ensure_options(options) do
Keyword.merge(default_options(), options)
end
@spec default_options() :: prometheus_options()
defp default_options() do
[
name: :prometheus_metrics
]
end
end
| 34.142857 | 195 | 0.704423 |
084f9a9b53b8b9200307963e710f162c8a1449f6 | 1,016 | exs | Elixir | test/web/controller/admin/dashboard_controller_test.exs | NatTuck/ex_venture | 7a74d33025a580f1e3e93d3755f22258eb3e9127 | [
"MIT"
] | null | null | null | test/web/controller/admin/dashboard_controller_test.exs | NatTuck/ex_venture | 7a74d33025a580f1e3e93d3755f22258eb3e9127 | [
"MIT"
] | null | null | null | test/web/controller/admin/dashboard_controller_test.exs | NatTuck/ex_venture | 7a74d33025a580f1e3e93d3755f22258eb3e9127 | [
"MIT"
] | null | null | null | defmodule Web.Admin.DashboardControllerTest do
use Web.ConnCase
  # Anonymous access is bounced to the login page.
  test "hitting the dashboard redirects to session", %{conn: conn} do
    conn = get conn, dashboard_path(conn, :index)
    assert redirected_to(conn) == session_path(conn, :new)
  end
  # A signed-in user carrying the "admin" flag sees the dashboard.
  test "user token and an admin allows in", %{conn: conn} do
    user = create_user(%{name: "user", password: "password", flags: ["admin"]})
    character = create_character(user, %{name: "user"})
    user = %{user | characters: [character]}
    conn = conn |> assign(:user, user)
    conn = get conn, dashboard_path(conn, :index)
    assert html_response(conn, 200)
  end
  # A signed-in user without the "admin" flag is redirected to login.
  test "user token and not an admin", %{conn: conn} do
    user = create_user(%{name: "user", password: "password", flags: []})
    character = create_character(user, %{name: "user"})
    user = %{user | characters: [character]}
    conn = conn |> assign(:user, user)
    conn = get conn, dashboard_path(conn, :index)
    assert redirected_to(conn) == session_path(conn, :new)
  end
end
| 35.034483 | 79 | 0.662402 |
084f9db3a8465e650b9c00c4551bb20c7a7a8c47 | 1,627 | ex | Elixir | lib/accent/integrations/integration_manager.ex | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 806 | 2018-04-07T20:40:33.000Z | 2022-03-30T01:39:57.000Z | lib/accent/integrations/integration_manager.ex | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 194 | 2018-04-07T13:49:37.000Z | 2022-03-30T19:58:45.000Z | lib/accent/integrations/integration_manager.ex | doc-ai/accent | e337e16f3658cc0728364f952c0d9c13710ebb06 | [
"BSD-3-Clause"
] | 89 | 2018-04-09T13:55:49.000Z | 2022-03-24T07:09:31.000Z | defmodule Accent.IntegrationManager do
alias Accent.{Integration, Repo}
import Ecto.Changeset
@spec create(map()) :: {:ok, Integration.t()} | {:error, Ecto.Changeset.t()}
# Builds a changeset for a brand-new integration and inserts it. The
# user foreign key is constrained here (project_id is constrained inside
# `changeset/2`).
def create(params) do
  insertable =
    %Integration{}
    |> changeset(params)
    |> foreign_key_constraint(:user_id)

  Repo.insert(insertable)
end
@spec update(Integration.t(), map()) :: {:ok, Integration.t()} | {:error, Ecto.Changeset.t()}
# Validates `params` against the existing record and persists the result.
def update(integration, params) do
  Repo.update(changeset(integration, params))
end
@spec delete(Integration.t()) :: {:ok, Integration.t()} | {:error, Ecto.Changeset.t()}
# Deletes the given integration. The original used a single-step pipe
# (`integration |> Repo.delete()`), which community style discourages;
# a direct call is clearer and behaves identically.
def delete(integration) do
  Repo.delete(integration)
end
# Casts and validates the shared integration fields, then delegates the
# service-specific `data` embed to the matching `changeset_data/1` builder.
# Note: `params[:service]` takes precedence over the persisted service when
# choosing the embed validator, so switching services revalidates `data`
# under the new service's rules.
defp changeset(model, params) do
  model
  |> cast(params, [:project_id, :user_id, :service, :events])
  |> validate_inclusion(:service, ~w(slack github discord))
  |> cast_embed(:data, with: changeset_data(params[:service] || model.service))
  |> foreign_key_constraint(:project_id)
  |> validate_required([:service, :data])
end
# Returns the embed-changeset builder for the given service. Slack and
# Discord share the same shape (a required webhook URL) and are merged into
# one guarded clause; GitHub needs repository credentials; anything else
# casts nothing, leaving `data` untouched.
defp changeset_data(service) when service in ["slack", "discord"] do
  fn model, params ->
    model
    |> cast(params, [:url])
    |> validate_required([:url])
  end
end

defp changeset_data("github") do
  fn model, params ->
    model
    |> cast(params, [:repository, :default_ref, :token])
    |> validate_required([:repository, :default_ref, :token])
  end
end

defp changeset_data(_) do
  fn model, params -> cast(model, params, []) end
end
end
| 25.421875 | 95 | 0.629994 |
084fc4e321a5a905c55bff14081dc74567f5d580 | 393 | ex | Elixir | lib/digital_ocean/helpers/body.ex | kianmeng/digital-ocean-elixir | eff6fd1c621ab51908edad731794b0ef2db1cac1 | [
"MIT"
] | 5 | 2020-09-20T19:35:32.000Z | 2021-12-15T08:57:22.000Z | lib/digital_ocean/helpers/body.ex | kianmeng/digital-ocean-elixir | eff6fd1c621ab51908edad731794b0ef2db1cac1 | [
"MIT"
] | 8 | 2020-09-25T14:30:03.000Z | 2022-02-21T18:04:48.000Z | lib/digital_ocean/helpers/body.ex | kianmeng/digital-ocean-elixir | eff6fd1c621ab51908edad731794b0ef2db1cac1 | [
"MIT"
] | 4 | 2020-09-25T14:26:11.000Z | 2021-08-09T06:43:18.000Z | defmodule DigitalOcean.Helpers.Body do
@moduledoc false
alias DigitalOcean.{ Config, Operation }
@spec encode!(Operation.t(), Config.t()) :: String.t()
# DELETE and GET requests carry no body, so they encode to the empty string.
def encode!(%_{method: method}, _config) when method in [:delete, :get] do
  ""
end

# Every other verb serializes the operation params to JSON with the codec
# configured on the DigitalOcean config struct.
def encode!(operation, config) do
  operation.params
  |> Map.new()
  |> config.json_codec.encode!()
end
end
| 21.833333 | 56 | 0.641221 |
084fe0289ea78bb2a983da40d850ff34fa0d13c1 | 2,718 | exs | Elixir | config/config.exs | amaltson/tilex | 149193bf29c747a39bada8160a7dd9e831777d69 | [
"MIT"
] | null | null | null | config/config.exs | amaltson/tilex | 149193bf29c747a39bada8160a7dd9e831777d69 | [
"MIT"
] | null | null | null | config/config.exs | amaltson/tilex | 149193bf29c747a39bada8160a7dd9e831777d69 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config

# General application configuration
config :tilex, ecto_repos: [Tilex.Repo]

# Configures the endpoint
config :tilex, TilexWeb.Endpoint,
  url: [host: "localhost"],
  secret_key_base: "mdTtrt4Y4JrtiTv63NepUe4fs1iSt23VfzKpnXm6mawKl6wN8jEfLfIf2HbyMeKe",
  render_errors: [layout: :app, view: TilexWeb.ErrorView, accepts: ~w(html json)],
  pubsub: [name: Tilex.PubSub, adapter: Phoenix.PubSub.PG2],
  http: [protocol_options: [max_request_line_length: 8192, max_header_value_length: 8192]]

# Provide reasonable default for configuration options
config :tilex, :page_size, 5
config :tilex, :auth_controller, AuthController
config :tilex, :slack_notifier, Tilex.Notifications.Notifiers.Slack
config :tilex, :twitter_notifier, Tilex.Notifications.Notifiers.Twitter

# NOTE(review): `System.get_env/1` in a config file is read at *compile*
# time, freezing these values into the build — confirm that is intended.
config :tilex, :organization_name, System.get_env("ORGANIZATION_NAME")
config :tilex, :canonical_domain, System.get_env("CANONICAL_DOMAIN")
config :tilex, :default_twitter_handle, System.get_env("DEFAULT_TWITTER_HANDLE")
config :tilex, :cors_origin, System.get_env("CORS_ORIGIN")
config :tilex, :hosted_domain, System.get_env("HOSTED_DOMAIN")
config :tilex, :guest_author_whitelist, System.get_env("GUEST_AUTHOR_WHITELIST")

# Configures Elixir's Logger
config :logger, :console,
  format: "$time $metadata[$level] $message\n",
  metadata: [:request_id]

# Google OAuth via Ueberauth: forces the consent prompt and requests
# offline access plus email/profile scopes.
config :ueberauth, Ueberauth,
  providers: [
    google:
      {Ueberauth.Strategy.Google,
       [
         approval_prompt: "force",
         access_type: "offline",
         default_scope: "email profile"
       ]}
  ]

config :ueberauth, Ueberauth.Strategy.Google.OAuth,
  client_id: System.get_env("GOOGLE_CLIENT_ID"),
  client_secret: System.get_env("GOOGLE_CLIENT_SECRET")

# NOTE(review): the Guardian JWT signing key below is hard-coded and checked
# into source control; it should be rotated and loaded from the environment.
config :guardian, Guardian,
  # optional
  allowed_algos: ["HS512"],
  # optional
  verify_module: Guardian.JWT,
  issuer: "MyApp",
  ttl: {30, :days},
  allowed_drift: 2000,
  # optional
  verify_issuer: true,
  secret_key: %{
    "k" => "_AbBL082GKlPjoY9o-KM78PhyALavJRtZXOW7D-ZyqE",
    "kty" => "oct"
  },
  serializer: Tilex.GuardianSerializer

# Twitter API credentials for ExTwitter (lowercase env-var names are
# intentional here — they must match what the deployment sets).
config :extwitter, :oauth,
  consumer_key: System.get_env("twitter_consumer_key"),
  consumer_secret: System.get_env("twitter_consumer_secret"),
  access_token: System.get_env("twitter_access_token"),
  access_token_secret: System.get_env("twitter_access_token_secret")

# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 35.763158 | 90 | 0.759382 |
08500813194f5d30158d9ac6c0350a1118b21b8e | 2,199 | ex | Elixir | clients/game_services/lib/google_api/game_services/v1/model/list_game_server_deployments_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/game_services/lib/google_api/game_services/v1/model/list_game_server_deployments_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/game_services/lib/google_api/game_services/v1/model/list_game_server_deployments_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.GameServices.V1.Model.ListGameServerDeploymentsResponse do
@moduledoc """
Response message for GameServerDeploymentsService.ListGameServerDeployments.
## Attributes
* `gameServerDeployments` (*type:* `list(GoogleApi.GameServices.V1.Model.GameServerDeployment.t)`, *default:* `nil`) - The list of game server deployments.
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - Token to retrieve the next page of results, or empty if there are no more results in the list.
* `unreachable` (*type:* `list(String.t)`, *default:* `nil`) - List of locations that could not be reached.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:gameServerDeployments =>
list(GoogleApi.GameServices.V1.Model.GameServerDeployment.t()) | nil,
:nextPageToken => String.t() | nil,
:unreachable => list(String.t()) | nil
}
field(:gameServerDeployments,
as: GoogleApi.GameServices.V1.Model.GameServerDeployment,
type: :list
)
field(:nextPageToken)
field(:unreachable, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.GameServices.V1.Model.ListGameServerDeploymentsResponse do
def decode(value, options) do
GoogleApi.GameServices.V1.Model.ListGameServerDeploymentsResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.GameServices.V1.Model.ListGameServerDeploymentsResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 37.913793 | 159 | 0.737608 |
08505d48629bb7978acdbd5b6c943e0722a67421 | 1,014 | exs | Elixir | 01.exs | ivanrosolen/how-to-elixir | 46386a1316a39ac083cb4efa96eaf4641b2ac3ac | [
"MIT"
] | null | null | null | 01.exs | ivanrosolen/how-to-elixir | 46386a1316a39ac083cb4efa96eaf4641b2ac3ac | [
"MIT"
] | null | null | null | 01.exs | ivanrosolen/how-to-elixir | 46386a1316a39ac083cb4efa96eaf4641b2ac3ac | [
"MIT"
] | null | null | null | IO.puts("Ivan")
IO.puts "Rosolen"
# `<>` concatenates binaries (strings).
IO.puts "Ivan" <> "Rosolen"

# Anonymous function; called with the `.` syntax below.
add = fn a, b -> a + b end
check = is_function(add)
IO.puts check

result = add.(2, 4)
IO.puts "Resultado #{result}"

# `is_function/2` also checks arity: `add` takes two args, so this is false.
checkAdd = is_function(add, 1)
IO.puts "Check add 1 params: #{checkAdd}"

# Closure over `add`: doubles its argument.
double = fn a -> add.(a, a) end
checkDouble = is_function(double, 1)
IO.puts "Check double 1 params: #{checkDouble}"

result = double.(4)
IO.puts "Resultado #{result}"

# Like lists, tuples are also immutable. Every operation on a tuple returns a new tuple, it never changes the given one.
array = [1, 2, 3]
IO.inspect array, label: "Lista: "

# `++` concatenates lists.
newArrayMerge = [1, 2, 3] ++ [4, 5]
IO.inspect newArrayMerge, label: "Listas juntas: "

# `--` removes one occurrence per matching element on the right.
newArrayWithoutSomeVal = [1, 2, 2, 3, 3, 3] -- [2, 3]
IO.inspect newArrayWithoutSomeVal, label: "Lista sem alguns: "

tuple = {"ivan", "rosolen"}
IO.inspect tuple, label: "tuple: "

# The return value of `put_elem/3` is deliberately discarded here: the next
# inspect shows the original tuple unchanged, demonstrating immutability.
put_elem(tuple, 1, "mori")
IO.inspect tuple, label: "nova tuple: "

# Capturing the result is the correct way to "modify" a tuple.
ivan = put_elem(tuple, 1, "mori")
IO.inspect ivan, label: "nova tuple certo: "
0850892b907c63c84e7863e0b699adb5934c0ba1 | 2,893 | exs | Elixir | 2021/day16.exs | princemaple/aoc | 325dd12c8b5b827458214846f184e07a6cfbdf34 | [
"MIT"
] | 1 | 2021-12-13T00:31:09.000Z | 2021-12-13T00:31:09.000Z | 2021/day16.exs | princemaple/aoc | 325dd12c8b5b827458214846f184e07a6cfbdf34 | [
"MIT"
] | null | null | null | 2021/day16.exs | princemaple/aoc | 325dd12c8b5b827458214846f184e07a6cfbdf34 | [
"MIT"
] | null | null | null | # Title: Day16
# ── Untitled ──
# Read the hexadecimal transmission (up to 1MB) and expand every hex digit
# into exactly four bits, producing a flat list of 0/1 integers.
# NOTE(review): `"input" |> IO.getn(1_000_000)` reads from stdin using
# "input" as the prompt string — it does not open a file named "input".
data =
  "input"
  |> IO.getn(1_000_000)
  |> String.trim()
  |> String.split("", trim: true)
  |> Enum.flat_map(
    # Left-pad with three zeros, then keep the last four binary digits so
    # hex digits below 8 still yield a full 4-bit group.
    &(&1
      |> String.to_integer(16)
      |> Integer.digits(2)
      |> then(fn l -> [0, 0, 0] ++ l end)
      |> Enum.take(-4))
  )
defmodule D16 do
  # Decoder/evaluator for a bit-encoded packet format: every packet starts
  # with a 3-bit version and a 3-bit type id; type 4 is a literal value,
  # every other type is an operator over sub-packets.

  # Parses one packet from a list of bits. Returns `{{payload, rest}, meta}`
  # where `meta` holds `:version` and `:type`, `payload` is an integer
  # (literal) or a list of parsed sub-packets, and `rest` is the unconsumed
  # bit stream.
  def parse(data) do
    {data, %{}}
    |> parse_meta(:version)
    |> parse_meta(:type)
    |> parse_by_type()
  end

  # Consumes three bits and stores their integer value under `meta_key`.
  def parse_meta({[a, b, c | rest], meta}, meta_key) do
    {rest, Map.put(meta, meta_key, Integer.undigits([a, b, c], 2))}
  end

  # Type 4: literal value built from 5-bit groups.
  def parse_by_type({data, %{type: 4} = meta}) do
    {literal, rest} = parse_literal(data)
    {{Integer.undigits(literal, 2), rest}, meta}
  end

  # Any other type: an operator containing sub-packets.
  def parse_by_type({data, meta}) do
    {parse_operator(data), meta}
  end

  # A leading 1 means more 4-bit groups follow; collect them recursively.
  def parse_literal([1, a, b, c, d | rest]) do
    {data, rest} = parse_literal(rest)
    {[a, b, c, d] ++ data, rest}
  end

  # A leading 0 marks the final 4-bit group of the literal.
  def parse_literal([0, a, b, c, d | rest]) do
    {[a, b, c, d], rest}
  end

  # Length-type 0: the next 15 bits give the sub-packets' total bit length;
  # parse packets out of exactly that many bits.
  def parse_operator([0 | rest]) do
    {length, rest} = Enum.split(rest, 15)
    {payload, rest} = Enum.split(rest, Integer.undigits(length, 2))
    {parse_sub(payload), rest}
  end

  # Length-type 1: the next 11 bits give the sub-packet count. The helper
  # returns the leftover bits as a trailing list element, split off here.
  def parse_operator([1 | rest]) do
    {length, payload} = Enum.split(rest, 11)
    {data, [rest]} = Enum.split(parse_sub(payload, Integer.undigits(length, 2)), -1)
    {data, rest}
  end

  # Parses packets until the bit budget is exhausted (length-type 0 form).
  def parse_sub([]), do: []

  def parse_sub(data) do
    {{data, rest}, meta} = parse(data)
    [{{data, []}, meta} | parse_sub(rest)]
  end

  # Parses exactly `n` packets, appending the unconsumed rest (length-type 1).
  def parse_sub(rest, 0), do: [rest]

  def parse_sub(data, n) do
    {{data, rest}, meta} = parse(data)
    [{{data, []}, meta} | parse_sub(rest, n - 1)]
  end

  # Part 1: sum of every packet's version number across the whole tree.
  # A list payload means an operator packet, so recurse into children.
  def solve1({{data, _rest}, meta}) when is_list(data) do
    meta.version + (data |> Enum.map(&solve1/1) |> Enum.sum())
  end

  def solve1({{_data, _rest}, meta}) do
    meta.version
  end

  # Part 2: evaluate the expression tree. Type ids: 0 sum, 1 product,
  # 2 min, 3 max, 4 literal, 5 greater-than, 6 less-than, 7 equal
  # (comparisons yield 1 or 0 and take exactly two sub-packets).
  def solve2({{data, _}, %{type: 0}}) do
    data |> Enum.map(&solve2/1) |> Enum.sum()
  end

  def solve2({{data, _}, %{type: 1}}) do
    data |> Enum.map(&solve2/1) |> Enum.product()
  end

  def solve2({{data, _}, %{type: 2}}) do
    data |> Enum.map(&solve2/1) |> Enum.min()
  end

  def solve2({{data, _}, %{type: 3}}) do
    data |> Enum.map(&solve2/1) |> Enum.max()
  end

  def solve2({{data, _}, %{type: 4}}) do
    data
  end

  def solve2({{data, _}, %{type: 5}}) do
    data
    |> Enum.map(&solve2/1)
    |> then(fn [a, b] -> (a > b && 1) || 0 end)
  end

  def solve2({{data, _}, %{type: 6}}) do
    data
    |> Enum.map(&solve2/1)
    |> then(fn [a, b] -> (a < b && 1) || 0 end)
  end

  def solve2({{data, _}, %{type: 7}}) do
    data
    |> Enum.map(&solve2/1)
    |> then(fn [a, b] -> (a == b && 1) || 0 end)
  end
end
# Part 1: sum of all packet version numbers.
data
|> D16.parse()
|> D16.solve1()

# Part 2: evaluate the packet as an expression tree.
data
|> D16.parse()
|> D16.solve2()
| 22.601563 | 85 | 0.521604 |
0850a2f8a9cd747db55a545e89fb50081c20bcbb | 9,045 | ex | Elixir | lib/extracker.ex | Cantido/ex_tracker | 32f5785a39b5da209e2a4564f6e004479283fc1e | [
"MIT"
] | 1 | 2021-11-23T04:29:37.000Z | 2021-11-23T04:29:37.000Z | lib/extracker.ex | Cantido/ex_tracker | 32f5785a39b5da209e2a4564f6e004479283fc1e | [
"MIT"
] | 6 | 2020-11-10T03:44:44.000Z | 2022-02-28T11:05:39.000Z | lib/extracker.ex | Cantido/ex_tracker | 32f5785a39b5da209e2a4564f6e004479283fc1e | [
"MIT"
] | 1 | 2020-01-08T16:23:42.000Z | 2020-01-08T16:23:42.000Z | # SPDX-FileCopyrightText: 2021 Rosa Richter
#
# SPDX-License-Identifier: MIT
defmodule Extracker do
@moduledoc """
A fast & scaleable BitTorrent tracker.
"""
@doc """
Set the duration that an announced peer will be kept in the system.
"""
def set_interval(interval) do
  # Stored under the Redis key "interval"; `announce/5` reads it back and
  # returns it to clients as the re-announce interval.
  Redix.command!(:redix, ["SET", "interval", interval])
  :ok
end
@doc """
Announce a peer to the tracker.

Options:

  * `:event` - one of `:started`, `:completed`, `:stopped`, or `:interval`
    (the default, a plain periodic re-announce)
  * `:numwant` - how many peers to sample for the reply (defaults to 50)
"""
def announce(info_hash, peer_id, address, stats, opts \\ [])

def announce(
      hash,
      id,
      {{a, b, c, d}, port},
      {ul, dl, left},
      opts
    ) do
  # Fail fast on malformed input; each validator raises on bad data.
  validate_info_hash!(hash)
  validate_peer_id!(id)
  validate_ip_address!({{a, b, c, d}, port})
  validate_byte_count!(ul)
  validate_byte_count!(dl)
  validate_byte_count!(left)

  event = Keyword.get(opts, :event, :interval)
  numwant = Keyword.get(opts, :numwant, 50)

  # Redis keys use lowercase-hex encodings of the binary ids.
  peer_id = Base.encode16(id, case: :lower)
  info_hash = Base.encode16(hash, case: :lower)
  now_iso8601 = DateTime.utc_now() |> DateTime.to_iso8601()

  # Read back the re-announce interval stored by `set_interval/1`.
  config_queries = [
    ["GET", "interval"]
  ]

  # Register the torrent and refresh this peer's address/last-seen keys.
  peer_data_queries = [
    ["SADD", "torrents", info_hash],
    ["SET", "peer:#{peer_id}:address", "#{:inet.ntoa({a, b, c, d})}:#{port}"],
    ["SET", "peer:#{peer_id}:last_contacted", now_iso8601]
  ]

  # Move the peer between the complete/incomplete sets depending on the
  # announce event, then rebuild the combined "peers" set as their union.
  peer_state_queries =
    case event do
      :interval ->
        []

      :completed ->
        [
          ["INCR", "torrent:#{info_hash}:downloaded"],
          ["SADD", "torrent:#{info_hash}:complete-peers", peer_id],
          ["SREM", "torrent:#{info_hash}:incomplete-peers", peer_id],
          [
            "SUNIONSTORE",
            "torrent:#{info_hash}:peers",
            "torrent:#{info_hash}:incomplete-peers",
            "torrent:#{info_hash}:complete-peers"
          ]
        ]

      :started ->
        [
          ["SADD", "torrent:#{info_hash}:incomplete-peers", peer_id],
          ["SREM", "torrent:#{info_hash}:complete-peers", peer_id],
          [
            "SUNIONSTORE",
            "torrent:#{info_hash}:peers",
            "torrent:#{info_hash}:incomplete-peers",
            "torrent:#{info_hash}:complete-peers"
          ]
        ]

      :stopped ->
        [
          ["SREM", "torrent:#{info_hash}:complete-peers", peer_id],
          ["SREM", "torrent:#{info_hash}:incomplete-peers", peer_id],
          [
            "SUNIONSTORE",
            "torrent:#{info_hash}:peers",
            "torrent:#{info_hash}:incomplete-peers",
            "torrent:#{info_hash}:complete-peers"
          ]
        ]
    end

  # Scrape-style counts plus a random sample of up to `numwant` peer ids.
  peer_list_queries = [
    ["SCARD", "torrent:#{info_hash}:complete-peers"],
    ["SCARD", "torrent:#{info_hash}:incomplete-peers"],
    ["SRANDMEMBER", "torrent:#{info_hash}:peers", numwant]
  ]

  redis_results =
    Redix.pipeline!(
      :redix,
      config_queries ++ peer_data_queries ++ peer_state_queries ++ peer_list_queries
    )

  # SRANDMEMBER is the last pipelined command, so its reply is last.
  ids = List.last(redis_results)

  address_requests =
    Enum.map(ids, fn id_i ->
      ["GET", "peer:#{id_i}:address"]
    end)

  # Skip the second round trip entirely when no peers were sampled.
  addresses =
    if Enum.empty?(address_requests) do
      []
    else
      Redix.pipeline!(:redix, address_requests)
    end

  # Re-assemble the stored "ip:port" strings into peer maps for the reply.
  peers =
    Enum.zip(ids, addresses)
    |> Enum.map(fn {id, address} ->
      [host_str, port_str] = String.split(address, ":", limit: 2)
      {:ok, ip} = :inet.parse_address(String.to_charlist(host_str))
      port = String.to_integer(port_str)

      %{
        peer_id: Base.decode16!(id, case: :lower),
        ip: ip,
        port: port
      }
    end)

  # Replies are positional: the interval GET is first; the two SCARD counts
  # sit immediately before the trailing SRANDMEMBER reply.
  interval = List.first(redis_results) |> String.to_integer()
  complete_count = Enum.at(redis_results, -3)
  incomplete_count = Enum.at(redis_results, -2)

  {:ok,
   %{complete: complete_count, incomplete: incomplete_count, interval: interval, peers: peers}}
end

# Any argument shape that does not match the main clause is rejected.
def announce(_, _, _, _, _) do
  {:error, "invalid request"}
end
# Raises unless the info hash is a binary of exactly 20 bytes (the size of
# a SHA-1 digest).
defp validate_info_hash!(info_hash) do
  valid? = is_binary(info_hash) and byte_size(info_hash) == 20

  if not valid? do
    raise "invalid info hash"
  end
end
# Raises unless the peer id is a binary of exactly 20 bytes.
defp validate_peer_id!(peer_id) do
  valid? = is_binary(peer_id) and byte_size(peer_id) == 20

  if not valid? do
    raise "invalid peer ID"
  end
end
# Raises unless every IPv4 octet is a byte (0..255) and the port fits in
# 16 bits (0..65535).
defp validate_ip_address!({{a, b, c, d}, port}) do
  octets_ok? = Enum.all?([a, b, c, d], &(&1 in 0..255))

  if not (octets_ok? and port in 0..65_535) do
    raise "invalid IP address"
  end
end
# Raises unless the value is a non-negative number (uploaded/downloaded/left
# byte counters).
defp validate_byte_count!(count) do
  if not (is_number(count) and count >= 0) do
    raise "invalid byte count"
  end
end
@doc """
Get complete, incomplete, and all-time-downloaded counts for a torrent.
"""
def scrape(info_hash) do
  validate_info_hash!(info_hash)
  info_hash = Base.encode16(info_hash, case: :lower)

  # One round trip: seeder count, leecher count, and the completed-download
  # counter maintained by `announce/5` on the :completed event.
  results =
    Redix.pipeline!(:redix, [
      ["SCARD", "torrent:#{info_hash}:complete-peers"],
      ["SCARD", "torrent:#{info_hash}:incomplete-peers"],
      ["GET", "torrent:#{info_hash}:downloaded"]
    ])

  # GET returns nil for an unknown key, so default the counter to 0.
  downloaded =
    if dl = Enum.at(results, 2) do
      String.to_integer(dl)
    else
      0
    end

  {:ok,
   %{
     complete: Enum.at(results, 0),
     incomplete: Enum.at(results, 1),
     downloaded: downloaded
   }}
end
@doc """
Delete all information relevant to a torrent.
"""
def drop(info_hash) do
  validate_info_hash!(info_hash)
  info_hash = Base.encode16(info_hash, case: :lower)

  # One pipeline reads the torrent's peer set (SMEMBERS reply is the head
  # of the result list) while deleting the torrent's own keys; the member
  # ids are then turned into per-peer DEL commands.
  delete_commands =
    Redix.pipeline!(:redix, [
      ["SMEMBERS", "torrent:#{info_hash}:peers"],
      ["SREM", "torrents", info_hash],
      ["DEL", "torrent:#{info_hash}:downloaded"],
      ["DEL", "torrent:#{info_hash}:complete-peers"],
      ["DEL", "torrent:#{info_hash}:incomplete-peers"],
      ["DEL", "torrent:#{info_hash}:peers"]
    ])
    |> List.first()
    |> Enum.flat_map(fn peer_id ->
      [
        ["DEL", "peer:#{peer_id}:address"],
        ["DEL", "peer:#{peer_id}:last_contacted"]
      ]
    end)

  # Guard against an empty pipeline — this module consistently avoids
  # issuing pipelines with no commands.
  if Enum.any?(delete_commands) do
    Redix.pipeline!(:redix, delete_commands)
  end

  :ok
end
@doc """
Remove all expired peers from the server.
"""
def clean(ttl) do
  info_hashes = Redix.command!(:redix, ["SMEMBERS", "torrents"])

  # Per-torrent peer id lists, positionally aligned with `info_hashes`.
  peer_ids_for_hash =
    if Enum.any?(info_hashes) do
      Redix.pipeline!(:redix, Enum.map(info_hashes, &["SMEMBERS", "torrent:#{&1}:peers"]))
    else
      []
    end

  peer_ids = Enum.concat(peer_ids_for_hash)

  # Fetch each peer's last-contact timestamp and parse it. Note the pipe
  # after `end` applies to the whole `if` expression, so the Enum.map runs
  # over whichever branch's result was produced. A missing key maps to nil.
  peer_last_contacted_dates =
    if Enum.any?(peer_ids) do
      Redix.pipeline!(:redix, Enum.map(peer_ids, &["GET", "peer:#{&1}:last_contacted"]))
    else
      []
    end
    |> Enum.map(fn last_contacted ->
      if last_contacted do
        {:ok, timestamp, _offset} = DateTime.from_iso8601(last_contacted)
        timestamp
      else
        nil
      end
    end)

  timestamps_for_peers = Enum.zip(peer_ids, peer_last_contacted_dates) |> Map.new()

  # info_hash => %{peer_id => timestamp-or-nil} for that torrent's peers.
  report =
    Enum.zip(info_hashes, peer_ids_for_hash)
    |> Map.new(fn {info_hash, peer_ids} ->
      peer_timestamps = Map.take(timestamps_for_peers, peer_ids)
      {info_hash, peer_timestamps}
    end)

  now = DateTime.utc_now()

  # Keep only torrents that actually have expired (or timestamp-less) peers.
  expired_peers_by_hash =
    Map.new(report, fn {info_hash, peer_timestamps} ->
      expired_peers_for_hash =
        Enum.filter(peer_timestamps, fn {_peer_id, timestamp} ->
          is_nil(timestamp) or not active?(timestamp, now, ttl)
        end)
        |> Map.new()
        |> Map.keys()

      {info_hash, expired_peers_for_hash}
    end)
    |> Enum.filter(fn {_info_hash, expired_peers} ->
      Enum.any?(expired_peers)
    end)

  # Remove expired peers from both membership sets and delete their keys.
  # NOTE(review): the combined "torrent:<hash>:peers" set is not rebuilt
  # here; it is refreshed on the next SUNIONSTORE in `announce/5` — confirm
  # that staleness window is acceptable.
  drops =
    Enum.flat_map(expired_peers_by_hash, fn {info_hash, peers} ->
      set_drops = [
        ["SREM", "torrent:#{info_hash}:complete-peers"] ++ peers,
        ["SREM", "torrent:#{info_hash}:incomplete-peers"] ++ peers
      ]

      peer_drops =
        Enum.flat_map(peers, fn peer_id ->
          [
            ["DEL", "peer:#{peer_id}:address"],
            ["DEL", "peer:#{peer_id}:last_contacted"]
          ]
        end)

      set_drops ++ peer_drops
    end)

  # Avoid issuing an empty pipeline, as elsewhere in this module.
  if Enum.any?(drops) do
    Redix.pipeline!(:redix, drops)
  end

  :ok
end
# True while `now` has not passed `timestamp` + `ttl` seconds. The original
# checked `compare in [:lt, :eq]`; `!= :gt` covers exactly the same cases.
defp active?(timestamp, now, ttl) do
  cutoff = DateTime.add(timestamp, ttl, :second)
  DateTime.compare(now, cutoff) != :gt
end
@doc """
Get the number of torrents the server knows about.
"""
def count_torrents do
  # Cardinality of the global "torrents" set maintained by `announce/5`.
  Redix.command!(:redix, ["SCARD", "torrents"])
end
@doc """
Get the number of peers the server knows about.
"""
def count_peers do
  # Sum the per-torrent peer-set cardinalities. Matching on the empty list
  # mirrors the module-wide guard against issuing empty pipelines.
  case Redix.command!(:redix, ["SMEMBERS", "torrents"]) do
    [] ->
      0

    info_hashes ->
      counts =
        Redix.pipeline!(:redix, Enum.map(info_hashes, &["SCARD", "torrent:#{&1}:peers"]))

      Enum.sum(counts)
  end
end
end
| 26.066282 | 97 | 0.571697 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.