hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
081a82866383a0283257d3ba555b2850d1502f87 | 1,186 | ex | Elixir | apps/debugger/lib/debugger/protocol.basic.ex | ihabunek/elixir-ls | a8bdf9304f04254160c9fc982ad314a50085c51a | [
"Apache-2.0"
] | 912 | 2017-06-08T03:58:03.000Z | 2021-09-06T03:42:07.000Z | apps/debugger/lib/debugger/protocol.basic.ex | ihabunek/elixir-ls | a8bdf9304f04254160c9fc982ad314a50085c51a | [
"Apache-2.0"
] | 196 | 2017-06-09T23:32:16.000Z | 2021-10-15T15:38:43.000Z | apps/debugger/lib/debugger/protocol.basic.ex | ihabunek/elixir-ls | a8bdf9304f04254160c9fc982ad314a50085c51a | [
"Apache-2.0"
] | 78 | 2017-07-06T18:35:34.000Z | 2020-04-12T08:10:45.000Z | defmodule ElixirLS.Debugger.Protocol.Basic do
@moduledoc """
Macros for VS Code debug protocol messages
These macros can be used for pattern matching or for creating messages corresponding to the
request, response, and event types as specified in VS Code debug protocol.
"""
defmacro request(seq, command) do
quote do
%{"type" => "request", "command" => unquote(command), "seq" => unquote(seq)}
end
end
defmacro request(seq, command, arguments) do
quote do
%{
"type" => "request",
"command" => unquote(command),
"seq" => unquote(seq),
"arguments" => unquote(arguments)
}
end
end
defmacro response(seq, request_seq, command, body) do
quote do
%{
"type" => "response",
"command" => unquote(command),
"seq" => unquote(seq),
"request_seq" => unquote(request_seq),
"success" => true,
"body" => unquote(body)
}
end
end
defmacro event(seq, event, body) do
quote do
%{
"type" => "event",
"event" => unquote(event),
"body" => unquote(body),
"seq" => unquote(seq)
}
end
end
end
| 23.72 | 94 | 0.573356 |
081a83433829ad3df86a685ed03c71027c52872b | 1,466 | ex | Elixir | clients/tool_results/lib/google_api/tool_results/v1beta3/model/tool_exit_code.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/tool_results/lib/google_api/tool_results/v1beta3/model/tool_exit_code.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/tool_results/lib/google_api/tool_results/v1beta3/model/tool_exit_code.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.ToolResults.V1beta3.Model.ToolExitCode do
@moduledoc """
Exit code from a tool execution.
## Attributes
- number (Integer): Tool execution exit code. A value of 0 means that the execution was successful. - In response: always set - In create/update request: always set Defaults to: `null`.
"""
defstruct [
:"number"
]
end
defimpl Poison.Decoder, for: GoogleApi.ToolResults.V1beta3.Model.ToolExitCode do
def decode(value, _options) do
value
end
end
defimpl Poison.Encoder, for: GoogleApi.ToolResults.V1beta3.Model.ToolExitCode do
def encode(value, options) do
GoogleApi.ToolResults.V1beta3.Deserializer.serialize_non_nil(value, options)
end
end
| 31.869565 | 188 | 0.755798 |
081a9727bf90129b194dd4e8aaf295816db316ff | 735 | exs | Elixir | lib/maps.exs | amiroff/learning_elixir | 362eb71ac6af69877c96a3ad03213632a8ed85fc | [
"MIT"
] | null | null | null | lib/maps.exs | amiroff/learning_elixir | 362eb71ac6af69877c96a3ad03213632a8ed85fc | [
"MIT"
] | null | null | null | lib/maps.exs | amiroff/learning_elixir | 362eb71ac6af69877c96a3ad03213632a8ed85fc | [
"MIT"
] | null | null | null | # Maps are collection of key-value pairs like keyword lists
%{ key => value, key2 => value2 }
# Keys and values can be anything
cities = %{ 34 => "İstanbul", 16 => "Bursa" }
# If the key is an atom we can use shortcut
colors = %{ :white => "fff", :black => "000" }
# can be written as:
colors = %{ white: "fff", black: "000" }
# we can use expressions as keys:
name = "Metin Emiroğlu"
%{ String.downcase(name) => name } # %{"metin emiroğlu" => "Metin Emiroğlu"}
# Accessing values via keywords
cities[34] # İstanbul
colors[:white] # "fff"
# if key is an atom we can use . notation:
colors.white # "fff"
# We get KeyError if key does not exist:
colors.beige # ** (KeyError) key :beige not found in: %{black: "000", white: "fff"}
| 28.269231 | 83 | 0.64898 |
081ac383adedfcb7c8800c3c08e3aee9c26b9eb5 | 687 | ex | Elixir | lib/edgedb/protocol/codecs/builtin/duration.ex | f0lio/edgedb-elixir | b285bd8037b0b951aabfa1d1733889880f8bfd66 | [
"MIT"
] | null | null | null | lib/edgedb/protocol/codecs/builtin/duration.ex | f0lio/edgedb-elixir | b285bd8037b0b951aabfa1d1733889880f8bfd66 | [
"MIT"
] | null | null | null | lib/edgedb/protocol/codecs/builtin/duration.ex | f0lio/edgedb-elixir | b285bd8037b0b951aabfa1d1733889880f8bfd66 | [
"MIT"
] | null | null | null | defmodule EdgeDB.Protocol.Codecs.Builtin.Duration do
@moduledoc false
use EdgeDB.Protocol.Codec
alias EdgeDB.Protocol.Datatypes
@days 0
@months 0
defbuiltinscalarcodec(
type_name: "std::duration",
type_id: Datatypes.UUID.from_string("00000000-0000-0000-0000-00000000010E"),
type: Datatypes.Int64.t()
)
@impl EdgeDB.Protocol.Codec
def encode_instance(duration) when is_integer(duration) do
[
Datatypes.Int64.encode(duration),
Datatypes.Int32.encode(@days),
Datatypes.Int32.encode(@months)
]
end
@impl EdgeDB.Protocol.Codec
def decode_instance(<<duration::int64, @days::int32, @months::int32>>) do
duration
end
end
| 22.16129 | 80 | 0.71179 |
081ad895bf5068141ccee5a0dea0675a86138ad4 | 342 | ex | Elixir | lib/advent_of_code/utils.ex | odarriba/advent_of_code_2021 | 658059cc83438c588d72fd6b30685f555cf8f7a2 | [
"MIT"
] | 5 | 2021-11-30T15:26:47.000Z | 2021-12-25T19:31:44.000Z | lib/advent_of_code/utils.ex | odarriba/advent_of_code_2021 | 658059cc83438c588d72fd6b30685f555cf8f7a2 | [
"MIT"
] | null | null | null | lib/advent_of_code/utils.ex | odarriba/advent_of_code_2021 | 658059cc83438c588d72fd6b30685f555cf8f7a2 | [
"MIT"
] | null | null | null | defmodule AdventOfCode.Utils do
@moduledoc """
Module with utilities to be used by various exercises.
"""
@doc """
Function that receives a list of lists and transposes it: changes columns to
be rows and vice versa.
"""
def transpose_matrix(matrix) do
matrix
|> List.zip()
|> Enum.map(&Tuple.to_list/1)
end
end
| 21.375 | 78 | 0.678363 |
081adeb4f79f85794a0dd257b288db5537233a86 | 783 | ex | Elixir | lib/chat/utils.ex | arjit95/elixir-chat | e4db9d37713edb8398017c8629b8064541c5a110 | [
"MIT"
] | null | null | null | lib/chat/utils.ex | arjit95/elixir-chat | e4db9d37713edb8398017c8629b8064541c5a110 | [
"MIT"
] | null | null | null | lib/chat/utils.ex | arjit95/elixir-chat | e4db9d37713edb8398017c8629b8064541c5a110 | [
"MIT"
] | null | null | null | defmodule Chat.Utils do
@doc """
Used for processing database reads in batches.
Useful for pagination of data
```elixir
# Reads messages in batches of 100
Chat.Utils.batch_ops(fn {skip, limit} ->
# data fetched from db
data
end, 100)
```
"""
@spec batch_ops(term(), number()) :: term()
def batch_ops(func, batch_limit) do
{_reads, _skips, total_reads} =
Stream.unfold({0, batch_limit, []}, fn {skip, limit, current_reads} ->
reads = func.(skip, limit)
num_reads = length(reads)
if num_reads == 0 do
nil
else
{{limit, skip, current_reads}, {num_reads, skip + num_reads, reads ++ current_reads}}
end
end)
|> Enum.to_list()
|> List.last()
total_reads
end
end
| 23.727273 | 95 | 0.595147 |
081b22954267b827ca24be9c2201f84233ac5d6a | 202 | ex | Elixir | lib/inmana/restaurants/index.ex | andrermartins/inmana | 19f36e3b7ab509ea72af2eff22981adf2ffcacab | [
"MIT"
] | null | null | null | lib/inmana/restaurants/index.ex | andrermartins/inmana | 19f36e3b7ab509ea72af2eff22981adf2ffcacab | [
"MIT"
] | null | null | null | lib/inmana/restaurants/index.ex | andrermartins/inmana | 19f36e3b7ab509ea72af2eff22981adf2ffcacab | [
"MIT"
] | null | null | null | defmodule Inmana.Restaurants.Index do
alias Inmana.{Repo, Restaurant}
def call() do
Restaurant
|> Repo.all()
|> handle_index()
end
defp handle_index(result), do: {:ok, result}
end
| 16.833333 | 46 | 0.663366 |
081b36f8a2661b1817f73d59d8c4eb3324d9a2d0 | 2,003 | ex | Elixir | lib/challenge_gov/winners.ex | jennstein2017/Challenge_gov | e0820df8b124a32ff8b78cb827ae43551492988b | [
"CC0-1.0"
] | 9 | 2020-02-26T20:24:38.000Z | 2022-03-22T21:14:52.000Z | lib/challenge_gov/winners.ex | jennstein2017/Challenge_gov | e0820df8b124a32ff8b78cb827ae43551492988b | [
"CC0-1.0"
] | 15 | 2020-04-22T19:33:24.000Z | 2022-03-26T15:11:17.000Z | lib/challenge_gov/winners.ex | jennstein2017/Challenge_gov | e0820df8b124a32ff8b78cb827ae43551492988b | [
"CC0-1.0"
] | 4 | 2020-04-27T22:58:57.000Z | 2022-01-14T13:42:09.000Z | defmodule ChallengeGov.Winners do
@moduledoc """
Context for winners
"""
alias Ecto.Multi
alias Stein.Storage
alias ChallengeGov.Repo
alias ChallengeGov.PhaseWinners.Winner
def get(id) do
Winner
|> Repo.get(id)
|> case do
nil ->
{:error, :no_winner}
winner ->
{:ok, winner}
end
end
def delete(winner) do
Ecto.Multi.new()
|> Multi.run(:remove_image, fn _repo, _changes ->
remove_image(winner)
end)
|> Multi.delete(:delete, fn %{remove_image: winner} ->
winner
end)
|> Repo.transaction()
|> case do
{:ok, %{delete: winner}} ->
{:ok, winner}
{:error, _, _, _} ->
{:error, :something_went_wrong}
end
end
# Uploads
def image_path(_phase_winner, nil, nil), do: nil
def image_path(phase_winner, key, extension),
do: "/phase_winners/#{phase_winner.id}/winner_image_#{key}#{extension}"
def image_path(phase_winner) do
image_path(phase_winner, phase_winner.image_key, phase_winner.image_extension)
end
def upload_image(phase_winner, image) do
file = Storage.prep_file(image)
key = UUID.uuid4()
path = image_path(phase_winner, key, file.extension)
meta = [{:content_disposition, ~s{attachment; filename="#{file.filename}"}}]
allowed_extensions = [".jpg", ".jpeg", ".png", ".gif"]
case Storage.upload(file, path, meta: meta, extensions: allowed_extensions) do
:ok ->
phase_winner
|> Winner.image_changeset(key, file.extension)
|> Repo.update()
{:error, reason} ->
{:error, reason}
end
end
def remove_image(winner) do
case Storage.delete(image_path(winner)) do
:ok ->
winner
|> Winner.image_changeset(nil, nil)
|> Repo.update()
{:error, _reason} ->
winner
|> Ecto.Changeset.change()
|> Ecto.Changeset.add_error(:image, "There was an issue removing this image")
|> Repo.update()
end
end
end
| 23.564706 | 85 | 0.612581 |
081b37d5785d807debc6fb6594d759c462c4cb08 | 950 | exs | Elixir | test/controllers/page_controller_test.exs | b-a-b-e/ProComPrag | 50c6c87933e71cb69b5c95bc77bf591a34661410 | [
"MIT"
] | 1 | 2020-05-31T21:54:40.000Z | 2020-05-31T21:54:40.000Z | test/controllers/page_controller_test.exs | b-a-b-e/ProComPrag | 50c6c87933e71cb69b5c95bc77bf591a34661410 | [
"MIT"
] | 64 | 2019-07-29T22:06:16.000Z | 2022-03-28T23:46:58.000Z | test/controllers/page_controller_test.exs | babe-project/BABE | 50c6c87933e71cb69b5c95bc77bf591a34661410 | [
"MIT"
] | 1 | 2021-02-06T10:23:25.000Z | 2021-02-06T10:23:25.000Z | defmodule PageControllerTest do
@moduledoc false
use Magpie.ConnCase
@username Application.get_env(:magpie, :authentication)[:username]
@password Application.get_env(:magpie, :authentication)[:password]
defp using_basic_auth(conn, username \\ @username, password \\ @password) do
header_content = "Basic " <> Base.encode64("#{username}:#{password}")
conn |> put_req_header("authorization", header_content)
end
test "Requires authentication for accessing the landing page", %{conn: conn} do
conn =
conn
|> get(page_path(conn, :index))
assert response(conn, 401)
assert conn.halted
end
describe "index/2" do
test "index/2 shows the landing page", %{conn: conn} do
conn =
conn
|> using_basic_auth()
|> get("/")
assert html_response(conn, 200) =~
"Minimal Architecture for the Generation of Portable Interactive Experiments"
end
end
end
| 27.941176 | 92 | 0.669474 |
081b6c48ce0f9005b9cae3d710945ce74ca46cf0 | 442 | ex | Elixir | lib/turbo_html/config.ex | drnikon/turbo_html | 12e69abb6dc66d09c6a4332f037c75f126ad222b | [
"MIT"
] | null | null | null | lib/turbo_html/config.ex | drnikon/turbo_html | 12e69abb6dc66d09c6a4332f037c75f126ad222b | [
"MIT"
] | null | null | null | lib/turbo_html/config.ex | drnikon/turbo_html | 12e69abb6dc66d09c6a4332f037c75f126ad222b | [
"MIT"
] | null | null | null | defmodule Turbo.HTML.Config do
@moduledoc false
def view_style(application \\ :turbo_html) do
config(:view_style, :uikit, application)
end
defp config(application) do
Application.get_env(application, Turbo.HTML, [])
end
defp config(key, default, application) do
application
|> config()
|> Keyword.get(key, default)
|> resolve_config(default)
end
defp resolve_config(value, _default), do: value
end
| 21.047619 | 52 | 0.701357 |
081b989a26a07f0d1ad1f29bf93c566971a87e65 | 1,208 | ex | Elixir | web/channels/user_socket.ex | mukilarasan101/todos-backend | b7b3e00aa17bb117ea031dd0cba1d14d3d298159 | [
"MIT"
] | null | null | null | web/channels/user_socket.ex | mukilarasan101/todos-backend | b7b3e00aa17bb117ea031dd0cba1d14d3d298159 | [
"MIT"
] | null | null | null | web/channels/user_socket.ex | mukilarasan101/todos-backend | b7b3e00aa17bb117ea031dd0cba1d14d3d298159 | [
"MIT"
] | null | null | null | defmodule Todo.UserSocket do
use Phoenix.Socket
## Channels
# channel "rooms:*", Todo.RoomChannel
channel "pushchanges:*", Todo.PushChangeChannel
## Transports
transport :websocket, Phoenix.Transports.WebSocket
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(_params, socket) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "users_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# Todo.Endpoint.broadcast("users_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 30.974359 | 83 | 0.702815 |
081bab3a97a985ba58eaceb7c8a799dafeae68b9 | 2,060 | exs | Elixir | implements/rail-fence-cipher/rail_fence_cipher.exs | MickeyOoh/Exercises | 3b34e7fdab4a09e0269d20c68531b4fb75bb7f16 | [
"MIT"
] | null | null | null | implements/rail-fence-cipher/rail_fence_cipher.exs | MickeyOoh/Exercises | 3b34e7fdab4a09e0269d20c68531b4fb75bb7f16 | [
"MIT"
] | 1 | 2018-06-19T18:59:41.000Z | 2018-06-19T18:59:41.000Z | implements/rail-fence-cipher/rail_fence_cipher.exs | MickeyOoh/Exercises | 3b34e7fdab4a09e0269d20c68531b4fb75bb7f16 | [
"MIT"
] | null | null | null | defmodule RailFenceCipher do
@doc """
Encode a given plaintext to the corresponding rail fence ciphertext
"""
@spec encode(String.t(), pos_integer) :: String.t()
def encode(str, 1), do: str
def encode(str, rails) do
#len = String.length(str)
pattern = Enum.concat(Enum.to_list( 0..(rails-1)),
Enum.to_list((rails-2)..1 ) )
letters = String.codepoints(str) # str -> list
#|> Enum.with_index()
result = List.duplicate([], rails)
railfence(letters, pattern, result)
|> List.to_string()
end
#def set_dt([], _ , result), do: result
def railfence([], _ , result) do
Enum.map(result, &(Enum.reverse(&1)))
|> List.flatten()
end
def railfence([hstr | tstr], [hpos | tpos], result) do
row = Enum.at(result, hpos)
result = List.replace_at(result, hpos, [hstr | row])
railfence(tstr, tpos ++ [hpos], result)
end
@doc """
Decode a given rail fence ciphertext to the corresponding plaintext
"""
@spec decode(String.t(), pos_integer) :: String.t()
def decode(str, 1), do: str
def decode(str, rails) do
pattern = Enum.concat(Enum.to_list( 0..(rails-1)),
Enum.to_list((rails-2)..1 ) )
locations = 0..(String.length(str) - 1)
|> Enum.to_list
result = List.duplicate([], rails)
positions = railfence(locations, pattern, result)
strings = String.codepoints(str)
result = List.duplicate("_", String.length(str))
convert(strings, positions,result)
|> List.to_string()
end
def convert([], _ , result), do: result
def convert([hstr | tstr], [hpos | tpos], result) do
result = List.replace_at(result, hpos, hstr)
convert(tstr, tpos, result)
end
def test do
msg = "WEAREDISCOVEREDFLEEATONCE"
result = "WECRLTEERDSOEEFEAOCAIVDEN"
encode(msg, 3) |> IO.puts
IO.puts result
end
def test2 do
msg = "WEAREDISCOVEREDFLEEATONCE"
result = "WECRLTEERDSOEEFEAOCAIVDEN"
decode(result, 3) |> IO.puts
IO.puts msg
end
end
| 32.1875 | 69 | 0.613592 |
081bbff4aacbc4124b2d20cb9a2c8ac4c822ef7e | 14,724 | exs | Elixir | test/phoenix/tracker/state_test.exs | Goose97/phoenix_pubsub | 35e7c54a6f32babea1638f6c5b74575a66a2fe6f | [
"MIT"
] | 524 | 2016-01-22T23:51:56.000Z | 2022-03-26T00:33:43.000Z | test/phoenix/tracker/state_test.exs | Goose97/phoenix_pubsub | 35e7c54a6f32babea1638f6c5b74575a66a2fe6f | [
"MIT"
] | 136 | 2016-01-22T01:51:57.000Z | 2022-03-23T11:13:56.000Z | test/phoenix/tracker/state_test.exs | Goose97/phoenix_pubsub | 35e7c54a6f32babea1638f6c5b74575a66a2fe6f | [
"MIT"
] | 132 | 2016-01-21T21:20:12.000Z | 2022-03-22T09:33:12.000Z | defmodule Phoenix.Tracker.StateTest do
use ExUnit.Case, async: true
alias Phoenix.Tracker.{State}
def sorted_clouds(clouds) do
clouds
|> Enum.flat_map(fn {_name, cloud} -> Enum.to_list(cloud) end)
|> Enum.sort()
end
defp new(node, config) do
State.new({node, 1}, :"#{node} #{config.test}")
end
defp new_pid() do
spawn(fn -> :ok end)
end
defp keys(elements) do
elements
|> Enum.map(fn {{_, _, key}, _, _} -> key end)
|> Enum.sort()
end
defp tab2list(tab), do: tab |> :ets.tab2list() |> Enum.sort()
test "that this is set up correctly", config do
a = new(:a, config)
assert {_a, map} = State.extract(a, a.replica, a.context)
assert map == %{}
end
test "user added online is online", config do
a = new(:a, config)
john = new_pid()
a = State.join(a, john, "lobby", :john)
assert [{:john, _meta}] = State.get_by_topic(a, "lobby")
a = State.leave(a, john, "lobby", :john)
assert [] = State.get_by_topic(a, "lobby")
end
test "users from other servers merge", config do
a = new(:a, config)
b = new(:b, config)
{a, _, _} = State.replica_up(a, b.replica)
{b, _, _} = State.replica_up(b, a.replica)
alice = new_pid()
bob = new_pid()
carol = new_pid()
assert [] = tab2list(a.pids)
a = State.join(a, alice, "lobby", :alice)
assert [{_, "lobby", :alice}] = tab2list(a.pids)
b = State.join(b, bob, "lobby", :bob)
# Merging emits a bob join event
assert {a, [{{_, _, :bob}, _, _}], []} = State.merge(a, State.extract(b, a.replica, a.context))
assert [:alice, :bob] = keys(State.online_list(a))
# Merging twice doesn't dupe events
pids_before = tab2list(a.pids)
assert {newa, [], []} = State.merge(a, State.extract(b, a.replica, a.context))
assert newa == a
assert pids_before == tab2list(newa.pids)
assert {b, [{{_, _, :alice}, _, _}], []} = State.merge(b, State.extract(a, b.replica, b.context))
assert {^b, [], []} = State.merge(b, State.extract(a, b.replica, b.context))
# observe remove
assert [{_, "lobby", :alice}, {_, "lobby", :bob}] = tab2list(a.pids)
a = State.leave(a, alice, "lobby", :alice)
assert [{_, "lobby", :bob}] = tab2list(a.pids)
b_pids_before = tab2list(b.pids)
assert [{_, "lobby", :alice}, {_, "lobby", :bob}] = b_pids_before
assert {b, [], [{{_, _, :alice}, _, _}]} = State.merge(b, State.extract(a, b.replica, b.context))
assert [{_, "lobby", :alice}] = b_pids_before -- tab2list(b.pids)
assert [:bob] = keys(State.online_list(b))
assert {^b, [], []} = State.merge(b, State.extract(a, b.replica, b.context))
b = State.join(b, carol, "lobby", :carol)
assert [:bob, :carol] = keys(State.online_list(b))
assert {a, [{{_, _, :carol}, _, _}],[]} = State.merge(a, State.extract(b, a.replica, a.context))
assert {^a, [], []} = State.merge(a, State.extract(b, a.replica, a.context))
assert (State.online_list(b) |> Enum.sort) == (State.online_list(a) |> Enum.sort)
end
test "basic netsplit", config do
a = new(:a, config)
b = new(:b, config)
{a, _, _} = State.replica_up(a, b.replica)
{b, _, _} = State.replica_up(b, a.replica)
alice = new_pid()
bob = new_pid()
carol = new_pid()
david = new_pid()
a = State.join(a, alice, "lobby", :alice)
b = State.join(b, bob, "lobby", :bob)
{a, [{{_, _, :bob}, _, _}], _} = State.merge(a, State.extract(b, a.replica, a.context))
assert [:alice, :bob] = a |> State.online_list() |> keys()
a = State.join(a, carol, "lobby", :carol)
a = State.leave(a, alice, "lobby", :alice)
a = State.join(a, david, "lobby", :david)
assert {a, [] ,[{{_, _, :bob}, _, _}]} = State.replica_down(a, {:b,1})
assert [:carol, :david] = keys(State.online_list(a))
assert {a,[],[]} = State.merge(a, State.extract(b, a.replica, a.context))
assert [:carol, :david] = keys(State.online_list(a))
assert {a,[{{_, _, :bob}, _, _}],[]} = State.replica_up(a, {:b,1})
assert [:bob, :carol, :david] = keys(State.online_list(a))
end
test "joins are observed via other node", config do
[a, b, c] = given_connected_cluster([:a, :b, :c], config)
alice = new_pid()
bob = new_pid()
a = State.join(a, alice, "lobby", :alice)
# the below join is just so that node c has some context from node a
{c, [{{_, _, :alice}, _, _}], []} =
State.merge(c, State.extract(a, c.replica, c.context))
# netsplit between a and c
{a, [], []} = State.replica_down(a, {:c, 1})
{c, [], [{{_, _, :alice}, _, _}]} = State.replica_down(c, {:a, 1})
a = State.join(a, bob, "lobby", :bob)
{b, [{{_, _, :bob}, _, _}, {{_, _, :alice}, _, _}], []} =
State.merge(b, State.extract(a, b.replica, b.context))
assert {_, [{{_, _, :bob}, _, _}], []} =
State.merge(c, State.extract(b, c.replica, c.context))
end
test "removes are observed via other node", config do
[a, b, c] = given_connected_cluster([:a, :b, :c], config)
alice = new_pid()
bob = new_pid()
a = State.join(a, alice, "lobby", :alice)
{c, [{{_, _, :alice}, _, _}], []} =
State.merge(c, State.extract(a, c.replica, c.context))
# netsplit between a and c
{a, [], []} = State.replica_down(a, {:c, 1})
{c, [], [{{_, _, :alice}, _, _}]} = State.replica_down(c, {:a, 1})
a = State.join(a, bob, "lobby", :bob)
{b, [{{_, _, :bob}, _, _}, {{_, _, :alice}, _, _}], []} =
State.merge(b, State.extract(a, b.replica, b.context))
{c, [{{_, _, :bob}, _, _}], []} =
State.merge(c, State.extract(b, c.replica, c.context))
a = State.leave(a, bob, "lobby", :bob)
{b, [], [{{_, _, :bob}, _, _}]} =
State.merge(b, State.extract(a, b.replica, b.context))
assert {_, [], [{{_, _, :bob}, _, _}]} =
State.merge(c, State.extract(b, c.replica, c.context))
end
test "get_by_pid", config do
pid = self()
state = new(:node1, config)
assert State.get_by_pid(state, pid) == []
state = State.join(state, pid, "topic", "key1", %{})
assert [{{"topic", ^pid, "key1"}, %{}, {{:node1, 1}, 1}}] =
State.get_by_pid(state, pid)
assert {{"topic", ^pid, "key1"}, %{}, {{:node1, 1}, 1}} =
State.get_by_pid(state, pid, "topic", "key1")
assert State.get_by_pid(state, pid, "notopic", "key1") == nil
assert State.get_by_pid(state, pid, "notopic", "nokey") == nil
end
test "get_by_key", config do
pid = self()
pid2 = spawn(fn -> Process.sleep(:infinity) end)
state = new(:node1, config)
assert State.get_by_key(state, "topic", "key1") == []
state = State.join(state, pid, "topic", "key1", %{device: :browser})
state = State.join(state, pid2, "topic", "key1", %{device: :ios})
state = State.join(state, pid2, "topic", "key2", %{device: :ios})
assert [{^pid, %{device: :browser}}, {_pid2, %{device: :ios}}] =
State.get_by_key(state, "topic", "key1")
assert State.get_by_key(state, "another_topic", "key1") == []
assert State.get_by_key(state, "topic", "another_key") == []
end
test "get_by_topic", config do
pid = self()
state = new(:node1, config)
state2 = new(:node2, config)
state3 = new(:node3, config)
{state, _, _} = State.replica_up(state, {:node2, 1})
{state, _, _} = State.replica_up(state, {:node3, 1})
{state2, _, _} = State.replica_up(state2, {:node1, 1})
{state2, _, _} = State.replica_up(state2, {:node3, 1})
{state3, _, _} = State.replica_up(state3, {:node1, 1})
{state3, _, _} = State.replica_up(state3, {:node2, 1})
assert state.context ==
%{{:node2, 1} => 0, {:node3, 1} => 0, {:node1, 1} => 0}
assert state2.context ==
%{{:node1, 1} => 0, {:node3, 1} => 0, {:node2, 1} => 0}
assert state3.context ==
%{{:node1, 1} => 0, {:node2, 1} => 0, {:node3, 1} => 0}
user2 = new_pid()
user3 = new_pid()
assert [] = State.get_by_topic(state, "topic")
state = State.join(state, pid, "topic", "key1", %{})
state = State.join(state, pid, "topic", "key2", %{})
state2 = State.join(state2, user2, "topic", "user2", %{})
state3 = State.join(state3, user3, "topic", "user3", %{})
# all replicas online
assert [{"key1", %{}}, {"key2", %{}}] =
State.get_by_topic(state, "topic")
{state, _, _} = State.merge(state, State.extract(state2, state.replica, state.context))
{state, _, _} = State.merge(state, State.extract(state3, state.replica, state.context))
assert [{"key1", %{}}, {"key2", %{}}, {"user2", %{}}, {"user3", %{}}] =
State.get_by_topic(state, "topic")
# one replica offline
{state, _, _} = State.replica_down(state, state2.replica)
assert [{"key1", %{}}, {"key2", %{}}, {"user3", %{}}] =
State.get_by_topic(state, "topic")
# two replicas offline
{state, _, _} = State.replica_down(state, state3.replica)
assert [{"key1", %{}}, {"key2", %{}}] = State.get_by_topic(state, "topic")
assert [] = State.get_by_topic(state, "another:topic")
end
test "remove_down_replicas", config do
state1 = new(:node1, config)
state2 = new(:node2, config)
{state1, _, _} = State.replica_up(state1, state2.replica)
{state2, _, _} = State.replica_up(state2, state1.replica)
alice = new_pid()
bob = new_pid()
state1 = State.join(state1, alice, "lobby", :alice)
state2 = State.join(state2, bob, "lobby", :bob)
{state2, _, _} = State.merge(state2, State.extract(state1, state2.replica, state2.context))
assert keys(State.online_list(state2)) == [:alice, :bob]
{state2, _, _} = State.replica_down(state2, {:node1, 1})
assert [{^alice, "lobby", :alice},
{^bob, "lobby", :bob}] = tab2list(state2.pids)
state2 = State.remove_down_replicas(state2, {:node1, 1})
assert [{^bob, "lobby", :bob}] = tab2list(state2.pids)
{state2, _, _} = State.replica_up(state2, {:node1, 1})
assert keys(State.online_list(state2)) == [:bob]
end
test "basic deltas", config do
a = new(:a, config)
b = new(:b, config)
{a, _, _} = State.replica_up(a, b.replica)
{b, _, _} = State.replica_up(b, a.replica)
alice = new_pid()
bob = new_pid()
a = State.join(a, alice, "lobby", :alice)
b = State.join(b, bob, "lobby", :bob)
assert {b, [{{_, _, :alice}, _, _}], []} = State.merge(b, a.delta)
assert {{:b, 1}, %{{:a, 1} => 1, {:b, 1} => 1}} = State.clocks(b)
a = State.reset_delta(a)
a = State.leave(a, alice, "lobby", :alice)
assert {b, [], [{{_, _, :alice}, _, _}]} = State.merge(b, a.delta)
assert {{:b, 1}, %{{:a, 1} => 2, {:b, 1} => 1}} = State.clocks(b)
a = State.join(a, alice, "lobby", :alice)
assert {b, [{{_, _, :alice}, _, _}], []} = State.merge(b, a.delta)
assert {{:b, 1}, %{{:a, 1} => 3, {:b, 1} => 1}} = State.clocks(b)
assert Enum.all?(Enum.map(b.clouds, fn {_, cloud} -> Enum.empty?(cloud) end))
end
test "deltas are not merged for non-contiguous ranges", config do
s1 = new(:s1, config)
s2 = State.join(s1, new_pid(), "lobby", "user1", %{})
s3 = State.join(s2, new_pid(), "lobby", "user2", %{})
s4 = State.join(State.reset_delta(s3), new_pid(), "lobby", "user3", %{})
assert State.merge_deltas(s2.delta, s4.delta) == {:error, :not_contiguous}
assert State.merge_deltas(s4.delta, s2.delta) == {:error, :not_contiguous}
end
test "extracted state context contains only replicas known to remote replica",
config do
s1 = new(:s1, config)
s2 = new(:s2, config)
s3 = new(:s3, config)
{s1, _, _} = State.replica_up(s1, s2.replica)
{s2, _, _} = State.replica_up(s2, s1.replica)
{s2, _, _} = State.replica_up(s2, s3.replica)
s1 = State.join(s1, new_pid(), "lobby", "user1", %{})
s2 = State.join(s2, new_pid(), "lobby", "user2", %{})
s3 = State.join(s3, new_pid(), "lobby", "user3", %{})
{s1, _, _} = State.merge(s1, s2.delta)
{s2, _, _} = State.merge(s2, s1.delta)
{s2, _, _} = State.merge(s2, s3.delta)
{extracted, _} = State.extract(s2, s1.replica, s1.context)
assert extracted.context == %{{:s1, 1} => 1, {:s2, 1} => 1}
end
test "merging deltas", config do
s1 = new(:s1, config)
s2 = new(:s2, config)
user1 = new_pid()
user2 = new_pid()
s1 = State.join(s1, user1, "lobby", "user1", %{})
s1 = State.join(s1, user1, "private", "user1", %{})
s2 = State.join(s2, user2, "lobby", "user2", %{})
s2 = State.join(s2, user2, "private", "user2", %{})
{:ok, delta1} = State.merge_deltas(s1.delta, s2.delta)
assert delta1.values == %{
{{:s1, 1}, 1} => {user1, "lobby", "user1", %{}},
{{:s1, 1}, 2} => {user1, "private", "user1", %{}},
{{:s2, 1}, 1} => {user2, "lobby", "user2", %{}},
{{:s2, 1}, 2} => {user2, "private", "user2", %{}}
}
assert sorted_clouds(delta1.clouds) ==
[{{:s1, 1}, 1}, {{:s1, 1}, 2}, {{:s2, 1}, 1}, {{:s2, 1}, 2}]
end
test "merging deltas with removes", config do
s1 = new(:s1, config)
s2 = new(:s2, config)
user1 = new_pid()
{s1, _, _} = State.replica_up(s1, s2.replica)
{s2, _, _} = State.replica_up(s2, s1.replica)
# concurrent add wins
s1 = State.join(s1, user1, "lobby", "user1", %{})
s1 = State.join(s1, user1, "private", "user1", %{})
s2 = State.join(s2, user1, "lobby", "user1", %{})
s2 = State.leave(s2, user1, "lobby", "user1")
{:ok, delta1} = State.merge_deltas(s1.delta, s2.delta)
s1 = %State{s1 | delta: delta1}
assert delta1.values == %{
{{:s1, 1}, 1} => {user1, "lobby", "user1", %{}},
{{:s1, 1}, 2} => {user1, "private", "user1", %{}},
}
assert sorted_clouds(delta1.clouds) ==
[{{:s1, 1}, 1}, {{:s1, 1}, 2}, {{:s2, 1}, 1}, {{:s2, 1}, 2}]
# merging duplicates maintains delta
assert {:ok, ^delta1} = State.merge_deltas(delta1, s2.delta)
{s2, _, _} = State.merge(s2, s1.delta)
s2 = State.leave(s2, user1, "private", "user1")
# observed remove
{:ok, delta1} = State.merge_deltas(s1.delta, s2.delta)
assert delta1.values == %{
{{:s1, 1}, 1} => {user1, "lobby", "user1", %{}},
}
# maintains tombstone
assert sorted_clouds(delta1.clouds) ==
[{{:s1, 1}, 1}, {{:s1, 1}, 2}, {{:s2, 1}, 1}, {{:s2, 1}, 2}, {{:s2, 1}, 3}]
end
defp given_connected_cluster(nodes, config) do
states = Enum.map(nodes, fn n -> new(n, config) end)
replicas = Enum.map(states, fn s -> s.replica end)
Enum.map(states, fn s ->
Enum.reduce(replicas, s, fn replica, acc ->
case acc.replica == replica do
true -> acc
false -> State.replica_up(acc, replica) |> elem(0)
end
end)
end)
end
end
| 35.651332 | 101 | 0.55868 |
081bd5a1397d00c08e0793f9a4ce63256135c56e | 555 | exs | Elixir | apps/bytepack/priv/repo/migrations/20200427155211_create_audit_logs.exs | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | 313 | 2020-12-03T17:26:24.000Z | 2022-03-18T09:05:14.000Z | apps/bytepack/priv/repo/migrations/20200427155211_create_audit_logs.exs | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | null | null | null | apps/bytepack/priv/repo/migrations/20200427155211_create_audit_logs.exs | dashbitco/bytepack_archive | 79f8e62149d020f2afcc501592ed399f7ce7a60b | [
"Unlicense"
] | 57 | 2020-12-03T17:41:53.000Z | 2022-03-17T17:28:16.000Z | defmodule Bytepack.Repo.Migrations.CreateAuditLogs do
use Ecto.Migration
# Creates the audit_logs table that records user/org actions together with
# request metadata, plus lookup indexes on both foreign keys.
def change do
  create table(:audit_logs) do
    add :action, :string, null: false
    # Request metadata captured at the time of the action.
    add :ip_address, :inet
    add :user_agent, :string
    # NOTE(review): presumably a denormalized copy so the entry stays
    # attributable after the user row is deleted (user_id is nilified
    # below) — confirm against callers.
    add :user_email, :string
    add :params, :map, null: false
    # Keep the log entry when the user is deleted, but drop it with the org.
    add :user_id, references(:users, on_delete: :nilify_all)
    add :org_id, references(:orgs, on_delete: :delete_all)

    # Audit entries are never updated, so only inserted_at is tracked.
    timestamps(updated_at: false)
  end

  create index(:audit_logs, [:user_id])
  create index(:audit_logs, [:org_id])
end
end
| 27.75 | 62 | 0.672072 |
081bfc9206331503572b076085793ab88ccf02a2 | 2,915 | ex | Elixir | clients/ad_exchange_seller/lib/google_api/ad_exchange_seller/v20/model/preferred_deal.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/ad_exchange_seller/lib/google_api/ad_exchange_seller/v20/model/preferred_deal.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/ad_exchange_seller/lib/google_api/ad_exchange_seller/v20/model/preferred_deal.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AdExchangeSeller.V20.Model.PreferredDeal do
  @moduledoc """
  A preferred deal resource of the Ad Exchange Seller API.

  ## Attributes

  - advertiserName (String.t): The name of the advertiser this deal is for. Defaults to: `null`.
  - buyerNetworkName (String.t): The name of the buyer network this deal is for. Defaults to: `null`.
  - currencyCode (String.t): The currency code that applies to the fixed_cpm value. If not set then assumed to be USD. Defaults to: `null`.
  - endTime (String.t): Time when this deal stops being active in seconds since the epoch (GMT). If not set then this deal is valid until manually disabled by the publisher. Defaults to: `null`.
  - fixedCpm (String.t): The fixed price for this preferred deal. In cpm micros of currency according to currencyCode. If set, then this preferred deal is eligible for the fixed price tier of buying (highest priority, pay exactly the configured fixed price). Defaults to: `null`.
  - id (String.t): Unique identifier of this preferred deal. Defaults to: `null`.
  - kind (String.t): Kind of resource this is, in this case adexchangeseller#preferredDeal. Defaults to: `null`.
  - startTime (String.t): Time when this deal becomes active in seconds since the epoch (GMT). If not set then this deal is active immediately upon creation. Defaults to: `null`.
  """

  # Supplies the field/1 macro used below and the model (de)serialization
  # helpers referenced by the Poison protocol impls that follow.
  use GoogleApi.Gax.ModelBase

  # Every attribute is optional on the wire, hence `any()` for each key.
  @type t :: %__MODULE__{
          :advertiserName => any(),
          :buyerNetworkName => any(),
          :currencyCode => any(),
          :endTime => any(),
          :fixedCpm => any(),
          :id => any(),
          :kind => any(),
          :startTime => any()
        }

  field(:advertiserName)
  field(:buyerNetworkName)
  field(:currencyCode)
  field(:endTime)
  field(:fixedCpm)
  field(:id)
  field(:kind)
  field(:startTime)
end
defimpl Poison.Decoder, for: GoogleApi.AdExchangeSeller.V20.Model.PreferredDeal do
  # Delegates JSON decoding to the model's own decode/2 (brought in via
  # `use GoogleApi.Gax.ModelBase` on the model module).
  def decode(value, options) do
    GoogleApi.AdExchangeSeller.V20.Model.PreferredDeal.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.AdExchangeSeller.V20.Model.PreferredDeal do
  # Encoding is generic across generated models, so it goes through the
  # shared ModelBase implementation rather than model-specific code.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 42.246377 | 279 | 0.720069 |
081c329e8dd04627cc3403a764d60f352bc587c7 | 168 | exs | Elixir | config/config.exs | rogaz/thesis-phoenix | 8ad24cdc7e24bf312139a527db5a3bf07e05820f | [
"MIT"
] | 681 | 2016-06-21T20:49:21.000Z | 2022-02-19T04:08:38.000Z | config/config.exs | rogaz/thesis-phoenix | 8ad24cdc7e24bf312139a527db5a3bf07e05820f | [
"MIT"
] | 125 | 2016-06-21T21:14:49.000Z | 2020-12-12T20:15:48.000Z | config/config.exs | rogaz/thesis-phoenix | 8ad24cdc7e24bf312139a527db5a3bf07e05820f | [
"MIT"
] | 76 | 2016-09-06T03:40:55.000Z | 2022-01-20T21:29:22.000Z | use Mix.Config
config :thesis, store: Thesis.EctoStore
config :thesis, Thesis.EctoStore, repo: MyApp.Repo
config :thesis, env: Mix.env
import_config "#{Mix.env}.exs"
| 21 | 50 | 0.755952 |
081c4763c37f6b895d7d643c3bf6d4aaa8bfdca0 | 1,065 | ex | Elixir | backend/lib/backend_web/channels/user_socket.ex | silver-panda/budgetr | d8cad5c1401677947444add24c8d41f2450c8cc3 | [
"MIT"
] | null | null | null | backend/lib/backend_web/channels/user_socket.ex | silver-panda/budgetr | d8cad5c1401677947444add24c8d41f2450c8cc3 | [
"MIT"
] | 3 | 2020-11-08T11:19:47.000Z | 2021-05-07T17:20:05.000Z | backend/lib/backend_web/channels/user_socket.ex | silver-panda/budgetr | d8cad5c1401677947444add24c8d41f2450c8cc3 | [
"MIT"
] | null | null | null | defmodule BackendWeb.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", BackendWeb.RoomChannel
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
# Accepts every connection as-is; no token verification or default assigns
# are applied here.
def connect(_params, socket, _connect_info), do: {:ok, socket}
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# BackendWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket) do
  # Anonymous socket: with no id topic, per-user disconnect broadcasts
  # cannot target it.
  nil
end
end
| 31.323529 | 83 | 0.696714 |
081c4a82e4c97bfc4e81910d35fe0c336c7441ad | 1,117 | exs | Elixir | challenge_1/elixir/joegotflow83/reverse/config/config.exs | rchicoli/2017-challenges | 44f0b672e5dea34de1dde131b6df837d462f8e29 | [
"Apache-2.0"
] | 271 | 2017-01-01T22:58:36.000Z | 2021-11-28T23:05:29.000Z | challenge_1/elixir/joegotflow83/reverse/config/config.exs | AakashOfficial/2017Challenges | a8f556f1d5b43c099a0394384c8bc2d826f9d287 | [
"Apache-2.0"
] | 283 | 2017-01-01T23:26:05.000Z | 2018-03-23T00:48:55.000Z | challenge_1/elixir/joegotflow83/reverse/config/config.exs | AakashOfficial/2017Challenges | a8f556f1d5b43c099a0394384c8bc2d826f9d287 | [
"Apache-2.0"
] | 311 | 2017-01-01T22:59:23.000Z | 2021-09-23T00:29:12.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :reverse, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:reverse, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.032258 | 73 | 0.751119 |
081c6e56722488e347d53754d88b168dbd9b3e57 | 17,613 | ex | Elixir | clients/content/lib/google_api/content/v2/api/shippingsettings.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/api/shippingsettings.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/api/shippingsettings.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Content.V2.Api.Shippingsettings do
  @moduledoc """
  API calls for all endpoints tagged `Shippingsettings`.
  """

  alias GoogleApi.Content.V2.Connection
  alias GoogleApi.Gax.{Request, Response}

  @doc """
  Retrieves and updates the shipping settings of multiple accounts in a single request.

  ## Parameters

  - connection (GoogleApi.Content.V2.Connection): Connection to server
  - optional_params (KeywordList): [optional] Optional parameters
    - :alt (String.t): Data format for the response.
    - :fields (String.t): Selector specifying which fields to include in a partial response.
    - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String.t): OAuth 2.0 token for the current user.
    - :prettyPrint (boolean()): Returns response with indentations and line breaks.
    - :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
    - :userIp (String.t): Deprecated. Please use quotaUser instead.
    - :dryRun (boolean()): Flag to simulate a request like in a live environment. If set to true, dry-run mode checks the validity of the request and returns errors (if any).
    - :body (ShippingsettingsCustomBatchRequest):

  ## Returns

  {:ok, %GoogleApi.Content.V2.Model.ShippingsettingsCustomBatchResponse{}} on success
  {:error, info} on failure
  """
  @spec content_shippingsettings_custombatch(Tesla.Env.client(), keyword()) ::
          {:ok, GoogleApi.Content.V2.Model.ShippingsettingsCustomBatchResponse.t()}
          | {:error, Tesla.Env.t()}
  def content_shippingsettings_custombatch(connection, optional_params \\ [], opts \\ []) do
    # Declares where each recognised option is placed on the outgoing HTTP
    # request: :query -> URL query string, :body -> request body.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :dryRun => :query,
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/shippingsettings/batch")
      |> Request.add_optional_params(optional_params_config, optional_params)

    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.Content.V2.Model.ShippingsettingsCustomBatchResponse{}]
    )
  end

  @doc """
  Retrieves the shipping settings of the account.

  ## Parameters

  - connection (GoogleApi.Content.V2.Connection): Connection to server
  - merchant_id (String.t): The ID of the managing account. If this parameter is not the same as accountId, then this account must be a multi-client account and accountId must be the ID of a sub-account of this account.
  - account_id (String.t): The ID of the account for which to get/update shipping settings.
  - optional_params (KeywordList): [optional] Optional parameters
    - :alt (String.t): Data format for the response.
    - :fields (String.t): Selector specifying which fields to include in a partial response.
    - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String.t): OAuth 2.0 token for the current user.
    - :prettyPrint (boolean()): Returns response with indentations and line breaks.
    - :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
    - :userIp (String.t): Deprecated. Please use quotaUser instead.

  ## Returns

  {:ok, %GoogleApi.Content.V2.Model.ShippingSettings{}} on success
  {:error, info} on failure
  """
  @spec content_shippingsettings_get(Tesla.Env.client(), String.t(), String.t(), keyword()) ::
          {:ok, GoogleApi.Content.V2.Model.ShippingSettings.t()} | {:error, Tesla.Env.t()}
  def content_shippingsettings_get(
        connection,
        merchant_id,
        account_id,
        optional_params \\ [],
        opts \\ []
      ) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      # Path template params are URI-escaped; unreserved characters pass
      # through unchanged.
      |> Request.url("/{merchantId}/shippingsettings/{accountId}", %{
        "merchantId" => URI.encode(merchant_id, &URI.char_unreserved?/1),
        "accountId" => URI.encode(account_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Content.V2.Model.ShippingSettings{}])
  end

  @doc """
  Retrieves supported carriers and carrier services for an account.

  ## Parameters

  - connection (GoogleApi.Content.V2.Connection): Connection to server
  - merchant_id (String.t): The ID of the account for which to retrieve the supported carriers.
  - optional_params (KeywordList): [optional] Optional parameters
    - :alt (String.t): Data format for the response.
    - :fields (String.t): Selector specifying which fields to include in a partial response.
    - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String.t): OAuth 2.0 token for the current user.
    - :prettyPrint (boolean()): Returns response with indentations and line breaks.
    - :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
    - :userIp (String.t): Deprecated. Please use quotaUser instead.

  ## Returns

  {:ok, %GoogleApi.Content.V2.Model.ShippingsettingsGetSupportedCarriersResponse{}} on success
  {:error, info} on failure
  """
  @spec content_shippingsettings_getsupportedcarriers(Tesla.Env.client(), String.t(), keyword()) ::
          {:ok, GoogleApi.Content.V2.Model.ShippingsettingsGetSupportedCarriersResponse.t()}
          | {:error, Tesla.Env.t()}
  def content_shippingsettings_getsupportedcarriers(
        connection,
        merchant_id,
        optional_params \\ [],
        opts \\ []
      ) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/{merchantId}/supportedCarriers", %{
        "merchantId" => URI.encode(merchant_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)

    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.Content.V2.Model.ShippingsettingsGetSupportedCarriersResponse{}]
    )
  end

  @doc """
  Retrieves supported holidays for an account.

  ## Parameters

  - connection (GoogleApi.Content.V2.Connection): Connection to server
  - merchant_id (String.t): The ID of the account for which to retrieve the supported holidays.
  - optional_params (KeywordList): [optional] Optional parameters
    - :alt (String.t): Data format for the response.
    - :fields (String.t): Selector specifying which fields to include in a partial response.
    - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String.t): OAuth 2.0 token for the current user.
    - :prettyPrint (boolean()): Returns response with indentations and line breaks.
    - :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
    - :userIp (String.t): Deprecated. Please use quotaUser instead.

  ## Returns

  {:ok, %GoogleApi.Content.V2.Model.ShippingsettingsGetSupportedHolidaysResponse{}} on success
  {:error, info} on failure
  """
  @spec content_shippingsettings_getsupportedholidays(Tesla.Env.client(), String.t(), keyword()) ::
          {:ok, GoogleApi.Content.V2.Model.ShippingsettingsGetSupportedHolidaysResponse.t()}
          | {:error, Tesla.Env.t()}
  def content_shippingsettings_getsupportedholidays(
        connection,
        merchant_id,
        optional_params \\ [],
        opts \\ []
      ) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/{merchantId}/supportedHolidays", %{
        "merchantId" => URI.encode(merchant_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)

    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.Content.V2.Model.ShippingsettingsGetSupportedHolidaysResponse{}]
    )
  end

  @doc """
  Lists the shipping settings of the sub-accounts in your Merchant Center account.

  ## Parameters

  - connection (GoogleApi.Content.V2.Connection): Connection to server
  - merchant_id (String.t): The ID of the managing account. This must be a multi-client account.
  - optional_params (KeywordList): [optional] Optional parameters
    - :alt (String.t): Data format for the response.
    - :fields (String.t): Selector specifying which fields to include in a partial response.
    - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String.t): OAuth 2.0 token for the current user.
    - :prettyPrint (boolean()): Returns response with indentations and line breaks.
    - :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
    - :userIp (String.t): Deprecated. Please use quotaUser instead.
    - :maxResults (integer()): The maximum number of shipping settings to return in the response, used for paging.
    - :pageToken (String.t): The token returned by the previous request.

  ## Returns

  {:ok, %GoogleApi.Content.V2.Model.ShippingsettingsListResponse{}} on success
  {:error, info} on failure
  """
  @spec content_shippingsettings_list(Tesla.Env.client(), String.t(), keyword()) ::
          {:ok, GoogleApi.Content.V2.Model.ShippingsettingsListResponse.t()}
          | {:error, Tesla.Env.t()}
  def content_shippingsettings_list(connection, merchant_id, optional_params \\ [], opts \\ []) do
    # :maxResults / :pageToken drive server-side pagination of the result set.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :maxResults => :query,
      :pageToken => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/{merchantId}/shippingsettings", %{
        "merchantId" => URI.encode(merchant_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)

    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.Content.V2.Model.ShippingsettingsListResponse{}]
    )
  end

  @doc """
  Updates the shipping settings of the account. This method supports patch semantics.

  ## Parameters

  - connection (GoogleApi.Content.V2.Connection): Connection to server
  - merchant_id (String.t): The ID of the managing account. If this parameter is not the same as accountId, then this account must be a multi-client account and accountId must be the ID of a sub-account of this account.
  - account_id (String.t): The ID of the account for which to get/update shipping settings.
  - optional_params (KeywordList): [optional] Optional parameters
    - :alt (String.t): Data format for the response.
    - :fields (String.t): Selector specifying which fields to include in a partial response.
    - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String.t): OAuth 2.0 token for the current user.
    - :prettyPrint (boolean()): Returns response with indentations and line breaks.
    - :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
    - :userIp (String.t): Deprecated. Please use quotaUser instead.
    - :dryRun (boolean()): Flag to simulate a request like in a live environment. If set to true, dry-run mode checks the validity of the request and returns errors (if any).
    - :body (ShippingSettings):

  ## Returns

  {:ok, %GoogleApi.Content.V2.Model.ShippingSettings{}} on success
  {:error, info} on failure
  """
  @spec content_shippingsettings_patch(Tesla.Env.client(), String.t(), String.t(), keyword()) ::
          {:ok, GoogleApi.Content.V2.Model.ShippingSettings.t()} | {:error, Tesla.Env.t()}
  def content_shippingsettings_patch(
        connection,
        merchant_id,
        account_id,
        optional_params \\ [],
        opts \\ []
      ) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :dryRun => :query,
      :body => :body
    }

    # Same endpoint as content_shippingsettings_update/5, but issued as an
    # HTTP PATCH (partial update) rather than PUT.
    request =
      Request.new()
      |> Request.method(:patch)
      |> Request.url("/{merchantId}/shippingsettings/{accountId}", %{
        "merchantId" => URI.encode(merchant_id, &URI.char_unreserved?/1),
        "accountId" => URI.encode(account_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Content.V2.Model.ShippingSettings{}])
  end

  @doc """
  Updates the shipping settings of the account.

  ## Parameters

  - connection (GoogleApi.Content.V2.Connection): Connection to server
  - merchant_id (String.t): The ID of the managing account. If this parameter is not the same as accountId, then this account must be a multi-client account and accountId must be the ID of a sub-account of this account.
  - account_id (String.t): The ID of the account for which to get/update shipping settings.
  - optional_params (KeywordList): [optional] Optional parameters
    - :alt (String.t): Data format for the response.
    - :fields (String.t): Selector specifying which fields to include in a partial response.
    - :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    - :oauth_token (String.t): OAuth 2.0 token for the current user.
    - :prettyPrint (boolean()): Returns response with indentations and line breaks.
    - :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
    - :userIp (String.t): Deprecated. Please use quotaUser instead.
    - :dryRun (boolean()): Flag to simulate a request like in a live environment. If set to true, dry-run mode checks the validity of the request and returns errors (if any).
    - :body (ShippingSettings):

  ## Returns

  {:ok, %GoogleApi.Content.V2.Model.ShippingSettings{}} on success
  {:error, info} on failure
  """
  @spec content_shippingsettings_update(Tesla.Env.client(), String.t(), String.t(), keyword()) ::
          {:ok, GoogleApi.Content.V2.Model.ShippingSettings.t()} | {:error, Tesla.Env.t()}
  def content_shippingsettings_update(
        connection,
        merchant_id,
        account_id,
        optional_params \\ [],
        opts \\ []
      ) do
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :dryRun => :query,
      :body => :body
    }

    # Full-replacement variant of content_shippingsettings_patch/5 (HTTP PUT).
    request =
      Request.new()
      |> Request.method(:put)
      |> Request.url("/{merchantId}/shippingsettings/{accountId}", %{
        "merchantId" => URI.encode(merchant_id, &URI.char_unreserved?/1),
        "accountId" => URI.encode(account_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Content.V2.Model.ShippingSettings{}])
  end
end
| 42.440964 | 219 | 0.681485 |
081c7ee3edf7bb3fad5db8dc75a7eaf91c7941a2 | 21,284 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/api/event_tags.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/api/event_tags.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/api/event_tags.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V34.Api.EventTags do
@moduledoc """
API calls for all endpoints tagged `EventTags`.
"""
alias GoogleApi.DFAReporting.V34.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Deletes an existing event tag.

## Parameters

*   `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server
*   `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
*   `id` (*type:* `String.t`) - Event tag ID.
*   `optional_params` (*type:* `keyword()`) - Optional parameters
    *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
    *   `:access_token` (*type:* `String.t`) - OAuth access token.
    *   `:alt` (*type:* `String.t`) - Data format for response.
    *   `:callback` (*type:* `String.t`) - JSONP
    *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
    *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
    *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
    *   `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
    *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
*   `opts` (*type:* `keyword()`) - Call options

## Returns

*   `{:ok, %{}}` on success
*   `{:error, info}` on failure
"""
@spec dfareporting_event_tags_delete(
        Tesla.Env.client(),
        String.t(),
        String.t(),
        keyword(),
        keyword()
      ) :: {:ok, nil} | {:ok, Tesla.Env.t()} | {:ok, list()} | {:error, any()}
def dfareporting_event_tags_delete(connection, profile_id, id, optional_params \\ [], opts \\ []) do
  # Where each recognised option is serialized on the outgoing request.
  query_spec = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  request =
    Request.new()
    |> Request.method(:delete)
    |> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/eventTags/{id}", %{
      "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
      "id" => URI.encode(id, &(URI.char_unreserved?(&1) || &1 == ?/))
    })
    |> Request.add_optional_params(query_spec, optional_params)
    |> Request.library_version(@library_version)

  # A successful delete has no body to parse, hence decode: false.
  response = Connection.execute(connection, request)
  Response.decode(response, opts ++ [decode: false])
end
@doc """
Gets one event tag by ID.

## Parameters

*   `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server
*   `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
*   `id` (*type:* `String.t`) - Event tag ID.
*   `optional_params` (*type:* `keyword()`) - Optional parameters
    *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
    *   `:access_token` (*type:* `String.t`) - OAuth access token.
    *   `:alt` (*type:* `String.t`) - Data format for response.
    *   `:callback` (*type:* `String.t`) - JSONP
    *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
    *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
    *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
    *   `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
    *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
    *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
*   `opts` (*type:* `keyword()`) - Call options

## Returns

*   `{:ok, %GoogleApi.DFAReporting.V34.Model.EventTag{}}` on success
*   `{:error, info}` on failure
"""
@spec dfareporting_event_tags_get(
        Tesla.Env.client(),
        String.t(),
        String.t(),
        keyword(),
        keyword()
      ) ::
        {:ok, GoogleApi.DFAReporting.V34.Model.EventTag.t()}
        | {:ok, Tesla.Env.t()}
        | {:ok, list()}
        | {:error, any()}
def dfareporting_event_tags_get(
      connection,
      profile_id,
      id,
      optional_params \\ [],
      opts \\ []
    ) do
  # Where each recognised option is serialized on the outgoing request.
  query_spec = %{
    :"$.xgafv" => :query,
    :access_token => :query,
    :alt => :query,
    :callback => :query,
    :fields => :query,
    :key => :query,
    :oauth_token => :query,
    :prettyPrint => :query,
    :quotaUser => :query,
    :uploadType => :query,
    :upload_protocol => :query
  }

  request =
    Request.new()
    |> Request.method(:get)
    |> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/eventTags/{id}", %{
      "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
      "id" => URI.encode(id, &(URI.char_unreserved?(&1) || &1 == ?/))
    })
    |> Request.add_optional_params(query_spec, optional_params)
    |> Request.library_version(@library_version)

  # Successful responses decode into an EventTag model struct.
  response = Connection.execute(connection, request)
  Response.decode(response, opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.EventTag{}])
end
@doc """
Inserts a new event tag.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.DFAReporting.V34.Model.EventTag.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V34.Model.EventTag{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_event_tags_insert(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.DFAReporting.V34.Model.EventTag.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def dfareporting_event_tags_insert(connection, profile_id, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/eventTags", %{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.EventTag{}])
end
@doc """
Retrieves a list of event tags, possibly filtered.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:adId` (*type:* `String.t`) - Select only event tags that belong to this ad.
* `:advertiserId` (*type:* `String.t`) - Select only event tags that belong to this advertiser.
* `:campaignId` (*type:* `String.t`) - Select only event tags that belong to this campaign.
* `:definitionsOnly` (*type:* `boolean()`) - Examine only the specified campaign or advertiser's event tags for matching selector criteria. When set to false, the parent advertiser and parent campaign of the specified ad or campaign is examined as well. In addition, when set to false, the status field is examined as well, along with the enabledByDefault field. This parameter can not be set to true when adId is specified as ads do not define their own even tags.
* `:enabled` (*type:* `boolean()`) - Select only enabled event tags. What is considered enabled or disabled depends on the definitionsOnly parameter. When definitionsOnly is set to true, only the specified advertiser or campaign's event tags' enabledByDefault field is examined. When definitionsOnly is set to false, the specified ad or specified campaign's parent advertiser's or parent campaign's event tags' enabledByDefault and status fields are examined as well.
* `:eventTagTypes` (*type:* `list(String.t)`) - Select only event tags with the specified event tag types. Event tag types can be used to specify whether to use a third-party pixel, a third-party JavaScript URL, or a third-party click-through URL for either impression or click tracking.
* `:ids` (*type:* `list(String.t)`) - Select only event tags with these IDs.
* `:searchString` (*type:* `String.t`) - Allows searching for objects by name or ID. Wildcards (*) are allowed. For example, "eventtag*2015" will return objects with names like "eventtag June 2015", "eventtag April 2015", or simply "eventtag 2015". Most of the searches also add wildcards implicitly at the start and the end of the search string. For example, a search string of "eventtag" will match objects with name "my eventtag", "eventtag 2015", or simply "eventtag".
* `:sortField` (*type:* `String.t`) - Field by which to sort the list.
* `:sortOrder` (*type:* `String.t`) - Order of sorted results.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V34.Model.EventTagsListResponse{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_event_tags_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.DFAReporting.V34.Model.EventTagsListResponse.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def dfareporting_event_tags_list(connection, profile_id, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:adId => :query,
:advertiserId => :query,
:campaignId => :query,
:definitionsOnly => :query,
:enabled => :query,
:eventTagTypes => :query,
:ids => :query,
:searchString => :query,
:sortField => :query,
:sortOrder => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/eventTags", %{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.EventTagsListResponse{}]
)
end
@doc """
Updates an existing event tag. This method supports patch semantics.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `id` (*type:* `String.t`) - EventTag ID.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.DFAReporting.V34.Model.EventTag.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V34.Model.EventTag{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_event_tags_patch(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.DFAReporting.V34.Model.EventTag.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def dfareporting_event_tags_patch(connection, profile_id, id, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/eventTags", %{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
})
|> Request.add_param(:query, :id, id)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.EventTag{}])
end
@doc """
Updates an existing event tag.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.DFAReporting.V34.Model.EventTag.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V34.Model.EventTag{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_event_tags_update(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.DFAReporting.V34.Model.EventTag.t()}
| {:ok, Tesla.Env.t()}
| {:ok, list()}
| {:error, any()}
def dfareporting_event_tags_update(connection, profile_id, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:put)
|> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/eventTags", %{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.EventTag{}])
end
end
| 48.153846 | 480 | 0.622627 |
081c8aaf27973832360116f7774f544a82ffaf18 | 2,250 | ex | Elixir | example/lib/nested_web.ex | BrianPhilips/ecto_nested_changeset | 4d16f2955c1e7c71e25685030f66aefeba5ce0fc | [
"MIT"
] | null | null | null | example/lib/nested_web.ex | BrianPhilips/ecto_nested_changeset | 4d16f2955c1e7c71e25685030f66aefeba5ce0fc | [
"MIT"
] | null | null | null | example/lib/nested_web.ex | BrianPhilips/ecto_nested_changeset | 4d16f2955c1e7c71e25685030f66aefeba5ce0fc | [
"MIT"
] | null | null | null | defmodule NestedWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use NestedWeb, :controller
use NestedWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: NestedWeb
import Plug.Conn
alias NestedWeb.Router.Helpers, as: Routes
end
end
def view do
quote do
use Phoenix.View,
root: "lib/nested_web/templates",
namespace: NestedWeb
# Import convenience functions from controllers
import Phoenix.Controller,
only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]
# Include shared imports and aliases for views
unquote(view_helpers())
end
end
def live_view do
quote do
use Phoenix.LiveView,
layout: {NestedWeb.LayoutView, "live.html"}
unquote(view_helpers())
end
end
def live_component do
quote do
use Phoenix.LiveComponent
unquote(view_helpers())
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
import Phoenix.LiveView.Router
end
end
def channel do
quote do
use Phoenix.Channel
end
end
defp view_helpers do
quote do
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
# Import LiveView helpers (live_render, live_component, live_patch, etc)
import Phoenix.LiveView.Helpers
import NestedWeb.LiveHelpers
# Import basic rendering functionality (render, render_layout, etc)
import Phoenix.View
import NestedWeb.ErrorHelpers
alias NestedWeb.Router.Helpers, as: Routes
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 22.277228 | 78 | 0.68 |
081c900c676950c023e1da8a3de52d3e555d67a2 | 2,509 | exs | Elixir | test/behaviour/handle_discover_test.exs | ityonemo/ExDhcp | 9a7f47da61a93b4fb9efaa62bbdb362e3b467b2b | [
"MIT"
] | null | null | null | test/behaviour/handle_discover_test.exs | ityonemo/ExDhcp | 9a7f47da61a93b4fb9efaa62bbdb362e3b467b2b | [
"MIT"
] | 16 | 2019-12-16T04:56:50.000Z | 2020-03-11T20:18:56.000Z | test/behaviour/handle_discover_test.exs | ityonemo/ex_dhcp | 9a7f47da61a93b4fb9efaa62bbdb362e3b467b2b | [
"MIT"
] | 1 | 2019-12-13T19:04:50.000Z | 2019-12-13T19:04:50.000Z | defmodule DhcpTest.Behaviour.HandleDiscoverTest do
@moduledoc false
use ExUnit.Case, async: true
alias ExDhcp.Packet
@moduletag [handle_discover: true, behaviour: true]
# packet request example taken from wikipedia:
# https://en.wikipedia.org/wiki/Dynamic_Host_Configuration_Protocol#Discovery
@dhcp_discover %Packet{
op: 1, xid: 0x3903_F326, chaddr: {0x00, 0x05, 0x3C, 0x04, 0x8D, 0x59},
options: %{message_type: :discover, requested_address: {192, 168, 1, 100},
parameter_request_list: [1, 3, 15, 6]}
}
defmodule DiscSrvNoRespond do
alias DhcpTest.Behaviour.CommonDhcp
require CommonDhcp
CommonDhcp.setup
def handle_discover(pack, xid, chaddr, test_pid) do
send(test_pid, {:discover, xid, pack, chaddr})
{:norespond, :new_state}
end
end
test "a dhcp discover message gets sent to handle_discover" do
conn = DiscSrvNoRespond.connect
DiscSrvNoRespond.send_packet(conn, @dhcp_discover)
assert_receive {:discover, xid, pack, chaddr}
assert pack == @dhcp_discover
assert xid == @dhcp_discover.xid
assert chaddr == @dhcp_discover.chaddr
end
defmodule DiscSrvRespond do
alias DhcpTest.Behaviour.CommonDhcp
require CommonDhcp
CommonDhcp.setup
def handle_discover(pack, _, _, _) do
# for simplicity, just send back the same packet.
{:respond, pack, :new_state}
end
end
test "a dhcp discover message can respond to the caller" do
conn = DiscSrvRespond.connect()
DiscSrvRespond.send_packet(conn, @dhcp_discover)
assert_receive {:udp, _, _, _, binary}
assert @dhcp_discover == Packet.decode(binary)
end
defmodule DiscParserlessSrv do
alias DhcpTest.Behaviour.CommonDhcp
require CommonDhcp
CommonDhcp.setup dhcp_options: []
def handle_discover(pack, xid, chaddr, test_pid) do
send(test_pid, {:discover, xid, pack, chaddr})
{:respond, pack, :new_state}
end
end
test "dhcp will respond to discover without options parsers" do
conn = DiscParserlessSrv.connect()
DiscParserlessSrv.send_packet(conn, @dhcp_discover)
assert_receive {:discover, xid, pack, chaddr}
# make sure that the inner contents are truly unencoded.
assert %{50 => <<192, 168, 1, 100>>, 53 => <<1>>, 55 => <<1, 3, 15, 6>>}
== pack.options
assert xid == @dhcp_discover.xid
assert chaddr == @dhcp_discover.chaddr
assert_receive {:udp, _, _, _, packet}
assert @dhcp_discover == Packet.decode(packet)
end
end
| 28.511364 | 79 | 0.698685 |
081c94d6ec619293f6b18aed5329a2041fdab223 | 263 | exs | Elixir | priv/repo/migrations/20181102234527_add_redirect_uris_to_games.exs | shanesveller/grapevine | fe74ade1adff88dfe4c1ab55fee3902dbb4664fe | [
"MIT"
] | null | null | null | priv/repo/migrations/20181102234527_add_redirect_uris_to_games.exs | shanesveller/grapevine | fe74ade1adff88dfe4c1ab55fee3902dbb4664fe | [
"MIT"
] | null | null | null | priv/repo/migrations/20181102234527_add_redirect_uris_to_games.exs | shanesveller/grapevine | fe74ade1adff88dfe4c1ab55fee3902dbb4664fe | [
"MIT"
] | null | null | null | defmodule Grapevine.Repo.Migrations.AddRedirectUrisToGames do
use Ecto.Migration
def change do
create table(:redirect_uris) do
add(:game_id, references(:games), null: false)
add(:uri, :text, null: false)
timestamps()
end
end
end
| 20.230769 | 61 | 0.688213 |
081cb3a0cd8528f09fbb3418c61e74dfb336edaf | 2,513 | exs | Elixir | priv/repo/migrations/20210920141807_add_paywall_infra.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 45 | 2020-04-17T15:40:27.000Z | 2022-03-25T00:13:30.000Z | priv/repo/migrations/20210920141807_add_paywall_infra.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 944 | 2020-02-13T02:37:01.000Z | 2022-03-31T17:50:07.000Z | priv/repo/migrations/20210920141807_add_paywall_infra.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 23 | 2020-07-28T03:36:13.000Z | 2022-03-17T14:29:02.000Z | defmodule Oli.Repo.Migrations.AddPaywallInfra do
use Ecto.Migration
import Ecto.Query, warn: false
def change do
execute "CREATE EXTENSION IF NOT EXISTS pgcrypto"
create table(:api_keys) do
add :status, :string, default: "enabled", null: false
add :hint, :string
add :hash, :string
add :payments_enabled, :boolean, default: true
add :products_enabled, :boolean, default: true
timestamps()
end
create unique_index(:api_keys, [:hash], name: :index_api_keys_hash)
create table(:payments) do
add :code, :bigint
add :type, :string, default: "direct", null: false
add :generation_date, :utc_datetime
add :application_date, :utc_datetime
add :amount, :map
add(:section_id, references(:sections))
add(:enrollment_id, references(:enrollments))
timestamps()
end
create unique_index(:payments, [:code], name: :index_payments_code)
create index(:payments, [:enrollment_id])
create index(:payments, [:section_id])
create table(:discounts) do
add :type, :string, default: "percentage", null: false
add :percentage, :float
add :amount, :map
add(:section_id, references(:sections))
add :institution_id, references(:institutions)
timestamps()
end
create index(:discounts, [:section_id])
create index(:discounts, [:institution_id])
create table(:section_visibilities) do
add :section_id, references(:sections)
add :institution_id, references(:institutions)
timestamps()
end
create index(:section_visibilities, [:section_id])
create index(:section_visibilities, [:institution_id])
alter table(:sections) do
add :type, :string, default: "enrollable", null: false
add :visibility, :string, default: "global", null: false
add :requires_payment, :boolean, default: false
add :amount, :map
add :has_grace_period, :boolean, default: false, null: false
add :grace_period_days, :integer, default: 0, null: false
add :grace_period_strategy, :string, default: "relative_to_section", null: false
add(:blueprint_id, references(:sections))
end
create index(:sections, [:type])
flush()
from(p in "sections",
where: is_nil(p.type)
)
|> Oli.Repo.update_all(set: [type: "enrollable", visibility: "global", requires_payment: false, has_grace_period: false, grace_period_days: 0, grace_period_strategy: "relative_to_section"])
end
end
| 30.277108 | 193 | 0.674095 |
081cbbb99d975cbb6a28526fea8c51102c87d287 | 2,657 | ex | Elixir | lib/honu_web/storage.ex | elixir-honu/honu | e82cbc4c2457b3d64b929cc013c17cdb4fcc8f6c | [
"MIT"
] | 1 | 2021-08-08T10:33:42.000Z | 2021-08-08T10:33:42.000Z | lib/honu_web/storage.ex | elixir-honu/honu | e82cbc4c2457b3d64b929cc013c17cdb4fcc8f6c | [
"MIT"
] | null | null | null | lib/honu_web/storage.ex | elixir-honu/honu | e82cbc4c2457b3d64b929cc013c17cdb4fcc8f6c | [
"MIT"
] | null | null | null | defmodule HonuWeb.Storage do
alias Honu.Attachments.Blob
@type option :: {atom(), any()}
@callback url(Blob.t(), [option]) :: {:ok, String.t()} | {:error, String.t()}
# @callback url_for_direct_upload() :: {:ok, String.t()} | {:error, String.t()}
# @callback headers_for_direct_upload() :: {:ok, map()} | {:error, String.t()}
def config(key, opts \\ []) do
Application.fetch_env!(:honu, __MODULE__)
|> Keyword.merge(opts)
|> Keyword.fetch!(key)
end
def namespace do
Application.fetch_env!(:honu, __MODULE__)
|> Keyword.fetch(:namespace)
|> case do
:error -> "honu"
namespace -> namespace
end
end
def base_url(conn) do
"#{conn.scheme}://#{conn.host}:#{conn.port}"
end
def sanitized_filename(filename) do
filename
end
def default_crypto_opts do
# https://github.com/rails/rails/pull/6952#issuecomment-7661220
[
key_iterations: 1000,
key_length: 256,
key_digest: :sha256,
max_age: config(:service_urls_expire_in)
]
end
def permanent_opts do
[signed_at: 0, max_age: 31_536_000_000]
end
def generate_data(data, opts \\ []) do
HonuWeb.Token.sign(
HonuWeb.Storage.config(:secret_key_base),
"blob_id",
data,
Keyword.merge(default_crypto_opts(), opts)
)
|> Base.url_encode64()
end
def generate_digest(data) do
:crypto.mac(:hmac, :sha256, HonuWeb.Storage.config(:secret_key_base), data)
|> Base.encode16(case: :lower)
end
def content_disposition_with(filename, type \\ "inline") do
disposition = Enum.find(["inline", "attachment"], "inline", fn x -> x == type end)
%{
disposition: disposition,
filename: sanitized_filename(filename)
}
end
def decode_verified_key(encoded_key, purpose \\ "blob_id") do
token =
encoded_key
|> String.split("--")
|> List.first()
|> Base.url_decode64!()
case verify_token(token, purpose) do
{:ok, map} -> {:ok, map}
{:error, _} -> {:error, "Expired"}
end
end
def find_signed(signed_blob_id, purpose \\ "blob_id", opts \\ []) do
token =
signed_blob_id
|> String.split("--")
|> List.first()
|> Base.url_decode64!()
case verify_token(token, purpose, opts) do
{:ok, key} ->
{:ok, Honu.Attachments.get_attachment_by_key!(key, config(:repo))}
{:error, _} ->
{:error, "Expired"}
end
end
defp verify_token(token, purpose, opts \\ []) do
HonuWeb.Token.verify(
HonuWeb.Storage.config(:secret_key_base),
purpose,
token,
Keyword.merge(default_crypto_opts(), opts)
)
end
end
| 24.376147 | 86 | 0.614227 |
081ce4284f86b07675e4fd5bcb1927a9ef495fae | 1,489 | ex | Elixir | lib/bike_brigade/slack_api/payload_builder.ex | bikebrigade/dispatch | eb622fe4f6dab7c917d678d3d7a322a01f97da44 | [
"Apache-2.0"
] | 28 | 2021-10-11T01:53:53.000Z | 2022-03-24T17:45:55.000Z | lib/bike_brigade/slack_api/payload_builder.ex | bikebrigade/dispatch | eb622fe4f6dab7c917d678d3d7a322a01f97da44 | [
"Apache-2.0"
] | 20 | 2021-10-21T08:12:31.000Z | 2022-03-31T13:35:53.000Z | lib/bike_brigade/slack_api/payload_builder.ex | bikebrigade/dispatch | eb622fe4f6dab7c917d678d3d7a322a01f97da44 | [
"Apache-2.0"
] | null | null | null | defmodule BikeBrigade.SlackApi.PayloadBuilder do
alias BikeBrigade.Messaging.SmsMessage
alias BikeBrigadeWeb.Router.Helpers, as: Routes
alias BikeBrigadeWeb.Endpoint
def build(channel_id, %SmsMessage{rider: rider} = message) do
text =
"<#{Routes.rider_show_url(Endpoint, :show, rider.id)}|*#{rider.name}*>: #{filter_mrkdwn(message.body)}"
%{
channel: channel_id,
blocks: [
%{
type: "section",
text: %{
type: "mrkdwn",
text: text
},
accessory: %{
type: "button",
text: %{
type: "plain_text",
text: "Reply",
emoji: true
},
url: Routes.sms_message_index_url(Endpoint, :show, rider.id)
}
}
| for m <- message.media do
%{
type: "image",
image_url: m.url,
alt_text: "Rider sent us media"
}
end
]
}
|> Jason.encode!()
end
def build(channel_id, message) do
%{
channel: channel_id,
blocks: [
%{
type: "section",
text: %{
type: "mrkdwn",
text: message
}
}
]
}
|> Jason.encode!()
end
def filter_mrkdwn(nil) do
""
end
def filter_mrkdwn(str) do
str
|> String.replace("&", "&")
|> String.replace("<", "<")
|> String.replace(">", ">")
end
end
| 21.897059 | 109 | 0.47683 |
081cec047e0726c81c06a37eeed279788e6b9320 | 3,736 | ex | Elixir | lib/conn.ex | starbuildr/gen_router | bcde92a4c51049ff6ba8664625fb7c6d2120d16e | [
"MIT"
] | 2 | 2019-04-09T13:18:47.000Z | 2021-11-03T21:09:16.000Z | lib/conn.ex | starbuildr/gen_router | bcde92a4c51049ff6ba8664625fb7c6d2120d16e | [
"MIT"
] | null | null | null | lib/conn.ex | starbuildr/gen_router | bcde92a4c51049ff6ba8664625fb7c6d2120d16e | [
"MIT"
] | null | null | null | defmodule GenRouter.Conn do
@moduledoc """
Structure which represents connection with Telegram bot.
Inspired by %Plug.Conn{}, adapted for bots.
Attributes:
* __skip__: system buffer to keep track of skipped scopes;
* path: route to controller which should handle this object;
* params: payload which will be passed to controller;
* assigns: non-parsed data assigned by a system (auth, etc);
* scope: local scope of current request, clears for each new route;
* code: response code, we use common HTTP codes, currently only 200 is supported;
* response: response payload, usually JSON;
* halted: stop the pipeline execution if conn was settled.
"""
defstruct __skip__: %{},
path: "/",
params: %{},
assigns: %{},
scope: %{},
code: nil,
response: nil,
halted: false
@type t :: %GenRouter.Conn{}
@doc """
Build Conn object with system fields
"""
@spec build(module(), map()) :: t
def build(router_module, %{path: path, params: params, assigns: assigns, scope: scope}) do
%GenRouter.Conn{
path: path,
params: params,
assigns: assigns,
scope: scope
}
|> reset_router_matches(router_module)
end
@doc """
Assign variable to current request.
"""
@spec assign(t, atom(), any()) :: t
def assign(%GenRouter.Conn{assigns: assigns} = conn, key, value) do
assigns = Map.put(assigns, key, value)
%{conn | assigns: assigns}
end
@doc """
Update state and complete the current request.
"""
@spec complete(t, String.t() | nil | :default, map() | :default, integer() | :default) :: t
def complete(conn, response \\ :default, scope \\ :default, code \\ :default)
def complete(%GenRouter.Conn{} = conn, response, scope, code)
when (is_map(scope) or scope === :default) and (is_integer(code) or code === :default) do
response = from_conn_or_default(conn, :response, response)
scope = from_conn_or_default(conn, :scope, scope)
code = from_conn_or_default(conn, :code, code)
%{conn | response: response, scope: scope, code: code}
|> halt()
end
@doc """
Put the next path after the current request.
It unhalts settled conn, so all the pipelines will be executed again.
"""
@spec forward(t, String.t(), map(), Keyword.t()) :: t
def forward(%GenRouter.Conn{} = conn, path \\ "/", scope \\ %{}, opts \\ [])
when is_bitstring(path) do
router_module =
Keyword.get(opts, :router_module, false) ||
Application.get_env(:gen_router, GenRouter.Conn)
|> Keyword.fetch!(:default_router)
%{conn | path: path, scope: scope, code: 302, halted: false}
|> reset_router_matches(router_module)
|> router_module.do_match(opts)
end
@doc """
Halt execution pipeline, Conn is settled.
"""
@spec halt(t) :: t
def halt(%GenRouter.Conn{} = conn) do
%{conn | halted: true}
end
@doc """
Reset router matching pipeline
"""
@spec reset_router_matches(t, module()) :: t
def reset_router_matches(conn, router_module) do
skip_router_scopes = Enum.reduce(router_module.scopes(), %{}, &Map.put(&2, &1, false))
%{conn | __skip__: skip_router_scopes}
end
@spec from_conn_or_default(t, :response | :scope | :code, any()) :: any()
defp from_conn_or_default(conn, :response, :default), do: conn.response
defp from_conn_or_default(_conn, :response, response), do: response
defp from_conn_or_default(conn, :scope, :default), do: conn.scope
defp from_conn_or_default(_conn, :scope, scope), do: scope || %{}
defp from_conn_or_default(conn, :code, :default), do: conn.code || 200
defp from_conn_or_default(_conn, :code, code), do: code || 200
end
| 33.657658 | 95 | 0.647484 |
081d273a466e341d85476a8a6162b98828f276f5 | 3,524 | ex | Elixir | lib/ex_twilio/jwt/access_token.ex | techgaun/ex_twilio | b22f5421c4c709232b5ef53191cc19d5ffb5c99c | [
"MIT"
] | null | null | null | lib/ex_twilio/jwt/access_token.ex | techgaun/ex_twilio | b22f5421c4c709232b5ef53191cc19d5ffb5c99c | [
"MIT"
] | null | null | null | lib/ex_twilio/jwt/access_token.ex | techgaun/ex_twilio | b22f5421c4c709232b5ef53191cc19d5ffb5c99c | [
"MIT"
] | null | null | null | defmodule ExTwilio.JWT.AccessToken do
@moduledoc """
A Twilio JWT access token, as described in the Twilio docs:
https://www.twilio.com/docs/iam/access-tokens
"""
alias ExTwilio.JWT.Grant
alias ExTwilio.Ext
@enforce_keys [:account_sid, :api_key, :api_secret, :identity, :grants, :expires_in]
defstruct token_identifier: nil,
account_sid: nil,
api_key: nil,
api_secret: nil,
identity: nil,
grants: [],
expires_in: nil
@type t :: %__MODULE__{
account_sid: String.t(),
api_key: String.t(),
api_secret: String.t(),
identity: String.t(),
grants: [ExTwilio.JWT.Grant.t()],
expires_in: integer
}
@doc """
Creates a new JWT access token.
## Example
AccessToken.new(
account_sid: "account_sid",
api_key: "api_key",
api_secret: "secret",
identity: "user@email.com",
expires_in: 86_400,
grants: [AccessToken.ChatGrant.new(service_sid: "sid")]
)
"""
@spec new(attrs :: Keyword.t()) :: t
def new(attrs \\ []) do
struct(__MODULE__, attrs)
end
@doc """
Converts an access token into a string JWT.
Will raise errors if the `token` does not have all the required fields.
## Example
token =
AccessToken.new(
account_sid: "account_sid",
api_key: "api_key",
api_secret: "secret",
identity: "user@email.com",
expires_in: 86_400,
grants: [AccessToken.ChatGrant.new(service_sid: "sid")]
)
AccessToken.to_jwt!(token)
# => "eyJhbGciOiJIUzI1NiIsImN0eSI6InR3aWxpby1mcGE7dj0xIiwidHlwIjoiSldUIn0.eyJleHAiOjE1MjM5MTIxODgsImdyYW50cyI6eyJjaGF0Ijp7ImVuZHBvaW50X2lkIjpudWxsLCJzZXJ2aWNlX3NpZCI6InNpZCJ9LCJpZGVudGl0eSI6InVzZXJAZW1haWwuY29tIn0sImlhdCI6MTUyMzkwNDk4OCwibmJmIjoxNTIzOTA0OTg3fQ.M_5dsj1VWBrIZKvcIdygSpmiMsrZdkplYYNjxEhBHk0"
"""
@spec to_jwt!(t) :: String.t() | no_return
def to_jwt!(token) do
token =
token
|> Ext.Map.validate!(:account_sid, &is_binary/1, "must be a binary")
|> Ext.Map.validate!(:api_key, &is_binary/1, "must be a binary")
|> Ext.Map.validate!(:api_secret, &is_binary/1, "must be a binary")
|> Ext.Map.validate!(:identity, &is_binary/1, "must be a binary")
|> Ext.Map.validate!(:grants, &list_of_grants?/1, "must be a list of grants")
|> Ext.Map.validate!(:expires_in, &is_integer/1, "must be an integer")
Joken.token()
|> Joken.with_claims(claims(token))
|> Joken.with_sub(token.account_sid)
|> Joken.with_jti(token.token_identifier || "#{token.api_key}-#{random_str()}")
|> Joken.with_iss(token.api_key)
|> Joken.with_nbf(DateTime.utc_now() |> DateTime.to_unix())
|> Joken.with_exp(token.expires_in)
|> Joken.with_header_args(%{"typ" => "JWT", "alg" => "HS256", "cty" => "twilio-fpa;v=1"})
|> Joken.with_signer(Joken.hs256(token.api_secret))
|> Joken.sign()
|> Joken.get_compact()
end
defp list_of_grants?(grants) when is_list(grants) do
Enum.all?(grants, &Grant.impl_for(&1))
end
defp list_of_grants?(_other), do: false
defp claims(token) do
grants =
Enum.reduce(token.grants, %{"identity" => token.identity}, fn grant, acc ->
Map.put(acc, Grant.type(grant), Grant.attrs(grant))
end)
%{"grants" => grants}
end
defp random_str do
16
|> :crypto.strong_rand_bytes()
|> Base.encode16()
|> String.downcase()
end
end
| 30.643478 | 311 | 0.633371 |
081d6a948780259bfad1c97e4f9ffd07d8f58d76 | 4,143 | ex | Elixir | clients/content/lib/google_api/content/v21/model/date_time.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/content/lib/google_api/content/v21/model/date_time.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/content/lib/google_api/content/v21/model/date_time.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V21.Model.DateTime do
  @moduledoc """
  Represents civil time (or occasionally physical time). This type can represent a civil time in one of a few possible ways: * When utc_offset is set and time_zone is unset: a civil time on a calendar day with a particular offset from UTC. * When time_zone is set and utc_offset is unset: a civil time on a calendar day in a particular time zone. * When neither time_zone nor utc_offset is set: a civil time on a calendar day in local time. The date is relative to the Proleptic Gregorian Calendar. If year is 0, the DateTime is considered not to have a specific year. month and day must have valid, non-zero values. This type may also be used to represent a physical time if all the date and time fields are set and either case of the `time_offset` oneof is set. Consider using `Timestamp` message for physical time instead. If your use case also would like to store the user's timezone, that can be done in another field. This type is more flexible than some applications may want. Make sure to document and validate your application's limitations.

  ## Attributes

  * `day` (*type:* `integer()`, *default:* `nil`) - Required. Day of month. Must be from 1 to 31 and valid for the year and month.
  * `hours` (*type:* `integer()`, *default:* `nil`) - Required. Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to allow the value "24:00:00" for scenarios like business closing time.
  * `minutes` (*type:* `integer()`, *default:* `nil`) - Required. Minutes of hour of day. Must be from 0 to 59.
  * `month` (*type:* `integer()`, *default:* `nil`) - Required. Month of year. Must be from 1 to 12.
  * `nanos` (*type:* `integer()`, *default:* `nil`) - Required. Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999.
  * `seconds` (*type:* `integer()`, *default:* `nil`) - Required. Seconds of minutes of the time. Must normally be from 0 to 59. An API may allow the value 60 if it allows leap-seconds.
  * `timeZone` (*type:* `GoogleApi.Content.V21.Model.TimeZone.t`, *default:* `nil`) - Time zone.
  * `utcOffset` (*type:* `String.t`, *default:* `nil`) - UTC offset. Must be whole seconds, between -18 hours and +18 hours. For example, a UTC offset of -4:00 would be represented as { seconds: -14400 }.
  * `year` (*type:* `integer()`, *default:* `nil`) - Optional. Year of date. Must be from 1 to 9999, or 0 if specifying a datetime without a year.
  """
  # Auto-generated model (see the file header note): edit the generator,
  # not this file. ModelBase supplies the struct/decoding machinery driven
  # by the `field/1,2` declarations below.
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :day => integer() | nil,
          :hours => integer() | nil,
          :minutes => integer() | nil,
          :month => integer() | nil,
          :nanos => integer() | nil,
          :seconds => integer() | nil,
          :timeZone => GoogleApi.Content.V21.Model.TimeZone.t() | nil,
          :utcOffset => String.t() | nil,
          :year => integer() | nil
        }
  field(:day)
  field(:hours)
  field(:minutes)
  field(:month)
  field(:nanos)
  field(:seconds)
  # Nested message field: decoded into the TimeZone model struct.
  field(:timeZone, as: GoogleApi.Content.V21.Model.TimeZone)
  field(:utcOffset)
  field(:year)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V21.Model.DateTime do
  # Delegates decoding to the `decode/2` generated on the model module.
  def decode(value, options) do
    GoogleApi.Content.V21.Model.DateTime.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V21.Model.DateTime do
  # Encodes via the shared ModelBase encoder used by all generated models.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 58.352113 | 1,050 | 0.696597 |
081dd8592f802b8188607ff936dd5a4fb10bf002 | 1,514 | ex | Elixir | test/support/data_case.ex | yammine/YAMM | c118eab5029b86c2caf24890dfffa8437684f5d1 | [
"MIT"
] | null | null | null | test/support/data_case.ex | yammine/YAMM | c118eab5029b86c2caf24890dfffa8437684f5d1 | [
"MIT"
] | null | null | null | test/support/data_case.ex | yammine/YAMM | c118eab5029b86c2caf24890dfffa8437684f5d1 | [
"MIT"
] | null | null | null | defmodule YAMM.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use YAMM.DataCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
alias YAMM.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import YAMM.DataCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(YAMM.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(YAMM.Repo, {:shared, self()})
end
:ok
end
@doc """
A helper that transforms changeset errors into a map of messages.
assert {:error, changeset} = Accounts.create_user(%{password: "short"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
Regex.replace(~r"%{(\w+)}", message, fn _, key ->
opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string()
end)
end)
end
end
| 27.035714 | 77 | 0.684941 |
081e1bf703d659ff4a271cfbd5f967a405371db3 | 69 | exs | Elixir | backend/test/test_helper.exs | tykowale/faketwitter | 8cb2f15bbc285d079daec153311b7f6fda167dd2 | [
"MIT"
] | 1 | 2016-03-20T21:38:39.000Z | 2016-03-20T21:38:39.000Z | backend/test/test_helper.exs | tykowale/faketwitter | 8cb2f15bbc285d079daec153311b7f6fda167dd2 | [
"MIT"
] | null | null | null | backend/test/test_helper.exs | tykowale/faketwitter | 8cb2f15bbc285d079daec153311b7f6fda167dd2 | [
"MIT"
] | null | null | null | ExUnit.start
Ecto.Adapters.SQL.Sandbox.mode(Backend.Repo, :manual)
| 13.8 | 53 | 0.782609 |
081e1f6bd8d223a48148f39bd0f7dbcaead7fdf2 | 221 | ex | Elixir | test/support/biblio/publisher.ex | jan-sti/gim | 1b8be6c2163577f375825170cc9b01674e59b646 | [
"ECL-2.0",
"Apache-2.0"
] | 4 | 2020-01-21T09:15:24.000Z | 2021-02-04T21:21:56.000Z | test/support/biblio/publisher.ex | jan-sti/gim | 1b8be6c2163577f375825170cc9b01674e59b646 | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2020-04-06T05:20:09.000Z | 2020-06-09T09:56:20.000Z | test/support/biblio/publisher.ex | jan-sti/gim | 1b8be6c2163577f375825170cc9b01674e59b646 | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2020-04-22T08:44:35.000Z | 2020-04-22T08:44:35.000Z | defmodule GimTest.Biblio.Publisher do
@moduledoc false
use Gim.Schema
alias GimTest.Biblio.Book
schema do
property(:name, index: :unique)
has_edges(:publisher_of, Book, reflect: :published_by)
end
end
| 18.416667 | 58 | 0.733032 |
081e33786508d04671d086ec68f4dae5f203650c | 1,248 | ex | Elixir | apps/extractor/test/support/test_steps.ex | kennyatpillar/hindsight | e90e2150a14218e5d6fdf5874f57eb055fd2dd07 | [
"Apache-2.0"
] | null | null | null | apps/extractor/test/support/test_steps.ex | kennyatpillar/hindsight | e90e2150a14218e5d6fdf5874f57eb055fd2dd07 | [
"Apache-2.0"
] | null | null | null | apps/extractor/test/support/test_steps.ex | kennyatpillar/hindsight | e90e2150a14218e5d6fdf5874f57eb055fd2dd07 | [
"Apache-2.0"
] | null | null | null | defmodule Test.Steps do
defmodule CreateResponse do
defstruct response: nil
defimpl Extract.Step, for: CreateResponse do
def execute(step, context) do
{:ok, Extract.Context.set_response(context, step.response)}
end
end
end
defmodule SetVariable do
defstruct [:name, :value]
defimpl Extract.Step, for: SetVariable do
def execute(step, context) do
{:ok, Extract.Context.add_variable(context, step.name, step.value)}
end
end
end
defmodule SetStream do
defstruct [:values]
defimpl Extract.Step, for: SetStream do
def execute(step, context) do
source = fn _opts ->
case step.values do
nil -> context.response.body
s -> s
end
end
{:ok, Extract.Context.set_source(context, source)}
end
end
end
defmodule TransformStream do
defstruct [:transform]
defimpl Extract.Step, for: TransformStream do
alias Extract.Context
def execute(step, context) do
source = fn opts ->
Context.get_stream(context, opts)
|> Stream.map(step.transform)
end
{:ok, Extract.Context.set_source(context, source)}
end
end
end
end
| 22.285714 | 75 | 0.621795 |
081e840452bf987dd8a41151685348058e7f03df | 1,051 | ex | Elixir | servers/elixir/lib/iot_server.ex | fablabjoinville/workshop-iot | 8aad0525765b11eaacbf4f93368f1858f2f7b4da | [
"MIT"
] | 2 | 2018-03-20T11:30:43.000Z | 2019-03-22T13:05:00.000Z | servers/elixir/lib/iot_server.ex | fablabjoinville/iot | 8aad0525765b11eaacbf4f93368f1858f2f7b4da | [
"MIT"
] | null | null | null | servers/elixir/lib/iot_server.ex | fablabjoinville/iot | 8aad0525765b11eaacbf4f93368f1858f2f7b4da | [
"MIT"
] | null | null | null | defmodule IotServer do
use Application
# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
import Supervisor.Spec
# Define workers and child supervisors to be supervised
children = [
# Start the Ecto repository
supervisor(IotServer.Repo, []),
# Start the endpoint when the application starts
supervisor(IotServer.Endpoint, []),
# Start your own worker by calling: IotServer.Worker.start_link(arg1, arg2, arg3)
# worker(IotServer.Worker, [arg1, arg2, arg3]),
]
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: IotServer.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
IotServer.Endpoint.config_change(changed, removed)
:ok
end
end
| 32.84375 | 87 | 0.716461 |
081e9a40c4cb09354c287148b49261990b4e8d87 | 4,197 | ex | Elixir | hexdocs__pm__phoenix__up_and_running.html/hello/lib/hello/cms.ex | jim80net/elixir_tutorial_projects | db19901a9305b297faa90642bebcc08455621b52 | [
"Unlicense"
] | null | null | null | hexdocs__pm__phoenix__up_and_running.html/hello/lib/hello/cms.ex | jim80net/elixir_tutorial_projects | db19901a9305b297faa90642bebcc08455621b52 | [
"Unlicense"
] | null | null | null | hexdocs__pm__phoenix__up_and_running.html/hello/lib/hello/cms.ex | jim80net/elixir_tutorial_projects | db19901a9305b297faa90642bebcc08455621b52 | [
"Unlicense"
] | null | null | null | defmodule Hello.CMS do
@moduledoc """
The CMS context.
"""
import Ecto.Query, warn: false
alias Hello.Repo
alias Hello.Accounts
alias Hello.CMS.{Page, Author}
@doc """
Returns the list of pages.
## Examples
iex> list_pages()
[%Page{}, ...]
"""
def list_pages do
Repo.all(from(p in Page, order_by: [desc: p.views]))
|> Repo.preload(author: [user: :credential])
end
@doc """
Gets a single page.
Raises `Ecto.NoResultsError` if the Page does not exist.
## Examples
iex> get_page!(123)
%Page{}
iex> get_page!(456)
** (Ecto.NoResultsError)
"""
def get_page!(id) do
Page
|> Repo.get!(id)
|> Repo.preload(author: [user: :credential])
end
@doc """
Creates a page.
## Examples
iex> create_page(%{field: value})
{:ok, %Page{}}
iex> create_page(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def inc_page_views(%Page{} = page) do
{1, [%Page{views: views}]} =
from(p in Page, where: p.id == ^page.id, select: [:views])
|> Repo.update_all(inc: [views: 1])
put_in(page.views, views)
end
def create_page(%Author{} = author, attrs \\ %{}) do
%Page{}
|> Page.changeset(attrs)
|> Ecto.Changeset.put_change(:author_id, author.id)
|> Repo.insert()
end
def ensure_author_exists(%Accounts.User{} = user) do
%Author{user_id: user.id}
|> Ecto.Changeset.change()
|> Ecto.Changeset.unique_constraint(:user_id)
|> Repo.insert()
|> handle_existing_author()
end
defp handle_existing_author({:ok, author}), do: author
defp handle_existing_author({:error, changeset}) do
Repo.get_by!(Author, user_id: changeset.data.user_id)
end
@doc """
Updates a page.
## Examples
iex> update_page(page, %{field: new_value})
{:ok, %Page{}}
iex> update_page(page, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_page(%Page{} = page, attrs) do
page
|> Page.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a page.
## Examples
iex> delete_page(page)
{:ok, %Page{}}
iex> delete_page(page)
{:error, %Ecto.Changeset{}}
"""
def delete_page(%Page{} = page) do
Repo.delete(page)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking page changes.
## Examples
iex> change_page(page)
%Ecto.Changeset{data: %Page{}}
"""
def change_page(%Page{} = page, attrs \\ %{}) do
Page.changeset(page, attrs)
end
@doc """
Returns the list of authors.
## Examples
iex> list_authors()
[%Author{}, ...]
"""
def list_authors do
Repo.all(Author)
end
@doc """
Gets a single author.
Raises `Ecto.NoResultsError` if the Author does not exist.
## Examples
iex> get_author!(123)
%Author{}
iex> get_author!(456)
** (Ecto.NoResultsError)
"""
def get_author!(id) do
Author
|> Repo.get!(id)
|> Repo.preload(user: :credential)
end
@doc """
Creates a author.
## Examples
iex> create_author(%{field: value})
{:ok, %Author{}}
iex> create_author(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_author(attrs \\ %{}) do
%Author{}
|> Author.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a author.
## Examples
iex> update_author(author, %{field: new_value})
{:ok, %Author{}}
iex> update_author(author, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_author(%Author{} = author, attrs) do
author
|> Author.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a author.
## Examples
iex> delete_author(author)
{:ok, %Author{}}
iex> delete_author(author)
{:error, %Ecto.Changeset{}}
"""
def delete_author(%Author{} = author) do
Repo.delete(author)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking author changes.
## Examples
iex> change_author(author)
%Ecto.Changeset{data: %Author{}}
"""
def change_author(%Author{} = author, attrs \\ %{}) do
Author.changeset(author, attrs)
end
end
| 18.090517 | 64 | 0.584703 |
081ea9102e8f47e634d813e0c8f57deb07e903a8 | 552 | ex | Elixir | assignment_3/lib/vrify_web/views/changeset_view.ex | woutdp/assignment-vrify | c022117c87a2fff63f87a594848ecd58738567a0 | [
"MIT"
] | null | null | null | assignment_3/lib/vrify_web/views/changeset_view.ex | woutdp/assignment-vrify | c022117c87a2fff63f87a594848ecd58738567a0 | [
"MIT"
] | null | null | null | assignment_3/lib/vrify_web/views/changeset_view.ex | woutdp/assignment-vrify | c022117c87a2fff63f87a594848ecd58738567a0 | [
"MIT"
] | null | null | null | defmodule VRIFYWeb.ChangesetView do
use VRIFYWeb, :view
@doc """
Traverses and translates changeset errors.
See `Ecto.Changeset.traverse_errors/2` and
`VRIFYWeb.ErrorHelpers.translate_error/1` for more details.
"""
def translate_errors(changeset) do
Ecto.Changeset.traverse_errors(changeset, &translate_error/1)
end
def render("error.json", %{changeset: changeset}) do
# When encoded, the changeset returns its errors
# as a JSON object. So we just pass it forward.
%{errors: translate_errors(changeset)}
end
end
| 27.6 | 65 | 0.735507 |
081ec27b138633b3172376d96b0d052c61e08d54 | 6,397 | ex | Elixir | lib/type/http.ex | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | 1 | 2020-08-27T18:43:11.000Z | 2020-08-27T18:43:11.000Z | lib/type/http.ex | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | null | null | null | lib/type/http.ex | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | 1 | 2020-08-27T18:43:21.000Z | 2020-08-27T18:43:21.000Z | # Copyright(c) 2015-2020 ACCESS CO., LTD. All rights reserved.
use Croma
defmodule Antikythera.Http do
  @moduledoc """
  HTTP-related value types. Most submodules are defined with Croma
  subtype macros, which generate `valid?/1` (and related helpers)
  from the declared constraints.
  """
  defmodule Method do
    use Croma.SubtypeOfAtom, values: [:get, :post, :put, :patch, :delete, :options, :connect, :trace, :head]
    @all [:get, :post, :put, :patch, :delete, :options, :connect, :trace, :head]
    def all(), do: @all
    @spec from_string(String.t) :: t
    @spec to_string(t) :: String.t
    # Generate one clause pair per method at compile time, e.g.
    # from_string("GET") => :get and to_string(:get) => "GET".
    for method <- @all do
      method_str = method |> Atom.to_string() |> String.upcase()
      def from_string(unquote(method_str)), do: unquote(method)
      def to_string(unquote(method)), do: unquote(method_str)
    end
  end
  defmodule QueryParams do
    use Croma.SubtypeOfMap, key_module: Croma.String, value_module: Croma.String, default: %{}
  end
  defmodule Headers do
    @moduledoc """
    HTTP headers as a map.

    If multiple headers in a single request/response have the same header name,
    their values are concatenated with commas.
    In case of `cookie` header values are concatenated using semicolons instead of commas.
    """
    use Croma.SubtypeOfMap, key_module: Croma.String, value_module: Croma.String, default: %{}
  end
  defmodule SetCookie do
    @moduledoc """
    Parsed representation of one `set-cookie` response header value.
    """
    alias Croma.TypeGen
    use Croma.Struct, recursive_new?: true, fields: [
      value:     Croma.String,
      path:      TypeGen.nilable(Antikythera.EncodedPath),
      domain:    TypeGen.nilable(Antikythera.Domain),
      secure:    TypeGen.nilable(Croma.Boolean),
      http_only: TypeGen.nilable(Croma.Boolean),
      max_age:   TypeGen.nilable(Croma.Integer),
    ]
    @type options_t :: %{
      optional(:path     ) => Antikythera.EncodedPath.t,
      optional(:domain   ) => Antikythera.Domain.t,
      optional(:secure   ) => boolean,
      optional(:http_only) => boolean,
      optional(:max_age  ) => non_neg_integer,
    }
    # Parses a raw `set-cookie` header value into `{cookie_name, %SetCookie{}}`.
    # The first "name=value" segment is mandatory; remaining segments are
    # attributes folded into the struct via attr_to_opt/1.
    defun parse!(s :: v[String.t]) :: {String.t, t} do
      [pair | attrs] = String.split(s, ~R/\s*;\s*/)
      # parts: 2 keeps any "=" inside the cookie value intact.
      [name, value] = String.split(pair, "=", parts: 2)
      cookie =
        Enum.reduce(attrs, %__MODULE__{value: value}, fn(attr, acc) ->
          case attr_to_opt(attr) do
            nil -> acc
            {opt_name, opt_value} -> Map.put(acc, opt_name, opt_value)
          end
        end)
      {name, cookie}
    end
    # Maps one cookie attribute to a struct field. Attribute names are
    # matched case-insensitively; unrecognized attributes are dropped.
    defp attr_to_opt(attr) do
      [name | rest] = String.split(attr, ~R/\s*=\s*/, parts: 2)
      case String.downcase(name) do
        "path"     -> {:path     , hd(rest)}
        "domain"   -> {:domain   , hd(rest)}
        "secure"   -> {:secure   , true}
        "httponly" -> {:http_only, true}
        "max-age"  -> {:max_age  , String.to_integer(hd(rest))}
        _          -> nil # version, expires or comment attribute
      end
    end
  end
  defmodule SetCookiesMap do
    use Croma.SubtypeOfMap, key_module: Croma.String, value_module: SetCookie, default: %{}
  end
  defmodule ReqCookiesMap do
    use Croma.SubtypeOfMap, key_module: Croma.String, value_module: Croma.String, default: %{}
  end
  defmodule RawBody do
    # The request body exactly as received (unparsed binary).
    @type t :: binary
    defun valid?(v :: term) :: boolean, do: is_binary(v)
    def default(), do: ""
  end
  defmodule Body do
    # A body that may already be parsed: raw binary, JSON array or object.
    @type t :: binary | [any] | %{String.t => any}
    defun valid?(v :: term) :: boolean do
      is_binary(v) or is_map(v) or is_list(v)
    end
    def default(), do: ""
  end
  defmodule Status do
    # Name => numeric code table. Used below both as the allowed values of
    # `Status.Atom` and to generate one `code/1` clause per named status.
    statuses = [
      continue:                        100,
      switching_protocols:             101,
      processing:                      102,
      ok:                              200,
      created:                         201,
      accepted:                        202,
      non_authoritative_information:   203,
      no_content:                      204,
      reset_content:                   205,
      partial_content:                 206,
      multi_status:                    207,
      already_reported:                208,
      multiple_choices:                300,
      moved_permanently:               301,
      found:                           302,
      see_other:                       303,
      not_modified:                    304,
      use_proxy:                       305,
      reserved:                        306,
      temporary_redirect:              307,
      permanent_redirect:              308,
      bad_request:                     400,
      unauthorized:                    401,
      payment_required:                402,
      forbidden:                       403,
      not_found:                       404,
      method_not_allowed:              405,
      not_acceptable:                  406,
      proxy_authentication_required:   407,
      request_timeout:                 408,
      conflict:                        409,
      gone:                            410,
      length_required:                 411,
      precondition_failed:             412,
      request_entity_too_large:        413,
      request_uri_too_long:            414,
      unsupported_media_type:          415,
      requested_range_not_satisfiable: 416,
      expectation_failed:              417,
      unprocessable_entity:            422,
      locked:                          423,
      failed_dependency:               424,
      upgrade_required:                426,
      precondition_required:           428,
      too_many_requests:               429,
      request_header_fields_too_large: 431,
      internal_server_error:           500,
      not_implemented:                 501,
      bad_gateway:                     502,
      service_unavailable:             503,
      gateway_timeout:                 504,
      http_version_not_supported:      505,
      variant_also_negotiates:         506,
      insufficient_storage:            507,
      loop_detected:                   508,
      not_extended:                    510,
      network_authentication_required: 511,
    ]
    defmodule Atom do
      use Croma.SubtypeOfAtom, values: Keyword.keys(statuses)
    end
    defmodule Int do
      use Croma.SubtypeOfInt, min: 100, max: 999
    end
    # A status may be given either as a named atom or a raw integer code.
    @type t :: Atom.t | Int.t
    defun valid?(v :: term) :: boolean do
      Int.valid?(v) or Atom.valid?(v)
    end
    @spec code(Int.t | atom) :: Int.t
    # Already-numeric codes pass through unchanged.
    def code(int) when int in 100..999 do
      int
    end
    # One clause per named status, generated at compile time.
    for {atom, code} <- statuses do
      def code(unquote(atom)), do: unquote(code)
    end
  end
end
| 33.668421 | 108 | 0.536189 |
081f07c9ba11021c2c8c23d0bb024306965fa4b6 | 633 | ex | Elixir | lib/ratchet_wrench/session.ex | hirocaster/ratchet_wrench | 67b056cf25b9f3001dd87e167db68fd5d911089e | [
"MIT"
] | 8 | 2020-05-29T03:22:02.000Z | 2022-03-18T12:16:49.000Z | lib/ratchet_wrench/session.ex | hirocaster/ratchet_wrench | 67b056cf25b9f3001dd87e167db68fd5d911089e | [
"MIT"
] | 4 | 2020-07-06T08:11:18.000Z | 2020-08-14T11:10:10.000Z | lib/ratchet_wrench/session.ex | hirocaster/ratchet_wrench | 67b056cf25b9f3001dd87e167db68fd5d911089e | [
"MIT"
] | 4 | 2020-05-26T02:47:00.000Z | 2021-05-03T15:26:12.000Z | defmodule RatchetWrench.Session do
def create(connection) do
case GoogleApi.Spanner.V1.Api.Projects.spanner_projects_instances_databases_sessions_create(
connection,
RatchetWrench.database()
) do
{:ok, session} -> {:ok, session}
{:error, reason} -> {:error, reason.body}
end
end
def delete(connection, session) do
case GoogleApi.Spanner.V1.Api.Projects.spanner_projects_instances_databases_sessions_delete(
connection,
session.name
) do
{:ok, result} -> {:ok, result}
{:error, reason} -> {:error, reason.body}
end
end
end
| 28.772727 | 96 | 0.64297 |
081f21082410813db5378e8b13196f6e30982fe7 | 104 | exs | Elixir | daniel/prog_elix/ch10/longest_line.exs | jdashton/glowing-succotash | 44580c2d4cb300e33156d42e358e8a055948a079 | [
"MIT"
] | null | null | null | daniel/prog_elix/ch10/longest_line.exs | jdashton/glowing-succotash | 44580c2d4cb300e33156d42e358e8a055948a079 | [
"MIT"
] | 1 | 2020-02-26T14:55:23.000Z | 2020-02-26T14:55:23.000Z | daniel/prog_elix/ch10/longest_line.exs | jdashton/glowing-succotash | 44580c2d4cb300e33156d42e358e8a055948a079 | [
"MIT"
] | null | null | null | IO.puts(
File.read!("/usr/share/dict/words")
|> String.split()
|> Enum.max_by(&String.length/1)
)
| 17.333333 | 37 | 0.625 |
081f2bb08c4626859ebdedb20fd1f6d1ac8d1c50 | 1,185 | ex | Elixir | lib/err/generic_error.ex | leandrocp/err | ca044a9451b8ece82d177d36376c647fb78d464c | [
"MIT"
] | 8 | 2020-10-01T06:57:56.000Z | 2021-11-09T04:30:20.000Z | lib/err/generic_error.ex | leandrocp/err | ca044a9451b8ece82d177d36376c647fb78d464c | [
"MIT"
] | 1 | 2020-09-27T14:37:55.000Z | 2020-09-27T14:37:55.000Z | lib/err/generic_error.ex | leandrocp/err | ca044a9451b8ece82d177d36376c647fb78d464c | [
"MIT"
] | null | null | null | defmodule Err.GenericError do
@moduledoc """
A generic error composed by:
* mod: origin of the error, used for tracking and also to format the error message (optional).
* reason: an atom or string to represent the error (required).
* changeset: store the changeset that caused the error (optional).
This exception is usefull to get something up and running but like the name suggest, it's generic
and you may want to define specific exceptions for your app. This module is a good starting point.
"""
@type t() :: %__MODULE__{
mod: module(),
reason: atom() | String.t(),
changeset: Ecto.Changeset.t() | nil
}
defexception [:mod, :reason, :changeset]
@doc """
Return the message for the given error.
### Examples
iex> {:error, %Err.GenericError{} = error} = do_something()
iex> Err.message(error)
"Unable to perform this action."
"""
@spec message(t()) :: String.t()
def message(%__MODULE__{reason: reason, mod: mod}) when is_nil(mod) do
"generic error #{inspect(reason)}"
end
def message(%__MODULE__{reason: reason, mod: mod}) do
mod.format_error(reason)
end
end
| 27.55814 | 100 | 0.655696 |
081f30c403d4f6a466c9002ff628935e4c363162 | 942 | ex | Elixir | lib/planga/chat/converse/persistence/behaviour.ex | ResiliaDev/Planga | b21d290dd7c2c7fa30571d0a5124d63bd09c0c9e | [
"MIT"
] | 37 | 2018-07-13T14:08:16.000Z | 2021-04-09T15:00:22.000Z | lib/planga/chat/converse/persistence/behaviour.ex | ResiliaDev/Planga | b21d290dd7c2c7fa30571d0a5124d63bd09c0c9e | [
"MIT"
] | 9 | 2018-07-16T15:24:39.000Z | 2021-09-01T14:21:20.000Z | lib/planga/chat/converse/persistence/behaviour.ex | ResiliaDev/Planga | b21d290dd7c2c7fa30571d0a5124d63bd09c0c9e | [
"MIT"
] | 3 | 2018-10-05T20:19:25.000Z | 2019-12-05T00:30:01.000Z | defmodule Planga.Chat.Converse.Persistence.Behaviour do
@callback fetch_messages_by_conversation_id(
conversation_id :: integer,
sent_before_datetime :: DateTime.t()
) :: [%Planga.Chat.Message{}] | no_return()
@callback find_or_create_conversation_by_remote_id!(
app_id :: String.t(),
remote_id :: String.t()
) :: %Planga.Chat.Conversation{} | no_return()
@callback create_message(
app_id :: String.t(),
remote_conversation_id :: String.t(),
user_id :: integer,
message :: String.t(),
other_user_ids :: [integer]
) :: %Planga.Chat.Message{}
@doc """
Looks at current role a certain user has.
"""
@callback fetch_conversation_user_info(conversation_id :: integer, user_id :: any) ::
{:ok, role :: %Planga.Chat.ConversationUser{}} | {:error, any}
end
| 36.230769 | 87 | 0.585987 |
081f43c18a8325440e7694b8fc3c5d2e64cc918f | 18,180 | ex | Elixir | test/support/factory.ex | mydearxym/mastani_server | f24034a4a5449200165cf4a547964a0961793eab | [
"Apache-2.0"
] | 2 | 2018-03-26T08:56:21.000Z | 2018-07-02T22:34:51.000Z | test/support/factory.ex | mydearxym/mastani_server | f24034a4a5449200165cf4a547964a0961793eab | [
"Apache-2.0"
] | 22 | 2018-03-21T03:40:50.000Z | 2018-07-10T06:33:10.000Z | test/support/factory.ex | mydearxym/mastani_server | f24034a4a5449200165cf4a547964a0961793eab | [
"Apache-2.0"
] | null | null | null | defmodule GroupherServer.Support.Factory do
@moduledoc """
This module defines the mock data/func to be used by
tests that require insert some mock data to db.
for example you can db_insert(:user) to insert user into db
"""
import Helper.Utils, only: [done: 1]
import GroupherServer.CMS.Helper.Matcher
alias GroupherServer.{Accounts, CMS, Delivery}
alias Accounts.Model.User
alias CMS.Model.{
Author,
Category,
Community,
Thread,
CommunityThread,
ArticleTag,
CommunityWiki,
CommunityCheatsheet,
Comment
}
@default_article_meta CMS.Model.Embeds.ArticleMeta.default_meta()
@default_emotions CMS.Model.Embeds.CommentEmotion.default_emotions()
# simulate editor.js fmt rich text
def mock_rich_text(text \\ "text") do
"""
{
"time": 111,
"blocks": [
{
"id": "lldjfiek",
"type": "paragraph",
"data": {
"text": "#{text}"
}
}
],
"version": "2.22.0"
}
"""
end
  # Two-paragraph variant (used for link tasks): each argument becomes
  # the text of one paragraph block.
  def mock_rich_text(text1, text2) do
    """
    {
      "time": 111,
      "blocks": [
        {
          "id": "lldjfiek",
          "type": "paragraph",
          "data": {
            "text": "#{text1}"
          }
        },
        {
          "id": "llddiekek",
          "type": "paragraph",
          "data": {
            "text": "#{text2}"
          }
        }
      ],
      "version": "2.22.0"
    }
    """
  end
def mock_xss_string(:safe) do
mock_rich_text("<script>blackmail</script>")
end
def mock_xss_string(text \\ "blackmail") do
mock_rich_text("<script>alert(#{text})</script>")
end
@doc "Convenience wrapper producing rich-text comment content."
def mock_comment(text \\ "comment"), do: mock_rich_text(text)
# Attribute set for a post article: random sentence for title/body,
# associated author and communities, default meta/emotions embeds.
defp mock_meta(:post) do
text = Faker.Lorem.sentence(10)
%{
meta: @default_article_meta,
title: "post-#{String.slice(text, 1, 49)}",
body: mock_rich_text(text),
# NOTE(review): slice starts at offset 100, which is often past the
# end of a 10-word sentence (digest may be nil) — confirm intended.
digest: String.slice(text, 100, 150),
solution_digest: String.slice(text, 1, 150),
length: String.length(text),
author: mock(:author),
views: Enum.random(0..2000),
original_community: mock(:community),
communities: [
mock(:community),
mock(:community)
],
emotions: @default_emotions,
# Backdated by one second, unlike other threads which shift +1s —
# presumably so fresh posts sort behind them; verify against callers.
active_at: Timex.shift(Timex.now(), seconds: -1),
is_question: false,
is_solved: false,
pending: 0
}
end
defp mock_meta(:repo) do
desc = Faker.Lorem.sentence(10)
%{
meta: @default_article_meta |> Map.merge(%{thread: "REPO"}),
title: String.slice(desc, 1, 49),
owner_name: "coderplanets",
owner_url: "http://www.github.com/coderplanets",
repo_url: "http://www.github.com/coderplanets//coderplanets_server",
desc: desc,
homepage_url: "http://www.github.com/coderplanets",
readme: desc,
issues_count: Enum.random(0..2000),
prs_count: Enum.random(0..2000),
fork_count: Enum.random(0..2000),
star_count: Enum.random(0..2000),
watch_count: Enum.random(0..2000),
license: "MIT",
release_tag: "v22",
primary_language: %{
name: "javascript",
color: "tomato"
},
contributors: [
mock_meta(:repo_contributor),
mock_meta(:repo_contributor)
],
author: mock(:author),
views: Enum.random(0..2000),
original_community: mock(:community),
communities: [
mock(:community),
mock(:community)
],
emotions: @default_emotions,
active_at: Timex.shift(Timex.now(), seconds: +1),
pending: 0
}
end
defp mock_meta(:wiki) do
%{
community: mock(:community),
readme: Faker.Lorem.sentence(10),
last_sync: Timex.today() |> Timex.to_datetime(),
contributors: [
mock_meta(:github_contributor),
mock_meta(:github_contributor),
mock_meta(:github_contributor)
]
}
end
defp mock_meta(:cheatsheet) do
mock_meta(:wiki)
end
defp mock_meta(:github_contributor) do
unique_num = System.unique_integer([:positive, :monotonic])
%{
github_id: "#{unique_num}-#{Faker.Lorem.sentence(10)}",
avatar: Faker.Avatar.image_url(),
html_url: Faker.Avatar.image_url(),
nickname: "mydearxym2",
bio: Faker.Lorem.sentence(10),
location: "location #{unique_num}",
company: Faker.Company.name()
}
end
defp mock_meta(:job) do
text = Faker.Lorem.sentence(10)
%{
meta: @default_article_meta |> Map.merge(%{thread: "JOB"}),
title: "job-#{String.slice(text, 1, 49)}",
company: Faker.Company.name(),
body: mock_rich_text(text),
desc: "活少, 美女多",
digest: String.slice(text, 1, 150),
length: String.length(text),
author: mock(:author),
views: Enum.random(0..2000),
original_community: mock(:community),
communities: [
mock(:community)
],
emotions: @default_emotions,
active_at: Timex.shift(Timex.now(), seconds: +1),
pending: 0
}
end
defp mock_meta(:blog) do
text = Faker.Lorem.sentence(10)
%{
meta: @default_article_meta |> Map.merge(%{thread: "BLOG"}),
title: "HTML slot 插槽元素深入",
rss: mock_rss_addr(),
body: mock_rich_text(text),
# digest: String.slice(text, 1, 150),
length: String.length(text),
author: mock(:author),
views: Enum.random(0..2000),
original_community: mock(:community),
communities: [
mock(:community)
],
emotions: @default_emotions,
active_at: Timex.shift(Timex.now(), seconds: +1),
pending: 0
}
end
defp mock_meta(:works) do
text = Faker.Lorem.sentence(10)
%{
meta: @default_article_meta |> Map.merge(%{thread: "WORKS"}),
cover: "cover attrs",
title: "works-#{String.slice(text, 1, 49)}",
desc: "works desc",
homeLink: "https://cool-works.com",
body: mock_rich_text(text),
digest: String.slice(text, 1, 150),
# length: String.length(text),
author: mock(:author),
views: Enum.random(0..2000),
original_community: mock(:community),
communities: [
mock(:community)
],
emotions: @default_emotions,
active_at: Timex.shift(Timex.now(), seconds: +1),
pending: 0
}
end
defp mock_meta(:radar) do
text = Faker.Lorem.sentence(10)
%{
meta: @default_article_meta |> Map.merge(%{thread: "RADAR"}),
title: "radar-#{String.slice(text, 1, 49)}",
body: mock_rich_text(text),
digest: String.slice(text, 1, 150),
link_addr: "https://#{Faker.Company.name()}.com/#{Faker.Company.name()}/post",
# length: String.length(text),
author: mock(:author),
views: Enum.random(0..2000),
original_community: mock(:community),
communities: [
mock(:community)
],
emotions: @default_emotions,
active_at: Timex.shift(Timex.now(), seconds: +1),
pending: 0
}
end
defp mock_meta(:guide) do
text = Faker.Lorem.sentence(10)
%{
meta: @default_article_meta |> Map.merge(%{thread: "GUIDE"}),
title: String.slice(text, 1, 49),
body: mock_rich_text(text),
digest: String.slice(text, 1, 150),
# length: String.length(text),
author: mock(:author),
views: Enum.random(0..2000),
original_community: mock(:community),
communities: [
mock(:community)
],
emotions: @default_emotions,
active_at: Timex.shift(Timex.now(), seconds: +1),
pending: 0
}
end
defp mock_meta(:meetup) do
text = Faker.Lorem.sentence(20)
%{
meta: @default_article_meta |> Map.merge(%{thread: "MEETUP"}),
title: String.slice(text, 1, 49),
body: mock_rich_text(text),
digest: String.slice(text, 1, 150),
link_addr: "https://#{Faker.Company.name()}.com/#{Faker.Company.name()}/post",
# length: String.length(text),
author: mock(:author),
views: Enum.random(0..2000),
original_community: mock(:community),
communities: [
mock(:community)
],
emotions: @default_emotions,
active_at: Timex.shift(Timex.now(), seconds: +1),
pending: 0
}
end
defp mock_meta(:drink) do
text = Faker.Lorem.sentence(20)
%{
meta: @default_article_meta |> Map.merge(%{thread: "DRINK"}),
title: String.slice(text, 1, 49),
body: mock_rich_text(text),
digest: String.slice(text, 1, 150),
# length: String.length(text),
author: mock(:author),
views: Enum.random(0..2000),
original_community: mock(:community),
communities: [
mock(:community)
],
emotions: @default_emotions,
active_at: Timex.shift(Timex.now(), seconds: +1),
pending: 0
}
end
defp mock_meta(:comment) do
%{body: mock_rich_text(), author: mock(:user)}
end
defp mock_meta(:mention) do
unique_num = System.unique_integer([:positive, :monotonic])
%{
from_user: mock(:user),
to_user: mock(:user),
source_id: "1",
source_type: "post",
source_preview: "source_preview #{unique_num}."
}
end
defp mock_meta(:author) do
%{role: "normal", user: mock(:user)}
end
defp mock_meta(:communities_threads) do
%{community_id: 1, thread_id: 1}
end
defp mock_meta(:thread) do
unique_num = System.unique_integer([:positive, :monotonic])
%{title: "thread #{unique_num}", raw: "thread #{unique_num}", index: :rand.uniform(20)}
end
defp mock_meta(:community) do
unique_num = System.unique_integer([:positive, :monotonic])
random_num = Enum.random(0..2000)
title = "community_#{random_num}_#{unique_num}"
%{
title: title,
aka: title,
desc: "community desc",
raw: title,
logo: "https://coderplanets.oss-cn-beijing.aliyuncs.com/icons/pl/elixir.svg",
author: mock(:user)
}
end
defp mock_meta(:category) do
unique_num = System.unique_integer([:positive, :monotonic])
%{
title: "category#{unique_num}",
raw: "category#{unique_num}",
author: mock(:author)
}
end
defp mock_meta(:article_tag) do
unique_num = System.unique_integer([:positive, :monotonic])
%{
title: "#{Faker.Pizza.cheese()}#{unique_num}",
raw: "#{Faker.Pizza.cheese()}#{unique_num}",
thread: "POST",
color: "YELLOW",
group: "cool",
# community: Faker.Pizza.topping(),
community: mock(:community),
author: mock(:author),
extra: []
# user_id: 1
}
end
defp mock_meta(:user) do
unique_num = System.unique_integer([:positive, :monotonic])
%{
login: "#{Faker.Person.first_name()}#{unique_num}" |> String.downcase(),
nickname: "#{Faker.Person.first_name()}#{unique_num}",
bio: Faker.Lorem.Shakespeare.romeo_and_juliet(),
avatar: Faker.Avatar.image_url(),
email: "faker@gmail.com"
}
end
defp mock_meta(:repo_contributor) do
%{
avatar: Faker.Avatar.image_url(),
html_url: Faker.Avatar.image_url(),
nickname: "mydearxym2"
}
end
defp mock_meta(:github_profile) do
unique_num = System.unique_integer([:positive, :monotonic])
%{
id: "#{Faker.Person.first_name()} #{unique_num}",
login: "#{Faker.Person.first_name()}#{unique_num}",
github_id: "#{unique_num + 1000}",
node_id: "#{unique_num + 2000}",
access_token: "#{unique_num + 3000}",
bio: Faker.Lorem.Shakespeare.romeo_and_juliet(),
company: Faker.Company.name(),
location: "chengdu",
email: Faker.Internet.email(),
avatar_url: Faker.Avatar.image_url(),
html_url: Faker.Avatar.image_url(),
followers: unique_num * unique_num,
following: unique_num * unique_num * unique_num
}
end
defp mock_meta(:bill) do
%{
payment_usage: "donate",
payment_method: "alipay",
amount: 51.2,
note: "thank you"
}
end
def mock_attrs(_, attrs \\ %{})
def mock_attrs(:user, attrs), do: mock_meta(:user) |> Map.merge(attrs)
def mock_attrs(:author, attrs), do: mock_meta(:author) |> Map.merge(attrs)
def mock_attrs(:community, attrs), do: mock_meta(:community) |> Map.merge(attrs)
def mock_attrs(:thread, attrs), do: mock_meta(:thread) |> Map.merge(attrs)
def mock_attrs(:mention, attrs), do: mock_meta(:mention) |> Map.merge(attrs)
def mock_attrs(:wiki, attrs), do: mock_meta(:wiki) |> Map.merge(attrs)
def mock_attrs(:cheatsheet, attrs), do: mock_meta(:cheatsheet) |> Map.merge(attrs)
def mock_attrs(:github_contributor, attrs),
do: mock_meta(:github_contributor) |> Map.merge(attrs)
def mock_attrs(:communities_threads, attrs),
do: mock_meta(:communities_threads) |> Map.merge(attrs)
def mock_attrs(:article_tag, attrs), do: mock_meta(:article_tag) |> Map.merge(attrs)
def mock_attrs(:category, attrs), do: mock_meta(:category) |> Map.merge(attrs)
def mock_attrs(:github_profile, attrs), do: mock_meta(:github_profile) |> Map.merge(attrs)
def mock_attrs(:bill, attrs), do: mock_meta(:bill) |> Map.merge(attrs)
def mock_attrs(thread, attrs), do: mock_meta(thread) |> Map.merge(attrs)
# NOTICE: avoid Recursive problem
# this line of code will cause SERIOUS Recursive problem
defp mock(:wiki), do: CommunityWiki |> struct(mock_meta(:wiki))
defp mock(:cheatsheet), do: CommunityCheatsheet |> struct(mock_meta(:cheatsheet))
defp mock(:comment), do: Comment |> struct(mock_meta(:comment))
defp mock(:author), do: Author |> struct(mock_meta(:author))
defp mock(:category), do: Category |> struct(mock_meta(:category))
defp mock(:article_tag), do: ArticleTag |> struct(mock_meta(:article_tag))
defp mock(:user), do: User |> struct(mock_meta(:user))
defp mock(:community), do: Community |> struct(mock_meta(:community))
defp mock(:thread), do: Thread |> struct(mock_meta(:thread))
defp mock(:communities_threads),
do: CommunityThread |> struct(mock_meta(:communities_threads))
defp mock(thread) do
with {:ok, info} <- match(thread) do
info.model |> struct(mock_meta(thread))
end
end
defp mock(factory_name, attributes) do
factory_name |> mock() |> struct(attributes)
end
# """
# not use changeset because in test we may insert some attrs which not in schema
# like: views, insert/update ... to test filter-sort,when ...
# """
@doc """
Builds a `factory_name` struct (optionally overridden with
`attributes`) and inserts it into the test database.

Changesets are deliberately bypassed so tests can set fields outside
the schema's cast set (views, inserted_at, ...).
"""
def db_insert(factory_name, attributes \\ []) do
  GroupherServer.Repo.insert(mock(factory_name, attributes))
end

@doc """
Inserts `count` records of `factory_name`, sleeping `delay` ms before
each insert (useful to force distinct timestamps), and returns
`{:ok, records}` in insertion order.
"""
def db_insert_multi(factory_name, count, delay \\ 0) do
  # Enum.map instead of reduce + `acc ++ [value]`, which appended in
  # O(n^2); order of inserts and of the returned list is unchanged.
  1..count
  |> Enum.map(fn _ ->
    Process.sleep(delay)
    {:ok, value} = db_insert(factory_name)
    value
  end)
  |> done()
end
@images [
"https://rmt.dogedoge.com/fetch/~/source/unsplash/photo-1557555187-23d685287bc3?ixid=MXwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHw%3D&ixlib=rb-1.2.1&auto=format&fit=crop&w=1000&q=80",
"https://rmt.dogedoge.com/fetch/~/source/unsplash/photo-1484399172022-72a90b12e3c1?ixid=MXwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHw%3D&ixlib=rb-1.2.1&auto=format&fit=crop&w=1000&q=80",
"https://images.unsplash.com/photo-1506034861661-ad49bbcf7198?ixlib=rb-1.2.1&ixid=MXwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHw%3D&auto=format&fit=crop&w=1350&q=80",
"https://images.unsplash.com/photo-1614607206234-f7b56bdff6e7?ixid=MXwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHw%3D&ixlib=rb-1.2.1&auto=format&fit=crop&w=634&q=80",
"https://images.unsplash.com/photo-1614526261139-1e5ebbd5086c?ixid=MXwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHw%3D&ixlib=rb-1.2.1&auto=format&fit=crop&w=1350&q=80",
"https://images.unsplash.com/photo-1614366559478-edf9d1cc4719?ixid=MXwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHw%3D&ixlib=rb-1.2.1&auto=format&fit=crop&w=634&q=80",
"https://images.unsplash.com/photo-1614588108027-22a021c8d8e1?ixid=MXwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHw%3D&ixlib=rb-1.2.1&auto=format&fit=crop&w=1349&q=80",
"https://images.unsplash.com/photo-1614522407266-ad3c5fa6bc24?ixid=MXwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHw%3D&ixlib=rb-1.2.1&auto=format&fit=crop&w=1352&q=80",
"https://images.unsplash.com/photo-1601933470096-0e34634ffcde?ixid=MXwxMjA3fDF8MHxwaG90by1wYWdlfHx8fGVufDB8fHw%3D&ixlib=rb-1.2.1&auto=format&fit=crop&w=1350&q=80",
"https://images.unsplash.com/photo-1614598943918-3d0f1e65c22c?ixid=MXwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHw%3D&ixlib=rb-1.2.1&auto=format&fit=crop&w=1350&q=80",
"https://images.unsplash.com/photo-1614542530265-7a46ededfd64?ixid=MXwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHw%3D&ixlib=rb-1.2.1&auto=format&fit=crop&w=634&q=80"
]
@doc "Returns the mock image URL at `index` (first one by default), or `nil` when out of range."
# `Number.t()` is not a real type (there is no stdlib Number module);
# use a built-in type so Dialyzer does not flag an unknown remote type.
# Enum.at/2 returns nil past the end, so the success type includes nil.
@spec mock_image(non_neg_integer()) :: String.t() | nil
def mock_image(index \\ 0) do
  Enum.at(@images, index)
end
@doc "Returns the first `count` mock image URLs."
# `Number.t()` does not exist; use the built-in non_neg_integer type.
@spec mock_images(non_neg_integer()) :: [String.t()]
def mock_images(count \\ 1) do
  @images |> Enum.slice(0, count)
end
@doc """
Returns a real RSS feed URL used by feed-parsing tests; alternative
feeds are kept commented out for manual testing.
"""
def mock_rss_addr() do
# "https://www.xiabingbao.com/atom.xml" # not standards-compliant
# "https://rsshub.app/blogs/wangyin"
"https://www.zhangxinxu.com/wordpress/feed/"
# "https://overreacted.io/rss.xml"
# "https://www.ruanyifeng.com/blog/atom.xml"
# "https://lutaonan.com/rss.xml"
end
@doc """
Inserts a fresh post and delivers a mention of `user` by `from_user`
on it. Returns the result of `Delivery.send/4`.
"""
def mock_mention_for(user, from_user) do
{:ok, post} = db_insert(:post)
mention_attr = %{
thread: "POST",
title: post.title,
article_id: post.id,
comment_id: nil,
block_linker: ["tmp"],
# Timestamps copied from the article (truncated to seconds) so the
# mention sorts consistently with it.
inserted_at: post.updated_at |> DateTime.truncate(:second),
updated_at: post.updated_at |> DateTime.truncate(:second)
}
mention_contents = [
Map.merge(mention_attr, %{from_user_id: from_user.id, to_user_id: user.id})
]
Delivery.send(:mention, post, mention_contents, from_user)
end
@doc """
Inserts a fresh post and delivers an unread `:upvote` notification to
`user` from `from_user`. Returns the result of `Delivery.send/3`.
"""
def mock_notification_for(user, from_user) do
{:ok, post} = db_insert(:post)
notify_attrs = %{
thread: :post,
article_id: post.id,
title: post.title,
action: :upvote,
user_id: user.id,
read: false
}
Delivery.send(:notify, notify_attrs, from_user)
end
end
| 30.452261 | 208 | 0.633663 |
081f533da6539af16b5c4d3c331ee21603800219 | 2,913 | exs | Elixir | lib/elixir/test/elixir/access_test.exs | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | 1 | 2017-07-25T21:46:25.000Z | 2017-07-25T21:46:25.000Z | lib/elixir/test/elixir/access_test.exs | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/access_test.exs | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | 1 | 2017-07-25T21:46:48.000Z | 2017-07-25T21:46:48.000Z | Code.require_file "test_helper.exs", __DIR__
defmodule AccessTest do
use ExUnit.Case, async: true
doctest Access
# Test nil at compilation time does not fail
# and that @config[:foo] has proper precedence.
@config nil
nil = @config[:foo]
@config [foo: :bar]
:bar = @config[:foo]
# An attribute holding a module atom supports remote calls; it can
# later be rebound to a plain value (the leading `-` below is part of
# the value, not the attribute name).
@mod :lists
[1, 2, 3] = @mod.flatten([1, [2], 3])
@mod -13
-13 = @mod
# nil implements Access for reads (always a miss) but not for writes.
test "for nil" do
assert nil[:foo] == nil
assert Access.fetch(nil, :foo) == :error
assert Access.get(nil, :foo) == nil
assert_raise ArgumentError, "could not put/update key :foo on a nil value", fn ->
Access.get_and_update(nil, :foo, fn nil -> {:ok, :bar} end)
end
end
# Keyword-list access requires atom keys; misses return nil/:error.
test "for keywords" do
assert [foo: :bar][:foo] == :bar
assert [foo: [bar: :baz]][:foo][:bar] == :baz
assert [foo: [bar: :baz]][:fuu][:bar] == nil
assert Access.fetch([foo: :bar], :foo) == {:ok, :bar}
assert Access.fetch([foo: :bar], :bar) == :error
msg = ~r/the Access calls for keywords expect the key to be an atom/
assert_raise ArgumentError, msg, fn ->
Access.fetch([], "foo")
end
assert Access.get([foo: :bar], :foo) == :bar
assert Access.get_and_update([], :foo, fn nil -> {:ok, :baz} end) == {:ok, [foo: :baz]}
assert Access.get_and_update([foo: :bar], :foo, fn :bar -> {:ok, :baz} end) == {:ok, [foo: :baz]}
assert Access.pop([foo: :bar], :foo) == {:bar, []}
assert Access.pop([], :foo) == {nil, []}
end
# Map access uses strict (==-style) key equality: 1 does not match 1.0.
test "for maps" do
assert %{foo: :bar}[:foo] == :bar
assert %{1 => 1}[1] == 1
assert %{1.0 => 1.0}[1.0] == 1.0
assert %{1 => 1}[1.0] == nil
assert Access.fetch(%{foo: :bar}, :foo) == {:ok, :bar}
assert Access.fetch(%{foo: :bar}, :bar) == :error
assert Access.get(%{foo: :bar}, :foo) == :bar
assert Access.get_and_update(%{}, :foo, fn nil -> {:ok, :baz} end) == {:ok, %{foo: :baz}}
assert Access.get_and_update(%{foo: :bar}, :foo, fn :bar -> {:ok, :baz} end) == {:ok, %{foo: :baz}}
assert Access.pop(%{foo: :bar}, :foo) == {:bar, %{}}
assert Access.pop(%{}, :foo) == {nil, %{}}
end
# Structs do not implement Access by default, so each Access function
# raises UndefinedFunctionError with an explanatory message.
test "for struct" do
defmodule Sample do
defstruct [:name]
end
assert_raise UndefinedFunctionError,
"function AccessTest.Sample.fetch/2 is undefined (AccessTest.Sample does not implement the Access behaviour)", fn ->
Access.fetch(struct(Sample, []), :name)
end
assert_raise UndefinedFunctionError,
"function AccessTest.Sample.get_and_update/3 is undefined (AccessTest.Sample does not implement the Access behaviour)", fn ->
Access.get_and_update(struct(Sample, []), :name, fn nil -> {:ok, :baz} end)
end
assert_raise UndefinedFunctionError,
"function AccessTest.Sample.pop/2 is undefined (AccessTest.Sample does not implement the Access behaviour)", fn ->
Access.pop(struct(Sample, []), :name)
end
end
end
| 32.366667 | 142 | 0.590457 |
081f54be76a372387eee891c9554961a47e8599e | 4,087 | exs | Elixir | learning/programming_elixir/chapter_22.exs | Mdlkxzmcp/various_elixir | c87527b7118a0c74a042073c04d2228025888ddf | [
"MIT"
] | 2 | 2020-01-20T20:15:20.000Z | 2020-02-27T11:08:42.000Z | learning/programming_elixir/chapter_22.exs | Mdlkxzmcp/various_elixir | c87527b7118a0c74a042073c04d2228025888ddf | [
"MIT"
] | null | null | null | learning/programming_elixir/chapter_22.exs | Mdlkxzmcp/various_elixir | c87527b7118a0c74a042073c04d2228025888ddf | [
"MIT"
] | null | null | null | # Linking Modules: Behavio(u)rs and Use
## Tracing Method Calls
defmodule Tracer do
@moduledoc """
First iteration of the tracing macro: overrides `def/2`, inspects the
definition AST at compile time, and expands to an empty tuple — so the
traced functions are never actually defined.
"""
defmacro def(definition, do: _content) do
IO.inspect definition
quote do: {}
end
end
defmodule Test do
# Swap Kernel.def/2 for Tracer's stub; as a result the functions below
# are only inspected at compile time and never defined (calling them
# raises UndefinedFunctionError).
import Kernel, except: [def: 2]
import Tracer, only: [def: 2]
def puts_sum_three(a,b,c), do: IO.inspect(a+b+c)
def add_list(list), do: Enum.reduce(list, 0, &(&1+&2))
end
# Test.puts_sum_three(1,2,3)
# Test.add_list([5,6,7,8])
# {:puts_sum_three, [line: 16],
# [{:a, [line: 16], nil}, {:b, [line: 16], nil}, {:c, [line: 16], nil}]}
# {:add_list, [line: 17], [{:list, [line: 17], nil}]}
# ** (UndefinedFunctionError) function Test.puts_sum_three/3 is undefined or private
defmodule Tracer2 do
@moduledoc """
Second iteration: the custom `def/2` now delegates to `Kernel.def/2`
with the original definition and body, so the functions behave exactly
as if defined normally (no tracing yet).
"""
defmacro def(definition, do: content) do
quote do
Kernel.def(unquote(definition)) do
unquote(content)
end
end
end
end
defmodule Test2 do
import Kernel, except: [def: 2]
import Tracer2, only: [def: 2]
def puts_sum_three(a,b,c), do: IO.inspect(a+b+c)
def add_list(list), do: Enum.reduce(list, 0, &(&1+&2))
end
Test2.puts_sum_three(1,2,3)
Test2.add_list([5,6,7,8])
# {:puts_sum_three, [line: 16],
# [{:a, [line: 16], nil}, {:b, [line: 16], nil}, {:c, [line: 16], nil}]}
# {:add_list, [line: 17], [{:list, [line: 17], nil}]}
# 6
defmodule Tracer3 do
@moduledoc """
Third iteration: wraps every definition so the call (name and args) is
printed on entry and the result on exit.
"""
# Joins the inspected arguments into a comma-separated string.
def dump_args(args) do
args |> Enum.map(&inspect/1) |> Enum.join(", ")
end
# Renders "name(arg1, arg2)" for trace output.
def dump_def(name, args) do
"#{name}(#{dump_args(args)})"
end
# The definition AST is destructured so name/args can be reused in the
# injected trace lines; `result` is returned so behaviour is unchanged.
defmacro def(definition={name, _, args}, do: content) do
quote do
Kernel.def(unquote(definition)) do
IO.puts " ==> call: #{Tracer3.dump_def(unquote(name), unquote(args))}"
result = unquote(content)
IO.puts "<== result: #{result}"
result
end
end
end
end
defmodule Test3 do
import Kernel, except: [def: 2]
import Tracer3, only: [def: 2]
def puts_sum_three(a,b,c), do: IO.inspect(a+b+c)
def add_list(list), do: Enum.reduce(list, 0, &(&1+&2))
end
Test3.puts_sum_three(1,2,3)
Test3.add_list([5,6,7,8])
# ==> call: puts_sum_three(1, 2, 3)
# 6
# <== result: 6
# ==> call: add_list([5, 6, 7, 8])
# <== result: 26
defmodule Tracer4 do
@moduledoc """
Fourth iteration: same entry/exit tracing as Tracer3, plus a
`__using__/1` macro so clients can simply `use Tracer4` instead of
writing the import boilerplate themselves.
"""
# Joins the inspected arguments into a comma-separated string.
def dump_args(args) do
args |> Enum.map(&inspect/1) |> Enum.join(", ")
end
# Renders "name(arg1, arg2)" for trace output.
def dump_def(name, args) do
"#{name}(#{dump_args(args)})"
end
defmacro def(definition={name, _, args}, do: content) do
quote do
Kernel.def(unquote(definition)) do
IO.puts " ==> call: #{Tracer4.dump_def(unquote(name), unquote(args))}"
result = unquote(content)
IO.puts "<== result: #{result}"
result
end
end
end
# Injected into the caller by `use Tracer4`: hides Kernel.def/2 and
# imports the tracing def/2 in its place.
defmacro __using__(_opts) do
quote do
import Kernel, except: [def: 2]
import unquote(__MODULE__), only: [def: 2]
end
end
end
defmodule Test4 do
use Tracer4
def puts_sum_three(a,b,c), do: IO.inspect(a+b+c)
def add_list(list), do: Enum.reduce(list, 0, &(&1+&2))
end
Test4.puts_sum_three(1,2,3)
Test4.add_list([5,6,7,8])
# ==> call: puts_sum_three(1, 2, 3)
# 6
# <== result: 6
# ==> call: add_list([5, 6, 7, 8])
# <== result: 26
## Exercises:
defmodule ANSITracer do
@moduledoc """
Exercise variant of Tracer4 that colours the trace output with
IO.ANSI: green for the call header, bright for the result.
"""
# Joins the inspected arguments into a comma-separated string.
def dump_args(args) do
args |> Enum.map(&inspect/1) |> Enum.join(", ")
end
# Renders "name(args)" with the name in green (second arg `true`
# forces ANSI emission regardless of terminal detection).
def dump_def(name, args) do
IO.ANSI.format([:green, "#{name}", "(#{dump_args(args)})"], true)
end
defmacro def(definition={name, _, args}, do: content) do
quote do
Kernel.def(unquote(definition)) do
IO.puts " ==> call: #{ANSITracer.dump_def(unquote(name), unquote(args))}"
result = unquote(content)
IO.puts IO.ANSI.format(["<== result: ", :bright, "#{result}"], true)
result
end
end
end
# Allows clients to `use ANSITracer` instead of importing manually.
defmacro __using__(_opts) do
quote do
import Kernel, except: [def: 2]
import unquote(__MODULE__), only: [def: 2]
end
end
end
defmodule TestANSI do
use ANSITracer
def puts_sum_three(a,b,c), do: IO.inspect(a+b+c)
def add_list(list), do: Enum.reduce(list, 0, &(&1+&2))
end
TestANSI.puts_sum_three(1,2,3)
TestANSI.add_list([5,6,7,8])
| 22.832402 | 84 | 0.60044 |
081f74a19eeb2f1dc93bc0708c39e3d5cefc31bd | 2,469 | exs | Elixir | test/xdr/transactions/operations/set_trust_line_flags_result_code_test.exs | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | null | null | null | test/xdr/transactions/operations/set_trust_line_flags_result_code_test.exs | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | null | null | null | test/xdr/transactions/operations/set_trust_line_flags_result_code_test.exs | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | null | null | null | defmodule StellarBase.XDR.Operations.SetTrustLineFlagsResultCodeTest do
use ExUnit.Case
alias StellarBase.XDR.Operations.SetTrustLineFlagsResultCode
@codes [
:SET_TRUST_LINE_FLAGS_SUCCESS,
:SET_TRUST_LINE_FLAGS_MALFORMED,
:SET_TRUST_LINE_FLAGS_NO_TRUST_LINE,
:SET_TRUST_LINE_FLAGS_CANT_REVOKE,
:SET_TRUST_LINE_FLAGS_INVALID_STATE,
:SET_TRUST_LINE_FLAGS_LOW_RESERVE
]
@binaries [
<<0, 0, 0, 0>>,
<<255, 255, 255, 255>>,
<<255, 255, 255, 254>>,
<<255, 255, 255, 253>>,
<<255, 255, 255, 252>>,
<<255, 255, 255, 251>>
]
describe "SetTrustLineFlagsResultCode" do
setup do
%{
codes: @codes,
results: @codes |> Enum.map(fn code -> SetTrustLineFlagsResultCode.new(code) end),
binaries: @binaries
}
end
test "new/1", %{codes: types} do
for type <- types,
do:
%SetTrustLineFlagsResultCode{identifier: ^type} =
SetTrustLineFlagsResultCode.new(type)
end
test "encode_xdr/1", %{results: results, binaries: binaries} do
for {result, binary} <- Enum.zip(results, binaries),
do: {:ok, ^binary} = SetTrustLineFlagsResultCode.encode_xdr(result)
end
test "encode_xdr/1 with an invalid code" do
{:error, :invalid_key} =
SetTrustLineFlagsResultCode.encode_xdr(%SetTrustLineFlagsResultCode{identifier: :TEST})
end
test "encode_xdr!/1", %{results: results, binaries: binaries} do
for {result, binary} <- Enum.zip(results, binaries),
do: ^binary = SetTrustLineFlagsResultCode.encode_xdr!(result)
end
test "decode_xdr/2", %{results: results, binaries: binaries} do
for {result, binary} <- Enum.zip(results, binaries),
do: {:ok, {^result, ""}} = SetTrustLineFlagsResultCode.decode_xdr(binary)
end
test "decode_xdr/2 with an invalid declaration" do
{:error, :invalid_key} = SetTrustLineFlagsResultCode.decode_xdr(<<1, 0, 0, 1>>)
end
test "decode_xdr!/2", %{results: results, binaries: binaries} do
for {result, binary} <- Enum.zip(results, binaries),
do: {^result, ^binary} = SetTrustLineFlagsResultCode.decode_xdr!(binary <> binary)
end
test "decode_xdr!/2 with an error code", %{binaries: binaries} do
for binary <- binaries,
do:
{%SetTrustLineFlagsResultCode{identifier: _}, ""} =
SetTrustLineFlagsResultCode.decode_xdr!(binary)
end
end
end
| 32.064935 | 95 | 0.650466 |
081f85db4c6b2f31e200a7017ce34fedd8c1addf | 1,267 | ex | Elixir | server/test/support/conn_case.ex | nilenso/pencil.space | 2934d83287dd716b9b984aae5bb7e72247bf4d23 | [
"MIT"
] | 3 | 2021-05-30T07:56:32.000Z | 2021-08-06T12:03:15.000Z | server/test/support/conn_case.ex | nilenso/pencil.space | 2934d83287dd716b9b984aae5bb7e72247bf4d23 | [
"MIT"
] | 5 | 2020-05-06T13:05:52.000Z | 2020-12-04T20:59:41.000Z | server/test/support/conn_case.ex | nilenso/pencil.space | 2934d83287dd716b9b984aae5bb7e72247bf4d23 | [
"MIT"
] | null | null | null | defmodule PencilSpaceServerWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use PencilSpaceServerWeb.ConnCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
import Plug.Conn
import Phoenix.ConnTest
alias PencilSpaceServerWeb.Router.Helpers, as: Routes
# The default endpoint for testing
@endpoint PencilSpaceServerWeb.Endpoint
end
end
# Check out a sandboxed DB connection for every test; for non-async
# tests switch to shared mode so processes spawned by the test can use
# the same connection. Returns a fresh conn in the test context.
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(PencilSpaceServer.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(PencilSpaceServer.Repo, {:shared, self()})
end
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 30.166667 | 79 | 0.737964 |
081f9e22ad7d4a7f6ed273f5ea9732c328c616e5 | 456 | ex | Elixir | examples/ct_ecto/lib/ct/application.ex | 50kudos/closure_table | 4c1a8e321318dc2dcb2d178b9dbc58431e0211e4 | [
"Apache-2.0"
] | 16 | 2019-09-08T14:31:36.000Z | 2022-02-14T19:50:23.000Z | examples/ct_ecto/lib/ct/application.ex | 50kudos/closure_table | 4c1a8e321318dc2dcb2d178b9dbc58431e0211e4 | [
"Apache-2.0"
] | 6 | 2019-09-14T06:03:41.000Z | 2021-05-16T00:53:40.000Z | examples/ct_ecto/lib/ct/application.ex | 50kudos/closure_table | 4c1a8e321318dc2dcb2d178b9dbc58431e0211e4 | [
"Apache-2.0"
] | 7 | 2019-09-13T16:39:45.000Z | 2021-05-11T06:15:04.000Z | defmodule CT.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
def start(_type, _args) do
children = [
CT.Repo,
CT.MyCTE
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: CT.Supervisor]
Supervisor.start_link(children, opts)
end
end
| 22.8 | 56 | 0.695175 |
081fa3d98531bde142953cd3d5a6c8eeb748c1dc | 340 | ex | Elixir | lib/supabase_surface/components/icons/icon_gitlab.ex | treebee/supabase-surface | 5a184ca92323c085dd81e2fc8aa8c10367f2382e | [
"Apache-2.0"
] | 5 | 2021-06-08T08:02:43.000Z | 2022-02-09T23:13:46.000Z | lib/supabase_surface/components/icons/icon_gitlab.ex | treebee/supabase-surface | 5a184ca92323c085dd81e2fc8aa8c10367f2382e | [
"Apache-2.0"
] | null | null | null | lib/supabase_surface/components/icons/icon_gitlab.ex | treebee/supabase-surface | 5a184ca92323c085dd81e2fc8aa8c10367f2382e | [
"Apache-2.0"
] | 1 | 2021-07-14T05:20:31.000Z | 2021-07-14T05:20:31.000Z | defmodule SupabaseSurface.Components.Icons.IconGitlab do
use SupabaseSurface.Components.Icon
@impl true
def render(assigns) do
# Resolve the component's named size to pixel dimensions for the SVG.
icon_size = IconContainer.get_size(assigns.size)
~F"""
<IconContainer assigns={assigns}>
{Feathericons.gitlab(width: icon_size, height: icon_size)}
</IconContainer>
"""
end
end
| 22.666667 | 64 | 0.717647 |
081fb5e2c42b68673e2d90203ba1cdc1f6133001 | 1,424 | exs | Elixir | config/dev.exs | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | config/dev.exs | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | config/dev.exs | boolafish/elixir-omg | 46b568404972f6e4b4da3195d42d4fb622edb934 | [
"Apache-2.0"
] | null | null | null | import Config
config :logger,
backends: [:console, Sentry.LoggerBackend]
config :omg,
ethereum_events_check_interval_ms: 500,
coordinator_eth_height_check_interval_ms: 1_000
config :omg_child_chain,
block_queue_eth_height_check_interval_ms: 1_000
config :omg_child_chain_rpc, environment: :dev
config :phoenix, :stacktrace_depth, 20
config :omg_child_chain_rpc, OMG.ChildChainRPC.Tracer,
disabled?: true,
env: "development"
config :omg_db,
path: Path.join([System.get_env("HOME"), ".omg/data"])
config :ethereumex,
http_options: [recv_timeout: 60_000]
config :omg_eth,
min_exit_period_seconds: 10 * 60,
ethereum_block_time_seconds: 1,
node_logging_in_debug: true
config :omg_watcher_rpc, environment: :dev
config :phoenix, :stacktrace_depth, 20
config :omg_watcher_rpc, OMG.WatcherRPC.Tracer,
disabled?: true,
env: "development"
config :omg_watcher_info, environment: :dev
config :omg_watcher_info, OMG.WatcherInfo.Tracer,
disabled?: true,
env: "development"
config :omg_watcher, environment: :dev
config :omg_watcher,
# 1 hour of Ethereum blocks
exit_processor_sla_margin: 60 * 4,
# this means we allow the `sla_margin` above be larger than the `min_exit_period`
exit_processor_sla_margin_forced: true
config :omg_watcher, OMG.Watcher.Tracer,
disabled?: true,
env: "development"
config :omg_status, OMG.Status.Metric.Tracer,
env: "development",
disabled?: true
| 24.135593 | 83 | 0.77177 |
081fbac5222f073bae7dacb319098cc1d53787ab | 2,805 | ex | Elixir | lib/asciinema_web/router.ex | jrschumacher/asciinema-server | 5f04b0c10de13da0925e28f8fd65210698501bb2 | [
"Apache-2.0"
] | null | null | null | lib/asciinema_web/router.ex | jrschumacher/asciinema-server | 5f04b0c10de13da0925e28f8fd65210698501bb2 | [
"Apache-2.0"
] | null | null | null | lib/asciinema_web/router.ex | jrschumacher/asciinema-server | 5f04b0c10de13da0925e28f8fd65210698501bb2 | [
"Apache-2.0"
] | null | null | null | defmodule AsciinemaWeb.Router do
use AsciinemaWeb, :router
use Plug.ErrorHandler
defp handle_errors(_conn, %{reason: %Ecto.NoResultsError{}}), do: nil
defp handle_errors(_conn, %{reason: %Phoenix.NotAcceptableError{}}), do: nil
use Sentry.Plug
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
plug AsciinemaWeb.Auth
end
pipeline :asciicast do
plug :accepts, ["html", "js", "json", "cast", "png", "gif"]
plug :put_secure_browser_headers
end
scope "/", AsciinemaWeb do
pipe_through :asciicast
get "/a/:id", AsciicastController, :show
end
scope "/", AsciinemaWeb do
pipe_through :browser # Use the default browser stack
get "/a/:id/iframe", AsciicastController, :iframe
get "/docs", DocController, :index
get "/docs/:topic", DocController, :show
resources "/login", LoginController, only: [:new, :create], singleton: true
get "/login/sent", LoginController, :sent, as: :login
resources "/user", UserController, as: :user, only: [:edit, :update], singleton: true
resources "/users", UserController, as: :users, only: [:new, :create]
resources "/session", SessionController, only: [:new, :create], singleton: true
get "/connect/:api_token", ApiTokenController, :show, as: :connect
get "/about", PageController, :about
get "/privacy", PageController, :privacy
get "/tos", PageController, :tos
get "/contact", PageController, :contact
get "/contributing", PageController, :contributing
end
scope "/api", AsciinemaWeb.Api, as: :api do
post "/asciicasts", AsciicastController, :create
end
# Other scopes may use custom stacks.
# scope "/api", Asciinema do
# pipe_through :api
# end
end
defmodule AsciinemaWeb.Router.Helpers.Extra do
@moduledoc """
Hand-written path helpers not derived from the router: profile URLs
(username vs numeric id form) and asciicast file/image/animation paths.
"""
alias AsciinemaWeb.Router.Helpers, as: H
# Conn-first arity kept for call-site symmetry with generated helpers.
def profile_path(_conn, user) do
profile_path(user)
end
def profile_path(%Plug.Conn{} = conn) do
profile_path(conn.assigns.current_user)
end
# Prefer the vanity "/~username" URL; fall back to "/u/id".
def profile_path(%{id: id, username: username}) do
if username do
"/~#{username}"
else
"/u/#{id}"
end
end
def asciicast_file_path(conn, asciicast) do
H.asciicast_path(conn, :show, asciicast) <> "." <> ext(asciicast)
end
def asciicast_file_url(conn, asciicast) do
H.asciicast_url(conn, :show, asciicast) <> "." <> ext(asciicast)
end
# v0/v1 recordings were stored as JSON; v2+ use the asciicast format.
defp ext(asciicast) do
case asciicast.version do
0 -> "json"
1 -> "json"
_ -> "cast"
end
end
def asciicast_image_path(conn, asciicast) do
H.asciicast_path(conn, :show, asciicast) <> ".png"
end
def asciicast_animation_path(conn, asciicast) do
H.asciicast_path(conn, :show, asciicast) <> ".gif"
end
end
081fd7ea38cdb8308f51d6ba760ab2753df782e8 | 3,234 | ex | Elixir | lib/autoalias/conflicts.ex | ARtoriouSs/autoalias | 7589f0321d936777b01378b8db602317e3514ac4 | [
"MIT"
] | 5 | 2020-03-05T17:30:08.000Z | 2020-10-24T18:18:24.000Z | lib/autoalias/conflicts.ex | ARtoriouSs/autoalias | 7589f0321d936777b01378b8db602317e3514ac4 | [
"MIT"
] | null | null | null | lib/autoalias/conflicts.ex | ARtoriouSs/autoalias | 7589f0321d936777b01378b8db602317e3514ac4 | [
"MIT"
] | null | null | null | defmodule Autoalias.Conflicts do
@moduledoc """
Provides mechanism for resolving module conflicts
"""
@doc """
Accepts list of modules and returns list of modules with resolved conflicts.
Conflicts solving performs recursively till there will be no conflicts at all.
If we have several modules with the same ending and alias all of them, only last one will be accepted.
For example if there is modules like `Foo.Bar.SomeModule` and `Baz.Qux.Corge.SomeModule` it will alias
the longest module, and closest parent for all conflicted modules. In this case it will be:
```elixir
alias Baz.Qux.Corge.SomeModule
alias Foo.Bar
```
So we can now use `SomeModule` to access first one and `Bar.SomeModule` for the second.
Here is the corner case when conflict appears with one-word module, e.g. `SomeModule` and `MyApp.SomeModule`.
In this case it will create alias for `MyApp.SomeModule` and you will loose direct access to one-word
`SomeModule` module. If this happened, you can prepend module with `Elixir.` prefix, like `Elixir.SomeModule`,
to access it. `Elixir` module itself cannot be aliased at all.
This cases are pretty rare, but it can happen.
"""
def resolve(modules) do
resolved =
modules
|> Enum.sort(fn first, second -> submodules_count(first) <= submodules_count(second) end)
|> Enum.map(fn module -> get_conflicts(module, modules) end)
|> Enum.dedup_by(fn %{target: target} -> last_child(target) end)
|> Enum.map(fn %{conflicts: conflicts} -> conflicts end)
|> List.flatten()
|> Enum.reduce(modules, fn module, modules ->
modules
|> List.delete(module)
|> List.insert_at(-1, parent(module))
end)
if has_conflicts?(resolved), do: resolve(resolved), else: resolved
end
# Accepts target module and list of modules.
# Returns map which contains target module and all modules with the same ending, e.g.:
#
# %{target: Foo.Bar.Baz, conflicts: [Qux.Baz, Corge.Baz]}
#
@doc false
defp get_conflicts(target, modules) do
conflicts =
modules
|> Enum.reduce([], fn module, conflicts ->
if module != target and last_child(module) == last_child(target) do
conflicts ++ [module]
else
conflicts
end
end)
%{target: target, conflicts: conflicts}
end
# Checks whether or not modules list has conflicts
#
@doc false
defp has_conflicts?(modules) do
modules
|> Enum.any?(fn module ->
module
|> get_conflicts(modules)
|> Map.get(:conflicts)
|> Enum.any?()
end)
end
# Counts submodules not considering Elixir module, for Foo.Bar it will be 2
#
@doc false
defp submodules_count(Elixir), do: 0
defp submodules_count(module) do
module |> Module.split() |> Enum.count()
end
# Returns module parent, e.g. for Foo.Bar it will return Foo
#
@doc false
defp last_child(Elixir), do: Elixir
defp last_child(module) do
module
|> Module.split()
|> List.last()
end
# Returns module parent, e.g. for Foo.Bar it will return Foo
#
@doc false
def parent(module) do
module
|> Module.split()
|> Enum.drop(-1)
|> Module.concat()
end
end
| 30.509434 | 112 | 0.66945 |
081fdf105272cf21764992556c30b5552767eecb | 5,516 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/publisher_profile_api_proto.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/publisher_profile_api_proto.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v14/model/publisher_profile_api_proto.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
# NOTE: auto-generated model (see the generator notice above this module) —
# regenerate rather than hand-editing.
defmodule GoogleApi.AdExchangeBuyer.V14.Model.PublisherProfileApiProto do
  @moduledoc """
  Publisher profile resource for the Ad Exchange Buyer API v1.4.

  ## Attributes

    * `audience` (*type:* `String.t`, *default:* `nil`) - Publisher provided info on its audience.
    * `buyerPitchStatement` (*type:* `String.t`, *default:* `nil`) - A pitch statement for the buyer
    * `directContact` (*type:* `String.t`, *default:* `nil`) - Direct contact for the publisher profile.
    * `exchange` (*type:* `String.t`, *default:* `nil`) - Exchange where this publisher profile is from. E.g. AdX, Rubicon etc...
    * `googlePlusLink` (*type:* `String.t`, *default:* `nil`) - Link to publisher's Google+ page.
    * `isParent` (*type:* `boolean()`, *default:* `nil`) - True, if this is the parent profile, which represents all domains owned by the publisher.
    * `isPublished` (*type:* `boolean()`, *default:* `nil`) - True, if this profile is published. Deprecated for state.
    * `kind` (*type:* `String.t`, *default:* `adexchangebuyer#publisherProfileApiProto`) - Identifies what kind of resource this is. Value: the fixed string "adexchangebuyer#publisherProfileApiProto".
    * `logoUrl` (*type:* `String.t`, *default:* `nil`) - The url to the logo for the publisher.
    * `mediaKitLink` (*type:* `String.t`, *default:* `nil`) - The url for additional marketing and sales materials.
    * `name` (*type:* `String.t`, *default:* `nil`) -
    * `overview` (*type:* `String.t`, *default:* `nil`) - Publisher provided overview.
    * `profileId` (*type:* `integer()`, *default:* `nil`) - The pair of (seller.account_id, profile_id) uniquely identifies a publisher profile for a given publisher.
    * `programmaticContact` (*type:* `String.t`, *default:* `nil`) - Programmatic contact for the publisher profile.
    * `publisherDomains` (*type:* `list(String.t)`, *default:* `nil`) - The list of domains represented in this publisher profile. Empty if this is a parent profile.
    * `publisherProfileId` (*type:* `String.t`, *default:* `nil`) - Unique Id for publisher profile.
    * `publisherProvidedForecast` (*type:* `GoogleApi.AdExchangeBuyer.V14.Model.PublisherProvidedForecast.t`, *default:* `nil`) - Publisher provided forecasting information.
    * `rateCardInfoLink` (*type:* `String.t`, *default:* `nil`) - Link to publisher rate card
    * `samplePageLink` (*type:* `String.t`, *default:* `nil`) - Link for a sample content page.
    * `seller` (*type:* `GoogleApi.AdExchangeBuyer.V14.Model.Seller.t`, *default:* `nil`) - Seller of the publisher profile.
    * `state` (*type:* `String.t`, *default:* `nil`) - State of the publisher profile.
    * `topHeadlines` (*type:* `list(String.t)`, *default:* `nil`) - Publisher provided key metrics and rankings.
  """

  # ModelBase provides the field/2 macro, struct definition and
  # decode/encode plumbing used by the Poison implementations below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :audience => String.t(),
          :buyerPitchStatement => String.t(),
          :directContact => String.t(),
          :exchange => String.t(),
          :googlePlusLink => String.t(),
          :isParent => boolean(),
          :isPublished => boolean(),
          :kind => String.t(),
          :logoUrl => String.t(),
          :mediaKitLink => String.t(),
          :name => String.t(),
          :overview => String.t(),
          :profileId => integer(),
          :programmaticContact => String.t(),
          :publisherDomains => list(String.t()),
          :publisherProfileId => String.t(),
          :publisherProvidedForecast =>
            GoogleApi.AdExchangeBuyer.V14.Model.PublisherProvidedForecast.t(),
          :rateCardInfoLink => String.t(),
          :samplePageLink => String.t(),
          :seller => GoogleApi.AdExchangeBuyer.V14.Model.Seller.t(),
          :state => String.t(),
          :topHeadlines => list(String.t())
        }

  field(:audience)
  field(:buyerPitchStatement)
  field(:directContact)
  field(:exchange)
  field(:googlePlusLink)
  field(:isParent)
  field(:isPublished)
  field(:kind)
  field(:logoUrl)
  field(:mediaKitLink)
  field(:name)
  field(:overview)
  field(:profileId)
  field(:programmaticContact)
  field(:publisherDomains, type: :list)
  field(:publisherProfileId)

  field(
    :publisherProvidedForecast,
    as: GoogleApi.AdExchangeBuyer.V14.Model.PublisherProvidedForecast
  )

  field(:rateCardInfoLink)
  field(:samplePageLink)
  field(:seller, as: GoogleApi.AdExchangeBuyer.V14.Model.Seller)
  field(:state)
  field(:topHeadlines, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V14.Model.PublisherProfileApiProto do
  # Delegate straight to the generated model's decode/2.
  def decode(value, options),
    do: GoogleApi.AdExchangeBuyer.V14.Model.PublisherProfileApiProto.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V14.Model.PublisherProfileApiProto do
  # Encoding is shared across all generated models via ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 47.551724 | 200 | 0.674583 |
081fef5a5a0aeef0c3a707da33aa035c9b5d2cf3 | 570 | ex | Elixir | apps/buzzcms/lib/buzzcms/schema/entry_type_taxonomy.ex | buzzcms/buzzcms | 8ca8e6dea381350f94cc4a666448b5dba6676520 | [
"Apache-2.0"
] | null | null | null | apps/buzzcms/lib/buzzcms/schema/entry_type_taxonomy.ex | buzzcms/buzzcms | 8ca8e6dea381350f94cc4a666448b5dba6676520 | [
"Apache-2.0"
] | 41 | 2020-02-12T07:53:14.000Z | 2020-03-30T02:18:14.000Z | apps/buzzcms/lib/buzzcms/schema/entry_type_taxonomy.ex | buzzcms/buzzcms | 8ca8e6dea381350f94cc4a666448b5dba6676520 | [
"Apache-2.0"
defmodule Buzzcms.Schema.EntryTypeTaxonomy do
  @moduledoc """
  Join schema linking entry types to taxonomies, with an ordering position.
  """

  use Ecto.Schema
  import Ecto.Changeset

  # Fields accepted from external params in changeset/2.
  @cast_fields [:entry_type_id, :taxonomy_id, :position]

  @primary_key false
  schema "entry_type_taxonomy" do
    belongs_to :entry_type, Buzzcms.Schema.EntryType
    belongs_to :taxonomy, Buzzcms.Schema.Taxonomy
    field :position, :integer
  end

  @doc """
  Builds a changeset for the join row, enforcing the composite uniqueness
  and both foreign keys at the database level.
  """
  def changeset(entity, params \\ %{}) do
    base =
      entity
      |> cast(params, @cast_fields)
      |> unique_constraint(:taxonomy, name: :entry_type_taxonomy_pkey)

    base
    |> foreign_key_constraint(:entry_type_id)
    |> foreign_key_constraint(:taxonomy_id)
  end
end
| 27.142857 | 68 | 0.738596 |
08200060864e447730ab6b1653512b0974c148f8 | 5,748 | ex | Elixir | lib/bamboo/adapters/mailgun_adapter.ex | speeddragon/bamboo | b7bc9df132c7710ac3344be677b22a156e4eecd3 | [
"MIT"
] | null | null | null | lib/bamboo/adapters/mailgun_adapter.ex | speeddragon/bamboo | b7bc9df132c7710ac3344be677b22a156e4eecd3 | [
"MIT"
] | null | null | null | lib/bamboo/adapters/mailgun_adapter.ex | speeddragon/bamboo | b7bc9df132c7710ac3344be677b22a156e4eecd3 | [
"MIT"
defmodule Bamboo.MailgunAdapter do
  @moduledoc """
  Sends email using Mailgun's API.

  Use this adapter to send emails through Mailgun's API. Requires that an API
  key and a domain are set in the config.

  ## Example config

      # In config/config.exs, or config.prod.exs, etc.
      config :my_app, MyApp.Mailer,
        adapter: Bamboo.MailgunAdapter,
        api_key: "my_api_key",
        domain: "your.domain"

      # Define a Mailer. Maybe in lib/my_app/mailer.ex
      defmodule MyApp.Mailer do
        use Bamboo.Mailer, otp_app: :my_app
      end
  """

  @service_name "Mailgun"
  @base_uri "https://api.mailgun.net/v3"
  @behaviour Bamboo.Adapter

  alias Bamboo.{Email, Attachment}
  import Bamboo.ApiError

  # Fields the Mailgun messages endpoint accepts directly.
  @mailgun_message_fields ~w(from to cc bcc subject text html)a
  # Fields used internally while building the request, never sent verbatim.
  @internal_fields ~w(attachments)a

  @doc false
  def handle_config(config) do
    # Fail fast at boot when a required setting is missing or blank.
    for setting <- [:api_key, :domain] do
      if config[setting] in [nil, ""] do
        raise_missing_setting_error(config, setting)
      end
    end

    config
  end

  defp raise_missing_setting_error(config, setting) do
    raise ArgumentError, """
    There was no #{setting} set for the Mailgun adapter.

    * Here are the config options that were passed in:

    #{inspect(config)}
    """
  end

  @doc """
  Delivers the given `Bamboo.Email` through Mailgun's messages endpoint.

  Returns a map with `:status_code`, `:headers` and `:body` on success.
  Raises `Bamboo.ApiError` when Mailgun responds with a non-2xx status or
  when the HTTP request itself fails.
  """
  def deliver(email, config) do
    body = to_mailgun_body(email)

    case :hackney.post(full_uri(config), headers(email, config), body, [:with_body]) do
      {:ok, status, _headers, response} when status > 299 ->
        raise_api_error(@service_name, response, body)

      {:ok, status, headers, response} ->
        %{status_code: status, headers: headers, body: response}

      {:error, reason} ->
        raise_api_error(inspect(reason))
    end
  end

  @doc false
  def supports_attachments?, do: true

  defp full_uri(config) do
    Application.get_env(:bamboo, :mailgun_base_uri, @base_uri) <>
      "/" <> config.domain <> "/messages"
  end

  # Attachments require a multipart request; plain emails are form-encoded.
  defp headers(%Email{} = email, config) do
    [{"Content-Type", content_type(email)}, {"Authorization", "Basic #{auth_token(config)}"}]
  end

  defp auth_token(config), do: Base.encode64("api:" <> config.api_key)

  defp content_type(%{attachments: []}), do: "application/x-www-form-urlencoded"
  defp content_type(%{}), do: "multipart/form-data"

  # Translates a Bamboo.Email into the wire format Mailgun expects.
  defp to_mailgun_body(email) do
    %{}
    |> put_from(email)
    |> put_to(email)
    |> put_subject(email)
    |> put_html(email)
    |> put_text(email)
    |> put_cc(email)
    |> put_bcc(email)
    |> put_reply_to(email)
    |> put_attachments(email)
    |> put_headers(email)
    |> put_custom_vars(email)
    |> filter_non_empty_mailgun_fields()
    |> encode_body()
  end

  defp put_from(body, %Email{from: from}), do: Map.put(body, :from, prepare_recipient(from))

  defp put_to(body, %Email{to: to}), do: Map.put(body, :to, prepare_recipients(to))

  defp put_reply_to(body, %Email{headers: %{"reply-to" => nil}}), do: body

  defp put_reply_to(body, %Email{headers: %{"reply-to" => address}}),
    do: Map.put(body, :"h:Reply-To", address)

  defp put_reply_to(body, %Email{headers: _headers}), do: body

  defp put_cc(body, %Email{cc: []}), do: body
  defp put_cc(body, %Email{cc: cc}), do: Map.put(body, :cc, prepare_recipients(cc))

  defp put_bcc(body, %Email{bcc: []}), do: body
  defp put_bcc(body, %Email{bcc: bcc}), do: Map.put(body, :bcc, prepare_recipients(bcc))

  # Mailgun expects a single comma-separated recipient string.
  defp prepare_recipients(recipients) do
    Enum.map_join(recipients, ",", &prepare_recipient/1)
  end

  defp prepare_recipient({nil, address}), do: address
  defp prepare_recipient({"", address}), do: address
  defp prepare_recipient({name, address}), do: "#{name} <#{address}>"

  defp put_subject(body, %Email{subject: subject}), do: Map.put(body, :subject, subject)

  defp put_text(body, %Email{text_body: nil}), do: body
  defp put_text(body, %Email{text_body: text_body}), do: Map.put(body, :text, text_body)

  defp put_html(body, %Email{html_body: nil}), do: body
  defp put_html(body, %Email{html_body: html_body}), do: Map.put(body, :html, html_body)

  # Arbitrary SMTP headers are forwarded with Mailgun's "h:" prefix.
  defp put_headers(body, %Email{headers: headers}) do
    Enum.reduce(headers, body, fn {key, value}, acc ->
      Map.put(acc, :"h:#{key}", value)
    end)
  end

  # Custom variables (set in email.private under :mailgun_custom_vars) are
  # forwarded with Mailgun's "v:" prefix.
  defp put_custom_vars(body, %Email{private: private}) do
    private
    |> Map.get(:mailgun_custom_vars, %{})
    |> Enum.reduce(body, fn {key, value}, acc ->
      Map.put(acc, :"v:#{key}", value)
    end)
  end

  defp put_attachments(body, %Email{attachments: []}), do: body

  defp put_attachments(body, %Email{attachments: attachments}) do
    # Bamboo stores attachments most-recent-first; reverse to preserve the
    # order they were added in.
    attachment_data =
      attachments
      |> Enum.reverse()
      |> Enum.map(&prepare_file/1)

    Map.put(body, :attachments, attachment_data)
  end

  # Builds a hackney multipart file part for a single attachment.
  defp prepare_file(%Attachment{} = attachment) do
    {"", attachment.data,
     {"form-data", [{"name", ~s/"attachment"/}, {"filename", ~s/"#{attachment.filename}"/}]}, []}
  end

  @doc false
  def filter_non_empty_mailgun_fields(body) do
    body
    |> Enum.filter(fn {key, value} ->
      # Keep only well-known Mailgun fields (including "h:" header and "v:"
      # custom-var keys) whose values are non-empty.
      mailgun_field?(key) && !(value in [nil, "", []])
    end)
    |> Map.new()
  end

  defp mailgun_field?(key) do
    key in @mailgun_message_fields || key in @internal_fields ||
      String.starts_with?(Atom.to_string(key), ["h:", "v:"])
  end

  defp encode_body(%{attachments: attachments} = body) do
    {
      :multipart,
      # Drop the internal-only fields, stringify the rest, then append the
      # prepared attachment parts.
      body
      |> Map.drop(@internal_fields)
      |> Enum.map(fn {k, v} -> {to_string(k), to_string(v)} end)
      |> Kernel.++(attachments)
    }
  end

  defp encode_body(body_without_attachments), do: Plug.Conn.Query.encode(body_without_attachments)
end
| 30.252632 | 108 | 0.65501 |
08201fb0a5c6f6d67e60cd692c3163a6e66e4d0c | 97 | exs | Elixir | test/oauth2_example_test.exs | castingclouds/rc4gho | fdbd6665aa233370c857d63b1d6a22459e1833ce | [
"MIT"
] | null | null | null | test/oauth2_example_test.exs | castingclouds/rc4gho | fdbd6665aa233370c857d63b1d6a22459e1833ce | [
"MIT"
] | null | null | null | test/oauth2_example_test.exs | castingclouds/rc4gho | fdbd6665aa233370c857d63b1d6a22459e1833ce | [
"MIT"
defmodule RocketTest do
  use ExUnit.Case

  # Template-generated sanity check; kept until real tests exist.
  test "the truth" do
    assert 2 == 1 + 1
  end
end
| 12.125 | 23 | 0.649485 |
08207137c353f633b8597c803cfe3fa43cef7e05 | 11,160 | ex | Elixir | lib/plug/builder.ex | shadowfacts/plug | c27823e537df26557a1facc3febad5ebe5f1e415 | [
"Apache-2.0"
] | null | null | null | lib/plug/builder.ex | shadowfacts/plug | c27823e537df26557a1facc3febad5ebe5f1e415 | [
"Apache-2.0"
] | null | null | null | lib/plug/builder.ex | shadowfacts/plug | c27823e537df26557a1facc3febad5ebe5f1e415 | [
"Apache-2.0"
# NOTE(review): the "] | null | null | null | " prefix on the next line is
# dataset-extraction garbage fused with the defmodule; strip it when cleaning
# this file up.
] | null | null | null | defmodule Plug.Builder do
  @moduledoc """
  Conveniences for building plugs.

  This module can be `use`-d into a module in order to build
  a plug pipeline:

      defmodule MyApp do
        use Plug.Builder

        plug Plug.Logger
        plug :hello, upper: true

        # A function from another module can be plugged too, provided it's
        # imported into the current module first.
        import AnotherModule, only: [interesting_plug: 2]
        plug :interesting_plug

        def hello(conn, opts) do
          body = if opts[:upper], do: "WORLD", else: "world"
          send_resp(conn, 200, body)
        end
      end

  Multiple plugs can be defined with the `plug/2` macro, forming a pipeline.
  The plugs in the pipeline will be executed in the order they've been added
  through the `plug/2` macro. In the example above, `Plug.Logger` will be
  called first and then the `:hello` function plug will be called on the
  resulting connection.

  `Plug.Builder` also imports the `Plug.Conn` module, making functions like
  `send_resp/3` available.

  ## Options

  When used, the following options are accepted by `Plug.Builder`:

    * `:log_on_halt` - accepts the level to log whenever the request is halted
    * `:init_mode` - the environment to initialize the plug's options, one of
      `:compile` or `:runtime`. Defaults `:compile`.

  ## Plug behaviour

  Internally, `Plug.Builder` implements the `Plug` behaviour, which means both
  the `init/1` and `call/2` functions are defined.

  By implementing the Plug API, `Plug.Builder` guarantees this module is a plug
  and can be handed to a web server or used as part of another pipeline.

  ## Overriding the default Plug API functions

  Both the `init/1` and `call/2` functions defined by `Plug.Builder` can be
  manually overridden. For example, the `init/1` function provided by
  `Plug.Builder` returns the options that it receives as an argument, but its
  behaviour can be customized:

      defmodule PlugWithCustomOptions do
        use Plug.Builder
        plug Plug.Logger

        def init(opts) do
          opts
        end
      end

  The `call/2` function that `Plug.Builder` provides is used internally to
  execute all the plugs listed using the `plug` macro, so overriding the
  `call/2` function generally implies using `super` in order to still call the
  plug chain:

      defmodule PlugWithCustomCall do
        use Plug.Builder
        plug Plug.Logger
        plug Plug.Head

        def call(conn, opts) do
          conn
          |> super(opts) # calls Plug.Logger and Plug.Head
          |> assign(:called_all_plugs, true)
        end
      end

  ## Halting a plug pipeline

  A plug pipeline can be halted with `Plug.Conn.halt/1`. The builder will
  prevent further plugs downstream from being invoked and return the current
  connection. In the following example, the `Plug.Logger` plug never gets
  called:

      defmodule PlugUsingHalt do
        use Plug.Builder

        plug :stopper
        plug Plug.Logger

        def stopper(conn, _opts) do
          halt(conn)
        end
      end
  """

  @type plug :: module | atom

  @doc false
  defmacro __using__(opts) do
    quote do
      @behaviour Plug
      @plug_builder_opts unquote(opts)

      def init(opts) do
        opts
      end

      def call(conn, opts) do
        plug_builder_call(conn, opts)
      end

      defoverridable init: 1, call: 2

      import Plug.Conn
      import Plug.Builder, only: [plug: 1, plug: 2, builder_opts: 0]

      # Accumulate plug declarations; they are compiled into a single
      # plug_builder_call/2 in __before_compile__/1.
      Module.register_attribute(__MODULE__, :plugs, accumulate: true)
      @before_compile Plug.Builder
    end
  end

  @doc false
  defmacro __before_compile__(env) do
    plugs = Module.get_attribute(env.module, :plugs)

    # If builder_opts() was used anywhere, replace its placeholder ref with
    # `unquote(opts)` so each marked plug receives the builder's own options.
    plugs =
      if builder_ref = get_plug_builder_ref(env.module) do
        traverse(plugs, builder_ref)
      else
        plugs
      end

    builder_opts = Module.get_attribute(env.module, :plug_builder_opts)
    {conn, body} = Plug.Builder.compile(env, plugs, builder_opts)

    quote do
      defp plug_builder_call(unquote(conn), opts), do: unquote(body)
    end
  end

  # Recursively walks an arbitrary term (tuples, maps, lists) and replaces
  # every occurrence of the builder ref with the `unquote(opts)` AST node.
  defp traverse(tuple, ref) when is_tuple(tuple) do
    tuple |> Tuple.to_list() |> traverse(ref) |> List.to_tuple()
  end

  defp traverse(map, ref) when is_map(map) do
    map |> Map.to_list() |> traverse(ref) |> Map.new()
  end

  defp traverse(list, ref) when is_list(list) do
    Enum.map(list, &traverse(&1, ref))
  end

  defp traverse(ref, ref) do
    {:unquote, [], [quote(do: opts)]}
  end

  defp traverse(term, _ref) do
    term
  end

  @doc """
  A macro that stores a new plug. `opts` will be passed unchanged to the new
  plug.

  This macro doesn't add any guards when adding the new plug to the pipeline;
  for more information about adding plugs with guards see `compile/3`.

  ## Examples

      plug Plug.Logger               # plug module
      plug :foo, some_options: true  # plug function

  """
  defmacro plug(plug, opts \\ []) do
    # Expand aliases at the call site so module plugs are stored resolved.
    plug = Macro.expand(plug, %{__CALLER__ | function: {:init, 1}})

    quote do
      @plugs {unquote(plug), unquote(opts), true}
    end
  end

  @doc """
  Annotates a plug will receive the options given
  to the current module itself as arguments.

  Imagine the following plug:

      defmodule MyPlug do
        use Plug.Builder

        plug :inspect_opts, builder_opts()

        defp inspect_opts(conn, opts) do
          IO.inspect(opts)
          conn
        end
      end

  When plugged as:

      plug MyPlug, custom: :options

  It will print `[custom: :options]` as the builder options
  were passed to the inner plug.

  Note you only pass `builder_opts()` to **function plugs**.
  You cannot use `builder_opts()` with module plugs because
  their options are evaluated at compile time. If you need
  to pass `builder_opts()` to a module plug, you can wrap
  the module plug in function. To be precise, do not do this:

      plug Plug.Parsers, builder_opts()

  Instead do this:

      plug :custom_plug_parsers, builder_opts()

      defp custom_plug_parsers(conn, opts) do
        Plug.Parsers.call(conn, Plug.Parsers.init(opts))
      end

  """
  defmacro builder_opts() do
    quote do
      Plug.Builder.__builder_opts__(__MODULE__)
    end
  end

  @doc false
  def __builder_opts__(module) do
    get_plug_builder_ref(module) || generate_plug_builder_ref(module)
  end

  defp get_plug_builder_ref(module) do
    Module.get_attribute(module, :plug_builder_ref)
  end

  # Creates a unique placeholder ref (one per module) that traverse/2 later
  # swaps for `unquote(opts)`.
  defp generate_plug_builder_ref(module) do
    ref = make_ref()
    Module.put_attribute(module, :plug_builder_ref, ref)
    ref
  end

  @doc """
  Compiles a plug pipeline.

  Each element of the plug pipeline (according to the type signature of this
  function) has the form:

      {plug_name, options, guards}

  Note that this function expects a reversed pipeline (with the last plug that
  has to be called coming first in the pipeline).

  The function returns a tuple with the first element being a quoted reference
  to the connection and the second element being the compiled quoted pipeline.

  ## Examples

      Plug.Builder.compile(env, [
        {Plug.Logger, [], true}, # no guards, as added by the Plug.Builder.plug/2 macro
        {Plug.Head, [], quote(do: a when is_binary(a))}
      ], [])

  """
  @spec compile(Macro.Env.t(), [{plug, Plug.opts(), Macro.t()}], Keyword.t()) ::
          {Macro.t(), Macro.t()}
  def compile(env, pipeline, builder_opts) do
    conn = quote do: conn
    init_mode = builder_opts[:init_mode] || :compile

    unless init_mode in [:compile, :runtime] do
      raise ArgumentError, """
      invalid :init_mode when compiling #{inspect(env.module)}.

      Supported values include :compile or :runtime. Got: #{inspect(init_mode)}
      """
    end

    # The pipeline is reversed, so the reduce nests calls inside-out:
    # the first element ends up innermost (i.e. executed last).
    ast =
      Enum.reduce(pipeline, conn, fn {plug, opts, guards}, acc ->
        {plug, opts, guards}
        |> init_plug(init_mode)
        |> quote_plug(acc, env, builder_opts)
      end)

    {conn, ast}
  end

  # Initializes the options of a plug in the configured init_mode.
  defp init_plug({plug, opts, guards}, init_mode) do
    case Atom.to_charlist(plug) do
      # Module atoms are prefixed with "Elixir."; anything else is a
      # function plug.
      ~c"Elixir." ++ _ -> init_module_plug(plug, opts, guards, init_mode)
      _ -> init_fun_plug(plug, opts, guards)
    end
  end

  # :compile mode runs init/1 now and embeds the result in the generated code.
  defp init_module_plug(plug, opts, guards, :compile) do
    initialized_opts = plug.init(opts)

    if function_exported?(plug, :call, 2) do
      {:module, plug, escape(initialized_opts), guards}
    else
      raise ArgumentError, message: "#{inspect(plug)} plug must implement call/2"
    end
  end

  # :runtime mode defers init/1 to when the generated code executes.
  defp init_module_plug(plug, opts, guards, :runtime) do
    {:module, plug, quote(do: unquote(plug).init(unquote(escape(opts)))), guards}
  end

  defp init_fun_plug(plug, opts, guards) do
    {:function, plug, escape(opts), guards}
  end

  # unquote: true keeps any `unquote(...)` markers (e.g. from builder_opts)
  # live inside the escaped options.
  defp escape(opts) do
    Macro.escape(opts, unquote: true)
  end

  # `acc` is a series of nested plug calls in the form of
  # plug3(plug2(plug1(conn))). `quote_plug` wraps a new plug around that series
  # of calls.
  defp quote_plug({plug_type, plug, opts, guards}, acc, env, builder_opts) do
    call = quote_plug_call(plug_type, plug, opts)

    error_message =
      case plug_type do
        :module -> "expected #{inspect(plug)}.call/2 to return a Plug.Conn"
        :function -> "expected #{plug}/2 to return a Plug.Conn"
      end <> ", all plugs must receive a connection (conn) and return a connection"

    {fun, meta, [arg, [do: clauses]]} =
      quote do
        case unquote(compile_guards(call, guards)) do
          %Plug.Conn{halted: true} = conn ->
            unquote(log_halt(plug_type, plug, env, builder_opts))
            conn

          %Plug.Conn{} = conn ->
            unquote(acc)

          other ->
            raise unquote(error_message) <> ", got: #{inspect(other)}"
        end
      end

    # On OTP >= 19 mark the generated clauses as such; on older releases fall
    # back to line -1 — presumably both to keep compiler/Dialyzer diagnostics
    # from pointing at macro-generated code (TODO confirm).
    generated? = :erlang.system_info(:otp_release) >= '19'

    clauses =
      Enum.map(clauses, fn {:->, meta, args} ->
        if generated? do
          {:->, [generated: true] ++ meta, args}
        else
          {:->, Keyword.put(meta, :line, -1), args}
        end
      end)

    {fun, meta, [arg, [do: clauses]]}
  end

  defp quote_plug_call(:function, plug, opts) do
    quote do: unquote(plug)(conn, unquote(opts))
  end

  defp quote_plug_call(:module, plug, opts) do
    quote do: unquote(plug).call(conn, unquote(opts))
  end

  # With no guards (`true`), call the plug unconditionally.
  defp compile_guards(call, true) do
    call
  end

  # With guards, only call the plug when they hold; otherwise pass the conn
  # through untouched.
  defp compile_guards(call, guards) do
    quote do
      case true do
        true when unquote(guards) -> unquote(call)
        true -> conn
      end
    end
  end

  # Emits a log statement for halted pipelines when :log_on_halt is set;
  # otherwise emits nothing (nil in the quoted code).
  defp log_halt(plug_type, plug, env, builder_opts) do
    if level = builder_opts[:log_on_halt] do
      message =
        case plug_type do
          :module -> "#{inspect(env.module)} halted in #{inspect(plug)}.call/2"
          :function -> "#{inspect(env.module)} halted in #{inspect(plug)}/2"
        end

      quote do
        require Logger
        # Matching, to make Dialyzer happy on code executing Plug.Builder.compile/3
        _ = Logger.unquote(level)(unquote(message))
      end
    else
      nil
    end
  end
end
| 27.555556 | 87 | 0.648656 |
08207a13b2b072a4a098d6e7558da8f27b165651 | 5,821 | ex | Elixir | clients/authorized_buyers_marketplace/lib/google_api/authorized_buyers_marketplace/v1/model/publisher_profile.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/authorized_buyers_marketplace/lib/google_api/authorized_buyers_marketplace/v1/model/publisher_profile.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/authorized_buyers_marketplace/lib/google_api/authorized_buyers_marketplace/v1/model/publisher_profile.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# NOTE: auto-generated model (see the generator notice above this module) —
# regenerate rather than hand-editing.
defmodule GoogleApi.AuthorizedBuyersMarketplace.V1.Model.PublisherProfile do
  @moduledoc """
  The values in the publisher profile are supplied by the publisher. All fields are not filterable unless stated otherwise.

  ## Attributes

    * `audienceDescription` (*type:* `String.t`, *default:* `nil`) - Description on the publisher's audience.
    * `directDealsContact` (*type:* `String.t`, *default:* `nil`) - Contact information for direct reservation deals. This is free text entered by the publisher and may include information like names, phone numbers and email addresses.
    * `displayName` (*type:* `String.t`, *default:* `nil`) - Display name of the publisher profile. Can be used to filter the response of the publisherProfiles.list method.
    * `domains` (*type:* `list(String.t)`, *default:* `nil`) - The list of domains represented in this publisher profile. Empty if this is a parent profile. These are top private domains, meaning that these will not contain a string like "photos.google.co.uk/123", but will instead contain "google.co.uk". Can be used to filter the response of the publisherProfiles.list method.
    * `isParent` (*type:* `boolean()`, *default:* `nil`) - Indicates if this profile is the parent profile of the seller. A parent profile represents all the inventory from the seller, as opposed to child profile that is created to brand a portion of inventory. One seller has only one parent publisher profile, and can have multiple child profiles. See https://support.google.com/admanager/answer/6035806 for details. Can be used to filter the response of the publisherProfiles.list method by setting the filter to "is_parent: true".
    * `logoUrl` (*type:* `String.t`, *default:* `nil`) - A Google public URL to the logo for this publisher profile. The logo is stored as a PNG, JPG, or GIF image.
    * `mediaKitUrl` (*type:* `String.t`, *default:* `nil`) - URL to additional marketing and sales materials.
    * `mobileApps` (*type:* `list(GoogleApi.AuthorizedBuyersMarketplace.V1.Model.PublisherProfileMobileApplication.t)`, *default:* `nil`) - The list of apps represented in this publisher profile. Empty if this is a parent profile.
    * `name` (*type:* `String.t`, *default:* `nil`) - Name of the publisher profile. Format: `buyers/{buyer}/publisherProfiles/{publisher_profile}`
    * `overview` (*type:* `String.t`, *default:* `nil`) - Overview of the publisher.
    * `pitchStatement` (*type:* `String.t`, *default:* `nil`) - Statement explaining what's unique about publisher's business, and why buyers should partner with the publisher.
    * `programmaticDealsContact` (*type:* `String.t`, *default:* `nil`) - Contact information for programmatic deals. This is free text entered by the publisher and may include information like names, phone numbers and email addresses.
    * `publisherCode` (*type:* `String.t`, *default:* `nil`) - A unique identifying code for the seller. This value is the same for all of the seller's parent and child publisher profiles. Can be used to filter the response of the publisherProfiles.list method.
    * `samplePageUrl` (*type:* `String.t`, *default:* `nil`) - URL to a sample content page.
    * `topHeadlines` (*type:* `list(String.t)`, *default:* `nil`) - Up to three key metrics and rankings. For example "#1 Mobile News Site for 20 Straight Months".
  """

  # ModelBase provides the field/2 macro, struct definition and
  # decode/encode plumbing used by the Poison implementations below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :audienceDescription => String.t() | nil,
          :directDealsContact => String.t() | nil,
          :displayName => String.t() | nil,
          :domains => list(String.t()) | nil,
          :isParent => boolean() | nil,
          :logoUrl => String.t() | nil,
          :mediaKitUrl => String.t() | nil,
          :mobileApps =>
            list(
              GoogleApi.AuthorizedBuyersMarketplace.V1.Model.PublisherProfileMobileApplication.t()
            )
            | nil,
          :name => String.t() | nil,
          :overview => String.t() | nil,
          :pitchStatement => String.t() | nil,
          :programmaticDealsContact => String.t() | nil,
          :publisherCode => String.t() | nil,
          :samplePageUrl => String.t() | nil,
          :topHeadlines => list(String.t()) | nil
        }

  field(:audienceDescription)
  field(:directDealsContact)
  field(:displayName)
  field(:domains, type: :list)
  field(:isParent)
  field(:logoUrl)
  field(:mediaKitUrl)

  field(:mobileApps,
    as: GoogleApi.AuthorizedBuyersMarketplace.V1.Model.PublisherProfileMobileApplication,
    type: :list
  )

  field(:name)
  field(:overview)
  field(:pitchStatement)
  field(:programmaticDealsContact)
  field(:publisherCode)
  field(:samplePageUrl)
  field(:topHeadlines, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.AuthorizedBuyersMarketplace.V1.Model.PublisherProfile do
  alias GoogleApi.AuthorizedBuyersMarketplace.V1.Model.PublisherProfile

  # Decoding is delegated to the generated model module (via ModelBase).
  def decode(value, options), do: PublisherProfile.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.AuthorizedBuyersMarketplace.V1.Model.PublisherProfile do
  # Encoding of every generated model is handled generically by ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 59.397959 | 534 | 0.708298 |
0820999438137802bda87e2bf4fd9eb8320c8156 | 2,831 | ex | Elixir | lib/absinthe_metrics.ex | xosdy/absinthe-metrics | 7267e251d1a4a63a8c47fd945143a6063f402b45 | [
"MIT"
] | null | null | null | lib/absinthe_metrics.ex | xosdy/absinthe-metrics | 7267e251d1a4a63a8c47fd945143a6063f402b45 | [
"MIT"
] | null | null | null | lib/absinthe_metrics.ex | xosdy/absinthe-metrics | 7267e251d1a4a63a8c47fd945143a6063f402b45 | [
"MIT"
] | null | null | null | defmodule AbsintheMetrics do
alias Absinthe.Resolution
@behaviour Absinthe.Middleware
@callback instrument(
object :: atom,
field :: atom,
result :: any,
time :: non_neg_integer,
resolution :: Resolution.t()
) ::
any
@callback field(object :: String.t(), field :: String.t(), args :: []) :: any
defmacro __using__(opts) do
adapter = Keyword.get(opts, :adapter, AbsintheMetrics.Backend.Echo)
arguments = Keyword.get(opts, :arguments, [])
wrapped_arguments =
case arguments do
[] -> []
arguments -> [arguments]
end
quote do
def instrument([], _field, _obj), do: []
def instrument(
middleware,
%{__reference__: %{module: Absinthe.Type.BuiltIns.Introspection}},
_obj
),
do: middleware
def instrument(middleware, field, _obj) do
[{{AbsintheMetrics, :call}, {unquote(adapter), unquote(arguments)}} | middleware]
end
def install(schema) do
instrumented? = fn %{middleware: middleware} = field, obj ->
middleware =
case middleware do
[{{Absinthe.Middleware, :shim}, {_, _, _}}] ->
Absinthe.Middleware.unshim(middleware, schema)
_ ->
middleware
end
middleware
|> Enum.any?(fn
{{AbsintheMetrics, :call}, _} ->
true
_ ->
false
end)
end
for %{fields: fields} = object <- Absinthe.Schema.types(schema),
{k, %Absinthe.Type.Field{name: name, identifier: id} = field} <- fields,
instrumented?.(field, object) do
apply(
unquote(adapter),
:field,
[object.identifier, field.identifier] ++ unquote(wrapped_arguments)
)
end
end
end
end
def call(%Resolution{state: :unresolved} = res, {adapter, _}) do
now = :erlang.monotonic_time()
%{
res
| middleware:
res.middleware ++
[
{{AbsintheMetrics, :after_resolve},
start_at: now,
adapter: adapter,
field: res.definition.schema_node.identifier,
object: res.parent_type.identifier}
]
}
end
def after_resolve(%Resolution{state: :resolved} = res,
start_at: start_at,
adapter: adapter,
field: field,
object: object
) do
end_at = :erlang.monotonic_time()
diff = end_at - start_at
result =
case res.errors do
[] -> {:ok, res.value}
errors -> {:error, errors}
end
adapter.instrument(object, field, result, diff, res)
res
end
end
| 25.736364 | 89 | 0.52349 |
08209dec67a917c7db6cd3174944a240a31cd29d | 1,359 | exs | Elixir | mix.exs | zookzook/hocon | f99655cbbccb5fb2a5d2acff85a697a1c89fc1fe | [
"Apache-2.0"
] | 14 | 2019-11-29T16:29:15.000Z | 2021-08-17T20:32:28.000Z | mix.exs | zookzook/hocon | f99655cbbccb5fb2a5d2acff85a697a1c89fc1fe | [
"Apache-2.0"
] | 5 | 2019-12-02T21:06:27.000Z | 2019-12-19T08:26:01.000Z | mix.exs | zookzook/hocon | f99655cbbccb5fb2a5d2acff85a697a1c89fc1fe | [
"Apache-2.0"
] | 2 | 2020-03-23T16:56:46.000Z | 2022-01-17T20:23:25.000Z | defmodule Hocon.MixProject do
use Mix.Project
@version "0.1.8"
def project do
[
app: :hocon,
version: @version,
name: "hocon",
elixir: "~> 1.9",
start_permanent: Mix.env() == :prod,
deps: deps(),
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [docs: :docs, coveralls: :test, "coveralls.detail": :test, "coveralls.post": :test, "coveralls.html": :test],
description: description(),
package: package(),
docs: docs()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:excoveralls, "~> 0.12.1", only: :test},
{:ex_doc, "~> 0.21", only: :dev, runtime: false}
]
end
defp description() do
"""
Parse HOCON configuration files in Elixir following the HOCON specifications.
"""
end
defp package() do
[maintainers: ["Michael Maier"],
licenses: ["Apache 2.0"],
links: %{"GitHub" => "https://github.com/zookzook/hocon"}]
end
defp docs() do
[main: "Hocon",
name: "HOCON",
extras: ["README.md"],
source_ref: "#{@version}",
canonical: "http://hexdocs.pm/hocon",
source_url: "https://github.com/zookzook/hocon"]
end
end
| 23.033898 | 134 | 0.586461 |
0820a9068803b680b912b20126682590c2883e15 | 23,884 | ex | Elixir | lib/ash/changeset/changeset.ex | elbow-jason/ash | eb63bc9d4d24187ad07d9892088b4e55ad6258e4 | [
"MIT"
] | null | null | null | lib/ash/changeset/changeset.ex | elbow-jason/ash | eb63bc9d4d24187ad07d9892088b4e55ad6258e4 | [
"MIT"
] | null | null | null | lib/ash/changeset/changeset.ex | elbow-jason/ash | eb63bc9d4d24187ad07d9892088b4e55ad6258e4 | [
"MIT"
] | null | null | null | defmodule Ash.Changeset do
@moduledoc """
Changesets are used to create and update data in Ash.
Create a changeset with `create/2` or `update/2`, and alter the attributes
and relationships using the functions provided in this module. Nothing in this module
actually incurs changes in a data layer. To commit a changeset, see `c:Ash.Api.create/2`
and `c:Ash.Api.update/2`.
## Primary Keys
For relationship manipulation using `append_to_relationship/3`, `remove_from_relationship/3`
and `replace_relationship/3` there are three types that can be used for primary keys:
1.) An instance of the resource in question.
2.) If the primary key is just a single field, i.e `:id`, then a single value, i.e `1`
3.) A map of keys to values representing the primary key, i.e `%{id: 1}` or `%{id: 1, org_id: 2}`
## Join Attributes
For many to many relationships, the attributes on a join relationship may be set while relating items
by passing a tuple of the primary key and the changes to be applied. This is done via upserts, so
update validations on the join resource are *not* applied, but create validations are.
For example:
```elixir
Ash.Changeset.replace_relationship(changeset, :linked_tickets, [
{1, %{link_type: "blocking"}},
{a_ticket, %{link_type: "caused_by"}},
{%{id: 2}, %{link_type: "related_to"}}
])
```
"""
defstruct [
:data,
:action_type,
:resource,
:api,
data_layer_context: %{},
after_action: [],
before_action: [],
errors: [],
valid?: true,
attributes: %{},
relationships: %{},
change_dependencies: [],
requests: []
]
defimpl Inspect do
import Inspect.Algebra
def inspect(changeset, opts) do
container_doc(
"#Ash.Changeset<",
[
concat("action_type: ", inspect(changeset.action_type)),
concat("attributes: ", to_doc(changeset.attributes, opts)),
concat("relationships: ", to_doc(changeset.relationships, opts)),
concat("errors: ", to_doc(changeset.errors, opts)),
concat("data: ", to_doc(changeset.data, opts)),
concat("valid?: ", to_doc(changeset.valid?, opts))
],
">",
opts,
fn str, _ -> str end
)
end
end
@type t :: %__MODULE__{}
alias Ash.Error.{
Changes.InvalidAttribute,
Changes.InvalidRelationship,
Changes.NoSuchAttribute,
Changes.NoSuchRelationship,
Invalid.NoSuchResource
}
@doc "Return a changeset over a resource or a record"
@spec new(Ash.resource() | Ash.record(), map) :: t
def new(resource, initial_attributes \\ %{})
def new(%resource{} = record, initial_attributes) do
if Ash.Resource.resource?(resource) do
%__MODULE__{resource: resource, data: record, action_type: :update}
|> change_attributes(initial_attributes)
else
%__MODULE__{resource: resource, action_type: :create, data: struct(resource)}
|> add_error(NoSuchResource.exception(resource: resource))
end
end
def new(resource, initial_attributes) do
if Ash.Resource.resource?(resource) do
%__MODULE__{resource: resource, action_type: :create, data: struct(resource)}
|> change_attributes(initial_attributes)
else
%__MODULE__{resource: resource, action_type: :create, data: struct(resource)}
|> add_error(NoSuchResource.exception(resource: resource))
end
end
@doc """
Wraps a function in the before/after action hooks of a changeset.
The function takes a changeset and if it returns
`{:ok, result}`, the result will be passed through the after
action hooks.
"""
@spec with_hooks(t(), (t() -> {:ok, Ash.record()} | {:error, term})) ::
{:ok, term} | {:error, term}
def with_hooks(changeset, func) do
changeset =
Enum.reduce_while(changeset.before_action, changeset, fn before_action, changeset ->
case before_action.(changeset) do
%{valid?: true} = changeset -> {:cont, changeset}
changeset -> {:halt, changeset}
end
end)
if changeset.valid? do
case func.(changeset) do
{:ok, result} ->
Enum.reduce_while(
changeset.after_action,
{:ok, result},
fn after_action, {:ok, result} ->
case after_action.(changeset, result) do
{:ok, new_result} -> {:cont, {:ok, new_result}}
{:error, error} -> {:halt, {:error, error}}
end
end
)
{:error, error} ->
{:error, error}
end
else
{:error, changeset.errors}
end
end
@doc "Gets the changing value or the original value of an attribute"
@spec get_attribute(t, atom) :: term
def get_attribute(changeset, attribute) do
case fetch_change(changeset, attribute) do
{:ok, value} ->
value
:error ->
get_data(changeset, attribute)
end
end
@doc "Gets the new value for an attribute, or `:error` if it is not being changed"
@spec fetch_change(t, atom) :: {:ok, any} | :error
def fetch_change(changeset, attribute) do
Map.fetch(changeset.attributes, attribute)
end
@doc "Gets the original value for an attribute"
@spec get_data(t, atom) :: {:ok, any} | :error
def get_data(changeset, attribute) do
Map.get(changeset.data, attribute)
end
@spec put_datalayer_context(t(), atom, term) :: t()
def put_datalayer_context(changeset, key, value) do
%{changeset | data_layer_context: Map.put(changeset.data_layer_context, key, value)}
end
@spec set_datalayer_context(t(), map) :: t()
def set_datalayer_context(changeset, map) do
%{changeset | data_layer_context: Map.merge(changeset.data_layer_context, map)}
end
@doc """
Appends a record of list of records to a relationship. Stacks with previous removals/additions.
Accepts a primary key or a list of primary keys. See the section on "Primary Keys" in the
module documentation for more.
For many to many relationships, accepts changes for any `join_attributes` configured on
the resource. See the section on "Join Attributes" in the module documentation for more.
Cannot be used with `belongs_to` or `has_one` relationships.
See `replace_relationship/3` for manipulating `belongs_to` and `has_one` relationships.
"""
@spec append_to_relationship(t, atom, Ash.primary_key() | [Ash.primary_key()]) :: t()
def append_to_relationship(changeset, relationship, record_or_records) do
case Ash.Resource.relationship(changeset.resource, relationship) do
nil ->
error =
NoSuchRelationship.exception(
resource: changeset.resource,
name: relationship
)
add_error(changeset, error)
%{cardinality: :one, type: type} = relationship ->
error =
InvalidRelationship.exception(
relationship: relationship.name,
message: "Cannot append to a #{type} relationship"
)
add_error(changeset, error)
%{writable?: false} = relationship ->
error =
InvalidRelationship.exception(
relationship: relationship.name,
message: "Relationship is not editable"
)
add_error(changeset, error)
%{type: :many_to_many} = relationship ->
case primary_keys_with_changes(relationship, List.wrap(record_or_records)) do
{:ok, primary_keys} ->
relationships =
changeset.relationships
|> Map.put_new(relationship.name, %{})
|> add_to_relationship_key_and_reconcile(relationship, :add, primary_keys)
%{changeset | relationships: relationships}
{:error, error} ->
add_error(changeset, error)
end
relationship ->
case primary_key(relationship, List.wrap(record_or_records)) do
{:ok, primary_keys} ->
relationships =
changeset.relationships
|> Map.put_new(relationship.name, %{})
|> add_to_relationship_key_and_reconcile(relationship, :add, primary_keys)
%{changeset | relationships: relationships}
{:error, error} ->
add_error(changeset, error)
end
end
end
@doc """
Removes a record of list of records to a relationship. Stacks with previous removals/additions.
Accepts a primary key or a list of primary keys. See the section on "Primary Keys" in the
module documentation for more.
Cannot be used with `belongs_to` or `has_one` relationships.
See `replace_relationship/3` for manipulating those relationships.
"""
@spec remove_from_relationship(t, atom, Ash.primary_key() | [Ash.primary_key()]) :: t()
def remove_from_relationship(changeset, relationship, record_or_records) do
case Ash.Resource.relationship(changeset.resource, relationship) do
nil ->
error =
NoSuchRelationship.exception(
resource: changeset.resource,
name: relationship
)
add_error(changeset, error)
%{cardinality: :one, type: type} = relationship ->
error =
InvalidRelationship.exception(
relationship: relationship.name,
message: "Cannot remove from a #{type} relationship"
)
add_error(changeset, error)
%{writable?: false} = relationship ->
error =
InvalidRelationship.exception(
relationship: relationship.name,
message: "Relationship is not editable"
)
add_error(changeset, error)
relationship ->
case primary_key(relationship, List.wrap(record_or_records)) do
{:ok, primary_keys} ->
relationships =
changeset.relationships
|> Map.put_new(relationship.name, %{})
|> add_to_relationship_key_and_reconcile(relationship, :remove, primary_keys)
%{changeset | relationships: relationships}
{:error, error} ->
add_error(changeset, error)
nil
end
end
end
defp add_to_relationship_key_and_reconcile(relationships, relationship, key, to_add) do
Map.update!(relationships, relationship.name, fn relationship_changes ->
relationship_changes
|> Map.put_new(key, [])
|> Map.update!(key, &Kernel.++(to_add, &1))
|> reconcile_relationship_changes()
end)
end
@doc """
Replaces the value of a relationship. Any previous additions/removals are cleared.
Accepts a primary key or a list of primary keys. See the section on "Primary Keys" in the
module documentation for more.
For many to many relationships, accepts changes for any `join_attributes` configured on
the resource. See the section on "Join Attributes" in the module documentation for more.
For a `has_many` or `many_to_many` relationship, this means removing any currently related
records that are not present in the replacement list, and creating any that do not exist
in the data layer.
For a `belongs_to` or `has_one`, replace with a `nil` value to unset a relationship.
"""
@spec replace_relationship(
t(),
atom(),
Ash.primary_key() | [Ash.primary_key()] | nil
) :: t()
def replace_relationship(changeset, relationship, record_or_records) do
case Ash.Resource.relationship(changeset.resource, relationship) do
nil ->
error =
NoSuchRelationship.exception(
resource: changeset.resource,
name: relationship
)
add_error(changeset, error)
%{writable?: false} = relationship ->
error =
InvalidRelationship.exception(
relationship: relationship.name,
message: "Relationship is not editable"
)
add_error(changeset, error)
%{cardinality: :one, type: type}
when is_list(record_or_records) and length(record_or_records) > 1 ->
error =
InvalidRelationship.exception(
relationship: relationship.name,
message: "Cannot replace a #{type} relationship with multiple records"
)
add_error(changeset, error)
%{type: :many_to_many} = relationship ->
case primary_keys_with_changes(relationship, List.wrap(record_or_records)) do
{:ok, primary_key} ->
relationships =
Map.put(changeset.relationships, relationship.name, %{replace: primary_key})
%{changeset | relationships: relationships}
{:error, error} ->
add_error(changeset, error)
end
relationship ->
record =
if relationship.cardinality == :one do
if is_list(record_or_records) do
List.first(record_or_records)
else
record_or_records
end
else
List.wrap(record_or_records)
end
case primary_key(relationship, record) do
{:ok, primary_key} ->
relationships =
Map.put(changeset.relationships, relationship.name, %{replace: primary_key})
%{changeset | relationships: relationships}
{:error, error} ->
add_error(changeset, error)
end
end
end
@doc "Returns true if an attribute exists in the changes"
@spec changing_attribute?(t(), atom) :: boolean
def changing_attribute?(changeset, attribute) do
Map.has_key?(changeset.attributes, attribute)
end
@doc "Change an attribute only if is not currently being changed"
@spec change_new_attribute(t(), atom, term) :: t()
def change_new_attribute(changeset, attribute, value) do
if changing_attribute?(changeset, attribute) do
changeset
else
change_attribute(changeset, attribute, value)
end
end
@doc """
Change an attribute if is not currently being changed, by calling the provided function
Use this if you want to only perform some expensive calculation for an attribute value
only if there isn't already a change for that attribute
"""
@spec change_new_attribute_lazy(t(), atom, (() -> any)) :: t()
def change_new_attribute_lazy(changeset, attribute, func) do
if changing_attribute?(changeset, attribute) do
changeset
else
change_attribute(changeset, attribute, func.())
end
end
@doc "Calls `change_attribute/3` for each key/value pair provided"
@spec change_attributes(t(), map | Keyword.t()) :: t()
def change_attributes(changeset, changes) do
Enum.reduce(changes, changeset, fn {key, value}, changeset ->
change_attribute(changeset, key, value)
end)
end
@doc "Adds a change to the changeset, unless the value matches the existing value"
def change_attribute(changeset, attribute, value) do
case Ash.Resource.attribute(changeset.resource, attribute) do
nil ->
error =
NoSuchAttribute.exception(
resource: changeset.resource,
name: attribute
)
add_error(changeset, error)
%{writable?: false} = attribute ->
add_attribute_invalid_error(changeset, attribute, "Attribute is not writable")
attribute ->
with {:ok, casted} <- Ash.Type.cast_input(attribute.type, value),
:ok <- validate_allow_nil(attribute, casted),
:ok <- Ash.Type.apply_constraints(attribute.type, casted, attribute.constraints) do
data_value = Map.get(changeset.data, attribute.name)
cond do
is_nil(data_value) and is_nil(casted) ->
changeset
Ash.Type.equal?(attribute.type, casted, data_value) ->
changeset
true ->
%{changeset | attributes: Map.put(changeset.attributes, attribute.name, casted)}
end
else
:error ->
add_attribute_invalid_error(changeset, attribute)
{:error, error_or_errors} ->
error_or_errors
|> List.wrap()
|> Enum.reduce(changeset, &add_attribute_invalid_error(&2, attribute, &1))
end
end
end
@doc "Calls `force_change_attribute/3` for each key/value pair provided"
@spec force_change_attributes(t(), map) :: t()
def force_change_attributes(changeset, changes) do
Enum.reduce(changes, changeset, fn {key, value}, changeset ->
force_change_attribute(changeset, key, value)
end)
end
@doc "Changes an attribute even if it isn't writable"
@spec force_change_attribute(t(), atom, any) :: t()
def force_change_attribute(changeset, attribute, value) do
case Ash.Resource.attribute(changeset.resource, attribute) do
nil ->
error =
NoSuchAttribute.exception(
resource: changeset.resource,
name: attribute
)
add_error(changeset, error)
attribute ->
with {:ok, casted} <- Ash.Type.cast_input(attribute.type, value),
:ok <- Ash.Type.apply_constraints(attribute.type, casted, attribute.constraints) do
data_value = Map.get(changeset.data, attribute.name)
cond do
is_nil(data_value) and is_nil(casted) ->
changeset
Ash.Type.equal?(attribute.type, casted, data_value) ->
changeset
true ->
%{changeset | attributes: Map.put(changeset.attributes, attribute.name, casted)}
end
else
:error ->
add_attribute_invalid_error(changeset, attribute)
{:error, error_or_errors} ->
error_or_errors
|> List.wrap()
|> Enum.reduce(changeset, &add_attribute_invalid_error(&2, attribute, &1))
end
end
end
@doc "Adds a before_action hook to the changeset."
@spec before_action(t(), (t() -> t())) :: t()
def before_action(changeset, func) do
%{changeset | before_action: [func | changeset.before_action]}
end
@doc "Adds an after_action hook to the changeset."
@spec after_action(t(), (t(), Ash.record() -> {:ok, Ash.record()} | {:error, term})) :: t()
def after_action(changeset, func) do
%{changeset | after_action: [func | changeset.after_action]}
end
@doc "Returns the original data with attribute changes merged."
@spec apply_attributes(t()) :: Ash.record()
def apply_attributes(changeset) do
Enum.reduce(changeset.attributes, changeset.data, fn {attribute, value}, data ->
Map.put(data, attribute, value)
end)
end
@doc "Adds an error to the changesets errors list, and marks the change as `valid?: false`"
@spec add_error(t(), Ash.error()) :: t()
def add_error(changeset, error) do
%{changeset | errors: [error | changeset.errors], valid?: false}
end
defp reconcile_relationship_changes(%{replace: _, add: add} = changes) do
changes
|> Map.delete(:add)
|> Map.update!(:replace, fn replace ->
replace ++ add
end)
|> reconcile_relationship_changes()
end
defp reconcile_relationship_changes(%{replace: _, remove: remove} = changes) do
changes
|> Map.delete(:remove)
|> Map.update!(:replace, fn replace ->
Enum.reject(replace, &(&1 in remove))
end)
|> reconcile_relationship_changes()
end
defp reconcile_relationship_changes(changes) do
changes
|> update_if_present(:replace, &uniq_if_list/1)
|> update_if_present(:remove, &uniq_if_list/1)
|> update_if_present(:add, &uniq_if_list/1)
end
defp uniq_if_list(list) when is_list(list), do: Enum.uniq(list)
defp uniq_if_list(other), do: other
defp update_if_present(map, key, func) do
if Map.has_key?(map, key) do
Map.update!(map, key, func)
else
map
end
end
defp through_changeset(relationship, changes) do
new(relationship.through, changes)
end
defp primary_keys_with_changes(_, []), do: {:ok, []}
defp primary_keys_with_changes(relationship, records) do
Enum.reduce_while(records, {:ok, []}, fn
{record, changes}, {:ok, acc} ->
with {:ok, primary_key} <- primary_key(relationship, record),
%{valid?: true} = changeset <- through_changeset(relationship, changes) do
{:cont, {:ok, [{primary_key, changeset} | acc]}}
else
%{valid?: false, errors: errors} -> {:halt, {:error, errors}}
{:error, error} -> {:halt, {:error, error}}
end
record, {:ok, acc} ->
case primary_key(relationship, record) do
{:ok, primary_key} -> {:cont, {:ok, [primary_key | acc]}}
{:error, error} -> {:halt, {:error, error}}
end
end)
end
defp primary_key(_, nil), do: {:ok, nil}
defp primary_key(relationship, records) when is_list(records) do
case Ash.Resource.primary_key(relationship.destination) do
[_field] ->
multiple_primary_keys(relationship, records)
_ ->
case single_primary_key(relationship, records) do
{:ok, keys} ->
{:ok, keys}
{:error, _} ->
do_primary_key(relationship, records)
end
end
end
defp primary_key(relationship, record) do
do_primary_key(relationship, record)
end
defp do_primary_key(relationship, record) when is_map(record) do
primary_key = Ash.Resource.primary_key(relationship.destination)
is_pkey_map? =
Enum.all?(primary_key, fn key ->
Map.has_key?(record, key) || Map.has_key?(record, to_string(key))
end)
if is_pkey_map? do
pkey =
Enum.reduce(primary_key, %{}, fn key, acc ->
case Map.fetch(record, key) do
{:ok, value} -> Map.put(acc, key, value)
:error -> Map.put(acc, key, Map.get(record, to_string(key)))
end
end)
{:ok, pkey}
else
error =
InvalidRelationship.exception(
relationship: relationship.name,
message: "Invalid identifier #{inspect(record)}"
)
{:error, error}
end
end
defp do_primary_key(relationship, record) do
single_primary_key(relationship, record)
end
defp multiple_primary_keys(relationship, values) do
Enum.reduce_while(values, {:ok, []}, fn record, {:ok, primary_keys} ->
case do_primary_key(relationship, record) do
{:ok, pkey} -> {:cont, {:ok, [pkey | primary_keys]}}
{:error, error} -> {:halt, {:error, error}}
end
end)
end
defp single_primary_key(relationship, value) do
with [field] <- Ash.Resource.primary_key(relationship.destination),
attribute <- Ash.Resource.attribute(relationship.destination, field),
{:ok, casted} <- Ash.Type.cast_input(attribute.type, value) do
{:ok, %{field => casted}}
else
_ ->
error =
InvalidRelationship.exception(
relationship: relationship.name,
message: "Invalid identifier #{inspect(value)}"
)
{:error, error}
end
end
@doc false
def changes_depend_on(changeset, dependency) do
%{changeset | change_dependencies: [dependency | changeset.change_dependencies]}
end
@doc false
def add_requests(changeset, requests) when is_list(requests) do
Enum.reduce(requests, changeset, &add_requests(&2, &1))
end
def add_requests(changeset, request) do
%{changeset | requests: [request | changeset.requests]}
end
defp validate_allow_nil(%{allow_nil?: false} = attribute, nil) do
{:error,
InvalidAttribute.exception(
field: attribute.name,
message: "must be present",
validation: {:present, 1, 1}
)}
end
defp validate_allow_nil(_, _), do: :ok
defp add_attribute_invalid_error(changeset, attribute, message \\ nil) do
error =
InvalidAttribute.exception(
field: attribute.name,
validation: {:cast, attribute.type},
message: message
)
add_error(changeset, error)
end
end
| 32.232119 | 103 | 0.63733 |
0820c95f754e2679e08f5f79a4ace35aa5c2c1ba | 1,223 | ex | Elixir | web/controllers/api/user_controller.ex | robot-overlord/todo-example | 2877bf3dc94e857a576fdc922c040c6af2f68ec0 | [
"MIT"
] | 1 | 2017-07-20T17:41:13.000Z | 2017-07-20T17:41:13.000Z | web/controllers/api/user_controller.ex | robot-overlord/todo-example | 2877bf3dc94e857a576fdc922c040c6af2f68ec0 | [
"MIT"
] | null | null | null | web/controllers/api/user_controller.ex | robot-overlord/todo-example | 2877bf3dc94e857a576fdc922c040c6af2f68ec0 | [
"MIT"
] | null | null | null | defmodule Todo.API.UserController do
alias Todo.User
use Todo.Web, :controller
@spec index(Plug.Conn.t(), map()) :: Plug.Conn.t()
def index(conn, _params), do: render(conn, "index.json", users: Repo.all(User))
@spec show(Plug.Conn.t(), map()) :: Plug.Conn.t()
def show(conn, %{"id" => id}) do
user =
User
|> Repo.get!(id)
|> Repo.preload([:lists, :completed_items])
render(conn, "show.json", user: user)
end
@spec create(Plug.Conn.t(), map()) :: Plug.Conn.t()
def create(conn, %{"data" => params}) do
user =
User
|> User.changeset(params)
|> Repo.insert!()
|> Repo.preload([:lists, :completed_items])
conn
|> put_status(:created)
|> render("show.json", user: user)
end
@spec update(Plug.Conn.t(), map()) :: Plug.Conn.t()
def update(conn, %{"id" => id, "data" => changes}) do
user =
User
|> Repo.get!(id)
|> User.changeset(changes)
|> Repo.update!()
render(conn, "show.json", user: user)
end
@spec delete(Plug.Conn.t(), map()) :: Plug.Conn.t()
def delete(conn, %{"id" => id}) do
user =
User
|> Repo.get!(id)
|> Repo.delete!()
send_resp(conn, 204, "")
end
end
| 23.519231 | 81 | 0.55601 |
0820e9ffbe48578f29cf39fde6dc1ce52c8aa780 | 6,419 | ex | Elixir | lib/rservex/connection.ex | Siel/Rservex | a74a526edb7a38ee15c2be63c726f0196eb7b9f4 | [
"Apache-2.0"
] | 1 | 2020-11-27T16:26:58.000Z | 2020-11-27T16:26:58.000Z | lib/rservex/connection.ex | Siel/Rservex | a74a526edb7a38ee15c2be63c726f0196eb7b9f4 | [
"Apache-2.0"
] | 1 | 2020-04-30T05:31:10.000Z | 2020-04-30T05:31:10.000Z | lib/rservex/connection.ex | Siel/Rservex | a74a526edb7a38ee15c2be63c726f0196eb7b9f4 | [
"Apache-2.0"
] | null | null | null | defmodule Rservex.Connection do
@cmd_eval 3
@dt_string 4
# define CMD_RESP 0x10000 /* all responses have this flag set */
# define RESP_OK (CMD_RESP|0x0001) /* command succeeded; returned parameters depend on the command issued */
# define RESP_ERR (CMD_RESP|0x0002) /* command failed, check stats code attached string may describe the error */
@resp_ok 0x10001
@resp_err 0x10002
@dt_sexp 10
@xt_arr_str 34
# all int and double entries throughout the transfer are encoded in Intel-endianess format:
# int=0x12345678 -> char[4]=(0x78,0x56,x34,0x12)
@doc """
check if the response has a valid format according: https://www.rforge.net/Rserve/dev.html
"""
@spec check_ack(port()) :: {:error, :invalid_ack} | {:ok, port()}
def check_ack(conn) do
{:ok, msg} = :gen_tcp.recv(conn, 32)
case msg do
<<"Rsrv", _version::size(32), _protocolor::size(32), _extra::binary>> ->
{:ok, conn}
_ ->
{:error, :invalid_ack}
end
end
  # Drains any bytes still pending on the socket (up to a 1s timeout).
  # Used as a best-effort recovery after encountering unhandled SEXP types,
  # so the connection can keep being used for subsequent requests.
  def clear_buffer(conn) do
    :gen_tcp.recv(conn, 0, 1000)
  end
@spec send_message(port(), any(), :eval) ::
{:error, atom()} | {:ok, {:error, atom()} | {:ok, any()}}
def send_message(conn, data, type) do
message = encode_message(data, type)
case :gen_tcp.send(conn, message) do
:ok ->
receive_reply(conn)
{:error, error} ->
{:error, error}
end
end
def receive_reply(conn) do
# recv(Socket, Length, Timeout)
# Argument Length is only meaningful when the socket is in raw mode and denotes the number of bytes to read. If Length is 0, all available bytes are returned. If Length > 0, exactly Length bytes are returned
{:ok, header} = :gen_tcp.recv(conn, 16)
<<cmd_resp::little-32, length_low::little-32, _offset::little-32, length_high::little-32>> =
header
# The CMD_RESP mask is set for all responses. Each response consists of the response command (RESP_OK or RESP_ERR - least significant 24 bit) and the status code (most significant 8 bits).
case cmd_resp do
@resp_ok ->
# left shift
length = length_low + :erlang.bsl(length_high, 31)
receive_data(conn, length)
@resp_err ->
# TODO: read error content
{:error, :resp_err}
resp_code ->
IO.inspect(header)
IO.inspect(length_low)
# IO.inspect(:gen_tcp.recv(conn, 0, 1000))
raise("Unkwnown CMD_RESP: " <> inspect(resp_code))
end
end
def receive_data(conn, len) do
case Enum.reverse(receive_data(conn, len, [])) do
# Only one value received
[val] ->
val
# multiple values received
list ->
list
end
end
def receive_data(_conn, 0, acc) do
acc
end
def receive_data(conn, len, acc) do
{:ok, data_header} = :gen_tcp.recv(conn, 4)
<<item_type::little-8, item_length::little-24>> = data_header
item = receive_item(conn, item_type)
acc = [item | acc]
receive_data(conn, len - 4 - item_length, acc)
end
# R SEXP value (DT_SEXP) are recursively encoded in a similar way as the parameter attributes. Each SEXP consists of a 4-byte header and the actual contents. The header is of the form:
# [0] (byte) eXpression Type
# [1] (24-bit int) length
def receive_item(conn, @dt_sexp) do
{:ok, sexp_header} = :gen_tcp.recv(conn, 4)
<<sexp_type::little-8, sexp_length::little-24>> = sexp_header
receive_sexp(conn, sexp_type, sexp_length)
end
# The expression type consists of the actual type (least significant 6 bits) and attributes.
# define XT_ARRAY_STR 34 /* P data: string,string,.. (string=byte,byte,...,0) padded with '\01' */
  # XT_ARRAY_STR (34): an array of NUL-terminated strings padded with '\01'.
  def receive_sexp(conn, @xt_arr_str, length) do
    receive_arr_str(conn, length)
  end

  # Catch-all for SEXP types that are not implemented yet: dumps the type and
  # length for debugging, then drains the socket so the connection stays
  # usable. NOTE(review): the payload is discarded and the caller receives the
  # result of clear_buffer/1 here — confirm this is intentional.
  def receive_sexp(conn, type, length) do
    IO.inspect(type)
    IO.inspect(length)
    clear_buffer(conn)
  end
def receive_arr_str(_conn, 0) do
{:xt_arr_str, ""}
end
# Reads `length` bytes and decodes an XT_ARRAY_STR payload: strings are
# NUL-terminated and the array is padded with 0x01 bytes.
def receive_arr_str(conn, length) do
  {:ok, raw} = :gen_tcp.recv(conn, length)

  parts =
    raw
    |> String.replace(<<0, 1>>, "")
    |> String.split(<<0>>)

  # SUPER HACKY IMPLEMENTATION: normalise the split result into either a
  # single string (stripping stray 0x01 padding) or a list of strings.
  value =
    case parts do
      [only] -> String.replace(only, <<1>>, "")
      [first, ""] -> first
      many -> many
    end

  {:ok, {:xt_arr_str, value}}
end
# command parameters | response data
# CMD_eval DT_STRING or DT_SEXP | DT_SEXP
# Builds a CMD_eval request as iodata: QAP1 header followed by the
# DT_STRING-encoded expression.
def encode_message(data, :eval) do
  body = dt(data, :string)
  [header(@cmd_eval, :erlang.iolist_size(body)), body]
end
# The header is structured as follows:
# [0] (int) 'command'
# [4] (int) 'length' of the message (bits 0-31)
# [8] (int) 'offset' of the data part
# [12] (int) 'length' of the message (bits 32-63)
# 'command' specifies the request or response type.
# 'length' specifies the number of bytes belonging to this message (excluding the header).
# 'offset' specifies the offset of the data part, where 0 means directly after the header (which is normally the case)
# 'length2' high bits of the length (must be 0 if the packet size is smaller than 4GB)
# 16-byte QAP1 message header: command (little-endian 32), low length bits,
# data offset (always 0) and high length bits (0 for messages under 4GB) --
# the trailing 8 zero bytes cover both zero fields at once.
def header(command, length) do
  <<command::little-size(32), length::little-size(32), 0::size(64)>>
end
# The data part contains any additional parameters that are send along with the command. Each parameter consists of a 4-byte header:
# [0] (byte) type
# [1] (24-bit int) length
# Types used by the current Rserve implementation (for list of all supported types see Rsrv.h):
# DT_INT (4 bytes) integer
# DT_STRING (n bytes) null terminated string
# DT_BYTESTREAM (n bytes) any binary data
# DT_SEXP R's encoded SEXP, see below
# all int and double entries throughout the transfer are encoded in Intel-endianess format:
# int=0x12345678 -> char[4]=(0x78,0x56,x34,0x12) functions/macros for converting from native to protocol format are available in Rsrv.h.
# Rsrv.h:
# define DT_STRING 4 /* 0 terminted string */
# Encodes a DT_STRING parameter: 1-byte type tag plus 24-bit little-endian
# payload length, followed by the NUL-terminated string payload.
def dt(string, :string) do
  payload = transfer_string(string)

  [<<@dt_string::little-8, :erlang.iolist_size(payload)::little-24>>, payload]
end
# DT_STRING transmissions must terminate in a 0 byte (per Rsrv.h); returned
# as iodata to avoid copying the original binary.
def transfer_string(string), do: [string, <<0>>]
end
| 31.777228 | 211 | 0.649945 |
08210c198db6b3d05e28ed42a55b528190f25302 | 324 | exs | Elixir | mix.exs | pascalwengerter/ProjectEuler100 | 5e553839b6dc3c85521bd979fd6eaa1d02b83fff | [
"MIT"
] | 1 | 2021-02-27T10:57:27.000Z | 2021-02-27T10:57:27.000Z | mix.exs | pascalwengerter/ProjectEuler100 | 5e553839b6dc3c85521bd979fd6eaa1d02b83fff | [
"MIT"
] | null | null | null | mix.exs | pascalwengerter/ProjectEuler100 | 5e553839b6dc3c85521bd979fd6eaa1d02b83fff | [
"MIT"
] | null | null | null | defmodule ProjectEuler.Mix do
use Mix.Project
# Standard Mix project definition for the :example OTP application.
def project do
  [
    app: :example,
    version: "0.1.0",
    elixir: "~> 1.1",
    start_permanent: Mix.env() == :prod,
    deps: deps()
  ]
end
# OTP application configuration; :logger is started alongside the defaults.
def application, do: [extra_applications: [:logger]]
# No external dependencies.
defp deps, do: []
end | 13.5 | 42 | 0.537037 |
0821716e804edf797c98bf11ebacbf486be0c8dc | 125 | exs | Elixir | test/test_helper.exs | leandrocp/diff | be6d57be5a2dbbc6d117cc4686cad3a3f4a3e46b | [
"Apache-2.0"
] | 59 | 2020-01-20T20:32:31.000Z | 2022-02-11T07:25:45.000Z | test/test_helper.exs | leandrocp/diff | be6d57be5a2dbbc6d117cc4686cad3a3f4a3e46b | [
"Apache-2.0"
] | 33 | 2020-01-20T19:38:09.000Z | 2021-11-19T10:08:25.000Z | test/test_helper.exs | leandrocp/diff | be6d57be5a2dbbc6d117cc4686cad3a3f4a3e46b | [
"Apache-2.0"
] | 12 | 2020-01-20T21:11:14.000Z | 2021-05-10T19:56:47.000Z | ExUnit.start()
# Behaviour-based mocks (Mox): swapped in for the real storage and package
# store implementations during tests.
Mox.defmock(Diff.StorageMock, for: Diff.Storage)
Mox.defmock(Diff.Package.StoreMock, for: Diff.Package.Store)
| 31.25 | 60 | 0.792 |
0821d3014a41f0092a504122016e9fe4b3ebc3f8 | 4,127 | exs | Elixir | lib/elixir/test/elixir/kernel/fn_test.exs | ma2gedev/elixir | 3f4410ff687fdb63016b063e3af74caecb5b20c0 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/kernel/fn_test.exs | ma2gedev/elixir | 3f4410ff687fdb63016b063e3af74caecb5b20c0 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/kernel/fn_test.exs | ma2gedev/elixir | 3f4410ff687fdb63016b063e3af74caecb5b20c0 | [
"Apache-2.0"
] | null | null | null | Code.require_file "../test_helper.exs", __DIR__
defmodule Kernel.FnTest do
  # Legacy (pre-1.0) Elixir test suite for anonymous functions and the
  # capture operator `&`. It intentionally uses APIs that were later removed
  # from Kernel (atom_to_list/1, list_to_atom/1, is_record/2, ...) -- do not
  # modernize these calls: the exact forms are the behaviour under test.
  use ExUnit.Case, async: true

  import CompileAssertion

  test "arithmetic constants on match" do
    assert (fn 1 + 2 -> :ok end).(3) == :ok
    assert (fn 1 - 2 -> :ok end).(-1) == :ok
    assert (fn -1 -> :ok end).(-1) == :ok
    assert (fn +1 -> :ok end).(1) == :ok
  end

  test "clause with ^" do
    x = 1
    assert (fn ^x -> :ok; _ -> :error end).(1) == :ok
  end

  test "capture remote" do
    assert (&:erlang.atom_to_list/1).(:a) == 'a'
    assert (&Kernel.atom_to_list/1).(:a) == 'a'
    assert (&List.flatten/1).([[0]]) == [0]
    assert (&(List.flatten/1)).([[0]]) == [0]
    assert (&List.flatten(&1)).([[0]]) == [0]
    assert &List.flatten(&1) == &List.flatten/1
  end

  test "capture local" do
    assert (&atl/1).(:a) == 'a'
    assert (&(atl/1)).(:a) == 'a'
    assert (&atl(&1)).(:a) == 'a'
  end

  test "capture local with question mark" do
    assert (&is_a?/2).(:atom, :a)
    assert (&(is_a?/2)).(:atom, :a)
    assert (&is_a?(&1, &2)).(:atom, :a)
  end

  test "capture imported" do
    assert (&atom_to_list/1).(:a) == 'a'
    assert (&(atom_to_list/1)).(:a) == 'a'
    assert (&atom_to_list(&1)).(:a) == 'a'
    assert &atom_to_list(&1) == &atom_to_list/1
  end

  test "capture macro" do
    assert (&to_string/1).(:a) == "a"
    assert (&to_string(&1)).(:a) == "a"
    assert (&Kernel.to_string/1).(:a) == "a"
    assert (&Kernel.to_string(&1)).(:a) == "a"
  end

  test "local partial application" do
    assert (&atb(&1, :utf8)).(:a) == "a"
    assert (&atb(list_to_atom(&1), :utf8)).('a') == "a"
  end

  test "imported partial application" do
    assert (&is_record(&1, Range)).(1..3)
  end

  test "remote partial application" do
    assert (&:erlang.binary_part(&1, 1, 2)).("foo") == "oo"
    assert (&:erlang.binary_part(atom_to_binary(&1), 1, 2)).(:foo) == "oo"
  end

  test "capture and partially apply tuples" do
    assert (&{ &1, &2 }).(1, 2) == { 1, 2 }
    assert (&{ &1, &2, &3 }).(1, 2, 3) == { 1, 2, 3 }
    assert (&{ 1, &1 }).(2) == { 1, 2 }
    assert (&{ 1, &1, &2 }).(2, 3) == { 1, 2, 3 }
  end

  test "capture and partially apply lists" do
    assert (&[ &1, &2 ]).(1, 2) == [ 1, 2 ]
    assert (&[ &1, &2, &3 ]).(1, 2, 3) == [ 1, 2, 3 ]
    assert (&[ 1, &1 ]).(2) == [ 1, 2 ]
    assert (&[ 1, &1, &2 ]).(2, 3) == [ 1, 2, 3 ]
    assert (&[&1|&2]).(1, 2) == [1|2]
  end

  test "capture and partially apply on call" do
    assert (&(&1.file)).(__ENV__) == __FILE__
    assert (&(&1.file(&2))).(__ENV__, "Hello").file == "Hello"
  end

  test "capture block like" do
    assert (&(!is_atom(&1))).(:foo) == false
  end

  test "capture other" do
    assert (& &1).(:ok) == :ok
    fun = fn a, b -> a + b end
    assert (&fun.(&1, 2)).(1) == 3
  end

  # The remaining tests pin the exact compiler error messages produced for
  # invalid uses of the capture operator.
  test "failure on non-continuous" do
    assert_compile_fail CompileError, "nofile:1: capture &2 cannot be defined without &1", "&(&2)"
  end

  test "failure on integers" do
    assert_compile_fail CompileError, "nofile:1: unhandled &1 outside of a capture", "&1"
    assert_compile_fail CompileError, "nofile:1: capture &0 is not allowed", "&foo(&0)"
  end

  test "failure on block" do
    assert_compile_fail SyntaxError,
      "nofile:1: invalid args for &, block expressions " <>
      "are not allowed, got: (\n 1\n 2\n)",
      "&(1;2)"
  end

  test "failure on other types" do
    assert_compile_fail SyntaxError,
      "nofile:1: invalid args for &, expected an expression in the format of &Mod.fun/arity, " <>
      "&local/arity or a capture containing at least one argument as &1, got: :foo",
      "&:foo"
  end

  test "failure when no captures" do
    assert_compile_fail SyntaxError,
      "nofile:1: invalid args for &, expected an expression in the format of &Mod.fun/arity, " <>
      "&local/arity or a capture containing at least one argument as &1, got: foo()",
      "&foo()"
  end

  # Private helpers exercised by the capture tests above.
  defp is_a?(:atom, atom) when is_atom(atom), do: true
  defp is_a?(_, _), do: false

  defp atl(arg) do
    :erlang.atom_to_list arg
  end

  defp atb(arg, encoding) do
    :erlang.atom_to_binary(arg, encoding)
  end
end
| 28.659722 | 98 | 0.559002 |
0821e29cf77a4bb5b9d057c2df66db5c819b3f4a | 93 | exs | Elixir | test/test_helper.exs | Testmetrics/testmetrics_elixir_client | 729c6beadba92630a31d8c6d9177ebbf41d16430 | [
"MIT"
] | 1 | 2019-09-04T22:47:57.000Z | 2019-09-04T22:47:57.000Z | test/test_helper.exs | Testmetrics/testmetrics_elixir_client | 729c6beadba92630a31d8c6d9177ebbf41d16430 | [
"MIT"
] | null | null | null | test/test_helper.exs | Testmetrics/testmetrics_elixir_client | 729c6beadba92630a31d8c6d9177ebbf41d16430 | [
"MIT"
] | null | null | null | ExUnit.configure(formatters: [ExUnit.CLIFormatter, TestmetricsElixirClient])
# Boot ExUnit so the test suite runs with the configuration applied above.
ExUnit.start()
| 23.25 | 76 | 0.827957 |
0821fc2083aca1bd25fa72279bbe0f718a2c2898 | 450 | exs | Elixir | test/bitlog_web/views/error_view_test.exs | Soonad/Bitlog | 17f41b591169dca7412b9790e8f20abf11b46313 | [
"MIT"
] | 2 | 2019-12-03T10:38:38.000Z | 2019-12-04T23:52:11.000Z | test/bitlog_web/views/error_view_test.exs | moonad/Bitlog | 17f41b591169dca7412b9790e8f20abf11b46313 | [
"MIT"
] | null | null | null | test/bitlog_web/views/error_view_test.exs | moonad/Bitlog | 17f41b591169dca7412b9790e8f20abf11b46313 | [
"MIT"
] | null | null | null | defmodule BitlogWeb.ErrorViewTest do
use BitlogWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
# Each test renders the corresponding error template and checks the JSON
# payload Phoenix serves for that status code.
test "renders 404.json" do
  assert render(BitlogWeb.ErrorView, "404.json", []) == %{errors: %{detail: "Not Found"}}
end

test "renders 500.json" do
  assert render(BitlogWeb.ErrorView, "500.json", []) ==
           %{errors: %{detail: "Internal Server Error"}}
end
end
| 28.125 | 91 | 0.68 |
082215c9c213f9e18fe262097c559e0e9632260e | 1,005 | ex | Elixir | lib/allais_paradox/participant.ex | kazuwo/course_evaluation_questionnaire | 72f01051900b68a0239b3a87e38981bae83aefcb | [
"MIT"
] | null | null | null | lib/allais_paradox/participant.ex | kazuwo/course_evaluation_questionnaire | 72f01051900b68a0239b3a87e38981bae83aefcb | [
"MIT"
] | null | null | null | lib/allais_paradox/participant.ex | kazuwo/course_evaluation_questionnaire | 72f01051900b68a0239b3a87e38981bae83aefcb | [
"MIT"
] | null | null | null | defmodule CourseEvaluationQuestionnaire.Participant do
alias CourseEvaluationQuestionnaire.Actions
# Actions
# Thin wrapper over the Actions context.
defdelegate fetch_contents(data, id), to: Actions, as: :update_participant_contents
# Records the participant's answer and advances them to the next question.
#
# Fixes a scoping bug: the original rebound `data` inside the `if`/`else`
# branches, but rebindings inside `if` do not leak out in Elixir, so the
# question1/question2 answer (and the `:answered` counter bump) were
# silently dropped before reaching Actions.next_question/3. The branch
# result is now explicitly captured back into `data`.
def next_question(data, id, selected) do
  data = put_in(data, [:participants, id, :sequence], selected["next"])

  data =
    if selected["next"] == "question2" do
      put_in(data, [:participants, id, :question1], selected["selected"])
    else
      data
      |> put_in([:participants, id, :question2], selected["selected"])
      |> Map.put(:answered, data.answered + 1)
    end

  Actions.next_question(data, id, selected)
end
# Utilities
# Identity for now; kept as an extension point for per-participant formatting.
def format_participant(participant) do
  participant
end
# Projects the shared state down to the fields every client needs.
# Dropped the trailing comma after `data.page` -- Elixir's parser does not
# accept trailing commas inside map literals.
def format_data(data) do
  %{
    page: data.page
  }
end
# Formats one participant's view: their own record merged with the shared
# questionnaire-level fields.
def format_contents(data, id) do
  %{participants: participants} = data

  participants
  |> Map.get(id)
  |> format_participant()
  |> Map.merge(format_data(data))
end
end
| 27.916667 | 82 | 0.677612 |
08221f8feee02a33cbe490f3fd294d9bbe380f43 | 1,702 | ex | Elixir | lib/csv_formatter.ex | TenTakano/BtcProfits | 29eb0fb494995cab51a929725cf76264f69bd5a2 | [
"MIT"
] | null | null | null | lib/csv_formatter.ex | TenTakano/BtcProfits | 29eb0fb494995cab51a929725cf76264f69bd5a2 | [
"MIT"
] | null | null | null | lib/csv_formatter.ex | TenTakano/BtcProfits | 29eb0fb494995cab51a929725cf76264f69bd5a2 | [
"MIT"
] | null | null | null | defmodule BtcProfits.CsvFormatter do
@time_zone_diff_seconds 60 * 60 * 9
# Loads a CRLF-delimited profit sheet, dropping the header row as well as
# withdrawal and refund entries. Returns {:ok, entries} or :error when the
# file cannot be read or is empty.
def import_profit_sheet(path) do
  case import_lines(path, "\r\n") do
    {:ok, [_header | rows]} ->
      entries =
        rows
        |> Enum.map(&format_profit_sheet/1)
        |> Enum.reject(fn entry ->
          String.contains?(entry.type, "Withdraw") || entry.type == "Refund"
        end)

      {:ok, entries}

    _ ->
      :error
  end
end
# Loads an LF-delimited BTC price sheet; rows that fail to parse are
# dropped. Returns {:ok, prices} or :error when the file cannot be read.
def import_btc_price(path) do
  case import_lines(path, "\n") do
    {:ok, lines} ->
      prices =
        lines
        |> Enum.map(&format_btc_price_sheet/1)
        |> Enum.reject(&is_nil/1)

      {:ok, prices}

    _ ->
      :error
  end
end
# Reads the file at `path` and splits it on the given line separator.
# Normalises any read failure down to a bare :error.
defp import_lines(path, new_line) do
  with {:ok, content} <- File.read(path) do
    {:ok, String.split(content, new_line)}
  else
    {:error, _} -> :error
  end
end
# Parses one profit-sheet CSV row into a map, shifting the UTC timestamp
# by @time_zone_diff_seconds (JST, UTC+9). The hour field is zero-padded
# so e.g. "3:04:05" becomes a valid ISO8601 time.
defp format_profit_sheet(line) do
  [timestamp, _coin, type, amount, _address, balance] = String.split(line, ",")
  [date, time, _timezone] = String.split(timestamp, " ")

  padded_time = String.pad_leading(time, 8, "0")
  {:ok, utc, _offset} = DateTime.from_iso8601("#{date}T#{padded_time}Z")

  %{
    time: DateTime.add(utc, @time_zone_diff_seconds, :second),
    type: type,
    amount: String.to_float(amount),
    balance: String.to_float(balance)
  }
end
# Parses one OHLCV CSV row into %{date: Date.t(), price: integer()} using
# the close price; returns nil for rows that do not have exactly 6 columns.
defp format_btc_price_sheet(line) do
  case String.split(line, ",") do
    [timestamp, _open, _high, _low, close, _volume] ->
      day = timestamp |> String.split(" ") |> List.first()
      {:ok, date} = Date.from_iso8601(day)

      %{date: date, price: String.to_integer(close)}

    _ ->
      nil
  end
end
end
| 23.315068 | 94 | 0.556404 |
08222ce695c095a6f7cc7389874b3923226eec07 | 10,925 | ex | Elixir | lib/mix/tasks/hex.package.ex | sorentwo/hex | 92e71162bdf1d7b17a4c641b900c81868ccab56c | [
"Apache-2.0"
] | null | null | null | lib/mix/tasks/hex.package.ex | sorentwo/hex | 92e71162bdf1d7b17a4c641b900c81868ccab56c | [
"Apache-2.0"
] | null | null | null | lib/mix/tasks/hex.package.ex | sorentwo/hex | 92e71162bdf1d7b17a4c641b900c81868ccab56c | [
"Apache-2.0"
] | 1 | 2021-07-26T18:20:06.000Z | 2021-07-26T18:20:06.000Z | defmodule Mix.Tasks.Hex.Package do
use Mix.Task
alias Hex.Registry.Server, as: Registry
@shortdoc "Fetches or diffs packages"
@default_diff_command "git diff --no-index __PATH1__ __PATH2__"
@doc false
def default_diff_command(), do: @default_diff_command
@moduledoc """
Fetches or diffs packages.
## Fetch package
Fetch a package tarball to the current directory.
mix hex.package fetch PACKAGE [VERSION] [--unpack] [--output PATH]
If `version` is not given, use the latest version.
You can pipe the fetched tarball to stdout by setting `--output -`.
## Diff package versions
mix hex.package diff APP VERSION
This command compares the project's dependency `APP` against
the target package version, unpacking the target version into
temporary directory and running a diff command.
## Fetch and diff package contents between versions
mix hex.package diff PACKAGE VERSION1 VERSION2
mix hex.package diff PACKAGE VERSION1..VERSION2
This command fetches package tarballs for both versions,
unpacks them into temporary directories and runs a diff command.
Afterwards, the temporary directories are automatically deleted.
Note, similarly to when tarballs are fetched with `mix deps.get`,
a `hex_metadata.config` is placed in each unpacked directory.
This file contains package's metadata as Erlang terms and so
we can additionally see the diff of that.
The exit code of the task is that of the underlying diff command.
### Diff command
The diff command can be customized by setting `diff_command`
configuration option, see `mix help hex.config` for more information.
The default diff command is:
#{@default_diff_command}
The `__PATH1__` and `__PATH2__` placeholders will be interpolated with
paths to directories of unpacked tarballs for each version.
Many diff commands supports coloured output but because we execute
the command in non-interactive mode, they'd usually be disabled.
On Unix systems you can pipe the output to more commands, for example:
mix hex.package diff decimal 1.0.0..1.1.0 | colordiff | less -R
Here, the output of `mix hex.package diff` is piped to the `colordiff`
utility to adds colours, which in turn is piped to `less -R` which
"pages" it. (`-R` preserves escape codes which allows colours to work.)
Another option is to configure the diff command itself. For example, to
force Git to always colour the output we can set the `--color=always` option:
mix hex.config diff_command "git diff --color=always --no-index __PATH1__ __PATH2__"
mix hex.package diff decimal 1.0.0..1.1.0
## Command line options
* `--unpack` - Unpacks the tarball after fetching it
* `-o`, `--output` - Sets output path. When used with `--unpack` it means
the directory (Default: `<app>-<version>`). Otherwise, it specifies
tarball path (Default: `<app>-<version>.tar`)
* `--organization ORGANIZATION` - Set this for private packages belonging to an organization
* `--repo REPO` - Set this for self-hosted Hex instances, default: `hexpm`
"""
@behaviour Hex.Mix.TaskDescription
@switches [unpack: :boolean, organization: :string, output: :string, repo: :string]
@aliases [o: :output]
@impl true
def run(args) do
Hex.start()
{opts, args} = Hex.OptionParser.parse!(args, strict: @switches, aliases: @aliases)
unpack = Keyword.get(opts, :unpack, false)
output = Keyword.get(opts, :output, nil)
case args do
["fetch", package] ->
fetch(repo(opts), package, nil, unpack, output)
["fetch", package, version] ->
fetch(repo(opts), package, version, unpack, output)
["diff", package, version1, version2] ->
diff(repo(opts), package, parse_version!(version1, version2))
["diff", package, version] ->
diff(repo(opts), package, parse_version!(version))
_ ->
Mix.raise("""
Invalid arguments, expected one of:
mix hex.package fetch PACKAGE [VERSION] [--unpack]
mix hex.package diff APP VERSION
mix hex.package diff PACKAGE VERSION1 VERSION2
mix hex.package diff PACKAGE VERSION1..VERSION2
""")
end
end
@impl true
def tasks() do
[
{"fetch PACKAGE [VERSION] [--unpack]", "Fetch the package"},
{"diff APP VERSION", "Diff dependency against version"},
{"diff PACKAGE VERSION1 VERSION2", "Diff package versions"},
{"diff PACKAGE VERSION1..VERSION2", "Diff package versions"}
]
end
defp fetch(repo, package, nil, unpack?, output) do
version = find_package_latest_version(repo, package)
fetch(repo, package, version, unpack?, output)
end
defp fetch(repo, package, version, false, "-") do
Hex.Registry.Server.open()
Hex.Registry.Server.prefetch([{repo, package}])
tarball = fetch_tarball!(repo, package, version)
IO.binwrite(tarball)
Hex.Registry.Server.close()
end
defp fetch(_repo, _package, _version, true, "-") do
Mix.raise("Cannot unpack the package while output destination is stdout")
end
defp fetch(repo, package, version, unpack?, output) do
Hex.Registry.Server.open()
Hex.Registry.Server.prefetch([{repo, package}])
tarball = fetch_tarball!(repo, package, version)
if output, do: File.mkdir_p!(output)
abs_name = Path.absname("#{package}-#{version}")
{abs_path, tar_path} =
if output do
{output, Path.join(output, "#{package}-#{version}.tar")}
else
{abs_name, "#{abs_name}.tar"}
end
File.write!(tar_path, tarball)
if unpack? do
%{inner_checksum: inner_checksum, outer_checksum: outer_checksum} =
Hex.Tar.unpack!(tar_path, abs_path)
verify_inner_checksum!(repo, package, version, inner_checksum)
verify_outer_checksum!(repo, package, version, outer_checksum)
else
{:ok, outer_checksum} = Hex.Tar.outer_checksum(tar_path)
verify_outer_checksum!(repo, package, version, outer_checksum)
end
message =
if unpack? do
File.rm!(tar_path)
"#{package} v#{version} extracted to #{abs_path}"
else
"#{package} v#{version} downloaded to #{tar_path}"
end
Hex.Shell.info(message)
Hex.Registry.Server.close()
end
defp fetch_tarball!(repo, package, version) do
path = Hex.SCM.cache_path(repo, package, version)
case Hex.SCM.fetch(repo, package, version) do
{:ok, _} ->
File.read!(path)
{:error, reason} ->
if File.exists?(path) do
File.read!(path)
else
Mix.raise(
"Downloading " <>
Hex.Repo.tarball_url(repo, package, version) <> " failed:\n\n" <> reason
)
end
end
end
defp verify_inner_checksum!(repo, package, version, checksum) do
registry_checksum = Registry.inner_checksum(repo, package, version)
if checksum != registry_checksum do
Mix.raise("Checksum mismatch against registry (inner)")
end
end
defp verify_outer_checksum!(repo, package, version, checksum) do
registry_checksum = Registry.outer_checksum(repo, package, version)
if checksum != registry_checksum do
Mix.raise("Checksum mismatch against registry (outer)")
end
end
defp diff(repo, app, version) when is_binary(version) do
Hex.Mix.check_deps()
{path_lock, package} =
case Map.get(Mix.Dep.Lock.read(), String.to_atom(app)) do
nil ->
Mix.raise(
"Cannot find the app \"#{app}\" in \"mix.lock\" file, " <>
"please ensure it has been specified in \"mix.exs\" and run \"mix deps.get\""
)
lock ->
path = Path.join(Mix.Project.deps_path(), app)
package = Hex.Utils.lock(lock).name
{path, package}
end
path = tmp_path("#{package}-#{version}-")
try do
fetch_and_unpack!(repo, package, [{path, version}])
code = run_diff_path!(path_lock, path)
Mix.Tasks.Hex.set_exit_code(code)
after
File.rm_rf!(path)
end
end
defp diff(repo, package, {version1, version2}) do
path1 = tmp_path("#{package}-#{version1}-")
path2 = tmp_path("#{package}-#{version2}-")
try do
fetch_and_unpack!(repo, package, [{path1, version1}, {path2, version2}])
code = run_diff_path!(path1, path2)
Mix.Tasks.Hex.set_exit_code(code)
after
File.rm_rf!(path1)
File.rm_rf!(path2)
end
end
defp fetch_and_unpack!(repo, package, versions) do
Hex.Registry.Server.open()
Hex.Registry.Server.prefetch([{repo, package}])
try do
Enum.each(versions, fn {path, version} ->
tarball = fetch_tarball!(repo, package, version)
%{inner_checksum: inner_checksum, outer_checksum: outer_checksum} =
Hex.Tar.unpack!({:binary, tarball}, path)
verify_inner_checksum!(repo, package, version, inner_checksum)
verify_outer_checksum!(repo, package, version, outer_checksum)
end)
after
Hex.Registry.Server.close()
end
end
defp run_diff_path!(path1, path2) do
cmd =
Hex.State.fetch!(:diff_command)
|> String.replace("__PATH1__", escape_and_quote_path(path1))
|> String.replace("__PATH2__", escape_and_quote_path(path2))
Mix.shell().cmd(cmd)
end
defp escape_and_quote_path(path) do
escaped = String.replace(path, "\"", "\\\"")
~s("#{escaped}")
end
defp tmp_path(prefix) do
random_string = Base.encode16(:crypto.strong_rand_bytes(4))
Path.join(System.tmp_dir!(), prefix <> random_string)
end
defp parse_version!(string) do
case String.split(string, "..", trim: true) do
[version1, version2] ->
parse_two_versions!(version1, version2)
[version] ->
version |> Hex.Version.parse!() |> to_string()
end
end
defp parse_version!(version1, version2) do
parse_two_versions!(version1, version2)
end
defp parse_two_versions!(version1, version2) do
version1 = Hex.Version.parse!(version1)
version2 = Hex.Version.parse!(version2)
{to_string(version1), to_string(version2)}
end
defp repo(opts) do
repo = Keyword.get(opts, :repo, "hexpm")
if organization = opts[:organization] do
Enum.join([repo, organization], ":")
else
repo
end
end
defp find_package_latest_version(organization, name) do
%{"latest_stable_version" => latest_stable_version} =
retrieve_package_info(organization, name)
latest_stable_version
end
defp retrieve_package_info(organization, name) do
case Hex.API.Package.get(organization, name) do
{:ok, {code, body, _}} when code in 200..299 ->
body
{:ok, {404, _, _}} ->
Mix.raise("No package with name #{name}")
other ->
Hex.Shell.error("Failed to retrieve package information")
Hex.Utils.print_error_result(other)
end
end
end
| 30.179558 | 96 | 0.663341 |
08224a45e19aff856bdd30a46c3dbf5e9c66b2bd | 652 | ex | Elixir | lib/std_json_io/application.ex | chvanikoff/std_json_io | 973dbfc21dfa28fe88533dcf918e017c5f76aaaf | [
"MIT"
] | 1 | 2017-08-24T19:54:27.000Z | 2017-08-24T19:54:27.000Z | lib/std_json_io/application.ex | chvanikoff/std_json_io | 973dbfc21dfa28fe88533dcf918e017c5f76aaaf | [
"MIT"
] | 2 | 2016-10-25T01:46:47.000Z | 2018-04-23T20:31:48.000Z | lib/std_json_io/application.ex | chvanikoff/std_json_io | 973dbfc21dfa28fe88533dcf918e017c5f76aaaf | [
"MIT"
] | 10 | 2016-07-20T12:34:53.000Z | 2018-08-22T21:37:09.000Z | defmodule StdJsonIo.Application do
use Application
# Application entry point: boots a poolboy pool of StdJsonIo.Worker
# processes under a one-for-one supervisor. Pool sizing and the worker
# script come from the :std_json_io application environment.
def start(_type, _args) do
  import Supervisor.Spec, warn: false

  config = Application.get_all_env(:std_json_io)

  children = [
    :poolboy.child_spec(StdJsonIo.Pool, pool_options(config),
      script: Keyword.fetch!(config, :script)
    )
  ]

  Supervisor.start_link(children, strategy: :one_for_one, name: StdJsonIo.Supervisor)
end

# Poolboy pool settings derived from the application environment.
defp pool_options(config) do
  [
    name: {:local, StdJsonIo.Pool},
    worker_module: StdJsonIo.Worker,
    size: Keyword.get(config, :pool_size, 15),
    max_overflow: Keyword.get(config, :pool_max_overflow, 10),
    strategy: :fifo
  ]
end
end
| 31.047619 | 98 | 0.68865 |
08225353ad378d61b82bbcafabb91826adbd8b5e | 709 | exs | Elixir | sample/mix.exs | GameEssa/ElixirProject | 8f5fef5256719ef2ebcfb77c7bb7eb70687ece4f | [
"MIT"
] | null | null | null | sample/mix.exs | GameEssa/ElixirProject | 8f5fef5256719ef2ebcfb77c7bb7eb70687ece4f | [
"MIT"
] | null | null | null | sample/mix.exs | GameEssa/ElixirProject | 8f5fef5256719ef2ebcfb77c7bb7eb70687ece4f | [
"MIT"
] | null | null | null | defmodule Sample.MixProject do
use Mix.Project
# Mix project definition. The :escript entry makes `mix escript.build`
# produce a standalone command-line executable.
def project do
  [
    app: :sample,
    version: "0.1.0",
    elixir: "~> 1.11",
    start_permanent: Mix.env() == :prod,
    deps: deps(),
    escript: escript()
  ]
end
# Run "mix help compile.app" to learn about applications.
# OTP application configuration; :logger is started alongside the defaults.
def application, do: [extra_applications: [:logger]]
# Run "mix help deps" to learn about dependencies.
defp deps do
  [
    # {:dep_from_hexpm, "~> 0.3.0"},
    # {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
    # ex_doc is a dev-only dependency used to generate documentation.
    {:ex_doc, "~> 0.21", only: :dev, runtime: false}
  ]
end
# Entry-point module for the generated escript binary.
def escript, do: [main_module: Executable.CLI]
end
| 20.257143 | 87 | 0.576869 |
08225c8c3ecb8d161f1d1cadb5b5b8c1f4f0fd9a | 1,937 | ex | Elixir | clients/container_analysis/lib/google_api/container_analysis/v1alpha1/model/dsse_hint.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/container_analysis/lib/google_api/container_analysis/v1alpha1/model/dsse_hint.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/container_analysis/lib/google_api/container_analysis/v1alpha1/model/dsse_hint.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ContainerAnalysis.V1alpha1.Model.DSSEHint do
  @moduledoc """
  This submessage provides human-readable hints about the purpose of the authority. Because the name of a note acts as its resource reference, it is important to disambiguate the canonical name of the Note (which might be a UUID for security purposes) from "readable" names more suitable for debug output. Note that these hints should not be used to look up authorities in security sensitive contexts, such as when looking up attestations to verify.

  ## Attributes

  * `humanReadableName` (*type:* `String.t`, *default:* `nil`) - Required. The human readable name of this attestation authority, for example "cloudbuild-prod".
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :humanReadableName => String.t() | nil
        }

  # Generated field declaration (GoogleApi.Gax.ModelBase) mapping the
  # `humanReadableName` JSON property onto the struct.
  field(:humanReadableName)
end
# Generated Poison decoder implementation: delegates to the model's own
# decode/2 (provided by GoogleApi.Gax.ModelBase).
defimpl Poison.Decoder, for: GoogleApi.ContainerAnalysis.V1alpha1.Model.DSSEHint do
  def decode(value, options) do
    GoogleApi.ContainerAnalysis.V1alpha1.Model.DSSEHint.decode(value, options)
  end
end
# Generated Poison encoder implementation: delegates to the shared
# ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.ContainerAnalysis.V1alpha1.Model.DSSEHint do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 41.212766 | 449 | 0.762003 |
08228e5b61b2c6d6a372258027a67eebf4038c11 | 2,068 | ex | Elixir | lib/nerves_runtime/log/kmsg_parser.ex | pancho-villa/nerves_runtime | 2e88d55edb680de596c1405678556133adb4f9a9 | [
"Apache-2.0"
] | 49 | 2017-03-02T03:38:32.000Z | 2022-02-24T12:11:40.000Z | lib/nerves_runtime/log/kmsg_parser.ex | pancho-villa/nerves_runtime | 2e88d55edb680de596c1405678556133adb4f9a9 | [
"Apache-2.0"
] | 83 | 2017-03-09T09:09:14.000Z | 2022-02-09T11:13:27.000Z | lib/nerves_runtime/log/kmsg_parser.ex | pancho-villa/nerves_runtime | 2e88d55edb680de596c1405678556133adb4f9a9 | [
"Apache-2.0"
] | 21 | 2017-02-28T16:55:32.000Z | 2021-07-29T15:54:13.000Z | defmodule Nerves.Runtime.Log.KmsgParser do
@moduledoc """
Functions for parsing kmsg strings
"""
alias Nerves.Runtime.Log.SyslogParser
@doc """
Parse out the kmsg facility, severity, and message (including the timestamp
and host) from a kmsg-formatted string.
See https://elixir.bootlin.com/linux/latest/source/Documentation/ABI/testing/dev-kmsg for full details.
Most messages are of the form:
```text
priority,sequence,timestamp,flag;message
```
`priority` is an integer that when broken apart gives you a facility and severity.
`sequence` is a monotonically increasing counter
`timestamp` is the time in microseconds
`flag` is almost always `-`
`message` is everything else
This parser only supports the minimum kmsg reports. The spec above describes
more functionality, but it appears to be uncommon and I haven't seen any
examples yet in my testing.
"""
@spec parse(String.t()) ::
{:ok,
%{
facility: SyslogParser.facility(),
severity: SyslogParser.severity(),
message: String.t(),
timestamp: integer(),
sequence: integer(),
flags: [atom()]
}}
| {:error, :parse_error}
# Parses one kmsg-formatted line into its facility, severity, message and
# metadata fields, or returns {:error, :parse_error} on any malformed part.
def parse(line) do
  # `parts: 2` keeps the whole message intact even when it contains ";" --
  # the dev-kmsg format only reserves the FIRST ";" as the metadata/message
  # separator. The previous unbounded split rejected such lines.
  with [metadata, message] <- String.split(line, ";", parts: 2),
       [priority_str, sequence_str, timestamp_str, flag] <-
         String.split(metadata, ",", parts: 4),
       {priority_int, ""} <- Integer.parse(priority_str),
       {sequence, ""} <- Integer.parse(sequence_str),
       {timestamp, ""} <- Integer.parse(timestamp_str),
       {:ok, facility, severity} <- SyslogParser.decode_priority(priority_int) do
    {:ok,
     %{
       facility: facility,
       severity: severity,
       message: message,
       timestamp: timestamp,
       sequence: sequence,
       flags: parse_flags(flag)
     }}
  else
    _ -> {:error, :parse_error}
  end
end
# "c" marks a continuation line; every other flag value (including the
# common "-") carries no extra information.
defp parse_flags("c"), do: [:continue]
defp parse_flags(_), do: []
end
| 30.865672 | 105 | 0.623791 |
0822ba6e73dbeb30554c8568aaaed553c947ac17 | 880 | exs | Elixir | test/ex_polygon/rest/crypto/snapshot_gainers_losers_test.exs | aai/ex_polygon | 3bd6d8d0f1fbe4cd3fa939751c5ff00813eeeba3 | [
"MIT"
] | null | null | null | test/ex_polygon/rest/crypto/snapshot_gainers_losers_test.exs | aai/ex_polygon | 3bd6d8d0f1fbe4cd3fa939751c5ff00813eeeba3 | [
"MIT"
] | null | null | null | test/ex_polygon/rest/crypto/snapshot_gainers_losers_test.exs | aai/ex_polygon | 3bd6d8d0f1fbe4cd3fa939751c5ff00813eeeba3 | [
"MIT"
] | null | null | null | defmodule ExPolygon.Rest.Crypto.SnapshotGainersLosersTest do
use ExUnit.Case, async: false
use ExVCR.Mock, adapter: ExVCR.Adapter.Hackney
doctest ExPolygon.Rest.HTTPClient
setup_all do
  # ExVCR replays recorded cassettes through the hackney adapter, so the
  # HTTP client application must be running before any test executes.
  HTTPoison.start()
  :ok
end
@api_key System.get_env("POLYGON_API_KEY")
# Replays a recorded HTTP response (ExVCR cassette) and checks that each
# snapshot in the "gainers" list decodes into an %ExPolygon.Snapshot{}
# whose fields have the expected types.
test ".query returns an ok tuple and a list top/bottom 20" do
  use_cassette "rest/crypto/snapshot_gainers_losers/query_ok" do
    assert {:ok, snaps} = ExPolygon.Rest.Crypto.SnapshotGainersLosers.query("gainers", @api_key)
    assert [%ExPolygon.Snapshot{} = snap | _] = snaps
    assert is_bitstring(snap.ticker)
    assert is_map(snap.day)
    assert is_map(snap.last_trade)
    assert is_map(snap.min)
    assert is_map(snap.prev_day)
    assert is_float(snap.todays_change)
    assert is_float(snap.todays_change_perc)
    assert is_integer(snap.updated)
  end
end
end
| 30.344828 | 98 | 0.722727 |
0822da400e7445865bd19f5f13eed484d54e0a86 | 583 | exs | Elixir | lib/euler_004.exs | sorentwo/euler | 76244a0ef3dcfa17d6b9571daa5d0b46f09057f4 | [
"MIT"
] | 8 | 2015-11-04T05:03:05.000Z | 2022-01-25T19:34:46.000Z | lib/euler_004.exs | sorentwo/euler | 76244a0ef3dcfa17d6b9571daa5d0b46f09057f4 | [
"MIT"
] | null | null | null | lib/euler_004.exs | sorentwo/euler | 76244a0ef3dcfa17d6b9571daa5d0b46f09057f4 | [
"MIT"
defmodule EulerFour do
  @moduledoc """
  Project Euler problem 4.

  A palindromic number reads the same both ways. The largest palindrome made
  from the product of two 2-digit numbers is 9009 = 91 × 99.
  Find the largest palindrome made from the product of two 3-digit numbers.
  """

  @doc "Largest palindromic product of two 3-digit numbers."
  def solve do
    tuples()
    |> Enum.map(fn {a, b} -> a * b end)
    |> Enum.filter(&palindrome?/1)
    |> Enum.max()
  end

  @doc "All ordered pairs `{x, y}` with `x` and `y` ranging over 999 down to 100."
  def tuples do
    # Fix: `lc x inlist ... , do:` is pre-1.0 syntax removed from the
    # language; `for` is the modern comprehension. `//-1` makes the
    # descending step explicit (same elements as Enum.to_list(999..100)).
    for x <- 999..100//-1, y <- 999..100//-1, do: {x, y}
  end

  @doc "True when the decimal representation of `n` reads the same both ways."
  def palindrome?(n) do
    # Fix: `integer_to_binary/1` is no longer auto-imported from :erlang;
    # Integer.to_string/1 is the supported equivalent.
    digits = Integer.to_string(n)
    digits == String.reverse(digits)
  end
end
| 26.5 | 83 | 0.667238 |
0822e18a92317f106af0b9ff2e333eeaad95f56a | 686 | ex | Elixir | apps/ello_stream/lib/ello_stream/key.ex | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 16 | 2017-06-21T21:31:20.000Z | 2021-05-09T03:23:26.000Z | apps/ello_stream/lib/ello_stream/key.ex | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 25 | 2017-06-07T12:18:28.000Z | 2018-06-08T13:27:43.000Z | apps/ello_stream/lib/ello_stream/key.ex | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 3 | 2018-06-14T15:34:07.000Z | 2022-02-28T21:06:13.000Z | defmodule Ello.Stream.Key do
  @moduledoc """
  Builds the versioned roshi stream key for a stream source.

  The `:v1` suffixes version the key format; bump them when stream contents
  need to be rebuilt.
  """
  alias Ello.Core.Discovery.Category
  alias Ello.Core.Contest.ArtistInvite

  @category_key "categories:v1"
  @artist_invite_key "artist_invite:v1"

  @doc "Key for an unqualified stream source."
  def find(:global_recent) do
    "all_post_firehose"
  end

  def find(:global_shop) do
    "global_shop_stream:v1"
  end

  def find(%ArtistInvite{} = invite) do
    artist_invite_key(invite)
  end

  @doc "Key for a stream source qualified by a stream kind."
  def find(%ArtistInvite{} = invite, _kind) do
    # Artist invites use the same key regardless of the requested kind.
    artist_invite_key(invite)
  end

  def find(%Category{roshi_slug: slug}, :featured) do
    "#{@category_key}:#{slug}"
  end

  def find(%Category{roshi_slug: slug}, :recent) do
    "#{@category_key}:recent:#{slug}"
  end

  def find(%Category{roshi_slug: slug}, :shop) do
    "#{@category_key}:shop:#{slug}"
  end

  defp artist_invite_key(%ArtistInvite{id: id}), do: "#{@artist_invite_key}:#{id}"
end
| 40.352941 | 87 | 0.702624 |
08230707012435c172de65a59b03b06f94327f67 | 24 | ex | Elixir | lib/authit.ex | StephaneRob/authit | e5b3f637676ba7b16aa0c8d28c29f73d368bc3c4 | [
"BSD-2-Clause"
] | 1 | 2021-09-28T15:29:15.000Z | 2021-09-28T15:29:15.000Z | lib/authit.ex | StephaneRob/authit | e5b3f637676ba7b16aa0c8d28c29f73d368bc3c4 | [
"BSD-2-Clause"
] | null | null | null | lib/authit.ex | StephaneRob/authit | e5b3f637676ba7b16aa0c8d28c29f73d368bc3c4 | [
"BSD-2-Clause"
] | null | null | null | defmodule Authit do
  # NOTE(review): intentionally empty — appears to exist only to reserve the
  # library's root namespace; confirm nothing is expected to live here.
end
| 8 | 19 | 0.833333 |
08231350fb51b50799dd6c3a245125c0aab3367a | 3,048 | exs | Elixir | test/oli_web/live/products_test.exs | wyeworks/oli-torus | 146ee79a7e315e57bdf3c7b6fd4f7dbe73610647 | [
"MIT"
] | null | null | null | test/oli_web/live/products_test.exs | wyeworks/oli-torus | 146ee79a7e315e57bdf3c7b6fd4f7dbe73610647 | [
"MIT"
] | 9 | 2021-11-02T16:52:09.000Z | 2022-03-25T15:14:01.000Z | test/oli_web/live/products_test.exs | wyeworks/oli-torus | 146ee79a7e315e57bdf3c7b6fd4f7dbe73610647 | [
"MIT"
] | null | null | null | defmodule OliWeb.ProductsLiveTest do
  # LiveView tests for the product details page: content settings, access
  # control for anonymous/non-admin users, and basic rendering.
  # NOTE(review): OliWeb.ConnCase normally pulls in ExUnit.Case itself, which
  # would make the explicit `use ExUnit.Case` redundant — confirm.
  use ExUnit.Case
  use OliWeb.ConnCase
  import Phoenix.LiveViewTest
  import Oli.Factory
  alias Oli.Delivery.Sections
  # Route to the product details LiveView for the given product slug.
  defp live_view_details_route(product_slug) do
    Routes.live_path(OliWeb.Endpoint, OliWeb.Products.DetailsView, product_slug)
  end
  # Factory setup: a paid blueprint section ("product") shared by the tests.
  defp create_product(_conn) do
    product = insert(:section, type: :blueprint, requires_payment: true, amount: Money.new(:USD, 10))
    [product: product]
  end
  describe "product overview content settings" do
    setup [:admin_conn, :create_product]
    test "save event updates curriculum numbering visibility", %{conn: conn, product: product} do
      {:ok, view, _html} = live(conn, live_view_details_route(product.slug))
      assert view
             |> element("#section_display_curriculum_item_numbering")
             |> render() =~ "checked"
      view
      # Bug fix: the attribute selector was missing its closing "]",
      # leaving the CSS selector malformed.
      |> element("#content-form form[phx-change=\"save\"]")
      |> render_change(%{
        "section" => %{"display_curriculum_item_numbering" => "false"}
      })
      # The change must be persisted, not just reflected in the view.
      updated_section = Sections.get_section!(product.id)
      refute updated_section.display_curriculum_item_numbering
      refute view
             |> element("#section_display_curriculum_item_numbering")
             |> render() =~ "checked"
    end
  end
  describe "user cannot access when is not logged in" do
    setup [:create_product]
    test "redirects to new session when accessing the product detail view", %{
      conn: conn,
      product: product
    } do
      product_slug = product.slug
      # The original request path is carried along so login can return here.
      redirect_path =
        "/authoring/session/new?request_path=%2Fauthoring%2Fproducts%2F#{product_slug}"
      {:error, {:redirect, %{to: ^redirect_path}}} =
        live(conn, live_view_details_route(product_slug))
    end
  end
  describe "user cannot access when is logged in as an author but is not a system admin" do
    setup [:author_conn, :create_product]
    test "redirects to new session when accessing the section overview view", %{
      conn: conn,
      product: product
    } do
      conn = get(conn, live_view_details_route(product.slug))
      redirect_path = "/unauthorized"
      assert redirected_to(conn, 302) =~ redirect_path
    end
  end
  describe "details live view" do
    setup [:admin_conn, :create_product]
    test "returns 404 when product not exists", %{conn: conn} do
      conn = get(conn, live_view_details_route("not_exists"))
      redirect_path = "/not_found"
      assert redirected_to(conn, 302) =~ redirect_path
    end
    test "loads product data correctly", %{conn: conn, product: product} do
      {:ok, view, _html} = live(conn, live_view_details_route(product.slug))
      assert render(view) =~ "Details"
      assert render(view) =~ "The Product title and description"
      assert has_element?(view, "input[value=\"#{product.title}\"]")
      assert has_element?(view, "input[name=\"section[pay_by_institution]\"]")
      assert has_element?(view, "a[href=\"#{Routes.discount_path(OliWeb.Endpoint, :product, product.slug)}\"]")
    end
  end
end
| 31.42268 | 111 | 0.680774 |
08232844b1241a788d298e1ce9f08f66f982b3ee | 3,606 | ex | Elixir | lib/toml_elixir/mapper.ex | nikolauska/toml_elixir | f978a4866a726efb0c628edf9fbe7fd235c47855 | [
"MIT"
] | 4 | 2017-05-21T11:47:36.000Z | 2017-12-27T10:55:41.000Z | lib/toml_elixir/mapper.ex | nikolauska/toml_elixir | f978a4866a726efb0c628edf9fbe7fd235c47855 | [
"MIT"
] | 2 | 2017-07-18T16:19:38.000Z | 2018-02-19T19:15:36.000Z | lib/toml_elixir/mapper.ex | nikolauska/toml_elixir | f978a4866a726efb0c628edf9fbe7fd235c47855 | [
"MIT"
] | null | null | null | defmodule TomlElixir.Mapper do
  @moduledoc """
  Module for transforming toml list to map format
  """
  alias TomlElixir.Error
  @doc """
  Transform TOML list to map format
  """
  @spec parse(list) :: map
  def parse([]), do: %{}
  def parse(toml) when is_list(toml), do: to_map(toml, {[], %{}})
  # Walks the parsed token list. The accumulator is `{to, acc}` where `to`
  # is the current table path (list of keys) and `acc` the map built so far.
  # Errors are raised via `throw` and are expected to be caught by the caller.
  @spec to_map(list, {list, map}) :: map
  defp to_map([], {_to, acc}), do: acc
  # A [table] header identical to the current path: duplicate table.
  # NOTE(review): this only detects a table repeated immediately after
  # itself, not one re-declared later — confirm intent.
  defp to_map([{:table, to} | _tail], {to, _acc}) do
    throw Error.exception("Duplicate table #{Enum.join(to, ".")}")
  end
  # A trailing empty table header still has to materialize an empty map.
  defp to_map([{:table, to} | []], {_to, acc}) do
    do_put_in(to, nil, %{}, acc)
  end
  # Switch the current path; subsequent keys are written under `to`.
  defp to_map([{:table, to} | tail], {_to, acc}) do
    to_map(tail, {to, acc})
  end
  # [[array of tables]]: append a fresh element map at the path.
  defp to_map([{:array_table, to} | tail], {_to, acc}) do
    to_map(tail, {to, do_put_in_new(to, acc)})
  end
  # Key/value pairs: each tagged value is unwrapped and stored at the
  # current path. Arrays keep their (possibly tagged) elements as-is.
  defp to_map([{{:key, key}, {:array, val}} | tail], {to, acc}) do
    to_map(tail, {to, do_put_in(to, key, val, acc)})
  end
  defp to_map([{{:key, key}, {:datetime, val}} | tail], {to, acc}) do
    to_map(tail, {to, do_put_in(to, key, val, acc)})
  end
  defp to_map([{{:key, key}, {:date, val}} | tail], {to, acc}) do
    to_map(tail, {to, do_put_in(to, key, val, acc)})
  end
  defp to_map([{{:key, key}, {:time, val}} | tail], {to, acc}) do
    to_map(tail, {to, do_put_in(to, key, val, acc)})
  end
  defp to_map([{{:key, key}, {:string, val}} | tail], {to, acc}) do
    to_map(tail, {to, do_put_in(to, key, val, acc)})
  end
  defp to_map([{{:key, key}, {:string_ml, val}} | tail], {to, acc}) do
    to_map(tail, {to, do_put_in(to, key, val, acc)})
  end
  defp to_map([{{:key, key}, {:literal, val}} | tail], {to, acc}) do
    to_map(tail, {to, do_put_in(to, key, val, acc)})
  end
  defp to_map([{{:key, key}, {:literal_ml, val}} | tail], {to, acc}) do
    to_map(tail, {to, do_put_in(to, key, val, acc)})
  end
  # Inline tables are themselves key/value lists; recurse through parse/1.
  defp to_map([{{:key, key}, {:inline_table, val}} | tail], {to, acc}) when is_list(val) do
    to_map(tail, {to, do_put_in(to, key, parse(val), acc)})
  end
  # Catch-all for untagged values (booleans, integers, floats, ...).
  defp to_map([{{:key, key}, val} | tail], {to, acc}) do
    to_map(tail, {to, do_put_in(to, key, val, acc)})
  end
  # Writes `val` under `key` at path `to` inside `acc`. When the container at
  # a level is a list (an array of tables), the write targets its LAST
  # element, matching TOML's "most recently defined table" semantics.
  @spec do_put_in(list, String.t | nil, any, list | map) :: map
  defp do_put_in([], key, val, []) do
    [Map.put(%{}, key, val)]
  end
  defp do_put_in([], key, val, acc) when is_list(acc) do
    List.update_at(acc, -1, &Map.put(&1, key, val))
  end
  # Direct write into a map; re-assigning an existing key is an error.
  defp do_put_in([], key, val, acc) when is_map(acc) do
    if Map.has_key?(acc, key) do
      throw Error.exception("Duplicate key #{key}")
    else
      Map.put(acc, key, val)
    end
  end
  # `key == nil` marks the empty-table case: create `%{}` at the last path
  # segment (see the trailing-table clause of to_map/2).
  defp do_put_in([key], nil, val, acc) when is_map(acc) do
    Map.put(acc, key, val)
  end
  defp do_put_in(to, key, val, acc) when is_list(acc) do
    List.update_at(acc, -1, &do_put_in(to, key, val, &1))
  end
  # Descend one path segment, creating intermediate maps on demand.
  defp do_put_in([head | tail], key, val, acc) when is_map(acc) do
    Map.put(acc, head, do_put_in(tail, key, val, Map.get(acc, head, %{})))
  end
  defp do_put_in(_to, _key, _val, acc) do
    throw Error.exception("Invalid type #{inspect acc}, should be map")
  end
  # Appends a new empty element for an [[array of tables]] at path `to`,
  # converting a (still empty) map into a one-element list on first use.
  @spec do_put_in_new(list, list | map) :: list | map
  defp do_put_in_new([], acc) when is_list(acc) do
    List.insert_at(acc, -1, %{})
  end
  defp do_put_in_new([], acc) when acc == %{} do
    [%{}]
  end
  # A non-empty map here means the name is already taken by a plain table.
  defp do_put_in_new([], acc) when is_map(acc) do
    throw Error.exception("Should be empty, but #{inspect acc} was found")
  end
  defp do_put_in_new(to, acc) when is_list(acc) do
    List.update_at(acc, -1, &do_put_in_new(to, &1))
  end
  defp do_put_in_new([head | tail], acc) when is_map(acc) do
    Map.put(acc, head, do_put_in_new(tail, Map.get(acc, head, %{})))
  end
end
| 35.009709 | 91 | 0.599556 |
08233297913d491a01bd60907b13537aaa133327 | 2,532 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/metro.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/metro.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/metro.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V34.Model.Metro do
  # NOTE(review): auto-generated module (see the header above); regenerate
  # with the elixir code generator rather than editing by hand.
  @moduledoc """
  Contains information about a metro region that can be targeted by ads.
  ## Attributes
  *   `countryCode` (*type:* `String.t`, *default:* `nil`) - Country code of the country to which this metro region belongs.
  *   `countryDartId` (*type:* `String.t`, *default:* `nil`) - DART ID of the country to which this metro region belongs.
  *   `dartId` (*type:* `String.t`, *default:* `nil`) - DART ID of this metro region.
  *   `dmaId` (*type:* `String.t`, *default:* `nil`) - DMA ID of this metro region. This is the ID used for targeting and generating reports, and is equivalent to metro_code.
  *   `kind` (*type:* `String.t`, *default:* `nil`) - Identifies what kind of resource this is. Value: the fixed string "dfareporting#metro".
  *   `metroCode` (*type:* `String.t`, *default:* `nil`) - Metro code of this metro region. This is equivalent to dma_id.
  *   `name` (*type:* `String.t`, *default:* `nil`) - Name of this metro region.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :countryCode => String.t() | nil,
          :countryDartId => String.t() | nil,
          :dartId => String.t() | nil,
          :dmaId => String.t() | nil,
          :kind => String.t() | nil,
          :metroCode => String.t() | nil,
          :name => String.t() | nil
        }
  field(:countryCode)
  field(:countryDartId)
  field(:dartId)
  field(:dmaId)
  field(:kind)
  field(:metroCode)
  field(:name)
end
# Generated Poison protocol impls delegating to the Gax model machinery.
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V34.Model.Metro do
  def decode(value, options) do
    GoogleApi.DFAReporting.V34.Model.Metro.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V34.Model.Metro do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 38.953846 | 174 | 0.67891 |
08234e95297de00a3dc34ee4d645cef302db4841 | 365 | ex | Elixir | lib/yatzy/scoring/sixes.ex | idabmat/yatzy | fb1cc1f13670a19f3541a3b1df15e9897ffcfae3 | [
"MIT"
] | 3 | 2020-04-23T14:38:39.000Z | 2020-05-03T17:20:32.000Z | lib/yatzy/scoring/sixes.ex | idabmat/yatzy | fb1cc1f13670a19f3541a3b1df15e9897ffcfae3 | [
"MIT"
] | null | null | null | lib/yatzy/scoring/sixes.ex | idabmat/yatzy | fb1cc1f13670a19f3541a3b1df15e9897ffcfae3 | [
"MIT"
] | null | null | null | defmodule Yatzy.Scoring.Sixes do
  @moduledoc """
  Sixes: The sum of all dice showing the number 6.
  """
  alias Yatzy.Roll
  # Upper-section score category over a roll; `name`/`description` are the
  # user-facing labels for this category.
  defstruct roll: %Roll{},
            name: "Sixes",
            # Bug fix: description had a typo ("showind" -> "showing").
            description: "The sum of all dice showing the number 6."
  defimpl Yatzy.Scoring.Score do
    # Delegates to the shared upper-section scorer for dice showing a six.
    # NOTE(review): assumes Yatzy.Scoring.count/2 returns the summed score,
    # not the bare count — confirm against its implementation.
    def execute(%{roll: roll}), do: Yatzy.Scoring.count(roll.dice, 6)
  end
end
| 22.8125 | 69 | 0.643836 |
082351ec191c0ff43592efd09508a32075bd2bce | 8,218 | ex | Elixir | lib/aws/generated/finspacedata.ex | andrewhr/aws-elixir | 861dc2fafca50a2b2f83badba4cdcb44b5b0c171 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/finspacedata.ex | andrewhr/aws-elixir | 861dc2fafca50a2b2f83badba4cdcb44b5b0c171 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/finspacedata.ex | andrewhr/aws-elixir | 861dc2fafca50a2b2f83badba4cdcb44b5b0c171 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.Finspacedata do
@moduledoc """
The FinSpace APIs let you take actions inside the FinSpace.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2020-07-13",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "finspace-api",
global?: false,
protocol: "rest-json",
service_id: "finspace data",
signature_version: "v4",
signing_name: "finspace-api",
target_prefix: nil
}
end
@doc """
Creates a new Changeset in a FinSpace Dataset.
"""
def create_changeset(%Client{} = client, dataset_id, input, options \\ []) do
url_path = "/datasets/#{AWS.Util.encode_uri(dataset_id)}/changesetsv2"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a Dataview for a Dataset.
"""
def create_data_view(%Client{} = client, dataset_id, input, options \\ []) do
url_path = "/datasets/#{AWS.Util.encode_uri(dataset_id)}/dataviewsv2"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Creates a new FinSpace Dataset.
"""
def create_dataset(%Client{} = client, input, options \\ []) do
url_path = "/datasetsv2"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Deletes a FinSpace Dataset.
"""
def delete_dataset(%Client{} = client, dataset_id, input, options \\ []) do
url_path = "/datasetsv2/#{AWS.Util.encode_uri(dataset_id)}"
headers = []
{query_params, input} =
[
{"clientToken", "clientToken"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Get information about a Changeset.
"""
def get_changeset(%Client{} = client, changeset_id, dataset_id, options \\ []) do
url_path =
"/datasets/#{AWS.Util.encode_uri(dataset_id)}/changesetsv2/#{AWS.Util.encode_uri(changeset_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Gets information about a Dataview.
"""
def get_data_view(%Client{} = client, data_view_id, dataset_id, options \\ []) do
url_path =
"/datasets/#{AWS.Util.encode_uri(dataset_id)}/dataviewsv2/#{AWS.Util.encode_uri(data_view_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns information about a Dataset.
"""
def get_dataset(%Client{} = client, dataset_id, options \\ []) do
url_path = "/datasetsv2/#{AWS.Util.encode_uri(dataset_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Request programmatic credentials to use with FinSpace SDK.
"""
def get_programmatic_access_credentials(
%Client{} = client,
duration_in_minutes \\ nil,
environment_id,
options \\ []
) do
url_path = "/credentials/programmatic"
headers = []
query_params = []
query_params =
if !is_nil(environment_id) do
[{"environmentId", environment_id} | query_params]
else
query_params
end
query_params =
if !is_nil(duration_in_minutes) do
[{"durationInMinutes", duration_in_minutes} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
A temporary Amazon S3 location, where you can copy your files from a source
location to stage or use as a scratch space in FinSpace notebook.
"""
def get_working_location(%Client{} = client, input, options \\ []) do
url_path = "/workingLocationV1"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Lists the FinSpace Changesets for a Dataset.
"""
def list_changesets(
%Client{} = client,
dataset_id,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/datasets/#{AWS.Util.encode_uri(dataset_id)}/changesetsv2"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists all available Dataviews for a Dataset.
"""
def list_data_views(
%Client{} = client,
dataset_id,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/datasets/#{AWS.Util.encode_uri(dataset_id)}/dataviewsv2"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists all of the active Datasets that a user has access to.
"""
def list_datasets(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
url_path = "/datasetsv2"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"nextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"maxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Updates a FinSpace Changeset.
"""
def update_changeset(%Client{} = client, changeset_id, dataset_id, input, options \\ []) do
url_path =
"/datasets/#{AWS.Util.encode_uri(dataset_id)}/changesetsv2/#{AWS.Util.encode_uri(changeset_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Updates a FinSpace Dataset.
"""
def update_dataset(%Client{} = client, dataset_id, input, options \\ []) do
url_path = "/datasetsv2/#{AWS.Util.encode_uri(dataset_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
nil
)
end
end
| 20.191646 | 102 | 0.574349 |
0823553927b1302f32e4c273a1daf3b74b527315 | 1,724 | ex | Elixir | clients/health_care/lib/google_api/health_care/v1beta1/model/bounding_poly.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/health_care/lib/google_api/health_care/v1beta1/model/bounding_poly.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/health_care/lib/google_api/health_care/v1beta1/model/bounding_poly.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.HealthCare.V1beta1.Model.BoundingPoly do
  # NOTE(review): auto-generated module (see the header above); regenerate
  # with the elixir code generator rather than editing by hand.
  @moduledoc """
  A bounding polygon for the detected image annotation.
  ## Attributes
  *   `label` (*type:* `String.t`, *default:* `nil`) - A description of this polygon.
  *   `vertices` (*type:* `list(GoogleApi.HealthCare.V1beta1.Model.Vertex.t)`, *default:* `nil`) - List of the vertices of this polygon.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :label => String.t() | nil,
          :vertices => list(GoogleApi.HealthCare.V1beta1.Model.Vertex.t()) | nil
        }
  field(:label)
  field(:vertices, as: GoogleApi.HealthCare.V1beta1.Model.Vertex, type: :list)
end
# Generated Poison protocol impls delegating to the Gax model machinery.
defimpl Poison.Decoder, for: GoogleApi.HealthCare.V1beta1.Model.BoundingPoly do
  def decode(value, options) do
    GoogleApi.HealthCare.V1beta1.Model.BoundingPoly.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.HealthCare.V1beta1.Model.BoundingPoly do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 34.48 | 136 | 0.729698 |
08235d909750f8377593cc2572308897f940e24c | 1,087 | exs | Elixir | mix.exs | mazurka/example-class-roll | 50e55eca6e0e3dce5214d1bb9b03a0421c0ed879 | [
"MIT"
] | null | null | null | mix.exs | mazurka/example-class-roll | 50e55eca6e0e3dce5214d1bb9b03a0421c0ed879 | [
"MIT"
] | null | null | null | mix.exs | mazurka/example-class-roll | 50e55eca6e0e3dce5214d1bb9b03a0421c0ed879 | [
"MIT"
] | null | null | null | defmodule ClassRoll.Mixfile do
  use Mix.Project

  # Project definition for the :class_roll OTP application.
  def project do
    [app: :class_roll,
     version: "0.1.0",
     elixir: "~> 1.0",
     compilers: Mix.compilers,
     deps: deps,
     build_embedded: Mix.env == :prod,
     start_permanent: Mix.env == :prod]
  end

  ####
  # Applications
  ####
  def application do
    [
      mod: { ClassRoll, [] },
      applications: [
        :cowboy,
        :logger,
        # Dev-only runtime applications are appended conditionally below.
      ] ++ (Mix.env == :dev && dev_applications || [])
    ]
  end

  defp dev_applications do
    [:rl,]
  end

  ####
  # Deps
  ####
  defp deps do
    [{ :cowboy, "1.0.0" },
     { :ecto, "~> 1.0.0" },
     { :etude, "~> 0.3.0" },
     { :mazurka, github: "mazurka/mazurka" },
     # { :mazurka, "~> 0.3.0" },
     # Bug fix: :parse_trans was listed twice; duplicate dependency
     # entries are rejected by Mix.
     { :parse_trans, github: "uwiger/parse_trans" },
     { :plug, "~> 0.13.0" },
     { :plug_wait1, "~> 0.1.2" },
     { :poison, "1.4.0", override: true },
     { :postgrex, ">= 0.0.0" },
     { :rl, github: "camshaft/rl", only: :dev },
     { :simple_env, github: "camshaft/simple_env" },]
  end
end
| 20.903846 | 54 | 0.50046 |
0823a41b48d111369739cca56ab63c715c8a0ee1 | 2,242 | exs | Elixir | mylist.exs | lebm/MyElixirTests | 4221012296c913179cdda278290263bfc1141454 | [
"Apache-2.0"
] | null | null | null | mylist.exs | lebm/MyElixirTests | 4221012296c913179cdda278290263bfc1141454 | [
"Apache-2.0"
] | null | null | null | mylist.exs | lebm/MyElixirTests | 4221012296c913179cdda278290263bfc1141454 | [
"Apache-2.0"
] | null | null | null | # Lists e recurson examples
defmodule MyList do
  @moduledoc """
  Hand-rolled recursive list helpers, with bonus clauses that also accept
  ascending `first..last` ranges as input.
  """

  @doc "Number of elements in the list (recursive equivalent of `length/1`)."
  def len([]), do: 0
  def len([_ | rest]), do: 1 + len(rest)

  @doc "New list with every element squared."
  def square([]), do: []
  def square([x | rest]), do: [x * x | square(rest)]

  @doc """
  Applies `fun` to every element. Also accepts an ascending range, which is
  walked element by element without materializing it as a list first.
  """
  def map([], _fun), do: []
  def map([x | rest], fun), do: [fun.(x) | map(rest, fun)]
  # `lo..lo` binds both endpoints to the same value: single-element range.
  def map(lo..lo, fun), do: [fun.(lo)]
  def map(lo..hi, fun), do: [fun.(lo) | map((lo + 1)..hi, fun)]

  @doc "Left fold over a list or an ascending range."
  def reduce([], acc, _fun), do: acc
  def reduce([x | rest], acc, fun), do: reduce(rest, fun.(x, acc), fun)
  def reduce(lo..lo, acc, fun), do: fun.(lo, acc)
  def reduce(lo..hi, acc, fun), do: reduce((lo + 1)..hi, fun.(lo, acc), fun)

  @doc "Maps `fun` over the input, then sums the results."
  def mapsum(list, fun) do
    list
    |> map(fun)
    |> reduce(0, &(&1 + &2))
  end

  @doc "Maps `mapfunc` over the input, then folds the results with `reducefunc`."
  def mapreduce(list, initvalue, mapfunc, reducefunc) do
    list
    |> map(mapfunc)
    |> reduce(initvalue, reducefunc)
  end
end
defmodule Main do
  # Demo driver exercising MyList. The import is narrowed to exactly the
  # functions used below; in application code prefer qualified calls or an
  # alias when the module name is long.
  import MyList, only: [len: 1, square: 1, map: 2, reduce: 3, mapsum: 2, mapreduce: 4]

  @doc "Prints the result of exercising every MyList helper."
  def run do
    [1, 2, 3, 4] |> len() |> IO.puts()
    ["a", "b", "c"] |> len() |> IO.puts()
    [:one, "two", 3] |> len() |> IO.puts()
    [1, 2, 3, 4] |> square() |> IO.inspect()
    # Mapping with the full `fn` syntax and with the capture shorthand.
    [1, 2, 3, 4] |> map(fn x -> x + 1 end) |> IO.inspect()
    [1, 2, 3, 4] |> map(&(&1 * &1)) |> IO.inspect()
    # Folds, again in both anonymous-function styles.
    [1, 2, 3, 4] |> reduce(0, fn a, b -> a + b end) |> IO.inspect()
    [1, 2, 3, 4] |> reduce(1, &(&1 * &2)) |> IO.inspect()
    # Ranges work too.
    1..10 |> reduce(1, &(&1 * &2)) |> IO.inspect()
    [1, 2, 3, 4] |> mapsum(&(&1 * &1)) |> IO.inspect()
    [1, 2, 3, 4] |> mapsum(&(&1 + 1)) |> IO.inspect()
    [1, 2, 3, 4] |> mapsum(&(&1 * 10)) |> IO.inspect()
    1..4 |> mapsum(&(&1 * &1)) |> IO.inspect()
    1..4 |> mapsum(&(&1 + 1)) |> IO.inspect()
    1..4 |> mapsum(&(&1 * 10)) |> IO.inspect()
    1..4 |> mapreduce(0, &(&1 * &1), &(&1 + &2)) |> IO.inspect()
    1..4 |> mapreduce(1, &(&1 * &1), &(&1 * &2)) |> IO.inspect()
  end
end
| 32.970588 | 86 | 0.557538 |
0823c079fe37983b10dd73980ac537cf50ff0bf9 | 1,167 | ex | Elixir | lib/safira/contest/redeem.ex | cesium/safira | 07a02f54f9454db1cfb5a510da68f40c47dcd916 | [
"MIT"
] | 40 | 2018-07-04T19:13:45.000Z | 2021-12-16T23:53:43.000Z | lib/safira/contest/redeem.ex | cesium/safira | 07a02f54f9454db1cfb5a510da68f40c47dcd916 | [
"MIT"
] | 94 | 2018-07-25T13:13:39.000Z | 2022-02-15T04:09:42.000Z | lib/safira/contest/redeem.ex | cesium/safira | 07a02f54f9454db1cfb5a510da68f40c47dcd916 | [
"MIT"
] | 5 | 2018-11-26T17:19:03.000Z | 2021-02-23T08:09:37.000Z | defmodule Safira.Contest.Redeem do
use Ecto.Schema
import Ecto.Changeset
alias Safira.Contest.Badge
alias Safira.Accounts.Attendee
alias Safira.Accounts.Manager
schema "redeems" do
belongs_to(:attendee, Attendee, foreign_key: :attendee_id, type: :binary_id)
belongs_to(:manager, Manager)
belongs_to(:badge, Badge)
timestamps()
end
@doc false
def changeset(redeem, attrs) do
redeem
|> cast(attrs, [:attendee_id, :manager_id, :badge_id])
|> validate_required([:attendee_id, :badge_id])
|> unique_constraint(:unique_attendee_badge,
name: :unique_attendee_badge,
message: "An attendee can't have the same badge twice"
)
|> is_within_period()
end
def is_within_period(changeset) do
{_, badge} = fetch_field(changeset, :badge)
curr = Datetime.utc_now()
cond do
Datetime.compare(curr, badge.start) == :lt ->
add_error(changeset, :begin, "Badge cannot be redeemed before the activity")
Datetime.compare(curr, badge.end) == :gt ->
add_error(changeset, :end, "Badge cannot be redeemed after the activity")
true ->
changeset
end
end
end
| 26.522727 | 84 | 0.681234 |
0823c121ce5fe5b53bff2c61e6071a2082eacdda | 4,285 | ex | Elixir | lib/models/relationship.ex | heikkari/dashium | d3f58033efac81e7f747091f2810caf2ce8f45ab | [
"MIT"
] | 2 | 2021-09-05T23:23:00.000Z | 2021-09-11T16:26:11.000Z | lib/models/relationship.ex | heikkari/dashium | d3f58033efac81e7f747091f2810caf2ce8f45ab | [
"MIT"
] | null | null | null | lib/models/relationship.ex | heikkari/dashium | d3f58033efac81e7f747091f2810caf2ce8f45ab | [
"MIT"
] | null | null | null | defmodule Models.Relationship do
  @moduledoc """
  A relationship document stored in the "relationships" Mongo collection.

  `user_ids` is `[sender, receiver]`; `status` is an integer code:
  0 = pending friend request, 1 = friends, 2 = blocked.
  """

  defstruct [
    :user_ids, # 0 = sender, 1 = receiver.
    :status, # 0 = friend request, 1 = friends, 2 = blocked.
  ]

  # Selector for the single relationship BETWEEN the two given users.
  # Bug fix: "$all" requires the user_ids array to contain BOTH ids; the
  # previous "$in" selector matched any document containing EITHER id, so
  # lookups, updates and deletes could hit an unrelated relationship.
  defp between(a, b), do: %{ user_ids: %{ "$all" => [ a, b ] } }

  @doc "Deletes the relationship between the two users; true on success."
  @spec delete(integer, integer) :: boolean
  def delete(sender, receiver)
    when is_integer(sender) and is_integer(receiver)
  do
    { status, _ } = Mongo.delete_one(:mongo, "relationships", between(sender, receiver))
    status === :ok
  end

  @doc "True when any relationship exists between the two users."
  @spec exists(integer, integer) :: boolean
  def exists(sender, receiver)
    when is_integer(sender) and is_integer(receiver)
  do
    { status, _ } = __MODULE__.with(sender, receiver)
    status === :ok
  end

  @doc """
  Moves a *pending* relationship (status 0) between the users to `status`.
  True when the update command succeeded.
  """
  @spec update(integer, integer, integer) :: boolean
  def update(sender, receiver, status)
    when is_integer(sender) and is_integer(receiver) and is_integer(status)
  do
    # Only documents still in the request state (status 0) are eligible.
    query = Map.put(between(sender, receiver), :status, 0)
    { result, _ } = Mongo.update_one(:mongo, "relationships", query, %{ "$set": %{ status: status } })
    result === :ok
  end

  @doc "Creates a relationship with `status` unless one already exists."
  @spec create(integer, integer, integer) :: boolean
  def create(sender, receiver, status)
    when is_integer(sender) and is_integer(receiver) and is_integer(status)
  do
    if not exists(sender, receiver) do
      doc = %__MODULE__{ status: status, user_ids: [ sender, receiver ] }
      { result, _ } = Mongo.insert_one(:mongo, "relationships", Map.from_struct(doc))
      result === :ok
    else
      # Idempotent: an already-existing relationship counts as success.
      true
    end
  end

  @doc "True when the relationship between the users has status 2 (blocked)."
  @spec is_blocked(integer, integer) :: boolean
  def is_blocked(sender, receiver)
    when is_integer(sender) and is_integer(receiver)
  do
    case __MODULE__.with(sender, receiver) do
      { :error, _ } -> false
      { :ok, relationship } -> relationship.status == 2
    end
  end

  @doc "True when the relationship between the users has status 1 (friends)."
  @spec are_friends(integer, integer) :: boolean
  def are_friends(sender, receiver)
    when is_integer(sender) and is_integer(receiver)
  do
    case __MODULE__.with(sender, receiver) do
      { :error, _ } -> false
      { :ok, relationship } -> relationship.status == 1
    end
  end

  @doc """
  Sends a friend request. Inserts a Relationship struct into the database with
  a `status` value of 0 and notifies the receiver with a message. Refused
  (returns false) for self-requests, blocked pairs and existing friends.
  """
  @spec send_friend_request(integer, integer, binary) :: boolean
  def send_friend_request(sender, receiver, msg)
    when is_integer(sender) and is_integer(receiver) and is_binary(msg)
  do
    operation = fn ->
      x = create(sender, receiver, 0)
      y = Models.Message.send(sender, receiver, 1, "Friend request", msg)
      x and y
    end
    cond do
      sender === receiver -> false
      is_blocked(sender, receiver) -> false
      are_friends(sender, receiver) -> false
      true -> operation.()
    end
  end

  @doc """
  Accepts a pending friend request. Only the original receiver
  (`user_ids[1]`) may accept, and only while the relationship is still a
  request (status 0).
  """
  @spec accept_friend_request(integer, integer) :: boolean
  def accept_friend_request(sender, receiver)
    when is_integer(sender) and is_integer(receiver)
  do
    case __MODULE__.with(sender, receiver) do
      { :error, nil } -> false
      { :ok, r } ->
        if sender === Enum.at(r.user_ids, 1) and r.status === 0,
          do: update(sender, receiver, 1),
          else: false
    end
  end

  @doc "Blocks `receiver` for `sender`."
  @spec block(integer, integer) :: boolean
  def block(sender, receiver) do
    x = create(sender, receiver, 0) # Create a relationship between the users if there isn't one
    y = update(sender, receiver, 2) # Update the status to 2 (Blocked)
    x and y
  end

  @doc "Fetches the relationship between the users as `{:ok, t}` or `{:error, nil}`."
  @spec with(integer, integer) :: any
  def with(sender, receiver) when is_integer(sender) and is_integer(receiver) do
    case Mongo.find_one(:mongo, "relationships", between(sender, receiver)) do
      nil -> { :error, nil }
      document -> { :ok, new(document) }
    end
  end

  @doc """
  Returns a list of the user's relations, filtered by the provided status.
  Each element is the matching document's `user_ids` pair.
  """
  @spec of(integer, integer) :: list
  def of(user_id, status) when is_integer(user_id) and is_integer(status) do
    # "$in" with a single element is a plain membership test here, so it is
    # correct for this query.
    Mongo.find(:mongo, "relationships", %{ status: status, user_ids: %{ "$in" => [ user_id ] } })
    |> Enum.map(&(&1["user_ids"]))
  end

  # ExConstructor generates new/1 used by with/2 above.
  use ExConstructor
end
| 30.607143 | 102 | 0.633139 |
0823c25fa3401cfbaca8baf089241a5466bb2ea5 | 2,989 | ex | Elixir | lib/blockchain_api/schema/transaction.ex | pakorn186c/blockchain-api | 3c9fbc892e645f9bb144414f3da36749603f37bc | [
"Apache-2.0"
] | 17 | 2019-11-03T03:02:41.000Z | 2022-01-13T17:03:32.000Z | lib/blockchain_api/schema/transaction.ex | AddressXception/blockchain-api | eea98fa78af2887cc84762f84532c602c3b8b666 | [
"Apache-2.0"
] | 5 | 2019-11-07T23:26:53.000Z | 2020-11-24T21:45:35.000Z | lib/blockchain_api/schema/transaction.ex | AddressXception/blockchain-api | eea98fa78af2887cc84762f84532c602c3b8b666 | [
"Apache-2.0"
defmodule BlockchainAPI.Schema.Transaction do
  @moduledoc """
  Ecto schema for blockchain transactions, plus `map/2` helpers that turn
  raw `:blockchain_txn_*` records into the `%{type, hash}` shape stored in
  this table.
  """
  use Ecto.Schema
  import Ecto.Changeset
  alias BlockchainAPI.{Schema.Block, Schema.Transaction, Util}

  # Fields exposed through JSON encoding and Phoenix params.
  @fields [:id, :hash, :type, :block_height]

  @derive {Phoenix.Param, key: :hash}
  # NOTE(review): Jason.Encoder is both derived here and implemented
  # explicitly via `defimpl` below. The explicit implementation (which
  # hex-encodes the binary hash via encode_model/1) should be the effective
  # one — confirm the derive is intentional.
  @derive {Jason.Encoder, only: @fields}
  schema "transactions" do
    field :type, :string, null: false
    field :block_height, :integer, null: false
    field :hash, :binary, null: false
    field :status, :string, null: false, default: "cleared"

    belongs_to :block, Block, define_field: false, foreign_key: :height

    timestamps()
  end

  # Standard changeset: hash must be unique and block_height must reference
  # an existing block.
  @doc false
  def changeset(transaction, attrs) do
    transaction
    |> cast(attrs, [:hash, :type, :block_height, :status])
    |> validate_required([:hash, :type, :status])
    |> unique_constraint(:hash)
    |> foreign_key_constraint(:block_height)
  end

  # JSON-friendly map of the transaction: same fields as @fields, but with
  # the binary hash converted to its string form.
  def encode_model(transaction) do
    %{
      Map.take(transaction, @fields)
      | hash: Util.bin_to_string(transaction.hash)
    }
  end

  defimpl Jason.Encoder, for: Transaction do
    def encode(transaction, opts) do
      transaction
      |> Transaction.encode_model()
      |> Jason.Encode.map(opts)
    end
  end

  @doc """
  Maps a raw blockchain transaction record (tagged by its Erlang module
  name) to the `%{type, hash}` attributes of this schema. The hash is
  computed by the corresponding `:blockchain_txn_*` module.
  """
  def map(:blockchain_txn_coinbase_v1, txn) do
    %{type: "coinbase", hash: :blockchain_txn_coinbase_v1.hash(txn)}
  end

  def map(:blockchain_txn_consensus_group_v1, txn) do
    %{type: "election", hash: :blockchain_txn_consensus_group_v1.hash(txn)}
  end

  def map(:blockchain_txn_security_coinbase_v1, txn) do
    %{type: "security", hash: :blockchain_txn_security_coinbase_v1.hash(txn)}
  end

  def map(:blockchain_txn_security_exchange_v1, txn) do
    %{type: "security_exchange", hash: :blockchain_txn_security_exchange_v1.hash(txn)}
  end

  def map(:blockchain_txn_dc_coinbase_v1, txn) do
    %{type: "data_credit", hash: :blockchain_txn_dc_coinbase_v1.hash(txn)}
  end

  def map(:blockchain_txn_payment_v1, txn) do
    %{type: "payment", hash: :blockchain_txn_payment_v1.hash(txn)}
  end

  def map(:blockchain_txn_add_gateway_v1, txn) do
    %{type: "gateway", hash: :blockchain_txn_add_gateway_v1.hash(txn)}
  end

  def map(:blockchain_txn_assert_location_v1, txn) do
    %{type: "location", hash: :blockchain_txn_assert_location_v1.hash(txn)}
  end

  def map(:blockchain_txn_gen_gateway_v1, txn) do
    %{type: "gateway", hash: :blockchain_txn_gen_gateway_v1.hash(txn)}
  end

  def map(:blockchain_txn_poc_request_v1, txn) do
    %{type: "poc_request", hash: :blockchain_txn_poc_request_v1.hash(txn)}
  end

  def map(:blockchain_txn_poc_receipts_v1, txn) do
    %{type: "poc_receipts", hash: :blockchain_txn_poc_receipts_v1.hash(txn)}
  end

  def map(:blockchain_txn_rewards_v1, txn) do
    %{type: "rewards", hash: :blockchain_txn_rewards_v1.hash(txn)}
  end

  def map(:blockchain_txn_oui_v1, txn) do
    %{type: "oui", hash: :blockchain_txn_oui_v1.hash(txn)}
  end

  def map(:blockchain_txn_payment_v2, txn) do
    %{type: "payment_v2", hash: :blockchain_txn_payment_v2.hash(txn)}
  end
end
| 29.594059 | 86 | 0.71462 |
0823dcb51cd0d3081467660931e9b014de5450d0 | 756 | ex | Elixir | apps/graphql/lib/graphql/resolvers/settlement.ex | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 8 | 2019-06-14T11:34:49.000Z | 2021-08-05T19:14:24.000Z | apps/graphql/lib/graphql/resolvers/settlement.ex | edenlabllc/ehealth.api.public | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
] | 1 | 2019-07-08T15:20:22.000Z | 2019-07-08T15:20:22.000Z | apps/graphql/lib/graphql/resolvers/settlement.ex | ehealth-ua/ehealth.api | 4ffe26a464fe40c95fb841a4aa2e147068f65ca2 | [
"Apache-2.0"
defmodule GraphQL.Resolvers.Settlement do
  @moduledoc false

  import GraphQL.Resolvers.Helpers.Load, only: [response_to_ecto_struct: 2]

  alias Absinthe.Relay.Connection
  alias Core.Uaddresses
  alias Core.Uaddresses.Settlement

  # Relay-style connection resolver: translates cursor pagination args into
  # an offset/limit query against Uaddresses and wraps the result in a
  # Relay connection slice.
  def list_settlements(%{filter: filter, order_by: order_by} = args, _) do
    with {:ok, offset, limit} <- Connection.offset_and_limit_for_query(args, []),
         # Fetch one extra row (limit + 1) so we can tell whether a next
         # page exists without issuing a separate count query.
         {:ok, settlements} <- Uaddresses.list_settlements(filter, order_by, {offset, limit + 1}) do
      opts = [has_previous_page: offset > 0, has_next_page: length(settlements) > limit]
      settlements = Enum.map(settlements, &response_to_ecto_struct(Settlement, &1))
      Connection.from_slice(Enum.take(settlements, limit), offset, opts)
    end
  end
end
| 37.8 | 100 | 0.734127 |
08243df7cab0f2edc194bdc7885e630b98900079 | 460 | ex | Elixir | lib/ex_aws/s3/direct_upload/date_util.ex | anylabs/ex_aws_s3_direct_upload | 745418e02be321f0e2db181f76fee7a8bcf14ed6 | [
"Apache-2.0"
] | 1 | 2019-05-01T18:39:34.000Z | 2019-05-01T18:39:34.000Z | lib/ex_aws/s3/direct_upload/date_util.ex | anylabs/ex_aws_s3_direct_upload | 745418e02be321f0e2db181f76fee7a8bcf14ed6 | [
"Apache-2.0"
] | 4 | 2019-12-06T17:28:43.000Z | 2020-04-10T14:56:16.000Z | lib/ex_aws/s3/direct_upload/date_util.ex | anylabs/ex_aws_s3_direct_upload | 745418e02be321f0e2db181f76fee7a8bcf14ed6 | [
"Apache-2.0"
defmodule ExAws.S3.DirectUpload.DateUtil do
  @moduledoc """
  Date/time helpers producing the ISO 8601 timestamps used when signing
  S3 direct-upload policies.
  """

  # Today's date at midnight UTC, in basic ISO 8601 form
  # (e.g. "20240101T000000Z").
  def today_datetime do
    DateTime.utc_now()
    |> Map.merge(%{hour: 0, minute: 0, second: 0, microsecond: {0, 0}})
    |> DateTime.to_iso8601(:basic)
  end

  # Today's date in basic ISO 8601 form (e.g. "20240101").
  def today_date do
    Date.to_iso8601(Date.utc_today(), :basic)
  end

  # One hour from now at second precision (the round-trip through Unix
  # seconds drops microseconds), in extended ISO 8601 form.
  def expiration_datetime do
    one_hour_from_now = DateTime.to_unix(DateTime.utc_now()) + 60 * 60

    one_hour_from_now
    |> DateTime.from_unix!()
    |> DateTime.to_iso8601()
  end
end
0824410e8499e61f8603577e8d9421d258a238a1 | 2,184 | ex | Elixir | clients/dns/lib/google_api/dns/v1/model/dns_key_spec.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/dns/lib/google_api/dns/v1/model/dns_key_spec.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/dns/lib/google_api/dns/v1/model/dns_key_spec.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DNS.V1.Model.DnsKeySpec do
  @moduledoc """
  Parameters for DnsKey key generation. Used for generating initial keys for a new ManagedZone and as default when adding a new DnsKey.

  ## Attributes

  *   `algorithm` (*type:* `String.t`, *default:* `nil`) - String mnemonic specifying the DNSSEC algorithm of this key.
  *   `keyLength` (*type:* `integer()`, *default:* `nil`) - Length of the keys in bits.
  *   `keyType` (*type:* `String.t`, *default:* `nil`) - Specifies whether this is a key signing key (KSK) or a zone signing key (ZSK). Key signing keys have the Secure Entry Point flag set and, when active, are only used to sign resource record sets of type DNSKEY. Zone signing keys do not have the Secure Entry Point flag set and are used to sign all other types of resource record sets.
  *   `kind` (*type:* `String.t`, *default:* `dns#dnsKeySpec`) -
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :algorithm => String.t(),
          :keyLength => integer(),
          :keyType => String.t(),
          :kind => String.t()
        }

  # `field/1` comes from GoogleApi.Gax.ModelBase and registers each JSON
  # field on the model struct.
  field(:algorithm)
  field(:keyLength)
  field(:keyType)
  field(:kind)
end
# Decodes JSON into the DnsKeySpec model; delegates to the decode/2
# generated by GoogleApi.Gax.ModelBase.
defimpl Poison.Decoder, for: GoogleApi.DNS.V1.Model.DnsKeySpec do
  def decode(value, options) do
    GoogleApi.DNS.V1.Model.DnsKeySpec.decode(value, options)
  end
end
# Encodes the DnsKeySpec model as JSON via the shared Gax base encoder.
defimpl Poison.Encoder, for: GoogleApi.DNS.V1.Model.DnsKeySpec do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
0824606a2802ec7961c0f6cc163174bbde1e043c | 1,237 | exs | Elixir | examples/petstore/mix.exs | tyrchen/quenya | b9e8ef9e71e0e52b010b930eee66942e30c62ddd | [
"MIT"
] | 143 | 2020-12-01T06:53:36.000Z | 2022-03-24T02:33:01.000Z | examples/petstore/mix.exs | tyrchen/quenya | b9e8ef9e71e0e52b010b930eee66942e30c62ddd | [
"MIT"
] | 2 | 2020-11-30T05:30:42.000Z | 2020-12-17T06:33:17.000Z | examples/petstore/mix.exs | tyrchen/quenya | b9e8ef9e71e0e52b010b930eee66942e30c62ddd | [
"MIT"
defmodule Petstore.MixProject do
  @moduledoc false

  use Mix.Project

  def project do
    [
      app: :petstore,
      version: "0.1.0",
      elixir: "~> 1.10",
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: Mix.compilers(),
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end

  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      mod: {Petstore.Application, []},
      extra_applications: [:logger, :runtime_tools]
    ]
  end

  # Specifies which paths to compile per environment.
  # ("gen" presumably holds code generated by quenya_builder — confirm.)
  defp elixirc_paths(env) when env in [:dev, :test], do: ["lib", "gen", "test/support"]
  defp elixirc_paths(_), do: ["lib", "gen"]

  # Specifies your project dependencies.
  #
  # Type `mix help deps` for examples and options.
  defp deps do
    [
      {:jason, "~> 1.0"},
      {:plug_cowboy, "~> 2.0"},
      # Quenya
      {:quenya, path: "../..", override: true},
      # Quenya builder
      {:quenya_builder, path: "../../builder", override: true, runtime: false},
      # Only needed if you'd like to generate fake handler
      {:json_data_faker, "~> 0.1"},
      # dev and test
      {:mock, "~> 0.3.0", only: :test}
    ]
  end
end
082469846ffe777c8254db2ec7c419c557a1497c | 10,934 | ex | Elixir | apps/emulation/lib/combase.ex | 226wyj/Raft | b882839579bc70b5501e7f1d6fe41d3119162df4 | [
"MIT"
] | null | null | null | apps/emulation/lib/combase.ex | 226wyj/Raft | b882839579bc70b5501e7f1d6fe41d3119162df4 | [
"MIT"
] | null | null | null | apps/emulation/lib/combase.ex | 226wyj/Raft | b882839579bc70b5501e7f1d6fe41d3119162df4 | [
"MIT"
defmodule EmulatorError do
  @moduledoc """
  Raised when a process misuses the emulation layer, e.g. sends messages
  outside the emulation or spawns a process under an already-used name.
  """
  defexception message: "Error in emulation"
end
defmodule ComBase do
  @moduledoc """
  ComBase emulates a distributed application connected
  over a potentially asynchronous network.
  """
  # All bookkeeping state lives in Agent processes whose PIDs are held in
  # this struct (the "context" passed to every function):
  #   registration     — name => proxy PID
  #   rev_registration — PID (proxy or real) => name
  #   fuzz_chain       — list of fuzzers applied to delivered messages
  #   times            — name => monotonic-time base for per-process clocks
  #   unfuzzables      — set of names/PIDs whose messages bypass fuzzing
  defstruct(
    registration: nil,
    rev_registration: nil,
    fuzz_chain: nil,
    times: nil,
    unfuzzables: nil
  )

  require Fuzzers
  require Logger
  require MapSet

  @doc """
  Initialize the communication base.
  """
  @spec init() :: %ComBase{
          registration: pid(),
          rev_registration: pid(),
          fuzz_chain: pid(),
          times: pid(),
          unfuzzables: pid()
        }
  def init do
    Logger.debug("Initializing ComBase")
    {:ok, pid} = Agent.start_link(fn -> %{} end)
    {:ok, rpid} = Agent.start_link(fn -> %{} end)
    {:ok, times} = Agent.start_link(fn -> %{} end)
    {:ok, unfuzzables} = Agent.start_link(fn -> MapSet.new() end)
    # By always adding 0 delay, we make sure there is a queue but
    # messages are delayed
    {:ok, fchain} = Agent.start_link(fn -> [] end)
    Logger.debug("Registration map is #{inspect(pid)}")
    Logger.debug("Reverse Registration map is #{inspect(rpid)}")
    Logger.debug("Fuzz chain is #{inspect(fchain)}")
    Logger.debug("The set of unfuzzables is #{inspect(unfuzzables)}")

    %ComBase{
      registration: pid,
      rev_registration: rpid,
      fuzz_chain: fchain,
      times: times,
      unfuzzables: unfuzzables
    }
  end

  # True when `proc` (a name or PID) has been marked unfuzzable via
  # mark_unfuzzable/1.
  @spec unfuzzable?(%ComBase{unfuzzables: pid()}, atom() | pid()) :: boolean()
  defp unfuzzable?(ctx, proc) do
    Agent.get(ctx.unfuzzables, fn s -> MapSet.member?(s, proc) end)
  end

  # Best-effort kill of an emulated process; any error raised while
  # exiting is swallowed.
  defp unlink_and_exit(pid) do
    Process.exit(pid, :stop)
  catch
    _ -> true
  end

  # Best-effort stop of a bookkeeping Agent; an already-stopped agent
  # exits, which is caught and ignored.
  defp stop_linked_agent(pid) do
    Agent.stop(pid)
  catch
    :exit, _ -> true
  end

  @doc """
  Terminate communication base.
  """
  @spec terminate(%ComBase{
          registration: pid(),
          rev_registration: pid(),
          fuzz_chain: pid(),
          times: pid()
        }) :: :ok
  def terminate(ctx) do
    # Every PID in rev_registration (proxies and real processes) is
    # unlinked first so killing them does not take this process down.
    rev_reg = Agent.get(ctx.rev_registration, fn m -> m end)
    keys = Map.keys(rev_reg)
    Enum.each(keys, &Process.unlink/1)
    Enum.each(keys, &unlink_and_exit/1)
    stop_linked_agent(ctx.rev_registration)
    stop_linked_agent(ctx.registration)
    stop_linked_agent(ctx.fuzz_chain)
    stop_linked_agent(ctx.times)
  end

  # Current fuzzer chain; an empty configuration still yields a single
  # zero-delay fuzzer so messages are queued (but not delayed).
  defp get_fuzz_chain(ctx) do
    chain = Agent.get(ctx.fuzz_chain, fn f -> f end)

    if chain == [] do
      # Add a 0 delay to queue messages
      # before delivery.
      [Fuzzers.delay(0)]
    else
      chain
    end
  end

  # Builds the fuzzer chain used to deliver messages from `sender` to the
  # calling process; unfuzzable receivers get an empty chain.
  defp create_fuzzers(ctx, sender) do
    my_pid = self()
    my_id = whoami(ctx)

    fuzz_chain =
      if unfuzzable?(ctx, my_id) || unfuzzable?(ctx, my_pid) do
        []
      else
        get_fuzz_chain(ctx)
      end

    Fuzzers.build_fuzz_chain(my_pid, my_id, sender, fuzz_chain)
  end

  # Looks up (or lazily creates and caches) the per-sender fuzzer queue.
  defp recv_proxy_getq(ctx, queues, sender) do
    q = Agent.get(queues, fn q -> q[sender] end)

    if q do
      q
    else
      q = create_fuzzers(ctx, sender)
      Agent.update(queues, fn m -> Map.put(m, sender, q) end)
      q
    end
  end

  # Receive loop of the proxy that fronts every emulated process. Control
  # messages (timers) and regular messages are routed into the per-sender
  # fuzz queues; {:proc, ...}/{:control_proc, ...} are the post-fuzz forms
  # delivered on to the real process `proc`.
  defp recv_proxy_internal(ctx, proc, queues) do
    receive do
      {:control, m} ->
        q = recv_proxy_getq(ctx, queues, whoami(ctx))
        send(q, {:control_proc, m})

      {:msg, sender, msg} ->
        # The start of the chain.
        if unfuzzable?(ctx, sender) do
          # Do not fuzz messages from an unfuzzable sender
          send(proc, {sender, msg})
        else
          q = recv_proxy_getq(ctx, queues, sender)
          send(q, {:proc, sender, msg})
        end

      {:proc, sender, msg} ->
        send(proc, {sender, msg})

      {:control_proc, msg} ->
        send(proc, msg)

      m ->
        Logger.error(
          "Process #{whoami(ctx)} received message #{inspect(m)} " <>
            "that was not sent using emultion"
        )

        raise EmulatorError, message: "Message not sent using emulation"
    end

    recv_proxy_internal(ctx, proc, queues)
  end

  # Proxy entry point: allocates the per-sender queue map (an *unlinked*
  # Agent) and enters the receive loop.
  defp recv_proxy(ctx, proc) do
    {:ok, pid} = Agent.start(fn -> %{} end)
    recv_proxy_internal(ctx, proc, pid)
  end

  @doc """
  Get ID for the current process.

  Returns the registered name when the caller is part of the emulation,
  and the caller's raw PID otherwise.
  """
  @spec whoami(%ComBase{rev_registration: pid()}) :: atom() | pid()
  def whoami(ctx) do
    pid = self()

    case Agent.get(ctx.rev_registration, fn r -> r[pid] end) do
      nil ->
        Logger.info("Process #{inspect(pid)} is not registered.")
        self()

      id ->
        id
    end
  end

  @doc """
  Add to the list of fuzzers used when messages are received. Note
  this function must be called before any messages are sent.
  """
  @spec append_fuzzers(%ComBase{fuzz_chain: pid()}, [{atom(), float() | pid()}]) ::
          :ok
  def append_fuzzers(ctx, fuzzer_list) do
    Agent.update(ctx.fuzz_chain, fn f -> f ++ fuzzer_list end)
  end

  @doc """
  Get the current fuzzer list
  """
  @spec get_fuzzers(%ComBase{fuzz_chain: pid()}) :: [{atom(), float() | pid()}]
  def get_fuzzers(ctx) do
    Agent.get(ctx.fuzz_chain, fn f -> f end)
  end

  @doc """
  Send a message to the process named proc. Message can be
  anything. ctx should be the context in which the process
  was created. Returns `true` on send, `false` when `proc`
  cannot be resolved to a PID.
  """
  @spec send(
          %ComBase{registration: pid(), rev_registration: pid()},
          atom() | pid(),
          any()
        ) ::
          boolean()
  def send(ctx, proc, msg) do
    p =
      if is_pid(proc) do
        proc
      else
        Agent.get(ctx.registration, fn r -> r[proc] end)
      end

    src = whoami(ctx)
    # Destinations registered in the emulation get a tagged message routed
    # through their proxy; external PIDs receive the raw message.
    internal = Agent.get(ctx.rev_registration, fn r -> r[p] != nil end)

    if p do
      if internal do
        send(p, {:msg, src, msg})
        true
      else
        send(p, msg)
        true
      end
    else
      Logger.warn(
        "Could not translate #{inspect(proc)} into a PID, " <>
          "unable to send."
      )

      false
    end
  end

  @doc """
  Mark this process as one whose messages (i.e.,
  those sent or received by the process) should not
  be fuzzed. This is meant as an aid to testing, and
  should not be used by code not within a test.
  """
  @spec mark_unfuzzable(%ComBase{unfuzzables: pid()}) :: :ok
  def mark_unfuzzable(ctx) do
    p = whoami(ctx)
    Agent.update(ctx.unfuzzables, fn s -> MapSet.put(s, p) end)
  end

  @doc """
  Get a list of all registed processes
  """
  @spec list_proc(%ComBase{registration: pid()}) :: [atom()]
  def list_proc(ctx) do
    Agent.get(ctx.registration, fn m -> Map.keys(m) end)
  end

  @doc """
  Send message to all active processes.
  """
  # NOTE(review): the spec says boolean(), but the body returns the list
  # produced by Enum.map/2 — confirm callers and tighten the spec.
  @spec broadcast(%ComBase{registration: pid(), rev_registration: pid()}, any()) ::
          boolean()
  def broadcast(ctx, msg) do
    src = whoami(ctx)
    # We use send_after here to more closely resemble
    # the execution model presented in class.
    Agent.get(ctx.registration, fn
      r ->
        Enum.map(Map.values(r), fn dst ->
          Process.send_after(dst, {:msg, src, msg}, 0)
        end)
    end)
  end

  # We need to wait until the Spawned process
  # has finished registration, else there is a
  # race condition.
  defp spawn_helper(func) do
    receive do
      :go ->
        func.()

      _ ->
        # Messages sent before the process is active
        # just get dropped.
        spawn_helper(func)
    end
  end

  @doc """
  Spawn a process with supplied name and function.

  Raises `EmulatorError` when `name` is already registered.
  """
  @spec spawn(
          %ComBase{registration: pid(), rev_registration: pid(), times: pid()},
          atom(),
          (() -> any())
        ) :: pid()
  def spawn(ctx, name, f) do
    %{registration: p, rev_registration: r, times: t} = ctx

    if Agent.get(p, fn m -> Map.has_key?(m, name) end) do
      Logger.error(
        "Tried to spawn process with name #{inspect(name)} which is " <>
          " already registered."
      )

      raise EmulatorError, message: "Cannot spawn processes with the same name"
    else
      pid = spawn_link(fn -> spawn_helper(f) end)
      proxy_pid = spawn_link(fn -> recv_proxy(ctx, pid) end)
      # `registration` maps the name to the *proxy* (so sends go through
      # fuzzing), while `rev_registration` maps both PIDs back to the name
      # (so whoami/1 works from either process).
      Agent.update(p, fn m -> Map.put(m, name, proxy_pid) end)

      Agent.update(r, fn m ->
        Map.put(Map.put(m, proxy_pid, name), pid, name)
      end)

      Agent.update(t, fn m ->
        Map.put(m, name, System.monotonic_time())
      end)

      # Now we are registered, it is safe to start.
      send(pid, :go)

      Logger.debug(
        "Spawned #{inspect(name)} with " <>
          "main process: #{inspect(pid)} " <>
          "proxy: #{inspect(proxy_pid)}"
      )

      pid
    end
  end

  @doc """
  Set a timer for the given number of milliseconds, and send a message
  with the atom :timer when done.
  """
  @spec timer(
          %ComBase{
            rev_registration: pid(),
            registration: pid()
          },
          non_neg_integer()
        ) :: reference()
  def timer(ctx, ms) do
    timer(ctx, ms, :timer)
  end

  @doc """
  Set a timer for the given number of milliseconds, and send a message
  with the atom `message` when done.

  Raises `EmulatorError` when the caller is not a registered process.
  """
  @spec timer(
          %ComBase{
            rev_registration: pid(),
            registration: pid()
          },
          non_neg_integer(),
          atom()
        ) :: reference()
  def timer(ctx, ms, atom) do
    src = whoami(ctx)
    # The timer is delivered to the caller's proxy as a control message.
    p = Agent.get(ctx.registration, fn r -> r[src] end)

    if p do
      Process.send_after(p, {:control, atom}, ms)
    else
      Logger.error(
        "Process #{inspect(self())} outside emultion tried setting " <>
          "timer"
      )

      raise EmulatorError, message: "Timer set by process not in emulations"
    end
  end

  # Get current time at process `process`.
  @spec time_at_process(%ComBase{times: pid()}, atom()) :: number()
  defp time_at_process(ctx, process) do
    System.monotonic_time() - Agent.get(ctx.times, fn m -> m[process] end)
  end

  @doc """
  Translate `time` (gotten from `System.monotonic_time()`) to time
  at process `p`. This should only be used for testing.
  """
  @spec translate_time(%ComBase{times: pid()}, atom(), number()) :: number()
  def translate_time(ctx, p, time) do
    time - Agent.get(ctx.times, fn m -> m[p] end)
  end

  @doc """
  Get current time.
  """
  @spec now(%ComBase{times: pid()}) :: number()
  def now(ctx) do
    me = whoami(ctx)

    if is_pid(me) do
      raise(EmulatorError, message: "Process #{inspect(me)} " <> "
      outside emulation asking for time")
    else
      time_at_process(ctx, me)
    end
  end

  @doc """
  Set current time.
  """
  @spec set_time(%ComBase{times: pid()}, number()) :: :ok
  def set_time(ctx, time) do
    me = whoami(ctx)

    if is_pid(me) do
      raise(EmulatorError, message: "Process #{inspect(me)} " <> "
      outside emulation asking for time")
    else
      # Store a monotonic base such that now/1 returns `time` immediately
      # after this call.
      base = System.monotonic_time() - time
      Agent.update(ctx.times, fn m -> Map.put(m, me, base) end)
      :ok
    end
  end
end
| 25.310185 | 83 | 0.588988 |
0824dbbe10f9673c41cb53be152d0d2bc00c7bfb | 10,836 | ex | Elixir | clients/content/lib/google_api/content/v2/api/datafeedstatuses.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/api/datafeedstatuses.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/content/lib/google_api/content/v2/api/datafeedstatuses.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V2.Api.Datafeedstatuses do
  @moduledoc """
  API calls for all endpoints tagged `Datafeedstatuses`.
  """

  alias GoogleApi.Content.V2.Connection
  alias GoogleApi.Gax.{Request, Response}

  # Reported with each request so the API can identify the client library
  # version; falls back to "" when no version is configured.
  @library_version Mix.Project.config() |> Keyword.get(:version, "")

  @doc """
  Gets multiple Merchant Center datafeed statuses in a single request.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Content.V2.Connection.t`) - Connection to server
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
      *   `:access_token` (*type:* `String.t`) - OAuth access token.
      *   `:alt` (*type:* `String.t`) - Data format for response.
      *   `:callback` (*type:* `String.t`) - JSONP
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
      *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
      *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
      *   `:body` (*type:* `GoogleApi.Content.V2.Model.DatafeedstatusesCustomBatchRequest.t`) -
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.Content.V2.Model.DatafeedstatusesCustomBatchResponse{}}` on success
  *   `{:error, info}` on failure
  """
  @spec content_datafeedstatuses_custombatch(Tesla.Env.client(), keyword(), keyword()) ::
          {:ok, GoogleApi.Content.V2.Model.DatafeedstatusesCustomBatchResponse.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def content_datafeedstatuses_custombatch(connection, optional_params \\ [], opts \\ []) do
    # Maps each supported optional parameter to where it is placed in the
    # HTTP request (:query string vs. request :body).
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :body => :body
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/content/v2/datafeedstatuses/batch", %{})
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.Content.V2.Model.DatafeedstatusesCustomBatchResponse{}]
    )
  end

  @doc """
  Retrieves the status of a datafeed from your Merchant Center account.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Content.V2.Connection.t`) - Connection to server
  *   `merchant_id` (*type:* `String.t`) - The ID of the account that manages the datafeed. This account cannot be a multi-client account.
  *   `datafeed_id` (*type:* `String.t`) - The ID of the datafeed.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
      *   `:access_token` (*type:* `String.t`) - OAuth access token.
      *   `:alt` (*type:* `String.t`) - Data format for response.
      *   `:callback` (*type:* `String.t`) - JSONP
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
      *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
      *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
      *   `:country` (*type:* `String.t`) - The country for which to get the datafeed status. If this parameter is provided then language must also be provided. Note that this parameter is required for feeds targeting multiple countries and languages, since a feed may have a different status for each target.
      *   `:language` (*type:* `String.t`) - The language for which to get the datafeed status. If this parameter is provided then country must also be provided. Note that this parameter is required for feeds targeting multiple countries and languages, since a feed may have a different status for each target.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.Content.V2.Model.DatafeedStatus{}}` on success
  *   `{:error, info}` on failure
  """
  @spec content_datafeedstatuses_get(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.Content.V2.Model.DatafeedStatus.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def content_datafeedstatuses_get(
        connection,
        merchant_id,
        datafeed_id,
        optional_params \\ [],
        opts \\ []
      ) do
    # All optional parameters for this endpoint travel in the query string.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :country => :query,
      :language => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/content/v2/{merchantId}/datafeedstatuses/{datafeedId}", %{
        "merchantId" => URI.encode(merchant_id, &URI.char_unreserved?/1),
        "datafeedId" => URI.encode(datafeed_id, &(URI.char_unreserved?(&1) || &1 == ?/))
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Content.V2.Model.DatafeedStatus{}])
  end

  @doc """
  Lists the statuses of the datafeeds in your Merchant Center account.

  ## Parameters

  *   `connection` (*type:* `GoogleApi.Content.V2.Connection.t`) - Connection to server
  *   `merchant_id` (*type:* `String.t`) - The ID of the account that manages the datafeeds. This account cannot be a multi-client account.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
      *   `:access_token` (*type:* `String.t`) - OAuth access token.
      *   `:alt` (*type:* `String.t`) - Data format for response.
      *   `:callback` (*type:* `String.t`) - JSONP
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
      *   `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
      *   `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
      *   `:maxResults` (*type:* `integer()`) - The maximum number of products to return in the response, used for paging.
      *   `:pageToken` (*type:* `String.t`) - The token returned by the previous request.
  *   `opts` (*type:* `keyword()`) - Call options

  ## Returns

  *   `{:ok, %GoogleApi.Content.V2.Model.DatafeedstatusesListResponse{}}` on success
  *   `{:error, info}` on failure
  """
  @spec content_datafeedstatuses_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.Content.V2.Model.DatafeedstatusesListResponse.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def content_datafeedstatuses_list(connection, merchant_id, optional_params \\ [], opts \\ []) do
    # All optional parameters for this endpoint travel in the query string.
    optional_params_config = %{
      :"$.xgafv" => :query,
      :access_token => :query,
      :alt => :query,
      :callback => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :uploadType => :query,
      :upload_protocol => :query,
      :maxResults => :query,
      :pageToken => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/content/v2/{merchantId}/datafeedstatuses", %{
        "merchantId" => URI.encode(merchant_id, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.Content.V2.Model.DatafeedstatusesListResponse{}]
    )
  end
end
| 47.113043 | 310 | 0.63732 |
08251d84488923a0265d479df2780ae95371db7f | 5,504 | exs | Elixir | .credo.exs | r-icarus/torch | c5c94001b91fc459d727b18fa137b6936a65918d | [
"MIT"
] | null | null | null | .credo.exs | r-icarus/torch | c5c94001b91fc459d727b18fa137b6936a65918d | [
"MIT"
] | null | null | null | .credo.exs | r-icarus/torch | c5c94001b91fc459d727b18fa137b6936a65918d | [
"MIT"
] | null | null | null | # This file contains the configuration for Credo and you are probably reading
# this after creating it with `mix credo.gen.config`.
#
# If you find anything wrong or unclear in this file, please report an
# issue on GitHub: https://github.com/rrrene/credo/issues
#
%{
#
# You can have as many configs as you like in the `configs:` field.
configs: [
%{
#
# Run any exec using `mix credo -C <name>`. If no exec name is given
# "default" is used.
#
name: "default",
#
# These are the files included in the analysis:
files: %{
#
# You can give explicit globs or simply directories.
# In the latter case `**/*.{ex,exs}` will be used.
#
included: ["lib/", "src/", "web/", "apps/"],
excluded: [~r"/_build/", ~r"/deps/"]
},
#
# If you create your own checks, you must specify the source files for
# them here, so they can be loaded by Credo before running the analysis.
#
requires: [],
#
# If you want to enforce a style guide and need a more traditional linting
# experience, you can change `strict` to `true` below:
#
strict: false,
#
# If you want to use uncolored output by default, you can change `color`
# to `false` below:
#
color: true,
#
# You can customize the parameters of any check by adding a second element
# to the tuple.
#
# To disable a check put `false` as second element:
#
# {Credo.Check.Design.DuplicatedCode, false}
#
checks: [
{Credo.Check.Consistency.ExceptionNames},
{Credo.Check.Consistency.LineEndings},
{Credo.Check.Consistency.ParameterPatternMatching},
{Credo.Check.Consistency.SpaceAroundOperators},
{Credo.Check.Consistency.SpaceInParentheses},
{Credo.Check.Consistency.TabsOrSpaces},
# You can customize the priority of any check
# Priority values are: `low, normal, high, higher`
#
{Credo.Check.Design.AliasUsage, priority: :low},
# For some checks, you can also set other parameters
#
# If you don't want the `setup` and `test` macro calls in ExUnit tests
# or the `schema` macro in Ecto schemas to trigger DuplicatedCode, just
# set the `excluded_macros` parameter to `[:schema, :setup, :test]`.
#
{Credo.Check.Design.DuplicatedCode, excluded_macros: []},
# You can also customize the exit_status of each check.
# If you don't want TODO comments to cause `mix credo` to fail, just
# set this value to 0 (zero).
#
{Credo.Check.Design.TagTODO, exit_status: 2},
{Credo.Check.Design.TagFIXME},
{Credo.Check.Readability.FunctionNames},
{Credo.Check.Readability.LargeNumbers},
{Credo.Check.Readability.MaxLineLength, priority: :low, max_length: 100},
{Credo.Check.Readability.ModuleAttributeNames},
{Credo.Check.Readability.ModuleDoc},
{Credo.Check.Readability.ModuleNames},
{Credo.Check.Readability.ParenthesesOnZeroArityDefs},
{Credo.Check.Readability.ParenthesesInCondition},
{Credo.Check.Readability.PredicateFunctionNames},
{Credo.Check.Readability.PreferImplicitTry},
{Credo.Check.Readability.RedundantBlankLines},
{Credo.Check.Readability.StringSigils},
{Credo.Check.Readability.TrailingBlankLine, false},
{Credo.Check.Readability.TrailingWhiteSpace},
{Credo.Check.Readability.VariableNames},
{Credo.Check.Readability.Semicolons},
{Credo.Check.Readability.SpaceAfterCommas},
{Credo.Check.Refactor.DoubleBooleanNegation},
{Credo.Check.Refactor.CondStatements},
{Credo.Check.Refactor.CyclomaticComplexity},
{Credo.Check.Refactor.FunctionArity},
{Credo.Check.Refactor.LongQuoteBlocks},
{Credo.Check.Refactor.MatchInCondition},
{Credo.Check.Refactor.NegatedConditionsInUnless},
{Credo.Check.Refactor.NegatedConditionsWithElse},
{Credo.Check.Refactor.Nesting},
{Credo.Check.Refactor.PipeChainStart},
{Credo.Check.Refactor.UnlessWithElse},
{Credo.Check.Warning.BoolOperationOnSameValues},
{Credo.Check.Warning.ExpensiveEmptyEnumCheck},
{Credo.Check.Warning.IExPry},
{Credo.Check.Warning.IoInspect},
{Credo.Check.Warning.LazyLogging},
{Credo.Check.Warning.OperationOnSameValues},
{Credo.Check.Warning.OperationWithConstantResult},
{Credo.Check.Warning.UnusedEnumOperation},
{Credo.Check.Warning.UnusedFileOperation},
{Credo.Check.Warning.UnusedKeywordOperation},
{Credo.Check.Warning.UnusedListOperation},
{Credo.Check.Warning.UnusedPathOperation},
{Credo.Check.Warning.UnusedRegexOperation},
{Credo.Check.Warning.UnusedStringOperation},
{Credo.Check.Warning.UnusedTupleOperation},
{Credo.Check.Warning.RaiseInsideRescue},
# Controversial and experimental checks (opt-in, just remove `, false`)
#
{Credo.Check.Refactor.ABCSize, false},
{Credo.Check.Refactor.VariableRebinding},
{Credo.Check.Refactor.AppendSingleItem, false},
{Credo.Check.Warning.MapGetUnsafePass},
{Credo.Check.Consistency.MultiAliasImportRequireUse, false}
# Custom checks can be created using `mix credo.gen.check`.
#
]
}
]
}
| 40.470588 | 81 | 0.650436 |
0825240443a9e64bbfd82bdbba353b586c1ace37 | 3,434 | ex | Elixir | lib/csv/decoding/parser.ex | parkerduckworth/csv | 7a3545fe1d16bd76d30751b66b611f636127c4e0 | [
"MIT"
] | null | null | null | lib/csv/decoding/parser.ex | parkerduckworth/csv | 7a3545fe1d16bd76d30751b66b611f636127c4e0 | [
"MIT"
] | null | null | null | lib/csv/decoding/parser.ex | parkerduckworth/csv | 7a3545fe1d16bd76d30751b66b611f636127c4e0 | [
"MIT"
] | null | null | null | defmodule CSV.Decoding.Parser do
alias CSV.EscapeSequenceError
alias CSV.StrayQuoteError
@moduledoc ~S"""
The CSV Parser module - parses tokens coming from the lexer and parses them
into a row of fields.
"""
@doc """
Parses tokens by receiving them from a sender / lexer and sending them to
the given receiver process (the decoder).
## Options
Options get transferred from the decoder. They are:
* `:strip_fields` – When set to true, will strip whitespace from fields.
Defaults to false.
"""
def parse(message, options \\ [])
def parse({tokens, index}, options) do
case parse([], "", tokens, :unescaped, options) do
{:ok, row} -> {:ok, row, index}
{:error, type, message} -> {:error, type, message, index}
end
end
def parse({:error, mod, message, index}, _) do
{:error, mod, message, index}
end
defp parse(row, field, [token | tokens], :inline_quote, options) do
case token do
{:double_quote, content} ->
parse(row, field <> content, tokens, :unescaped, options)
_ ->
parse(row, field, :stray_quote, nil)
end
end
defp parse(row, field, [token | tokens], :inline_quote_in_escaped, options) do
case token do
{:double_quote, content} ->
parse(row, field <> content, tokens, :escaped, options)
{:separator, _} ->
parse(row ++ [field |> strip(options)], "", tokens, :unescaped, options)
{:delimiter, _} ->
parse(row, field, tokens, :unescaped, options)
_ ->
parse(row, field, :stray_quote, nil)
end
end
defp parse(row, field, [token | tokens], :escaped, options) do
case token do
{:double_quote, _} ->
parse(row, field, tokens, :inline_quote_in_escaped, options)
{_, content} ->
parse(row, field <> content, tokens, :escaped, options)
end
end
defp parse(_, field, [], :escaped, _) do
{:error, EscapeSequenceError, field}
end
defp parse(_, field, [], :inline_quote, _) do
{:error, StrayQuoteError, field}
end
defp parse(row, "", [token | tokens], :unescaped, options) do
case token do
{:content, content} ->
parse(row, content, tokens, :unescaped, options)
{:separator, _} ->
parse(row ++ [""], "", tokens, :unescaped, options)
{:delimiter, _} ->
parse(row, "", tokens, :unescaped, options)
{:double_quote, _} ->
parse(row, "", tokens, :escaped, options)
end
end
defp parse(row, field, [token | tokens], :unescaped, options) do
case token do
{:content, content} ->
parse(row, field <> content, tokens, :unescaped, options)
{:separator, _} ->
parse(row ++ [field |> strip(options)], "", tokens, :unescaped, options)
{:delimiter, _} ->
parse(row, field, tokens, :unescaped, options)
{:double_quote, _} ->
parse(row, field, tokens, :inline_quote, options)
end
end
defp parse(row, field, [], :inline_quote_in_escaped, options) do
{:ok, row ++ [field |> strip(options)]}
end
defp parse(row, field, [], :unescaped, options) do
{:ok, row ++ [field |> strip(options)]}
end
defp parse(row, field, :stray_quote, _) do
{:ok, row}
end
defp strip(field, options) do
strip_fields = options |> Keyword.get(:strip_fields, false)
case strip_fields do
true -> field |> String.trim()
_ -> field
end
end
end
| 26.015152 | 80 | 0.604543 |
08253cf9e503034110e9d565a9e575a85e9c453f | 3,298 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/live_chat_moderator_list_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/you_tube/lib/google_api/you_tube/v3/model/live_chat_moderator_list_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/you_tube/lib/google_api/you_tube/v3/model/live_chat_moderator_list_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.YouTube.V3.Model.LiveChatModeratorListResponse do
  @moduledoc """
  ## Attributes

  * `etag` (*type:* `String.t`, *default:* `nil`) - Etag of this resource.
  * `eventId` (*type:* `String.t`, *default:* `nil`) - Serialized EventId of the request which produced this response.
  * `items` (*type:* `list(GoogleApi.YouTube.V3.Model.LiveChatModerator.t)`, *default:* `nil`) - A list of moderators that match the request criteria.
  * `kind` (*type:* `String.t`, *default:* `youtube#liveChatModeratorListResponse`) - Identifies what kind of resource this is. Value: the fixed string "youtube#liveChatModeratorListResponse".
  * `nextPageToken` (*type:* `String.t`, *default:* `nil`) - The token that can be used as the value of the pageToken parameter to retrieve the next page in the result set.
  * `pageInfo` (*type:* `GoogleApi.YouTube.V3.Model.PageInfo.t`, *default:* `nil`) - General pagination information.
  * `prevPageToken` (*type:* `String.t`, *default:* `nil`) - The token that can be used as the value of the pageToken parameter to retrieve the previous page in the result set.
  * `tokenPagination` (*type:* `GoogleApi.YouTube.V3.Model.TokenPagination.t`, *default:* `nil`) -
  * `visitorId` (*type:* `String.t`, *default:* `nil`) - The visitorId identifies the visitor.
  """

  # NOTE(review): GoogleApi.Gax.ModelBase presumably supplies the `field/1,2,3`
  # macros used below and the `decode/2` called by the Poison.Decoder defimpl —
  # confirm against that module.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :etag => String.t() | nil,
          :eventId => String.t() | nil,
          :items => list(GoogleApi.YouTube.V3.Model.LiveChatModerator.t()) | nil,
          :kind => String.t() | nil,
          :nextPageToken => String.t() | nil,
          :pageInfo => GoogleApi.YouTube.V3.Model.PageInfo.t() | nil,
          :prevPageToken => String.t() | nil,
          :tokenPagination => GoogleApi.YouTube.V3.Model.TokenPagination.t() | nil,
          :visitorId => String.t() | nil
        }

  # One field declaration per key in @type t above; nested model fields name
  # their decoder module via `as:`.
  field(:etag)
  field(:eventId)
  field(:items, as: GoogleApi.YouTube.V3.Model.LiveChatModerator, type: :list)
  field(:kind)
  field(:nextPageToken)
  field(:pageInfo, as: GoogleApi.YouTube.V3.Model.PageInfo)
  field(:prevPageToken)
  field(:tokenPagination, as: GoogleApi.YouTube.V3.Model.TokenPagination)
  field(:visitorId)
end
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.LiveChatModeratorListResponse do
  # Delegate to the model module's decode/2.
  def decode(value, options),
    do: GoogleApi.YouTube.V3.Model.LiveChatModeratorListResponse.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.LiveChatModeratorListResponse do
  # Delegate struct encoding to the shared GoogleApi.Gax.ModelBase.encode/2.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 46.450704 | 194 | 0.702547 |
08255d610024e9333339a71a240123eaf2154e39 | 5,784 | ex | Elixir | clients/docs/lib/google_api/docs/v1/model/paragraph_style_suggestion_state.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/docs/lib/google_api/docs/v1/model/paragraph_style_suggestion_state.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/docs/lib/google_api/docs/v1/model/paragraph_style_suggestion_state.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Docs.V1.Model.ParagraphStyleSuggestionState do
  @moduledoc """
  A mask that indicates which of the fields on the base ParagraphStyle have been changed in this suggestion. For any field set to true, there is a new suggested value.

  ## Attributes

  * `alignmentSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to alignment.
  * `avoidWidowAndOrphanSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to avoid_widow_and_orphan.
  * `borderBetweenSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to border_between.
  * `borderBottomSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to border_bottom.
  * `borderLeftSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to border_left.
  * `borderRightSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to border_right.
  * `borderTopSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to border_top.
  * `directionSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to direction.
  * `headingIdSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to heading_id.
  * `indentEndSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to indent_end.
  * `indentFirstLineSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to indent_first_line.
  * `indentStartSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to indent_start.
  * `keepLinesTogetherSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to keep_lines_together.
  * `keepWithNextSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to keep_with_next.
  * `lineSpacingSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to line_spacing.
  * `namedStyleTypeSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to named_style_type.
  * `shadingSuggestionState` (*type:* `GoogleApi.Docs.V1.Model.ShadingSuggestionState.t`, *default:* `nil`) - A mask that indicates which of the fields in shading have been changed in this suggestion.
  * `spaceAboveSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to space_above.
  * `spaceBelowSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to space_below.
  * `spacingModeSuggested` (*type:* `boolean()`, *default:* `nil`) - Indicates if there was a suggested change to spacing_mode.
  """

  # NOTE(review): GoogleApi.Gax.ModelBase presumably supplies the `field/1,2`
  # macros used below and the `decode/2` called by the Poison.Decoder defimpl —
  # confirm against that module.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :alignmentSuggested => boolean(),
          :avoidWidowAndOrphanSuggested => boolean(),
          :borderBetweenSuggested => boolean(),
          :borderBottomSuggested => boolean(),
          :borderLeftSuggested => boolean(),
          :borderRightSuggested => boolean(),
          :borderTopSuggested => boolean(),
          :directionSuggested => boolean(),
          :headingIdSuggested => boolean(),
          :indentEndSuggested => boolean(),
          :indentFirstLineSuggested => boolean(),
          :indentStartSuggested => boolean(),
          :keepLinesTogetherSuggested => boolean(),
          :keepWithNextSuggested => boolean(),
          :lineSpacingSuggested => boolean(),
          :namedStyleTypeSuggested => boolean(),
          :shadingSuggestionState => GoogleApi.Docs.V1.Model.ShadingSuggestionState.t(),
          :spaceAboveSuggested => boolean(),
          :spaceBelowSuggested => boolean(),
          :spacingModeSuggested => boolean()
        }

  # One field declaration per key in @type t above; the nested suggestion-state
  # field names its decoder module via `as:`.
  field(:alignmentSuggested)
  field(:avoidWidowAndOrphanSuggested)
  field(:borderBetweenSuggested)
  field(:borderBottomSuggested)
  field(:borderLeftSuggested)
  field(:borderRightSuggested)
  field(:borderTopSuggested)
  field(:directionSuggested)
  field(:headingIdSuggested)
  field(:indentEndSuggested)
  field(:indentFirstLineSuggested)
  field(:indentStartSuggested)
  field(:keepLinesTogetherSuggested)
  field(:keepWithNextSuggested)
  field(:lineSpacingSuggested)
  field(:namedStyleTypeSuggested)
  field(:shadingSuggestionState, as: GoogleApi.Docs.V1.Model.ShadingSuggestionState)
  field(:spaceAboveSuggested)
  field(:spaceBelowSuggested)
  field(:spacingModeSuggested)
end
defimpl Poison.Decoder, for: GoogleApi.Docs.V1.Model.ParagraphStyleSuggestionState do
  # Delegate to the model module's decode/2.
  def decode(value, options),
    do: GoogleApi.Docs.V1.Model.ParagraphStyleSuggestionState.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Docs.V1.Model.ParagraphStyleSuggestionState do
  # Delegate struct encoding to the shared GoogleApi.Gax.ModelBase.encode/2.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 55.615385 | 202 | 0.713347 |
0825897a22ff76bca1025fe10d95c5cb89849690 | 816 | ex | Elixir | lib/addict/interactors/login.ex | mainframe2/addict | aa70768f20939bf1f4d36a680240cb32f36e2a79 | [
"MIT"
] | null | null | null | lib/addict/interactors/login.ex | mainframe2/addict | aa70768f20939bf1f4d36a680240cb32f36e2a79 | [
"MIT"
] | null | null | null | lib/addict/interactors/login.ex | mainframe2/addict | aa70768f20939bf1f4d36a680240cb32f36e2a79 | [
"MIT"
] | null | null | null | defmodule Addict.Interactors.Login do
@moduledoc """
Verifies if the `password` is correct for the provided `email`
Returns `{:ok, user}` or `{:error, [errors]}`
"""
alias Addict.Interactors.{GetUserByEmail, VerifyPassword}
def call(%{"email" => email, "password" => password}, configs \\ Addict.Configs) do
extra_login_validation = configs.extra_login_validation || fn a -> {:ok, a} end
before_login_validation = configs.before_login_validation || fn a -> {:ok, a} end
with {:ok, user} <- GetUserByEmail.call(email),
{:ok, _} <- Addict.Helper.exec(before_login_validation, [user]),
{:ok} <- VerifyPassword.call(user, password),
{:ok, _} <- Addict.Helper.exec(extra_login_validation, [user]) do
{:ok, user}
else
error -> error
end
end
end
| 34 | 85 | 0.650735 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.