hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1cef7600a41b677d11fb73cc524942546b82d445 | 511 | ex | Elixir | lib/foundation_phoenix_web/views/error_view.ex | PanyPy/foundation_phoenix | 571aaa0b274c1428b4cf8e370777ae82c9167eb2 | [
"MIT"
] | null | null | null | lib/foundation_phoenix_web/views/error_view.ex | PanyPy/foundation_phoenix | 571aaa0b274c1428b4cf8e370777ae82c9167eb2 | [
"MIT"
] | 2 | 2021-03-10T11:28:36.000Z | 2021-05-11T07:00:39.000Z | lib/foundation_phoenix_web/views/error_view.ex | PanyPy/foundation_phoenix | 571aaa0b274c1428b4cf8e370777ae82c9167eb2 | [
"MIT"
] | null | null | null | defmodule FoundationPhoenixWeb.ErrorView do
use FoundationPhoenixWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.html", _assigns) do
# "Internal Server Error"
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.html" becomes
# "Not Found".
def template_not_found(template, _assigns) do
Phoenix.Controller.status_message_from_template(template)
end
end
| 30.058824 | 61 | 0.745597 |
1cef8e029fad43424605c512b3dfe22fd4cecf7d | 14,924 | exs | Elixir | test/ecto/query/planner_test.exs | ashneyderman/ecto | 16f27f64c5ca2480568fad10e40c26522ffbf793 | [
"Apache-2.0"
] | null | null | null | test/ecto/query/planner_test.exs | ashneyderman/ecto | 16f27f64c5ca2480568fad10e40c26522ffbf793 | [
"Apache-2.0"
] | null | null | null | test/ecto/query/planner_test.exs | ashneyderman/ecto | 16f27f64c5ca2480568fad10e40c26522ffbf793 | [
"Apache-2.0"
] | null | null | null | Code.require_file "../../../integration_test/support/types.exs", __DIR__
defmodule Ecto.Query.PlannerTest do
use ExUnit.Case, async: true
import Ecto.Query
alias Ecto.Query.Planner
alias Ecto.Query.JoinExpr
defmodule Comment do
use Ecto.Model
schema "comments" do
field :text, :string
field :temp, :string, virtual: true
field :posted, Ecto.DateTime
field :uuid, :binary_id
belongs_to :post, Ecto.Query.PlannerTest.Post
has_many :post_comments, through: [:post, :comments]
end
end
defmodule Post do
use Ecto.Model
@primary_key {:id, Custom.Permalink, []}
schema "posts" do
field :title, :string
field :text, :string
field :code, :binary
field :posted, :datetime
field :visits, :integer
has_many :comments, Ecto.Query.PlannerTest.Comment
end
end
defp prepare(query, operation \\ :all, params \\ []) do
Planner.prepare(query, operation, params, %{binary_id: Ecto.UUID})
end
defp normalize(query, operation \\ :all, params \\ []) do
{query, params} = prepare(query, operation, params)
Planner.normalize(query, operation, params)
end
defp normalize_with_params(query) do
{query, params} = prepare(query, :all, [])
{Planner.normalize(query, :all, []), params}
end
test "prepare: merges all parameters" do
query =
from p in Post,
select: {p.title, ^"0"},
join: c in Comment,
on: c.text == ^"1",
left_join: d in assoc(p, :comments),
where: p.title == ^"2",
group_by: p.title == ^"3",
having: p.title == ^"4",
order_by: [asc: fragment("?", ^"5")],
limit: ^6,
offset: ^7,
preload: [post: d]
{_query, params} = prepare(query)
assert params == ["0", "1", "2", "3", "4", "5", 6, 7]
end
test "prepare: checks from" do
assert_raise Ecto.QueryError, ~r"query must have a from expression", fn ->
prepare(%Ecto.Query{})
end
end
test "prepare: casts values" do
{_query, params} = prepare(Post |> where([p], p.id == ^"1"))
assert params == [1]
exception = assert_raise Ecto.CastError, fn ->
prepare(Post |> where([p], p.title == ^nil))
end
assert Exception.message(exception) =~ "value `nil` in `where` cannot be cast to type :string"
assert Exception.message(exception) =~ "where: p.title == ^nil"
assert Exception.message(exception) =~ "Error when casting value to `#{inspect Post}.title`"
exception = assert_raise Ecto.CastError, fn ->
prepare(Post |> where([p], p.title == ^1))
end
assert Exception.message(exception) =~ "value `1` in `where` cannot be cast to type :string"
assert Exception.message(exception) =~ "where: p.title == ^1"
assert Exception.message(exception) =~ "Error when casting value to `#{inspect Post}.title`"
end
test "prepare: casts and dumps custom types" do
datetime = %Ecto.DateTime{year: 2015, month: 1, day: 7, hour: 21, min: 18, sec: 13, usec: 0}
{_query, params} = prepare(Comment |> where([c], c.posted == ^datetime))
assert params == [{{2015, 1, 7}, {21, 18, 13, 0}}]
permalink = "1-hello-world"
{_query, params} = prepare(Post |> where([p], p.id == ^permalink))
assert params == [1]
end
test "prepare: casts and dumps custom types to native ones" do
datetime = %Ecto.DateTime{year: 2015, month: 1, day: 7, hour: 21, min: 18, sec: 13, usec: 0}
{_query, params} = prepare(Post |> where([p], p.posted == ^datetime))
assert params == [{{2015, 1, 7}, {21, 18, 13, 0}}]
end
test "prepare: casts and dumps binary ids" do
uuid = "00010203-0405-0607-0809-0a0b0c0d0e0f"
{_query, params} = prepare(Comment |> where([c], c.uuid == ^uuid))
assert params == [%Ecto.Query.Tagged{type: :uuid,
value: <<0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15>>}]
end
test "prepare: casts and dumps custom types in in-expressions" do
datetime = %Ecto.DateTime{year: 2015, month: 1, day: 7, hour: 21, min: 18, sec: 13, usec: 0}
{_query, params} = prepare(Comment |> where([c], c.posted in ^[datetime]))
assert params == [{{2015, 1, 7}, {21, 18, 13, 0}}]
permalink = "1-hello-world"
{_query, params} = prepare(Post |> where([p], p.id in ^[permalink]))
assert params == [1]
datetime = %Ecto.DateTime{year: 2015, month: 1, day: 7, hour: 21, min: 18, sec: 13, usec: 0}
{_query, params} = prepare(Comment |> where([c], c.posted in [^datetime]))
assert params == [{{2015, 1, 7}, {21, 18, 13, 0}}]
permalink = "1-hello-world"
{_query, params} = prepare(Post |> where([p], p.id in [^permalink]))
assert params == [1]
{_query, params} = prepare(Post |> where([p], p.code in [^"abcd"]))
assert params == [%Ecto.Query.Tagged{tag: nil, type: :binary, value: "abcd"}]
{_query, params} = prepare(Post |> where([p], p.code in ^["abcd"]))
assert params == [%Ecto.Query.Tagged{tag: nil, type: :binary, value: "abcd"}]
end
test "prepare: casts values on update_all" do
{_query, params} = prepare(Post |> update([p], set: [id: ^"1"]), :update_all)
assert params == [1]
{_query, params} = prepare(Post |> update([p], set: [title: ^nil]), :update_all)
assert params == [%Ecto.Query.Tagged{type: :string, value: nil}]
{_query, params} = prepare(Post |> update([p], set: [title: nil]), :update_all)
assert params == []
end
test "prepare: joins" do
query = from(p in Post, join: c in "comments") |> prepare |> elem(0)
assert hd(query.joins).source == {"comments", nil}
query = from(p in Post, join: c in Comment) |> prepare |> elem(0)
assert hd(query.joins).source == {"comments", Comment}
query = from(p in Post, join: c in {"post_comments", Comment}) |> prepare |> elem(0)
assert hd(query.joins).source == {"post_comments", Comment}
end
test "prepare: joins associations" do
query = from(p in Post, join: assoc(p, :comments)) |> prepare |> elem(0)
assert %JoinExpr{on: on, source: source, assoc: nil, qual: :inner} = hd(query.joins)
assert source == {"comments", Comment}
assert Macro.to_string(on.expr) == "&1.post_id() == &0.id()"
query = from(p in Post, left_join: assoc(p, :comments)) |> prepare |> elem(0)
assert %JoinExpr{on: on, source: source, assoc: nil, qual: :left} = hd(query.joins)
assert source == {"comments", Comment}
assert Macro.to_string(on.expr) == "&1.post_id() == &0.id()"
end
test "prepare: nested joins associations" do
query = from(c in Comment, left_join: assoc(c, :post_comments)) |> prepare |> elem(0)
assert {{"comments", _}, {"comments", _}, {"posts", _}} = query.sources
assert [join1, join2] = query.joins
assert Enum.map(query.joins, & &1.ix) == [2, 1]
assert Macro.to_string(join1.on.expr) == "&2.id() == &0.post_id()"
assert Macro.to_string(join2.on.expr) == "&1.post_id() == &2.id()"
query = from(p in Comment, left_join: assoc(p, :post),
left_join: assoc(p, :post_comments)) |> prepare |> elem(0)
assert {{"comments", _}, {"posts", _}, {"comments", _}, {"posts", _}} = query.sources
assert [join1, join2, join3] = query.joins
assert Enum.map(query.joins, & &1.ix) == [1, 3, 2]
assert Macro.to_string(join1.on.expr) == "&1.id() == &0.post_id()"
assert Macro.to_string(join2.on.expr) == "&3.id() == &0.post_id()"
assert Macro.to_string(join3.on.expr) == "&2.post_id() == &3.id()"
query = from(p in Comment, left_join: assoc(p, :post_comments),
left_join: assoc(p, :post)) |> prepare |> elem(0)
assert {{"comments", _}, {"comments", _}, {"posts", _}, {"posts", _}} = query.sources
assert [join1, join2, join3] = query.joins
assert Enum.map(query.joins, & &1.ix) == [3, 1, 2]
assert Macro.to_string(join1.on.expr) == "&3.id() == &0.post_id()"
assert Macro.to_string(join2.on.expr) == "&1.post_id() == &3.id()"
assert Macro.to_string(join3.on.expr) == "&2.id() == &0.post_id()"
end
test "prepare: cannot associate without model" do
query = from(p in "posts", join: assoc(p, :comments))
message = ~r"cannot perform association join on \"posts\" because it does not have a model"
assert_raise Ecto.QueryError, message, fn ->
prepare(query)
end
end
test "prepare: requires an association field" do
query = from(p in Post, join: assoc(p, :title))
assert_raise Ecto.QueryError, ~r"could not find association `title`", fn ->
prepare(query)
end
end
test "normalize: tagged types" do
{query, params} = from(Post, []) |> select([p], type(^"1", :integer))
|> normalize_with_params
assert query.select.expr ==
%Ecto.Query.Tagged{type: :integer, value: {:^, [], [0]}, tag: :integer}
assert params == [1]
{query, params} = from(Post, []) |> select([p], type(^"1", Custom.Permalink))
|> normalize_with_params
assert query.select.expr ==
%Ecto.Query.Tagged{type: :id, value: {:^, [], [0]}, tag: Custom.Permalink}
assert params == [1]
{query, params} = from(Post, []) |> select([p], type(^"1", p.visits))
|> normalize_with_params
assert query.select.expr ==
%Ecto.Query.Tagged{type: :integer, value: {:^, [], [0]}, tag: :integer}
assert params == [1]
assert_raise Ecto.QueryError, fn ->
from(Post, []) |> select([p], type(^"1", :datetime)) |> normalize
end
end
test "normalize: validate fields" do
message = ~r"field `Ecto.Query.PlannerTest.Comment.temp` in `select` does not exist in the model source"
assert_raise Ecto.QueryError, message, fn ->
query = from(Comment, []) |> select([c], c.temp)
normalize(query)
end
message = ~r"field `Ecto.Query.PlannerTest.Comment.text` in `where` does not type check"
assert_raise Ecto.QueryError, message, fn ->
query = from(Comment, []) |> where([c], c.text)
normalize(query)
end
end
test "normalize: validate fields in composite types" do
query = from(Post, []) |> where([p], p.id in [1, 2, 3])
normalize(query)
message = ~r"field `Ecto.Query.PlannerTest.Comment.text` in `where` does not type check"
assert_raise Ecto.QueryError, message, fn ->
query = from(Comment, []) |> where([c], c.text in [1, 2, 3])
normalize(query)
end
end
test "normalize: flattens and expands in expressions" do
{query, params} = where(Post, [p], p.id in [1, 2, 3]) |> normalize_with_params()
assert Macro.to_string(hd(query.wheres).expr) == "&0.id() in [1, 2, 3]"
assert params == []
{query, params} = where(Post, [p], p.id in [^1, 2, ^3]) |> normalize_with_params()
assert Macro.to_string(hd(query.wheres).expr) == "&0.id() in [^0, 2, ^1]"
assert params == [1, 3]
{query, params} = where(Post, [p], p.id in ^[]) |> normalize_with_params()
assert Macro.to_string(hd(query.wheres).expr) == "&0.id() in []"
assert params == []
{query, params} = where(Post, [p], p.id in ^[1, 2, 3]) |> normalize_with_params()
assert Macro.to_string(hd(query.wheres).expr) == "&0.id() in ^(0, 3)"
assert params == [1, 2, 3]
{query, params} = where(Post, [p], p.title == ^"foo" and p.id in ^[1, 2, 3] and
p.title == ^"bar") |> normalize_with_params()
assert Macro.to_string(hd(query.wheres).expr) ==
"&0.title() == ^0 and &0.id() in ^(1, 3) and &0.title() == ^4"
assert params == ["foo", 1, 2, 3, "bar"]
end
test "normalize: select" do
query = from(Post, []) |> normalize()
assert query.select.expr == {:&, [], [0]}
assert query.select.fields == [{:&, [], [0]}]
query = from(Post, []) |> select([p], {p, p.title}) |> normalize()
assert query.select.fields ==
[{:&, [], [0]}, {{:., [], [{:&, [], [0]}, :title]}, [ecto_type: :string], []}]
query = from(Post, []) |> select([p], {p.title, p}) |> normalize()
assert query.select.fields ==
[{:&, [], [0]}, {{:., [], [{:&, [], [0]}, :title]}, [ecto_type: :string], []}]
query =
from(Post, [])
|> join(:inner, [_], c in Comment)
|> preload([_, c], comments: c)
|> select([p, _], {p.title, p})
|> normalize()
assert query.select.fields ==
[{:&, [], [0]}, {:&, [], [1]},
{{:., [], [{:&, [], [0]}, :title]}, [ecto_type: :string], []}]
end
test "normalize: preload" do
message = ~r"the binding used in `from` must be selected in `select` when using `preload`"
assert_raise Ecto.QueryError, message, fn ->
Post |> preload(:hello) |> select([p], p.title) |> normalize
end
end
test "normalize: preload assoc" do
query = from(p in Post, join: c in assoc(p, :comments), preload: [comments: c])
normalize(query)
message = ~r"field `Ecto.Query.PlannerTest.Post.not_field` in preload is not an association"
assert_raise Ecto.QueryError, message, fn ->
query = from(p in Post, join: c in assoc(p, :comments), preload: [not_field: c])
normalize(query)
end
message = ~r"requires an inner or left join, got right join"
assert_raise Ecto.QueryError, message, fn ->
query = from(p in Post, right_join: c in assoc(p, :comments), preload: [comments: c])
normalize(query)
end
end
test "normalize: all does not allow updates" do
message = ~r"`all` does not allow `update` expressions"
assert_raise Ecto.QueryError, message, fn ->
from(p in Post, update: [set: [name: "foo"]]) |> normalize(:all, [])
end
end
test "normalize: update all only allow filters and checks updates" do
message = ~r"`update_all` requires at least one field to be updated"
assert_raise Ecto.QueryError, message, fn ->
from(p in Post, select: p, update: []) |> normalize(:update_all, [])
end
message = ~r"duplicate field `title` for `update_all`"
assert_raise Ecto.QueryError, message, fn ->
from(p in Post, select: p, update: [set: [title: "foo", title: "bar"]])
|> normalize(:update_all, [])
end
message = ~r"`update_all` allows only `where` and `join` expressions in query"
assert_raise Ecto.QueryError, message, fn ->
from(p in Post, select: p, update: [set: [title: "foo"]]) |> normalize(:update_all, [])
end
end
test "normalize: delete all only allow filters and forbids updates" do
message = ~r"`delete_all` does not allow `update` expressions"
assert_raise Ecto.QueryError, message, fn ->
from(p in Post, update: [set: [name: "foo"]]) |> normalize(:delete_all, [])
end
message = ~r"`delete_all` allows only `where` and `join` expressions in query"
assert_raise Ecto.QueryError, message, fn ->
from(p in Post, select: p) |> normalize(:delete_all, [])
end
end
end
| 39.170604 | 108 | 0.593809 |
1cef8fb857be04e9a208a271b4fe973ad8219299 | 1,406 | ex | Elixir | lib/ucx_ucc/utils.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | lib/ucx_ucc/utils.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | lib/ucx_ucc/utils.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | defmodule UcxUcc.Utils do
defmacro __using__(_) do
quote do
import unquote(__MODULE__)
end
end
defmacro is_falsey(value) do
quote do
(unquote(value) == nil or unquote(value) == false)
end
end
defmacro is_falsy(value) do
quote do
is_falsey(unquote(value))
end
end
defmacro is_truthy(value) do
quote do
(not is_falsey(unquote(value)))
end
end
def deep_merge(left, right) do
Map.merge(left, right, &deep_resolve/3)
end
# Key exists in both maps, and both values are maps as well.
# These can be merged recursively.
defp deep_resolve(_key, left = %{}, right = %{}) do
deep_merge(left, right)
end
# Key exists in both maps, but at least one of the values is
# NOT a map. We fall back to standard merge behavior, preferring
# the value on the right.
defp deep_resolve(_key, _left, right) do
right
end
def to_camel_case(atom) when is_atom(atom), do: atom |> to_string |> to_camel_case
def to_camel_case(string) do
string
|> String.split("_")
|> Enum.map(&String.capitalize/1)
|> Enum.join("")
end
@doc """
Generate a random string.
Returns a random string, with the given length.
"""
@spec random_string(integer) :: binary
def random_string(length) do
length
|> :crypto.strong_rand_bytes()
|> Base.url_encode64()
|> binary_part(0, length)
end
end
| 22.31746 | 84 | 0.659317 |
1cef943a685c769fb8a02794330d1a2d2a610a05 | 1,127 | exs | Elixir | config/config.exs | maxiwoj/AddressBook | 0d2b7171aa533f1afca6cb7347a6775c523c0489 | [
"Apache-2.0"
] | null | null | null | config/config.exs | maxiwoj/AddressBook | 0d2b7171aa533f1afca6cb7347a6775c523c0489 | [
"Apache-2.0"
] | null | null | null | config/config.exs | maxiwoj/AddressBook | 0d2b7171aa533f1afca6cb7347a6775c523c0489 | [
"Apache-2.0"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :address_book, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:address_book, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.354839 | 73 | 0.753327 |
1cefa6e88bb1809ddc9822c34202cd7f1a974cd3 | 958 | ex | Elixir | lib/cargo_elixir/application.ex | helium/cargo-elixir | 1ad2b5aa32a091eb67a46cf555501b4e2c45f5c9 | [
"Apache-2.0"
] | 9 | 2020-11-13T18:24:25.000Z | 2022-03-17T19:03:02.000Z | lib/cargo_elixir/application.ex | helium/cargo-elixir | 1ad2b5aa32a091eb67a46cf555501b4e2c45f5c9 | [
"Apache-2.0"
] | 8 | 2020-12-07T17:41:01.000Z | 2022-01-05T10:51:21.000Z | lib/cargo_elixir/application.ex | helium/cargo-elixir | 1ad2b5aa32a091eb67a46cf555501b4e2c45f5c9 | [
"Apache-2.0"
] | 5 | 2020-12-07T07:16:21.000Z | 2022-03-21T00:44:39.000Z | defmodule CargoElixir.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
def start(_type, _args) do
# List all child processes to be supervised
children = [
# Start the Ecto repository
CargoElixir.Repo,
# Start the endpoint when the application starts
CargoElixirWeb.Endpoint
# Starts a worker by calling: CargoElixir.Worker.start_link(arg)
# {CargoElixir.Worker, arg},
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: CargoElixir.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
CargoElixirWeb.Endpoint.config_change(changed, removed)
:ok
end
end
| 29.9375 | 70 | 0.722338 |
1cefbcccdad8b04c89d63a61d1369fcaf406d67e | 940 | ex | Elixir | clients/keto/elixir/lib/keto/model/create_relation_tuple_internal_server_error_body.ex | ory/sdk-generator | 958314d130922ad6f20f439b5230141a832231a5 | [
"Apache-2.0"
] | null | null | null | clients/keto/elixir/lib/keto/model/create_relation_tuple_internal_server_error_body.ex | ory/sdk-generator | 958314d130922ad6f20f439b5230141a832231a5 | [
"Apache-2.0"
] | null | null | null | clients/keto/elixir/lib/keto/model/create_relation_tuple_internal_server_error_body.ex | ory/sdk-generator | 958314d130922ad6f20f439b5230141a832231a5 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule Keto.Model.CreateRelationTupleInternalServerErrorBody do
@moduledoc """
CreateRelationTupleInternalServerErrorBody CreateRelationTupleInternalServerErrorBody CreateRelationTupleInternalServerErrorBody create relation tuple internal server error body
"""
@derive [Poison.Encoder]
defstruct [
:"code",
:"details",
:"message",
:"reason",
:"request",
:"status"
]
@type t :: %__MODULE__{
:"code" => integer() | nil,
:"details" => [map()] | nil,
:"message" => String.t | nil,
:"reason" => String.t | nil,
:"request" => String.t | nil,
:"status" => String.t | nil
}
end
defimpl Poison.Decoder, for: Keto.Model.CreateRelationTupleInternalServerErrorBody do
def decode(value, _options) do
value
end
end
| 26.111111 | 179 | 0.688298 |
1cefdd91fce2274ba9fe0906a684d9e04488ebc1 | 5,929 | ex | Elixir | kousa/lib/beef/follows.ex | Jawkx/dogehouse | eea5954aec56961a3c0144289e2ed4da739739c3 | [
"MIT"
] | 1 | 2021-04-19T19:32:51.000Z | 2021-04-19T19:32:51.000Z | kousa/lib/beef/follows.ex | Jawkx/dogehouse | eea5954aec56961a3c0144289e2ed4da739739c3 | [
"MIT"
] | null | null | null | kousa/lib/beef/follows.ex | Jawkx/dogehouse | eea5954aec56961a3c0144289e2ed4da739739c3 | [
"MIT"
] | 1 | 2021-03-12T21:31:01.000Z | 2021-03-12T21:31:01.000Z | defmodule Beef.Follows do
@moduledoc """
Context module for Follows.
This will eventually go away and be replaced
by using associations on users.
"""
import Ecto.Query
@fetch_limit 21
alias Beef.Schemas.Follow
alias Beef.Schemas.User
alias Beef.Schemas.Room
@spec get_followers_online_and_not_in_a_room(String.t()) :: [Follow.t()]
def get_followers_online_and_not_in_a_room(user_id) do
from(
f in Follow,
inner_join: u in User,
on: f.followerId == u.id,
where: f.userId == ^user_id and u.online == true and is_nil(u.currentRoomId)
)
|> Beef.Repo.all()
end
def bulk_insert(follows) do
Beef.Repo.insert_all(
Follow,
follows,
on_conflict: :nothing
)
end
# TODO: change the name of this, is_ by convention means
# "guard".
def following_me?(user_id, user_id_to_check) do
not is_nil(
from(
f in Follow,
where: f.userId == ^user_id and f.followerId == ^user_id_to_check
)
|> Beef.Repo.one()
)
end
# fetch all the users
def fetch_following_online(user_id, offset \\ 0) do
items =
from(
f in Follow,
inner_join: u in User,
on: f.userId == u.id,
left_join: f2 in Follow,
on: f2.userId == ^user_id and f2.followerId == u.id,
left_join: cr in Room,
on: u.currentRoomId == cr.id,
where:
f.followerId == ^user_id and
(is_nil(cr.isPrivate) or
cr.isPrivate == false),
select: %{u | currentRoom: cr, followsYou: not is_nil(f2.userId)},
limit: ^@fetch_limit,
offset: ^offset,
order_by: [desc: u.online]
)
|> Beef.Repo.all()
{Enum.slice(items, 0, -1 + @fetch_limit),
if(length(items) == @fetch_limit, do: -1 + offset + @fetch_limit, else: nil)}
end
def fetch_invite_list(user_id, offset \\ 0) do
user = Beef.Users.get_by_id(user_id)
items =
from(
f in Follow,
inner_join: u in User,
on: f.followerId == u.id,
where:
f.userId == ^user_id and u.online == true and
(u.currentRoomId != ^user.currentRoomId or is_nil(u.currentRoomId)),
select: u,
limit: ^@fetch_limit,
offset: ^offset
)
|> Beef.Repo.all()
{Enum.slice(items, 0, -1 + @fetch_limit),
if(length(items) == @fetch_limit, do: -1 + offset + @fetch_limit, else: nil)}
end
def get_followers(user_id, user_id_to_get_followers_for, offset \\ 20) do
items =
from(
f in Follow,
where: f.userId == ^user_id_to_get_followers_for,
inner_join: u in User,
on: f.followerId == u.id,
left_join: f2 in Follow,
on: f2.userId == u.id and f2.followerId == ^user_id,
select: %{u | youAreFollowing: not is_nil(f2.userId)},
limit: ^@fetch_limit,
offset: ^offset,
order_by: [desc: f.inserted_at]
)
|> Beef.Repo.all()
{Enum.slice(items, 0, -1 + @fetch_limit),
if(length(items) == @fetch_limit, do: -1 + offset + @fetch_limit, else: nil)}
end
def get_following(user_id, user_id_to_get_following_for, offset \\ 20) do
items =
from(
f in Follow,
where: f.followerId == ^user_id_to_get_following_for,
inner_join: u in User,
on: f.userId == u.id,
left_join: f2 in Follow,
on: f2.userId == u.id and f2.followerId == ^user_id,
select: %{u | youAreFollowing: not is_nil(f2.userId)},
limit: ^@fetch_limit,
offset: ^offset,
order_by: [desc: f.inserted_at]
)
|> Beef.Repo.all()
{Enum.slice(items, 0, -1 + @fetch_limit),
if(length(items) == @fetch_limit, do: -1 + offset + @fetch_limit, else: nil)}
end
def delete(user_id, follower_id) do
{rows_affected, _} =
from(f in Follow, where: f.userId == ^user_id and f.followerId == ^follower_id)
|> Beef.Repo.delete_all()
if rows_affected == 1 do
from(u in User,
where: u.id == ^user_id,
update: [
inc: [
numFollowers: -1
]
]
)
|> Beef.Repo.update_all([])
from(u in User,
where: u.id == ^follower_id,
update: [
inc: [
numFollowing: -1
]
]
)
|> Beef.Repo.update_all([])
end
end
def insert(data) do
%Follow{}
|> Follow.insert_changeset(data)
|> Beef.Repo.insert()
|> case do
{:ok, _} ->
# TODO: eliminate N+1 by setting up changesets
# in an idiomatic fashion.
from(u in User,
where: u.id == ^data.userId,
update: [
inc: [
numFollowers: 1
]
]
)
|> Beef.Repo.update_all([])
from(u in User,
where: u.id == ^data.followerId,
update: [
inc: [
numFollowing: 1
]
]
)
|> Beef.Repo.update_all([])
error ->
error
end
end
def get_info(me_id, other_user_id) do
from(f in Follow,
where:
(f.userId == ^me_id and f.followerId == ^other_user_id) or
(f.userId == ^other_user_id and f.followerId == ^me_id),
limit: 2
)
|> Beef.Repo.all()
|> case do
# when both follow each other there should be two results.
[_, _] ->
%{followsYou: true, youAreFollowing: true}
# when following is unidirectional, there should be one result.
# this susses out the direction of that relationship
[%{userId: ^me_id, followerId: ^other_user_id}] ->
%{followsYou: true, youAreFollowing: false}
[%{userId: ^other_user_id, followerId: ^me_id}] ->
%{followsYou: false, youAreFollowing: true}
# no relationship, no entries.
[] ->
%{followsYou: false, youAreFollowing: false}
end
end
end
| 26.46875 | 85 | 0.560128 |
1ceff6687b8fffcf7bbf3f6edc839b8b9c377c85 | 529 | ex | Elixir | lib/anchore_engine_api_server/model/policy_rule_params.ex | michaeljguarino/anchore-elixir-client | 156a44f429ecb62433729a2b4c52de5dc0ef44d2 | [
"MIT"
] | null | null | null | lib/anchore_engine_api_server/model/policy_rule_params.ex | michaeljguarino/anchore-elixir-client | 156a44f429ecb62433729a2b4c52de5dc0ef44d2 | [
"MIT"
] | null | null | null | lib/anchore_engine_api_server/model/policy_rule_params.ex | michaeljguarino/anchore-elixir-client | 156a44f429ecb62433729a2b4c52de5dc0ef44d2 | [
"MIT"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule AnchoreEngineAPIServer.Model.PolicyRuleParams do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:name,
:value
]
@type t :: %__MODULE__{
:name => String.t,
:value => String.t
}
end
defimpl Poison.Decoder, for: AnchoreEngineAPIServer.Model.PolicyRuleParams do
def decode(value, _options) do
value
end
end
| 18.892857 | 77 | 0.699433 |
1cf0088643ace1ec262d046a460dda42fa151fb1 | 2,347 | exs | Elixir | mix.exs | alboratech/bitcraft | 40a8d6d1d72b3b6a9b9b70793f12edd1e133cd8d | [
"MIT"
] | 5 | 2021-08-20T08:41:19.000Z | 2022-02-02T22:04:05.000Z | mix.exs | cabol/bitcraft | 40a8d6d1d72b3b6a9b9b70793f12edd1e133cd8d | [
"MIT"
] | null | null | null | mix.exs | cabol/bitcraft | 40a8d6d1d72b3b6a9b9b70793f12edd1e133cd8d | [
"MIT"
] | null | null | null | defmodule Bitcraft.MixProject do
use Mix.Project
@source_url "https://github.com/alboratech/bitcraft"
@version "0.1.0"
def project do
[
app: :bitcraft,
version: @version,
elixir: "~> 1.9",
elixirc_paths: elixirc_paths(Mix.env()),
aliases: aliases(),
deps: deps(),
# Docs
name: "Bitcraft",
docs: docs(),
# Testing
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [
check: :test,
coveralls: :test,
"coveralls.detail": :test,
"coveralls.post": :test,
"coveralls.html": :test
],
# Dialyzer
dialyzer: dialyzer(),
# Hex
package: package(),
description: """
A toolkit for encoding/decoding bit strings and DSL for binary protocols bit blocks.
"""
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
def application, do: []
defp deps do
[
# Test & Code Analysis
{:excoveralls, "~> 0.13", only: :test},
{:credo, "~> 1.5", only: [:dev, :test], runtime: false},
{:dialyxir, "~> 1.0", only: [:dev, :test], runtime: false},
{:sobelow, "~> 0.10", only: [:dev, :test], runtime: false},
# Docs
{:ex_doc, "~> 0.23", only: :dev, runtime: false}
]
end
defp aliases do
[
check: [
"compile --warnings-as-errors",
"format --check-formatted",
"credo --strict",
"coveralls.html",
"sobelow --exit --skip",
"dialyzer --format short"
]
]
end
defp package do
[
name: :bitcraft,
maintainers: [
"Carlos Bolanos (GH: cabol)",
"Albora Technologies"
],
licenses: ["MIT"],
links: %{"GitHub" => @source_url}
]
end
defp docs do
[
main: "Bitcraft",
source_ref: "v#{@version}",
canonical: "http://hexdocs.pm/bitcraft",
source_url: @source_url
]
end
defp dialyzer do
[
plt_file: {:no_warn, "priv/plts/" <> plt_file_name()},
flags: [
:unmatched_returns,
:error_handling,
:race_conditions,
:no_opaque,
:unknown,
:no_return
]
]
end
defp plt_file_name do
"dialyzer-#{Mix.env()}-#{System.otp_release()}-#{System.version()}.plt"
end
end
| 21.144144 | 90 | 0.532169 |
1cf074eefb369985c322b6a310784ad74c2d2146 | 10,187 | exs | Elixir | lib/ex_unit/test/ex_unit/formatter_test.exs | andrewtimberlake/elixir | a1c4ffc897f9407fe7e739e20e697805fbbff810 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/test/ex_unit/formatter_test.exs | andrewtimberlake/elixir | a1c4ffc897f9407fe7e739e20e697805fbbff810 | [
"Apache-2.0"
] | 1 | 2019-04-25T12:52:49.000Z | 2019-04-25T13:27:31.000Z | lib/ex_unit/test/ex_unit/formatter_test.exs | andrewtimberlake/elixir | a1c4ffc897f9407fe7e739e20e697805fbbff810 | [
"Apache-2.0"
] | null | null | null | Code.require_file("../test_helper.exs", __DIR__)
defmodule ExUnit.FormatterTest do
use ExUnit.Case
import ExUnit.Formatter
doctest ExUnit.Formatter
defmacrop catch_assertion(expr) do
quote do
try do
unquote(expr)
rescue
ex -> ex
end
end
end
defp test_module do
%ExUnit.TestModule{name: Hello}
end
defp test do
%ExUnit.Test{name: :world, module: Hello, tags: %{file: __ENV__.file, line: 1}}
end
def falsy() do
false
end
defp formatter(_kind, message) do
message
end
test "formats test case filters" do
filters = [run: true, slow: false]
assert format_filters(filters, :exclude) =~ "Excluding tags: [run: true, slow: false]"
assert format_filters(filters, :include) =~ "Including tags: [run: true, slow: false]"
end
test "formats test errors" do
failure = [{:error, catch_error(raise "oops"), []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (RuntimeError) oops
"""
end
test "formats test exits" do
failure = [{:exit, 1, []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) == """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (exit) 1
"""
end
test "formats test exits with mfa" do
failure = [{:exit, {:bye, {:mod, :fun, []}}, []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) == """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (exit) exited in: :mod.fun()
** (EXIT) :bye
"""
end
test "formats test throws" do
failure = [{:throw, 1, []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) == """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (throw) 1
"""
end
test "formats test EXITs" do
failure = [{{:EXIT, self()}, 1, []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) == """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (EXIT from #{inspect(self())}) 1
"""
end
test "formats test errors with test_location_relative_path" do
Application.put_env(:ex_unit, :test_location_relative_path, "apps/sample")
failure = [{:error, catch_error(raise "oops"), []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
apps/sample/test/ex_unit/formatter_test.exs:1
** (RuntimeError) oops
"""
after
Application.delete_env(:ex_unit, :test_location_relative_path)
end
test "formats test errors with code snippets" do
stack = {Hello, :world, 1, [file: __ENV__.file, line: 3]}
failure = [{:error, catch_error(raise "oops"), [stack]}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (RuntimeError) oops
code: defmodule ExUnit.FormatterTest do
"""
end
test "formats stacktraces" do
stacktrace = [{Oops, :wrong, 1, [file: "formatter_test.exs", line: 1]}]
failure = [{:error, catch_error(raise "oops"), stacktrace}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (RuntimeError) oops
stacktrace:
formatter_test.exs:1: Oops.wrong/1
"""
end
test "formats assertions" do
failure = [{:error, catch_assertion(assert ExUnit.FormatterTest.falsy()), []}]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
Expected truthy, got false
code: assert ExUnit.FormatterTest.falsy()
"""
end
test "formats multiple assertions" do
failure = [
{:error, catch_assertion(assert ExUnit.FormatterTest.falsy()), []},
{:error, catch_assertion(assert 1 == 2), []}
]
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
Failure #1
Expected truthy, got false
code: assert ExUnit.FormatterTest.falsy()
Failure #2
Assertion with == failed
code: assert 1 == 2
left: 1
right: 2
"""
end
defp trim_multiline_whitespace(string) do
String.replace(string, ~r"\n\s+\n", "\n\n")
end
test "blames function clause error" do
{error, stack} =
try do
Access.fetch(:foo, :bar)
rescue
exception -> {exception, __STACKTRACE__}
end
failure = format_test_failure(test(), [{:error, error, [hd(stack)]}], 1, 80, &formatter/2)
assert trim_multiline_whitespace(failure) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (FunctionClauseError) no function clause matching in Access.fetch/2
The following arguments were given to Access.fetch/2:
# 1
:foo
# 2
:bar
Attempted function clauses (showing 5 out of 5):
def fetch(%module{} = container, key)
"""
assert failure =~ ~r"\(elixir\) lib/access\.ex:\d+: Access\.fetch/2"
end
test "formats setup_all errors" do
failure = [{:error, catch_error(raise "oops"), []}]
assert format_test_all_failure(test_module(), failure, 1, 80, &formatter/2) =~ """
1) Hello: failure on setup_all callback, all tests have been invalidated
** (RuntimeError) oops
"""
end
test "formats assertions with operators with no limit" do
failure = [{:error, catch_assertion(assert [1, 2, 3] == [4, 5, 6]), []}]
assert format_test_all_failure(test_module(), failure, 1, :infinity, &formatter/2) =~ """
1) Hello: failure on setup_all callback, all tests have been invalidated
Assertion with == failed
code: assert [1, 2, 3] == [4, 5, 6]
left: [1, 2, 3]
right: [4, 5, 6]
"""
end
test "formats assertions with operators with column limit" do
failure = [{:error, catch_assertion(assert [1, 2, 3] == [4, 5, 6]), []}]
assert format_test_all_failure(test_module(), failure, 1, 15, &formatter/2) =~ """
1) Hello: failure on setup_all callback, all tests have been invalidated
Assertion with == failed
code: assert [1, 2, 3] == [4, 5, 6]
left: [1,
2,
3]
right: [4,
5,
6]
"""
end
test "formats assertions with complex function call arguments" do
failure = [{:error, catch_assertion(assert is_list(List.to_tuple([1, 2, 3]))), []}]
assert format_test_all_failure(test_module(), failure, 1, 80, &formatter/2) =~ """
1) Hello: failure on setup_all callback, all tests have been invalidated
Expected truthy, got false
code: assert is_list(List.to_tuple([1, 2, 3]))
arguments:
# 1
{1, 2, 3}
"""
failure = [{:error, catch_assertion(assert is_list({1, 2})), []}]
assert format_test_all_failure(test_module(), failure, 1, 80, &formatter/2) =~ """
1) Hello: failure on setup_all callback, all tests have been invalidated
Expected truthy, got false
code: assert is_list({1, 2})
"""
end
test "formats assertions with message with multiple lines" do
message = "Some meaningful error:\nuseful info\nanother useful info"
failure = [{:error, catch_assertion(assert(false, message)), []}]
assert format_test_all_failure(test_module(), failure, 1, :infinity, &formatter/2) =~ """
1) Hello: failure on setup_all callback, all tests have been invalidated
Some meaningful error:
useful info
another useful info
"""
end
defmodule BadInspect do
defstruct key: 0
defimpl Inspect do
def inspect(struct, opts) when is_atom(opts) do
struct.unknown
end
end
end
test "inspect failure" do
failure = [{:error, catch_assertion(assert :will_fail == %BadInspect{}), []}]
message =
"got FunctionClauseError with message \"no function clause matching " <>
"in Inspect.ExUnit.FormatterTest.BadInspect.inspect/2\" while inspecting " <>
"%{__struct__: ExUnit.FormatterTest.BadInspect, key: 0}"
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
Assertion with == failed
code: assert :will_fail == %BadInspect{}
left: :will_fail
right: %Inspect.Error{
message: #{inspect(message)}
}
"""
end
defmodule BadMessage do
defexception key: 0
@impl true
def message(_message) do
raise "oops"
end
end
test "message failure" do
failure = [{:error, catch_error(raise BadMessage), []}]
message =
"got RuntimeError with message \"oops\" while retrieving Exception.message/1 " <>
"for %ExUnit.FormatterTest.BadMessage{key: 0}"
assert format_test_failure(test(), failure, 1, 80, &formatter/2) =~ """
1) world (Hello)
test/ex_unit/formatter_test.exs:1
** (ExUnit.FormatterTest.BadMessage) #{message}
"""
end
end
| 31.636646 | 94 | 0.556101 |
1cf08489c46b1a3d504c2b77f51ae6da190ee8d6 | 2,803 | exs | Elixir | test/dice_test.exs | Celeo/gandalf_bot | f8e48b9a0b403fac5e29e514ce6c684d1bebeecc | [
"MIT"
] | null | null | null | test/dice_test.exs | Celeo/gandalf_bot | f8e48b9a0b403fac5e29e514ce6c684d1bebeecc | [
"MIT"
] | null | null | null | test/dice_test.exs | Celeo/gandalf_bot | f8e48b9a0b403fac5e29e514ce6c684d1bebeecc | [
"MIT"
] | null | null | null | defmodule Bot.Dice.Test do
use ExUnit.Case
alias Bot.Dice
describe "handle_roll()" do
setup do
on_exit(fn ->
Bot.Util.Rng.clear_stored_numbers()
:ok
end)
:ok
end
test "returns no values for no dice" do
results = Dice.handle_roll("")
assert results == {Dice.RollType.Explode_10, []}
end
test "gets results with a static gen fn" do
Bot.Util.Rng.enqueue_static([8, 8, 8])
results = Dice.handle_roll("3")
assert results == {Dice.RollType.Explode_10, [{8, false}, {8, false}, {8, false}]}
end
test "handles exploding correctly" do
Bot.Util.Rng.enqueue_static([8, 4, 5, 9, 2])
results = Dice.handle_roll("3 8again")
assert results ==
{Dice.RollType.Explode_8,
[{8, false}, {4, true}, {5, false}, {9, false}, {2, true}]}
end
test "handles math - addition" do
Bot.Util.Rng.enqueue_static([8, 8, 8, 8, 8, 8])
{_, results} = Dice.handle_roll("3 + 3")
assert length(results) == 6
end
test "handles math - no modifiers" do
Bot.Util.Rng.enqueue_static([8, 8, 8, 8, 8, 8])
{_, results} = Dice.handle_roll("3 3")
assert length(results) == 6
end
test "handles math - negative" do
Bot.Util.Rng.enqueue_static([8])
{_, results} = Dice.handle_roll("3 - 2")
assert length(results) == 1
end
test "handles math - down to zero" do
{_, results} = Dice.handle_roll("3 - 3")
assert length(results) == 0
end
end
describe "roll_results_to_string()" do
test "handles empty results" do
result = Dice.roll_results_to_string({Dice.RollType.Explode_10, []})
assert result == "Did not roll any dice"
end
test "handles normal roll" do
result = Dice.roll_results_to_string({Dice.RollType.Explode_10, [{10, false}, {4, true}]})
assert result == "Successes: **1**\n10 (4)"
end
test "handles exceptional results" do
result =
Dice.roll_results_to_string(
{Dice.RollType.Explode_10, [{8, false}, {8, false}, {8, false}, {8, false}, {8, false}]}
)
assert result == "Successes: **5**\n8 8 8 8 8\nExceptional success!"
end
test "handles failure results" do
result = Dice.roll_results_to_string({Dice.RollType.Explode_10, [{1, false}]})
assert result == "Successes: **0**\n1\nFool of a Took!"
end
test "handles chance results failure" do
result = Dice.roll_results_to_string({Dice.RollType.Chance, [{5, false}]})
assert result == "Chance failed! (5)"
end
test "handles chance results success" do
result = Dice.roll_results_to_string({Dice.RollType.Chance, [{10, false}]})
assert result == "Chance **succeeded**! (10)"
end
end
end
| 29.505263 | 98 | 0.602569 |
1cf0b467b5fa714da504e130ae7d3c11f7d0f5a1 | 21,642 | exs | Elixir | test/scenic/color_test.exs | bruceme/scenic | bd8a1e63c122c44cc263e1fb5dfab2547ce8ef43 | [
"Apache-2.0"
] | null | null | null | test/scenic/color_test.exs | bruceme/scenic | bd8a1e63c122c44cc263e1fb5dfab2547ce8ef43 | [
"Apache-2.0"
] | null | null | null | test/scenic/color_test.exs | bruceme/scenic | bd8a1e63c122c44cc263e1fb5dfab2547ce8ef43 | [
"Apache-2.0"
] | null | null | null | defmodule Scenic.ColorTest do
@moduledoc false
use ExUnit.Case, async: true
doctest Scenic.Color
alias Scenic.Color
test "to_g implied" do
# from bisque
g = round((0xFF + 0xE4 + 0xC4) / 3)
assert Color.to_g(128) == {:color_g, 128}
assert Color.to_g({128, 200}) == {:color_g, 128}
assert Color.to_g(:bisque) == {:color_g, g}
assert Color.to_g({:bisque, 200}) == {:color_g, g}
assert Color.to_g({0xFF, 0xE4, 0xC4}) == {:color_g, g}
assert Color.to_g({0xFF, 0xE4, 0xC4, 0xFF}) == {:color_g, g}
end
test "to_g explicit" do
# from bisque
hsv = Color.to_hsv(:bisque)
hsl = Color.to_hsl(:bisque)
g = round((0xFF + 0xE4 + 0xC4) / 3)
assert Color.to_g({:color_g, 128}) == {:color_g, 128}
assert Color.to_g({:color_ga, {128, 200}}) == {:color_g, 128}
assert Color.to_g({:color_rgb, {0xFF, 0xE4, 0xC4}}) == {:color_g, g}
assert Color.to_g({:color_rgba, {0xFF, 0xE4, 0xC4, 0xFF}}) == {:color_g, g}
assert Color.to_g(hsv) == {:color_g, g}
assert Color.to_g(hsl) == {:color_g, g}
end
test "to_ga implied" do
# from bisque
g = round((0xFF + 0xE4 + 0xC4) / 3)
assert Color.to_ga(128) == {:color_ga, {128, 0xFF}}
assert Color.to_ga({128, 200}) == {:color_ga, {128, 200}}
assert Color.to_ga(:bisque) == {:color_ga, {g, 0xFF}}
assert Color.to_ga({:bisque, 200}) == {:color_ga, {g, 200}}
assert Color.to_ga({0xFF, 0xE4, 0xC4}) == {:color_ga, {g, 0xFF}}
assert Color.to_ga({0xFF, 0xE4, 0xC4, 200}) == {:color_ga, {g, 200}}
end
test "to_ga explicit" do
# from bisque
hsv = Color.to_hsv(:bisque)
hsl = Color.to_hsl(:bisque)
g = round((0xFF + 0xE4 + 0xC4) / 3)
assert Color.to_ga({:color_g, 128}) == {:color_ga, {128, 0xFF}}
assert Color.to_ga({:color_ga, {128, 200}}) == {:color_ga, {128, 200}}
assert Color.to_ga({:color_rgb, {0xFF, 0xE4, 0xC4}}) == {:color_ga, {g, 0xFF}}
assert Color.to_ga({:color_rgba, {0xFF, 0xE4, 0xC4, 200}}) == {:color_ga, {g, 200}}
assert Color.to_ga(hsv) == {:color_ga, {g, 0xFF}}
assert Color.to_ga(hsl) == {:color_ga, {g, 0xFF}}
end
test "to_rgb implied" do
# from bisque
assert Color.to_rgb(128) == {:color_rgb, {128, 128, 128}}
assert Color.to_rgb({128, 200}) == {:color_rgb, {128, 128, 128}}
assert Color.to_rgb(:bisque) == {:color_rgb, {0xFF, 0xE4, 0xC4}}
assert Color.to_rgb({:bisque, 200}) == {:color_rgb, {0xFF, 0xE4, 0xC4}}
assert Color.to_rgb({0xFF, 0xE4, 0xC4}) == {:color_rgb, {0xFF, 0xE4, 0xC4}}
assert Color.to_rgb({0xFF, 0xE4, 0xC4, 200}) == {:color_rgb, {0xFF, 0xE4, 0xC4}}
end
test "to_rgb explicit" do
# from bisque
hsv = Color.to_hsv(:bisque)
hsl = Color.to_hsl(:bisque)
assert Color.to_rgb({:color_g, 128}) == {:color_rgb, {128, 128, 128}}
assert Color.to_rgb({:color_ga, {128, 200}}) == {:color_rgb, {128, 128, 128}}
assert Color.to_rgb({:color_rgb, {0xFF, 0xE4, 0xC4}}) == {:color_rgb, {0xFF, 0xE4, 0xC4}}
assert Color.to_rgb({:color_rgba, {0xFF, 0xE4, 0xC4, 200}}) ==
{:color_rgb, {0xFF, 0xE4, 0xC4}}
assert Color.to_rgb(hsv) == {:color_rgb, {0xFF, 0xE4, 0xC4}}
assert Color.to_rgb(hsl) == {:color_rgb, {0xFF, 0xE4, 0xC4}}
end
test "to_rgba implied" do
# from bisque
assert Color.to_rgba(128) == {:color_rgba, {128, 128, 128, 0xFF}}
assert Color.to_rgba({128, 200}) == {:color_rgba, {128, 128, 128, 200}}
assert Color.to_rgba(:bisque) == {:color_rgba, {0xFF, 0xE4, 0xC4, 0xFF}}
assert Color.to_rgba({:bisque, 200}) == {:color_rgba, {0xFF, 0xE4, 0xC4, 200}}
assert Color.to_rgba({0xFF, 0xE4, 0xC4}) == {:color_rgba, {0xFF, 0xE4, 0xC4, 0xFF}}
assert Color.to_rgba({0xFF, 0xE4, 0xC4, 200}) == {:color_rgba, {0xFF, 0xE4, 0xC4, 200}}
end
test "to_rgba explicit" do
# from bisque
hsv = Color.to_hsv(:bisque)
hsl = Color.to_hsl(:bisque)
assert Color.to_rgba({:color_g, 128}) == {:color_rgba, {128, 128, 128, 0xFF}}
assert Color.to_rgba({:color_ga, {128, 200}}) == {:color_rgba, {128, 128, 128, 200}}
assert Color.to_rgba({:color_rgb, {0xFF, 0xE4, 0xC4}}) ==
{:color_rgba, {0xFF, 0xE4, 0xC4, 0xFF}}
assert Color.to_rgba({:color_rgba, {0xFF, 0xE4, 0xC4, 200}}) ==
{:color_rgba, {0xFF, 0xE4, 0xC4, 200}}
assert Color.to_rgba(hsv) == {:color_rgba, {0xFF, 0xE4, 0xC4, 0xFF}}
assert Color.to_rgba(hsl) == {:color_rgba, {0xFF, 0xE4, 0xC4, 0xFF}}
end
test "to_hsv implied - using to_rgb as truth" do
# from bisque
rgb = Color.to_rgb(:bisque)
assert Color.to_hsv(0) == {:color_hsv, {0, 0, 0}}
{:color_hsv, {0.0, 0.0, v}} = Color.to_hsv(128)
assert v > 50 && v < 51
{:color_hsv, {0.0, 0.0, v}} = Color.to_hsv({128, 200})
assert v > 50 && v < 51
assert Color.to_hsv(:bisque) |> Color.to_rgb() == rgb
assert Color.to_hsv({:bisque, 200}) |> Color.to_rgb() == rgb
assert Color.to_hsv({0xFF, 0xE4, 0xC4}) |> Color.to_rgb() == rgb
assert Color.to_hsv({0xFF, 0xE4, 0xC4, 200}) |> Color.to_rgb() == rgb
end
test "to_hsv explicit - using to_rgb as truth" do
# from bisque
hsl = Color.to_hsl(:bisque)
rgb = Color.to_rgb(:bisque)
{:color_hsv, {0.0, 0.0, v}} = Color.to_hsv({:color_g, 128})
assert v > 50 && v < 51
{:color_hsv, {0.0, 0.0, v}} = Color.to_hsv({:color_ga, {128, 200}})
assert v > 50 && v < 51
assert Color.to_hsv(rgb) |> Color.to_rgb() == rgb
assert Color.to_hsv(hsl) |> Color.to_rgb() == rgb
assert Color.to_hsv({:color_rgba, {0xFF, 0xE4, 0xC4, 200}}) |> Color.to_rgb() == rgb
assert Color.to_hsv({:color_hsv, {1.1, 1.2, 1.3}}) == {:color_hsv, {1.1, 1.2, 1.3}}
end
test "to_hsl implied - using to_rgb as truth" do
# from bisque
rgb = Color.to_rgb(:bisque)
assert Color.to_hsl(0) == {:color_hsl, {0, 0, 0}}
{:color_hsl, {0.0, 0.0, l}} = Color.to_hsl(128)
assert l > 50 && l < 51
{:color_hsl, {0.0, 0.0, l}} = Color.to_hsl({128, 200})
assert l > 50 && l < 51
assert Color.to_hsl(:bisque) |> Color.to_rgb() == rgb
assert Color.to_hsl({:bisque, 200}) |> Color.to_rgb() == rgb
assert Color.to_hsl({0xFF, 0xE4, 0xC4}) |> Color.to_rgb() == rgb
assert Color.to_hsl({0xFF, 0xE4, 0xC4, 200}) |> Color.to_rgb() == rgb
end
test "to_hsl explicit - using to_rgb as truth" do
# from bisque
rgb = Color.to_rgb(:bisque)
{:color_hsl, {0.0, 0.0, l}} = Color.to_hsl({:color_g, 128})
assert l > 50 && l < 51
{:color_hsl, {0.0, 0.0, l}} = Color.to_hsl({:color_ga, {128, 200}})
assert l > 50 && l < 51
assert Color.to_hsl({:color_rgb, {0xFF, 0xE4, 0xC4}}) |> Color.to_rgb() == rgb
assert Color.to_hsl({:color_rgba, {0xFF, 0xE4, 0xC4, 200}}) |> Color.to_rgb() == rgb
assert Color.to_hsl({:color_hsl, {1.1, 1.2, 1.3}}) == {:color_hsl, {1.1, 1.2, 1.3}}
end
test "named looks right" do
Color.named()
|> Enum.each(fn {n, {r, g, b}} ->
assert is_atom(n)
assert is_integer(r) && r >= 0 && r <= 255
assert is_integer(g) && g >= 0 && g <= 255
assert is_integer(b) && b >= 0 && b <= 255
end)
end
test ":alice_blue", do: assert(Color.to_rgb(:alice_blue) == {:color_rgb, {0xF0, 0xF8, 0xFF}})
test ":antique_white",
do: assert(Color.to_rgb(:antique_white) == {:color_rgb, {0xFA, 0xEB, 0xD7}})
test ":aqua", do: assert(Color.to_rgb(:aqua) == {:color_rgb, {0x00, 0xFF, 0xFF}})
test ":aquamarine", do: assert(Color.to_rgb(:aquamarine) == {:color_rgb, {0x7F, 0xFF, 0xD4}})
test ":azure", do: assert(Color.to_rgb(:azure) == {:color_rgb, {0xF0, 0xFF, 0xFF}})
test ":beige", do: assert(Color.to_rgb(:beige) == {:color_rgb, {0xF5, 0xF5, 0xDC}})
test ":bisque", do: assert(Color.to_rgb(:bisque) == {:color_rgb, {0xFF, 0xE4, 0xC4}})
test ":black", do: assert(Color.to_rgb(:black) == {:color_rgb, {0x00, 0x00, 0x00}})
test ":blanched_almond",
do: assert(Color.to_rgb(:blanched_almond) == {:color_rgb, {0xFF, 0xEB, 0xCD}})
test ":blue", do: assert(Color.to_rgb(:blue) == {:color_rgb, {0x00, 0x00, 0xFF}})
test ":blue_violet", do: assert(Color.to_rgb(:blue_violet) == {:color_rgb, {0x8A, 0x2B, 0xE2}})
test ":brown", do: assert(Color.to_rgb(:brown) == {:color_rgb, {0xA5, 0x2A, 0x2A}})
test ":burly_wood", do: assert(Color.to_rgb(:burly_wood) == {:color_rgb, {0xDE, 0xB8, 0x87}})
test ":cadet_blue", do: assert(Color.to_rgb(:cadet_blue) == {:color_rgb, {0x5F, 0x9E, 0xA0}})
test ":chartreuse", do: assert(Color.to_rgb(:chartreuse) == {:color_rgb, {0x7F, 0xFF, 0x00}})
test ":chocolate", do: assert(Color.to_rgb(:chocolate) == {:color_rgb, {0xD2, 0x69, 0x1E}})
test ":coral", do: assert(Color.to_rgb(:coral) == {:color_rgb, {0xFF, 0x7F, 0x50}})
test ":cornflower_blue",
do: assert(Color.to_rgb(:cornflower_blue) == {:color_rgb, {0x64, 0x95, 0xED}})
test ":cornsilk", do: assert(Color.to_rgb(:cornsilk) == {:color_rgb, {0xFF, 0xF8, 0xDC}})
test ":crimson", do: assert(Color.to_rgb(:crimson) == {:color_rgb, {0xDC, 0x14, 0x3C}})
test ":cyan", do: assert(Color.to_rgb(:cyan) == {:color_rgb, {0x00, 0xFF, 0xFF}})
test ":dark_blue", do: assert(Color.to_rgb(:dark_blue) == {:color_rgb, {0x00, 0x00, 0x8B}})
test ":dark_cyan", do: assert(Color.to_rgb(:dark_cyan) == {:color_rgb, {0x00, 0x8B, 0x8B}})
test ":dark_golden_rod",
do: assert(Color.to_rgb(:dark_golden_rod) == {:color_rgb, {0xB8, 0x86, 0x0B}})
test ":dark_gray", do: assert(Color.to_rgb(:dark_gray) == {:color_rgb, {0xA9, 0xA9, 0xA9}})
test ":dark_grey", do: assert(Color.to_rgb(:dark_grey) == {:color_rgb, {0xA9, 0xA9, 0xA9}})
test ":dark_green", do: assert(Color.to_rgb(:dark_green) == {:color_rgb, {0x00, 0x64, 0x00}})
test ":dark_khaki", do: assert(Color.to_rgb(:dark_khaki) == {:color_rgb, {0xBD, 0xB7, 0x6B}})
test ":dark_magenta",
do: assert(Color.to_rgb(:dark_magenta) == {:color_rgb, {0x8B, 0x00, 0x8B}})
test ":dark_olive_green",
do: assert(Color.to_rgb(:dark_olive_green) == {:color_rgb, {0x55, 0x6B, 0x2F}})
test ":dark_orange", do: assert(Color.to_rgb(:dark_orange) == {:color_rgb, {0xFF, 0x8C, 0x00}})
test ":dark_orchid", do: assert(Color.to_rgb(:dark_orchid) == {:color_rgb, {0x99, 0x32, 0xCC}})
test ":dark_red", do: assert(Color.to_rgb(:dark_red) == {:color_rgb, {0x8B, 0x00, 0x00}})
test ":dark_salmon", do: assert(Color.to_rgb(:dark_salmon) == {:color_rgb, {0xE9, 0x96, 0x7A}})
test ":dark_sea_green",
do: assert(Color.to_rgb(:dark_sea_green) == {:color_rgb, {0x8F, 0xBC, 0x8F}})
test ":dark_slate_blue",
do: assert(Color.to_rgb(:dark_slate_blue) == {:color_rgb, {0x48, 0x3D, 0x8B}})
test ":dark_slate_gray",
do: assert(Color.to_rgb(:dark_slate_gray) == {:color_rgb, {0x2F, 0x4F, 0x4F}})
test ":dark_slate_grey",
do: assert(Color.to_rgb(:dark_slate_grey) == {:color_rgb, {0x2F, 0x4F, 0x4F}})
test ":dark_turquoise",
do: assert(Color.to_rgb(:dark_turquoise) == {:color_rgb, {0x00, 0xCE, 0xD1}})
test ":dark_violet", do: assert(Color.to_rgb(:dark_violet) == {:color_rgb, {0x94, 0x00, 0xD3}})
test ":deep_pink", do: assert(Color.to_rgb(:deep_pink) == {:color_rgb, {0xFF, 0x14, 0x93}})
test ":deep_sky_blue",
do: assert(Color.to_rgb(:deep_sky_blue) == {:color_rgb, {0x00, 0xBF, 0xFF}})
test ":dim_gray", do: assert(Color.to_rgb(:dim_gray) == {:color_rgb, {0x69, 0x69, 0x69}})
test ":dim_grey", do: assert(Color.to_rgb(:dim_grey) == {:color_rgb, {0x69, 0x69, 0x69}})
test ":dodger_blue", do: assert(Color.to_rgb(:dodger_blue) == {:color_rgb, {0x1E, 0x90, 0xFF}})
test ":fire_brick", do: assert(Color.to_rgb(:fire_brick) == {:color_rgb, {0xB2, 0x22, 0x22}})
test ":floral_white",
do: assert(Color.to_rgb(:floral_white) == {:color_rgb, {0xFF, 0xFA, 0xF0}})
test ":forest_green",
do: assert(Color.to_rgb(:forest_green) == {:color_rgb, {0x22, 0x8B, 0x22}})
test ":fuchsia", do: assert(Color.to_rgb(:fuchsia) == {:color_rgb, {0xFF, 0x00, 0xFF}})
test ":gainsboro", do: assert(Color.to_rgb(:gainsboro) == {:color_rgb, {0xDC, 0xDC, 0xDC}})
test ":ghost_white", do: assert(Color.to_rgb(:ghost_white) == {:color_rgb, {0xF8, 0xF8, 0xFF}})
test ":gold", do: assert(Color.to_rgb(:gold) == {:color_rgb, {0xFF, 0xD7, 0x00}})
test ":golden_rod", do: assert(Color.to_rgb(:golden_rod) == {:color_rgb, {0xDA, 0xA5, 0x20}})
test ":gray", do: assert(Color.to_rgb(:gray) == {:color_rgb, {0x80, 0x80, 0x80}})
test ":grey", do: assert(Color.to_rgb(:grey) == {:color_rgb, {0x80, 0x80, 0x80}})
test ":green", do: assert(Color.to_rgb(:green) == {:color_rgb, {0x00, 0x80, 0x00}})
test ":green_yellow",
do: assert(Color.to_rgb(:green_yellow) == {:color_rgb, {0xAD, 0xFF, 0x2F}})
test ":honey_dew", do: assert(Color.to_rgb(:honey_dew) == {:color_rgb, {0xF0, 0xFF, 0xF0}})
test ":hot_pink", do: assert(Color.to_rgb(:hot_pink) == {:color_rgb, {0xFF, 0x69, 0xB4}})
test ":indian_red", do: assert(Color.to_rgb(:indian_red) == {:color_rgb, {0xCD, 0x5C, 0x5C}})
test ":indigo", do: assert(Color.to_rgb(:indigo) == {:color_rgb, {0x4B, 0x00, 0x82}})
test ":ivory", do: assert(Color.to_rgb(:ivory) == {:color_rgb, {0xFF, 0xFF, 0xF0}})
test ":khaki", do: assert(Color.to_rgb(:khaki) == {:color_rgb, {0xF0, 0xE6, 0x8C}})
test ":lavender", do: assert(Color.to_rgb(:lavender) == {:color_rgb, {0xE6, 0xE6, 0xFA}})
test ":lavender_blush",
do: assert(Color.to_rgb(:lavender_blush) == {:color_rgb, {0xFF, 0xF0, 0xF5}})
test ":lawn_green", do: assert(Color.to_rgb(:lawn_green) == {:color_rgb, {0x7C, 0xFC, 0x00}})
test ":lemon_chiffon",
do: assert(Color.to_rgb(:lemon_chiffon) == {:color_rgb, {0xFF, 0xFA, 0xCD}})
test ":light_blue", do: assert(Color.to_rgb(:light_blue) == {:color_rgb, {0xAD, 0xD8, 0xE6}})
test ":light_coral", do: assert(Color.to_rgb(:light_coral) == {:color_rgb, {0xF0, 0x80, 0x80}})
test ":light_cyan", do: assert(Color.to_rgb(:light_cyan) == {:color_rgb, {0xE0, 0xFF, 0xFF}})
test ":light_golden_rod",
do: assert(Color.to_rgb(:light_golden_rod) == {:color_rgb, {0xFA, 0xFA, 0xD2}})
test ":light_golden_rod_yellow",
do: assert(Color.to_rgb(:light_golden_rod_yellow) == {:color_rgb, {0xFA, 0xFA, 0xD2}})
test ":light_gray", do: assert(Color.to_rgb(:light_gray) == {:color_rgb, {0xD3, 0xD3, 0xD3}})
test ":light_grey", do: assert(Color.to_rgb(:light_grey) == {:color_rgb, {0xD3, 0xD3, 0xD3}})
test ":light_green", do: assert(Color.to_rgb(:light_green) == {:color_rgb, {0x90, 0xEE, 0x90}})
test ":light_pink", do: assert(Color.to_rgb(:light_pink) == {:color_rgb, {0xFF, 0xB6, 0xC1}})
test ":light_salmon",
do: assert(Color.to_rgb(:light_salmon) == {:color_rgb, {0xFF, 0xA0, 0x7A}})
test ":light_sea_green",
do: assert(Color.to_rgb(:light_sea_green) == {:color_rgb, {0x20, 0xB2, 0xAA}})
test ":light_sky_blue",
do: assert(Color.to_rgb(:light_sky_blue) == {:color_rgb, {0x87, 0xCE, 0xFA}})
test ":light_slate_gray",
do: assert(Color.to_rgb(:light_slate_gray) == {:color_rgb, {0x77, 0x88, 0x99}})
test ":light_steel_blue",
do: assert(Color.to_rgb(:light_steel_blue) == {:color_rgb, {0xB0, 0xC4, 0xDE}})
test ":light_yellow",
do: assert(Color.to_rgb(:light_yellow) == {:color_rgb, {0xFF, 0xFF, 0xE0}})
test ":lime", do: assert(Color.to_rgb(:lime) == {:color_rgb, {0x00, 0xFF, 0x00}})
test ":lime_green", do: assert(Color.to_rgb(:lime_green) == {:color_rgb, {0x32, 0xCD, 0x32}})
test ":linen", do: assert(Color.to_rgb(:linen) == {:color_rgb, {0xFA, 0xF0, 0xE6}})
test ":magenta", do: assert(Color.to_rgb(:magenta) == {:color_rgb, {0xFF, 0x00, 0xFF}})
test ":maroon", do: assert(Color.to_rgb(:maroon) == {:color_rgb, {0x80, 0x00, 0x00}})
test ":medium_aqua_marine",
do: assert(Color.to_rgb(:medium_aqua_marine) == {:color_rgb, {0x66, 0xCD, 0xAA}})
test ":medium_blue", do: assert(Color.to_rgb(:medium_blue) == {:color_rgb, {0x00, 0x00, 0xCD}})
test ":medium_orchid",
do: assert(Color.to_rgb(:medium_orchid) == {:color_rgb, {0xBA, 0x55, 0xD3}})
test ":medium_purple",
do: assert(Color.to_rgb(:medium_purple) == {:color_rgb, {0x93, 0x70, 0xDB}})
test ":medium_sea_green",
do: assert(Color.to_rgb(:medium_sea_green) == {:color_rgb, {0x3C, 0xB3, 0x71}})
test ":medium_slate_blue",
do: assert(Color.to_rgb(:medium_slate_blue) == {:color_rgb, {0x7B, 0x68, 0xEE}})
test ":medium_spring_green",
do: assert(Color.to_rgb(:medium_spring_green) == {:color_rgb, {0x00, 0xFA, 0x9A}})
test ":medium_turquoise",
do: assert(Color.to_rgb(:medium_turquoise) == {:color_rgb, {0x48, 0xD1, 0xCC}})
test ":medium_violet_red",
do: assert(Color.to_rgb(:medium_violet_red) == {:color_rgb, {0xC7, 0x15, 0x85}})
test ":midnight_blue",
do: assert(Color.to_rgb(:midnight_blue) == {:color_rgb, {0x19, 0x19, 0x70}})
test ":mint_cream", do: assert(Color.to_rgb(:mint_cream) == {:color_rgb, {0xF5, 0xFF, 0xFA}})
test ":misty_rose", do: assert(Color.to_rgb(:misty_rose) == {:color_rgb, {0xFF, 0xE4, 0xE1}})
test ":moccasin", do: assert(Color.to_rgb(:moccasin) == {:color_rgb, {0xFF, 0xE4, 0xB5}})
test ":navajo_white",
do: assert(Color.to_rgb(:navajo_white) == {:color_rgb, {0xFF, 0xDE, 0xAD}})
test ":navy", do: assert(Color.to_rgb(:navy) == {:color_rgb, {0x00, 0x00, 0x80}})
test ":old_lace", do: assert(Color.to_rgb(:old_lace) == {:color_rgb, {0xFD, 0xF5, 0xE6}})
test ":olive", do: assert(Color.to_rgb(:olive) == {:color_rgb, {0x80, 0x80, 0x00}})
test ":olive_drab", do: assert(Color.to_rgb(:olive_drab) == {:color_rgb, {0x6B, 0x8E, 0x23}})
test ":orange", do: assert(Color.to_rgb(:orange) == {:color_rgb, {0xFF, 0xA5, 0x00}})
test ":orange_red", do: assert(Color.to_rgb(:orange_red) == {:color_rgb, {0xFF, 0x45, 0x00}})
test ":orchid", do: assert(Color.to_rgb(:orchid) == {:color_rgb, {0xDA, 0x70, 0xD6}})
test ":pale_golden_rod",
do: assert(Color.to_rgb(:pale_golden_rod) == {:color_rgb, {0xEE, 0xE8, 0xAA}})
test ":pale_green", do: assert(Color.to_rgb(:pale_green) == {:color_rgb, {0x98, 0xFB, 0x98}})
test ":pale_turquoise",
do: assert(Color.to_rgb(:pale_turquoise) == {:color_rgb, {0xAF, 0xEE, 0xEE}})
test ":pale_violet_red",
do: assert(Color.to_rgb(:pale_violet_red) == {:color_rgb, {0xDB, 0x70, 0x93}})
test ":papaya_whip", do: assert(Color.to_rgb(:papaya_whip) == {:color_rgb, {0xFF, 0xEF, 0xD5}})
test ":peach_puff", do: assert(Color.to_rgb(:peach_puff) == {:color_rgb, {0xFF, 0xDA, 0xB9}})
test ":peru", do: assert(Color.to_rgb(:peru) == {:color_rgb, {0xCD, 0x85, 0x3F}})
test ":pink", do: assert(Color.to_rgb(:pink) == {:color_rgb, {0xFF, 0xC0, 0xCB}})
test ":plum", do: assert(Color.to_rgb(:plum) == {:color_rgb, {0xDD, 0xA0, 0xDD}})
test ":powder_blue", do: assert(Color.to_rgb(:powder_blue) == {:color_rgb, {0xB0, 0xE0, 0xE6}})
test ":purple", do: assert(Color.to_rgb(:purple) == {:color_rgb, {0x80, 0x00, 0x80}})
test ":rebecca_purple",
do: assert(Color.to_rgb(:rebecca_purple) == {:color_rgb, {0x66, 0x33, 0x99}})
test ":red", do: assert(Color.to_rgb(:red) == {:color_rgb, {0xFF, 0x00, 0x00}})
test ":rosy_brown", do: assert(Color.to_rgb(:rosy_brown) == {:color_rgb, {0xBC, 0x8F, 0x8F}})
test ":royal_blue", do: assert(Color.to_rgb(:royal_blue) == {:color_rgb, {0x41, 0x69, 0xE1}})
test ":saddle_brown",
do: assert(Color.to_rgb(:saddle_brown) == {:color_rgb, {0x8B, 0x45, 0x13}})
test ":salmon", do: assert(Color.to_rgb(:salmon) == {:color_rgb, {0xFA, 0x80, 0x72}})
test ":sandy_brown", do: assert(Color.to_rgb(:sandy_brown) == {:color_rgb, {0xF4, 0xA4, 0x60}})
test ":sea_green", do: assert(Color.to_rgb(:sea_green) == {:color_rgb, {0x2E, 0x8B, 0x57}})
test ":sea_shell", do: assert(Color.to_rgb(:sea_shell) == {:color_rgb, {0xFF, 0xF5, 0xEE}})
test ":sienna", do: assert(Color.to_rgb(:sienna) == {:color_rgb, {0xA0, 0x52, 0x2D}})
test ":silver", do: assert(Color.to_rgb(:silver) == {:color_rgb, {0xC0, 0xC0, 0xC0}})
test ":sky_blue", do: assert(Color.to_rgb(:sky_blue) == {:color_rgb, {0x87, 0xCE, 0xEB}})
test ":slate_blue", do: assert(Color.to_rgb(:slate_blue) == {:color_rgb, {0x6A, 0x5A, 0xCD}})
test ":slate_gray", do: assert(Color.to_rgb(:slate_gray) == {:color_rgb, {0x70, 0x80, 0x90}})
test ":slate_grey", do: assert(Color.to_rgb(:slate_grey) == {:color_rgb, {0x70, 0x80, 0x90}})
test ":snow", do: assert(Color.to_rgb(:snow) == {:color_rgb, {0xFF, 0xFA, 0xFA}})
test ":spring_green",
do: assert(Color.to_rgb(:spring_green) == {:color_rgb, {0x00, 0xFF, 0x7F}})
test ":steel_blue", do: assert(Color.to_rgb(:steel_blue) == {:color_rgb, {0x46, 0x82, 0xB4}})
test ":tan", do: assert(Color.to_rgb(:tan) == {:color_rgb, {0xD2, 0xB4, 0x8C}})
test ":teal", do: assert(Color.to_rgb(:teal) == {:color_rgb, {0x00, 0x80, 0x80}})
test ":thistle", do: assert(Color.to_rgb(:thistle) == {:color_rgb, {0xD8, 0xBF, 0xD8}})
test ":tomato", do: assert(Color.to_rgb(:tomato) == {:color_rgb, {0xFF, 0x63, 0x47}})
test ":turquoise", do: assert(Color.to_rgb(:turquoise) == {:color_rgb, {0x40, 0xE0, 0xD0}})
test ":violet", do: assert(Color.to_rgb(:violet) == {:color_rgb, {0xEE, 0x82, 0xEE}})
test ":wheat", do: assert(Color.to_rgb(:wheat) == {:color_rgb, {0xF5, 0xDE, 0xB3}})
test ":white", do: assert(Color.to_rgb(:white) == {:color_rgb, {0xFF, 0xFF, 0xFF}})
test ":white_smoke", do: assert(Color.to_rgb(:white_smoke) == {:color_rgb, {0xF5, 0xF5, 0xF5}})
test ":yellow", do: assert(Color.to_rgb(:yellow) == {:color_rgb, {0xFF, 0xFF, 0x00}})
test ":yellow_green",
do: assert(Color.to_rgb(:yellow_green) == {:color_rgb, {0x9A, 0xCD, 0x32}})
test ":clear", do: assert(Color.to_rgba(:clear) == {:color_rgba, {0, 0, 0, 0}})
test ":transparent", do: assert(Color.to_rgba(:transparent) == {:color_rgba, {0, 0, 0, 0}})
end
| 47.150327 | 97 | 0.633352 |
1cf0d051c9b12b6963764c6d35f1b43d5994a675 | 2,243 | exs | Elixir | config/prod.exs | davepersing/blog_engine | 8605732f72c169d6cc2c2261a9acef0de7769403 | [
"MIT"
] | null | null | null | config/prod.exs | davepersing/blog_engine | 8605732f72c169d6cc2c2261a9acef0de7769403 | [
"MIT"
] | null | null | null | config/prod.exs | davepersing/blog_engine | 8605732f72c169d6cc2c2261a9acef0de7769403 | [
"MIT"
] | null | null | null | use Mix.Config
# For production, we configure the host to read the PORT
# from the system environment. Therefore, you will need
# to set PORT=80 before running your server.
#
# You should also configure the url host to something
# meaningful, we use this information when generating URLs.
#
# Finally, we also include the path to a manifest
# containing the digested version of static files. This
# manifest is generated by the mix phoenix.digest task
# which you typically run after static files are built.
config :blog_engine, BlogEngine.Endpoint,
http: [port: {:system, "PORT"}],
url: [host: "example.com", port: 80],
cache_static_manifest: "priv/static/manifest.json"
# Do not print debug messages in production
config :logger, level: :info
config :comeonin, bcrypt_log_rounds: 14
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :blog_engine, BlogEngine.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
# config :blog_engine, BlogEngine.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :blog_engine, BlogEngine.Endpoint, server: true
#
# You will also need to set the application root to `.` in order
# for the new static assets to be served after a hot upgrade:
#
# config :blog_engine, BlogEngine.Endpoint, root: "."
# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"
| 33.477612 | 67 | 0.71868 |
1cf1172ff5c968be2f003c479e1b08bb6eb72ac2 | 1,068 | ex | Elixir | lib/silver_vine/token/oauth2/refresh_token.ex | ritou/elixir-silver-vine-token | b55dde5691a9f96d32570fa329a664dd1652f1de | [
"MIT"
] | null | null | null | lib/silver_vine/token/oauth2/refresh_token.ex | ritou/elixir-silver-vine-token | b55dde5691a9f96d32570fa329a664dd1652f1de | [
"MIT"
] | 1 | 2021-10-19T10:41:22.000Z | 2021-10-19T10:41:22.000Z | lib/silver_vine/token/oauth2/refresh_token.ex | ritou/elixir-silver-vine-token | b55dde5691a9f96d32570fa329a664dd1652f1de | [
"MIT"
defmodule SilverVine.Token.OAuth2.RefreshToken do
  @moduledoc """
  A module to represent an OAuth 2.0 Refresh Token using JWT.
  """

  @behaviour SilverVine.Token.Base

  # JWT `typ` header value that marks a token as a refresh token.
  @jwt_header_typ "rt+jwt"

  alias SilverVine.Token
  alias KittenBlue.JWK

  @doc """
  Generates a token string (JWS) from the payload map and key information.

  NOTE: Validate the payload before calling this function.
  """
  @spec generate(payload :: map, key :: JWK.t()) ::
          {:ok, token :: String.t()} | {:error, term}
  def generate(
        %{
          "iss" => _,
          "exp" => _,
          "aud" => _,
          "sub" => _,
          "client_id" => _,
          "iat" => _,
          "auth_id" => _
        } = claims,
        %JWK{} = signing_key
      ) do
    Token.generate(signing_key, Token.put_jti(claims), @jwt_header_typ)
  end

  @doc """
  Verifies the signature and the `typ` header of a token.
  """
  @spec verify(token :: String.t(), keys :: list(JWK.t())) ::
          {:ok, payload :: map} | {:error, term}
  def verify(token, keys) do
    Token.verify(token, keys, @jwt_header_typ)
  end
end
| 26.04878 | 74 | 0.564607 |
1cf154ca3d643c0800206781db85c81636f7eb5f | 897 | ex | Elixir | apps/interface/lib/interface/ui/reset_network.ex | rosetta-home/example_fw | 7359bd0dec05dc2df73b49b815fb844af842098d | [
"Apache-2.0"
] | 67 | 2017-03-25T19:18:09.000Z | 2021-02-15T13:09:57.000Z | apps/interface/lib/interface/ui/reset_network.ex | rosetta-home/example_fw | 7359bd0dec05dc2df73b49b815fb844af842098d | [
"Apache-2.0"
] | 2 | 2018-08-21T20:12:58.000Z | 2020-04-29T23:06:53.000Z | apps/interface/lib/interface/ui/reset_network.ex | rosetta-home/example_fw | 7359bd0dec05dc2df73b49b815fb844af842098d | [
"Apache-2.0"
] | 15 | 2017-03-25T19:16:04.000Z | 2020-07-06T00:14:09.000Z | defmodule Interface.UI.ResetNetwork do
require Logger
alias Cicada.{NetworkManager}
# Per-connection handler state: the hostname of the incoming request.
defmodule State do
defstruct hostname: nil
end
# Cowboy HTTP handler entry point: record the request's hostname in the
# handler state for later callbacks.
def init({:tcp, :http}, req, _opts) do
  {hostname, req_after} = :cowboy_req.host(req)
  {:ok, req_after, %State{hostname: hostname}}
end
# Serves the WiFi reset page: clears stored credentials, rescans for
# networks and renders the selection form.
def handle(req, state) do
# Wipe any stored WiFi credentials, then rescan for visible networks.
:ok = NetworkManager.WiFi.delete_creds
networks = NetworkManager.WiFi.scan
# Render the network-selection template with the scan results.
st = EEx.eval_file(Path.join(:code.priv_dir(:interface), "network.html.eex"), [networks: networks])
# Headers disable caching so each visit triggers a fresh scan; CORS is
# wide open for the captive-portal style UI.
headers = [
{"cache-control", "no-cache"},
{"connection", "close"},
{"content-type", "text/html"},
{"expires", "Mon, 3 Jan 2000 12:34:56 GMT"},
{"pragma", "no-cache"},
{"Access-Control-Allow-Origin", "*"},
]
{:ok, req2} = :cowboy_req.reply(200, headers, st, req)
{:ok, req2, state}
end
# Nothing to clean up when the request finishes.
def terminate(_reason, _req, _state) do
  :ok
end
end
| 27.181818 | 103 | 0.610925 |
1cf1664713e077752c7cff36ac1c2d2910fdda4a | 621 | ex | Elixir | lib/matryoshka/server.ex | gravityblast/matryoshka | f31da33b3ffab626db4bdeab84b2300a02780dc6 | [
"MIT"
] | 2 | 2019-03-03T20:32:55.000Z | 2019-05-14T11:46:56.000Z | lib/matryoshka/server.ex | gravityblast/matryoshka | f31da33b3ffab626db4bdeab84b2300a02780dc6 | [
"MIT"
] | null | null | null | lib/matryoshka/server.ex | gravityblast/matryoshka | f31da33b3ffab626db4bdeab84b2300a02780dc6 | [
"MIT"
] | null | null | null | defmodule Matryoshka.Server do
use Plug.Router
# Request pipeline: route matching and query parsing first, a favicon
# short-circuit, then the image open/transform/serve stages.
plug :match
plug :fetch_query_params
plug :skip_favicon
plug Matryoshka.OpenImagePlug
plug Matryoshka.Transformations
plug Matryoshka.ServeImagePlug
plug :dispatch
# Short-circuits favicon requests with a 404 and halts the pipeline so
# they never reach the image-handling plugs below.
def skip_favicon(%Plug.Conn{request_path: "/favicon.ico"} = conn, _) do
  conn
  # Fixed typo in the response body: "no found" -> "not found".
  |> send_resp(404, "not found")
  |> halt
end

# All other requests pass through unchanged.
def skip_favicon(conn, _), do: conn
# Catch-all route: echoes the parsed request data back as HTML.
# NOTE(review): this exposes conn params/assigns to the client —
# presumably a development placeholder; confirm before production use.
get "/*path" do
body = """
params #{inspect conn.params} <br/>
query_params #{inspect conn.query_params} <br/>
assigns #{inspect conn.assigns} <br/>
"""
send_resp(conn, 200, body)
end
end
| 20.7 | 73 | 0.674718 |
1cf1694745aff9da25544f15be354f06cfe41632 | 6,422 | ex | Elixir | lib/oban/queue/executor.ex | iautom8things/oban | 5f1dfc277c2933fdc0dada812dbbca31c6d55fa0 | [
"Apache-2.0"
] | null | null | null | lib/oban/queue/executor.ex | iautom8things/oban | 5f1dfc277c2933fdc0dada812dbbca31c6d55fa0 | [
"Apache-2.0"
] | null | null | null | lib/oban/queue/executor.ex | iautom8things/oban | 5f1dfc277c2933fdc0dada812dbbca31c6d55fa0 | [
"Apache-2.0"
] | null | null | null | defmodule Oban.Queue.Executor do
@moduledoc false
alias Oban.{
Breaker,
Config,
CrashError,
Job,
PerformError,
Query,
Telemetry,
TimeoutError,
Worker
}
require Logger
# Result shapes produced by executor reporting.
@type success :: {:success, Job.t()}
@type failure :: {:failure, Job.t(), Worker.t(), atom(), term()}
# Executor state threaded through the whole run of a single job.
@type t :: %__MODULE__{
conf: Config.t(),
duration: pos_integer(),
job: Job.t(),
kind: any(),
meta: map(),
queue_time: integer(),
start_mono: integer(),
start_time: integer(),
stop_mono: integer(),
worker: Worker.t(),
safe: boolean(),
snooze: pos_integer(),
stacktrace: Exception.stacktrace(),
state: :unset | :discard | :failure | :success | :snoozed
}
# Only the config and job are required to build an executor; the rest is
# filled in as execution progresses.
@enforce_keys [:conf, :job]
defstruct [
:conf,
:error,
:job,
:meta,
:snooze,
:start_mono,
:start_time,
:stop_mono,
:worker,
# safe: rescue/catch worker crashes instead of letting them propagate.
safe: true,
duration: 0,
kind: :error,
queue_time: 0,
stacktrace: [],
state: :unset
]
@spec new(Config.t(), Job.t()) :: t()
# Builds a fresh executor for the given config and job, stamping both the
# monotonic and wall-clock start times.
def new(%Config{} = conf, %Job{} = job) do
  fields = [
    conf: conf,
    job: job,
    meta: event_metadata(conf, job),
    start_mono: System.monotonic_time(),
    start_time: System.system_time()
  ]

  struct!(__MODULE__, fields)
end

@spec put(t(), :safe, boolean()) :: t()
# Toggles safe-mode (crash trapping) on the executor.
def put(%__MODULE__{} = exec, :safe, flag) when is_boolean(flag) do
  %{exec | safe: flag}
end

@spec call(t()) :: :success | :failure
# Runs the full lifecycle: start telemetry, worker resolution, execution
# and timing, then reports the result (with retries around persistence).
def call(%__MODULE__{} = exec) do
  finished =
    exec
    |> record_started()
    |> resolve_worker()
    |> perform()
    |> record_finished()

  Breaker.with_retry(fn -> report_finished(finished).state end)
end

# Emits the job-start telemetry event and returns the executor unchanged.
def record_started(%__MODULE__{} = exec) do
  Telemetry.execute([:oban, :job, :start], %{system_time: exec.start_time}, exec.meta)

  exec
end
@spec resolve_worker(t()) :: t()
# Resolves the job's worker string to a module. In safe mode a resolution
# failure is recorded as a :failure state; in unsafe mode it raises.
def resolve_worker(%__MODULE__{} = exec) do
  case Worker.from_string(exec.job.worker) do
    {:ok, worker} ->
      %{exec | worker: worker}

    {:error, error} ->
      # `unless` is soft-deprecated; use the equivalent `if not`.
      if not exec.safe, do: raise(error)

      %{exec | state: :failure, error: error}
  end
end
@spec perform(t()) :: t()
# Executes the job when it is still :unset, dispatching on the worker's
# timeout/1 value; executors that already carry a result state (e.g. a
# failed worker resolution) pass through untouched via the second clause.
def perform(%__MODULE__{state: :unset} = exec) do
case exec.worker.timeout(exec.job) do
:infinity ->
perform_inline(exec)
timeout when is_integer(timeout) ->
perform_timed(exec, timeout)
end
end
def perform(%__MODULE__{} = exec), do: exec
@spec record_finished(t()) :: t()
# Captures timing after execution: `duration` is monotonic execution time,
# `queue_time` is how long the job sat between scheduling and attempt.
def record_finished(%__MODULE__{} = exec) do
stop_mono = System.monotonic_time()
duration = stop_mono - exec.start_mono
queue_time = DateTime.diff(exec.job.attempted_at, exec.job.scheduled_at, :nanosecond)
%{exec | duration: duration, queue_time: queue_time, stop_mono: stop_mono}
end
@spec report_finished(t()) :: t()
# Persists the outcome to the database and emits the matching telemetry
# event, then returns the executor unchanged for the caller.
def report_finished(%__MODULE__{} = exec) do
case exec.state do
:success ->
Query.complete_job(exec.conf, exec.job)
execute_stop(exec)
:failure ->
job = job_with_unsaved_error(exec)
Query.retry_job(exec.conf, job, backoff(exec.worker, job))
execute_exception(exec)
:snoozed ->
Query.snooze_job(exec.conf, exec.job, exec.snooze)
execute_stop(exec)
:discard ->
Query.discard_job(exec.conf, job_with_unsaved_error(exec))
execute_stop(exec)
end
exec
end
# Safe mode: run the job inside rescue/catch so worker crashes become a
# :failure state instead of taking down the calling process.
defp perform_inline(%{safe: true} = exec) do
perform_inline(%{exec | safe: false})
rescue
error ->
%{exec | state: :failure, error: error, stacktrace: __STACKTRACE__}
catch
kind, reason ->
# Throws/exits are wrapped in a CrashError.
# NOTE(review): exec.kind keeps its struct default rather than the
# caught `kind` — confirm that is intended.
error = CrashError.exception({kind, reason, __STACKTRACE__})
%{exec | state: :failure, error: error, stacktrace: __STACKTRACE__}
end
# Maps the worker's perform/1 return value onto the executor state.
defp perform_inline(%{worker: worker, job: job} = exec) do
case worker.perform(job) do
:ok ->
%{exec | state: :success}
{:ok, _result} ->
%{exec | state: :success}
:discard ->
%{exec | state: :discard, error: PerformError.exception({worker, :discard})}
{:discard, reason} ->
%{exec | state: :discard, error: PerformError.exception({worker, {:discard, reason}})}
{:error, reason} ->
%{exec | state: :failure, error: PerformError.exception({worker, {:error, reason}})}
{:snooze, seconds} ->
%{exec | state: :snoozed, snooze: seconds}
returned ->
# Unknown return shapes are tolerated as success but logged.
Logger.warn(fn ->
"""
Expected #{worker}.perform/1 to return:
- `:ok`
- `:discard`
- `{:ok, value}`
- `{:error, reason}`,
- `{:discard, reason}`
- `{:snooze, seconds}`
Instead received:
#{inspect(returned, pretty: true)}
The job will be considered a success.
"""
end)
%{exec | state: :success}
end
end
# Runs the job in a task and enforces the worker's timeout; an expired
# yield shuts the task down and records a TimeoutError failure.
defp perform_timed(exec, timeout) do
task = Task.async(fn -> perform_inline(exec) end)
case Task.yield(task, timeout) || Task.shutdown(task) do
{:ok, reply} ->
reply
nil ->
error = TimeoutError.exception({exec.worker, timeout})
%{exec | state: :failure, error: error}
end
end
# Delegates backoff calculation to the resolved worker, falling back to
# the default `Worker` implementation when resolution failed (nil worker).
defp backoff(nil, job), do: Worker.backoff(job)
defp backoff(worker, job), do: worker.backoff(job)

# Shared measurement map for the stop/exception telemetry events.
defp telemetry_measurements(exec) do
  %{duration: exec.duration, queue_time: exec.queue_time}
end

# Emits the job-stop telemetry event, tagging metadata with final state.
defp execute_stop(exec) do
  Telemetry.execute(
    [:oban, :job, :stop],
    telemetry_measurements(exec),
    Map.put(exec.meta, :state, exec.state)
  )
end

# Emits the job-exception telemetry event with the recorded error details.
defp execute_exception(exec) do
  extra = %{
    kind: exec.kind,
    error: exec.error,
    stacktrace: exec.stacktrace,
    state: exec.state
  }

  Telemetry.execute(
    [:oban, :job, :exception],
    telemetry_measurements(exec),
    Map.merge(exec.meta, extra)
  )
end

# Builds the telemetry metadata shared by all events for this job.
defp event_metadata(conf, job) do
  job
  |> Map.take([:id, :args, :queue, :worker, :attempt, :max_attempts, :tags])
  |> Map.merge(%{job: job, prefix: conf.prefix, config: conf})
end

# Attaches the in-memory error details to the job before persisting it.
defp job_with_unsaved_error(%__MODULE__{} = exec) do
  error_info = %{kind: exec.kind, reason: exec.error, stacktrace: exec.stacktrace}

  %{exec.job | unsaved_error: error_info}
end
end
| 24.325758 | 94 | 0.585176 |
1cf181dd419cd00dfe7397a3d3abd963881a9525 | 1,769 | ex | Elixir | clients/ad_sense_host/lib/google_api/ad_sense_host/v41/model/ad_unit_content_ads_settings_backup_option.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/ad_sense_host/lib/google_api/ad_sense_host/v41/model/ad_unit_content_ads_settings_backup_option.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/ad_sense_host/lib/google_api/ad_sense_host/v41/model/ad_unit_content_ads_settings_backup_option.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.AdSenseHost.V41.Model.AdUnitContentAdsSettingsBackupOption do
  @moduledoc """
  The backup option to be used in instances where no ad is available.

  ## Attributes

  - color (String): Color to use when type is set to COLOR. These are represented as six hexadecimal characters, similar to HTML color codes, but without the leading hash. Defaults to: `null`.
  - type (String): Type of the backup option. Possible values are BLANK, COLOR and URL. Defaults to: `null`.
  - url (String): URL to use when type is set to URL. Defaults to: `null`.
  """

  # All fields default to nil; the quoted atom syntax was unnecessary.
  defstruct [:color, :type, :url]
end
# Decoding leaves the already-decoded map untouched.
defimpl Poison.Decoder, for: GoogleApi.AdSenseHost.V41.Model.AdUnitContentAdsSettingsBackupOption do
  def decode(value, _options), do: value
end

# Encoding drops nil fields via the shared serializer helper.
defimpl Poison.Encoder, for: GoogleApi.AdSenseHost.V41.Model.AdUnitContentAdsSettingsBackupOption do
  def encode(value, options) do
    GoogleApi.AdSenseHost.V41.Deserializer.serialize_non_nil(value, options)
  end
end
| 35.38 | 192 | 0.752968 |
1cf1a03641c0d75cbe53771c2bcf8219739b5a2f | 1,129 | ex | Elixir | lib/stixex/object/intrusion_set.ex | FloatingGhost/stixex | c3b012d0e8596fde6bd512f856f05b0187bb5273 | [
"MIT"
] | 1 | 2019-05-07T22:44:45.000Z | 2019-05-07T22:44:45.000Z | lib/stixex/object/intrusion_set.ex | FloatingGhost/stixex | c3b012d0e8596fde6bd512f856f05b0187bb5273 | [
"MIT"
] | null | null | null | lib/stixex/object/intrusion_set.ex | FloatingGhost/stixex | c3b012d0e8596fde6bd512f856f05b0187bb5273 | [
"MIT"
] | null | null | null | defmodule StixEx.Object.IntrusionSet do
use StixEx.Object, type_name: "intrusion-set"
# Only :name is mandatory for this object.
@required_fields [:name]
embedded_schema do
common_fields()
field(:name, :string)
field(:description, :string)
field(:first_seen, :utc_datetime)
field(:last_seen, :utc_datetime)
field(:aliases, {:array, :string})
field(:goals, {:array, :string})
field(:resource_level, :string)
field(:primary_motivation, :string)
field(:secondary_motivations, {:array, :string})
end
common_functions()
# Builds a changeset from external params and validates the open-vocab
# fields against their STIX vocabularies.
# NOTE(review): :aliases is declared in the schema but missing from the
# cast list below, so it can never be set from params — confirm whether
# that is intentional.
def changeset(struct, params) do
struct
|> cast_common(params)
|> cast(params, [
:name,
:description,
:first_seen,
:last_seen,
:goals,
:resource_level,
:primary_motivation,
:secondary_motivations
])
|> validate_required(@required_fields)
|> validate_change(:resource_level, validate_values_in_vocab("attack-resource-level-ov"))
|> validate_change(:primary_motivation, validate_values_in_vocab("attack-motivation-ov"))
|> validate_change(:secondary_motivations, validate_values_in_vocab("attack-motivation-ov"))
end
end
| 28.225 | 96 | 0.690877 |
1cf1a646a500e2a750a4bc4c5c5adc56d797c448 | 4,660 | ex | Elixir | lib/pixie.ex | rdalin82/pixie | add50e2bd7fbd807c7b82cd10a2123828be4c58f | [
"MIT"
] | null | null | null | lib/pixie.ex | rdalin82/pixie | add50e2bd7fbd807c7b82cd10a2123828be4c58f | [
"MIT"
] | null | null | null | lib/pixie.ex | rdalin82/pixie | add50e2bd7fbd807c7b82cd10a2123828be4c58f | [
"MIT"
] | null | null | null | defmodule Pixie do
use Application
@default_timeout 25_000 # 25 seconds.
# @default_transports ~w| long-polling cross-origin-long-polling callback-polling websocket eventsource |
@default_transports ~w| long-polling cross-origin-long-polling callback-polling websocket |
@default_backend [name: :ETS]
@bayeux_version "1.0"
@moduledoc """
This module defines sensible defaults for all user configurable options, and
provides a few helper functions, such as `publish` and `subscribe`.
"""
@doc """
Used to start the Pixie application by Mix.
"""
def start(_,_), do: start
def start do
Pixie.Supervisor.start_link
end
@doc """
Returns the currently running Pixie version.
"""
def version do
{:ok, version} = :application.get_key :pixie, :vsn
to_string version
end
@doc """
Returns the Bayeux version which Pixie implements. Currently `#{inspect @bayeux_version}`
"""
def bayeux_version do
@bayeux_version
end
@doc """
Returns configured timeout in milliseconds.
Defaults to `#{@default_timeout}` if nothing is configured.
This value is used by Pixie to decide how long to wait between connect
responses, and various multiples are used for client expiry timeouts,
etc.
"""
def timeout do
Application.get_env(:pixie, :timeout, @default_timeout)
end
@doc """
Returns either the configured backend options, or `#{inspect @default_backend}`.
"""
def backend_options do
case Application.get_env(:pixie, :backend) do
[] -> @default_backend
opts when is_list(opts) -> opts
_ -> @default_backend
end
end
@doc """
The Bayeux protocol is undecided as to whether subscription requests should
be responded to immediately, or can wait until either the next connect
timeout, or the next message arrives for delivery to the client.
By default Pixie waits to send subscribe requests, however if you have
client's expecting an immediate response to subscriptions you can
turn this on.
An example of why you may want to send subscription responses immediately:
```javascript
client = new Faye.Client("http://my.chat.server/pixie");
client.subscribe("/foyer").then(function() {
client.publish("/foyer", {message: "New user joined channel #foyer"})
}, function(err) {
alert("Unable to join #foyer: " + err)
});
```
See [Faye's documentation](http://faye.jcoglan.com/browser/subscribing.html)
for more information.
"""
def subscribe_immediately? do
Application.get_env(:pixie, :subscribe_immediately, false)
end
@doc """
Publish a `Pixie.Message.Publish`.
"""
def publish %Pixie.Message.Publish{}=message do
Pixie.Backend.publish message
end
@doc """
Publish an arbitrary map. This converts the map to a `Pixie.Message.Publish`
struct.
"""
# NOTE: clause order matters — the %Pixie.Message.Publish{} clause above
# must stay before this bare-map clause, since a struct also matches %{}.
def publish %{}=message do
publish Pixie.Message.Publish.init(message)
end
@doc """
Publish a message to the specified channel. This saves you from having to
build the `Pixie.Message.Publish` yourself, you can simply specify the
channel to publish to and an arbitrary map for the message's `data` property.
"""
def publish channel, %{}=data do
publish %{channel: channel, data: data}
end
@doc """
Subscribe to a channel, invoking the provided function for every message.

The function must take two arguments:
- A message struct.
- The subscription pid.

```elixir
{:ok, sub} = Pixie.subscribe "/my_awesome_channel", fn(message,_)->
  IO.inspect message
end
```
"""
def subscribe(channel_name, callback) do
  Pixie.LocalSubscription.subscribe(channel_name, callback)
end

@doc """
Cancel a local subscription previously created with `subscribe/2`.

Example:

```elixir
Pixie.subscribe "/only_one_please", fn(message,sub)->
  IO.inspect message
  Pixie.unsubscribe sub
end
```
"""
def unsubscribe(pid) do
  Pixie.LocalSubscription.unsubscribe(pid)
end
@doc """
Returns a list of the configured extensions.
"""
def configured_extensions do
Application.get_env(:pixie, :extensions, [])
end
@doc """
Returns a list of configured event monitors for use by `Pixie.Monitor`.
"""
def configured_monitors do
Application.get_env(:pixie, :monitors, [])
end
@doc """
Returns a list of the currently enabled transports.
This can be configured with:
```elixir
config :pixie, :enabled_transports, ~w| long-polling websocket |
```
Defaults to `#{inspect @default_transports}`.
"""
# NOTE(review): HashSet is deprecated in favour of MapSet; switching would
# change the struct type returned to callers, so it is only flagged here.
def enabled_transports do
Enum.into Application.get_env(:pixie, :enabled_transports, @default_transports), HashSet.new
end
end
| 26.936416 | 107 | 0.696137 |
1cf1b2102994bf382ebb12eda032af05cf073675 | 67 | exs | Elixir | test/cineplex_test.exs | upmaru/cineplex | 7d1d516d3e3d3683b2ad4425b61517a8f556f721 | [
"MIT"
] | null | null | null | test/cineplex_test.exs | upmaru/cineplex | 7d1d516d3e3d3683b2ad4425b61517a8f556f721 | [
"MIT"
] | null | null | null | test/cineplex_test.exs | upmaru/cineplex | 7d1d516d3e3d3683b2ad4425b61517a8f556f721 | [
"MIT"
] | null | null | null | defmodule CineplexTest do
# Minimal test module: runs the doctest examples embedded in the
# Cineplex module's documentation.
use ExUnit.Case
doctest Cineplex
end
| 13.4 | 25 | 0.80597 |
1cf1c88c8b5c739661ab7c348ea95ead3af3272a | 357 | ex | Elixir | day25/day25.ex | kjelloh/advent_of_code_2021 | 656c26a418a4b379ef48a45ac6ffd978c17a744f | [
"CC0-1.0"
] | null | null | null | day25/day25.ex | kjelloh/advent_of_code_2021 | 656c26a418a4b379ef48a45ac6ffd978c17a744f | [
"CC0-1.0"
] | null | null | null | day25/day25.ex | kjelloh/advent_of_code_2021 | 656c26a418a4b379ef48a45ac6ffd978c17a744f | [
"CC0-1.0"
] | null | null | null | Initial state:
...>...
.......
......>
v.....>
......>
.......
..vvv..
After 1 step:
..vv>..
.......
>......
v.....>
>......
.......
....v..
After 2 steps:
....v>.
..vv...
.>.....
......>
v>.....
.......
.......
After 3 steps:
......>
..v.v..
..>v...
>......
..>....
v......
.......
After 4 steps:
>......
..v....
..>.v..
.>.v...
...>...
.......
v...... | 8.113636 | 14 | 0.210084 |
1cf20209cf21731372ad47802b0c8803ff1c13bc | 1,250 | ex | Elixir | phoenix/lib/hello_web/endpoint.ex | JesterPrince/docker-web-framework-examples | eb30151283f7236a9b90791c4a1f3cee981aab4a | [
"MIT"
] | 216 | 2018-06-14T19:53:34.000Z | 2022-03-10T20:27:52.000Z | phoenix/lib/hello_web/endpoint.ex | JesterPrince/docker-web-framework-examples | eb30151283f7236a9b90791c4a1f3cee981aab4a | [
"MIT"
] | 10 | 2018-06-15T00:19:39.000Z | 2021-04-15T21:39:44.000Z | phoenix/lib/hello_web/endpoint.ex | JesterPrince/docker-web-framework-examples | eb30151283f7236a9b90791c4a1f3cee981aab4a | [
"MIT"
] | 46 | 2018-06-20T03:12:42.000Z | 2021-10-14T03:27:34.000Z | defmodule HelloWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :hello
# User socket: WebSocket transport only; long-polling fallback disabled.
socket "/socket", HelloWeb.UserSocket,
websocket: true,
longpoll: false
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/",
from: :hello,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug Plug.Logger
# Parses request bodies; JSON decoding uses the library Phoenix is
# configured with via Phoenix.json_library/0.
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
plug Plug.MethodOverride
plug Plug.Head
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
plug Plug.Session,
store: :cookie,
key: "_hello_key",
signing_salt: "Jwt1wXF6"
# All remaining requests are handled by the router.
plug HelloWeb.Router
end
| 26.595745 | 69 | 0.7064 |
1cf2057667d24072d22c2edd2c568b3ab6fd4f4f | 399 | exs | Elixir | priv/repo/migrations/20181219010630_create_celestial_bodies.exs | CraigCottingham/ksp-maps | ffd81c5e667cf6ca7770e717c736e51489c0c3d3 | [
"Apache-2.0"
] | 6 | 2019-01-25T12:07:43.000Z | 2021-09-09T20:31:39.000Z | priv/repo/migrations/20181219010630_create_celestial_bodies.exs | CraigCottingham/ksp-maps | ffd81c5e667cf6ca7770e717c736e51489c0c3d3 | [
"Apache-2.0"
] | 144 | 2019-01-26T02:20:07.000Z | 2021-07-21T06:28:23.000Z | priv/repo/migrations/20181219010630_create_celestial_bodies.exs | CraigCottingham/ksp-maps | ffd81c5e667cf6ca7770e717c736e51489c0c3d3 | [
"Apache-2.0"
] | 2 | 2019-07-31T21:11:00.000Z | 2020-04-15T19:51:14.000Z | defmodule KerbalMaps.Repo.Migrations.CreateCelestialBodies do
@moduledoc false
use Ecto.Migration
# Creates the self-referential celestial_bodies table: each body may point
# at a parent body via parent_id; top-level bodies have a NULL parent_id.
def change do
create table(:celestial_bodies) do
add :name, :text, null: false
add :parent_id, references(:celestial_bodies), null: true
timestamps()
end
# Names are unique; parent_id is indexed for child lookups.
create unique_index(:celestial_bodies, [:name])
create index(:celestial_bodies, [:parent_id])
end
end
| 23.470588 | 63 | 0.714286 |
1cf2171944dfa05ad1be96e447aff7320289b739 | 1,768 | ex | Elixir | clients/drive/lib/google_api/drive/v3/model/file_image_media_metadata_location.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/drive/lib/google_api/drive/v3/model/file_image_media_metadata_location.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/drive/lib/google_api/drive/v3/model/file_image_media_metadata_location.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Drive.V3.Model.FileImageMediaMetadataLocation do
@moduledoc """
Geographic location information stored in the image.
## Attributes
* `altitude` (*type:* `float()`, *default:* `nil`) - The altitude stored in the image.
* `latitude` (*type:* `float()`, *default:* `nil`) - The latitude stored in the image.
* `longitude` (*type:* `float()`, *default:* `nil`) - The longitude stored in the image.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:altitude => float(),
:latitude => float(),
:longitude => float()
}
# field/1 comes from GoogleApi.Gax.ModelBase and declares the struct keys
# plus JSON (de)serialization hooks.
field(:altitude)
field(:latitude)
field(:longitude)
end
# Decoding delegates to the model's generated decode/2.
defimpl Poison.Decoder, for: GoogleApi.Drive.V3.Model.FileImageMediaMetadataLocation do
  def decode(value, options) do
    GoogleApi.Drive.V3.Model.FileImageMediaMetadataLocation.decode(value, options)
  end
end

# Encoding uses the shared Gax model serializer.
defimpl Poison.Encoder, for: GoogleApi.Drive.V3.Model.FileImageMediaMetadataLocation do
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 33.358491 | 92 | 0.717195 |
1cf2846f9cdb3670957b68f3ca1c7f014a068bf1 | 1,671 | ex | Elixir | lib/stellar/base/memo.ex | turbo-play/elixir-stellar-sdk | ed0711d69493ed2723a1197120667a7fa2c97f9f | [
"MIT"
] | 1 | 2019-12-19T14:12:20.000Z | 2019-12-19T14:12:20.000Z | lib/stellar/base/memo.ex | turbo-play/elixir-stellar-sdk | ed0711d69493ed2723a1197120667a7fa2c97f9f | [
"MIT"
] | null | null | null | lib/stellar/base/memo.ex | turbo-play/elixir-stellar-sdk | ed0711d69493ed2723a1197120667a7fa2c97f9f | [
"MIT"
defmodule Stellar.Base.Memo do
  @moduledoc """
  Represents a Stellar transaction memo (none, id, text, hash or return)
  and converts it to and from its XDR form.
  """

  defstruct type: nil, value: nil

  # -- Constructors ----------------------------------------------------------

  def new(:none), do: none()
  def new(_), do: {:error, "Invalid memo type"}

  def new(:id, id), do: id(id)
  def new(:text, text), do: text(text)
  def new(:hash, hash), do: hash(hash)
  def new(:return, hash), do: return(hash)
  def new(_, _), do: {:error, "Invalid memo type"}

  def none, do: %__MODULE__{type: :none}

  # Ids must be non-negative integers.
  def id(id) when is_integer(id) and id >= 0, do: %__MODULE__{type: :id, value: id}
  def id(_), do: id()
  def id, do: {:error, "Expects an integer >= 0"}

  # Text memos are capped at 28 bytes per the Stellar protocol.
  def text(text) when is_binary(text) and byte_size(text) <= 28,
    do: %__MODULE__{type: :text, value: text}

  def text(_), do: text()
  def text, do: {:error, "Expects string, array or buffer, max 28 bytes"}

  def hash(hash), do: %__MODULE__{type: :hash, value: hash}

  def return(hash), do: %__MODULE__{type: :return, value: hash}

  # -- XDR conversion --------------------------------------------------------

  def to_xdr(%__MODULE__{type: type, value: value}) do
    xdr_params =
      case type do
        :none -> {:MEMO_NONE, nil}
        :id -> {:MEMO_ID, value}
        :text -> {:MEMO_TEXT, value}
        :hash -> {:MEMO_HASH, value}
        :return -> {:MEMO_RETURN, value}
      end

    # Unwrap a successful result; pass any error tuple through unchanged.
    case Stellar.XDR.Types.Transaction.Memo.new(xdr_params) do
      {:ok, result} -> result
      err -> err
    end
  end

  def to_xdr(nil), do: nil

  def from_xdr({:MEMO_NONE, nil}), do: none()
  def from_xdr({:MEMO_ID, id}), do: id(id)
  def from_xdr({:MEMO_TEXT, text}), do: text(text)
  def from_xdr({:MEMO_HASH, hash}), do: hash(hash)
  def from_xdr({:MEMO_RETURN, hash}), do: return(hash)
  def from_xdr(_), do: {:error, ""}
end
| 26.109375 | 80 | 0.605625 |
1cf2f5ae8a069c987fdf3368430d5bb4e400855b | 2,341 | exs | Elixir | config/prod.exs | cogini/elixir-deploy-template | eebe9e335ebbc54ef9388faf50c6fd64936a85aa | [
"MIT"
] | 82 | 2018-01-08T16:57:13.000Z | 2021-12-25T07:34:21.000Z | config/prod.exs | cogini/elixir-deploy-template | eebe9e335ebbc54ef9388faf50c6fd64936a85aa | [
"MIT"
] | 8 | 2018-05-21T10:31:27.000Z | 2018-11-25T07:17:56.000Z | config/prod.exs | cogini/elixir-deploy-template | eebe9e335ebbc54ef9388faf50c6fd64936a85aa | [
"MIT"
] | 15 | 2018-05-21T10:18:16.000Z | 2021-03-30T17:14:40.000Z | use Mix.Config
# For production, we often load configuration from external
# sources, such as your system environment. For this reason,
# you won't find the :http configuration below, but set inside
# DeployTemplateWeb.Endpoint.init/2 when load_from_system_env is
# true. Any dynamic configuration should be done there.
#
# Don't forget to configure the url host to something meaningful,
# Phoenix uses this information when generating URLs.
#
# Finally, we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the mix phx.digest task
# which you typically run after static files are built.
config :deploy_template, DeployTemplateWeb.Endpoint,
load_from_system_env: true,
url: [host: "example.com", port: 80],
cache_static_manifest: "priv/static/cache_manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :deploy_template, DeployTemplateWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [:inet6,
# port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
# config :deploy_template, DeployTemplateWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
#
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
config :phoenix, :serve_endpoints, true
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :deploy_template, DeployTemplateWeb.Endpoint, server: true
#
# Graceful-shutdown flag: when flag_file appears on disk the app shuts
# down; the file is polled every check_delay milliseconds.
config :shutdown_flag,
flag_file: "/var/tmp/deploy/deploy-template/shutdown.flag",
check_delay: 10_000
# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"
| 33.927536 | 71 | 0.733874 |
1cf31a37b014a2bc97fe4b90d0136530670609e1 | 7,881 | exs | Elixir | test/text_delta/composition_test.exs | document-delta/text_delta | a37ccbcabf83c845a1040dd4fb4de9f45382aaf0 | [
"MIT"
] | 48 | 2017-05-29T19:39:42.000Z | 2022-03-28T15:53:29.000Z | test/text_delta/composition_test.exs | document-delta/text_delta | a37ccbcabf83c845a1040dd4fb4de9f45382aaf0 | [
"MIT"
] | 1 | 2017-05-26T15:03:30.000Z | 2017-05-26T15:37:16.000Z | test/text_delta/composition_test.exs | document-delta/text_delta | a37ccbcabf83c845a1040dd4fb4de9f45382aaf0 | [
"MIT"
] | 6 | 2017-11-30T16:54:27.000Z | 2021-05-19T10:27:12.000Z | defmodule TextDelta.CompositionTest do
use ExUnit.Case
use EQC.ExUnit
import TextDelta.Generators
doctest TextDelta.Composition
property "(a + b) + c = a + (b + c)" do
forall doc <- document() do
forall delta_a <- document_delta(doc) do
doc_a = TextDelta.compose(doc, delta_a)
forall delta_b <- document_delta(doc_a) do
doc_b = TextDelta.compose(doc_a, delta_b)
delta_c = TextDelta.compose(delta_a, delta_b)
doc_c = TextDelta.compose(doc, delta_c)
ensure(doc_b == doc_c)
end
end
end
end
describe "compose" do
test "insert with insert" do
a =
TextDelta.new()
|> TextDelta.insert("A")
b =
TextDelta.new()
|> TextDelta.insert("B")
composition =
TextDelta.new()
|> TextDelta.insert("B")
|> TextDelta.insert("A")
assert TextDelta.compose(a, b) == composition
end
test "insert with retain" do
a =
TextDelta.new()
|> TextDelta.insert("A")
b =
TextDelta.new()
|> TextDelta.retain(1, %{bold: true, color: "red", font: nil})
composition =
TextDelta.new()
|> TextDelta.insert("A", %{bold: true, color: "red"})
assert TextDelta.compose(a, b) == composition
end
test "insert with delete" do
a =
TextDelta.new()
|> TextDelta.insert("A")
b =
TextDelta.new()
|> TextDelta.delete(1)
composition = TextDelta.new()
assert TextDelta.compose(a, b) == composition
end
test "delete with insert" do
a =
TextDelta.new()
|> TextDelta.delete(1)
b =
TextDelta.new()
|> TextDelta.insert("B")
composition =
TextDelta.new()
|> TextDelta.insert("B")
|> TextDelta.delete(1)
assert TextDelta.compose(a, b) == composition
end
test "delete with retain" do
a =
TextDelta.new()
|> TextDelta.delete(1)
b =
TextDelta.new()
|> TextDelta.retain(1, %{bold: true, color: "red"})
composition =
TextDelta.new()
|> TextDelta.delete(1)
|> TextDelta.retain(1, %{bold: true, color: "red"})
assert TextDelta.compose(a, b) == composition
end
test "delete with larger retain" do
a =
TextDelta.new()
|> TextDelta.delete(1)
b =
TextDelta.new()
|> TextDelta.retain(2)
composition =
TextDelta.new()
|> TextDelta.delete(1)
assert TextDelta.compose(a, b) == composition
end
test "delete with delete" do
a =
TextDelta.new()
|> TextDelta.delete(1)
b =
TextDelta.new()
|> TextDelta.delete(1)
composition =
TextDelta.new()
|> TextDelta.delete(2)
assert TextDelta.compose(a, b) == composition
end
test "retain with insert" do
a =
TextDelta.new()
|> TextDelta.retain(1, %{color: "blue"})
b =
TextDelta.new()
|> TextDelta.insert("B")
composition =
TextDelta.new()
|> TextDelta.insert("B")
|> TextDelta.retain(1, %{color: "blue"})
assert TextDelta.compose(a, b) == composition
end
test "retain with retain" do
a =
TextDelta.new()
|> TextDelta.retain(1, %{color: "blue"})
b =
TextDelta.new()
|> TextDelta.retain(1, %{bold: true, color: "red", font: nil})
composition =
TextDelta.new()
|> TextDelta.retain(1, %{bold: true, color: "red", font: nil})
assert TextDelta.compose(a, b) == composition
end
test "retain with delete" do
a =
TextDelta.new()
|> TextDelta.retain(1, %{color: "blue"})
b =
TextDelta.new()
|> TextDelta.delete(1)
composition =
TextDelta.new()
|> TextDelta.delete(1)
assert TextDelta.compose(a, b) == composition
end
test "insertion in the middle of a text" do
a =
TextDelta.new()
|> TextDelta.insert("Hello")
b =
TextDelta.new()
|> TextDelta.retain(3)
|> TextDelta.insert("X")
composition = TextDelta.new() |> TextDelta.insert("HelXlo")
assert TextDelta.compose(a, b) == composition
end
test "insert and delete with different ordering" do
initial =
TextDelta.new()
|> TextDelta.insert("Hello")
insert_first =
TextDelta.new()
|> TextDelta.retain(3)
|> TextDelta.insert("X")
|> TextDelta.delete(1)
delete_first =
TextDelta.new()
|> TextDelta.retain(3)
|> TextDelta.delete(1)
|> TextDelta.insert("X")
composition =
TextDelta.new()
|> TextDelta.insert("HelXo")
assert TextDelta.compose(initial, insert_first) == composition
assert TextDelta.compose(initial, delete_first) == composition
end
test "insert embed" do
a =
TextDelta.new()
|> TextDelta.insert(1, %{src: "img.png"})
b =
TextDelta.new()
|> TextDelta.retain(1, %{alt: "logo"})
composition =
TextDelta.new()
|> TextDelta.insert(1, %{src: "img.png", alt: "logo"})
assert TextDelta.compose(a, b) == composition
end
test "insert half of and delete entirety of text" do
a =
TextDelta.new()
|> TextDelta.retain(4)
|> TextDelta.insert("Hello")
b =
TextDelta.new()
|> TextDelta.delete(9)
composition =
TextDelta.new()
|> TextDelta.delete(4)
assert TextDelta.compose(a, b) == composition
end
test "retain more than the length of text" do
a =
TextDelta.new()
|> TextDelta.insert("Hello")
b =
TextDelta.new()
|> TextDelta.retain(10)
composition =
TextDelta.new()
|> TextDelta.insert("Hello")
assert TextDelta.compose(a, b) == composition
end
test "retain empty embed" do
a =
TextDelta.new()
|> TextDelta.insert(1)
b =
TextDelta.new()
|> TextDelta.retain(1)
composition =
TextDelta.new()
|> TextDelta.insert(1)
assert TextDelta.compose(a, b) == composition
end
test "remove attribute" do
a =
TextDelta.new()
|> TextDelta.insert("A", %{bold: true})
b =
TextDelta.new()
|> TextDelta.retain(1, %{bold: nil})
composition =
TextDelta.new()
|> TextDelta.insert("A")
assert TextDelta.compose(a, b) == composition
end
test "remove embed attribute" do
a =
TextDelta.new()
|> TextDelta.insert(2, %{bold: true})
b =
TextDelta.new()
|> TextDelta.retain(1, %{bold: nil})
composition =
TextDelta.new()
|> TextDelta.insert(2)
assert TextDelta.compose(a, b) == composition
end
test "change attributes and delete parts of text" do
a =
TextDelta.new()
|> TextDelta.insert("Test", %{bold: true})
b =
TextDelta.new()
|> TextDelta.retain(1, %{color: "red"})
|> TextDelta.delete(2)
composition =
TextDelta.new()
|> TextDelta.insert("T", %{color: "red", bold: true})
|> TextDelta.insert("t", %{bold: true})
assert TextDelta.compose(a, b) == composition
end
test "delete+retain with delete" do
a =
TextDelta.new()
|> TextDelta.delete(1)
|> TextDelta.retain(1, %{style: "P"})
b =
TextDelta.new()
|> TextDelta.delete(1)
composition =
TextDelta.new()
|> TextDelta.delete(2)
assert TextDelta.compose(a, b) == composition
end
end
end
| 21.952646 | 70 | 0.53813 |
1cf32391112765fee12cc6420e6d01f91ce2afc7 | 1,117 | exs | Elixir | config/config.exs | gyson/t | e0f272abd78528cfc284c2dc523e76437c3e4260 | [
"MIT"
] | null | null | null | config/config.exs | gyson/t | e0f272abd78528cfc284c2dc523e76437c3e4260 | [
"MIT"
] | null | null | null | config/config.exs | gyson/t | e0f272abd78528cfc284c2dc523e76437c3e4260 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# third-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :t, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:t, :key)
#
# You can also configure a third-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env()}.exs"
| 36.032258 | 73 | 0.748433 |
1cf3250ba7b3b1d591293c4e99284a41a96bc786 | 1,679 | ex | Elixir | clients/tool_results/lib/google_api/tool_results/v1beta3/model/perf_sample.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/tool_results/lib/google_api/tool_results/v1beta3/model/perf_sample.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/tool_results/lib/google_api/tool_results/v1beta3/model/perf_sample.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ToolResults.V1beta3.Model.PerfSample do
  @moduledoc """
  Resource representing a single performance measure or data point.

  ## Attributes

  * `sampleTime` (*type:* `GoogleApi.ToolResults.V1beta3.Model.Timestamp.t`, *default:* `nil`) - Timestamp of collection.
  * `value` (*type:* `float()`, *default:* `nil`) - Value observed
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :sampleTime => GoogleApi.ToolResults.V1beta3.Model.Timestamp.t(),
          :value => float()
        }

  # `field/2` is a ModelBase macro that wires up (de)serialization for
  # each attribute; `as:` names the nested model to decode into.
  field(:sampleTime, as: GoogleApi.ToolResults.V1beta3.Model.Timestamp)
  field(:value)
end
defimpl Poison.Decoder, for: GoogleApi.ToolResults.V1beta3.Model.PerfSample do
  # Decoding is delegated to the generated helper on the model module.
  def decode(value, options),
    do: GoogleApi.ToolResults.V1beta3.Model.PerfSample.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.ToolResults.V1beta3.Model.PerfSample do
  # Encoding is handled generically by the Gax model base.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 33.58 | 123 | 0.736152 |
1cf3285985b20cd5ec8bc0cefef31e95eea49dae | 2,657 | ex | Elixir | lib/stream_stats.ex | whossname/stream_stats | 6eca90186d5c36ccabde750b8d07757142878883 | [
"MIT"
] | 1 | 2019-09-27T03:30:22.000Z | 2019-09-27T03:30:22.000Z | lib/stream_stats.ex | whossname/stream_stats | 6eca90186d5c36ccabde750b8d07757142878883 | [
"MIT"
] | null | null | null | lib/stream_stats.ex | whossname/stream_stats | 6eca90186d5c36ccabde750b8d07757142878883 | [
"MIT"
] | null | null | null | defmodule StreamStats do
@moduledoc """
Enables concurrent calculation of count, mean and standard deviation.
New values can be aggregated into an existing stat tuple and two stat
tuples can be merged into one.
Inspired by the following article by John D. Cook:
https://www.johndcook.com/blog/skewness_kurtosis/
"""
@type t() :: {count(), mean(), m2()}
@type count() :: Integer.t()
@type mean() :: number()
@type m2() :: number()
@doc """
Adds a value to the aggregated stats tuple. Implemented as Welford's Online algorithm.
https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Welford's_Online_algorithm
"""
@spec push_value(number(), nil | t()) :: t()
def push_value(value, nil), do: {1, value, 0.0}
def push_value(value, {prev_count, prev_mean, prev_m2}) do
count = prev_count + 1
prev_delta = value - prev_mean
mean = prev_mean + prev_delta / count
new_delta = value - mean
m2 = prev_m2 + prev_delta * new_delta
{count, mean, m2}
end
@doc """
Merges two stats tuples. Implemented as Chan's Parallel Algorithm.
https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm
"""
@spec combine_stats(nil | t(), t()) :: t()
def combine_stats({_, _, _} = stats_a, nil), do: stats_a
def combine_stats(nil, {_, _, _} = stats_b), do: stats_b
def combine_stats({count_a, mean_a, m2_a}, {count_b, mean_b, m2_b}) do
count = count_a + count_b
delta = mean_b - mean_a
# I think this way of calculating the mean is more stable than the obvious way
mean = mean_a + delta * count_b / count
m2 = m2_a + m2_b + delta * delta * count_a * count_b / count
{count, mean, m2}
end
@doc """
Aggregates the values in a list to a stats tuple.
"""
@spec reduce(Enum.t(), t() | nil) :: any()
def reduce(values, stats \\ nil) do
Enum.reduce(values, stats, &combine/2)
end
@doc """
Aggregates a number or stats tuple into a stats tuple.
First argument can be a number or stats tuple.
"""
@spec combine(number() | t(), t()) :: t()
def combine({_, _, _} = stats_a, stats_b), do: combine_stats(stats_a, stats_b)
def combine(value, stats), do: push_value(value, stats)
@doc """
Calculates the variance using a stats tuple.
"""
@spec variance(t()) :: number()
def variance(stats) do
{count, _mean, m2} = stats
if count <= 1 do
0.0
else
m2 / (count - 1.0)
end
end
@doc """
Calculates the standard deviation using a stats tuple.
"""
@spec standard_deviation(t()) :: number()
def standard_deviation(stats) do
:math.sqrt(variance(stats))
end
end
| 27.391753 | 94 | 0.655627 |
1cf330ea8f6d6caa1c5528fdaee9481dfe7c87a1 | 6,747 | ex | Elixir | lib/poxa/pusher_event.ex | robktek/poxa | 1c330789735f9bc1e91c3dfa7a10edb3aff50aab | [
"MIT"
] | null | null | null | lib/poxa/pusher_event.ex | robktek/poxa | 1c330789735f9bc1e91c3dfa7a10edb3aff50aab | [
"MIT"
] | null | null | null | lib/poxa/pusher_event.ex | robktek/poxa | 1c330789735f9bc1e91c3dfa7a10edb3aff50aab | [
"MIT"
] | null | null | null | defmodule Poxa.PusherEvent do
@moduledoc """
This module contains a set of functions to work with pusher events
Take a look at http://pusher.com/docs/pusher_protocol#events for more info
"""
require Logger
alias Poxa.PresenceSubscription
import Poison, only: [encode!: 1]
@doc """
Return a JSON for an established connection using the `socket_id` parameter to
identify this connection
{ "event" : "pusher:connection_established",
"data" : {
"socket_id" : "129319",
"activity_timeout" : 12
}
}"
"""
@spec connection_established(binary) :: binary
def connection_established(socket_id) do
data = %{socket_id: socket_id, activity_timeout: 120} |> encode!
%{event: "pusher:connection_established",
data: data} |> encode!
end
@doc """
Return a JSON for a succeeded subscription
"{ "event" : "pusher_internal:subscription_succeeded",
"channel" : "public-channel",
"data" : {} }"
If it's a presence subscription the following JSON is generated
"{ "event": "pusher_internal:subscription_succeeded",
"channel": "presence-example-channel",
"data": {
"presence": {
"ids": ["1234","98765"],
"hash": {
"1234": {
"name":"John Locke",
"twitter": "@jlocke"
},
"98765": {
"name":"Nicola Tesla",
"twitter": "@ntesla"
}
},
"count": 2
}
}
}"
"""
@spec subscription_succeeded(binary | PresenceSubscription.t) :: binary
def subscription_succeeded(channel) when is_binary(channel) do
%{event: "pusher_internal:subscription_succeeded",
channel: channel,
data: %{} } |> encode!
end
def subscription_succeeded(%PresenceSubscription{channel: channel, channel_data: channel_data}) do
ids = Map.keys(channel_data)
count = Enum.count(ids)
data = %{presence: %{ids: ids, hash: channel_data, count: count}} |> encode!
%{event: "pusher_internal:subscription_succeeded",
channel: channel,
data: data } |> encode!
end
@subscription_error %{event: "pusher:subscription_error", data: %{}} |> encode!
@doc """
Return a JSON for a subscription error
"{ "event" : "pusher:subscription_error",
"data" : {} }"
"""
@spec subscription_error :: <<_ :: 376>>
def subscription_error, do: @subscription_error
@doc """
Return a JSON for a pusher error using `message` and an optional `code`
Example:
"{
"event": "pusher:error",
"data": {
"message": String,
"code": Integer
}
}"
"""
@spec pusher_error(binary, integer | nil) :: binary
def pusher_error(message, code \\ nil) do
%{ event: "pusher:error", data: %{ message: message, code: code } } |> encode!
end
@pong %{event: "pusher:pong", data: %{}} |> encode!
@doc """
PING? PONG!
"{ "event" : "pusher:pong",
"data" : {} }"
"""
@spec pong :: <<_ :: 264>>
def pong, do: @pong
@doc """
Returns a JSON for a new member subscription on a presence channel
"{ "event" : "pusher_internal:member_added",
"channel" : "public-channel",
"data" : { "user_id" : 123,
"user_info" : "456" } }"
"""
@spec presence_member_added(binary, PresenceSubscription.user_id, PresenceSubscription.user_info) :: binary
def presence_member_added(channel, user_id, user_info) do
data = %{user_id: user_id, user_info: user_info} |> encode!
%{event: "pusher_internal:member_added",
channel: channel,
data: data} |> encode!
end
@doc """
Returns a JSON for a member having the id `user_id` unsubscribing on
a presence channel named `channel`
"{ "event" : "pusher_internal:member_removed",
"channel" : "public-channel",
"data" : { "user_id" : 123 } }"
"""
@spec presence_member_removed(binary, PresenceSubscription.user_id) :: binary
def presence_member_removed(channel, user_id) do
data = %{user_id: user_id} |> encode!
%{event: "pusher_internal:member_removed",
channel: channel,
data: data} |> encode!
end
  # channels  - list of channel names the event targets
  # name      - event name string
  # data      - event payload (raw binary or decoded map)
  # socket_id - originating connection id, forwarded to the registry on
  #             publish (presumably used to skip the sender — TODO confirm)
  # user_id   - id of the sending user for client events (presence channels)
  defstruct [:channels, :name, :data, :socket_id, :user_id]
  @type t :: %Poxa.PusherEvent{channels: list, name: binary, user_id: nil | binary,
          data: binary | map, socket_id: nil | binary}
@doc """
Builds the struct based on the decoded JSON from /events endpoint
"""
@spec build(map) :: {:ok, Poxa.PusherEvent.t} | {:error, atom}
def build(%{"name" => name, "channels" => channels, "data" => data} = event) do
build_event(channels, data, name, event["socket_id"])
end
def build(%{"name" => name, "channel" => channel, "data" => data} = event) do
build_event([channel], data, name, event["socket_id"])
end
def build(_), do: {:error, :invalid_pusher_event}
  @doc """
  Build client events
  """
  @spec build_client_event(map, binary) :: {:ok, Poxa.PusherEvent.t} | {:error, atom}
  # Clause for payloads carrying the event name under the "event" key.
  # NOTE(review): the two clauses below are identical apart from the key
  # they match ("event" vs "name") — presumably to accept both payload
  # shapes; the dashed Logger.debug lines look like leftover debug output.
  def build_client_event(%{"event" => name, "channel" => channel, "data" => data}, socket_id) do
    Logger.debug "--------------------- Build Client Event Name: #{name} Channel #{channel}"
    # The id of the user sending on this channel travels with the event.
    user_id = Poxa.PresenceChannel.my_user_id(channel)
    build_event([channel], data, name, socket_id, user_id)
  end

  # Same handling for payloads that use the "name" key instead.
  def build_client_event(%{"name" => name, "channel" => channel, "data" => data}, socket_id) do
    Logger.debug "--------------------- Build Client Event Name: #{name} Channel #{channel}"
    user_id = Poxa.PresenceChannel.my_user_id(channel)
    build_event([channel], data, name, socket_id, user_id)
  end
defp build_event(channels, data, name, socket_id, user_id \\ nil) do
event = %Poxa.PusherEvent{channels: channels, data: data, name: name, socket_id: socket_id, user_id: user_id}
if valid?(event), do: {:ok, event},
else: {:error, :invalid_event}
end
  # An event is valid when every channel passes `Poxa.Channel.valid?/1`
  # and, when a socket id is present, it passes `Poxa.SocketId.valid?/1`.
  # The `and`/`||` combination is deliberately short-circuiting: the
  # socket-id check only runs once all channels have been validated.
  defp valid?(%Poxa.PusherEvent{channels: channels, socket_id: socket_id}) do
    Enum.all?(channels, &Poxa.Channel.valid?(&1)) and
    (!socket_id || Poxa.SocketId.valid?(socket_id))
  end
  # Anything that is not a PusherEvent struct is invalid.
  defp valid?(_), do: false
@doc """
Send `message` to `channels` excluding `exclude`
"""
@spec publish(Poxa.PusherEvent.t) :: :ok
def publish(%Poxa.PusherEvent{channels: channels} = event) do
for channel <- channels do
publish_event_to_channel(event, channel)
end
:ok
end
defp publish_event_to_channel(event, channel) do
message = build_message(event, channel) |> encode!
Poxa.registry.send!(message, channel, self(), event.socket_id)
end
defp build_message(event, channel) do
%{event: event.name, data: event.data, channel: channel}
end
end
| 32.752427 | 113 | 0.619535 |
1cf34471e025b0d5530aeb8316af311fde3cb048 | 3,139 | exs | Elixir | farmbot_ext/test/farmbot_ext/mqtt/sync_handler_support_test.exs | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | 1 | 2021-08-23T13:36:14.000Z | 2021-08-23T13:36:14.000Z | farmbot_ext/test/farmbot_ext/mqtt/sync_handler_support_test.exs | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | null | null | null | farmbot_ext/test/farmbot_ext/mqtt/sync_handler_support_test.exs | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | null | null | null | defmodule FarmbotExt.SyncHandlerSupportTest do
use ExUnit.Case
use Mimic
alias FarmbotExt.MQTT.SyncHandlerSupport, as: Support
alias FarmbotExt.MQTT.SyncHandler, as: State
alias FarmbotCore.{BotState, JSON, Leds, Asset}
alias FarmbotExt.MQTT
alias FarmbotExt.API.EagerLoader
  # Minimal SyncHandler state shared across the tests; `preloaded: true`
  # mirrors a handler that has already finished its initial sync.
  @fake_state %State{
    client_id: "fboslol",
    username: "device_1",
    preloaded: true
  }

  # Decoded sync RPC payload; the "label" value is echoed back inside the
  # rpc_ok acknowledgement (see the reply_to_sync_message test).
  @fake_payload %{
    "body" => %{"foo" => "bar"},
    "args" => %{"label" => "fsdfsdfsdfwer"}
  }
  test "finalize_preload(state, :ok)" do
    # A successful preload must flip `preloaded` back on, set the LED to
    # solid green exactly once and report "synced" exactly once.
    old_state = %{@fake_state | preloaded: false}
    expected = %{old_state | preloaded: true}

    expect(Leds, :green, 1, fn
      :solid -> :ok
      _ -> raise "wrong arguments in finalize_preload"
    end)

    expect(BotState, :set_sync_status, 1, fn
      "synced" -> :ok
      _ -> raise "wrong arguments in finalize_preload"
    end)

    result = Support.finalize_preload(old_state, :ok)
    assert result == {:noreply, expected}
  end
test "drop_all_cache()" do
expect(EagerLoader.Supervisor, :drop_all_cache, 1, fn ->
raise "Intentional exception to test error handling"
end)
assert :ok == Support.drop_all_cache()
end
  test "handle_asset (error)" do
    fake_kind = "Point"
    fake_id = 123
    fake_params = %{fake: :params}

    # Both "syncing" and "synced" are reported (two calls in total).
    expect(BotState, :set_sync_status, 2, fn
      "synced" -> :ok
      "syncing" -> :ok
      _ -> raise "Wrong sync status"
    end)

    # The LED is driven twice: fast blink plus solid.
    expect(Leds, :green, 2, fn
      :really_fast_blink -> :ok
      :solid -> :ok
    end)

    # The asset update must be forwarded with the arguments untouched.
    expect(Asset.Command, :update, 1, fn
      asset_kind, id, params ->
        assert asset_kind == fake_kind
        assert id == fake_id
        assert params == fake_params
        :ok
    end)

    Support.handle_asset(fake_kind, fake_id, fake_params)
  end
  test "finalize_preload(state, reason)" do
    # On preload failure the handler must report "sync_error"...
    expect(BotState, :set_sync_status, 1, fn
      "sync_error" -> :ok
      _ -> raise "Sync status is wrong"
    end)

    # ...schedule a :preload retry message after 5000 ms...
    expect(FarmbotExt.Time, :send_after, 1, fn pid, msg, timeout ->
      assert pid == self()
      assert msg == :preload
      assert timeout == 5000
    end)

    # ...and switch the LED to a slow blink.
    expect(Leds, :green, 1, fn
      :slow_blink -> :ok
      _ -> raise "Wrong LED"
    end)

    # The state itself is returned unchanged.
    assert {:noreply, @fake_state} == Support.finalize_preload(@fake_state, "Something")
  end
  test "reply_to_sync_message" do
    json = JSON.encode!(@fake_payload)

    # The referenced asset gets updated once; arguments are not inspected
    # in this test.
    expect(Asset.Command, :update, 1, fn _, _, _ -> :ok end)

    # The rpc_ok acknowledgement must echo the request label and go out on
    # the device's from_device topic under this client's id.
    expect(MQTT, :publish, 1, fn client_id, topic, that_json ->
      expected_json = %{
        "kind" => "rpc_ok",
        "args" => %{"label" => "fsdfsdfsdfwer"}
      }

      assert client_id == @fake_state.client_id
      assert JSON.decode!(that_json) == expected_json
      assert topic == "bot/device_1/from_device"
      :ok
    end)

    # Both "syncing" and "synced" are reported (two calls in total).
    expect(BotState, :set_sync_status, 2, fn
      "synced" -> :ok
      "syncing" -> :ok
      _ -> raise "UNEXPECTED SYNC"
    end)

    # The LED is driven twice: fast blink plus solid.
    expect(Leds, :green, 2, fn
      :really_fast_blink -> :ok
      :solid -> :ok
      _ -> raise "UNEXPECTED LED USE"
    end)

    Support.reply_to_sync_message(@fake_state, "Point", "123", json)
  end
end
| 24.912698 | 88 | 0.61612 |
1cf3536163585d70d883bef7d460e639ed1338f0 | 706 | ex | Elixir | web/gettext.ex | marcsugiyama/restfully | 8352bc5718c1298c836ed72fc9d7b5cd4a9695bb | [
"Apache-2.0"
] | 1 | 2018-03-14T23:48:57.000Z | 2018-03-14T23:48:57.000Z | web/gettext.ex | marcsugiyama/restfully | 8352bc5718c1298c836ed72fc9d7b5cd4a9695bb | [
"Apache-2.0"
] | null | null | null | web/gettext.ex | marcsugiyama/restfully | 8352bc5718c1298c836ed72fc9d7b5cd4a9695bb | [
"Apache-2.0"
] | 2 | 2018-06-04T12:37:32.000Z | 2021-06-15T11:45:05.000Z | defmodule Restfully.Gettext do
  @moduledoc """
  A module providing Internationalization with a gettext-based API.

  By using [Gettext](http://hexdocs.pm/gettext), your module gains a set
  of macros for translations, for example:

      import Restfully.Gettext

      # Simple translation
      gettext "Here is the string to translate"

      # Plural translation
      ngettext "Here is the string to translate",
               "Here are the strings to translate",
               3

      # Domain-based translation
      dgettext "errors", "Here is the error message to translate"

  See the [Gettext Docs](http://hexdocs.pm/gettext) for detailed usage.
  """
  use Gettext, otp_app: :restfully
end
| 28.24 | 71 | 0.679887 |
1cf38b417bf5b82832622573d9736f30900c21fe | 1,683 | ex | Elixir | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/installs_list_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/installs_list_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/android_enterprise/lib/google_api/android_enterprise/v1/model/installs_list_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AndroidEnterprise.V1.Model.InstallsListResponse do
  @moduledoc """
  Response model carrying the list of installs.

  ## Attributes

  * `install` (*type:* `list(GoogleApi.AndroidEnterprise.V1.Model.Install.t)`, *default:* `nil`) - An installation of an app for a user on a specific device. The existence of an install implies that the user must have an entitlement to the app.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :install => list(GoogleApi.AndroidEnterprise.V1.Model.Install.t()) | nil
        }

  # Declares `install` as a list field for ModelBase (de)serialization.
  field(:install, as: GoogleApi.AndroidEnterprise.V1.Model.Install, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.AndroidEnterprise.V1.Model.InstallsListResponse do
  # Decoding is delegated to the generated helper on the model module.
  def decode(value, options),
    do: GoogleApi.AndroidEnterprise.V1.Model.InstallsListResponse.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.AndroidEnterprise.V1.Model.InstallsListResponse do
  # Encoding is handled generically by the Gax model base.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 35.808511 | 246 | 0.754011 |
1cf39e6b7ad98db751d063001554db6dbaa1a6d0 | 3,572 | ex | Elixir | clients/ad_experience_report/lib/google_api/ad_experience_report/v1/api/violating_sites.ex | chingor13/elixir-google-api | 85e13fa25c4c9f4618bb463ab4c79245fc6d2a7b | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/ad_experience_report/lib/google_api/ad_experience_report/v1/api/violating_sites.ex | chingor13/elixir-google-api | 85e13fa25c4c9f4618bb463ab4c79245fc6d2a7b | [
"Apache-2.0"
] | null | null | null | clients/ad_experience_report/lib/google_api/ad_experience_report/v1/api/violating_sites.ex | chingor13/elixir-google-api | 85e13fa25c4c9f4618bb463ab4c79245fc6d2a7b | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.AdExperienceReport.V1.Api.ViolatingSites do
@moduledoc """
API calls for all endpoints tagged `ViolatingSites`.
"""
alias GoogleApi.AdExperienceReport.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@doc """
Lists sites with Ad Experience Report statuses of "Failing" or "Warning".
## Parameters
* `connection` (*type:* `GoogleApi.AdExperienceReport.V1.Connection.t`) - Connection to server
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:$.xgafv` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.AdExperienceReport.V1.Model.ViolatingSitesResponse{}}` on success
* `{:error, info}` on failure
"""
@spec adexperiencereport_violating_sites_list(Tesla.Env.client(), keyword(), keyword()) ::
{:ok, GoogleApi.AdExperienceReport.V1.Model.ViolatingSitesResponse.t()}
| {:error, Tesla.Env.t()}
def adexperiencereport_violating_sites_list(connection, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/violatingSites", %{})
|> Request.add_optional_params(optional_params_config, optional_params)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.AdExperienceReport.V1.Model.ViolatingSitesResponse{}]
)
end
end
| 43.560976 | 196 | 0.664894 |
1cf3d334ec291a1387c1e693e91589ec17408eca | 969 | ex | Elixir | lib/nimble_options/validation_error.ex | lud/nimble_options | 0cbf16243948ef8b287f4aeb4bdecb69ba3c632a | [
"Apache-2.0"
] | 293 | 2020-04-03T15:58:27.000Z | 2022-03-29T10:29:32.000Z | lib/nimble_options/validation_error.ex | lud/nimble_options | 0cbf16243948ef8b287f4aeb4bdecb69ba3c632a | [
"Apache-2.0"
] | 60 | 2020-04-04T15:46:00.000Z | 2022-02-09T14:09:59.000Z | lib/nimble_options/validation_error.ex | lud/nimble_options | 0cbf16243948ef8b287f4aeb4bdecb69ba3c632a | [
"Apache-2.0"
] | 23 | 2020-04-04T15:43:10.000Z | 2022-02-09T13:03:38.000Z | defmodule NimbleOptions.ValidationError do
@moduledoc """
An error that is returned (or raised) when options are invalid.
Only the following documented fields are considered public. All other fields are
considered private and should not be referenced:
* `:key` - The key that did not successfully validate.
* `:keys_path` - If the key is nested, this is the path to the key.
* `:value` - The value that failed to validate. This field is `nil` if there was no
value provided.
Since this is an exception, you can either raise it directly with `raise/1`
or turn it into a message string with `Exception.message/1`.
"""
  @type t() :: %__MODULE__{}

  # Public fields are :message, :key, :value and :keys_path (see the
  # @moduledoc); everything else is private.
  defexception [:message, :key, :value, keys_path: []]
@impl true
def message(%__MODULE__{message: message, keys_path: keys_path}) do
suffix =
case keys_path do
[] -> ""
keys -> " (in options #{inspect(keys)})"
end
message <> suffix
end
end
| 28.5 | 87 | 0.670795 |
1cf3fbf05dc682936e22ff77d724f0502aa2ff48 | 4,289 | ex | Elixir | apps/omg_api/lib/monitor.ex | hoardexchange/elixir-omg | 423528699d467f1cc0d02c596290ab907af38c2c | [
"Apache-2.0"
] | null | null | null | apps/omg_api/lib/monitor.ex | hoardexchange/elixir-omg | 423528699d467f1cc0d02c596290ab907af38c2c | [
"Apache-2.0"
] | null | null | null | apps/omg_api/lib/monitor.ex | hoardexchange/elixir-omg | 423528699d467f1cc0d02c596290ab907af38c2c | [
"Apache-2.0"
] | 2 | 2020-06-07T11:14:54.000Z | 2020-08-02T07:36:32.000Z | # Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.API.Monitor do
  @moduledoc """
  This module is a custom-implemented supervisor that monitors all of its children
  and restarts them based on alarms raised. This means that in the period when Geth
  alarms are raised it waits before restarting them.
  When an EXIT is received, check for a raised alarm related to Ethereum client
  synchronisation or connection problems and react accordingly.
  Children that need Ethereum client connectivity are OMG.API.EthereumEventListener,
  OMG.API.BlockQueue.Server and OMG.API.RootChainCoordinator. For these children, we make
  additional checks if they exit. If there's an alarm raised of type :ethereum_client_connection we postpone
  the restart until the alarm is cleared. Other children are restarted immediately.
  """
  use GenServer
  require Logger
  alias OMG.API.Alert.Alarm
  # needs to be less than checks from RootChainCoordinator
  @default_interval 300
  # One entry per supervised child: its current pid, the spec needed to restart
  # it, and (only while a delayed restart is pending) the interval-timer ref.
  @type t :: %__MODULE__{
          pid: pid(),
          spec: {module(), term()} | map(),
          tref: :timer.tref() | nil
        }
  defstruct pid: nil, spec: nil, tref: nil
  def start_link(children_specs) do
    GenServer.start_link(__MODULE__, children_specs, name: __MODULE__)
  end
  def init(children_specs) do
    # Trap exits so a crashing linked child arrives as an :EXIT message
    # instead of taking this monitor down with it.
    Process.flag(:trap_exit, true)
    children = Enum.map(children_specs, &start_child(&1))
    {:ok, children}
  end
  # Periodic tick from the :timer.send_interval armed in restart_or_delay/1:
  # restart the child only once the Ethereum-client alarm has cleared.
  def handle_info({:delayed_restart, child}, state) do
    # child still holds the old pid
    from = child.pid
    with false <- is_raised?(),
         {%__MODULE__{pid: ^from, tref: tref} = child, other_children} <- find_child_from_dead_pid(from, state) do
      {:ok, :cancel} = :timer.cancel(tref)
      new_child = start_child(child.spec)
      {:noreply, [new_child | other_children]}
    else
      _ ->
        # alarm is still raised, or the child was already cleared from state in a previous timer
        {:noreply, state}
    end
  end
  # we got an exit signal from a linked child, we have to act as a supervisor now and decide what to do
  # we try to find the child via his old pid that we kept in the state, retrieve his exit reason and specification for
  # starting the child
  # NOTE(review): if `from` is not a known child pid, find_child_from_dead_pid/2
  # returns {nil, state} and this match raises, crashing the monitor — confirm
  # only linked children can deliver :EXIT here.
  def handle_info({:EXIT, from, _reason}, state) do
    {%__MODULE__{pid: ^from} = child, other_children} = find_child_from_dead_pid(from, state)
    new_child = restart_or_delay(child)
    {:noreply, [new_child | other_children]}
  end
  # We try to find the child specs from the pid that was started.
  # The child will be updated so we return also the new child list without that child.
  @spec find_child_from_dead_pid(pid(), list(t)) :: {t, list(t)} | {nil, list(t)}
  defp find_child_from_dead_pid(pid, state) do
    item = Enum.find(state, &(&1.pid == pid))
    # When `item` is nil, `state -- [nil]` is a no-op, yielding {nil, state}.
    {item, state -- [item]}
  end
  ### Figure out, if the client is unavailable. If it is, we'll postpone the
  ### restart until the alarm clears. Other processes can be restarted immediately.
  defp restart_or_delay(child) do
    case is_raised?() do
      true ->
        {:ok, tref} = :timer.send_interval(@default_interval, {:delayed_restart, child})
        %__MODULE__{child | tref: tref}
      _ ->
        start_child(child.spec)
    end
  end
  # Start a child given as a {module, args} tuple (calls module.start_link/1)...
  defp start_child({child_module, args} = spec) do
    {:ok, pid} = child_module.start_link(args)
    %__MODULE__{pid: pid, spec: spec}
  end
  # ...or as a supervisor-style child-spec map with an MFA under :start.
  defp start_child(%{id: _name, start: {child_module, function, args}} = spec) do
    {:ok, pid} = apply(child_module, function, args)
    %__MODULE__{pid: pid, spec: spec}
  end
  # True when an :ethereum_client_connection alarm is currently raised.
  defp is_raised?() do
    alarms = Alarm.all()
    alarms
    |> Enum.find(fn x -> match?(%{id: :ethereum_client_connection}, x) end)
    |> is_map()
  end
end
| 33.771654 | 118 | 0.697599 |
1cf4845435e9e156e69a5238227b984f59cecf4b | 495 | ex | Elixir | lib/jalka2021_web/views/error_view.ex | kriips/jalka2021 | f4d968e20cae116fd4056bff2f937cd036421977 | [
"MIT"
] | null | null | null | lib/jalka2021_web/views/error_view.ex | kriips/jalka2021 | f4d968e20cae116fd4056bff2f937cd036421977 | [
"MIT"
] | null | null | null | lib/jalka2021_web/views/error_view.ex | kriips/jalka2021 | f4d968e20cae116fd4056bff2f937cd036421977 | [
"MIT"
] | null | null | null | defmodule Jalka2021Web.ErrorView do
use Jalka2021Web, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.html", _assigns) do
# "Internal Server Error"
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.html" becomes
# "Not Found".
  # Fallback: renders any missing status template (e.g. "404.html") as its
  # plain HTTP status message ("Not Found") via Phoenix.Controller.
  def template_not_found(template, _assigns) do
    Phoenix.Controller.status_message_from_template(template)
  end
end
| 29.117647 | 61 | 0.737374 |
1cf48a265da097feee3869b9b6cb8903628bdce7 | 258 | exs | Elixir | priv/repo/migrations/20200416000727_add_timers_table.exs | jgsmith/militerm | c4252d0a93f5620b90750ac2b61baf282e9ef7eb | [
"Apache-2.0"
] | 6 | 2017-06-16T10:26:35.000Z | 2021-04-07T15:01:00.000Z | priv/repo/migrations/20200416000727_add_timers_table.exs | jgsmith/militerm | c4252d0a93f5620b90750ac2b61baf282e9ef7eb | [
"Apache-2.0"
] | 2 | 2020-04-14T02:17:46.000Z | 2021-03-10T11:09:05.000Z | priv/repo/migrations/20200416000727_add_timers_table.exs | jgsmith/militerm | c4252d0a93f5620b90750ac2b61baf282e9ef7eb | [
"Apache-2.0"
] | null | null | null | defmodule Militerm.Repo.Migrations.AddTimersTable do
use Ecto.Migration
  # Reversible migration: creates the timers table (one map of timer data per
  # entity) plus a unique index guaranteeing at most one row per entity_id.
  def change do
    create table(:timers) do
      add :entity_id, :string
      add :data, :map
      timestamps()
    end
    create unique_index(:timers, [:entity_id])
  end
end
| 17.2 | 52 | 0.670543 |
1cf4a605e2bcf424caa8cd6548a881bc97e072dc | 5,288 | ex | Elixir | lib/liblink/cluster/discover/service.ex | Xerpa/liblink | 7b983431c5b391bb8cf182edd9ca4937601eea35 | [
"Apache-2.0"
] | 3 | 2018-10-26T12:55:15.000Z | 2019-05-03T22:41:34.000Z | lib/liblink/cluster/discover/service.ex | Xerpa/liblink | 7b983431c5b391bb8cf182edd9ca4937601eea35 | [
"Apache-2.0"
] | 4 | 2018-08-26T14:43:57.000Z | 2020-09-23T21:14:56.000Z | lib/liblink/cluster/discover/service.ex | Xerpa/liblink | 7b983431c5b391bb8cf182edd9ca4937601eea35 | [
"Apache-2.0"
] | null | null | null | # Copyright (C) 2018 Xerpa
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule Liblink.Cluster.Discover.Service do
  @moduledoc """
  Discovers remote services for a cluster/protocol pair by querying Consul's
  health API and mirroring the passing services into the cluster database.
  """
  use Liblink.Logger
  alias Liblink.Data.Cluster
  alias Liblink.Data.Cluster.RemoteService
  alias Liblink.Data.Cluster.Discover
  alias Liblink.Data.Cluster.Service
  alias Liblink.Cluster.Naming
  alias Liblink.Network.Consul
  alias Liblink.Cluster.Database
  alias Liblink.Cluster.Database.Mutation
  @type t :: map
  @spec new(Database.t(), Consul.t(), Cluster.t(), Service.protocol()) :: {:ok, t}
  def new(pid, consul = %Consul{}, cluster = %Cluster{discover: %Discover{}}, protocol) do
    {:ok, %{database: pid, consul: consul, cluster: cluster, protocol: protocol}}
  end
  # Removes every previously discovered remote service for this cluster/protocol.
  @spec halt(t) :: :ok
  def halt(state) do
    :ok = Mutation.del_remote_services(state.database, state.cluster.id, state.protocol)
  end
  # One discovery pass: query each datacenter for passing instances of the
  # service and hand the aggregate to handle_service/2. Always returns `state`.
  @spec exec(t) :: t
  def exec(state) do
    cluster = state.cluster
    protocol = state.protocol
    service_name = Naming.service_name(cluster, protocol)
    datacenters =
      case state.consul.config.datacenters do
        # "" makes Consul fall back to the local agent's datacenter.
        [] -> [""]
        datacenters -> datacenters
      end
    # Halts on the first datacenter that does not answer 200 with a list;
    # a partial result is then discarded entirely.
    health_reply =
      Enum.reduce_while(datacenters, {:ok, []}, fn datacenter, {:ok, acc_services} ->
        query = [dc: datacenter, state: "passing"]
        case Consul.Health.service(state.consul, service_name, query) do
          {:ok, %{status: 200, body: services}} when is_list(services) ->
            {:cont, {:ok, services ++ acc_services}}
          _error ->
            {:halt, :error}
        end
      end)
    case health_reply do
      {:ok, services} ->
        handle_service(state, services)
        state
      _error ->
        Liblink.Logger.warn("error reading services from consul")
        state
    end
  end
  # Builds RemoteService structs from the raw health entries, drops those the
  # cluster's restrict callback rejects, and stores the surviving set. A single
  # malformed entry aborts the whole batch (no database write).
  @spec handle_service(t, [map]) :: :ok
  defp handle_service(state, reply) when is_list(reply) do
    cluster = state.cluster
    protocol = state.protocol
    services =
      Enum.reduce_while(reply, MapSet.new(), fn health, acc ->
        case build_remote_service(cluster, protocol, health) do
          {:ok, remote_service} ->
            if accept_service?(remote_service) do
              {:cont, MapSet.put(acc, remote_service)}
            else
              {:cont, acc}
            end
          error ->
            Liblink.Logger.error("error reading services from consul: error=#{inspect(error)}")
            {:halt, :error}
        end
      end)
    unless services == :error do
      Mutation.add_remote_services(state.database, cluster.id, protocol, services)
    end
  end
  # Delegates the keep/drop decision to the user-supplied restrict callback.
  @spec accept_service?(RemoteService.t()) :: boolean
  defp accept_service?(remote_service) do
    remote_service.cluster.discover.restrict.(remote_service.protocol, remote_service.metadata)
  end
  # Validates the shape of one Consul health entry and converts it into a
  # RemoteService; any missing/mistyped field falls through to :error.
  @spec build_remote_service(Cluster.t(), Service.protocol(), map) ::
          {:ok, RemoteService.t()} | :error
  defp build_remote_service(cluster, protocol, health) do
    with true <- is_map(health),
         {:ok, node} when is_map(node) <- Map.fetch(health, "Node"),
         {:ok, service} when is_map(service) <- Map.fetch(health, "Service"),
         {:ok, checks} when is_list(checks) <- Map.fetch(health, "Checks"),
         {:ok, datacenter} when is_binary(datacenter) <- Map.fetch(node, "Datacenter"),
         {:ok, node_addr} when is_binary(node_addr) <- Map.fetch(node, "Address"),
         {:ok, tags} when is_list(tags) <- Map.fetch(service, "Tags"),
         {:ok, metadata} when is_map(metadata) or is_nil(metadata) <- Map.fetch(service, "Meta"),
         {:ok, svc_addr} when is_binary(svc_addr) <- Map.fetch(service, "Address"),
         {:ok, svc_port} when is_integer(svc_port) <- Map.fetch(service, "Port") do
      # A blank service address means "use the node's address" in Consul.
      svc_addr =
        if String.trim(svc_addr) == "" do
          node_addr
        else
          svc_addr
        end
      metadata =
        if is_map(metadata) do
          metadata
        else
          %{}
        end
        |> Map.put("ll-datacenter", datacenter)
      RemoteService.new(
        status: compute_status(checks),
        cluster: cluster,
        protocol: protocol,
        address: svc_addr,
        port: svc_port,
        metadata: metadata,
        tags: tags
      )
    else
      _ ->
        Liblink.Logger.warn("received an invalid data from consul health=#{inspect(health)}")
        :error
    end
  end
  # Folds the check list into a single status: any "critical" (or malformed)
  # check makes the whole service :critical; "warning" counts as passing.
  # NOTE(review): a check whose "Status" is some other string (e.g. "maintenance")
  # raises CaseClauseError here — confirm Consul can only emit these three values.
  @spec compute_status([map]) :: :critical | :passing
  defp compute_status(checks) do
    Enum.reduce(checks, :passing, fn check, status ->
      if is_map(check) do
        case Map.fetch(check, "Status") do
          {:ok, "passing"} -> status
          {:ok, "critical"} -> :critical
          {:ok, "warning"} -> status
          :error -> :critical
        end
      else
        :critical
      end
    end)
  end
end
| 31.664671 | 97 | 0.625946 |
1cf4b014e439b39bafa35ca2e7c6d36e6fb57eb8 | 1,152 | ex | Elixir | lib/movement/builders/revision_uncorrect_all.ex | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 806 | 2018-04-07T20:40:33.000Z | 2022-03-30T01:39:57.000Z | lib/movement/builders/revision_uncorrect_all.ex | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 194 | 2018-04-07T13:49:37.000Z | 2022-03-30T19:58:45.000Z | lib/movement/builders/revision_uncorrect_all.ex | doc-ai/accent | e337e16f3658cc0728364f952c0d9c13710ebb06 | [
"BSD-3-Clause"
] | 89 | 2018-04-09T13:55:49.000Z | 2022-03-24T07:09:31.000Z | defmodule Movement.Builders.RevisionUncorrectAll do
@behaviour Movement.Builder
import Movement.Context, only: [assign: 3]
alias Accent.Scopes.Translation, as: TranslationScope
alias Accent.{Repo, Translation}
alias Movement.Mappers.Operation, as: OperationMapper
@action "uncorrect_conflict"
def build(context) do
context
|> assign_translations()
|> process_operations()
end
defp process_operations(context = %Movement.Context{assigns: assigns, operations: operations}) do
new_operations =
Enum.map(assigns[:translations], fn translation ->
OperationMapper.map(@action, translation, %{text: nil})
end)
%{context | operations: Enum.concat(operations, new_operations)}
end
  # Loads the revision's candidate translations (active, unlocked,
  # non-conflicted, not tied to a version) and stashes them under
  # assigns[:translations] for process_operations/1.
  defp assign_translations(context = %Movement.Context{assigns: assigns}) do
    translations =
      Translation
      |> TranslationScope.active()
      |> TranslationScope.not_locked()
      |> TranslationScope.not_conflicted()
      |> TranslationScope.from_revision(assigns[:revision].id)
      |> TranslationScope.no_version()
      |> Repo.all()
    assign(context, :translations, translations)
  end
end
| 28.8 | 99 | 0.716146 |
1cf4c3b9ba6c66424dc5f7c0ba7251b74c738a0a | 108 | ex | Elixir | lib/wolfgang/repo.ex | davidenglishmusic/wolfgang | 7f8c29278be4f12412afb35efcf342455a805004 | [
"MIT"
] | null | null | null | lib/wolfgang/repo.ex | davidenglishmusic/wolfgang | 7f8c29278be4f12412afb35efcf342455a805004 | [
"MIT"
] | null | null | null | lib/wolfgang/repo.ex | davidenglishmusic/wolfgang | 7f8c29278be4f12412afb35efcf342455a805004 | [
"MIT"
] | null | null | null | defmodule Wolfgang.Repo do
use Ecto.Repo,
otp_app: :wolfgang,
adapter: Ecto.Adapters.Postgres
end
| 18 | 35 | 0.731481 |
1cf4f4ba3712b2abc8b35c092af21316d99ee935 | 1,195 | ex | Elixir | lib/exq/heartbeat/server.ex | JamesFerguson/exq | 4457ea50ad4c8a6a7e127da5acafc130960822bb | [
"Apache-2.0"
] | 1,406 | 2015-01-16T03:00:32.000Z | 2022-03-28T11:38:22.000Z | lib/exq/heartbeat/server.ex | JamesFerguson/exq | 4457ea50ad4c8a6a7e127da5acafc130960822bb | [
"Apache-2.0"
] | 372 | 2015-01-08T05:15:11.000Z | 2022-03-18T18:05:34.000Z | lib/exq/heartbeat/server.ex | JamesFerguson/exq | 4457ea50ad4c8a6a7e127da5acafc130960822bb | [
"Apache-2.0"
] | 217 | 2015-02-01T20:21:36.000Z | 2022-01-28T16:19:55.000Z | defmodule Exq.Heartbeat.Server do
use GenServer
require Logger
alias Exq.Support.Config
alias Exq.Redis.Heartbeat
  # Server state: Redis connection, heartbeat key namespace, this node's
  # identifier, and the ping interval in milliseconds.
  defmodule State do
    defstruct [:namespace, :node_id, :redis, :interval]
  end
  # Starts the heartbeat server. :namespace, :redis and :heartbeat_interval are
  # required options; :node_id defaults to the configured node identifier.
  def start_link(options) do
    GenServer.start_link(
      __MODULE__,
      %State{
        namespace: Keyword.fetch!(options, :namespace),
        node_id: Keyword.get(options, :node_id, Config.node_identifier().node_id()),
        redis: Keyword.fetch!(options, :redis),
        interval: Keyword.fetch!(options, :heartbeat_interval)
      },
      []
    )
  end
  # Schedules the first :ping immediately (0 ms delay) on startup.
  def init(state) do
    :ok = schedule_ping(0)
    {:ok, state}
  end
  # Registers a heartbeat in Redis, then re-arms the timer. On failure the
  # retry delay is capped at 5s so a long configured interval does not keep
  # the node unregistered for too long.
  def handle_info(:ping, state) do
    case Heartbeat.register(state.redis, state.namespace, state.node_id) do
      :ok ->
        :ok = schedule_ping(state.interval)
      _error ->
        :ok = schedule_ping(Enum.min([state.interval, 5000]))
    end
    {:noreply, state}
  end
  # Catch-all: log and ignore any message other than :ping.
  def handle_info(msg, state) do
    Logger.error("Received unexpected info message in #{__MODULE__} #{inspect(msg)}")
    {:noreply, state}
  end
defp schedule_ping(interval) do
_reference = Process.send_after(self(), :ping, interval)
:ok
end
end
| 23.431373 | 85 | 0.651883 |
1cf53aec216374751ed743ac5d32a6ff03ea76dd | 1,707 | ex | Elixir | lib/gremlex/deserializer.ex | elljoh/gremlex | a83fc3a0c46c19efebd6d4881a71adb058a80e38 | [
"MIT"
] | 61 | 2018-05-10T17:03:41.000Z | 2021-04-28T21:45:27.000Z | lib/gremlex/deserializer.ex | elljoh/gremlex | a83fc3a0c46c19efebd6d4881a71adb058a80e38 | [
"MIT"
] | 19 | 2018-05-10T17:14:42.000Z | 2020-12-17T20:43:07.000Z | lib/gremlex/deserializer.ex | elljoh/gremlex | a83fc3a0c46c19efebd6d4881a71adb058a80e38 | [
"MIT"
] | 24 | 2018-08-23T17:42:26.000Z | 2022-03-22T20:31:16.000Z | defmodule Gremlex.Deserializer do
@moduledoc """
Deserializer module for deserializing data returned back from Gremlin.
"""
alias Gremlex.{Edge, Vertex, VertexProperty}
def deserialize(%{"result" => result}) do
case result["data"] do
nil ->
nil
%{"@type" => type, "@value" => value} ->
deserialize(type, value)
end
end
def deserialize(%{"@type" => type, "@value" => value}) do
deserialize(type, value)
end
def deserialize(val), do: val
  # Each element of a g:List is itself deserialized when typed, else kept as-is.
  def deserialize("g:List", value) do
    Enum.map(value, fn
      %{"@type" => type, "@value" => value} ->
        deserialize(type, value)
      value ->
        value
    end)
  end
  # g:Set: same element handling as g:List, collected into a MapSet.
  def deserialize("g:Set", value) do
    value
    |> Enum.map(fn
      %{"@type" => type, "@value" => value} ->
        deserialize(type, value)
      value ->
        value
    end)
    |> MapSet.new()
  end
  # Graph element types delegate to their dedicated struct builders.
  def deserialize("g:Vertex", value) do
    Vertex.from_response(value)
  end
  def deserialize("g:VertexProperty", value) do
    VertexProperty.from_response(value)
  end
  def deserialize("g:Edge", value) do
    Edge.from_response(value)
  end
  # Numeric GraphSON types are already plain numbers after JSON decoding.
  def deserialize("g:Int64", value) when is_number(value), do: value
  def deserialize("g:Int32", value) when is_number(value), do: value
  def deserialize("g:Double", value) when is_number(value), do: value
  def deserialize("g:Float", value) when is_number(value), do: value
  def deserialize("g:UUID", value), do: value
  # NOTE(review): GraphSON 3.0 encodes g:Date/g:Timestamp as epoch
  # *milliseconds*; interpreting the value as :microsecond here looks off by
  # a factor of 1000 — confirm against the server's serializer before changing.
  def deserialize("g:Date", value) do
    DateTime.from_unix!(value, :microsecond)
  end
  def deserialize("g:Timestamp", value) do
    DateTime.from_unix!(value, :microsecond)
  end
  # Unknown "@type": return the raw value untouched.
  def deserialize(_type, value), do: value
end
| 22.168831 | 72 | 0.630931 |
1cf550ff7eca21734755bc9ca60d104e334797f3 | 4,613 | ex | Elixir | clients/page_speed_online/lib/google_api/page_speed_online/v4/api/pagespeedapi.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/page_speed_online/lib/google_api/page_speed_online/v4/api/pagespeedapi.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/page_speed_online/lib/google_api/page_speed_online/v4/api/pagespeedapi.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.PageSpeedOnline.V4.Api.Pagespeedapi do
  @moduledoc """
  API calls for all endpoints tagged `Pagespeedapi`.
  """
  alias GoogleApi.PageSpeedOnline.V4.Connection
  alias GoogleApi.Gax.{Request, Response}
  @library_version Mix.Project.config() |> Keyword.get(:version, "")
  @doc """
  Runs PageSpeed analysis on the page at the specified URL, and returns PageSpeed scores, a list of suggestions to make that page faster, and other information.
  ## Parameters
  * `connection` (*type:* `GoogleApi.PageSpeedOnline.V4.Connection.t`) - Connection to server
  * `url` (*type:* `String.t`) - The URL to fetch and analyze
  * `optional_params` (*type:* `keyword()`) - Optional parameters
      * `:alt` (*type:* `String.t`) - Data format for the response.
      * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      * `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      * `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      * `:filter_third_party_resources` (*type:* `boolean()`) - Indicates if third party resources should be filtered out before PageSpeed analysis.
      * `:locale` (*type:* `String.t`) - The locale used to localize formatted results
      * `:rule` (*type:* `list(String.t)`) - A PageSpeed rule to run; if none are given, all rules are run
      * `:screenshot` (*type:* `boolean()`) - Indicates if binary data containing a screenshot should be included
      * `:snapshots` (*type:* `boolean()`) - Indicates if binary data containing snapshot images should be included
      * `:strategy` (*type:* `String.t`) - The analysis strategy (desktop or mobile) to use, and desktop is the default
      * `:utm_campaign` (*type:* `String.t`) - Campaign name for analytics.
      * `:utm_source` (*type:* `String.t`) - Campaign source for analytics.
  * `opts` (*type:* `keyword()`) - Call options
  ## Returns
  * `{:ok, %GoogleApi.PageSpeedOnline.V4.Model.PagespeedApiPagespeedResponseV4{}}` on success
  * `{:error, info}` on failure
  """
  @spec pagespeedonline_pagespeedapi_runpagespeed(
          Tesla.Env.client(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.PageSpeedOnline.V4.Model.PagespeedApiPagespeedResponseV4.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def pagespeedonline_pagespeedapi_runpagespeed(
        connection,
        url,
        optional_params \\ [],
        opts \\ []
      ) do
    # Whitelist of optional params and where each is placed (all query string).
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :filter_third_party_resources => :query,
      :locale => :query,
      :rule => :query,
      :screenshot => :query,
      :snapshots => :query,
      :strategy => :query,
      :utm_campaign => :query,
      :utm_source => :query
    }
    # Build a GET /runPagespeed request, execute it, and decode the body into
    # the response model struct.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/runPagespeed", %{})
      |> Request.add_param(:query, :url, url)
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)
    connection
    |> Connection.execute(request)
    |> Response.decode(
      opts ++ [struct: %GoogleApi.PageSpeedOnline.V4.Model.PagespeedApiPagespeedResponseV4{}]
    )
  end
end
| 43.518868 | 187 | 0.653588 |
1cf55604dc4c7ae621363255fae4add71826752c | 2,013 | ex | Elixir | clients/network_management/lib/google_api/network_management/v1beta1/model/audit_log_config.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/network_management/lib/google_api/network_management/v1beta1/model/audit_log_config.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/network_management/lib/google_api/network_management/v1beta1/model/audit_log_config.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.NetworkManagement.V1beta1.Model.AuditLogConfig do
  @moduledoc """
  Provides the configuration for logging a type of permissions. Example: { "audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [ "user:jose@example.com" ] }, { "log_type": "DATA_WRITE" } ] } This enables 'DATA_READ' and 'DATA_WRITE' logging, while exempting jose@example.com from DATA_READ logging.
  ## Attributes
  * `exemptedMembers` (*type:* `list(String.t)`, *default:* `nil`) - Specifies the identities that do not cause logging for this type of permission. Follows the same format of Binding.members.
  * `logType` (*type:* `String.t`, *default:* `nil`) - The log type that this config enables.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :exemptedMembers => list(String.t()),
          :logType => String.t()
        }
  # field/2-3 (from Gax.ModelBase) declares JSON <-> struct field mappings.
  field(:exemptedMembers, type: :list)
  field(:logType)
end
# Delegates Poison decoding to the model's generated decode/2.
defimpl Poison.Decoder, for: GoogleApi.NetworkManagement.V1beta1.Model.AuditLogConfig do
  def decode(value, options) do
    GoogleApi.NetworkManagement.V1beta1.Model.AuditLogConfig.decode(value, options)
  end
end
# Encodes the struct via the shared Gax model-base encoder.
defimpl Poison.Encoder, for: GoogleApi.NetworkManagement.V1beta1.Model.AuditLogConfig do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 40.26 | 316 | 0.735221 |
1cf559acdfc0fdef63eac9502af17d295cfef5a1 | 1,941 | ex | Elixir | lib/todays_pizza_web.ex | JohnB/todays_pizza | 816c8f4f566558bbf8910ef6dd20e363bcddf8fd | [
"MIT"
] | null | null | null | lib/todays_pizza_web.ex | JohnB/todays_pizza | 816c8f4f566558bbf8910ef6dd20e363bcddf8fd | [
"MIT"
] | null | null | null | lib/todays_pizza_web.ex | JohnB/todays_pizza | 816c8f4f566558bbf8910ef6dd20e363bcddf8fd | [
"MIT"
] | null | null | null | defmodule TodaysPizzaWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use TodaysPizzaWeb, :controller
use TodaysPizzaWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
  # Quoted boilerplate injected by `use TodaysPizzaWeb, :controller`.
  def controller do
    quote do
      use Phoenix.Controller, namespace: TodaysPizzaWeb
      import Plug.Conn
      import TodaysPizzaWeb.Gettext
      alias TodaysPizzaWeb.Router.Helpers, as: Routes
    end
  end
  # Quoted boilerplate injected by `use TodaysPizzaWeb, :view`.
  def view do
    quote do
      use Phoenix.View,
        root: "lib/todays_pizza_web/templates",
        namespace: TodaysPizzaWeb
      # Import convenience functions from controllers
      import Phoenix.Controller,
        only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]
      # Include shared imports and aliases for views
      unquote(view_helpers())
    end
  end
  # Quoted boilerplate injected by `use TodaysPizzaWeb, :router`.
  def router do
    quote do
      use Phoenix.Router
      import Plug.Conn
      import Phoenix.Controller
    end
  end
  # Quoted boilerplate injected by `use TodaysPizzaWeb, :channel`.
  def channel do
    quote do
      use Phoenix.Channel
      import TodaysPizzaWeb.Gettext
    end
  end
  # Imports shared by all views; spliced into view/0 via unquote.
  defp view_helpers do
    quote do
      # Use all HTML functionality (forms, tags, etc)
      use Phoenix.HTML
      # Import basic rendering functionality (render, render_layout, etc)
      import Phoenix.View
      import TodaysPizzaWeb.ErrorHelpers
      import TodaysPizzaWeb.Gettext
      alias TodaysPizzaWeb.Router.Helpers, as: Routes
    end
  end
  @doc """
  When used, dispatch to the appropriate controller/view/etc.
  For example, `use TodaysPizzaWeb, :controller` invokes `controller/0` above.
  """
  defmacro __using__(which) when is_atom(which) do
    apply(__MODULE__, which, [])
  end
end
| 23.670732 | 76 | 0.693457 |
1cf564c7dcc640f42307346ad70292c58aa7f50c | 926 | ex | Elixir | lib/lib/token.ex | ntsai/xin | f4faeb73a3041a2923559f16a2255712e1615aae | [
"MIT"
] | null | null | null | lib/lib/token.ex | ntsai/xin | f4faeb73a3041a2923559f16a2255712e1615aae | [
"MIT"
] | null | null | null | lib/lib/token.ex | ntsai/xin | f4faeb73a3041a2923559f16a2255712e1615aae | [
"MIT"
] | null | null | null | defmodule Xin.Token do
import Joken
@moduledoc """
  JWT token helper module.
  Configuration in config.exs:
config :xin, :token,
secret: "xxxxxxxxxxxxxxxx"
"""
@secret Application.get_env(:xin, :token)[:secret] || "secret_key"
  # Creates a signed JWT (compact form) from the given claims; the conn
  # argument is unused.
  def new(_conn, data) do
    data
    |> token
    |> with_signer(hs256(@secret))
    |> sign
    |> get_compact
  end
  # Verifies a token (HS256 with @secret) and returns its claims.
  def get(key \\ %{}) do
    t = key
    |> token
    |> with_signer(hs256(@secret))
    |> verify
    t.claims
  end
end
defmodule Xin.Token.Auth do
  import Plug.Conn
  @moduledoc """
  JWT token authentication plug.
  """
  def init(default) do
    default
  end
  # Resolves the current user from the Authorization header (JWT claims) or,
  # failing that, from the session; stores it under conn.user.
  # NOTE(review): Map.put on %Plug.Conn{} adds a key outside the struct's
  # declared fields — works because structs are maps, but assigns would be
  # the conventional place; confirm downstream readers expect `conn.user`.
  def call(conn, _default) do
    authorization = Xin.Http.authorization(conn)
    if authorization do
      Map.put conn, :user, Xin.Token.get(authorization)
    else
      data = conn |> fetch_session |> get_session(:user)
      if data, do: Map.put(conn, :user, data), else: conn
    end
  end
end
| 17.148148 | 68 | 0.616631 |
1cf5b94815c9e8ddb33f06232bb7de0f2f0d1718 | 1,738 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v35/model/metros_list_response.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v35/model/metros_list_response.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v35/model/metros_list_response.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V35.Model.MetrosListResponse do
  @moduledoc """
  Metro List Response
  ## Attributes
  * `kind` (*type:* `String.t`, *default:* `nil`) - Identifies what kind of resource this is. Value: the fixed string "dfareporting#metrosListResponse".
  * `metros` (*type:* `list(GoogleApi.DFAReporting.V35.Model.Metro.t)`, *default:* `nil`) - Metro collection.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :kind => String.t() | nil,
          :metros => list(GoogleApi.DFAReporting.V35.Model.Metro.t()) | nil
        }
  # field/2-3 (from Gax.ModelBase) declares JSON <-> struct field mappings.
  field(:kind)
  field(:metros, as: GoogleApi.DFAReporting.V35.Model.Metro, type: :list)
end
# Delegates Poison decoding to the model's generated decode/2.
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V35.Model.MetrosListResponse do
  def decode(value, options) do
    GoogleApi.DFAReporting.V35.Model.MetrosListResponse.decode(value, options)
  end
end
# Encodes the struct via the shared Gax model-base encoder.
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V35.Model.MetrosListResponse do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 34.76 | 154 | 0.7313 |
1cf5bbfae52621f4be147915294dfc62d869c988 | 3,063 | exs | Elixir | mix.exs | Api2sem2021/5-ADS2020-2-equipe6-api | 11484e0232c1edd7fc928aa68d5014f2e3a20d07 | [
"MIT"
] | null | null | null | mix.exs | Api2sem2021/5-ADS2020-2-equipe6-api | 11484e0232c1edd7fc928aa68d5014f2e3a20d07 | [
"MIT"
] | 3 | 2020-09-20T22:59:35.000Z | 2020-09-20T23:00:47.000Z | mix.exs | SEGURANCA-DA-INFORMACAO-LGPD/sakavault_api | 11484e0232c1edd7fc928aa68d5014f2e3a20d07 | [
"MIT"
] | null | null | null | defmodule SakaVault.MixProject do
use Mix.Project
  # Mix project definition: compile paths vary per env (see elixirc_paths/1),
  # and several CI/coverage tasks are forced to run in the :test env.
  def project do
    [
      app: :sakavault,
      version: "0.1.0",
      elixir: "~> 1.9",
      elixirc_paths: elixirc_paths(Mix.env()),
      test_coverage: [tool: ExCoveralls],
      compilers: [:phoenix] ++ Mix.compilers(),
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      deps: deps(),
      preferred_cli_env: [
        ci: :test,
        fast_ci: :test,
        coveralls: :test,
        "coveralls.html": :test
      ]
    ]
  end
  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      mod: {SakaVault.Application, []},
      # :crypto is started explicitly alongside logger/runtime_tools.
      extra_applications: [:logger, :runtime_tools, :crypto]
    ]
  end
# Specifies which paths to compile per environment.
defp elixirc_paths(:dev), do: ["lib", "test/support/factories.ex"]
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
  # Specifies your project dependencies.
  #
  # Type `mix help deps` for examples and options.
  defp deps do
    [
      # Environment
      {:confex, "~> 3.4.0"},
      # General
      {:jason, "~> 1.0"},
      # Auth
      {:guardian, "~> 2.1"},
      {:corsica, "~> 1.1"},
      # Aws
      {:ex_aws, "~> 2.1"},
      {:ex_aws_secretsmanager, "~> 2.0"},
      {:hackney, "~> 1.9"},
      # Securely hashing & verifying passwords
      {:argon2_elixir, "~> 1.3"},
      # Phoenix
      {:phoenix, "~> 1.5.0"},
      # Ecto
      {:phoenix_ecto, "~> 4.1"},
      {:ecto_sql, "~> 3.4.4"},
      {:postgrex, ">= 0.0.0"},
      {:plug_cowboy, "~> 2.0"},
      # Metrics
      {:telemetry_metrics, "~> 0.4"},
      {:telemetry_poller, "~> 0.4"},
      # Development and testing
      # (runtime: false tools run at compile/CLI time only and are excluded
      # from releases)
      {:mox, "~> 1.0", only: :test},
      {:faker, "~> 0.15", only: [:dev, :test]},
      {:ex_machina, "~> 2.4", only: [:dev, :test]},
      {:credo, "~> 1.4", only: [:dev, :test], runtime: false},
      {:sobelow, "~> 0.10", only: [:dev, :test], runtime: false},
      {:excoveralls, "~> 0.13", only: [:dev, :test], runtime: false},
      # Production releases
      {:distillery, "~> 2.1", runtime: false}
    ]
  end
  # Aliases are shortcuts or tasks specific to the current project.
  # For example, to install project dependencies and perform other setup tasks, run:
  #
  #     $ mix setup
  #
  # See the documentation for `Mix` for more info on aliases.
  defp aliases do
    [
      setup: ["deps.get", "ecto.setup"],
      "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"],
      # ci: strict formatting/compilation checks, then the fast_ci pipeline.
      ci: [
        "format --check-formatted",
        "clean",
        "compile --warnings-as-errors --all-warnings",
        "fast_ci"
      ],
      # fast_ci: lint, security scan and coverage without the clean rebuild.
      fast_ci: [
        "ecto.create",
        "ecto.migrate",
        "credo --strict",
        "sobelow -i Config.HTTPS --skip --verbose --exit",
        "coveralls.html --raise --slowest 10"
      ]
    ]
  end
end
| 26.405172 | 84 | 0.538035 |
1cf5ca44a87cba4d2f10c70ea76a68ffe5228b01 | 76,712 | ex | Elixir | lib/mint/http2.ex | moogle19/mint | 80ecfad07dfcefd70d3c729624f7ae3c147ae052 | [
"Apache-2.0"
] | null | null | null | lib/mint/http2.ex | moogle19/mint | 80ecfad07dfcefd70d3c729624f7ae3c147ae052 | [
"Apache-2.0"
] | null | null | null | lib/mint/http2.ex | moogle19/mint | 80ecfad07dfcefd70d3c729624f7ae3c147ae052 | [
"Apache-2.0"
] | null | null | null | defmodule Mint.HTTP2 do
@moduledoc """
Processless HTTP client with support for HTTP/2.
This module provides a data structure that represents an HTTP/2 connection to
a given server. The connection is represented as an opaque struct `%Mint.HTTP2{}`.
The connection is a data structure and is not backed by a process, and all the
connection handling happens in the process that creates the struct.
This module and data structure work exactly like the ones described in the `Mint.HTTP`
module, with the exception that `Mint.HTTP2` specifically deals with HTTP/2 while
`Mint.HTTP` deals seamlessly with HTTP/1.1 and HTTP/2. For more information on
how to use the data structure and client architecture, see `Mint.HTTP`.
## HTTP/2 streams and requests
HTTP/2 introduces the concept of **streams**. A stream is an isolated conversation
between the client and the server. Each stream is unique and identified by a unique
**stream ID**, which means that there's no order when data comes on different streams
since they can be identified uniquely. A stream closely corresponds to a request, so
in this documentation and client we will mostly refer to streams as "requests".
We mentioned data on streams can come in arbitrary order, and streams are requests,
so the practical effect of this is that performing request A and then request B
does not mean that the response to request A will come before the response to request B.
This is why we identify each request with a unique reference returned by `request/5`.
See `request/5` for more information.
## Closed connection
In HTTP/2, the connection can either be open, closed, or only closed for writing.
When a connection is closed for writing, the client cannot send requests or stream
body chunks, but it can still read data that the server might be sending. When the
connection gets closed on the writing side, a `:server_closed_connection` error is
returned. `{:error, request_ref, error}` is returned for requests that haven't been
processed by the server, with the reason of `error` being `:unprocessed`.
These requests are safe to retry.
## HTTP/2 settings
HTTP/2 supports settings negotiation between servers and clients. The server advertises
its settings to the client and the client advertises its settings to the server. A peer
(server or client) has to acknowledge the settings advertised by the other peer before
those settings come into action (that's why it's called a negotiation).
A first settings negotiation happens right when the connection starts.
Servers and clients can renegotiate settings at any time during the life of the
connection.
Mint users don't need to care about settings acknowledgements directly since they're
handled transparently by `stream/2`.
To retrieve the server settings, you can use `get_server_setting/2`. Doing so is often
useful to be able to tune your requests based on the server settings.
To communicate client settings to the server, use `put_settings/2` or pass them when
starting up a connection with `connect/4`. Note that the server needs to acknowledge
the settings sent through `put_setting/2` before those settings come into effect. The
server ack is processed transparently by `stream/2`, but this means that if you change
a setting through `put_settings/2` and try to retrieve the value of that setting right
after with `get_client_setting/2`, you'll likely get the old value of that setting. Once
the server acknowledges the new settings, the updated value will be returned by
`get_client_setting/2`.
## Server push
HTTP/2 supports [server push](https://en.wikipedia.org/wiki/HTTP/2_Server_Push), which
is a way for a server to send a response to a client without the client needing to make
the corresponding request. The server sends a `:push_promise` response to a normal request:
this creates a new request reference. Then, the server sends normal responses for the newly
created request reference.
Let's see an example. We will ask the server for `"/index.html"` and the server will
send us a push promise for `"/style.css"`.
{:ok, conn} = Mint.HTTP2.connect(:https, "example.com", 443)
{:ok, conn, request_ref} = Mint.HTTP2.request(conn, "GET", "/index.html", _headers = [], _body = "")
next_message =
receive do
msg -> msg
end
{:ok, conn, responses} = Mint.HTTP2.stream(conn, next_message)
[
{:push_promise, ^request_ref, promised_request_ref, promised_headers},
{:status, ^request_ref, 200},
{:headers, ^request_ref, []},
{:data, ^request_ref, "<html>..."},
{:done, ^request_ref}
] = responses
promised_headers
#=> [{":method", "GET"}, {":path", "/style.css"}]
As you can see in the example above, when the server sends a push promise then a
`:push_promise` response is returned as a response to a request. The `:push_promise`
response contains a `promised_request_ref` and some `promised_headers`. The
`promised_request_ref` is the new request ref that pushed responses will be tagged with.
`promised_headers` are headers that tell the client *what request* the promised response
will respond to. The idea is that the server tells the client a request the client will
want to make and then preemptively sends a response for that request. Promised headers
will always include `:method`, `:path`, and `:authority`.
next_message =
receive do
msg -> msg
end
{:ok, conn, responses} = Mint.HTTP2.stream(conn, next_message)
[
{:status, ^promised_request_ref, 200},
{:headers, ^promised_request_ref, []},
{:data, ^promised_request_ref, "body { ... }"},
{:done, ^promised_request_ref}
]
The response to a promised request is like a response to any normal request.
### Disabling server pushes
HTTP/2 exposes a boolean setting for enabling or disabling server pushes with `:enable_push`.
You can pass this option when connecting or in `put_settings/2`. By default server push
is enabled.
"""
use Bitwise, skip_operators: true
import Mint.Core.Util
import Mint.HTTP2.Frame, except: [encode: 1, decode_next: 1]
alias Mint.{HTTPError, TransportError}
alias Mint.Types
alias Mint.Core.Util
alias Mint.HTTP2.Frame
require Logger
require Integer
@behaviour Mint.Core.Conn
## Constants
@connection_preface "PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n"
@transport_opts [alpn_advertised_protocols: ["h2"]]
@default_window_size 65_535
@max_window_size 2_147_483_647
@default_max_frame_size 16_384
@valid_max_frame_size_range @default_max_frame_size..16_777_215
@user_agent "mint/" <> Mix.Project.config()[:version]
# HTTP/2 connection struct.
defstruct [
# Transport things.
:transport,
:socket,
:mode,
# Host things.
:hostname,
:port,
:scheme,
# Connection state (open, closed, and so on).
:state,
# Fields of the connection.
buffer: "",
window_size: @default_window_size,
encode_table: HPAX.new(4096),
decode_table: HPAX.new(4096),
# Queue for sent PING frames.
ping_queue: :queue.new(),
# Queue for sent SETTINGS frames.
client_settings_queue: :queue.new(),
# Stream-set-related things.
next_stream_id: 3,
streams: %{},
open_client_stream_count: 0,
open_server_stream_count: 0,
ref_to_stream_id: %{},
# Settings that the server communicates to the client.
server_settings: %{
enable_push: true,
max_concurrent_streams: 100,
initial_window_size: @default_window_size,
max_frame_size: @default_max_frame_size,
max_header_list_size: :infinity,
# Only supported by the server: https://www.rfc-editor.org/rfc/rfc8441.html#section-3
enable_connect_protocol: false
},
# Settings that the client communicates to the server.
client_settings: %{
max_concurrent_streams: 100,
max_frame_size: @default_max_frame_size,
enable_push: true
},
# Headers being processed (when headers are split into multiple frames with CONTINUATIONS, all
# the continuation frames must come one right after the other).
headers_being_processed: nil,
# Stores the headers returned by the proxy in the `CONNECT` method
proxy_headers: [],
# Private store.
private: %{}
]
## Types
@typedoc """
HTTP/2 setting with its value.
This type represents both server settings as well as client settings. To retrieve
server settings use `get_server_setting/2` and to retrieve client settings use
`get_client_setting/2`. To send client settings to the server, see `put_settings/2`.
The supported settings are the following:
* `:header_table_size` - (integer) corresponds to `SETTINGS_HEADER_TABLE_SIZE`.
* `:enable_push` - (boolean) corresponds to `SETTINGS_ENABLE_PUSH`. Sets whether
push promises are supported. If you don't want to support push promises,
use `put_settings/2` to tell the server that your client doesn't want push promises.
* `:max_concurrent_streams` - (integer) corresponds to `SETTINGS_MAX_CONCURRENT_STREAMS`.
Tells what is the maximum number of streams that the peer sending this (client or server)
supports. As mentioned in the module documentation, HTTP/2 streams are equivalent to
requests, so knowing the maximum number of streams that the server supports can be useful
to know how many concurrent requests can be open at any time. Use `get_server_setting/2`
to find out how many concurrent streams the server supports.
* `:initial_window_size` - (integer smaller than `#{inspect(@max_window_size)}`)
corresponds to `SETTINGS_INITIAL_WINDOW_SIZE`. Tells what is the value of
the initial HTTP/2 window size for the peer that sends this setting.
* `:max_frame_size` - (integer in the range `#{inspect(@valid_max_frame_size_range)}`)
corresponds to `SETTINGS_MAX_FRAME_SIZE`. Tells what is the maximum size of an HTTP/2
frame for the peer that sends this setting.
* `:max_header_list_size` - (integer) corresponds to `SETTINGS_MAX_HEADER_LIST_SIZE`.
* `:enable_connect_protocol` - (boolean) corresponds to `SETTINGS_ENABLE_CONNECT_PROTOCOL`.
Sets whether the client may invoke the extended connect protocol which is used to
bootstrap WebSocket connections.
"""
@type setting() ::
{:enable_push, boolean()}
| {:max_concurrent_streams, pos_integer()}
| {:initial_window_size, 1..2_147_483_647}
| {:max_frame_size, 16_384..16_777_215}
| {:max_header_list_size, :infinity | pos_integer()}
| {:enable_connect_protocol, boolean()}
@typedoc """
HTTP/2 settings.
See `t:setting/0`.
"""
@type settings() :: [setting()]
@typedoc """
An HTTP/2-specific error reason.
The values can be:
* `:closed` - when you try to make a request or stream a body chunk but the connection
is closed.
* `:closed_for_writing` - when you try to make a request or stream a body chunk but
the connection is closed for writing. This means you cannot issue any more requests.
See the "Closed connection" section in the module documentation for more information.
* `:too_many_concurrent_requests` - when the maximum number of concurrent requests
allowed by the server is reached. To find out what this limit is, use `get_setting/2`
with the `:max_concurrent_streams` setting name.
* `{:max_header_list_size_exceeded, size, max_size}` - when the maximum size of
the header list is reached. `size` is the actual value of the header list size,
`max_size` is the maximum value allowed. See `get_setting/2` to retrieve the
value of the max size.
* `{:exceeds_window_size, what, window_size}` - when the data you're trying to send
exceeds the window size of the connection (if `what` is `:connection`) or of a request
(if `what` is `:request`). `window_size` is the allowed window size. See
`get_window_size/2`.
* `{:stream_not_found, stream_id}` - when the given request is not found.
* `:unknown_request_to_stream` - when you're trying to stream data on an unknown
request.
* `:request_is_not_streaming` - when you try to send data (with `stream_request_body/3`)
on a request that is not open for streaming.
* `:unprocessed` - when a request was closed because it was not processed by the server.
When this error is returned, it means that the server hasn't processed the request at all,
so it's safe to retry the given request on a different or new connection.
* `{:server_closed_request, error_code}` - when the server closes the request.
`error_code` is the reason why the request was closed.
* `{:server_closed_connection, reason, debug_data}` - when the server closes the connection
gracefully or because of an error. In HTTP/2, this corresponds to a `GOAWAY` frame.
`error` is the reason why the connection was closed. `debug_data` is additional debug data.
* `{:frame_size_error, frame}` - when there's an error with the size of a frame.
`frame` is the frame type, such as `:settings` or `:window_update`.
* `{:protocol_error, debug_data}` - when there's a protocol error.
`debug_data` is a string that explains the nature of the error.
* `{:compression_error, debug_data}` - when there's a header compression error.
`debug_data` is a string that explains the nature of the error.
* `{:flow_control_error, debug_data}` - when there's a flow control error.
`debug_data` is a string that explains the nature of the error.
"""
@type error_reason() :: term()
@opaque t() :: %Mint.HTTP2{}
## Public interface
@doc """
Same as `Mint.HTTP.connect/4`, but forces a HTTP/2 connection.
"""
@spec connect(Types.scheme(), Types.address(), :inet.port_number(), keyword()) ::
{:ok, t()} | {:error, Types.error()}
def connect(scheme, address, port, opts \\ []) do
hostname = Mint.Core.Util.hostname(opts, address)
transport_opts =
opts
|> Keyword.get(:transport_opts, [])
|> Keyword.merge(@transport_opts)
|> Keyword.put(:hostname, hostname)
case negotiate(address, port, scheme, transport_opts) do
{:ok, socket} ->
initiate(scheme, socket, hostname, port, opts)
{:error, reason} ->
{:error, reason}
end
end
@doc false
@spec upgrade(
        Types.scheme(),
        Mint.Types.socket(),
        Types.scheme(),
        String.t(),
        :inet.port_number(),
        keyword()
      ) :: {:ok, t()} | {:error, Types.error()}
def upgrade(old_scheme, socket, new_scheme, hostname, port, opts) do
  transport = scheme_to_transport(new_scheme)

  # Caller-provided transport options plus the mandatory ALPN "h2" advertisement.
  transport_opts =
    opts
    |> Keyword.get(:transport_opts, [])
    |> Keyword.merge(@transport_opts)

  case transport.upgrade(socket, old_scheme, hostname, port, transport_opts) do
    {:ok, socket} -> initiate(new_scheme, socket, hostname, port, opts)
    other -> other
  end
end
@doc """
See `Mint.HTTP.close/1`.
"""
@impl true
@spec close(t()) :: {:ok, t()}
def close(conn)
def close(%__MODULE__{state: :open} = conn) do
send_connection_error!(conn, :no_error, "connection peacefully closed by client")
catch
{:mint, conn, %HTTPError{reason: {:no_error, _}}} ->
{:ok, conn}
# We could have an error sending the GOAWAY frame, but we want to ignore that since
# we're closing the connection anyways.
{:mint, conn, %TransportError{}} ->
conn = put_in(conn.state, :closed)
{:ok, conn}
end
def close(%__MODULE__{state: {:goaway, _error_code, _debug_data}} = conn) do
_ = conn.transport.close(conn.socket)
{:ok, put_in(conn.state, :closed)}
end
def close(%__MODULE__{state: :closed} = conn) do
{:ok, conn}
end
@doc """
See `Mint.HTTP.open?/1`.
"""
@impl true
@spec open?(t(), :read | :write | :read_write) :: boolean()
def open?(%Mint.HTTP2{state: state} = _conn, type \\ :read_write)
when type in [:read, :write, :read_write] do
case state do
:open -> true
{:goaway, _error_code, _debug_data} -> type == :read
:closed -> false
end
end
@doc """
See `Mint.HTTP.request/5`.
In HTTP/2, opening a request means opening a new HTTP/2 stream (see the
module documentation). This means that a request could fail because the
maximum number of concurrent streams allowed by the server has been reached.
In that case, the error reason `:too_many_concurrent_requests` is returned.
If you want to avoid incurring in this error, you can retrieve the value of
the maximum number of concurrent streams supported by the server through
`get_server_setting/2` (passing in the `:max_concurrent_streams` setting name).
## Header list size
In HTTP/2, the server can optionally specify a maximum header list size that
the client needs to respect when sending headers. The header list size is calculated
by summing the length (in bytes) of each header name plus value, plus 32 bytes for
each header. Note that pseudo-headers (like `:path` or `:method`) count towards
this size. If the size is exceeded, an error is returned. To check what the size
is, use `get_server_setting/2`.
## Request body size
If the request body size will exceed the window size of the HTTP/2 stream created by the
request or the window size of the connection Mint will return a `:exceeds_window_size`
error.
To ensure you do not exceed the window size it is recommended to stream the request
body by initially passing `:stream` as the body and sending the body in chunks using
`stream_request_body/3` and using `get_window_size/2` to get the window size of the
request and connection.
"""
@impl true
@spec request(
        t(),
        method :: String.t(),
        path :: String.t(),
        Types.headers(),
        body :: iodata() | nil | :stream
      ) ::
        {:ok, t(), Types.request_ref()}
        | {:error, t(), Types.error()}
def request(conn, method, path, headers, body)

# No requests can be made on a fully closed connection.
def request(%Mint.HTTP2{state: :closed} = conn, _method, _path, _headers, _body) do
  {:error, conn, wrap_error(:closed)}
end

# After a GOAWAY the connection is closed for writing: the server will not
# process new streams, so opening one is refused.
def request(
      %Mint.HTTP2{state: {:goaway, _error_code, _debug_data}} = conn,
      _method,
      _path,
      _headers,
      _body
    ) do
  {:error, conn, wrap_error(:closed_for_writing)}
end

def request(%Mint.HTTP2{} = conn, method, path, headers, body)
    when is_binary(method) and is_binary(path) and is_list(headers) do
  # Normalize user headers and add the HTTP/2 pseudo-headers (:method,
  # :path, ...), which must precede all regular headers in the frame.
  headers =
    headers
    |> downcase_header_names()
    |> add_pseudo_headers(conn, method, path)
    |> add_default_headers(body)
    |> sort_pseudo_headers_to_front()

  # Allocate a new stream ID / request ref, encode HEADERS (+ DATA if a
  # full body was given), and write it out. send!/2 throws on failure.
  {conn, stream_id, ref} = open_stream(conn)
  {conn, payload} = encode_request_payload(conn, stream_id, headers, body)
  conn = send!(conn, payload)
  {:ok, conn, ref}
catch
  :throw, {:mint, _conn, reason} ->
    # The stream is invalid and "_conn" may be tracking it, so we return the original connection instead.
    {:error, conn, reason}
end
@doc """
See `Mint.HTTP.stream_request_body/3`.
"""
@impl true
@spec stream_request_body(
        t(),
        Types.request_ref(),
        iodata() | :eof | {:eof, trailing_headers :: Types.headers()}
      ) :: {:ok, t()} | {:error, t(), Types.error()}
def stream_request_body(conn, request_ref, chunk)

# Cannot stream on a fully closed connection.
def stream_request_body(%Mint.HTTP2{state: :closed} = conn, _request_ref, _chunk) do
  {:error, conn, wrap_error(:closed)}
end

# After a GOAWAY the connection is closed for writing, so no more body
# chunks can be sent.
def stream_request_body(
      %Mint.HTTP2{state: {:goaway, _error_code, _debug_data}} = conn,
      _request_ref,
      _chunk
    ) do
  {:error, conn, wrap_error(:closed_for_writing)}
end

def stream_request_body(%Mint.HTTP2{} = conn, request_ref, chunk)
    when is_reference(request_ref) do
  # Map the request ref back to its HTTP/2 stream ID, then encode and send
  # the chunk (:eof / {:eof, trailers} end the stream). send!/2 throws on failure.
  case Map.fetch(conn.ref_to_stream_id, request_ref) do
    {:ok, stream_id} ->
      {conn, payload} = encode_stream_body_request_payload(conn, stream_id, chunk)
      conn = send!(conn, payload)
      {:ok, conn}

    :error ->
      {:error, conn, wrap_error(:unknown_request_to_stream)}
  end
catch
  :throw, {:mint, _conn, reason} ->
    # The stream is invalid and "_conn" may be tracking it, so we return the original connection instead.
    {:error, conn, reason}
end
@doc """
Pings the server.
This function is specific to HTTP/2 connections. It sends a **ping** request to
the server `conn` is connected to. A `{:ok, conn, request_ref}` tuple is returned,
where `conn` is the updated connection and `request_ref` is a unique reference that
identifies this ping request. The response to a ping request is returned by `stream/2`
as a `{:pong, request_ref}` tuple. If there's an error, this function returns
`{:error, conn, reason}` where `conn` is the updated connection and `reason` is the
error reason.
`payload` must be an 8-byte binary with arbitrary content. When the server responds to
a ping request, it will use that same payload. By default, the payload is an 8-byte
binary with all bits set to `0`.
Pinging can be used to measure the latency with the server and to ensure the connection
is alive and well.
## Examples
{:ok, conn, ref} = Mint.HTTP2.ping(conn)
"""
@spec ping(t(), <<_::8>>) :: {:ok, t(), Types.request_ref()} | {:error, t(), Types.error()}
def ping(%Mint.HTTP2{} = conn, payload \\ :binary.copy(<<0>>, 8))
    when byte_size(payload) == 8 do
  # send_ping/2 sends the PING frame and throws {:mint, conn, error} on
  # failure; the throw is translated into an {:error, conn, error} tuple.
  {conn, ref} = send_ping(conn, payload)
  {:ok, conn, ref}
catch
  :throw, {:mint, conn, error} -> {:error, conn, error}
end
@doc """
Communicates the given client settings to the server.
This function is HTTP/2-specific.
This function takes a connection and a keyword list of HTTP/2 settings and sends
the values of those settings to the server. The settings won't be effective until
the server acknowledges them, which will be handled transparently by `stream/2`.
This function returns `{:ok, conn}` when sending the settings to the server is
successful, with `conn` being the updated connection. If there's an error, this
function returns `{:error, conn, reason}` with `conn` being the updated connection
and `reason` being the reason of the error.
## Supported settings
See `t:setting/0` for the supported settings. You can see the meaning
of these settings [in the corresponding section in the HTTP/2
RFC](https://httpwg.org/specs/rfc7540.html#SettingValues).
See the "HTTP/2 settings" section in the module documentation for more information.
## Examples
{:ok, conn} = Mint.HTTP2.put_settings(conn, max_frame_size: 100)
"""
@spec put_settings(t(), settings()) :: {:ok, t()} | {:error, t(), Types.error()}
def put_settings(%Mint.HTTP2{} = conn, settings) when is_list(settings) do
  # send_settings/2 sends the SETTINGS frame and throws {:mint, conn, error}
  # on failure; the throw is translated into an {:error, conn, error} tuple.
  {:ok, send_settings(conn, settings)}
catch
  :throw, {:mint, conn, error} -> {:error, conn, error}
end
@doc """
Gets the value of the given HTTP/2 server settings.
This function returns the value of the given HTTP/2 setting that the server
advertised to the client. This function is HTTP/2 specific.
For more information on HTTP/2 settings, see [the related section in
the RFC](https://httpwg.org/specs/rfc7540.html#SettingValues).
See the "HTTP/2 settings" section in the module documentation for more information.
## Supported settings
The possible settings that can be retrieved are described in `t:setting/0`.
Any other atom passed as `name` will raise an error.
## Examples
Mint.HTTP2.get_server_setting(conn, :max_concurrent_streams)
#=> 500
"""
@spec get_server_setting(t(), atom()) :: term()
def get_server_setting(%Mint.HTTP2{server_settings: server_settings}, name)
    when is_atom(name) do
  # Look the setting up in the settings the server advertised to us.
  get_setting(server_settings, name)
end
@doc """
Gets the value of the given HTTP/2 client setting.
This function returns the value of the given HTTP/2 setting that the client
advertised to the server. Client settings can be advertised through `put_settings/2`
or when starting up a connection.
Client settings have to be acknowledged by the server before coming into effect.
This function is HTTP/2 specific. For more information on HTTP/2 settings, see
[the related section in the RFC](https://httpwg.org/specs/rfc7540.html#SettingValues).
See the "HTTP/2 settings" section in the module documentation for more information.
## Supported settings
The possible settings that can be retrieved are described in `t:setting/0`.
Any other atom passed as `name` will raise an error.
## Examples
Mint.HTTP2.get_client_setting(conn, :max_concurrent_streams)
#=> 500
"""
@spec get_client_setting(t(), atom()) :: term()
def get_client_setting(%Mint.HTTP2{client_settings: client_settings}, name)
    when is_atom(name) do
  # Look the setting up in the settings we advertised to the server.
  get_setting(client_settings, name)
end
# Fetches a single setting out of a settings map, raising an ArgumentError
# for names that are not valid HTTP/2 settings.
defp get_setting(settings, name) do
  Map.get_lazy(settings, name, fn ->
    raise ArgumentError, "unknown HTTP/2 setting: #{inspect(name)}"
  end)
end
@doc """
Cancels an in-flight request.
This function is HTTP/2 specific. It cancels an in-flight request. The server could have
already sent responses for the request you want to cancel: those responses will be parsed
by the connection but not returned to the user. No more responses
to a request will be returned after you call `cancel_request/2` on that request.
If there's no error in canceling the request, `{:ok, conn}` is returned where `conn` is
the updated connection. If there's an error, `{:error, conn, reason}` is returned where
`conn` is the updated connection and `reason` is the error reason.
## Examples
{:ok, conn, ref} = Mint.HTTP2.request(conn, "GET", "/", _headers = [], _body = nil)
{:ok, conn} = Mint.HTTP2.cancel_request(conn, ref)
"""
@spec cancel_request(t(), Types.request_ref()) :: {:ok, t()} | {:error, t(), Types.error()}
def cancel_request(%Mint.HTTP2{} = conn, request_ref) when is_reference(request_ref) do
  case Map.fetch(conn.ref_to_stream_id, request_ref) do
    {:ok, stream_id} ->
      # Close the stream with a :cancel error code; close_stream!/3 throws
      # {:mint, conn, error} if sending the frame fails.
      conn = close_stream!(conn, stream_id, _error_code = :cancel)
      {:ok, conn}

    :error ->
      # Unknown (e.g. already-completed) requests: canceling is a no-op.
      {:ok, conn}
  end
catch
  :throw, {:mint, conn, error} -> {:error, conn, error}
end
@doc """
Returns the window size of the connection or of a single request.
This function is HTTP/2 specific. It returns the window size of
either the connection if `connection_or_request` is `:connection` or of a single
request if `connection_or_request` is `{:request, request_ref}`.
Use this function to check the window size of the connection before sending a
full request. Also use this function to check the window size of both the
connection and of a request if you want to stream body chunks on that request.
For more information on flow control and window sizes in HTTP/2, see the section
below.
## HTTP/2 flow control
In HTTP/2, flow control is implemented through a
window size. When the client sends data to the server, the window size is decreased
and the server needs to "refill" it on the client side. You don't need to take care of
the refilling of the client window as it happens behind the scenes in `stream/2`.
A window size is kept for the entire connection and all requests affect this window
size. A window size is also kept per request.
The only thing that affects the window size is the body of a request, regardless of
if it's a full request sent with `request/5` or body chunks sent through
`stream_request_body/3`. That means that if we make a request with a body that is
five bytes long, like `"hello"`, the window size of the connection and the window size
of that particular request will decrease by five bytes.
If we use all the window size before the server refills it, functions like
`request/5` will return an error.
## Examples
On the connection:
Mint.HTTP2.get_window_size(conn, :connection)
#=> 65_536
On a single streamed request:
{:ok, conn, request_ref} = Mint.HTTP2.request(conn, "GET", "/", [], :stream)
Mint.HTTP2.get_window_size(conn, {:request, request_ref})
#=> 65_536
{:ok, conn} = Mint.HTTP2.stream_request_body(conn, request_ref, "hello")
Mint.HTTP2.get_window_size(conn, {:request, request_ref})
#=> 65_531
"""
@spec get_window_size(t(), :connection | {:request, Types.request_ref()}) :: non_neg_integer()
def get_window_size(conn, connection_or_request)

# Window size of the whole connection.
def get_window_size(%Mint.HTTP2{window_size: window_size}, :connection) do
  window_size
end

# Window size of a single request (stream); raises if the request ref is unknown.
def get_window_size(%Mint.HTTP2{} = conn, {:request, request_ref}) do
  case Map.fetch(conn.ref_to_stream_id, request_ref) do
    {:ok, stream_id} ->
      conn.streams[stream_id].window_size

    :error ->
      raise ArgumentError,
            "request with request reference #{inspect(request_ref)} was not found"
  end
end
@doc """
See `Mint.HTTP.stream/2`.
"""
@impl true
@spec stream(t(), term()) ::
        {:ok, t(), [Types.response()]}
        | {:error, t(), Types.error(), [Types.response()]}
        | :unknown
def stream(conn, message)

# Transport-level error on our socket: mark the connection closed and
# surface the wrapped transport error.
def stream(%Mint.HTTP2{socket: socket} = conn, {tag, socket, reason})
    when tag in [:tcp_error, :ssl_error] do
  error = conn.transport.wrap_error(reason)
  {:error, %{conn | state: :closed}, error, _responses = []}
end

# The peer closed our socket.
def stream(%Mint.HTTP2{socket: socket} = conn, {tag, socket})
    when tag in [:tcp_closed, :ssl_closed] do
  handle_closed(conn)
end

# New data from our socket: parse it into responses and, in active mode,
# re-arm the socket with {active: :once} unless the connection got closed
# while handling the data. A setopts failure also closes the connection.
def stream(%Mint.HTTP2{transport: transport, socket: socket} = conn, {tag, socket, data})
    when tag in [:tcp, :ssl] do
  case maybe_concat_and_handle_new_data(conn, data) do
    {:ok, %{mode: mode, state: state} = conn, responses}
    when mode == :active and state != :closed ->
      case transport.setopts(socket, active: :once) do
        :ok -> {:ok, conn, responses}
        {:error, reason} -> {:error, put_in(conn.state, :closed), reason, responses}
      end

    other ->
      other
  end
catch
  :throw, {:mint, conn, error, responses} -> {:error, conn, error, responses}
end

# Any other message does not belong to this connection.
def stream(%Mint.HTTP2{}, _message) do
  :unknown
end
@doc """
See `Mint.HTTP.open_request_count/1`.
In HTTP/2, the number of open requests is the number of requests **opened by the client**
that have not yet received a `:done` response. It's important to note that only
requests opened by the client (with `request/5`) count towards the number of open
requests, as requests opened from the server with server pushes (see the "Server push"
section in the module documentation) are not considered open requests. We do this because
clients might need to know how many open requests there are because the server limits
the number of concurrent requests the client can open. To know how many requests the client
can open, see `get_server_setting/2` with the `:max_concurrent_streams` setting.
"""
@impl true
@spec open_request_count(t()) :: non_neg_integer()
# Only client-opened streams count; server-pushed streams are excluded.
def open_request_count(%Mint.HTTP2{open_client_stream_count: count}), do: count
@doc """
See `Mint.HTTP.recv/3`.
"""
@impl true
@spec recv(t(), non_neg_integer(), timeout()) ::
        {:ok, t(), [Types.response()]}
        | {:error, t(), Types.error(), [Types.response()]}
def recv(conn, byte_count, timeout)

# Synchronous receive, only allowed in passive mode: read from the
# transport and feed the bytes through the frame parser.
def recv(%__MODULE__{mode: :passive} = conn, byte_count, timeout) do
  case conn.transport.recv(conn.socket, byte_count, timeout) do
    {:ok, data} ->
      maybe_concat_and_handle_new_data(conn, data)

    # The peer closed the socket while we were reading.
    {:error, %TransportError{reason: :closed}} ->
      handle_closed(conn)

    {:error, error} ->
      {:error, %{conn | state: :closed}, error, _responses = []}
  end
catch
  :throw, {:mint, conn, error, responses} -> {:error, conn, error, responses}
end

# In active mode, data arrives as messages handled by stream/2 instead.
def recv(_conn, _byte_count, _timeout) do
  raise ArgumentError,
        "can't use recv/3 to synchronously receive data when the mode is :active. " <>
          "Use Mint.HTTP.set_mode/2 to set the connection to passive mode"
end
@doc """
See `Mint.HTTP.set_mode/2`.
"""
@impl true
@spec set_mode(t(), :active | :passive) :: {:ok, t()} | {:error, Types.error()}
def set_mode(%__MODULE__{} = conn, mode) when mode in [:active, :passive] do
  # :active maps to {active: :once}, so the socket is re-armed per packet.
  active = if mode == :active, do: :once, else: false

  case conn.transport.setopts(conn.socket, active: active) do
    :ok -> {:ok, put_in(conn.mode, mode)}
    error -> error
  end
end
@doc """
See `Mint.HTTP.controlling_process/2`.
"""
@impl true
@spec controlling_process(t(), pid()) :: {:ok, t()} | {:error, Types.error()}
def controlling_process(%__MODULE__{} = conn, new_pid) when is_pid(new_pid) do
  # Hand socket ownership over to new_pid; the conn struct itself is unchanged.
  case conn.transport.controlling_process(conn.socket, new_pid) do
    :ok -> {:ok, conn}
    error -> error
  end
end
@doc """
See `Mint.HTTP.put_private/3`.
"""
@impl true
@spec put_private(t(), atom(), term()) :: t()
def put_private(%Mint.HTTP2{} = conn, key, value) when is_atom(key) do
  # Store the value under key in the connection's private map.
  update_in(conn.private, &Map.put(&1, key, value))
end
@doc """
See `Mint.HTTP.get_private/3`.
"""
@impl true
@spec get_private(t(), atom(), term()) :: term()
def get_private(%Mint.HTTP2{private: private}, key, default \\ nil) when is_atom(key) do
  # Read the value stored under key, falling back to default when absent.
  Map.get(private, key, default)
end
@doc """
See `Mint.HTTP.delete_private/2`.
"""
@impl true
@spec delete_private(t(), atom()) :: t()
def delete_private(%Mint.HTTP2{} = conn, key) when is_atom(key) do
  # Remove key from the connection's private map; absent keys are a no-op.
  update_in(conn.private, &Map.delete(&1, key))
end
# http://httpwg.org/specs/rfc7540.html#rfc.section.6.5
# SETTINGS parameters are not negotiated. We keep client settings and server settings separate.
@doc false
@impl true
@spec initiate(
        Types.scheme(),
        Types.socket(),
        String.t(),
        :inet.port_number(),
        keyword()
      ) :: {:ok, t()} | {:error, Types.error()}
def initiate(scheme, socket, hostname, port, opts) do
  transport = scheme_to_transport(scheme)
  mode = Keyword.get(opts, :mode, :active)
  client_settings_params = Keyword.get(opts, :client_settings, [])
  # Raises for invalid client settings before touching the socket.
  validate_settings!(client_settings_params)

  unless mode in [:active, :passive] do
    raise ArgumentError,
          "the :mode option must be either :active or :passive, got: #{inspect(mode)}"
  end

  conn = %Mint.HTTP2{
    hostname: hostname,
    port: port,
    transport: scheme_to_transport(scheme),
    socket: socket,
    mode: mode,
    scheme: Atom.to_string(scheme),
    state: :open
  }

  # HTTP/2 connection handshake:
  #   1. send the connection preface together with our SETTINGS frame;
  #   2. queue those settings until the server acknowledges them;
  #   3. wait for the server's SETTINGS frame and ack it right away;
  #   4. apply the server settings and, in active mode, arm the socket.
  with :ok <- inet_opts(transport, socket),
       client_settings = settings(stream_id: 0, params: client_settings_params),
       preface = [@connection_preface, Frame.encode(client_settings)],
       :ok <- transport.send(socket, preface),
       conn = update_in(conn.client_settings_queue, &:queue.in(client_settings_params, &1)),
       {:ok, server_settings, buffer, socket} <- receive_server_settings(transport, socket),
       server_settings_ack =
         settings(stream_id: 0, params: [], flags: set_flags(:settings, [:ack])),
       :ok <- transport.send(socket, Frame.encode(server_settings_ack)),
       conn = put_in(conn.buffer, buffer),
       conn = put_in(conn.socket, socket),
       conn = apply_server_settings(conn, settings(server_settings, :params)),
       :ok <- if(mode == :active, do: transport.setopts(socket, active: :once), else: :ok) do
    {:ok, conn}
  else
    # Any handshake failure closes the socket and is returned as-is.
    error ->
      transport.close(socket)
      error
  end
catch
  # Thrown protocol errors close the connection cleanly before returning.
  {:mint, conn, error} ->
    {:ok, _conn} = close(conn)
    {:error, error}
end
@doc """
See `Mint.HTTP.get_socket/1`.
"""
@impl true
@spec get_socket(t()) :: Mint.Types.socket()
def get_socket(%Mint.HTTP2{} = conn) do
  # Simple accessor for the underlying transport socket.
  conn.socket
end
@doc """
See `Mint.HTTP.get_proxy_headers/1`.
"""
# @doc since: is only supported on Elixir >= 1.7, hence the compile-time check.
if Version.compare(System.version(), "1.7.0") in [:eq, :gt] do
  @doc since: "1.4.0"
end

@impl true
@spec get_proxy_headers(t()) :: Mint.Types.headers()
def get_proxy_headers(%__MODULE__{proxy_headers: proxy_headers}), do: proxy_headers
## Helpers
## Helpers

# Marks the connection as closed. If any requests are still in flight we
# surface a :closed transport error, otherwise closing is a clean no-op.
defp handle_closed(conn) do
  conn = put_in(conn.state, :closed)
  any_open? = conn.open_client_stream_count > 0 or conn.open_server_stream_count > 0

  if any_open? do
    {:error, conn, conn.transport.wrap_error(:closed), []}
  else
    {:ok, conn, []}
  end
end
defp negotiate(address, port, :http, transport_opts) do
  # We don't support protocol negotiation for TCP connections
  # so currently we just assume the HTTP/2 protocol
  transport = scheme_to_transport(:http)
  transport.connect(address, port, transport_opts)
end

defp negotiate(address, port, :https, transport_opts) do
  # TLS connections negotiate the protocol through ALPN; anything other than
  # "h2" means the server doesn't speak HTTP/2 on this socket.
  transport = scheme_to_transport(:https)

  case transport.connect(address, port, transport_opts) do
    {:ok, socket} ->
      case transport.negotiated_protocol(socket) do
        {:ok, "h2"} -> {:ok, socket}
        {:ok, other} -> {:error, transport.wrap_error({:bad_alpn_protocol, other})}
        {:error, _reason} = error -> error
      end

    {:error, _reason} = error ->
      error
  end
end
# Waits for the server's first frame during the handshake, which must be a
# SETTINGS frame. A GOAWAY or any other frame at this point is an error.
defp receive_server_settings(transport, socket) do
  case recv_next_frame(transport, socket, _buffer = "") do
    {:ok, settings(), _buffer, _socket} = result ->
      result

    {:ok, goaway(error_code: error_code, debug_data: debug_data), _buffer, _socket} ->
      error = wrap_error({:server_closed_connection, error_code, debug_data})
      {:error, error}

    {:ok, frame, _buffer, _socket} ->
      debug_data = "received invalid frame #{elem(frame, 0)} during handshake"
      {:error, wrap_error({:protocol_error, debug_data})}

    {:error, error} ->
      {:error, error}
  end
end

# Blocking-reads from the socket until a whole frame can be decoded, returning
# the frame plus any leftover bytes. Only used during the handshake, hence the
# hard-coded 10s receive timeout.
defp recv_next_frame(transport, socket, buffer) do
  case Frame.decode_next(buffer, @default_max_frame_size) do
    {:ok, frame, rest} ->
      {:ok, frame, rest, socket}

    :more ->
      with {:ok, data} <- transport.recv(socket, 0, _timeout = 10_000) do
        data = maybe_concat(buffer, data)
        recv_next_frame(transport, socket, data)
      end

    {:error, {kind, _info} = reason} when kind in [:frame_size_error, :protocol_error] ->
      {:error, wrap_error(reason)}
  end
end
# Allocates a new client-initiated stream: registers it in conn.streams and
# conn.ref_to_stream_id and bumps next_stream_id by 2 (client stream IDs are
# odd). Throws if the server's max concurrent streams limit has been reached.
defp open_stream(conn) do
  max_concurrent_streams = conn.server_settings.max_concurrent_streams

  if conn.open_client_stream_count >= max_concurrent_streams do
    throw({:mint, conn, wrap_error(:too_many_concurrent_requests)})
  end

  stream = %{
    id: conn.next_stream_id,
    ref: make_ref(),
    state: :idle,
    window_size: conn.server_settings.initial_window_size,
    received_first_headers?: false
  }

  conn = put_in(conn.streams[stream.id], stream)
  conn = put_in(conn.ref_to_stream_id[stream.ref], stream.id)
  conn = update_in(conn.next_stream_id, &(&1 + 2))
  {conn, stream.id, stream.ref}
end
# Encodes one chunk of a streamed request body. :eof sends an empty DATA frame
# with END_STREAM; {:eof, trailers} sends trailing headers (after rejecting
# headers that aren't allowed as trailers); anything else is body data.
defp encode_stream_body_request_payload(conn, stream_id, :eof) do
  encode_data(conn, stream_id, "", [:end_stream])
end

defp encode_stream_body_request_payload(conn, stream_id, {:eof, trailing_headers}) do
  lowered_headers = downcase_header_names(trailing_headers)

  if unallowed_trailing_header = Util.find_unallowed_trailing_header(lowered_headers) do
    error = wrap_error({:unallowed_trailing_header, unallowed_trailing_header})
    throw({:mint, conn, error})
  end

  encode_headers(conn, stream_id, trailing_headers, [:end_headers, :end_stream])
end

defp encode_stream_body_request_payload(conn, stream_id, iodata) do
  encode_data(conn, stream_id, iodata, [])
end

# Encodes the initial request payload. :stream keeps the stream open for
# subsequent body chunks; nil means no body (END_STREAM on the HEADERS frame);
# otherwise headers and the full body are encoded together.
defp encode_request_payload(conn, stream_id, headers, :stream) do
  encode_headers(conn, stream_id, headers, [:end_headers])
end

defp encode_request_payload(conn, stream_id, headers, nil) do
  encode_headers(conn, stream_id, headers, [:end_stream, :end_headers])
end

defp encode_request_payload(conn, stream_id, headers, iodata) do
  {conn, headers_payload} = encode_headers(conn, stream_id, headers, [:end_headers])
  {conn, data_payload} = encode_data(conn, stream_id, iodata, [:end_stream])
  {conn, [headers_payload, data_payload]}
end
# HPACK-encodes the headers (updating the shared encode table), wraps the
# resulting header block fragment into HEADERS/CONTINUATION frames, and
# transitions the stream state accordingly. Throws if the headers exceed the
# server's max header list size.
defp encode_headers(conn, stream_id, headers, enabled_flags) do
  assert_headers_smaller_than_max_header_list_size(conn, headers)
  headers = Enum.map(headers, fn {name, value} -> {:store_name, name, value} end)
  {hbf, conn} = get_and_update_in(conn.encode_table, &HPAX.encode(headers, &1))
  payload = headers_to_encoded_frames(conn, stream_id, hbf, enabled_flags)

  stream_state = if :end_stream in enabled_flags, do: :half_closed_local, else: :open

  conn = put_in(conn.streams[stream_id].state, stream_state)
  conn = update_in(conn.open_client_stream_count, &(&1 + 1))
  {conn, payload}
end

# No limit advertised by the server means nothing to check.
defp assert_headers_smaller_than_max_header_list_size(
       %{server_settings: %{max_header_list_size: :infinity}},
       _headers
     ) do
  :ok
end

defp assert_headers_smaller_than_max_header_list_size(conn, headers) do
  # The value is based on the uncompressed size of header fields, including the length
  # of the name and value in octets plus an overhead of 32 octets for each header field.
  total_size =
    Enum.reduce(headers, 0, fn {name, value}, acc ->
      acc + byte_size(name) + byte_size(value) + 32
    end)

  max_header_list_size = conn.server_settings.max_header_list_size

  if total_size <= max_header_list_size do
    :ok
  else
    error = wrap_error({:max_header_list_size_exceeded, total_size, max_header_list_size})
    throw({:mint, conn, error})
  end
end
# Wraps a header block fragment into frames. If the fragment fits into one
# frame we emit a single HEADERS frame; otherwise it's split into HEADERS +
# CONTINUATION frames of at most max_frame_size bytes each.
defp headers_to_encoded_frames(conn, stream_id, hbf, enabled_flags) do
  if IO.iodata_length(hbf) > conn.server_settings.max_frame_size do
    hbf
    |> IO.iodata_to_binary()
    |> split_payload_in_chunks(conn.server_settings.max_frame_size)
    |> split_hbf_to_encoded_frames(stream_id, enabled_flags)
  else
    Frame.encode(
      headers(stream_id: stream_id, hbf: hbf, flags: set_flags(:headers, enabled_flags))
    )
  end
end

# Builds the frame sequence for a multi-chunk header block: the first chunk
# goes in a HEADERS frame (with END_HEADERS removed, since more chunks follow),
# middle chunks in flag-less CONTINUATION frames, and the last chunk in a
# CONTINUATION frame carrying END_HEADERS if it was requested.
defp split_hbf_to_encoded_frames({[first_chunk | chunks], last_chunk}, stream_id, enabled_flags) do
  flags = set_flags(:headers, enabled_flags -- [:end_headers])
  first_frame = Frame.encode(headers(stream_id: stream_id, hbf: first_chunk, flags: flags))

  middle_frames =
    Enum.map(chunks, fn chunk ->
      Frame.encode(continuation(stream_id: stream_id, hbf: chunk))
    end)

  flags =
    if :end_headers in enabled_flags do
      set_flags(:continuation, [:end_headers])
    else
      set_flags(:continuation, [])
    end

  last_frame = Frame.encode(continuation(stream_id: stream_id, hbf: last_chunk, flags: flags))

  [first_frame, middle_frames, last_frame]
end
# Encodes request body data into DATA frames, enforcing stream-level and
# connection-level flow-control windows and chunking automatically when the
# data exceeds the server's max frame size. Throws {:mint, conn, error} on
# any violation (non-streaming request, window overflow).
defp encode_data(conn, stream_id, data, enabled_flags) do
  stream = fetch_stream!(conn, stream_id)

  if stream.state != :open do
    error = wrap_error(:request_is_not_streaming)
    throw({:mint, conn, error})
  end

  data_size = IO.iodata_length(data)

  cond do
    data_size > stream.window_size ->
      throw({:mint, conn, wrap_error({:exceeds_window_size, :request, stream.window_size})})

    data_size > conn.window_size ->
      throw({:mint, conn, wrap_error({:exceeds_window_size, :connection, conn.window_size})})

    # If the data size is greater than the max frame size, we chunk automatically based
    # on the max frame size.
    data_size > conn.server_settings.max_frame_size ->
      {chunks, last_chunk} =
        data
        |> IO.iodata_to_binary()
        |> split_payload_in_chunks(conn.server_settings.max_frame_size)

      {encoded_chunks, conn} =
        Enum.map_reduce(chunks, conn, fn chunk, acc ->
          # Intermediate chunks never carry END_STREAM & co., hence the [].
          {acc, encoded} = encode_data_chunk(acc, stream_id, chunk, [])
          {encoded, acc}
        end)

      {conn, encoded_last_chunk} = encode_data_chunk(conn, stream_id, last_chunk, enabled_flags)
      {conn, [encoded_chunks, encoded_last_chunk]}

    true ->
      encode_data_chunk(conn, stream_id, data, enabled_flags)
  end
end

# Encodes a single DATA frame and decrements both the stream and connection
# windows by the chunk size; END_STREAM also half-closes the stream locally.
defp encode_data_chunk(%__MODULE__{} = conn, stream_id, chunk, enabled_flags)
     when is_integer(stream_id) and is_list(enabled_flags) do
  chunk_size = IO.iodata_length(chunk)
  frame = data(stream_id: stream_id, flags: set_flags(:data, enabled_flags), data: chunk)
  conn = update_in(conn.streams[stream_id].window_size, &(&1 - chunk_size))
  conn = update_in(conn.window_size, &(&1 - chunk_size))

  conn =
    if :end_stream in enabled_flags do
      put_in(conn.streams[stream_id].state, :half_closed_local)
    else
      conn
    end

  {conn, Frame.encode(frame)}
end
# Splits a binary into {full_chunks, last_chunk} where each full chunk is
# exactly chunk_size bytes and last_chunk holds the (possibly smaller, or
# equal-sized) remainder. Chunks are returned in order.
defp split_payload_in_chunks(binary, chunk_size),
  do: split_payload_in_chunks(binary, chunk_size, [])

defp split_payload_in_chunks(binary, chunk_size, acc) do
  if byte_size(binary) <= chunk_size do
    {Enum.reverse(acc), binary}
  else
    <<chunk::binary-size(chunk_size), rest::binary>> = binary
    split_payload_in_chunks(rest, chunk_size, [chunk | acc])
  end
end
# Sends a PING frame with the given 8-byte payload and queues a ref so the
# matching ack can be correlated in handle_ping_ack/3.
defp send_ping(conn, payload) do
  frame = Frame.ping(stream_id: 0, opaque_data: payload)
  conn = send!(conn, Frame.encode(frame))
  ref = make_ref()
  conn = update_in(conn.ping_queue, &:queue.in({ref, payload}, &1))
  {conn, ref}
end

# Sends a SETTINGS frame and queues the params; they're only applied locally
# once the server ACKs them (see handle_settings/3).
defp send_settings(conn, settings) do
  validate_settings!(settings)
  frame = settings(stream_id: 0, params: settings)
  conn = send!(conn, Frame.encode(frame))
  conn = update_in(conn.client_settings_queue, &:queue.in(settings, &1))
  conn
end
# Validates a SETTINGS keyword list, raising ArgumentError on the first
# invalid or unknown parameter. Purely a client-side sanity check.
defp validate_settings!(settings) do
  unless Keyword.keyword?(settings) do
    raise ArgumentError, "settings must be a keyword list"
  end

  Enum.each(settings, fn
    {:header_table_size, value} ->
      unless is_integer(value) do
        raise ArgumentError, ":header_table_size must be an integer, got: #{inspect(value)}"
      end

    {:enable_push, value} ->
      unless is_boolean(value) do
        raise ArgumentError, ":enable_push must be a boolean, got: #{inspect(value)}"
      end

    {:max_concurrent_streams, value} ->
      unless is_integer(value) do
        raise ArgumentError,
              ":max_concurrent_streams must be an integer, got: #{inspect(value)}"
      end

    {:initial_window_size, value} ->
      # NOTE(review): the guard accepts value == @max_window_size but the
      # message says "<" — message and check disagree by one; confirm intent.
      unless is_integer(value) and value <= @max_window_size do
        raise ArgumentError,
              ":initial_window_size must be an integer < #{@max_window_size}, " <>
                "got: #{inspect(value)}"
      end

    {:max_frame_size, value} ->
      unless is_integer(value) and value in @valid_max_frame_size_range do
        raise ArgumentError,
              ":max_frame_size must be an integer in #{inspect(@valid_max_frame_size_range)}, " <>
                "got: #{inspect(value)}"
      end

    {:max_header_list_size, value} ->
      unless is_integer(value) do
        raise ArgumentError, ":max_header_list_size must be an integer, got: #{inspect(value)}"
      end

    {:enable_connect_protocol, value} ->
      unless is_boolean(value) do
        raise ArgumentError,
              ":enable_connect_protocol must be a boolean, got: #{inspect(value)}"
      end

    {name, _value} ->
      raise ArgumentError, "unknown setting parameter #{inspect(name)}"
  end)
end
# Lowercases all header names (HTTP/2 requires lowercase header names).
defp downcase_header_names(headers) do
  for {name, value} <- headers, do: {Util.downcase_ascii(name), value}
end

# Adds default headers the user didn't set explicitly: user-agent always,
# content-length only when we know the full body up front.
defp add_default_headers(headers, body) do
  headers
  |> Util.put_new_header("user-agent", @user_agent)
  |> add_default_content_length_header(body)
end

# No body or a streamed body: the length isn't known, so skip content-length.
defp add_default_content_length_header(headers, body) when body in [nil, :stream] do
  headers
end

defp add_default_content_length_header(headers, body) do
  # Lazy so the iodata length is only computed if the header is missing.
  Util.put_new_header_lazy(headers, "content-length", fn ->
    body |> IO.iodata_length() |> Integer.to_string()
  end)
end
# Prepends the HTTP/2 pseudo-headers to the user headers. CONNECT requests
# only carry :method and :authority; all other methods also get :path and
# :scheme.
defp add_pseudo_headers(headers, conn, method, path) do
  authority = authority_pseudo_header(conn.scheme, conn.port, conn.hostname)

  pseudo_headers =
    if String.upcase(method) == "CONNECT" do
      [{":method", method}, {":authority", authority}]
    else
      [
        {":method", method},
        {":path", path},
        {":scheme", conn.scheme},
        {":authority", authority}
      ]
    end

  pseudo_headers ++ headers
end
# Moves pseudo-headers (names starting with ":") before regular headers.
# Enum.sort_by/2 is stable, so relative order within each group is preserved.
defp sort_pseudo_headers_to_front(headers) do
  Enum.sort_by(headers, fn {name, _value} -> not match?(":" <> _, name) end)
end
## Frame handling

# Entry point for bytes arriving from the socket: prepends any buffered
# leftover bytes, parses as many frames as possible, and returns responses in
# the order they were produced (they're accumulated in reverse).
defp maybe_concat_and_handle_new_data(conn, data) do
  data = maybe_concat(conn.buffer, data)
  {conn, responses} = handle_new_data(conn, data, [])
  {:ok, conn, Enum.reverse(responses)}
end
# Decodes and dispatches frames from `data` until no complete frame remains;
# the leftover bytes are stashed in conn.buffer. Decoding errors escalate to a
# connection error (GOAWAY + throw). The catch clause re-tags 3-element
# {:mint, ...} throws from helpers into 4-element ones that also carry the
# responses produced so far.
defp handle_new_data(%Mint.HTTP2{} = conn, data, responses) do
  case Frame.decode_next(data, conn.client_settings.max_frame_size) do
    {:ok, frame, rest} ->
      assert_valid_frame(conn, frame)
      {conn, responses} = handle_frame(conn, frame, responses)
      handle_new_data(conn, rest, responses)

    :more ->
      conn = put_in(conn.buffer, data)
      handle_consumed_all_frames(conn, responses)

    {:error, :payload_too_big} ->
      # TODO: sometimes, this could be handled with RST_STREAM instead of a GOAWAY frame (for
      # example, if the payload of a DATA frame is too big).
      # http://httpwg.org/specs/rfc7540.html#rfc.section.4.2
      debug_data = "frame payload exceeds connection's max frame size"
      send_connection_error!(conn, :frame_size_error, debug_data)

    {:error, {:frame_size_error, frame}} ->
      debug_data = "error with size of frame: #{inspect(frame)}"
      send_connection_error!(conn, :frame_size_error, debug_data)

    {:error, {:protocol_error, info}} ->
      debug_data = "error when decoding frame: #{inspect(info)}"
      send_connection_error!(conn, :protocol_error, debug_data)
  end
catch
  :throw, {:mint, conn, error} -> throw({:mint, conn, error, responses})
  :throw, {:mint, _conn, _error, _responses} = thrown -> throw(thrown)
end
# Called once all buffered frames are consumed. If the server sent a GOAWAY
# with an error code we surface it as a connection error; a :no_error GOAWAY
# just means the server is winding down gracefully.
defp handle_consumed_all_frames(%{state: state} = conn, responses) do
  case state do
    # TODO: should we do something with the debug data here, like logging it?
    {:goaway, :no_error, _debug_data} ->
      {conn, responses}

    {:goaway, error_code, debug_data} ->
      error = wrap_error({:server_closed_connection, error_code, debug_data})
      throw({:mint, conn, error, responses})

    _ ->
      {conn, responses}
  end
end
defp assert_valid_frame(_conn, unknown()) do
  # Unknown frames MUST be ignored:
  # https://datatracker.ietf.org/doc/html/rfc7540#section-4.1
  :ok
end

# Validates structural properties of an incoming frame before dispatch; every
# check throws a connection error on failure. Frame records are tuples with
# the frame kind at index 0 and the stream ID at index 1.
defp assert_valid_frame(conn, frame) do
  stream_id = elem(frame, 1)

  assert_frame_on_right_level(conn, elem(frame, 0), stream_id)
  assert_stream_id_is_allowed(conn, stream_id)
  assert_frame_doesnt_interrupt_header_streaming(conn, frame)
end
# http://httpwg.org/specs/rfc7540.html#HttpSequence
# While a header block is being streamed (conn.headers_being_processed is
# set), only CONTINUATION frames for that same stream are allowed; conversely,
# a CONTINUATION frame outside of header streaming is a protocol error.
defp assert_frame_doesnt_interrupt_header_streaming(conn, frame) do
  case {conn.headers_being_processed, frame} do
    {nil, continuation()} ->
      debug_data = "CONTINUATION received outside of headers streaming"
      send_connection_error!(conn, :protocol_error, debug_data)

    {nil, _frame} ->
      :ok

    {{stream_id, _, _}, continuation(stream_id: stream_id)} ->
      :ok

    _other ->
      debug_data =
        "headers are streaming but got a #{inspect(elem(frame, 0))} frame instead " <>
          "of a CONTINUATION frame"

      send_connection_error!(conn, :protocol_error, debug_data)
  end
end
# Compile-time lists of frame kinds, injected into the guards below via
# unquote fragments.
stream_level_frames = [:data, :headers, :priority, :rst_stream, :push_promise, :continuation]
connection_level_frames = [:settings, :ping, :goaway]

# Stream-level frames must not arrive on stream 0, and connection-level frames
# must only arrive on stream 0; either violation is a connection error.
defp assert_frame_on_right_level(conn, frame, _stream_id = 0)
     when frame in unquote(stream_level_frames) do
  debug_data = "frame #{inspect(frame)} not allowed at the connection level (stream_id = 0)"
  send_connection_error!(conn, :protocol_error, debug_data)
end

defp assert_frame_on_right_level(conn, frame, stream_id)
     when frame in unquote(connection_level_frames) and stream_id != 0 do
  debug_data = "frame #{inspect(frame)} only allowed at the connection level"
  send_connection_error!(conn, :protocol_error, debug_data)
end

defp assert_frame_on_right_level(_conn, _frame, _stream_id) do
  :ok
end
# Odd stream IDs are client-initiated; the server must never reference a
# client stream ID we haven't opened yet (>= next_stream_id).
defp assert_stream_id_is_allowed(conn, stream_id) do
  if Integer.is_odd(stream_id) and stream_id >= conn.next_stream_id do
    debug_data = "frame with stream ID #{inspect(stream_id)} has not been opened yet"
    send_connection_error!(conn, :protocol_error, debug_data)
  else
    :ok
  end
end
# Generates one handle_frame/3 dispatch clause per frame kind, each delegating
# to the matching handle_<kind>/3 function defined below.
for frame_name <- stream_level_frames ++ connection_level_frames ++ [:window_update, :unknown] do
  function_name = :"handle_#{frame_name}"

  defp handle_frame(conn, Frame.unquote(frame_name)() = frame, responses) do
    unquote(function_name)(conn, frame, responses)
  end
end

defp handle_unknown(conn, _frame, responses) do
  # Implementations MUST ignore and discard any frame that has a type that is unknown.
  # see: https://datatracker.ietf.org/doc/html/rfc7540#section-4.1
  {conn, responses}
end
# DATA
# Handles an incoming DATA frame: refills the flow-control windows for the
# received bytes, then emits a {:data, ref, data} response (plus {:done, ref}
# when END_STREAM is set). DATA on a stream we no longer track is logged and
# dropped.
defp handle_data(conn, frame, responses) do
  data(stream_id: stream_id, flags: flags, data: data, padding: padding) = frame

  # Regardless of whether we have the stream or not, we need to abide by flow
  # control rules so we still refill the client window for the stream_id we got.
  window_size_increment = byte_size(data) + byte_size(padding || "")

  conn =
    if window_size_increment > 0 do
      refill_client_windows(conn, stream_id, window_size_increment)
    else
      conn
    end

  case Map.fetch(conn.streams, stream_id) do
    {:ok, stream} ->
      assert_stream_in_state(conn, stream, [:open, :half_closed_local])
      responses = [{:data, stream.ref, data} | responses]

      if flag_set?(flags, :data, :end_stream) do
        conn = close_stream!(conn, stream.id, :no_error)
        {conn, [{:done, stream.ref} | responses]}
      else
        {conn, responses}
      end

    :error ->
      _ = Logger.debug(fn -> "Received DATA frame on closed stream ID #{stream_id}" end)
      {conn, responses}
  end
end
# Sends WINDOW_UPDATE frames for both the connection (stream 0) and the given
# stream to give the server back the credit consumed by received data. Skipped
# when the connection is no longer open (nothing can be sent).
defp refill_client_windows(conn, stream_id, data_size) do
  connection_frame = window_update(stream_id: 0, window_size_increment: data_size)
  stream_frame = window_update(stream_id: stream_id, window_size_increment: data_size)

  if open?(conn) do
    send!(conn, [Frame.encode(connection_frame), Frame.encode(stream_frame)])
  else
    conn
  end
end
# HEADERS
# Handles a HEADERS frame. If END_HEADERS is set the header block fragment is
# decoded right away; otherwise the fragment and a continuation callback are
# stashed in conn.headers_being_processed until CONTINUATION frames complete it.
defp handle_headers(conn, frame, responses) do
  headers(stream_id: stream_id, flags: flags, hbf: hbf) = frame
  # `stream` may be nil here if the stream was already closed (see
  # decode_hbf_and_add_responses/5).
  stream = Map.get(conn.streams, stream_id)
  end_stream? = flag_set?(flags, :headers, :end_stream)

  if stream do
    assert_stream_in_state(conn, stream, [:open, :half_closed_local, :reserved_remote])
  end

  if flag_set?(flags, :headers, :end_headers) do
    decode_hbf_and_add_responses(conn, responses, hbf, stream, end_stream?)
  else
    callback = &decode_hbf_and_add_responses(&1, &2, &3, &4, end_stream?)
    conn = put_in(conn.headers_being_processed, {stream_id, hbf, callback})
    {conn, responses}
  end
end
# Here, "stream" can be nil in case the stream was closed. In that case, we
# still need to process the hbf so that the HPACK table is updated, but then
# we don't add any responses.
defp decode_hbf_and_add_responses(conn, responses, hbf, stream, end_stream?) do
  # Decoding must always happen to keep the HPACK dynamic table in sync with
  # the server, even if we end up discarding the headers.
  {conn, headers} = decode_hbf(conn, hbf)

  if stream do
    handle_decoded_headers_for_stream(conn, responses, stream, headers, end_stream?)
  else
    _ = Logger.debug(fn -> "Received HEADERS frame on closed stream ID" end)
    {conn, responses}
  end
end
# Turns a decoded header list into responses. Three shapes are possible:
#   * first headers for the stream — must start with :status; emits
#     {:status, ...} and {:headers, ...}, with extra handling for promised
#     (:reserved_remote) streams and END_STREAM;
#   * trailing headers (first headers already received) — must carry
#     END_STREAM, otherwise it's a protocol error on that stream;
#   * first headers without :status — protocol error on that stream.
defp handle_decoded_headers_for_stream(conn, responses, stream, headers, end_stream?) do
  %{ref: ref, received_first_headers?: received_first_headers?} = stream

  case headers do
    [{":status", status} | headers] when not received_first_headers? ->
      conn = put_in(conn.streams[stream.id].received_first_headers?, true)
      status = String.to_integer(status)
      headers = join_cookie_headers(headers)
      new_responses = [{:headers, ref, headers}, {:status, ref, status} | responses]

      cond do
        # :reserved_remote means that this was a promised stream. As soon as headers come,
        # the stream goes in the :half_closed_local state (unless it's not allowed because
        # of the client's max concurrent streams limit, or END_STREAM is set).
        stream.state == :reserved_remote ->
          cond do
            conn.open_server_stream_count >= conn.client_settings.max_concurrent_streams ->
              # Over our advertised limit: refuse the promised stream and drop
              # the responses we built for it.
              conn = close_stream!(conn, stream.id, :refused_stream)
              {conn, responses}

            end_stream? ->
              conn = close_stream!(conn, stream.id, :no_error)
              {conn, [{:done, ref} | new_responses]}

            true ->
              conn = update_in(conn.open_server_stream_count, &(&1 + 1))
              conn = put_in(conn.streams[stream.id].state, :half_closed_local)
              {conn, new_responses}
          end

        end_stream? ->
          conn = close_stream!(conn, stream.id, :no_error)
          {conn, [{:done, ref} | new_responses]}

        true ->
          {conn, new_responses}
      end

    # Trailing headers. We don't care about the :status header here.
    headers when received_first_headers? ->
      if end_stream? do
        conn = close_stream!(conn, stream.id, :no_error)
        headers = headers |> Util.remove_unallowed_trailing_headers() |> join_cookie_headers()
        {conn, [{:done, ref}, {:headers, ref, headers} | responses]}
      else
        # Trailing headers must set the END_STREAM flag because they're
        # the last thing allowed on the stream (other than RST_STREAM and
        # the usual frames).
        conn = close_stream!(conn, stream.id, :protocol_error)
        debug_data = "trailing headers didn't set the END_STREAM flag"
        error = wrap_error({:protocol_error, debug_data})
        responses = [{:error, stream.ref, error} | responses]
        {conn, responses}
      end

    # Non-trailing headers need to have a :status header, otherwise
    # it's a protocol error.
    _headers ->
      conn = close_stream!(conn, stream.id, :protocol_error)
      error = wrap_error(:missing_status_header)
      responses = [{:error, stream.ref, error} | responses]
      {conn, responses}
  end
end
# HPACK-decodes a complete header block fragment, threading the updated decode
# table through the connection. A decoding failure is fatal for the whole
# connection (COMPRESSION_ERROR), since the HPACK state is now corrupt.
defp decode_hbf(conn, hbf) do
  case HPAX.decode(hbf, conn.decode_table) do
    {:ok, headers, decode_table} ->
      conn = put_in(conn.decode_table, decode_table)
      {conn, headers}

    {:error, reason} ->
      debug_data = "unable to decode headers: #{inspect(reason)}"
      send_connection_error!(conn, :compression_error, debug_data)
  end
end
# If the port is the default for the scheme, don't add it to the :authority pseudo-header
defp authority_pseudo_header(scheme, port, hostname) do
  case URI.default_port(scheme) do
    ^port -> hostname
    _other_port -> "#{hostname}:#{port}"
  end
end
# HTTP/2 allows splitting the cookie header into multiple entries; we join
# two or more cookie headers back into a single "; "-separated one. With zero
# or one cookie header the original list is returned untouched.
defp join_cookie_headers(headers) do
  cookie? = fn {name, _value} -> Util.downcase_ascii(name) == "cookie" end

  case Enum.split_with(headers, cookie?) do
    {cookies, rest} when length(cookies) >= 2 ->
      joined = Enum.map_join(cookies, "; ", fn {_name, value} -> value end)
      [{"cookie", joined} | rest]

    {_zero_or_one, _rest} ->
      headers
  end
end
# PRIORITY
# For now we ignore all PRIORITY frames. This shouldn't cause practical trouble.
defp handle_priority(conn, frame, responses) do
  _ = Logger.warn(fn -> "Ignoring PRIORITY frame: #{inspect(frame)}" end)
  {conn, responses}
end
# RST_STREAM
# The server reset a stream: emit {:done, ref} for a clean :no_error reset or
# {:error, ref, reason} otherwise, and forget the stream entirely.
defp handle_rst_stream(conn, frame, responses) do
  rst_stream(stream_id: stream_id, error_code: error_code) = frame

  # If we receive RST_STREAM on a closed stream, we ignore it.
  case Map.fetch(conn.streams, stream_id) do
    {:ok, stream} ->
      # If we receive RST_STREAM then the stream is definitely closed.
      # We won't send anything else on the stream so we can simply delete
      # it, so that if we get things like DATA on that stream we error out.
      conn = delete_stream(conn, stream)

      if error_code == :no_error do
        {conn, [{:done, stream.ref} | responses]}
      else
        error = wrap_error({:server_closed_request, error_code})
        {conn, [{:error, stream.ref, error} | responses]}
      end

    :error ->
      {conn, responses}
  end
end
# SETTINGS
# An ACK means the server accepted the settings we queued earliest, so apply
# them locally (the queue is assumed non-empty: we only ever expect ACKs for
# settings we queued). A non-ACK carries new server settings: apply them and
# ACK back.
defp handle_settings(conn, frame, responses) do
  settings(flags: flags, params: params) = frame

  if flag_set?(flags, :settings, :ack) do
    {{:value, params}, conn} = get_and_update_in(conn.client_settings_queue, &:queue.out/1)
    conn = apply_client_settings(conn, params)
    {conn, responses}
  else
    conn = apply_server_settings(conn, params)
    frame = settings(flags: set_flags(:settings, [:ack]), params: [])
    conn = send!(conn, Frame.encode(frame))
    {conn, responses}
  end
end
# Folds the server's SETTINGS parameters into conn.server_settings, validating
# values where the RFC requires it (INITIAL_WINDOW_SIZE and MAX_FRAME_SIZE
# violations are connection errors).
defp apply_server_settings(conn, server_settings) do
  Enum.reduce(server_settings, conn, fn
    {:header_table_size, header_table_size}, conn ->
      # Resizes our HPACK *encode* table to the server's advertised limit.
      update_in(conn.encode_table, &HPAX.resize(&1, header_table_size))

    {:enable_push, enable_push?}, conn ->
      put_in(conn.server_settings.enable_push, enable_push?)

    {:max_concurrent_streams, max_concurrent_streams}, conn ->
      put_in(conn.server_settings.max_concurrent_streams, max_concurrent_streams)

    {:initial_window_size, initial_window_size}, conn ->
      if initial_window_size > @max_window_size do
        debug_data = "INITIAL_WINDOW_SIZE setting of #{initial_window_size} is too big"
        send_connection_error!(conn, :flow_control_error, debug_data)
      end

      # Also retroactively adjusts every open stream's window (RFC 7540 §6.9.2).
      update_server_initial_window_size(conn, initial_window_size)

    {:max_frame_size, max_frame_size}, conn ->
      if max_frame_size not in @valid_max_frame_size_range do
        debug_data = "MAX_FRAME_SIZE setting parameter outside of allowed range"
        send_connection_error!(conn, :protocol_error, debug_data)
      end

      put_in(conn.server_settings.max_frame_size, max_frame_size)

    {:max_header_list_size, max_header_list_size}, conn ->
      put_in(conn.server_settings.max_header_list_size, max_header_list_size)

    {:enable_connect_protocol, enable_connect_protocol?}, conn ->
      put_in(conn.server_settings.enable_connect_protocol, enable_connect_protocol?)
  end)
end
# Applies our own settings once the server has ACKed them.
# NOTE(review): only three parameters are handled here although
# validate_settings!/1 accepts more; other ACKed parameters would raise a
# FunctionClauseError — confirm whether that's intentional.
defp apply_client_settings(conn, client_settings) do
  Enum.reduce(client_settings, conn, fn
    {:max_frame_size, value}, conn ->
      put_in(conn.client_settings.max_frame_size, value)

    {:max_concurrent_streams, value}, conn ->
      put_in(conn.client_settings.max_concurrent_streams, value)

    {:enable_push, value}, conn ->
      put_in(conn.client_settings.enable_push, value)
  end)
end
# Per RFC 7540 §6.9.2, a change to SETTINGS_INITIAL_WINDOW_SIZE adjusts the
# window of every stream that isn't closed/half-closed-local by the delta
# between the new and old value. Overflowing @max_window_size on any stream is
# a connection-level flow-control error.
defp update_server_initial_window_size(conn, new_iws) do
  diff = new_iws - conn.server_settings.initial_window_size

  conn =
    update_in(conn.streams, fn streams ->
      for {stream_id, stream} <- streams,
          stream.state in [:open, :half_closed_remote],
          into: streams do
        window_size = stream.window_size + diff

        if window_size > @max_window_size do
          debug_data =
            "INITIAL_WINDOW_SIZE parameter of #{window_size} makes some window sizes too big"

          send_connection_error!(conn, :flow_control_error, debug_data)
        end

        {stream_id, %{stream | window_size: window_size}}
      end
    end)

  put_in(conn.server_settings.initial_window_size, new_iws)
end
# PUSH_PROMISE
# If we advertised SETTINGS_ENABLE_PUSH = false, a PUSH_PROMISE frame is a
# protocol violation per RFC 7540 §6.6.
defp handle_push_promise(
       %Mint.HTTP2{client_settings: %{enable_push: false}} = conn,
       push_promise(),
       _responses
     ) do
  debug_data = "received PUSH_PROMISE frame when SETTINGS_ENABLE_PUSH was false"
  send_connection_error!(conn, :protocol_error, debug_data)
end

# Like HEADERS, the promised request's header block may span CONTINUATION
# frames; when END_HEADERS isn't set we stash the fragment plus a callback in
# conn.headers_being_processed.
defp handle_push_promise(conn, push_promise() = frame, responses) do
  push_promise(
    stream_id: stream_id,
    flags: flags,
    promised_stream_id: promised_stream_id,
    hbf: hbf
  ) = frame

  assert_valid_promised_stream_id(conn, promised_stream_id)

  stream = fetch_stream!(conn, stream_id)
  assert_stream_in_state(conn, stream, [:open, :half_closed_local])

  if flag_set?(flags, :push_promise, :end_headers) do
    decode_push_promise_headers_and_add_response(
      conn,
      responses,
      hbf,
      stream,
      promised_stream_id
    )
  else
    callback = &decode_push_promise_headers_and_add_response(&1, &2, &3, &4, promised_stream_id)
    conn = put_in(conn.headers_being_processed, {stream_id, hbf, callback})
    {conn, responses}
  end
end
# Decodes the promised request's headers, registers the promised stream in
# :reserved_remote state, and emits a {:push_promise, ref, promised_ref,
# headers} response.
defp decode_push_promise_headers_and_add_response(
       conn,
       responses,
       hbf,
       stream,
       promised_stream_id
     ) do
  {conn, headers} = decode_hbf(conn, hbf)

  promised_stream = %{
    id: promised_stream_id,
    ref: make_ref(),
    state: :reserved_remote,
    window_size: conn.server_settings.initial_window_size,
    received_first_headers?: false
  }

  conn = put_in(conn.streams[promised_stream.id], promised_stream)
  new_response = {:push_promise, stream.ref, promised_stream.ref, headers}
  {conn, [new_response | responses]}
end
# Server-promised stream IDs must be even integers (odd IDs are ours) and must
# not collide with an existing stream; either violation is a connection error.
defp assert_valid_promised_stream_id(conn, promised_stream_id) do
  cond do
    not is_integer(promised_stream_id) or Integer.is_odd(promised_stream_id) ->
      debug_data = "invalid promised stream ID: #{inspect(promised_stream_id)}"
      send_connection_error!(conn, :protocol_error, debug_data)

    Map.has_key?(conn.streams, promised_stream_id) ->
      debug_data =
        "stream with ID #{inspect(promised_stream_id)} already exists and can't be " <>
          "reserved by the server"

      send_connection_error!(conn, :protocol_error, debug_data)

    true ->
      :ok
  end
end
# PING
# An ACK answers one of our pending pings; a non-ACK PING from the server is
# answered immediately by echoing the opaque data back with the ACK flag set.
defp handle_ping(conn, Frame.ping() = frame, responses) do
  Frame.ping(flags: flags, opaque_data: opaque_data) = frame

  if flag_set?(flags, :ping, :ack) do
    handle_ping_ack(conn, opaque_data, responses)
  else
    ack = Frame.ping(stream_id: 0, flags: set_flags(:ping, [:ack]), opaque_data: opaque_data)
    conn = send!(conn, Frame.encode(ack))
    {conn, responses}
  end
end

# Pings are ACKed in order, so the ack must match the head of our ping queue;
# mismatched or unexpected acks are logged and dropped.
defp handle_ping_ack(conn, opaque_data, responses) do
  case :queue.peek(conn.ping_queue) do
    {:value, {ref, ^opaque_data}} ->
      conn = update_in(conn.ping_queue, &:queue.drop/1)
      {conn, [{:pong, ref} | responses]}

    {:value, _} ->
      _ = Logger.warn("Received PING ack that doesn't match next PING request in the queue")
      {conn, responses}

    :empty ->
      _ = Logger.warn("Received PING ack but no PING requests are pending")
      {conn, responses}
  end
end
# GOAWAY
# The server is shutting the connection down: any of our streams with an ID
# above last_stream_id will never be processed, so each of them gets an
# {:error, ref, :unprocessed} response and is removed. The GOAWAY state (with
# the error code) is inspected later in handle_consumed_all_frames/2.
defp handle_goaway(conn, frame, responses) do
  goaway(
    last_stream_id: last_stream_id,
    error_code: error_code,
    debug_data: debug_data
  ) = frame

  # We gather all the unprocessed requests and form {:error, _, _} tuples for each one.
  # At the same time, we delete all the unprocessed requests from the stream set.
  {unprocessed_request_responses, conn} =
    Enum.flat_map_reduce(conn.streams, conn, fn
      {stream_id, _stream}, conn_acc when stream_id <= last_stream_id ->
        {[], conn_acc}

      {_stream_id, stream}, conn_acc ->
        conn_acc = delete_stream(conn_acc, stream)
        {[{:error, stream.ref, wrap_error(:unprocessed)}], conn_acc}
    end)

  conn = put_in(conn.state, {:goaway, error_code, debug_data})
  {conn, unprocessed_request_responses ++ responses}
end
# WINDOW_UPDATE
# Connection-level update (stream 0): grow the connection send window;
# overflowing @max_window_size is a connection-level flow-control error.
defp handle_window_update(
       conn,
       window_update(stream_id: 0, window_size_increment: wsi),
       responses
     ) do
  new_window_size = conn.window_size + wsi

  if new_window_size > @max_window_size do
    send_connection_error!(conn, :flow_control_error, "window size too big")
  else
    conn = put_in(conn.window_size, new_window_size)
    {conn, responses}
  end
end

# Stream-level update: grow that stream's send window; overflow only kills
# the stream (RST_STREAM with FLOW_CONTROL_ERROR), not the connection.
defp handle_window_update(
       conn,
       window_update(stream_id: stream_id, window_size_increment: wsi),
       responses
     ) do
  stream = fetch_stream!(conn, stream_id)
  new_window_size = conn.streams[stream_id].window_size + wsi

  if new_window_size > @max_window_size do
    conn = close_stream!(conn, stream_id, :flow_control_error)
    error = wrap_error({:flow_control_error, "window size too big"})
    {conn, [{:error, stream.ref, error} | responses]}
  else
    conn = put_in(conn.streams[stream_id].window_size, new_window_size)
    {conn, responses}
  end
end
# CONTINUATION
# Appends another header block fragment to the one being accumulated in
# conn.headers_being_processed (assert_valid_frame/2 already guaranteed that a
# matching accumulation is in progress). On END_HEADERS the stored callback
# (from HEADERS or PUSH_PROMISE handling) is invoked with the full fragment.
defp handle_continuation(conn, frame, responses) do
  continuation(stream_id: stream_id, flags: flags, hbf: hbf_chunk) = frame
  # `stream` may be nil if the stream was closed in the meantime; the callback
  # handles that case.
  stream = Map.get(conn.streams, stream_id)

  if stream do
    assert_stream_in_state(conn, stream, [:open, :half_closed_local, :reserved_remote])
  end

  {^stream_id, hbf_acc, callback} = conn.headers_being_processed

  if flag_set?(flags, :continuation, :end_headers) do
    hbf = IO.iodata_to_binary([hbf_acc, hbf_chunk])
    conn = put_in(conn.headers_being_processed, nil)
    callback.(conn, responses, hbf, stream)
  else
    conn = put_in(conn.headers_being_processed, {stream_id, [hbf_acc, hbf_chunk], callback})
    {conn, responses}
  end
end
## General helpers

# Fatal connection error: best-effort-sends a GOAWAY, closes the socket, marks
# the connection closed, and throws {:mint, conn, error} for the caller's
# catch. NOTE(review): last_stream_id is hard-coded to 2 here — confirm whether
# it should reflect the actual last server-initiated stream we processed.
defp send_connection_error!(conn, error_code, debug_data) do
  frame =
    goaway(stream_id: 0, last_stream_id: 2, error_code: error_code, debug_data: debug_data)

  conn = send!(conn, Frame.encode(frame))
  _ = conn.transport.close(conn.socket)
  conn = put_in(conn.state, :closed)
  throw({:mint, conn, wrap_error({error_code, debug_data})})
end
# Closes a single stream: sends RST_STREAM with the given error code (only if
# the connection is still writable) and removes the stream from our state.
defp close_stream!(conn, stream_id, error_code) do
  stream = Map.fetch!(conn.streams, stream_id)

  # First of all we send a RST_STREAM with the given error code so that we
  # move the stream to the :closed state (that is, we remove it).
  rst_stream_frame = rst_stream(stream_id: stream_id, error_code: error_code)

  conn =
    if open?(conn) do
      send!(conn, Frame.encode(rst_stream_frame))
    else
      conn
    end

  delete_stream(conn, stream)
end
# Removes a stream from both lookup maps and, if the stream was still counted
# as open, decrements the appropriate open-stream counter (odd IDs are
# client-initiated, even IDs server-initiated).
defp delete_stream(conn, stream) do
  conn = update_in(conn.streams, &Map.delete(&1, stream.id))
  conn = update_in(conn.ref_to_stream_id, &Map.delete(&1, stream.ref))

  stream_open? = stream.state in [:open, :half_closed_local, :half_closed_remote]

  conn =
    cond do
      # Stream initiated by the client.
      stream_open? and Integer.is_odd(stream.id) ->
        update_in(conn.open_client_stream_count, &(&1 - 1))

      # Stream initiated by the server.
      stream_open? and Integer.is_even(stream.id) ->
        update_in(conn.open_server_stream_count, &(&1 - 1))

      true ->
        conn
    end

  conn
end
# Fetches a stream by ID, throwing a :stream_not_found error if we don't know
# about it (e.g. it was already closed and deleted).
defp fetch_stream!(conn, stream_id) do
  case Map.fetch(conn.streams, stream_id) do
    {:ok, stream} -> stream
    :error -> throw({:mint, conn, wrap_error({:stream_not_found, stream_id})})
  end
end
# Escalates to a connection-level protocol error when a frame arrives for a
# stream that is not in one of the states where that frame is legal.
defp assert_stream_in_state(conn, %{state: state}, expected_states) do
  if state not in expected_states do
    debug_data =
      "stream was in state #{inspect(state)} and not in one of the expected states: " <>
        Enum.map_join(expected_states, ", ", &inspect/1)

    send_connection_error!(conn, :protocol_error, debug_data)
  end
end
# Writes bytes to the socket, throwing {:mint, conn, reason} on failure; a
# :closed transport error additionally flips the connection state to :closed.
defp send!(%Mint.HTTP2{transport: transport, socket: socket} = conn, bytes) do
  case transport.send(socket, bytes) do
    :ok ->
      conn

    {:error, %TransportError{reason: :closed} = error} ->
      throw({:mint, %{conn | state: :closed}, error})

    {:error, reason} ->
      throw({:mint, conn, reason})
  end
end
# Wraps a raw reason term into an HTTPError pointing back at this module's
# format_error/1.
defp wrap_error(reason), do: %HTTPError{reason: reason, module: __MODULE__}
  # Renders the error reasons produced by this module into human-readable
  # strings. Invoked through HTTPError (see wrap_error/1); hidden from docs
  # because users go through Exception.message/1.
  @doc false
  def format_error(reason)
  def format_error(:closed) do
    "the connection is closed"
  end
  def format_error(:closed_for_writing) do
    "the connection is closed for writing, which means that you cannot issue any more " <>
      "requests on the connection but you can expect responses to still be delivered for " <>
      "part of the requests that are in flight. If a connection is closed for writing, " <>
      "it usually means that you got a :server_closed_request error already."
  end
  def format_error(:too_many_concurrent_requests) do
    "the number of max concurrent HTTP/2 requests supported by the server has been reached. " <>
      "Use Mint.HTTP2.get_server_setting/2 with the :max_concurrent_streams setting name " <>
      "to find out the maximum number of concurrent requests supported by the server."
  end
  def format_error({:max_header_list_size_exceeded, size, max_size}) do
    "the given header list (of size #{size}) goes over the max header list size of " <>
      "#{max_size} supported by the server. In HTTP/2, the header list size is calculated " <>
      "by summing up the size in bytes of each header name, value, plus 32 for each header."
  end
  def format_error({:exceeds_window_size, what, window_size}) do
    # `what` is either :request or :connection depending on which flow-control
    # window was exceeded.
    what =
      case what do
        :request -> "request"
        :connection -> "connection"
      end
    "the given data exceeds the #{what} window size, which is #{window_size}. " <>
      "The server will refill the window size of the #{what} when ready. This will be " <>
      "handled transparently by stream/2."
  end
  def format_error({:stream_not_found, stream_id}) do
    "request not found (with stream_id #{inspect(stream_id)})"
  end
  def format_error(:unknown_request_to_stream) do
    "can't stream chunk of data because the request is unknown"
  end
  def format_error(:request_is_not_streaming) do
    "can't send more data on this request since it's not streaming"
  end
  def format_error({:unallowed_trailing_header, {name, value}}) do
    "header #{inspect(name)} (with value #{inspect(value)}) is not allowed as a trailing header"
  end
  def format_error(:missing_status_header) do
    "the :status pseudo-header (which is required in HTTP/2) is missing from the response"
  end
  def format_error({:server_closed_request, error_code}) do
    "server closed request with error code #{inspect(error_code)}"
  end
  def format_error({:server_closed_connection, error, debug_data}) do
    "server closed connection with error code #{inspect(error)} and debug data: " <> debug_data
  end
  def format_error(:unprocessed) do
    "request was not processed by the server, which means that it's safe to retry on a " <>
      "different or new connection"
  end
  def format_error({:frame_size_error, frame}) do
    "frame size error for #{inspect(frame)} frame"
  end
  def format_error({:protocol_error, debug_data}) do
    "protocol error: " <> debug_data
  end
  def format_error({:compression_error, debug_data}) do
    "compression error: " <> debug_data
  end
  def format_error({:flow_control_error, debug_data}) do
    "flow control error: " <> debug_data
  end
end
| 35.351152 | 107 | 0.675931 |
1cf5d22600d56e6a1d2257e6bb1dee33b7cbdd58 | 10,222 | ex | Elixir | lib/ex_unit/lib/ex_unit/runner.ex | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/lib/ex_unit/runner.ex | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/lib/ex_unit/runner.ex | mk/elixir | 2b2c66ecf7b1cc2167cae9cc3e88f950994223f1 | [
"Apache-2.0"
] | null | null | null | defmodule ExUnit.Runner do
@moduledoc false
alias ExUnit.EventManager, as: EM
  # Entry point: runs the given async and sync test cases with `opts`.
  # `load_us` is the time (in microseconds) the caller spent loading test
  # files; it is forwarded to the formatters together with the measured run
  # time. Blocks until all cases finish and the stats handler stops.
  def run(async, sync, opts, load_us) do
    {opts, config} = configure(opts)
    :erlang.system_flag(:backtrace_depth,
                        Keyword.fetch!(opts, :stacktrace_depth))
    {run_us, _} =
      :timer.tc fn ->
        EM.suite_started(config.manager, opts)
        loop %{config | sync_cases: shuffle(config, sync),
                        async_cases: shuffle(config, async)}
      end
    EM.suite_finished(config.manager, run_us, load_us)
    EM.call(config.manager, ExUnit.RunnerStats, :stop, :infinity)
  end
  # Normalizes options, starts the event manager, attaches all formatter
  # handlers (RunnerStats always runs first) and builds the runner config map.
  # Returns {normalized_opts, config}.
  def configure(opts) do
    opts = normalize_opts(opts)
    {:ok, pid} = EM.start_link
    formatters = [ExUnit.RunnerStats|opts[:formatters]]
    Enum.each formatters, &(:ok = EM.add_handler(pid, &1, opts))
    config = %{
      async_cases: [],
      capture_log: opts[:capture_log],
      exclude: opts[:exclude],
      include: opts[:include],
      manager: pid,
      max_cases: opts[:max_cases],
      seed: opts[:seed],
      sync_cases: [],
      taken_cases: 0,
      timeout: opts[:timeout],
      trace: opts[:trace]
    }
    {opts, config}
  end
  # Normalizes include/exclude filters, resolves :max_cases and defaults the
  # :seed to the microsecond component of the current timestamp.
  defp normalize_opts(opts) do
    {include, exclude} = ExUnit.Filters.normalize(opts[:include], opts[:exclude])
    opts
    |> Keyword.put(:exclude, exclude)
    |> Keyword.put(:include, include)
    |> Keyword.put(:max_cases, max_cases(opts))
    |> Keyword.put_new(:seed, :os.timestamp |> elem(2))
  end
defp max_cases(opts) do
cond do
opts[:trace] -> 1
max = opts[:max_cases] -> max
true -> :erlang.system_info(:schedulers_online)
end
end
  # Main scheduling loop: fills the available concurrency slots with async
  # cases first, then (once all async cases are done) runs sync cases one at
  # a time, and returns the final config when nothing is left.
  defp loop(config) do
    available = config.max_cases - config.taken_cases
    cond do
      # No cases available, wait for one
      available <= 0 ->
        wait_until_available config
      # Slots are available, start with async cases
      tuple = take_async_cases(config, available) ->
        {config, cases} = tuple
        spawn_cases(config, cases)
      # No more async cases, wait for them to finish
      config.taken_cases > 0 ->
        wait_until_available config
      # So we can start all sync cases
      tuple = take_sync_cases(config) ->
        {config, cases} = tuple
        spawn_cases(config, cases)
      # No more cases, we are done!
      true ->
        config
    end
  end
  # Loop expecting messages from the spawned cases. Whenever
  # a test case has finished executing, decrease the taken
  # cases counter and attempt to spawn new ones.
  defp wait_until_available(config) do
    receive do
      {_pid, :case_finished, _test_case} ->
        loop %{config | taken_cases: config.taken_cases - 1}
    end
  end
  # Spawns one linked process per case name, each reporting back to this
  # (the runner) process, then re-enters the loop with the taken counter
  # bumped by the number of cases just started.
  defp spawn_cases(config, cases) do
    pid = self()
    Enum.each cases, fn case_name ->
      spawn_link fn ->
        run_case(config, pid, case_name)
      end
    end
    loop %{config | taken_cases: config.taken_cases + length(cases)}
  end
  # Runs a whole test case (module): emits case_started, filters/shuffles the
  # tests, executes the runnable ones in a dedicated process, emits formatter
  # events for skipped/invalid tests, and notifies the runner when done.
  defp run_case(config, pid, case_name) do
    test_case = case_name.__ex_unit__(:case)
    EM.case_started(config.manager, test_case)
    # Prepare tests, selecting which ones should
    # run and which ones were skipped.
    tests = prepare_tests(config, test_case.tests)
    {test_case, pending} =
      if Enum.all?(tests, &(&1.state)) do
        # Every test already has a state (e.g. skipped) — nothing to execute.
        {test_case, tests}
      else
        spawn_case(config, test_case, tests)
      end
    # Run the pending tests. We don't actually spawn those
    # tests but we do send the notifications to formatter.
    Enum.each pending, &run_test(config, &1, [])
    EM.case_finished(config.manager, test_case)
    send pid, {self, :case_finished, test_case}
  end
  # Shuffles the case's tests and applies the include/exclude filters,
  # tagging each test with its own name and marking filtered-out tests as
  # {:skip, msg}.
  defp prepare_tests(config, tests) do
    tests = shuffle(config, tests)
    include = config.include
    exclude = config.exclude
    for test <- tests do
      tags = Map.put(test.tags, :test, test.name)
      case ExUnit.Filters.eval(include, exclude, tags, tests) do
        :ok -> %{test | tags: tags}
        {:error, msg} -> %{test | state: {:skip, msg}}
      end
    end
  end
  # Runs a case in a monitored child process: executes setup_all, then each
  # test; waits for either the child's :case_finished message (plus its DOWN
  # message, to flush the monitor) or an abnormal DOWN which marks the case
  # failed. Finally runs the case's on_exit callbacks in this process.
  defp spawn_case(config, test_case, tests) do
    parent = self
    {case_pid, case_ref} =
      spawn_monitor(fn ->
        ExUnit.OnExitHandler.register(self)
        case exec_case_setup(test_case) do
          {:ok, test_case, context} ->
            Enum.each tests, &run_test(config, &1, context)
            send parent, {self, :case_finished, test_case, []}
          {:error, test_case} ->
            # setup_all failed: every test becomes {:invalid, test_case}.
            failed_tests = Enum.map tests, & %{&1 | state: {:invalid, test_case}}
            send parent, {self, :case_finished, test_case, failed_tests}
        end
        exit(:shutdown)
      end)
    {test_case, pending} =
      receive do
        {^case_pid, :case_finished, test_case, tests} ->
          receive do
            {:DOWN, ^case_ref, :process, ^case_pid, _} -> :ok
          end
          {test_case, tests}
        {:DOWN, ^case_ref, :process, ^case_pid, error} ->
          test_case = %{test_case | state: failed({:EXIT, case_pid}, error, [])}
          {test_case, []}
      end
    {exec_on_exit(test_case, case_pid), pending}
  end
  # Runs the module's setup_all callbacks; catches any kind of failure and
  # records it as the case's failed state.
  defp exec_case_setup(%ExUnit.TestCase{name: case_name} = test_case) do
    {:ok, context} = case_name.__ex_unit__(:setup_all, %{case: case_name})
    {:ok, test_case, context}
  catch
    kind, error ->
      failed = failed(kind, error, pruned_stacktrace())
      {:error, %{test_case | state: failed}}
  end
  # run_test/4 dispatches on the capture-log setting:
  #   true  -> capture with default options
  #   false -> no capturing, run the test directly
  #   opts  -> run the test inside ExUnit.CaptureLog and attach the captured
  #            output to the test's :logs field
  defp run_test(true, config, test, context) do
    run_test([], config, test, context)
  end
  defp run_test(false, config, test, context) do
    spawn_test(config, test, context)
  end
  defp run_test(opts, config, test, context) do
    ref = make_ref()
    try do
      ExUnit.CaptureLog.capture_log(opts, fn ->
        # The finished test struct is sent to ourselves so it survives the
        # capture_log call (which only returns the captured string).
        send self(), {ref, spawn_test(config, test, context)}
      end)
    catch
      :exit, :noproc ->
        # capture_log exits with :noproc when :logger is not started.
        message =
          "could not run test, it uses @tag :capture_log" <>
          " but the :logger application is not running"
        %{test | state: failed(:error, RuntimeError.exception(message), [])}
    else
      logged ->
        receive do
          {^ref, test} -> %{test | logs: logged}
        end
    end
  end
defp run_test(config, %{tags: tags} = test, context) do
EM.test_started(config.manager, test)
if is_nil(test.state) do
capture_log? = Map.get(tags, :capture_log, config.capture_log)
test = run_test(capture_log?, config, test, Map.merge(tags, context))
end
EM.test_finished(config.manager, test)
end
  # Executes one test in a monitored child process, timing it with :timer.tc.
  # Waits for the child's :test_finished message (then flushes the DOWN
  # message), an abnormal DOWN (recorded as a failure), or the configured
  # timeout — in which case the child's current stacktrace is captured, the
  # child is killed, and the test is marked with ExUnit.TimeoutError.
  # Finally runs the test's on_exit callbacks in this process.
  defp spawn_test(config, test, context) do
    parent = self()
    {test_pid, test_ref} =
      spawn_monitor(fn ->
        ExUnit.OnExitHandler.register(self)
        {us, test} =
          :timer.tc(fn ->
            case exec_test_setup(test, context) do
              {:ok, test} ->
                exec_test(test)
              {:error, test} ->
                test
            end
          end)
        send parent, {self, :test_finished, %{test | time: us}}
        exit(:shutdown)
      end)
    timeout = get_timeout(test.tags, config)
    test =
      receive do
        {^test_pid, :test_finished, test} ->
          receive do
            {:DOWN, ^test_ref, :process, ^test_pid, _} -> :ok
          end
          test
        {:DOWN, ^test_ref, :process, ^test_pid, error} ->
          %{test | state: failed({:EXIT, test_pid}, error, [])}
      after
        timeout ->
          # Grab the hung test's stacktrace for the error report; the process
          # may already be gone, hence the catch-all.
          stacktrace =
            try do
              Process.info(test_pid, :current_stacktrace)
            catch
              _, _ -> []
            else
              {:current_stacktrace, stacktrace} -> stacktrace
            end
          Process.exit(test_pid, :kill)
          Process.demonitor(test_ref, [:flush])
          %{test | state: failed(:error, %ExUnit.TimeoutError{timeout: timeout}, stacktrace)}
      end
    exec_on_exit(test, test_pid)
  end
  # Runs the case's per-test setup callbacks; the resulting context becomes
  # the test's tags. Any failure is recorded as the test's failed state.
  defp exec_test_setup(%ExUnit.Test{case: case} = test, context) do
    {:ok, context} = case.__ex_unit__(:setup, context)
    {:ok, %{test | tags: context}}
  catch
    kind, error ->
      {:error, %{test | state: failed(kind, error, pruned_stacktrace())}}
  end
  # Invokes the test function itself (each test is a function named after the
  # test, taking the context). Failures of any kind become the failed state.
  defp exec_test(%ExUnit.Test{case: case, name: name, tags: context} = test) do
    apply(case, name, [context])
    test
  catch
    kind, error ->
      %{test | state: failed(kind, error, pruned_stacktrace())}
  end
  # Runs the on_exit callbacks registered by `pid`; a callback failure only
  # overrides the state when the test/case has not already failed.
  defp exec_on_exit(test_or_case, pid) do
    case ExUnit.OnExitHandler.run(pid) do
      :ok ->
        test_or_case
      {kind, reason, stack} ->
        state = test_or_case.state || failed(kind, reason, prune_stacktrace(stack))
        %{test_or_case | state: state}
    end
  end
## Helpers
defp get_timeout(tags, config) do
if config.trace do
:infinity
else
Map.get(tags, :timeout, config.timeout)
end
end
  # Seed 0 means "do not randomize": the list is only reversed so execution
  # order is deterministic.
  defp shuffle(%{seed: 0}, list) do
    Enum.reverse(list)
  end
  # Otherwise re-seed the process RNG from the suite seed so the shuffle is
  # reproducible across runs with the same --seed.
  defp shuffle(%{seed: seed}, list) do
    _ = :rand.seed(:exsplus, {3172, 9814, seed})
    Enum.shuffle(list)
  end
defp take_async_cases(config, count) do
case config.async_cases do
[] -> nil
cases ->
{response, remaining} = Enum.split(cases, count)
{%{config | async_cases: remaining}, response}
end
end
defp take_sync_cases(config) do
case config.sync_cases do
[h|t] -> {%{config | sync_cases: t}, [h]}
[] -> nil
end
end
  # Builds the {:failed, [...]} state. A MultiError expands into one entry
  # per wrapped error, each normalized and with its stacktrace pruned.
  defp failed(:error, %ExUnit.MultiError{errors: errors}, _stack) do
    {:failed,
     Enum.map(errors, fn {kind, reason, stack} ->
       {kind, Exception.normalize(kind, reason), prune_stacktrace(stack)}
     end)}
  end
  defp failed(kind, reason, stack) do
    {:failed, [{kind, Exception.normalize(kind, reason), stack}]}
  end
  # Captures the stacktrace of the exception being handled and prunes it.
  # (System.stacktrace/0 is deprecated in modern Elixir in favor of
  # __STACKTRACE__ — NOTE(review): only valid inside rescue/catch callers.)
  defp pruned_stacktrace, do: prune_stacktrace(System.stacktrace)
  # Assertions can pop-up in the middle of the stack
  defp prune_stacktrace([{ExUnit.Assertions, _, _, _}|t]), do: prune_stacktrace(t)
  # As soon as we see a Runner, it is time to ignore the stacktrace
  defp prune_stacktrace([{ExUnit.Runner, _, _, _}|_]), do: []
  # All other cases
  defp prune_stacktrace([h|t]), do: [h|prune_stacktrace(t)]
  defp prune_stacktrace([]), do: []
end
| 27.777174 | 93 | 0.603404 |
1cf5e969a5959f57e21b3f2ba95ec08099022b19 | 206 | exs | Elixir | test/test_helper.exs | StephenTurley/opencensus_ecto | b15f0b54ee07203e61714edb840ff165f8d4a719 | [
"BSD-3-Clause"
] | null | null | null | test/test_helper.exs | StephenTurley/opencensus_ecto | b15f0b54ee07203e61714edb840ff165f8d4a719 | [
"BSD-3-Clause"
] | null | null | null | test/test_helper.exs | StephenTurley/opencensus_ecto | b15f0b54ee07203e61714edb840ff165f8d4a719 | [
"BSD-3-Clause"
] | 1 | 2022-01-11T19:06:00.000Z | 2022-01-11T19:06:00.000Z | Application.ensure_all_started(:opencensus)
# Boot the dependencies the tests instrument, start the repo under test,
# then start ExUnit and put the SQL sandbox in manual checkout mode so each
# test explicitly owns its connection.
Application.ensure_all_started(:telemetry)
OpencensusEcto.TestRepo.start_link()
ExUnit.start()
Ecto.Adapters.SQL.Sandbox.mode(OpencensusEcto.TestRepo, :manual)
| 25.75 | 64 | 0.84466 |
1cf5f288f18c2ceb81d6c70dee874a07e05675d7 | 1,218 | ex | Elixir | lib/portunes_core/repository.ex | portunes/portunes-core | 24112f9f593275611e96d59cc0f0e4117ed0d21f | [
"Apache-2.0"
] | 1 | 2021-02-25T16:49:20.000Z | 2021-02-25T16:49:20.000Z | lib/portunes_core/repository.ex | portunes/portunes-core | 24112f9f593275611e96d59cc0f0e4117ed0d21f | [
"Apache-2.0"
] | null | null | null | lib/portunes_core/repository.ex | portunes/portunes-core | 24112f9f593275611e96d59cc0f0e4117ed0d21f | [
"Apache-2.0"
] | null | null | null | defmodule PortunesCore.Repository do
  @moduledoc """
  Defines the API for accessing configuration repositories, for example a Git repo, a http site or
  any other place where we might persist the configuration.
  """
  alias PortunesCore.Snapshot
  alias PortunesCore.Repositories
  @type repo_name :: String.t()
  @type version :: String.t()
  @type opts :: map
  @type ok_snapshot :: {:ok, Snapshot.t()}
  @type error :: {:error, term}
  @callback get_last_version(repo_name, opts) :: {:ok, String.t()} | error
  @callback get_last_snapshot(repo_name, opts) :: ok_snapshot | error
  @callback get_snapshot(repo_name, version, opts) :: ok_snapshot | error | :noop
  ## Dispatcher
  # The adapter implementing the callbacks above is resolved at COMPILE time
  # from :portunes_core config, defaulting to the stub implementation.
  @adapter Application.compile_env(:portunes_core, [:repository, :adapter], Repositories.Stub)
  # The public functions simply delegate to the configured adapter.
  @spec get_last_version(repo_name, opts) :: {:ok, String.t()} | error
  defdelegate get_last_version(repo_name, opts \\ %{}), to: @adapter
  @spec get_last_snapshot(repo_name, opts) :: ok_snapshot | error
  defdelegate get_last_snapshot(repo_name, opts \\ %{}), to: @adapter
  @spec get_snapshot(repo_name, version, opts) :: ok_snapshot | error | :noop
  defdelegate get_snapshot(repo_name, version, opts \\ %{}), to: @adapter
end
| 36.909091 | 98 | 0.716749 |
1cf60d5fda31b6f8264b3becdc519ab67bd82dc1 | 1,351 | ex | Elixir | lib/ex_mon_web/router.ex | tarcisiooliveira/ExMon-Web-Api | 7771fe9db907d395a4141b8a3c8501ac48a028e6 | [
"MIT"
] | null | null | null | lib/ex_mon_web/router.ex | tarcisiooliveira/ExMon-Web-Api | 7771fe9db907d395a4141b8a3c8501ac48a028e6 | [
"MIT"
] | null | null | null | lib/ex_mon_web/router.ex | tarcisiooliveira/ExMon-Web-Api | 7771fe9db907d395a4141b8a3c8501ac48a028e6 | [
"MIT"
] | null | null | null | defmodule ExMonWeb.Router do
use ExMonWeb, :router
pipeline :api do
plug :accepts, ["json"]
end
pipeline :auth do
plug ExMonWeb.Auth.Pipeline
end
scope "/api", ExMonWeb do
pipe_through :api
post "/trainers", TrainersController, :create
post "/trainers/signin", TrainersController, :sign_in
get "/pokemons/:name", PokemonsController, :show
end
scope "/api", ExMonWeb do
pipe_through [:api, :auth]
resources "/trainers", TrainersController, only: [:show, :delete, :update]
resources "/trainer_pokemons", TrainerPokemonsController,
only: [:create, :show, :delete, :update]
end
scope "/", ExMonWeb do
pipe_through :api
get "/", WelcomeController, :index
end
# Enables LiveDashboard only for development
#
# If you want to use the LiveDashboard in production, you should put
# it behind authentication and allow only admins to access it.
# If your application does not have an admins-only section yet,
# you can use Plug.BasicAuth to set up some basic authentication
# as long as you are also using SSL (which you should anyway).
if Mix.env() in [:dev, :test] do
import Phoenix.LiveDashboard.Router
scope "/" do
pipe_through [:fetch_session, :protect_from_forgery]
live_dashboard "/dashboard", metrics: ExMonWeb.Telemetry
end
end
end
| 28.145833 | 78 | 0.698742 |
1cf66fbb79ec35b7d31773217ada5c91ce51b2a6 | 718 | ex | Elixir | lib/chatbot_api_web/gettext.ex | GregoireLoens/chatbot_api | 0a09ae821332b40d9b0fe1cec0b348fccb874613 | [
"Apache-2.0"
] | null | null | null | lib/chatbot_api_web/gettext.ex | GregoireLoens/chatbot_api | 0a09ae821332b40d9b0fe1cec0b348fccb874613 | [
"Apache-2.0"
] | null | null | null | lib/chatbot_api_web/gettext.ex | GregoireLoens/chatbot_api | 0a09ae821332b40d9b0fe1cec0b348fccb874613 | [
"Apache-2.0"
] | null | null | null | defmodule ChatbotApiWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import ChatbotApiWeb.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :chatbot_api
end
| 28.72 | 72 | 0.685237 |
1cf6734286115304ccfa9cb9bb8a8937ca1e65b7 | 726 | ex | Elixir | lib/plausible_web/plugs/session_timeout_plug.ex | pmhoudry/plausible | 454feec36e62b866ae86e07a1f4133d9782d4365 | [
"MIT"
] | 1 | 2020-04-08T16:39:00.000Z | 2020-04-08T16:39:00.000Z | lib/plausible_web/plugs/session_timeout_plug.ex | pmhoudry/plausible | 454feec36e62b866ae86e07a1f4133d9782d4365 | [
"MIT"
] | null | null | null | lib/plausible_web/plugs/session_timeout_plug.ex | pmhoudry/plausible | 454feec36e62b866ae86e07a1f4133d9782d4365 | [
"MIT"
] | null | null | null | defmodule PlausibleWeb.SessionTimeoutPlug do
import Plug.Conn
def init(opts \\ []) do
opts
end
def call(conn, opts) do
timeout_at = get_session(conn, :session_timeout_at)
user_id = get_session(conn, :current_user_id)
cond do
user_id && timeout_at && now() > timeout_at ->
conn
|> configure_session(drop: true)
|> delete_resp_cookie("logged_in")
user_id ->
put_session(conn, :session_timeout_at, new_session_timeout_at(opts[:timeout_after_seconds]))
true ->
conn
end
end
defp now do
DateTime.utc_now() |> DateTime.to_unix
end
defp new_session_timeout_at(timeout_after_seconds) do
now() + timeout_after_seconds
end
end
| 22.6875 | 100 | 0.669421 |
1cf6765e9719bd10194a717f9d229991dee7cb41 | 1,983 | ex | Elixir | lib/advent_of_code_2019/day08.ex | scorphus/advent-of-code-2019 | 48305ff3b13b23cac60bed02349775d8feb05a3b | [
"BSD-3-Clause"
] | null | null | null | lib/advent_of_code_2019/day08.ex | scorphus/advent-of-code-2019 | 48305ff3b13b23cac60bed02349775d8feb05a3b | [
"BSD-3-Clause"
] | null | null | null | lib/advent_of_code_2019/day08.ex | scorphus/advent-of-code-2019 | 48305ff3b13b23cac60bed02349775d8feb05a3b | [
"BSD-3-Clause"
] | null | null | null | defmodule AdventOfCode2019.SpaceImageFormat do
@moduledoc """
Day 8 — https://adventofcode.com/2019/day/8
"""
@doc """
iex> ["123456789012"] |> AdventOfCode2019.SpaceImageFormat.part1()
4
"""
@spec part1(Enumerable.t()) :: integer()
def part1(in_stream) do
in_stream
|> Stream.map(fn line ->
line
|> read_image_data()
|> Stream.map(fn l ->
[Enum.count(l, &(&1 == "0")), Enum.count(l, &(&1 == "1")) * Enum.count(l, &(&1 == "2"))]
end)
|> Enum.min_by(fn [a, _] -> a end)
end)
|> Enum.take(1)
|> Enum.at(0)
|> Enum.at(1)
end
@doc """
iex> ["123456789012"] |> AdventOfCode2019.SpaceImageFormat.part2()
"▓▓ ░░▓▓ "
iex> [Enum.join((for _ <- 1..151, do: 1), "")] |> AdventOfCode2019.SpaceImageFormat.part2()
"▓▓"
iex> [Enum.join((for _ <- 1..151, do: 2), "")] |> AdventOfCode2019.SpaceImageFormat.part2()
" "
"""
@spec part2(Enumerable.t()) :: String.t()
def part2(in_stream) do
in_stream
|> Stream.map(fn line ->
line
|> read_image_data()
|> Enum.reduce(&merge_layers/2)
|> Stream.chunk_every(25)
|> Enum.map_join("\n", &join_row/1)
end)
|> Enum.take(1)
|> Enum.at(0)
end
@spec read_image_data(Enumerable.t()) :: Enumerable.t()
defp read_image_data(line) do
line
|> String.trim()
|> String.graphemes()
|> Stream.chunk_every(25 * 6)
end
@spec merge_layers(String.t(), String.t()) :: Enumerable.t()
defp merge_layers(back, fore) do
Enum.zip(back, fore)
|> Enum.map(&merge_pixels/1)
end
@spec merge_pixels(tuple()) :: String.t()
defp merge_pixels({back, "2"}), do: back
defp merge_pixels({_, fore}), do: fore
@spec join_row(Enumerable.t()) :: String.t()
defp join_row(row), do: Enum.map_join(row, &paint_pixels/1)
@spec paint_pixels(String.t()) :: String.t()
defp paint_pixels("0"), do: "░░"
defp paint_pixels("1"), do: "▓▓"
defp paint_pixels(_), do: " "
end
| 26.797297 | 96 | 0.577408 |
1cf686f6626e674388209d3c39dcfd8288c96e39 | 1,092 | ex | Elixir | bleacher_fire/lib/bleacher_fire/application.ex | nyolamike/bleacher_fire | 4ec43b01d06b44c3e39200248f709400a1d60b70 | [
"Apache-2.0"
] | null | null | null | bleacher_fire/lib/bleacher_fire/application.ex | nyolamike/bleacher_fire | 4ec43b01d06b44c3e39200248f709400a1d60b70 | [
"Apache-2.0"
] | 1 | 2021-03-10T05:45:29.000Z | 2021-03-10T05:45:29.000Z | bleacher_fire/lib/bleacher_fire/application.ex | nyolamike/bleacher_fire | 4ec43b01d06b44c3e39200248f709400a1d60b70 | [
"Apache-2.0"
] | null | null | null | defmodule BleacherFire.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
def start(_type, _args) do
# List all child processes to be supervised
children = [
# Start the endpoint when the application starts
BleacherFireWeb.Endpoint,
# Starts a worker by calling: BleacherFire.Worker.start_link(arg)
# {BleacherFire.Worker, arg},
{ReactionsServer.ReactionsAgent, :reactions_agent_process},
{UsersServer.UsersAgent, :users_agent_process},
{DashboardServer.DashboardAgent, :dashboard_agent_process}
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: BleacherFire.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
BleacherFireWeb.Endpoint.config_change(changed, removed)
:ok
end
end
| 33.090909 | 71 | 0.737179 |
1cf6957206bb416d23becaf425b7cf450e4103c5 | 1,983 | exs | Elixir | meetup/meetup.exs | nlhuykhang/elixir-exercism | 0462661cc411cb28b4bf800639b16684480a06a7 | [
"MIT"
] | null | null | null | meetup/meetup.exs | nlhuykhang/elixir-exercism | 0462661cc411cb28b4bf800639b16684480a06a7 | [
"MIT"
] | null | null | null | meetup/meetup.exs | nlhuykhang/elixir-exercism | 0462661cc411cb28b4bf800639b16684480a06a7 | [
"MIT"
] | null | null | null | defmodule Meetup do
@moduledoc """
Calculate meetup dates.
"""
@map %{
1 => :monday,
2 => :tuesday,
3 => :wednesday,
4 => :thursday,
5 => :friday,
6 => :saturday,
7 => :sunday,
}
@type weekday ::
:monday | :tuesday | :wednesday
| :thursday | :friday | :saturday | :sunday
@type schedule :: :first | :second | :third | :fourth | :last | :teenth
@doc """
Calculate a meetup date.
The schedule is in which week (1..4, last or "teenth") the meetup date should
fall.
"""
@spec meetup(pos_integer, pos_integer, weekday, schedule) :: :calendar.date
def meetup(year, month, weekday, schedule) do
cond do
schedule == :teenth -> do_teenth(year, month, weekday, 13)
schedule == :first -> do_first(year, month, weekday, 1)
schedule == :second -> do_first(year, month, weekday, 8)
schedule == :third -> do_first(year, month, weekday, 15)
schedule == :fourth -> do_first(year, month, weekday, 22)
true -> do_last(year, month, weekday, :calendar.last_day_of_the_month(year, month))
end
end
defp do_teenth(year, month, _, 19) do
{year, month, 19}
end
defp do_teenth(year, month, weekday, day) do
a = :calendar.day_of_the_week(year, month, day)
cond do
@map[a] == weekday -> {year, month, day}
true -> do_teenth(year, month, weekday, day + 1)
end
end
defp do_first(year, month, _, 31) do
{year, month, 31}
end
defp do_first(year, month, weekday, cur) do
a = :calendar.day_of_the_week(year, month, cur)
cond do
@map[a] == weekday -> {year, month, cur}
true -> do_first(year, month, weekday, cur + 1)
end
end
defp do_last(year, month, _, 22) do
{year, month, 22}
end
defp do_last(year, month, weekday, day) do
a = :calendar.day_of_the_week(year, month, day)
cond do
@map[a] == weekday -> {year, month, day}
true -> do_last(year, month, weekday, day - 1)
end
end
end
| 25.423077 | 89 | 0.602118 |
1cf695d9181c543ff0bab6203caaf7246b811e72 | 46 | ex | Elixir | lib/ex_no_cache.ex | zentetsukenz/ex_no_cache | b88c02ab0c91d81a1e4d516f843310f2d7db45df | [
"Apache-2.0"
] | null | null | null | lib/ex_no_cache.ex | zentetsukenz/ex_no_cache | b88c02ab0c91d81a1e4d516f843310f2d7db45df | [
"Apache-2.0"
] | null | null | null | lib/ex_no_cache.ex | zentetsukenz/ex_no_cache | b88c02ab0c91d81a1e4d516f843310f2d7db45df | [
"Apache-2.0"
] | null | null | null | defmodule ExNoCache do
@moduledoc false
end
| 11.5 | 22 | 0.804348 |
1cf6b37f87ce2748364271e218b9b68e592d9cf2 | 2,803 | exs | Elixir | mix.exs | crwedman/earmark | 55b38ed5ff5ee3d0b48edcecb61f40635b68d17c | [
"Apache-1.1"
] | null | null | null | mix.exs | crwedman/earmark | 55b38ed5ff5ee3d0b48edcecb61f40635b68d17c | [
"Apache-1.1"
] | null | null | null | mix.exs | crwedman/earmark | 55b38ed5ff5ee3d0b48edcecb61f40635b68d17c | [
"Apache-1.1"
] | null | null | null | defmodule Earmark.Mixfile do
use Mix.Project
@version "1.4.4"
@url "https://github.com/pragdave/earmark"
@deps [
# {:credo, "~> 0.10", only: [:dev, :test]},
# {:dialyxir, "~> 0.5", only: [:dev, :test]}
{:benchfella, "~> 0.3.0", only: [:dev]},
{:excoveralls, "~> 0.11.2", only: [:test]},
{:floki, "~> 0.21", only: [:dev, :test]},
]
@description """
Earmark is a pure-Elixir Markdown converter.
It is intended to be used as a library (just call Earmark.as_html),
but can also be used as a command-line tool (run mix escript.build
first).
Output generation is pluggable.
"""
############################################################
def project do
[
app: :earmark,
version: @version,
elixir: "~> 1.7",
elixirc_paths: elixirc_paths(Mix.env()),
escript: escript_config(),
deps: @deps,
description: @description,
package: package(),
preferred_cli_env: [
coveralls: :test,
"coveralls.detail": :test,
"coveralls.post": :test,
"coveralls.html": :test
],
test_coverage: [tool: ExCoveralls],
aliases: [docs: &build_docs/1, readme: &readme/1]
]
end
def application do
[
applications: []
]
end
defp package do
[
files: [
"lib",
"src/*.xrl",
"src/*.yrl",
"mix.exs",
"README.md"
],
maintainers: [
"Robert Dober <robert.dober@gmail.com>",
"Dave Thomas <dave@pragdave.me>"
],
licenses: [
"Apache 2 (see the file LICENSE for details)"
],
links: %{
"GitHub" => "https://github.com/pragdave/earmark"
}
]
end
defp escript_config do
[main_module: Earmark.CLI]
end
defp elixirc_paths(:test), do: ["lib", "test/support", "dev"]
defp elixirc_paths(:dev), do: ["lib", "bench", "dev"]
defp elixirc_paths(_), do: ["lib"]
@prerequisites """
run `mix escript.install hex ex_doc` and adjust `PATH` accordingly
"""
defp build_docs(_) do
Mix.Task.run("compile")
ex_doc = Path.join(Mix.Local.path_for(:escript), "ex_doc")
Mix.shell.info("Using escript: #{ex_doc} to build the docs")
unless File.exists?(ex_doc) do
raise "cannot build docs because escript for ex_doc is not installed, make sure to \n#{@prerequisites}"
end
args = ["Earmark", @version, Mix.Project.compile_path()]
opts = ~w[--main Earmark --source-ref v#{@version} --source-url #{@url}]
Mix.shell.info("Running: #{ex_doc} #{inspect(args ++ opts)}")
System.cmd(ex_doc, args ++ opts)
Mix.shell.info("Docs built successfully")
end
defp readme(args) do
Code.load_file("tasks/readme.exs")
Mix.Tasks.Readme.run(args)
end
end
# SPDX-License-Identifier: Apache-2.0
| 25.026786 | 109 | 0.572958 |
1cf6b40b7144b9db07863c6e8a6befec0d4eaf62 | 2,036 | ex | Elixir | clients/dlp/lib/google_api/dlp/v2beta1/model/google_privacy_dlp_v2beta1_inspect_content_request.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2beta1/model/google_privacy_dlp_v2beta1_inspect_content_request.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2beta1/model/google_privacy_dlp_v2beta1_inspect_content_request.ex | GoNZooo/elixir-google-api | cf3ad7392921177f68091f3d9001f1b01b92f1cc | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
# Auto-generated API model struct (see the codegen notice above); the atom
# keys keep the API's original camelCase field names, hence the quoted atoms.
defmodule GoogleApi.DLP.V2beta1.Model.GooglePrivacyDlpV2beta1InspectContentRequest do
  @moduledoc """
  Request to search for potentially sensitive info in a list of items.

  ## Attributes

  - inspectConfig (GooglePrivacyDlpV2beta1InspectConfig): Configuration for the inspector. Defaults to: `null`.
  - items (List[GooglePrivacyDlpV2beta1ContentItem]): The list of items to inspect. Items in a single request are considered \"related\" unless inspect_config.independent_inputs is true. Up to 100 are allowed per request. Defaults to: `null`.
  """

  defstruct [
    :"inspectConfig",
    :"items"
  ]
end
# JSON decoding: rebuilds the nested model structs from the decoded map.
defimpl Poison.Decoder, for: GoogleApi.DLP.V2beta1.Model.GooglePrivacyDlpV2beta1InspectContentRequest do
  import GoogleApi.DLP.V2beta1.Deserializer
  def decode(value, options) do
    value
    |> deserialize(:"inspectConfig", :struct, GoogleApi.DLP.V2beta1.Model.GooglePrivacyDlpV2beta1InspectConfig, options)
    |> deserialize(:"items", :list, GoogleApi.DLP.V2beta1.Model.GooglePrivacyDlpV2beta1ContentItem, options)
  end
end
# JSON encoding: drops nil fields before serialization (generated-code
# convention; the helper lives in the Deserializer module).
defimpl Poison.Encoder, for: GoogleApi.DLP.V2beta1.Model.GooglePrivacyDlpV2beta1InspectContentRequest do
  def encode(value, options) do
    GoogleApi.DLP.V2beta1.Deserializer.serialize_non_nil(value, options)
  end
end
| 39.921569 | 252 | 0.777505 |
1cf6d22dbf967f682c1ae3d0f9e3d1da446edfbd | 228 | ex | Elixir | lib/rockelivery_web/views/orders_view.ex | cassiofariasmachado/rockelivery | 3d88d4d8af1cdc3f2988edc69162d848009babbd | [
"MIT"
] | 1 | 2021-09-27T06:15:08.000Z | 2021-09-27T06:15:08.000Z | lib/rockelivery_web/views/orders_view.ex | cassiofariasmachado/rockelivery | 3d88d4d8af1cdc3f2988edc69162d848009babbd | [
"MIT"
] | null | null | null | lib/rockelivery_web/views/orders_view.ex | cassiofariasmachado/rockelivery | 3d88d4d8af1cdc3f2988edc69162d848009babbd | [
"MIT"
] | 1 | 2021-12-21T12:47:59.000Z | 2021-12-21T12:47:59.000Z | defmodule RockeliveryWeb.OrdersView do
use RockeliveryWeb, :view
alias Rockelivery.Order
def render("create.json", %{order: %Order{} = order}) do
%{
message: "Order created",
order: order
}
end
end
| 17.538462 | 58 | 0.649123 |
1cf6f16fb8b0537e5e123467ffd233c36cb6cc3f | 99 | exs | Elixir | rtp_to_hls/.iex.exs | membraneframework/videoroom_advanced | d5bfbcec7558bb7ddd4f74742c0d842e5d759b21 | [
"Apache-2.0"
] | 84 | 2020-08-01T14:57:29.000Z | 2022-03-27T13:28:23.000Z | rtp_to_hls/.iex.exs | membraneframework/videoroom_advanced | d5bfbcec7558bb7ddd4f74742c0d842e5d759b21 | [
"Apache-2.0"
] | 75 | 2020-08-24T08:01:53.000Z | 2022-03-17T10:41:22.000Z | rtp_to_hls/.iex.exs | membraneframework/videoroom_advanced | d5bfbcec7558bb7ddd4f74742c0d842e5d759b21 | [
"Apache-2.0"
] | 17 | 2020-09-15T21:04:23.000Z | 2022-03-31T07:43:48.000Z | {:ok, variable}= Membrane.Demo.RtpToHls.Pipeline.start_link(5000)
Membrane.Pipeline.play(variable)
| 33 | 65 | 0.808081 |
1cf738a3deb227df63d54c69ea93399163b3ffd5 | 1,188 | exs | Elixir | test/exceptions_test.exs | blake-education/csv | 7a9ace3b0f844f0832f96e2b04286ef1a6f4689c | [
"MIT"
] | 464 | 2015-03-24T22:03:51.000Z | 2022-03-31T06:24:40.000Z | test/exceptions_test.exs | blake-education/csv | 7a9ace3b0f844f0832f96e2b04286ef1a6f4689c | [
"MIT"
] | 104 | 2015-03-25T09:52:34.000Z | 2022-03-11T11:22:47.000Z | test/exceptions_test.exs | blake-education/csv | 7a9ace3b0f844f0832f96e2b04286ef1a6f4689c | [
"MIT"
] | 93 | 2015-03-25T08:58:00.000Z | 2021-12-08T19:44:45.000Z | defmodule ExceptionsTest do
use ExUnit.Case
alias CSV.EncodingError
alias CSV.EscapeSequenceError
alias CSV.StrayQuoteError
test "exception messaging about encoding errors" do
exception = EncodingError.exception(line: 1, message: "BAD ENCODING")
assert exception.message == "BAD ENCODING on line 1"
end
  # Pins the exact, multi-line wording of the unterminated-escape-sequence
  # message, including the hexdocs link pointing users at :escape_max_lines.
  test "exception messaging about unfinished escape sequences" do
    exception =
      EscapeSequenceError.exception(
        line: 1,
        escape_sequence: "SEQUENCE END",
        escape_max_lines: 2
      )
    # Note the message contains only a prefix of the configured escape
    # sequence ("SEQUENCE E" rather than the full "SEQUENCE END").
    assert exception.message ==
             "Escape sequence started on line 1 near \"SEQUENCE E\" did not terminate.\n\n" <>
               "Escape sequences are allowed to span up to 2 lines. This threshold avoids " <>
               "collecting the whole file into memory when an escape sequence does not terminate. " <>
               "You can change it using the escape_max_lines option: https://hexdocs.pm/csv/CSV.html#decode/2"
  end
test "exception messaging about stray quote errors" do
exception = StrayQuoteError.exception(line: 1, field: "THIS")
assert exception.message == "Stray quote on line 1 near \"THIS\""
end
end
| 33.942857 | 110 | 0.689394 |
1cf73eea495834e8a9d4421932bf730082932799 | 1,674 | ex | Elixir | lib/twivia/accounts.ex | liviaab/twivia | ce00db2ef8375ef5c6f1c1f996aa7c44fa994a8e | [
"MIT"
] | null | null | null | lib/twivia/accounts.ex | liviaab/twivia | ce00db2ef8375ef5c6f1c1f996aa7c44fa994a8e | [
"MIT"
] | null | null | null | lib/twivia/accounts.ex | liviaab/twivia | ce00db2ef8375ef5c6f1c1f996aa7c44fa994a8e | [
"MIT"
] | null | null | null | defmodule Twivia.Accounts do
@moduledoc """
The Accounts context.
"""
import Ecto.Query, warn: false
alias Twivia.Repo
alias Twivia.Accounts.User
@doc """
Returns the list of users.
## Examples
iex> list_users()
[%User{}, ...]
"""
def list_users do
Repo.all(User)
end
@doc """
Gets a single user.
Raises `Ecto.NoResultsError` if the User does not exist.
## Examples
iex> get_user!(123)
%User{}
iex> get_user!(456)
** (Ecto.NoResultsError)
"""
def get_user!(id), do: Repo.get!(User, id)
@doc """
Creates a user.
## Examples
iex> create_user(%{field: value})
{:ok, %User{}}
iex> create_user(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_user(attrs \\ %{}) do
%User{}
|> User.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a user.
## Examples
iex> update_user(user, %{field: new_value})
{:ok, %User{}}
iex> update_user(user, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_user(%User{} = user, attrs) do
user
|> User.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a user.
## Examples
iex> delete_user(user)
{:ok, %User{}}
iex> delete_user(user)
{:error, %Ecto.Changeset{}}
"""
def delete_user(%User{} = user) do
Repo.delete(user)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking user changes.
## Examples
iex> change_user(user)
%Ecto.Changeset{data: %User{}}
"""
def change_user(%User{} = user, attrs \\ %{}) do
User.changeset(user, attrs)
end
end
| 15.942857 | 59 | 0.565114 |
1cf74977e72e985a9082acaeeb02ac2429a749bd | 1,570 | exs | Elixir | test/order_management_test.exs | fbettag/ingram_marketplace.ex | 1c63d391707058fb8cf58fdefd54e2ade97acf4b | [
"MIT"
] | null | null | null | test/order_management_test.exs | fbettag/ingram_marketplace.ex | 1c63d391707058fb8cf58fdefd54e2ade97acf4b | [
"MIT"
] | null | null | null | test/order_management_test.exs | fbettag/ingram_marketplace.ex | 1c63d391707058fb8cf58fdefd54e2ade97acf4b | [
"MIT"
] | null | null | null | defmodule Multicloud.Ingram.OrderManagementTest do
use ExUnit.Case
alias Ingram.Marketplace.Model
alias Ingram.Marketplace.{CustomerManagement, OrderManagement}
  # End-to-end flow against the Ingram Marketplace API:
  # create customer -> estimate order -> place order -> read it back -> list.
  test "Order Management" do
    customer = TestHelper.customer_factory()
    # `customer` is rebound here to the API's response struct, so later
    # references use the server-assigned id.
    assert {:ok, %Model.GetCustomerDetails{} = customer} =
             CustomerManagement.create_customer(customer)
    # First ask for a price estimation for a single product.
    # NOTE(review): the mpn looks like a sandbox/test SKU — confirm it exists
    # in the target environment.
    estimate = %Model.OrderEstimationRequest{
      type: "sales",
      customerId: customer.id,
      products: [
        %Model.OrderProductToEstimation{
          mpn: "aaaaaaaa-aaaa-ffff-ffff-ffffffffffff",
          quantity: 1
        }
      ]
    }
    assert {:ok, %Model.OrderEstimationDetails{} = _estimation} =
             OrderManagement.estimate_order(estimate)
    # Random PO number avoids collisions between repeated test runs.
    order = %Model.OrderDetails{
      poNumber: "#{:rand.uniform(1_000_000)}",
      products: [
        %Model.OrderProduct{
          mpn: "aaaaaaaa-aaaa-ffff-ffff-ffffffffffff",
          quantity: 1
          # parameters: [
          #   %Model.ProductParameter{
          #     name: "",
          #     value: false
          #   }
          # ]
        }
      ],
      type: "sales",
      customerId: customer.id
    }
    assert {:ok, %Model.Order{} = ordered} = OrderManagement.create_order(order)
    # The freshly created order must be retrievable by id ...
    assert {:ok, %Model.OrderDetailsWithPrices{} = _details} =
             OrderManagement.get_order_details(ordered.id)
    # ... and appear in the customer's order listing.
    assert {:ok, %{:data => orders}} =
             OrderManagement.get_orders(customerId: customer.id, limit: 1000)
    assert Enum.any?(orders, fn order -> order.id == ordered.id end)
  end
end
| 28.545455 | 80 | 0.612102 |
1cf79efb8bb98114a62daaa52e0f6d303e5c6381 | 2,159 | ex | Elixir | apps/olx/lib/problem.ex | betrybe/esperanto | 65e40c28914397ef77f5d2c4c29001330beba19f | [
"Apache-2.0"
] | 6 | 2021-07-19T20:00:22.000Z | 2021-11-03T03:27:40.000Z | apps/olx/lib/problem.ex | betrybe/esperanto | 65e40c28914397ef77f5d2c4c29001330beba19f | [
"Apache-2.0"
] | null | null | null | apps/olx/lib/problem.ex | betrybe/esperanto | 65e40c28914397ef77f5d2c4c29001330beba19f | [
"Apache-2.0"
] | null | null | null | defmodule Esperanto.Olx.Problem do
@moduledoc """
Parse and OLX problem
"""
alias Esperanto.Parser
alias Esperanto.Parsers.TopLevel
alias Esperanto.Walker
def parse(input) do
parsers = [
{Esperanto.Olx.Parsers.Label, nil},
{Esperanto.Olx.Parsers.IncorrectChoice, nil},
{Esperanto.Olx.Parsers.CorrectChoice, nil},
{Esperanto.Olx.Parsers.ChoiceHint, nil}
]
walker =
if String.ends_with?(input, "\n") do
Walker.start(input)
else
Walker.start(input <> "\n")
end
multiple_choice_response = NaryTree.Node.new(:multiplechoiceresponse)
tree =
NaryTree.Node.new(:problem)
|> NaryTree.new()
|> NaryTree.add_child(multiple_choice_response)
TopLevel.parse(walker, tree, multiple_choice_response.id,
parsers: TopLevel.default_parsers() ++ parsers
)
end
def to_xml(input, opts \\ []) do
input
|> parse()
|> elem(0)
|> Parser.to_xml(opts)
end
def to_struct(doc) do
import SweetXml
doc
|> SweetXml.parse()
|> xpath(~x"/problem/multiplechoiceresponse"e)
|> question_to_struct()
end
defp to_xml_str(element) do
element
|> :xmerl.export_simple_content(:xmerl_xml)
|> List.flatten()
|> to_string()
end
defp question_to_struct(element) do
%{
question: label_to_struct(element),
choices: choices_to_struct(element)
}
end
defp label_to_struct(element) do
import SweetXml
element
|> xpath(~x"./label/*"l)
|> to_xml_str()
end
defp choices_to_struct(element) do
import SweetXml
element
|> xpath(~x"./choicegroup/choice"l)
|> Enum.map(&choice_to_struct/1)
end
  # Converts one parsed <choice> element into a plain map with the choice's
  # rendered content, its optional <choicehint> markup, and the raw value of
  # its `correct` attribute, each serialized back to an XML string.
  defp choice_to_struct(choice_node) do
    import SweetXml
    # The `correct` attribute node list, rendered to a string.
    is_correct =
      choice_node
      |> xpath(~x"./@correct"l)
      |> to_xml_str()
    # Children of the <choicehint> element, if present (empty string otherwise).
    choice_hint =
      choice_node
      |> xpath(~x"./choicehint/*"l)
      |> to_xml_str()
    # Everything under <choice> except the hint is the visible choice content.
    content =
      choice_node
      |> xpath(~x"./*[not(self::choicehint)]"l)
      |> to_xml_str()
    %{
      content: content,
      choicehint: choice_hint,
      is_correct: is_correct
    }
  end
end
| 19.990741 | 73 | 0.624826 |
1cf7c1fddf469a07419a656f3d0d0d1299d42fe4 | 817 | exs | Elixir | mix.exs | kianmeng/gun | fcd889a598ff90aab21f24fc3ca1a304dad00b0a | [
"0BSD"
] | null | null | null | mix.exs | kianmeng/gun | fcd889a598ff90aab21f24fc3ca1a304dad00b0a | [
"0BSD"
] | 2 | 2020-02-03T03:25:53.000Z | 2021-12-23T13:27:11.000Z | mix.exs | kianmeng/gun | fcd889a598ff90aab21f24fc3ca1a304dad00b0a | [
"0BSD"
] | 7 | 2019-08-15T15:38:45.000Z | 2021-12-31T13:15:07.000Z | defmodule Gun.Mixfile do
use Mix.Project
  # Mix project definition; description/deps/package are filled in by the
  # private helpers below.
  def project do
    [
      app: :gun,
      version: "2.0.0",
      description: description(),
      deps: deps(),
      package: package()
    ]
  end
defp description do
"""
HTTP/1.1, HTTP/2 and Websocket client for Erlang/OTP.
"""
end
defp deps do
[
{:cowlib, "~> 2.8.0"},
{:ex_doc, ">= 0.0.0", only: :dev, runtime: false}
]
end
defp package do
[
name: "grpc_gun",
maintainers: ["Loïc Hoguin"],
licenses: ["ISC"],
build_tools: ["rebar3"],
links: %{
"GitHub" => "https://github.com/elixir-grpc/gun",
"Original GitHub" => "https://github.com/ninenines/gun"
},
files: ["erlang.mk", "LICENSE", "Makefile", "README.asciidoc", "rebar.config", "src"]
]
end
end
| 19.926829 | 91 | 0.531212 |
1cf7ceb4711acf4911ae67e28b53754fae741c56 | 2,585 | ex | Elixir | lib/credo/execution/task/validate_config.ex | sgerrand/credo | 56d3163310d3ef50a2ea1bb26b322842f73c9db0 | [
"MIT"
] | 1 | 2021-12-01T13:37:43.000Z | 2021-12-01T13:37:43.000Z | lib/credo/execution/task/validate_config.ex | sgerrand/credo | 56d3163310d3ef50a2ea1bb26b322842f73c9db0 | [
"MIT"
] | null | null | null | lib/credo/execution/task/validate_config.ex | sgerrand/credo | 56d3163310d3ef50a2ea1bb26b322842f73c9db0 | [
"MIT"
] | null | null | null | defmodule Credo.Execution.Task.ValidateConfig do
@moduledoc false
use Credo.Execution.Task
alias Credo.Check
alias Credo.CLI.Output.UI
def call(exec, _opts) do
exec
|> validate_checks()
|> remove_missing_checks()
|> inspect_config_if_debug()
end
defp validate_checks(%Execution{checks: checks} = exec) do
Enum.each(checks, fn check_tuple ->
warn_if_check_missing(check_tuple)
warn_if_check_params_invalid(check_tuple)
end)
exec
end
defp warn_if_check_params_invalid({_check, false}), do: nil
defp warn_if_check_params_invalid({_check, []}), do: nil
defp warn_if_check_params_invalid({check, params}) do
if Check.defined?(check) do
valid_param_names = check.param_names ++ Credo.Check.builtin_param_names()
check = check |> to_string |> String.to_existing_atom()
Enum.each(params, fn {param_name, _param_value} ->
unless Enum.member?(valid_param_names, param_name) do
candidate = find_best_match(valid_param_names, param_name)
warning =
if candidate do
"** (config) #{check_name(check)}: unknown param `#{param_name}`. Did you mean `#{
candidate
}`?"
else
"** (config) #{check_name(check)}: unknown param `#{param_name}`."
end
UI.warn([:red, warning])
end
end)
end
end
defp find_best_match(candidates, given, threshold \\ 0.8) do
given_string = to_string(given)
{jaro_distance, candidate} =
candidates
|> Enum.map(fn candidate_name ->
distance = String.jaro_distance(given_string, to_string(candidate_name))
{distance, candidate_name}
end)
|> Enum.sort()
|> List.last()
if jaro_distance > threshold do
candidate
end
end
defp warn_if_check_missing({check, _params}) do
unless Check.defined?(check) do
UI.warn([:red, "** (config) Ignoring an undefined check: #{check_name(check)}"])
end
end
defp check_name(atom) do
atom
|> to_string()
|> String.replace(~r/^Elixir\./, "")
end
defp inspect_config_if_debug(%Execution{debug: true} = exec) do
require Logger
Logger.debug(fn ->
"""
Execution struct after #{__MODULE__}:
#{inspect(exec, pretty: true)}
"""
end)
exec
end
defp inspect_config_if_debug(exec), do: exec
defp remove_missing_checks(%Execution{checks: checks} = exec) do
checks = Enum.filter(checks, &Check.defined?/1)
%Execution{exec | checks: checks}
end
end
| 25.097087 | 96 | 0.638685 |
1cf7d5a7349f0c29ba6a5e5f46b9dcb97541fdfa | 1,765 | ex | Elixir | lib/weather/cli.ex | LanfordCai/ElixirWeather | a7667bde35b6ee1073c324266ae15dbadb6d33f7 | [
"MIT"
] | 1 | 2016-02-17T19:10:42.000Z | 2016-02-17T19:10:42.000Z | lib/weather/cli.ex | LanfordCai/ElixirWeather | a7667bde35b6ee1073c324266ae15dbadb6d33f7 | [
"MIT"
] | null | null | null | lib/weather/cli.ex | LanfordCai/ElixirWeather | a7667bde35b6ee1073c324266ae15dbadb6d33f7 | [
"MIT"
] | null | null | null | defmodule Weather.CLI do
@moduledoc """
Handle the command line parsing and dispatching to the respective functions
that list the weather conditions of provided city.
"""
def main(argv) do
argv
|> parse_args
|> process
end
@doc """
'argv' can be -h or --help, which returns :help.
Otherwise it is the name of a China city in Chinese.
Return a string of city name, or ':help' if help was given.
"""
defp parse_args(argv) do
parse = OptionParser.parse(argv, switches: [ help: :boolean ], aliases: [ h: :help ])
case parse do
{ [ help: true ], _, _ } -> :help
{ _, [ city ], _ } -> city
_ -> :help
end
end
@doc """
Prints the help message if the command line argv is -h or --help.
"""
defp process(:help) do
IO.puts """
Usage: weather <city>, For example: weather 广州
"""
System.halt(0)
end
@doc """
Fetches the weather_info of the city specified, and finally prints the weather condition
of that city.
"""
defp process(city) do
IO.puts """
Processing...
"""
Weather.WeatherFetcher.fetch_weather(city)
|> decode_response
end
@doc """
If the weather_info successfully fetched, format the data into some tables and prints them out.
"""
defp decode_response({:ok, body}) do
weather_info = body |> Map.get("HeWeather data service 3.0")
[info | _] = weather_info
Weather.TableFormatter.print_info(info)
end
@doc """
In case there is an error when fetching data, prints the error message and the application
exists with code 2
"""
defp decode_response({:error, error}) do
{_, message} = List.keyfind(error, "message", 0)
IO.puts "Error Fetching WeatherInfo: #{message}"
System.halt(2)
end
end
| 24.513889 | 97 | 0.642493 |
1cf7f872619cfec14bce547409585ee08fc6d583 | 2,497 | ex | Elixir | lib/git_bisect_app_web.ex | bdubaut/git-bisect-talk | 5b8691935a4e73307d3099d8b1b3af7dddbc2904 | [
"MIT"
] | null | null | null | lib/git_bisect_app_web.ex | bdubaut/git-bisect-talk | 5b8691935a4e73307d3099d8b1b3af7dddbc2904 | [
"MIT"
] | null | null | null | lib/git_bisect_app_web.ex | bdubaut/git-bisect-talk | 5b8691935a4e73307d3099d8b1b3af7dddbc2904 | [
"MIT"
] | null | null | null | defmodule GitBisectAppWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use GitBisectAppWeb, :controller
use GitBisectAppWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: GitBisectAppWeb
import Plug.Conn
import GitBisectAppWeb.Gettext
alias GitBisectAppWeb.Router.Helpers, as: Routes
end
end
def view do
quote do
use Phoenix.View,
root: "lib/git_bisect_app_web/templates",
namespace: GitBisectAppWeb
# Import convenience functions from controllers
import Phoenix.Controller,
only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]
# Include shared imports and aliases for views
unquote(view_helpers())
end
end
def live_view do
quote do
use Phoenix.LiveView,
layout: {GitBisectAppWeb.LayoutView, "live.html"}
unquote(view_helpers())
end
end
def live_component do
quote do
use Phoenix.LiveComponent
unquote(view_helpers())
end
end
def component do
quote do
use Phoenix.Component
unquote(view_helpers())
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
import Phoenix.LiveView.Router
end
end
def channel do
quote do
use Phoenix.Channel
import GitBisectAppWeb.Gettext
end
end
defp view_helpers do
quote do
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
# Import LiveView and .heex helpers (live_render, live_patch, <.form>, etc)
import Phoenix.LiveView.Helpers
# Import basic rendering functionality (render, render_layout, etc)
import Phoenix.View
import GitBisectAppWeb.ErrorHelpers
import GitBisectAppWeb.Gettext
alias GitBisectAppWeb.Router.Helpers, as: Routes
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 22.495495 | 81 | 0.685623 |
1cf7ffbe3e1db9dadc6f76b54a2d26cae180d08f | 452 | ex | Elixir | lib/lle_cymraeg/ecto_enums.ex | danielcavanagh/lle-cymraeg | 01aebc939254e0dd76427cb460722b830c232e82 | [
"MIT"
] | null | null | null | lib/lle_cymraeg/ecto_enums.ex | danielcavanagh/lle-cymraeg | 01aebc939254e0dd76427cb460722b830c232e82 | [
"MIT"
] | null | null | null | lib/lle_cymraeg/ecto_enums.ex | danielcavanagh/lle-cymraeg | 01aebc939254e0dd76427cb460722b830c232e82 | [
"MIT"
] | null | null | null | import EctoEnum
defenum(AccountRole, :account_role, [:admin, :person])
defenum(Gender, :gender, [:male, :female, :other])
defenum(InvitationType, :invitation_type, [:direct, :location])
defenum(InvitationStatus, :invitation_status, [:pending, :accepted, :declined, :maybe])
defenum(LanguageFrequency, :language_frequency, [:sometimes, :often, :always])
defenum(LanguageProficiency, :language_proficiency, [:beginner, :intermediate, :expert, :native])
| 50.222222 | 97 | 0.767699 |
1cf80bdc26496efde9083d6b940e0be8d84cc690 | 6,883 | ex | Elixir | clients/logging/lib/google_api/logging/v2/model/log_metric.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/logging/lib/google_api/logging/v2/model/log_metric.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/logging/lib/google_api/logging/v2/model/log_metric.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Logging.V2.Model.LogMetric do
@moduledoc """
Describes a logs-based metric. The value of the metric is the number of log entries that match a logs filter in a given time interval.Logs-based metric can also be used to extract values from logs and create a a distribution of the values. The distribution records the statistics of the extracted values along with an optional histogram of the values as specified by the bucket options.
## Attributes
- bucketOptions (BucketOptions): Optional. The bucket_options are required when the logs-based metric is using a DISTRIBUTION value type and it describes the bucket boundaries used to create a histogram of the extracted values. Defaults to: `null`.
- description (String.t): Optional. A description of this metric, which is used in documentation. The maximum length of the description is 8000 characters. Defaults to: `null`.
- filter (String.t): Required. An advanced logs filter which is used to match log entries. Example: \"resource.type=gae_app AND severity>=ERROR\" The maximum length of the filter is 20000 characters. Defaults to: `null`.
- labelExtractors (%{optional(String.t) => String.t}): Optional. A map from a label key string to an extractor expression which is used to extract data from a log entry field and assign as the label value. Each label key specified in the LabelDescriptor must have an associated extractor expression in this map. The syntax of the extractor expression is the same as for the value_extractor field.The extracted value is converted to the type defined in the label descriptor. If the either the extraction or the type conversion fails, the label will have a default value. The default value for a string label is an empty string, for an integer label its 0, and for a boolean label its false.Note that there are upper bounds on the maximum number of labels and the number of active time series that are allowed in a project. Defaults to: `null`.
- metricDescriptor (MetricDescriptor): Optional. The metric descriptor associated with the logs-based metric. If unspecified, it uses a default metric descriptor with a DELTA metric kind, INT64 value type, with no labels and a unit of \"1\". Such a metric counts the number of log entries matching the filter expression.The name, type, and description fields in the metric_descriptor are output only, and is constructed using the name and description field in the LogMetric.To create a logs-based metric that records a distribution of log values, a DELTA metric kind with a DISTRIBUTION value type must be used along with a value_extractor expression in the LogMetric.Each label in the metric descriptor must have a matching label name as the key and an extractor expression as the value in the label_extractors map.The metric_kind and value_type fields in the metric_descriptor cannot be updated once initially configured. New labels can be added in the metric_descriptor, but existing labels cannot be modified except for their description. Defaults to: `null`.
- name (String.t): Required. The client-assigned metric identifier. Examples: \"error_count\", \"nginx/requests\".Metric identifiers are limited to 100 characters and can include only the following characters: A-Z, a-z, 0-9, and the special characters _-.,+!*',()%/. The forward-slash character (/) denotes a hierarchy of name pieces, and it cannot be the first character of the name.The metric identifier in this field must not be URL-encoded (https://en.wikipedia.org/wiki/Percent-encoding). However, when the metric identifier appears as the [METRIC_ID] part of a metric_name API parameter, then the metric identifier must be URL-encoded. Example: \"projects/my-project/metrics/nginx%2Frequests\". Defaults to: `null`.
- valueExtractor (String.t): Optional. A value_extractor is required when using a distribution logs-based metric to extract the values to record from a log entry. Two functions are supported for value extraction: EXTRACT(field) or REGEXP_EXTRACT(field, regex). The argument are: 1. field: The name of the log entry field from which the value is to be extracted. 2. regex: A regular expression using the Google RE2 syntax (https://github.com/google/re2/wiki/Syntax) with a single capture group to extract data from the specified log entry field. The value of the field is converted to a string before applying the regex. It is an error to specify a regex that does not include exactly one capture group.The result of the extraction must be convertible to a double type, as the distribution always records double values. If either the extraction or the conversion to double fails, then those values are not recorded in the distribution.Example: REGEXP_EXTRACT(jsonPayload.request, \".*quantity=(\\d+).*\") Defaults to: `null`.
- version (String.t): Deprecated. The API version that created or updated this metric. The v2 format is used by default and cannot be changed. Defaults to: `null`.
- Enum - one of [V2, V1]
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:bucketOptions => GoogleApi.Logging.V2.Model.BucketOptions.t(),
:description => any(),
:filter => any(),
:labelExtractors => map(),
:metricDescriptor => GoogleApi.Logging.V2.Model.MetricDescriptor.t(),
:name => any(),
:valueExtractor => any(),
:version => any()
}
field(:bucketOptions, as: GoogleApi.Logging.V2.Model.BucketOptions)
field(:description)
field(:filter)
field(:labelExtractors, type: :map)
field(:metricDescriptor, as: GoogleApi.Logging.V2.Model.MetricDescriptor)
field(:name)
field(:valueExtractor)
field(:version)
end
defimpl Poison.Decoder, for: GoogleApi.Logging.V2.Model.LogMetric do
def decode(value, options) do
GoogleApi.Logging.V2.Model.LogMetric.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Logging.V2.Model.LogMetric do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 98.328571 | 1,075 | 0.766236 |
1cf82a0b61cbefb7c8827f65039bf416d3e93c9c | 13,377 | ex | Elixir | lib/oli/delivery/attempts/core.ex | ChristianMurphy/oli-torus | ffeee4996b66b7c6c6eb3e0082d030b8cc6cea97 | [
"MIT"
] | null | null | null | lib/oli/delivery/attempts/core.ex | ChristianMurphy/oli-torus | ffeee4996b66b7c6c6eb3e0082d030b8cc6cea97 | [
"MIT"
] | null | null | null | lib/oli/delivery/attempts/core.ex | ChristianMurphy/oli-torus | ffeee4996b66b7c6c6eb3e0082d030b8cc6cea97 | [
"MIT"
] | null | null | null | defmodule Oli.Delivery.Attempts.Core do
import Ecto.Query, warn: false
alias Oli.Repo
alias Oli.Delivery.Sections
alias Oli.Delivery.Sections.{Section, Enrollment}
alias Oli.Publishing.PublishedResource
alias Oli.Resources.Revision
alias Oli.Delivery.Attempts.Core.{
PartAttempt,
ResourceAccess,
ResourceAttempt,
ActivityAttempt
}
@moduledoc """
Core attempt related functions.
"""
@doc """
Creates or updates an access record for a given resource, section context id and user. When
created the access count is set to 1, otherwise on updates the
access count is incremented.
## Examples
iex> track_access(resource_id, section_slug, user_id)
{:ok, %ResourceAccess{}}
iex> track_access(resource_id, section_slug, user_id)
{:error, %Ecto.Changeset{}}
"""
  def track_access(resource_id, section_slug, user_id) do
    # NOTE(review): assumes the section exists — `section.id` below raises if
    # get_section_by/1 returns nil; confirm callers only pass valid slugs.
    section = Sections.get_section_by(slug: section_slug)
    # Upsert: insert with access_count = 1; if a row already exists for this
    # (resource, user, section) triple, increment its counter instead.
    Oli.Repo.insert!(
      %ResourceAccess{
        access_count: 1,
        user_id: user_id,
        section_id: section.id,
        resource_id: resource_id
      },
      on_conflict: [inc: [access_count: 1]],
      conflict_target: [:resource_id, :user_id, :section_id]
    )
  end
@doc """
Retrieves all graded resource access for a given context
`[%ResourceAccess{}, ...]`
"""
def get_graded_resource_access_for_context(section_slug) do
Repo.all(
from(a in ResourceAccess,
join: s in Section,
on: a.section_id == s.id,
join: p in PublishedResource,
on: s.publication_id == p.publication_id,
join: r in Revision,
on: p.revision_id == r.id,
where: s.slug == ^section_slug and s.status != :deleted and r.graded == true,
select: a
)
)
end
  @doc """
  Retrieves all graded resource access records for a single page (resource)
  within a section.

  `[%ResourceAccess{}, ...]`
  """
  def get_resource_access_for_page(section_slug, resource_id) do
    Repo.all(
      from(a in ResourceAccess,
        join: s in Section,
        on: a.section_id == s.id,
        join: p in PublishedResource,
        on: s.publication_id == p.publication_id,
        join: r in Revision,
        on: p.revision_id == r.id,
        where:
          s.slug == ^section_slug and s.status != :deleted and r.graded == true and
            r.resource_id == ^resource_id,
        select: a
      )
    )
  end
@doc """
Retrieves all resource accesses for a given context and user
`[%ResourceAccess{}, ...]`
"""
def get_user_resource_accesses_for_context(section_slug, user_id) do
Repo.all(
from(a in ResourceAccess,
join: s in Section,
on: a.section_id == s.id,
join: p in PublishedResource,
on: s.publication_id == p.publication_id,
join: r in Revision,
on: p.revision_id == r.id,
where: s.slug == ^section_slug and s.status != :deleted and a.user_id == ^user_id,
distinct: a.id,
select: a
)
)
end
def get_resource_access(resource_id, section_slug, user_id) do
Repo.one(
from(a in ResourceAccess,
join: s in Section,
on: a.section_id == s.id,
where:
a.user_id == ^user_id and s.slug == ^section_slug and s.status != :deleted and
a.resource_id == ^resource_id,
select: a
)
)
end
def get_part_attempts_and_users_for_publication(publication_id) do
student_role_id = Lti_1p3.Tool.ContextRoles.get_role(:context_learner).id
Repo.all(
from(section in Section,
join: enrollment in Enrollment,
on: enrollment.section_id == section.id,
join: user in Oli.Accounts.User,
on: enrollment.user_id == user.id,
join: raccess in ResourceAccess,
on: user.id == raccess.user_id,
join: rattempt in ResourceAttempt,
on: raccess.id == rattempt.resource_access_id,
join: aattempt in ActivityAttempt,
on: rattempt.id == aattempt.resource_attempt_id,
join: pattempt in PartAttempt,
on: aattempt.id == pattempt.activity_attempt_id,
where: section.publication_id == ^publication_id and
# Only look at evaluated part attempts -> date evaluated not nil
not is_nil(pattempt.date_evaluated),
# only fetch records for users enrolled as students
left_join: er in "enrollments_context_roles",
on: enrollment.id == er.enrollment_id,
left_join: context_role in Lti_1p3.DataProviders.EctoProvider.ContextRole,
on: er.context_role_id == context_role.id and context_role.id == ^student_role_id,
select: %{part_attempt: pattempt, user: user}
)
)
# TODO: This should be done in the query, but can't get the syntax right
|> Enum.map(
&%{
user: &1.user,
part_attempt:
Repo.preload(&1.part_attempt,
activity_attempt: [:revision, revision: :activity_type, resource_attempt: :revision]
)
}
)
end
def has_any_active_attempts?(resource_attempts) do
Enum.any?(resource_attempts, fn r -> r.date_evaluated == nil end)
end
@doc """
Gets a section by activity attempt guid.
## Examples
iex> get_section_by_activity_attempt_guid("123")
%Section{}
iex> get_section_by_activity_attempt_guid("111")
nil
"""
def get_section_by_activity_attempt_guid(activity_attempt_guid) do
Repo.one(
from(activity_attempt in ActivityAttempt,
join: resource_attempt in ResourceAttempt,
on: resource_attempt.id == activity_attempt.resource_attempt_id,
join: resource_access in ResourceAccess,
on: resource_access.id == resource_attempt.resource_access_id,
join: section in Section,
on: section.id == resource_access.section_id,
where: activity_attempt.attempt_guid == ^activity_attempt_guid,
select: section
)
)
end
  @doc """
  Returns the latest part attempt per part for the activity attempt with the
  given guid.
  """
  def get_latest_part_attempts(activity_attempt_guid) do
    # Greatest-n-per-group via self left-join: pa2 is any *later* attempt for
    # the same part; rows where no such pa2 exists (is_nil(pa2)) are the
    # latest attempt for their part.
    Repo.all(
      from(aa in ActivityAttempt,
        join: pa1 in PartAttempt,
        on: aa.id == pa1.activity_attempt_id,
        left_join: pa2 in PartAttempt,
        on:
          aa.id == pa2.activity_attempt_id and pa1.part_id == pa2.part_id and pa1.id < pa2.id and
            pa1.activity_attempt_id == pa2.activity_attempt_id,
        where: aa.attempt_guid == ^activity_attempt_guid and is_nil(pa2),
        select: pa1
      )
    )
  end
@doc """
Retrieves the latest resource attempt for a given resource id,
context id and user id. If no attempts exist, returns nil.
"""
def get_latest_resource_attempt(resource_id, section_slug, user_id) do
Repo.one(
from(a in ResourceAccess,
join: s in Section,
on: a.section_id == s.id,
join: ra1 in ResourceAttempt,
on: a.id == ra1.resource_access_id,
left_join: ra2 in ResourceAttempt,
on:
a.id == ra2.resource_access_id and ra1.id < ra2.id and
ra1.resource_access_id == ra2.resource_access_id,
where:
a.user_id == ^user_id and s.slug == ^section_slug and s.status != :deleted and
a.resource_id == ^resource_id and
is_nil(ra2),
select: ra1
)
)
|> Repo.preload([:revision])
end
@doc """
Retrieves the resource access record and all (if any) attempts related to it
in a two element tuple of the form:
`{%ResourceAccess, [%ResourceAttempt{}]}`
The empty list `[]` will be present if there are no resource attempts.
"""
  def get_resource_attempt_history(resource_id, section_slug, user_id) do
    # NOTE(review): assumes an access record exists — `access.id` raises if
    # get_resource_access/3 returns nil; confirm access is tracked first.
    access = get_resource_access(resource_id, section_slug, user_id)
    id = access.id
    attempts =
      Repo.all(
        from(ra in ResourceAttempt,
          where: ra.resource_access_id == ^id,
          select: ra,
          preload: [:revision]
        )
      )
    # NOTE(review): Repo.all/2 always returns a list (never nil), so the nil
    # branch below is dead code; kept as-is in this documentation-only pass.
    attempt_representation =
      case attempts do
        nil -> []
        records -> records
      end
    {access, attempt_representation}
  end
@doc """
Retrieves all graded resource attempts for a given resource access.
`[%ResourceAccess{}, ...]`
"""
def get_graded_attempts_from_access(resource_access_id) do
Repo.all(
from(a in ResourceAttempt,
join: r in Revision,
on: a.revision_id == r.id,
where: a.resource_access_id == ^resource_access_id and r.graded == true,
select: a
)
)
end
@doc """
Gets a collection of activity attempt records that pertain to a collection of activity
attempt guids. There is no guarantee that the ordering of the results of the activity attempts
will match the ordering of the input guids. Any attempt guids that cannot be found are simply omitted
from the result.
## Examples
iex> get_activity_attempts(["20595ef0-e5f1-474e-880d-f2c20f3a4459", "30b59817-e193-488f-94b1-597420b8670e"])
{:ok, [%ActivityAttempt{}, %ActivityAttempt{}]
iex> get_activity_attempts(["20595ef0-e5f1-474e-880d-f2c20f3a4459", "a-missing-one"])
{:ok, [%ActivityAttempt{}]}
iex> get_activity_attempts(["a-missing-one"])
{:ok, []}
"""
def get_activity_attempts(activity_attempt_guids) do
results =
Repo.all(
from(activity_attempt in ActivityAttempt,
where: activity_attempt.attempt_guid in ^activity_attempt_guids,
preload: [:part_attempts]
)
)
{:ok, results}
end
@doc """
Gets an activity attempt by a clause.
## Examples
iex> get_activity_attempt_by(attempt_guid: "123")
%ActivityAttempt
iex> get_activity_attempt_by(attempt_guid: "111")
nil
"""
def get_activity_attempt_by(clauses),
do: Repo.get_by(ActivityAttempt, clauses) |> Repo.preload(revision: [:activity_type])
@doc """
Gets a part attempt by a clause.
## Examples
iex> get_part_attempt_by(attempt_guid: "123")
%PartAttempt{}
iex> get_part_attempt_by(attempt_guid: "111")
nil
"""
def get_part_attempt_by(clauses), do: Repo.get_by(PartAttempt, clauses)
@doc """
Gets a resource attempt by a clause.
## Examples
iex> get_resource_attempt_by(attempt_guid: "123")
%ResourceAttempt{}
iex> get_resource_attempt_by(attempt_guid: "111")
nil
"""
def get_resource_attempt_by(clauses),
do: Repo.get_by(ResourceAttempt, clauses) |> Repo.preload([:activity_attempts, :revision])
@doc """
Creates a part attempt.
## Examples
iex> create_part_attempt(%{field: value})
{:ok, %PartAttempt{}}
iex> create_part_attempt(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_part_attempt(attrs \\ %{}) do
%PartAttempt{}
|> PartAttempt.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a part attempt.
## Examples
iex> update_part_attempt(part_attempt, %{field: new_value})
{:ok, %PartAttempt{}}
iex> update_part_attempt(part_attempt, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_part_attempt(part_attempt, attrs) do
PartAttempt.changeset(part_attempt, attrs)
|> Repo.update()
end
@doc """
Creates a resource attempt.
## Examples
iex> create_resource_attempt(%{field: value})
{:ok, %ResourceAttempt{}}
iex> create_resource_attempt(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_resource_attempt(attrs \\ %{}) do
%ResourceAttempt{}
|> ResourceAttempt.changeset(attrs)
|> Repo.insert()
end
  @doc """
  Updates an activity attempt.
  ## Examples
      iex> update_activity_attempt(activity_attempt, %{field: new_value})
      {:ok, %ActivityAttempt{}}
      iex> update_activity_attempt(activity_attempt, %{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def update_activity_attempt(activity_attempt, attrs) do
    ActivityAttempt.changeset(activity_attempt, attrs)
    |> Repo.update()
  end
@doc """
Updates an resource access.
## Examples
iex> update_resource_access(revision, %{field: new_value})
{:ok, %ResourceAccess{}}
iex> update_resource_access(revision, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_resource_access(activity_attempt, attrs) do
ResourceAccess.changeset(activity_attempt, attrs)
|> Repo.update()
end
@doc """
Creates an activity attempt.
## Examples
iex> create_activity_attempt(%{field: value})
{:ok, %ActivityAttempt{}}
iex> create_activity_attempt(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_activity_attempt(attrs \\ %{}) do
%ActivityAttempt{}
|> ActivityAttempt.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a resource attempt.
## Examples
iex> update_resource_attempt(revision, %{field: new_value})
{:ok, %ResourceAttempt{}}
iex> update_resource_attempt(revision, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_resource_attempt(resource_attempt, attrs) do
ResourceAttempt.changeset(resource_attempt, attrs)
|> Repo.update()
end
@doc """
Gets a resource attempt by parameter.
## Examples
iex> get_resource_attempt(attempt_guid: "123")
%ResourceAttempt{}
iex> get_resource_attempt(attempt_guid: "notfound")
nil
"""
def get_resource_attempt(clauses),
do: Repo.get_by(ResourceAttempt, clauses)
end
| 30.402273 | 114 | 0.650146 |
1cf83f8adf9c0b788e073e4cd87765b993e1613d | 6,042 | ex | Elixir | clients/classroom/lib/google_api/classroom/v1/model/student_submission.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/classroom/lib/google_api/classroom/v1/model/student_submission.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/classroom/lib/google_api/classroom/v1/model/student_submission.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Classroom.V1.Model.StudentSubmission do
  @moduledoc """
  Student submission for course work.
  StudentSubmission items are generated when a CourseWork item is created.
  StudentSubmissions that have never been accessed (i.e. with `state` = NEW)
  may not have a creation time or update time.
  ## Attributes
  *   `alternateLink` (*type:* `String.t`, *default:* `nil`) - Absolute link to the submission in the Classroom web UI.
      Read-only.
  *   `assignedGrade` (*type:* `float()`, *default:* `nil`) - Optional grade. If unset, no grade was set.
      This value must be non-negative. Decimal (that is, non-integer) values are
      allowed, but are rounded to two decimal places.
      This may be modified only by course teachers.
  *   `assignmentSubmission` (*type:* `GoogleApi.Classroom.V1.Model.AssignmentSubmission.t`, *default:* `nil`) - Submission content when course_work_type is ASSIGNMENT.
      Students can modify this content using
      ModifyAttachments.
  *   `associatedWithDeveloper` (*type:* `boolean()`, *default:* `nil`) - Whether this student submission is associated with the Developer Console
      project making the request.
      See CreateCourseWork for more
      details.
      Read-only.
  *   `courseId` (*type:* `String.t`, *default:* `nil`) - Identifier of the course.
      Read-only.
  *   `courseWorkId` (*type:* `String.t`, *default:* `nil`) - Identifier for the course work this corresponds to.
      Read-only.
  *   `courseWorkType` (*type:* `String.t`, *default:* `nil`) - Type of course work this submission is for.
      Read-only.
  *   `creationTime` (*type:* `DateTime.t`, *default:* `nil`) - Creation time of this submission.
      This may be unset if the student has not accessed this item.
      Read-only.
  *   `draftGrade` (*type:* `float()`, *default:* `nil`) - Optional pending grade. If unset, no grade was set.
      This value must be non-negative. Decimal (that is, non-integer) values are
      allowed, but are rounded to two decimal places.
      This is only visible to and modifiable by course teachers.
  *   `id` (*type:* `String.t`, *default:* `nil`) - Classroom-assigned Identifier for the student submission.
      This is unique among submissions for the relevant course work.
      Read-only.
  *   `late` (*type:* `boolean()`, *default:* `nil`) - Whether this submission is late.
      Read-only.
  *   `multipleChoiceSubmission` (*type:* `GoogleApi.Classroom.V1.Model.MultipleChoiceSubmission.t`, *default:* `nil`) - Submission content when course_work_type is MULTIPLE_CHOICE_QUESTION.
  *   `shortAnswerSubmission` (*type:* `GoogleApi.Classroom.V1.Model.ShortAnswerSubmission.t`, *default:* `nil`) - Submission content when course_work_type is SHORT_ANSWER_QUESTION.
  *   `state` (*type:* `String.t`, *default:* `nil`) - State of this submission.
      Read-only.
  *   `submissionHistory` (*type:* `list(GoogleApi.Classroom.V1.Model.SubmissionHistory.t)`, *default:* `nil`) - The history of the submission (includes state and grade histories).
      Read-only.
  *   `updateTime` (*type:* `DateTime.t`, *default:* `nil`) - Last update time of this submission.
      This may be unset if the student has not accessed this item.
      Read-only.
  *   `userId` (*type:* `String.t`, *default:* `nil`) - Identifier for the student that owns this submission.
      Read-only.
  """

  use GoogleApi.Gax.ModelBase

  # Typespec mirrors the JSON fields exposed by the Classroom API; all fields
  # default to nil (absent in the payload).
  @type t :: %__MODULE__{
          :alternateLink => String.t(),
          :assignedGrade => float(),
          :assignmentSubmission => GoogleApi.Classroom.V1.Model.AssignmentSubmission.t(),
          :associatedWithDeveloper => boolean(),
          :courseId => String.t(),
          :courseWorkId => String.t(),
          :courseWorkType => String.t(),
          :creationTime => DateTime.t(),
          :draftGrade => float(),
          :id => String.t(),
          :late => boolean(),
          :multipleChoiceSubmission => GoogleApi.Classroom.V1.Model.MultipleChoiceSubmission.t(),
          :shortAnswerSubmission => GoogleApi.Classroom.V1.Model.ShortAnswerSubmission.t(),
          :state => String.t(),
          :submissionHistory => list(GoogleApi.Classroom.V1.Model.SubmissionHistory.t()),
          :updateTime => DateTime.t(),
          :userId => String.t()
        }

  # Field registrations consumed by GoogleApi.Gax.ModelBase to build the
  # JSON decoder/encoder; `as:` gives the nested model to decode into.
  field(:alternateLink)
  field(:assignedGrade)
  field(:assignmentSubmission, as: GoogleApi.Classroom.V1.Model.AssignmentSubmission)
  field(:associatedWithDeveloper)
  field(:courseId)
  field(:courseWorkId)
  field(:courseWorkType)
  field(:creationTime, as: DateTime)
  field(:draftGrade)
  field(:id)
  field(:late)
  field(:multipleChoiceSubmission, as: GoogleApi.Classroom.V1.Model.MultipleChoiceSubmission)
  field(:shortAnswerSubmission, as: GoogleApi.Classroom.V1.Model.ShortAnswerSubmission)
  field(:state)
  field(:submissionHistory, as: GoogleApi.Classroom.V1.Model.SubmissionHistory, type: :list)
  field(:updateTime, as: DateTime)
  field(:userId)
end
defimpl Poison.Decoder, for: GoogleApi.Classroom.V1.Model.StudentSubmission do
  # Delegate JSON decoding to the model's generated decode/2.
  def decode(value, options),
    do: GoogleApi.Classroom.V1.Model.StudentSubmission.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Classroom.V1.Model.StudentSubmission do
  # Delegate JSON encoding to the shared ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 42.549296 | 190 | 0.693644 |
1cf8a0ceae1c4379ab4e04e6495eb86a3d092461 | 1,323 | ex | Elixir | web/models/oauth/client.ex | deadmp/core | 4c244af3326f362ac6411e8996fb1cff0b5a29e6 | [
"MIT"
] | null | null | null | web/models/oauth/client.ex | deadmp/core | 4c244af3326f362ac6411e8996fb1cff0b5a29e6 | [
"MIT"
] | null | null | null | web/models/oauth/client.ex | deadmp/core | 4c244af3326f362ac6411e8996fb1cff0b5a29e6 | [
"MIT"
] | null | null | null | defmodule Core.OAuth.Client do
@moduledoc """
OAuth2 Clients
"""
#https://developers.google.com/oauthplayground/#step1&url=https%3A%2F%2F&content_type=application%2Fjson&http_method=GET&useDefaultOauthCred=unchecked&oauthEndpointSelect=Custom&oauthAuthEndpointValue=http%3A%2F%2Flocalhost%3A4000%2Foauth%2Fv1%2Fauthorize&oauthTokenEndpointValue=http%3A%2F%2Flocalhost%3A4000&oauthClientId=c39c96cc-686e-447f-8af2-2c6c4c0a9980&oauthClientSecret=074cfbd9-4ab0-49a4-9a7f-aa15a7a90c76&includeCredentials=checked&accessTokenType=bearer&autoRefreshToken=unchecked&accessType=offline&prompt=consent&response_type=code
use Ecto.Schema
import Ecto.Changeset
alias Core.OAuth.Client
schema "oauth_clients" do
field :cid, :binary_id
field :name, :string
field :url, :string
field :image, :string
belongs_to :user, Core.User
field :redirect, :string
field :secret, :binary_id
has_many :oauth_authorizations, Core.OAuth.Authorization
field :trusted, :boolean
timestamps()
end
@doc false
def changeset(%Client{} = client, attrs) do
client
|> cast(attrs, [:cid, :name, :url, :image, :redirect, :secret, :trusted])
|> validate_required([:cid, :name, :url, :redirect, :secret, :trusted])
|> unique_constraint(:name)
|> unique_constraint(:cid)
end
end
| 37.8 | 547 | 0.751323 |
1cf8b801a12cb6536cc49ed131aa1158a641296d | 2,311 | ex | Elixir | apps/tfcon/lib/tfcon/schemas/user.ex | shtanaka/elixir-transfer-control | 1b325a4c0c5ee10551515d7a440e503ed68d4ddf | [
"MIT"
] | null | null | null | apps/tfcon/lib/tfcon/schemas/user.ex | shtanaka/elixir-transfer-control | 1b325a4c0c5ee10551515d7a440e503ed68d4ddf | [
"MIT"
] | 2 | 2021-03-10T02:00:57.000Z | 2021-05-10T21:22:39.000Z | apps/tfcon/lib/tfcon/schemas/user.ex | shtanaka/elixir-transfer-control | 1b325a4c0c5ee10551515d7a440e503ed68d4ddf | [
"MIT"
] | null | null | null | defmodule Tfcon.Accounts.User do
@moduledoc """
This schema is used for storing the user information.
Only name is required for create a user. However, you won't be able
to authenticate without a password
Account number and balance are auto generated.
fields:
* name - self described
* password - self described
* balance - user balance for transferring and withdraw
* account_number - humanized identification for performing banking operatons
"""
use Ecto.Schema
import Ecto.Query, warn: false
import Ecto.Changeset
alias Tfcon.Repo
alias Tfcon.Utils.DateUtils
@timestamps_opts [autogenerate: {DateUtils, :naive_now, [:naive_datetime]}]
@primary_key {:user_id, :binary_id, autogenerate: true}
schema "users" do
field :name, :string
field :password, :string
field :balance, :float, default: 1000.0
field :account_number, :integer, unique: true
timestamps()
end
@doc false
def changeset(user, attrs) do
user
|> cast(attrs, [:name, :account_number, :balance, :password])
|> put_account_number()
|> unique_constraint(:account_number)
|> validate_required([:name, :account_number])
|> validate_number(:balance, greater_than_or_equal_to: 0.0)
|> put_validate_balance_error_message()
|> validate_length(:password, min: 6)
|> put_password_hash()
end
defp generate_account_number() do
Repo.one(from u in "users", select: count(u.user_id)) + 1
end
defp put_account_number(%Ecto.Changeset{valid?: true, data: %{account_number: nil}} = changeset) do
put_change(changeset, :account_number, generate_account_number())
end
defp put_account_number(changeset), do: changeset
defp put_password_hash(
%Ecto.Changeset{valid?: true, changes: %{password: password}} = changeset
) do
put_change(changeset, :password, Bcrypt.hash_pwd_salt(password))
end
defp put_password_hash(changeset), do: changeset
defp put_validate_balance_error_message(
%Ecto.Changeset{valid?: false, errors: _} = changeset
) do
update_in(
changeset.errors,
&Enum.map(&1, fn
{:balance, _} -> {:balance, "Not enough balance"}
{_key, _error} = tuple -> tuple
end)
)
end
defp put_validate_balance_error_message(changeset), do: changeset
end
| 30.012987 | 101 | 0.701428 |
1cf8bd1bedc3efc744857e202ae27f2f89670ff5 | 608 | exs | Elixir | mix.exs | jschoch/ndecode | d7718f28359068e5b56e2f7e615b39b7bbddebd5 | [
"MIT"
] | 1 | 2016-05-05T14:08:48.000Z | 2016-05-05T14:08:48.000Z | mix.exs | jschoch/ndecode | d7718f28359068e5b56e2f7e615b39b7bbddebd5 | [
"MIT"
] | 1 | 2015-04-09T15:34:10.000Z | 2015-04-09T15:34:10.000Z | mix.exs | jschoch/ndecode | d7718f28359068e5b56e2f7e615b39b7bbddebd5 | [
"MIT"
] | null | null | null | defmodule Ndecode.Mixfile do
use Mix.Project
def project do
[app: :ndecode,
version: "0.0.2",
elixir: "~> 1.0",
deps: deps]
end
# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
def application do
[applications: [:logger]]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type `mix help deps` for more examples and options
defp deps do
[{:poison,"~> 1.4.0"}]
end
end
| 19.612903 | 77 | 0.601974 |
1cf8d34933f6b30bb4b7ca5deb02776db59187b9 | 1,511 | exs | Elixir | test/connection_test.exs | nulian/amqp | 4b22f46f2b7b6b6d98e17ad97629bab91a559dfb | [
"MIT"
] | null | null | null | test/connection_test.exs | nulian/amqp | 4b22f46f2b7b6b6d98e17ad97629bab91a559dfb | [
"MIT"
] | null | null | null | test/connection_test.exs | nulian/amqp | 4b22f46f2b7b6b6d98e17ad97629bab91a559dfb | [
"MIT"
] | null | null | null | defmodule ConnectionTest do
use ExUnit.Case
import AMQP.Core
alias AMQP.Connection
test "open connection with default settings" do
assert {:ok, conn} = Connection.open()
assert :ok = Connection.close(conn)
end
test "open connection with host as binary" do
assert {:ok, conn} = Connection.open(host: "localhost")
assert :ok = Connection.close(conn)
end
test "open connection with host as char list" do
assert {:ok, conn} = Connection.open(host: 'localhost')
assert :ok = Connection.close(conn)
end
test "open connection using uri" do
assert {:ok, conn} = Connection.open("amqp://localhost")
assert :ok = Connection.close(conn)
end
test "open connection using both uri and options" do
assert {:ok, conn} = Connection.open("amqp://nonexistent:5672", host: 'localhost')
assert :ok = Connection.close(conn)
end
test "open connection with uri, name, and options" do
assert {:ok, conn} =
Connection.open("amqp://nonexistent:5672", "my-connection", host: 'localhost')
assert :ok = Connection.close(conn)
end
test "override uri with options" do
uri = "amqp://foo:bar@amqp.test.com:12345"
{:ok, amqp_params} = uri |> String.to_charlist() |> :amqp_uri.parse()
record = Connection.merge_options_to_amqp_params(amqp_params, username: "me")
params = amqp_params_network(record)
assert params[:username] == "me"
assert params[:password] == "bar"
assert params[:host] == 'amqp.test.com'
end
end
| 30.836735 | 91 | 0.677035 |
1cf8ddddffdc670f2650a729085ba4feb269115d | 15,180 | ex | Elixir | clients/compute/lib/google_api/compute/v1/api/region_operations.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/api/region_operations.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/api/region_operations.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Api.RegionOperations do
  @moduledoc """
  API calls for all endpoints tagged `RegionOperations`.
  """

  alias GoogleApi.Compute.V1.Connection
  alias GoogleApi.Gax.{Request, Response}

  # Client library version, reported to the API with every request.
  @library_version Mix.Project.config() |> Keyword.get(:version, "")

  @doc """
  Deletes the specified region-specific Operations resource.
  ## Parameters
  *   `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
  *   `project` (*type:* `String.t`) - Project ID for this request.
  *   `region` (*type:* `String.t`) - Name of the region for this request.
  *   `operation` (*type:* `String.t`) - Name of the Operations resource to delete.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
  *   `opts` (*type:* `keyword()`) - Call options
  ## Returns
  *   `{:ok, %{}}` on success
  *   `{:error, info}` on failure
  """
  @spec compute_region_operations_delete(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) :: {:ok, nil} | {:ok, Tesla.Env.t()} | {:ok, list()} | {:error, any()}
  def compute_region_operations_delete(
        connection,
        project,
        region,
        operation,
        optional_params \\ [],
        opts \\ []
      ) do
    # Query-string parameters accepted by this endpoint.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }

    request =
      Request.new()
      |> Request.method(:delete)
      |> Request.url("/projects/{project}/regions/{region}/operations/{operation}", %{
        "project" => URI.encode(project, &URI.char_unreserved?/1),
        "region" => URI.encode(region, &URI.char_unreserved?/1),
        "operation" => URI.encode(operation, &(URI.char_unreserved?(&1) || &1 == ?/))
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [decode: false])
  end

  @doc """
  Retrieves the specified region-specific Operations resource.
  ## Parameters
  *   `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
  *   `project` (*type:* `String.t`) - Project ID for this request.
  *   `region` (*type:* `String.t`) - Name of the region for this request.
  *   `operation` (*type:* `String.t`) - Name of the Operations resource to return.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
  *   `opts` (*type:* `keyword()`) - Call options
  ## Returns
  *   `{:ok, %GoogleApi.Compute.V1.Model.Operation{}}` on success
  *   `{:error, info}` on failure
  """
  @spec compute_region_operations_get(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.Compute.V1.Model.Operation.t()}
          | {:ok, Tesla.Env.t()}
          | {:ok, list()}
          | {:error, any()}
  def compute_region_operations_get(
        connection,
        project,
        region,
        operation,
        optional_params \\ [],
        opts \\ []
      ) do
    # Query-string parameters accepted by this endpoint.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/projects/{project}/regions/{region}/operations/{operation}", %{
        "project" => URI.encode(project, &URI.char_unreserved?/1),
        "region" => URI.encode(region, &URI.char_unreserved?/1),
        "operation" => URI.encode(operation, &(URI.char_unreserved?(&1) || &1 == ?/))
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Operation{}])
  end

  @doc """
  Retrieves a list of Operation resources contained within the specified region.
  ## Parameters
  *   `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
  *   `project` (*type:* `String.t`) - Project ID for this request.
  *   `region` (*type:* `String.t`) - Name of the region for this request.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
      *   `:filter` (*type:* `String.t`) - A filter expression that filters resources listed in the response. The expression must specify the field name, a comparison operator, and the value that you want to use for filtering. The value must be a string, a number, or a boolean. The comparison operator must be either `=`, `!=`, `>`, or `<`.
          For example, if you are filtering Compute Engine instances, you can exclude instances named `example-instance` by specifying `name != example-instance`.
          You can also filter nested fields. For example, you could specify `scheduling.automaticRestart = false` to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels.
          To filter on multiple expressions, provide each separate expression within parentheses. For example: ``` (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake") ``` By default, each expression is an `AND` expression. However, you can include `AND` and `OR` expressions explicitly. For example: ``` (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true) ```
      *   `:maxResults` (*type:* `integer()`) - The maximum number of results per page that should be returned. If the number of available results is larger than `maxResults`, Compute Engine returns a `nextPageToken` that can be used to get the next page of results in subsequent list requests. Acceptable values are `0` to `500`, inclusive. (Default: `500`)
      *   `:orderBy` (*type:* `String.t`) - Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name.
          You can also sort results in descending order based on the creation timestamp using `orderBy="creationTimestamp desc"`. This sorts results based on the `creationTimestamp` field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first.
          Currently, only sorting by `name` or `creationTimestamp desc` is supported.
      *   `:pageToken` (*type:* `String.t`) - Specifies a page token to use. Set `pageToken` to the `nextPageToken` returned by a previous list request to get the next page of results.
      *   `:returnPartialSuccess` (*type:* `boolean()`) - Opt-in for partial success behavior which provides partial results in case of failure. The default value is false.
  *   `opts` (*type:* `keyword()`) - Call options
  ## Returns
  *   `{:ok, %GoogleApi.Compute.V1.Model.OperationList{}}` on success
  *   `{:error, info}` on failure
  """
  @spec compute_region_operations_list(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.Compute.V1.Model.OperationList.t()}
          | {:ok, Tesla.Env.t()}
          | {:ok, list()}
          | {:error, any()}
  def compute_region_operations_list(
        connection,
        project,
        region,
        optional_params \\ [],
        opts \\ []
      ) do
    # Query-string parameters accepted by this endpoint (includes paging and
    # filtering parameters in addition to the standard ones).
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :filter => :query,
      :maxResults => :query,
      :orderBy => :query,
      :pageToken => :query,
      :returnPartialSuccess => :query
    }

    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/projects/{project}/regions/{region}/operations", %{
        "project" => URI.encode(project, &URI.char_unreserved?/1),
        "region" => URI.encode(region, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.OperationList{}])
  end

  @doc """
  Waits for the specified Operation resource to return as `DONE` or for the request to approach the 2 minute deadline, and retrieves the specified Operation resource. This method differs from the `GET` method in that it waits for no more than the default deadline (2 minutes) and then returns the current state of the operation, which might be `DONE` or still in progress.
  This method is called on a best-effort basis. Specifically:
  - In uncommon cases, when the server is overloaded, the request might return before the default deadline is reached, or might return after zero seconds.
  - If the default deadline is reached, there is no guarantee that the operation is actually done when the method returns. Be prepared to retry if the operation is not `DONE`.
  ## Parameters
  *   `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
  *   `project` (*type:* `String.t`) - Project ID for this request.
  *   `region` (*type:* `String.t`) - Name of the region for this request.
  *   `operation` (*type:* `String.t`) - Name of the Operations resource to return.
  *   `optional_params` (*type:* `keyword()`) - Optional parameters
      *   `:alt` (*type:* `String.t`) - Data format for the response.
      *   `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
      *   `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
      *   `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
      *   `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
      *   `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
      *   `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
  *   `opts` (*type:* `keyword()`) - Call options
  ## Returns
  *   `{:ok, %GoogleApi.Compute.V1.Model.Operation{}}` on success
  *   `{:error, info}` on failure
  """
  @spec compute_region_operations_wait(
          Tesla.Env.client(),
          String.t(),
          String.t(),
          String.t(),
          keyword(),
          keyword()
        ) ::
          {:ok, GoogleApi.Compute.V1.Model.Operation.t()}
          | {:ok, Tesla.Env.t()}
          | {:ok, list()}
          | {:error, any()}
  def compute_region_operations_wait(
        connection,
        project,
        region,
        operation,
        optional_params \\ [],
        opts \\ []
      ) do
    # Query-string parameters accepted by this endpoint.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query
    }

    request =
      Request.new()
      |> Request.method(:post)
      |> Request.url("/projects/{project}/regions/{region}/operations/{operation}/wait", %{
        "project" => URI.encode(project, &URI.char_unreserved?/1),
        "region" => URI.encode(region, &URI.char_unreserved?/1),
        "operation" => URI.encode(operation, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.Operation{}])
  end
end
| 46.851852 | 434 | 0.637154 |
1cf8ed64b1f21902f808ef4a0409f8bff5cf41ac | 1,185 | ex | Elixir | Chapter9/dynamic_workers/lib/todo/database.ex | benjamindburke/elixir-studies | 65231b5af83dcf701041cae2879107c3bd3e5078 | [
"Unlicense"
] | null | null | null | Chapter9/dynamic_workers/lib/todo/database.ex | benjamindburke/elixir-studies | 65231b5af83dcf701041cae2879107c3bd3e5078 | [
"Unlicense"
] | null | null | null | Chapter9/dynamic_workers/lib/todo/database.ex | benjamindburke/elixir-studies | 65231b5af83dcf701041cae2879107c3bd3e5078 | [
"Unlicense"
] | null | null | null | # Todo Database [supervisor]
# This module supervises the creation of database workers
defmodule Todo.Database do
  @moduledoc """
  Supervisor that owns a fixed pool of database workers and routes
  store/get requests to a worker chosen by hashing the key.
  """

  @db_folder "./persist"
  @pool_size 3

  ## Interface functions

  @spec store(charlist, any) :: :ok
  def store(key, data) do
    Todo.DatabaseWorker.store(choose_worker(key), key, data)
  end

  @spec get(charlist) :: any
  def get(key) do
    Todo.DatabaseWorker.get(choose_worker(key), key)
  end

  ## Supervisor hook functions

  def start_link do
    # Ensure the persistence directory exists before any worker starts.
    File.mkdir_p!(@db_folder)

    1..@pool_size
    |> Enum.map(&worker_spec/1)
    |> Supervisor.start_link(strategy: :one_for_one)
  end

  # Child spec for one pool worker; the worker id disambiguates the
  # otherwise-identical Todo.DatabaseWorker children.
  defp worker_spec(worker_id) do
    Supervisor.child_spec({Todo.DatabaseWorker, {@db_folder, worker_id}}, id: worker_id)
  end

  def child_spec(_arg) do
    %{
      id: __MODULE__,
      start: {__MODULE__, :start_link, []},
      type: :supervisor
    }
  end

  ## Helper functions

  # Deterministically map a key onto a worker id in 1..@pool_size.
  @spec choose_worker(charlist) :: integer
  defp choose_worker(key), do: :erlang.phash2(key, @pool_size) + 1
end
| 20.431034 | 72 | 0.629536 |
1cf8ef79fa2b61fa27fc8827c8b20875427d664b | 3,607 | ex | Elixir | apps/dockup_ui/lib/dockup_ui/callback_protocol.ex | sleekr/dockup | 11d068879253af655ebabbd3e39cb21400c8e283 | [
"MIT"
] | 1 | 2019-08-20T07:45:10.000Z | 2019-08-20T07:45:10.000Z | apps/dockup_ui/lib/dockup_ui/callback_protocol.ex | sleekr/dockup | 11d068879253af655ebabbd3e39cb21400c8e283 | [
"MIT"
] | null | null | null | apps/dockup_ui/lib/dockup_ui/callback_protocol.ex | sleekr/dockup | 11d068879253af655ebabbd3e39cb21400c8e283 | [
"MIT"
] | 2 | 2019-03-08T10:51:34.000Z | 2019-08-20T07:45:13.000Z | defprotocol DockupUi.CallbackProtocol do
  # All deployments are created with this initial status. Payload is nil
  def queued(callback_data, deployment, payload)

  # Called when deployment is picked up from the queue and handed off for deployment
  def processing(callback_data, deployment, payload)

  # Called before cloning git repo. Payload is nil
  def cloning_repo(callback_data, deployment, payload)

  # Called when docker containers are being started. Payload is a string
  # containing the log URL. Format: "/deployment_logs/#?projectName=<project id>"
  def starting(callback_data, deployment, payload)

  # Called when docker containers are started and service URLs are assigned
  # and tested for HTTP response status of 200
  def checking_urls(callback_data, deployment, payload)

  # Called when docker containers are started and service URLs
  # are assigned. Payload is a map of the format:
  # %{"service_name" => [{"container_port", "url"}, ...], ...}
  def started(callback_data, deployment, payload)

  # Called when deployment fails. Payload is a string containing
  # the reason for failure
  def deployment_failed(callback_data, deployment, payload)

  # Lifecycle events for hibernating and waking up deployments
  def hibernating_deployment(callback_data, deployment, payload)
  def deployment_hibernated(callback_data, deployment, payload)
  def waking_up_deployment(callback_data, deployment, payload)

  # Lifecycle events for deleting deployments, including the failure case
  def deleting_deployment(callback_data, deployment, payload)
  def deployment_deleted(callback_data, deployment, payload)
  def delete_deployment_failed(callback_data, deployment, payload)

  # CallbackProtocol implementors may implement a function named
  # common_callback(data, deployment, payload) if multiple events are to
  # be handled in the same way
end
defmodule DockupUi.CallbackProtocol.Defaults do
  # Default implementations for every DockupUi.CallbackProtocol event.
  # Each generated function simply delegates to `common_callback/3`, which
  # is a no-op (`:ok`) unless overridden. All functions are overridable, so
  # a using module can customize individual events or the common handler.
  defmacro __using__(_opts) do
    quote do
      def queued(cb, deployment, payload), do: common_callback(cb, deployment, payload)
      def processing(cb, deployment, payload), do: common_callback(cb, deployment, payload)
      def cloning_repo(cb, deployment, payload), do: common_callback(cb, deployment, payload)
      def starting(cb, deployment, payload), do: common_callback(cb, deployment, payload)
      def checking_urls(cb, deployment, payload), do: common_callback(cb, deployment, payload)
      def started(cb, deployment, payload), do: common_callback(cb, deployment, payload)
      def deployment_failed(cb, deployment, payload), do: common_callback(cb, deployment, payload)
      def hibernating_deployment(cb, deployment, payload), do: common_callback(cb, deployment, payload)
      def deployment_hibernated(cb, deployment, payload), do: common_callback(cb, deployment, payload)
      def waking_up_deployment(cb, deployment, payload), do: common_callback(cb, deployment, payload)
      def deleting_deployment(cb, deployment, payload), do: common_callback(cb, deployment, payload)
      def deployment_deleted(cb, deployment, payload), do: common_callback(cb, deployment, payload)
      def delete_deployment_failed(cb, deployment, payload), do: common_callback(cb, deployment, payload)

      # Catch-all handler; override this to handle several events uniformly.
      def common_callback(_cb, _deployment, _payload), do: :ok

      defoverridable queued: 3,
                     processing: 3,
                     cloning_repo: 3,
                     starting: 3,
                     checking_urls: 3,
                     started: 3,
                     deployment_failed: 3,
                     hibernating_deployment: 3,
                     deployment_hibernated: 3,
                     waking_up_deployment: 3,
                     deleting_deployment: 3,
                     deployment_deleted: 3,
                     delete_deployment_failed: 3,
                     common_callback: 3
    end
  end
end
| 52.275362 | 109 | 0.764624 |
1cf8ff3c16434c257faf83d5db57056dc2e1f73a | 44 | exs | Elixir | test/cli_test.exs | samullen/d | 1f3168ee8bd9fef4402edb528a394943ee31e3ab | [
"Unlicense"
] | null | null | null | test/cli_test.exs | samullen/d | 1f3168ee8bd9fef4402edb528a394943ee31e3ab | [
"Unlicense"
] | null | null | null | test/cli_test.exs | samullen/d | 1f3168ee8bd9fef4402edb528a394943ee31e3ab | [
"Unlicense"
] | null | null | null | defmodule CliTest do
use ExUnit.Case
end
| 8.8 | 20 | 0.772727 |
1cf902d54a7e016a91e6bc501eb74cb849dd9f7d | 1,467 | exs | Elixir | config/config.exs | exredorg/exred | 0ece8e6680747ba8f30b4413ede598a45495aa7c | [
"MIT"
] | null | null | null | config/config.exs | exredorg/exred | 0ece8e6680747ba8f30b4413ede598a45495aa7c | [
"MIT"
] | null | null | null | config/config.exs | exredorg/exred | 0ece8e6680747ba8f30b4413ede598a45495aa7c | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config

# By default, the umbrella project as well as each child
# application will require this configuration file, ensuring
# they all use the same configuration. While one could
# configure all applications here, we prefer to delegate
# back to each application for organization purposes.
import_config "../apps/*/config/config.exs"

# config :logger, :console,
#   level: :debug,
#   format: "$date $time | [$level]$levelpad | $metadata | $message\n",
#   metadata: [:module]

# db config for Phoenix
# the dbproxy in exred_library also uses this if it exists
# config :exred_ui, ExredUI.Repo,
#   adapter: Ecto.Adapters.Postgres,
#   username: "exred_user",
#   password: "hello",
#   database: "exred_ui_dev",
#   hostname: "localhost_main_for_repo",
#   pool_size: 10

# config :exred_library, :psql_conn,
#   username: "exred_user",
#   password: "hello",
#   database: "exred_ui_dev",
#   hostname: "localhost_main",
#   port: 5432

# ssl options for the MQTT client
# these get passed to the Erlang ssl module
# see ssl_option() here: http://erlang.org/doc/man/ssl.html
# NOTE(review): these certificate paths are absolute and user-specific
# ("/Users/zkeszthelyi/..."); presumably they should come from the
# environment or a deploy-time config — TODO confirm.
config :exred_node_aws_iot_daemon, :ssl,
  keyfile: "/Users/zkeszthelyi/exred_data/c592a5868f-private.pem.key",
  certfile: "/Users/zkeszthelyi/exred_data/c592a5868f-certificate.pem.crt",
  cacertfile: "/Users/zkeszthelyi/exred_data/symantec_ca_root.pem"
| 34.116279 | 75 | 0.738923 |
1cf908507ec33bd43d0e136e90225a9f411eff53 | 260 | ex | Elixir | test/support/test_setup/shipment_unit.ex | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | [
"Apache-2.0"
] | 356 | 2016-03-16T12:37:28.000Z | 2021-12-18T03:22:39.000Z | test/support/test_setup/shipment_unit.ex | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | [
"Apache-2.0"
] | 30 | 2016-03-16T09:19:10.000Z | 2021-01-12T08:10:52.000Z | test/support/test_setup/shipment_unit.ex | harry-gao/ex-cart | 573e7f977bb3b710d11618dd215d4ddd8f819fb3 | [
"Apache-2.0"
] | 72 | 2016-03-16T13:32:14.000Z | 2021-03-23T11:27:43.000Z | defmodule Nectar.TestSetup.ShipmentUnit do
  def create_shipment_units do
    # Build a cart fixture, then run the workflow that derives shipment
    # units from it. The workflow is expected to succeed; the match on
    # {:ok, ...} asserts that.
    cart = Nectar.TestSetup.Order.setup_cart

    {:ok, %{shipment_units: shipment_units}} =
      Nectar.Workflow.CreateShipmentUnits.run(Nectar.Repo, cart)

    shipment_units
  end
end
| 28.888889 | 64 | 0.761538 |
1cf90dedd67edf3480b1616da39a7ac3e29fdf36 | 2,336 | ex | Elixir | lib/open890_web.ex | VE2guy/open890 | 9206ad51d0ee036020f854725e4e1023088fda4e | [
"MIT"
] | 45 | 2021-01-24T01:54:04.000Z | 2022-02-28T03:28:13.000Z | lib/open890_web.ex | VE2guy/open890 | 9206ad51d0ee036020f854725e4e1023088fda4e | [
"MIT"
] | 57 | 2021-01-28T19:18:33.000Z | 2022-01-23T17:31:50.000Z | lib/open890_web.ex | VE2guy/open890 | 9206ad51d0ee036020f854725e4e1023088fda4e | [
"MIT"
] | 5 | 2021-01-27T02:29:42.000Z | 2022-01-15T14:03:24.000Z | defmodule Open890Web do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use Open890Web, :controller
use Open890Web, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
require Logger
use Phoenix.Controller, namespace: Open890Web
import Plug.Conn
import Open890Web.Gettext
alias Open890Web.Router.Helpers, as: Routes
end
end
def view do
quote do
use Phoenix.View,
root: "lib/open890_web/templates",
namespace: Open890Web
# Import convenience functions from controllers
import Phoenix.Controller,
only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]
# Include shared imports and aliases for views
unquote(view_helpers())
end
end
def live_view do
quote do
use Phoenix.LiveView,
layout: {Open890Web.LayoutView, "live.html"}
unquote(view_helpers())
end
end
def live_component do
quote do
use Phoenix.LiveComponent
unquote(view_helpers())
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
import Phoenix.LiveView.Router
end
end
def channel do
quote do
use Phoenix.Channel
import Open890Web.Gettext
end
end
defp view_helpers do
quote do
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import Phoenix.LiveView.Helpers
# Import basic rendering functionality (render, render_layout, etc)
import Phoenix.View
import Open890Web.ErrorHelpers
import Open890Web.Gettext
alias Open890Web.Router.Helpers, as: Routes
import Open890Web.RadioViewHelpers
import Phoenix.LiveView
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 21.831776 | 76 | 0.680651 |
1cf91e6aa2ac316bae9853cfc3498e0fad3b74ce | 6,765 | exs | Elixir | test/lib/bamboo/adapters/mandrill_adapter_test.exs | davidlibrera/bamboo | 57d6f7627df4370ea243688dbc626937ca3b78c1 | [
"MIT"
] | null | null | null | test/lib/bamboo/adapters/mandrill_adapter_test.exs | davidlibrera/bamboo | 57d6f7627df4370ea243688dbc626937ca3b78c1 | [
"MIT"
] | null | null | null | test/lib/bamboo/adapters/mandrill_adapter_test.exs | davidlibrera/bamboo | 57d6f7627df4370ea243688dbc626937ca3b78c1 | [
"MIT"
] | 1 | 2021-03-29T17:43:54.000Z | 2021-03-29T17:43:54.000Z | defmodule Bamboo.MandrillAdapterTest do
use ExUnit.Case
alias Bamboo.Email
alias Bamboo.MandrillHelper
alias Bamboo.MandrillAdapter
@config %{adapter: MandrillAdapter, api_key: "123_abc"}
@config_with_bad_key %{adapter: MandrillAdapter, api_key: nil}
  # In-process HTTP stub standing in for the real Mandrill API. Requests are
  # parsed, answered with a canned response, and forwarded to the test
  # process (stored in an Agent) so tests can assert on them.
  defmodule FakeMandrill do
    use Plug.Router

    plug(
      Plug.Parsers,
      parsers: [:urlencoded, :multipart, :json],
      pass: ["*/*"],
      json_decoder: Jason
    )

    plug(:match)
    plug(:dispatch)

    # Boots the stub server on a free port, remembers `parent` (the test
    # process) in an Agent, and points Bamboo's mandrill_base_uri at it.
    def start_server(parent) do
      Agent.start_link(fn -> Map.new() end, name: __MODULE__)
      Agent.update(__MODULE__, &Map.put(&1, :parent, parent))
      port = get_free_port()
      Application.put_env(:bamboo, :mandrill_base_uri, "http://localhost:#{port}")
      Plug.Adapters.Cowboy.http(__MODULE__, [], port: port, ref: __MODULE__)
    end

    # Asks the OS for an ephemeral port by listening on port 0, then closes
    # the socket and returns the assigned port number.
    defp get_free_port do
      {:ok, socket} = :ranch_tcp.listen(port: 0)
      {:ok, port} = :inet.port(socket)
      :erlang.port_close(socket)
      port
    end

    def shutdown do
      Plug.Adapters.Cowboy.shutdown(__MODULE__)
    end

    # Responds 500 when the sender is the magic "INVALID_EMAIL" value,
    # 200 otherwise; either way the conn is mirrored to the test process.
    post "/api/1.0/messages/send.json" do
      case get_in(conn.params, ["message", "from_email"]) do
        "INVALID_EMAIL" -> conn |> send_resp(500, "Error!!") |> send_to_parent
        _ -> conn |> send_resp(200, "SENT") |> send_to_parent
      end
    end

    post "/api/1.0/messages/send-template.json" do
      case get_in(conn.params, ["message", "from_email"]) do
        "INVALID_EMAIL" -> conn |> send_resp(500, "Error!!") |> send_to_parent
        _ -> conn |> send_resp(200, "SENT") |> send_to_parent
      end
    end

    # Sends the request conn to the test process registered in start_server/1.
    defp send_to_parent(conn) do
      parent = Agent.get(__MODULE__, fn set -> Map.get(set, :parent) end)
      send(parent, {:fake_mandrill, conn})
      conn
    end
  end
  # Start a fresh fake Mandrill server for every test and shut it down
  # when the test finishes.
  setup do
    FakeMandrill.start_server(self())

    on_exit(fn ->
      FakeMandrill.shutdown()
    end)

    :ok
  end
  test "raises if the api key is nil" do
    # Both deliver/2 and handle_config/1 must reject a missing API key.
    assert_raise ArgumentError, ~r/no API key set/, fn ->
      new_email(from: "foo@bar.com") |> MandrillAdapter.deliver(@config_with_bad_key)
    end

    assert_raise ArgumentError, ~r/no API key set/, fn ->
      MandrillAdapter.handle_config(%{})
    end
  end

  test "deliver/2 sends the to the right url" do
    new_email() |> MandrillAdapter.deliver(@config)

    # The fake server mirrors the request back to this process.
    assert_receive {:fake_mandrill, %{request_path: request_path}}
    assert request_path == "/api/1.0/messages/send.json"
  end

  test "deliver/2 returns an {:ok, response} tuple on success" do
    {:ok, response} = new_email() |> MandrillAdapter.deliver(@config)
    assert %{status_code: 200, headers: _, body: _} = response
  end

  test "deliver/2 sends the to the right url for templates" do
    # Template emails go to the send-template endpoint instead of plain send.
    new_email() |> MandrillHelper.template("hello") |> MandrillAdapter.deliver(@config)

    assert_receive {:fake_mandrill, %{request_path: request_path}}
    assert request_path == "/api/1.0/messages/send-template.json"
  end

  test "deliver/2 sends from, html and text body, subject, headers and attachment" do
    email =
      new_email(
        from: {"From", "from@foo.com"},
        subject: "My Subject",
        text_body: "TEXT BODY",
        html_body: "HTML BODY"
      )
      |> Email.put_header("Reply-To", "reply@foo.com")
      |> Email.put_attachment(Path.join(__DIR__, "../../../support/attachment.txt"))

    email |> MandrillAdapter.deliver(@config)

    assert MandrillAdapter.supports_attachments?(@config)
    assert_receive {:fake_mandrill, %{params: params}}
    assert params["key"] == @config[:api_key]
    message = params["message"]
    assert message["from_name"] == email.from |> elem(0)
    assert message["from_email"] == email.from |> elem(1)
    assert message["subject"] == email.subject
    assert message["text"] == email.text_body
    assert message["html"] == email.html_body
    assert message["headers"] == email.headers

    # Attachment content is base64-encoded by the adapter.
    assert message["attachments"] == [
             %{
               "type" => "text/plain",
               "name" => "attachment.txt",
               "content" => "VGVzdCBBdHRhY2htZW50Cg=="
             }
           ]
  end
  test "deliver/2 correctly formats recipients" do
    email =
      new_email(
        to: [{"To", "to@bar.com"}],
        cc: [{"CC", "cc@bar.com"}],
        bcc: [{"BCC", "bcc@bar.com"}]
      )

    email |> MandrillAdapter.deliver(@config)

    assert_receive {:fake_mandrill, %{params: %{"message" => message}}}

    # to/cc/bcc are flattened into one "to" list tagged by "type".
    assert message["to"] == [
             %{"name" => "To", "email" => "to@bar.com", "type" => "to"},
             %{"name" => "CC", "email" => "cc@bar.com", "type" => "cc"},
             %{"name" => "BCC", "email" => "bcc@bar.com", "type" => "bcc"}
           ]
  end

  test "deliver/2 adds extra params to the message " do
    email = new_email() |> MandrillHelper.put_param("important", true)
    email |> MandrillAdapter.deliver(@config)
    assert_receive {:fake_mandrill, %{params: %{"message" => message}}}
    assert message["important"] == true
  end

  test "deliver/2 puts template name and empty content" do
    # Template with no variables: template_content must be an empty list.
    email = new_email() |> MandrillHelper.template("hello")
    email |> MandrillAdapter.deliver(@config)

    assert_receive {:fake_mandrill,
                    %{
                      params: %{
                        "template_name" => template_name,
                        "template_content" => template_content
                      }
                    }}

    assert template_name == "hello"
    assert template_content == []
  end

  test "deliver/2 puts template name and content" do
    email =
      new_email()
      |> MandrillHelper.template("hello", [
        %{name: 'example name', content: 'example content'}
      ])

    email |> MandrillAdapter.deliver(@config)

    assert_receive {:fake_mandrill,
                    %{
                      params: %{
                        "template_name" => template_name,
                        "template_content" => template_content
                      }
                    }}

    assert template_name == "hello"
    assert template_content == [%{"content" => 'example content', "name" => 'example name'}]
  end

  test "returns an error if the response is not a success" do
    # "INVALID_EMAIL" triggers the fake server's 500 response.
    email = new_email(from: "INVALID_EMAIL")

    {:error, error} = email |> MandrillAdapter.deliver(@config)

    assert %Bamboo.ApiError{} = error
  end

  test "removes api key from error output" do
    email = new_email(from: "INVALID_EMAIL")

    {:error, error} = email |> MandrillAdapter.deliver(@config)

    # The API key must be redacted from the error message.
    assert error.message =~ ~r/"key" => "\[FILTERED\]"/
  end
  # Builds a minimal Bamboo email for tests; callers may override any field.
  defp new_email(attrs \\ []) do
    attrs = Keyword.merge([from: "foo@bar.com", to: []], attrs)
    Email.new_email(attrs) |> Bamboo.Mailer.normalize_addresses()
  end
end
| 30.066667 | 92 | 0.602365 |
1cf92fbb10593b4908106828faa9038fa1d593af | 672 | exs | Elixir | episode15/todo_list/mix.exs | paulfioravanti/learn_elixir | 8424b1a7a89cb9fd1dacb85bcca487601958b8fa | [
"MIT"
] | null | null | null | episode15/todo_list/mix.exs | paulfioravanti/learn_elixir | 8424b1a7a89cb9fd1dacb85bcca487601958b8fa | [
"MIT"
] | null | null | null | episode15/todo_list/mix.exs | paulfioravanti/learn_elixir | 8424b1a7a89cb9fd1dacb85bcca487601958b8fa | [
"MIT"
] | null | null | null | defmodule TodoList.Mixfile do
use Mix.Project
def project do
[app: :todo_list,
version: "0.1.0",
elixir: "~> 1.3",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps()]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
  def application do
    # Start the :logger application alongside this one.
    [applications: [:logger]]
  end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[]
end
end
| 20.363636 | 77 | 0.608631 |
1cf93ab61ca7a7b541cf2c6ddbe7d41040213799 | 2,428 | ex | Elixir | lib/ex_doc/markdown/earmark.ex | henrik/ex_doc | f09796782121f935e70cc51405d4773130fd70ca | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | lib/ex_doc/markdown/earmark.ex | henrik/ex_doc | f09796782121f935e70cc51405d4773130fd70ca | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | lib/ex_doc/markdown/earmark.ex | henrik/ex_doc | f09796782121f935e70cc51405d4773130fd70ca | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | defmodule ExDoc.Markdown.Earmark do
@moduledoc """
ExDoc extension for the Earmark MarkDown parser.
"""
@behaviour ExDoc.Markdown
@doc """
Check if the Earmark Markdown parser module is available.
"""
def available? do
match?({:ok, _}, Application.ensure_all_started(:earmark)) and Code.ensure_loaded?(Earmark)
end
@doc """
Generate HTML AST.
## Options
* `:gfm` - boolean. Turns on Github Flavored Markdown extensions. True by default
* `:breaks` - boolean. Only applicable if `gfm` is enabled. Makes all line
breaks significant (so every line in the input is a new line in the output)
* `:smartypants` - boolean. Turns on smartypants processing, so quotes become curly,
two or three hyphens become en and em dashes, and so on. False by default
"""
@impl true
def to_ast(text, opts) do
options =
struct(Earmark.Options,
gfm: Keyword.get(opts, :gfm, true),
line: Keyword.get(opts, :line, 1),
file: Keyword.get(opts, :file, "nofile"),
breaks: Keyword.get(opts, :breaks, false),
smartypants: Keyword.get(opts, :smartypants, false),
pure_links: true
)
case Earmark.as_ast(text, options) do
{:ok, ast, messages} ->
print_messages(messages, options)
fixup(ast)
{:error, ast, messages} ->
print_messages(messages, options)
ast
end
end
defp print_messages(messages, options) do
for {severity, line, message} <- messages do
file = options.file
IO.warn("#{inspect(__MODULE__)} (#{severity}) #{file}:#{line} #{message}", [])
end
end
defp fixup(list) when is_list(list) do
fixup_list(list, [])
end
defp fixup(binary) when is_binary(binary) do
binary
end
defp fixup({tag, attrs, ast}) when is_binary(tag) do
{fixup_tag(tag), Enum.map(attrs, &fixup_attr/1), fixup(ast)}
end
defp fixup({tag, attrs, ast, _meta}) when is_binary(tag) do
fixup({tag, attrs, ast})
end
# E.g. `{:comment, _}`
defp fixup(_) do
[]
end
defp fixup_list([head | tail], acc) do
fixed = fixup(head)
if fixed == [] do
fixup_list(tail, acc)
else
fixup_list(tail, [fixed | acc])
end
end
defp fixup_list([], acc) do
Enum.reverse(acc)
end
defp fixup_tag(tag) do
String.to_atom(tag)
end
defp fixup_attr({name, value}) do
{String.to_atom(name), value}
end
end
| 24.039604 | 95 | 0.631796 |
1cf93df33eb17405622b26d4795bf8e6e24f89ca | 4,447 | ex | Elixir | apps/admin_api/lib/admin_api/v1/controllers/transaction_request_controller.ex | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | apps/admin_api/lib/admin_api/v1/controllers/transaction_request_controller.ex | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | apps/admin_api/lib/admin_api/v1/controllers/transaction_request_controller.ex | vanmil/ewallet | 6c1aca95a83e0a9d93007670a40d8c45764a8122 | [
"Apache-2.0"
] | null | null | null | defmodule AdminAPI.V1.TransactionRequestController do
use AdminAPI, :controller
import AdminAPI.V1.ErrorHandler
alias AdminAPI.V1.AccountHelper
alias EWallet.TransactionRequestPolicy
alias EWallet.Web.{SearchParser, SortParser, Paginator, Preloader}
alias EWallet.{
TransactionRequestGate,
TransactionRequestFetcher
}
alias EWalletDB.{TransactionRequest, Account}
  # External field names mapped onto schema fields for sorting.
  @mapped_fields %{"created_at" => "inserted_at"}
  # Associations preloaded on every listed transaction request.
  @preload_fields [:user, :account, :token, :wallet]
  # Fields exposed to search and sort query parameters.
  @search_fields [:id, :status, :type, :correlation_id, :expiration_reason]
  @sort_fields [:id, :status, :type, :correlation_id, :inserted_at, :expired_at]

  # Lists transaction requests across every account (and its linked users)
  # the caller is allowed to access.
  @spec all(Plug.Conn.t(), map) :: Plug.Conn.t()
  def all(conn, attrs) do
    with :ok <- permit(:all, conn.assigns, nil),
         account_uuids <- AccountHelper.get_accessible_account_uuids(conn.assigns),
         linked_user_uuids <-
           account_uuids |> Account.get_all_users() |> Enum.map(fn user -> user.uuid end) do
      account_uuids
      |> TransactionRequest.query_all_for_account_and_user_uuids(linked_user_uuids)
      |> do_all(attrs, conn)
    else
      error -> respond(error, conn)
    end
  end
  @spec all_for_account(Plug.Conn.t(), map()) :: Plug.Conn.t()
  # "owned" variant: only requests belonging to this exact account
  # (and its linked users), excluding descendant accounts.
  def all_for_account(conn, %{"id" => account_id, "owned" => true} = attrs) do
    with %Account{} = account <- Account.get(account_id) || {:error, :unauthorized},
         :ok <- permit(:all, conn.assigns, account),
         linked_user_uuids <-
           [account.uuid] |> Account.get_all_users() |> Enum.map(fn user -> user.uuid end) do
      [account.uuid]
      |> TransactionRequest.query_all_for_account_and_user_uuids(linked_user_uuids)
      |> do_all(attrs, conn)
    else
      error -> respond(error, conn)
    end
  end

  # Default variant: includes the account's descendants and their users.
  def all_for_account(conn, %{"id" => account_id} = attrs) do
    with %Account{} = account <- Account.get(account_id) || {:error, :unauthorized},
         :ok <- permit(:all, conn.assigns, account),
         descendant_uuids <- Account.get_all_descendants_uuids(account),
         linked_user_uuids <-
           descendant_uuids |> Account.get_all_users() |> Enum.map(fn user -> user.uuid end) do
      descendant_uuids
      |> TransactionRequest.query_all_for_account_and_user_uuids(linked_user_uuids)
      |> do_all(attrs, conn)
    else
      error -> respond(error, conn)
    end
  end

  # Missing "id" parameter is a client error.
  def all_for_account(conn, _) do
    handle_error(conn, :invalid_parameter, "Parameter 'id' is required.")
  end
  # Applies preloading, search, sort and pagination to the base query, then
  # renders the paginated result.
  @spec do_all(Ecto.Queryable.t(), map(), Plug.Conn.t()) :: Plug.Conn.t()
  defp do_all(query, attrs, conn) do
    query
    |> Preloader.to_query(@preload_fields)
    |> SearchParser.to_query(attrs, @search_fields)
    |> SortParser.to_query(attrs, @sort_fields, @mapped_fields)
    |> Paginator.paginate_attrs(attrs)
    |> respond_multiple(conn)
  end

  # Fetches a single transaction request by its formatted id.
  @spec get(Plug.Conn.t(), map) :: Plug.Conn.t()
  def get(conn, %{"formatted_id" => formatted_id}) do
    with {:ok, request} <- TransactionRequestFetcher.get(formatted_id),
         :ok <- permit(:get, conn.assigns, request) do
      respond({:ok, request}, conn)
    else
      # A missing request is reported as :unauthorized to avoid leaking
      # whether the id exists.
      {:error, :transaction_request_not_found} ->
        respond({:error, :unauthorized}, conn)

      error ->
        respond(error, conn)
    end
  end

  # Creates a transaction request, recording the caller as its creator.
  @spec create(Plug.Conn.t(), map()) :: Plug.Conn.t()
  def create(conn, attrs) do
    attrs
    |> Map.put("creator", conn.assigns)
    |> TransactionRequestGate.create()
    |> respond(conn)
  end
  # Respond with a list of transaction requests
  defp respond_multiple(%Paginator{} = paged_transaction_requests, conn) do
    render(conn, :transaction_requests, %{transaction_requests: paged_transaction_requests})
  end

  defp respond_multiple({:error, code, description}, conn) do
    handle_error(conn, code, description)
  end

  # Atom errors map straight onto known error codes.
  defp respond({:error, error}, conn) when is_atom(error), do: handle_error(conn, error)

  # Anything else in an error tuple is treated as a changeset.
  defp respond({:error, changeset}, conn) do
    handle_error(conn, :invalid_parameter, changeset)
  end

  defp respond({:ok, request}, conn) do
    render(conn, :transaction_request, %{
      transaction_request: request
    })
  end

  # Delegates the authorization decision to TransactionRequestPolicy.
  @spec permit(
          :all | :create | :get | :update,
          map(),
          String.t() | %Account{} | %TransactionRequest{} | nil
        ) :: :ok | {:error, any()} | no_return()
  defp permit(action, params, request) do
    Bodyguard.permit(TransactionRequestPolicy, action, params, request)
  end
end
| 34.742188 | 95 | 0.67034 |
1cf97c8982f5dd89368b055ca2ab7dcbb4c2f9b0 | 2,075 | exs | Elixir | mix.exs | cyberhck/guardian_db | 57bc0fecaf26e4c7d63e5fa1d18a7d527ba73bff | [
"MIT"
] | 3 | 2021-04-22T03:44:56.000Z | 2021-11-15T11:13:53.000Z | mix.exs | aleDsz/guardian_db | 72b2799a0f97e55e7f1efb1c807f5bc9435e656d | [
"MIT"
] | null | null | null | mix.exs | aleDsz/guardian_db | 72b2799a0f97e55e7f1efb1c807f5bc9435e656d | [
"MIT"
] | null | null | null | defmodule Guardian.DB.Mixfile do
use Mix.Project
  @version "2.1.0"
  @source_url "https://github.com/ueberauth/guardian_db"

  # Mix project definition, including coverage tooling configuration.
  def project do
    [
      name: "Guardian.DB",
      app: :guardian_db,
      version: @version,
      description: "DB tracking for token validity",
      elixir: "~> 1.4 or ~> 1.5",
      elixirc_paths: elixirc_paths(Mix.env()),
      package: package(),
      docs: docs(),
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      deps: deps(),
      test_coverage: [tool: ExCoveralls],
      preferred_cli_env: [
        coveralls: :test,
        "coveralls.html": :test,
        "coveralls.json": :test
      ]
    ]
  end

  def application do
    [extra_applications: [:logger]]
  end

  # Test builds also compile the support helpers.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  defp deps do
    [
      {:guardian, "~> 1.0 or ~> 2.0"},
      {:ecto, "~> 3.0"},
      {:ecto_sql, "~> 3.1"},
      {:postgrex, "~> 0.13", optional: true},

      # Tools
      {:dialyxir, ">= 0.0.0", only: [:dev], runtime: false},
      {:credo, ">= 0.0.0", only: [:dev, :test], runtime: false},
      {:excoveralls, ">= 0.0.0", only: [:test], runtime: false},
      {:ex_doc, ">= 0.0.0", only: [:dev], runtime: false},
      {:inch_ex, ">= 0.0.0", only: [:dev], runtime: false}
    ]
  end
  # Hex package metadata.
  defp package do
    [
      maintainers: ["Daniel Neighman", "Sean Callan", "Sonny Scroggin"],
      licenses: ["MIT"],
      links: %{GitHub: @source_url},
      files: [
        "lib",
        "CHANGELOG.md",
        "LICENSE",
        "mix.exs",
        "README.md",
        "priv/templates"
      ]
    ]
  end

  # ExDoc configuration; the README is the documentation landing page.
  defp docs do
    [
      main: "readme",
      homepage_url: @source_url,
      source_ref: "v#{@version}",
      source_url: @source_url,
      extras: ["README.md"]
    ]
  end

  # `mix test` recreates and migrates the database before running the suite.
  defp aliases do
    [
      test: [
        "ecto.drop --quiet",
        "ecto.create --quiet",
        "guardian.db.gen.migration",
        "ecto.migrate",
        "test"
      ]
    ]
  end
end
| 22.802198 | 72 | 0.519518 |
1cf996cc027344622533e2e84730ff8e8ecdf5be | 1,373 | ex | Elixir | lib/examples/static_data.ex | QuinnWilton/wormwood | 320bec700cb454e81f953bb8a13debadff2a70ad | [
"MIT"
] | null | null | null | lib/examples/static_data.ex | QuinnWilton/wormwood | 320bec700cb454e81f953bb8a13debadff2a70ad | [
"MIT"
] | null | null | null | lib/examples/static_data.ex | QuinnWilton/wormwood | 320bec700cb454e81f953bb8a13debadff2a70ad | [
"MIT"
] | null | null | null | defmodule Wormwood.Examples.StaticData do
@moduledoc false
def users do
[
%{:id => 1, :name => "Foilz", :email => "ilovesecurity@please-dont-email-this.really"},
%{:id => 2, :name => "Batman", :email => "i@ambat.man"},
%{:id => 3, :name => "Vivi", :email => "blackmages@zidane.beep"},
%{:id => 4, :name => "Parrot", :email => "bird@caw.squak"},
%{:id => 5, :name => "Foilz", :email => "ilovesecurity@please-dont-email-this.really"}
]
end
def messages do
[
%{:id => 1, :message => "Christmas is coming.", :from => 1},
%{:id => 2, :message => "The quick brown fox jumps over the lazy dog.", :from => 1},
%{:id => 3, :message => "Joe made the sugar cookies; Susan decorated them.", :from => 2},
%{:id => 4, :message => "I am never at home on Sundays.", :from => 4},
%{:id => 5, :message => "There were white out conditions in the town; subsequently, the roads were impassable.", :from => 5},
%{:id => 6, :message => "I'd rather be a bird than a fish.", :from => 2},
%{:id => 7, :message => "He told us a very exciting adventure story.", :from => 3},
%{:id => 8, :message => "The lake is a long way from here.", :from => 5},
%{:id => 9, :message => "How was the math test?", :from => 5},
%{:id => 10, :message => "Christmas is coming.", :from => 1},
]
end
end
| 49.035714 | 131 | 0.538966 |
1cf9c6c4d3804ae3a09093f9f12554388e810769 | 1,613 | exs | Elixir | mix.exs | huiqing/libcluster | 78d366c03d8b180aa4db8bb25bf85a1ee70deb59 | [
"MIT"
] | null | null | null | mix.exs | huiqing/libcluster | 78d366c03d8b180aa4db8bb25bf85a1ee70deb59 | [
"MIT"
] | null | null | null | mix.exs | huiqing/libcluster | 78d366c03d8b180aa4db8bb25bf85a1ee70deb59 | [
"MIT"
] | null | null | null | defmodule Cluster.Mixfile do
use Mix.Project
@version "3.2.2"
@source_url "https://github.com/bitwalker/libcluster"
def project do
[app: :libcluster,
version: @version,
elixir: "~> 1.5",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: "Automatic Erlang cluster formation and management for Elixir/Erlang applications",
package: package(),
docs: docs(),
deps: deps(),
elixirc_paths: elixirc_paths(Mix.env),
dialyzer: [
flags: ~w(-Wunmatched_returns -Werror_handling -Wrace_conditions -Wno_opaque -Wunderspecs)
],
preferred_cli_env: [
vcr: :test,
"vcr.delete": :test,
"vcr.check": :test,
"vcr.show": :test,
]
]
end
def application do
[applications: [:logger, :inets, :crypto, :ssl]]
# [applications: [:logger, :inets, jason, :crypto, :ssl]]
end
defp deps do
[
#{:ex_doc, "~> 0.20", only: :dev},
# {:dialyxir, "~> 1.0", only: :dev},
# {:exvcr, "~> 0.11", only: :test},
# {:jason, git: "https://github.com/huiqing/jason.git", branch: "master"}
]
end
defp package do
[files: ["lib", "mix.exs", "README.md", "LICENSE.md"],
maintainers: ["Paul Schoenfelder"],
licenses: ["MIT"],
links: %{"GitHub": @source_url}]
end
defp docs do
[main: "readme",
source_url: @source_url,
source_ref: "v#{@version}",
formatter_opts: [gfm: true],
extras: [
"README.md"
]]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
end
| 25.203125 | 101 | 0.584005 |
1cf9fddce9246aa27aa6b2180a7b2f3dd532b16f | 407 | ex | Elixir | lib/stixex/marking_definition/tlp.ex | FloatingGhost/stixex | c3b012d0e8596fde6bd512f856f05b0187bb5273 | [
"MIT"
] | 1 | 2019-05-07T22:44:45.000Z | 2019-05-07T22:44:45.000Z | lib/stixex/marking_definition/tlp.ex | FloatingGhost/stixex | c3b012d0e8596fde6bd512f856f05b0187bb5273 | [
"MIT"
] | null | null | null | lib/stixex/marking_definition/tlp.ex | FloatingGhost/stixex | c3b012d0e8596fde6bd512f856f05b0187bb5273 | [
"MIT"
] | null | null | null | defmodule StixEx.MarkingDefinition.TLP do
use Ecto.Schema
import Ecto.Changeset
  @derive Jason.Encoder

  # The four TLP (Traffic Light Protocol) colour strings accepted for :tlp.
  @valid_colours ~w[white green amber red]
  # Fields a changeset must populate.
  @required_fields [:tlp]

  embedded_schema do
    field(:tlp, :string)
  end
  # Casts only :tlp, requires it, and restricts it to one of @valid_colours.
  def changeset(struct, params) do
    struct
    |> cast(params, [:tlp])
    |> validate_required(@required_fields)
    |> validate_inclusion(:tlp, @valid_colours)
  end
end
| 20.35 | 47 | 0.707617 |
1cfa0b6cce4991eb0b130d1efa1e441699614dc4 | 403 | exs | Elixir | priv/repo/migrations/20161214024453_create_coto.exs | reallinfo/cotoami | faaee71710019fa55a8215ea60d1d3bafc30d506 | [
"Apache-2.0"
] | 337 | 2016-11-28T15:46:58.000Z | 2022-03-01T06:21:25.000Z | priv/repo/migrations/20161214024453_create_coto.exs | reallinfo/cotoami | faaee71710019fa55a8215ea60d1d3bafc30d506 | [
"Apache-2.0"
] | 79 | 2017-02-27T05:44:36.000Z | 2021-12-09T00:28:11.000Z | priv/repo/migrations/20161214024453_create_coto.exs | reallinfo/cotoami | faaee71710019fa55a8215ea60d1d3bafc30d506 | [
"Apache-2.0"
] | 47 | 2018-02-03T01:32:13.000Z | 2021-11-08T07:54:43.000Z | defmodule Cotoami.Repo.Migrations.CreateCoto do
use Ecto.Migration
def change do
create table(:cotos, primary_key: false) do
add :id, :uuid, primary_key: true
add :content, :text, null: false
add :amishi_id, references(:amishis, on_delete: :delete_all, type: :uuid), null: false
timestamps(type: :utc_datetime)
end
create index(:cotos, [:amishi_id])
end
end
| 25.1875 | 92 | 0.682382 |
1cfa7b73493b6af14f90e0cc630bb97c8c01c34a | 563 | ex | Elixir | lib/inmana/supplies/get_by_expiration.ex | andrermartins/inmana | 19f36e3b7ab509ea72af2eff22981adf2ffcacab | [
"MIT"
] | null | null | null | lib/inmana/supplies/get_by_expiration.ex | andrermartins/inmana | 19f36e3b7ab509ea72af2eff22981adf2ffcacab | [
"MIT"
] | null | null | null | lib/inmana/supplies/get_by_expiration.ex | andrermartins/inmana | 19f36e3b7ab509ea72af2eff22981adf2ffcacab | [
"MIT"
] | null | null | null | defmodule Inmana.Supplies.GetByExpiration do
import Ecto.Query
alias Inmana.{Repo, Restaurant, Supply}
def call do
today = Date.utc_today()
begin_of_week = Date.beginning_of_week(today)
end_of_week = Date.end_of_week(today)
query =
from supply in Supply,
where:
supply.expiration_date >= ^begin_of_week and
supply.expiration_date <= ^end_of_week,
preload: [:restaurant]
query
|> Repo.all()
|> Enum.group_by(fn %Supply{restaurant: %Restaurant{email: email}} -> email end)
end
end
| 24.478261 | 84 | 0.662522 |
1cfa7ea6b786c65ef2a3b7376b210fd82f75c216 | 3,739 | ex | Elixir | lib/chat_api_web/controllers/lambda_controller.ex | hakerspeak/hakerspeak.com | efd9e75f4854fdd19fc1873300deae0b160fb629 | [
"MIT"
] | null | null | null | lib/chat_api_web/controllers/lambda_controller.ex | hakerspeak/hakerspeak.com | efd9e75f4854fdd19fc1873300deae0b160fb629 | [
"MIT"
] | null | null | null | lib/chat_api_web/controllers/lambda_controller.ex | hakerspeak/hakerspeak.com | efd9e75f4854fdd19fc1873300deae0b160fb629 | [
"MIT"
] | null | null | null | defmodule ChatApiWeb.LambdaController do
use ChatApiWeb, :controller
alias ChatApi.Lambdas
alias ChatApi.Lambdas.Lambda
action_fallback(ChatApiWeb.FallbackController)
plug(:authorize when action in [:show, :update, :delete, :deploy, :invoke])
@spec authorize(Plug.Conn.t(), any()) :: any()
defp authorize(conn, _) do
id = conn.path_params["id"]
with %{account_id: account_id} <- conn.assigns.current_user,
%{account_id: ^account_id} = lambda <- Lambdas.get_lambda!(id) do
assign(conn, :current_lambda, lambda)
else
_ -> ChatApiWeb.FallbackController.call(conn, {:error, :not_found}) |> halt()
end
end
@spec index(Plug.Conn.t(), map()) :: Plug.Conn.t()
def index(%{assigns: %{current_user: %{account_id: account_id}}} = conn, params) do
lambdas = Lambdas.list_lambdas(account_id, params)
render(conn, "index.json", lambdas: lambdas)
end
@spec create(Plug.Conn.t(), map()) :: Plug.Conn.t()
def create(%{assigns: %{current_user: %{account_id: account_id, id: creator_id}}} = conn, %{
"lambda" => lambda_params
}) do
with {:ok, %Lambda{} = lambda} <-
lambda_params
|> Map.merge(%{"creator_id" => creator_id, "account_id" => account_id})
|> Lambdas.create_lambda() do
conn
|> put_status(:created)
|> put_resp_header("location", Routes.lambda_path(conn, :show, lambda))
|> render("show.json", lambda: lambda)
end
end
@spec show(Plug.Conn.t(), map()) :: Plug.Conn.t()
def show(conn, _params) do
render(conn, "show.json", lambda: conn.assigns.current_lambda)
end
@spec update(Plug.Conn.t(), map()) :: Plug.Conn.t()
def update(conn, %{"lambda" => lambda_params}) do
with {:ok, %Lambda{} = lambda} <-
Lambdas.update_lambda(conn.assigns.current_lambda, lambda_params) do
render(conn, "show.json", lambda: lambda)
end
end
@spec delete(Plug.Conn.t(), map()) :: Plug.Conn.t()
def delete(conn, _params) do
with {:ok, %Lambda{}} <- Lambdas.delete_lambda(conn.assigns.current_lambda) do
send_resp(conn, :no_content, "")
end
end
@spec deploy(Plug.Conn.t(), map()) :: Plug.Conn.t()
def deploy(conn, %{"file" => file} = params) do
with %{current_lambda: lambda, current_user: %{id: user_id, account_id: account_id}} <-
conn.assigns,
# NB: for now we just get the most recent API key rather than passing it through as a param
%ChatApi.ApiKeys.PersonalApiKey{value: api_key} <-
ChatApi.ApiKeys.list_personal_api_keys(user_id, account_id) |> List.last(),
opts <-
params
|> Map.delete("file")
|> Map.merge(%{"env" => %{"Hakerspeak_API_KEY" => api_key}}),
{:ok, %Lambda{} = lambda} <- Lambdas.deploy_file(lambda, file, opts),
updates <- Map.take(params, ["name", "description", "code"]),
{:ok, %Lambda{} = lambda} <- Lambdas.update_lambda(lambda, updates) do
render(conn, "show.json", lambda: lambda)
end
end
def deploy(conn, params) do
with {:ok, %Lambda{} = lambda} <-
Lambdas.deploy(conn.assigns.current_lambda, params) do
render(conn, "show.json", lambda: lambda)
end
end
@spec invoke(Plug.Conn.t(), map()) :: Plug.Conn.t()
def invoke(conn, params) do
case conn.assigns.current_lambda do
%Lambda{lambda_function_name: lambda_function_name}
when is_binary(lambda_function_name) ->
json(conn, %{data: ChatApi.Aws.invoke_lambda_function(lambda_function_name, params)})
%Lambda{lambda_function_name: _} ->
json(conn, %{data: nil})
end
end
def deps(conn, _params) do
send_file(conn, 200, "./priv/static/deps.zip")
end
end
| 35.273585 | 100 | 0.632255 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.