hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c29a399036b4d344162b10386c6ece8e5321e7e | 2,419 | exs | Elixir | test/controllers/franchise_controller_test.exs | rustedgrail/where_its_due | 2140006ecfbe29e9d4ebc1895b147a76b5376fc3 | [
"MIT"
] | null | null | null | test/controllers/franchise_controller_test.exs | rustedgrail/where_its_due | 2140006ecfbe29e9d4ebc1895b147a76b5376fc3 | [
"MIT"
] | null | null | null | test/controllers/franchise_controller_test.exs | rustedgrail/where_its_due | 2140006ecfbe29e9d4ebc1895b147a76b5376fc3 | [
"MIT"
] | null | null | null | defmodule WhereItsDue.FranchiseControllerTest do
use WhereItsDue.ConnCase
alias WhereItsDue.Franchise
# Attribute fixtures shared by the create/update tests below.
@valid_attrs %{deck: "some content", description: "some content", giant_bomb_id: 42, name: "some content"}
@invalid_attrs %{}
# Every test talks to the JSON API, so request JSON responses up front.
setup %{conn: conn} do
{:ok, conn: put_req_header(conn, "accept", "application/json")}
end
# GET /franchises — empty database yields an empty data list.
test "lists all entries on index", %{conn: conn} do
conn = get conn, franchise_path(conn, :index)
assert json_response(conn, 200)["data"] == []
end
# GET /franchises/:id — renders all serialized fields of the record.
test "shows chosen resource", %{conn: conn} do
franchise = Repo.insert! %Franchise{}
conn = get conn, franchise_path(conn, :show, franchise)
assert json_response(conn, 200)["data"] == %{"id" => franchise.id,
"name" => franchise.name,
"deck" => franchise.deck,
"description" => franchise.description,
"giant_bomb_id" => franchise.giant_bomb_id}
end
# A missing id raises inside the controller; Phoenix converts it to a 404.
test "does not show resource and instead throw error when id is nonexistent", %{conn: conn} do
assert_error_sent 404, fn ->
get conn, franchise_path(conn, :show, -1)
end
end
# POST /franchises with valid attrs — 201 plus the row persisted.
test "creates and renders resource when data is valid", %{conn: conn} do
conn = post conn, franchise_path(conn, :create), franchise: @valid_attrs
assert json_response(conn, 201)["data"]["id"]
assert Repo.get_by(Franchise, @valid_attrs)
end
# POST with an empty map — changeset errors rendered as 422.
test "does not create resource and renders errors when data is invalid", %{conn: conn} do
conn = post conn, franchise_path(conn, :create), franchise: @invalid_attrs
assert json_response(conn, 422)["errors"] != %{}
end
# PUT /franchises/:id with valid attrs — 200 plus the updated row persisted.
test "updates and renders chosen resource when data is valid", %{conn: conn} do
franchise = Repo.insert! %Franchise{}
conn = put conn, franchise_path(conn, :update, franchise), franchise: @valid_attrs
assert json_response(conn, 200)["data"]["id"]
assert Repo.get_by(Franchise, @valid_attrs)
end
# PUT with an empty map — changeset errors rendered as 422.
test "does not update chosen resource and renders errors when data is invalid", %{conn: conn} do
franchise = Repo.insert! %Franchise{}
conn = put conn, franchise_path(conn, :update, franchise), franchise: @invalid_attrs
assert json_response(conn, 422)["errors"] != %{}
end
# DELETE /franchises/:id — 204 with no body, and the row is gone.
test "deletes chosen resource", %{conn: conn} do
franchise = Repo.insert! %Franchise{}
conn = delete conn, franchise_path(conn, :delete, franchise)
assert response(conn, 204)
refute Repo.get(Franchise, franchise.id)
end
end
| 37.796875 | 108 | 0.688714 |
1c29a900c771c40b5f4f70e560142b4604b7703d | 1,197 | ex | Elixir | lib/sensor/web.ex | trihome/nervesjp_basis | 1e8f6567c2f3a5669d761226bf665a0ae4c94744 | [
"MIT"
] | 4 | 2020-11-28T18:12:40.000Z | 2020-12-26T15:25:28.000Z | lib/sensor/web.ex | trihome/nervesjp_basis | 1e8f6567c2f3a5669d761226bf665a0ae4c94744 | [
"MIT"
] | 7 | 2020-11-30T03:49:56.000Z | 2021-02-19T09:12:02.000Z | lib/sensor/web.ex | trihome/nervesjp_basis | 1e8f6567c2f3a5669d761226bf665a0ae4c94744 | [
"MIT"
] | 5 | 2020-11-29T03:59:19.000Z | 2020-12-26T15:24:18.000Z | defmodule NervesjpBasis.Sensor.Web do
@moduledoc """
Documentation for `Web`.
Uploads AHT20 sensor readings to a remote web API.
(Translated from the original Japanese: "module for talking to the web API".)
"""
alias NervesjpBasis.Sensor.Aht20
# Constants
## Replace this with your own handle name
@my_name "nervesjp_algyan"
# Constants
## Web API endpoints
@url_temp "https://phx.japaneast.cloudapp.azure.com/temperatures"
# NOTE(review): @url_humi is defined but never used in the visible code —
# presumably intended for posting humidity readings; confirm or remove.
@url_humi "https://phx.japaneast.cloudapp.azure.com/humidities"
@doc """
Reads the current temperature from the AHT20 sensor and POSTs it to the
temperature endpoint. Humidity is read but discarded here.
## Examples
iex> Web.senddata
> send: [name: nervesjp_algyan] / temp: 22.7 (degree Celsius)
"""
def senddata() do
# Read the temperature from the sensor (second tuple element is discarded).
{:ok, {temp, _}} = Aht20.read_from_aht20()
# Log the value being uploaded
IO.puts(" > send: [name: #{@my_name}] / temp: #{inspect(temp)} (degree Celsius)")
# POST the reading to the web API
post(temp, @url_temp)
end
# Sends `val` to the given API `url` as a JSON POST request.
# Raises on connection failure (HTTPoison.post!/3).
defp post(val, url), do: HTTPoison.post!(url, body(val), header())

# Encodes the measurement payload as JSON: sender name, measured value,
# and the current Unix timestamp.
defp body(val) do
  timestamp = Timex.to_unix(Timex.now())
  Jason.encode!(%{value: %{name: @my_name, value: val, time: timestamp}})
end

# Request headers: plain JSON content type.
defp header(), do: [{"Content-type", "application/json"}]
end
| 19.622951 | 85 | 0.620718 |
1c29c3bf2c2def7d6cada0cf9ef8ca5deeea10aa | 613 | exs | Elixir | nerves.exs | radu/nerves_rpi0_plus | 79ffd6f570fe0a94b2623aed24f1151b5090f33b | [
"Apache-2.0"
] | null | null | null | nerves.exs | radu/nerves_rpi0_plus | 79ffd6f570fe0a94b2623aed24f1151b5090f33b | [
"Apache-2.0"
] | null | null | null | nerves.exs | radu/nerves_rpi0_plus | 79ffd6f570fe0a94b2623aed24f1151b5090f33b | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# Read the system version from the VERSION file next to this config,
# stripping surrounding whitespace (the trailing newline).
# Fixed: String.strip/1 is deprecated since Elixir 1.3 — String.trim/1 is
# the drop-in replacement with identical semantics.
version =
  Path.join(__DIR__, "VERSION")
  |> File.read!()
  |> String.trim()
pkg = :nerves_system_rpi0
# Nerves system package definition: where to fetch the prebuilt artifact,
# which build platform to use, and which files feed the checksum.
config pkg, :nerves_env,
type: :system,
version: version,
compiler: :nerves_package,
artifact_url: [
"https://github.com/nerves-project/#{pkg}/releases/download/v#{version}/#{pkg}-v#{version}.tar.gz",
],
platform: Nerves.System.BR,
platform_config: [
defconfig: "nerves_defconfig"
],
# Files hashed to decide whether the cached artifact is still valid.
checksum: [
"nerves_defconfig",
"rootfs-additions",
"linux-4.4.defconfig",
"fwup.conf",
"cmdline.txt",
"config.txt",
"post-createfs.sh",
"VERSION"
]
| 19.774194 | 103 | 0.641109 |
1c29ee0a0021c68be0d0968e6bd09af1952e418f | 2,290 | exs | Elixir | mix.exs | oestrich/grapevine-legacy | 9d84f8e2d65dda5982686381ffa94a940142e1da | [
"MIT"
] | null | null | null | mix.exs | oestrich/grapevine-legacy | 9d84f8e2d65dda5982686381ffa94a940142e1da | [
"MIT"
] | null | null | null | mix.exs | oestrich/grapevine-legacy | 9d84f8e2d65dda5982686381ffa94a940142e1da | [
"MIT"
] | null | null | null | defmodule Grapevine.Mixfile do
use Mix.Project
# Project definition: Phoenix app with gettext, aliases and deps below.
def project do
[
app: :grapevine,
version: "0.1.0",
elixir: "~> 1.4",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix, :gettext] ++ Mix.compilers,
start_permanent: Mix.env == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {Grapevine.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
# Test support modules are only compiled in the :test environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:backbone, git: "https://github.com/oestrich/gossip-backbone.git"},
{:bamboo, "~> 1.0"},
{:bamboo_smtp, "~> 1.5"},
{:bcrypt_elixir, "~> 1.0"},
{:comeonin, "~> 4.0"},
{:cowboy, "~> 2.0"},
{:credo, "~> 0.10", only: [:dev, :test], runtime: false},
{:distillery, "~> 2.0", runtime: false},
{:ecto, "~> 3.0"},
{:ecto_sql, "~> 3.0"},
{:elixir_uuid, "~> 1.1"},
{:gettext, "~> 0.11"},
{:gossip, "~> 1.0"},
{:phoenix, "~> 1.4.0-rc.3", override: true},
{:phoenix_ecto, "~> 4.0"},
{:phoenix_html, "~> 2.10"},
{:phoenix_live_reload, "~> 1.2-rc.0", only: :dev},
{:phoenix_pubsub, "~> 1.0"},
{:pid_file, "~> 0.1.0"},
{:plug_cowboy, "~> 2.0", override: true},
{:postgrex, ">= 0.0.0"},
{:prometheus_ex, "~> 3.0"},
{:prometheus_plugs, "~> 1.1.1"},
{:sentry, "~> 7.0"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
#     $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
"ecto.migrate.reset": ["ecto.drop", "ecto.create", "ecto.migrate"],
test: ["ecto.create --quiet", "ecto.migrate", "test"]
]
end
end
| 29.358974 | 79 | 0.550655 |
1c29ef6f1e6eebdc1f132d5e67a8d18dd26c3f51 | 1,637 | exs | Elixir | mix.exs | doron2402/phoenix | 444a7a40c6595eb9afc976b5dad512664805fcb9 | [
"MIT"
] | null | null | null | mix.exs | doron2402/phoenix | 444a7a40c6595eb9afc976b5dad512664805fcb9 | [
"MIT"
] | null | null | null | mix.exs | doron2402/phoenix | 444a7a40c6595eb9afc976b5dad512664805fcb9 | [
"MIT"
] | null | null | null | defmodule Phoenix.Mixfile do
use Mix.Project
# Single source of truth for the release version, reused in docs config.
@version "0.16.1"
def project do
[app: :phoenix,
version: @version,
elixir: "~> 1.0.2 or ~> 1.1-dev",
deps: deps,
package: package,
docs: [source_ref: "v#{@version}", main: "Phoenix"],
name: "Phoenix",
source_url: "https://github.com/phoenixframework/phoenix",
homepage_url: "http://www.phoenixframework.org",
description: """
Productive. Reliable. Fast. A productive web framework that
does not compromise speed and maintainability.
"""]
end
# OTP application spec plus default values for the :phoenix app env.
def application do
[mod: {Phoenix, []},
applications: [:plug, :poison, :logger, :eex],
env: [template_engines: [],
format_encoders: [],
filter_parameters: ["password"],
serve_endpoints: false,
gzippable_exts: ~w(.js .css .txt .text .html .json)]]
end
defp deps do
[{:cowboy, "~> 1.0", optional: true},
{:plug, "~> 0.14 or ~> 1.0"},
{:poison, "~> 1.3"},
# Docs dependencies
{:earmark, "~> 0.1", only: :docs},
{:ex_doc, "~> 0.7.1", only: :docs},
{:inch_ex, "~> 0.2", only: :docs},
# Test dependencies
{:phoenix_html, "~> 1.2", only: :test},
{:websocket_client, git: "https://github.com/jeremyong/websocket_client.git", only: :test}]
end
# Hex package metadata for publishing.
defp package do
[contributors: ["Chris McCord", "Darko Fabijan", "José Valim"],
licenses: ["MIT"],
links: %{github: "https://github.com/phoenixframework/phoenix"},
files: ~w(lib priv test/shared web) ++
~w(brunch-config.js CHANGELOG.md LICENSE mix.exs package.json README.md)]
end
end
| 29.763636 | 96 | 0.587049 |
1c2a4c4425f633d2a0fc0c66a0c1409c656ec445 | 2,175 | ex | Elixir | lib/bandwidth/resources/messages.ex | dtolb/elixir-bandwidth | 58dbc1ab8b04be737c08e894d36755d49eb95e87 | [
"MIT"
] | 4 | 2018-11-19T16:22:15.000Z | 2021-01-23T08:44:25.000Z | lib/bandwidth/resources/messages.ex | dtolb/elixir-bandwidth | 58dbc1ab8b04be737c08e894d36755d49eb95e87 | [
"MIT"
] | 3 | 2017-11-07T14:39:10.000Z | 2019-02-18T14:17:52.000Z | lib/bandwidth/resources/messages.ex | wtcross/elixir-bandwidth | 58dbc1ab8b04be737c08e894d36755d49eb95e87 | [
"MIT"
] | 5 | 2017-11-06T22:41:45.000Z | 2019-08-09T03:17:37.000Z | defmodule Bandwidth.Resources.Messages do
@moduledoc """
The Messages resource lets you send SMS/MMS messages and view messages that were
previously sent or received.
[Bandwidth Docs](http://ap.bandwidth.com/docs/rest-api/messages/)
"""
import Bandwidth.Client
@doc ~S"""
Retrieve messages that were previously sent or received.

Accepts optional query parameters (e.g. `from:`) to filter the results.

## Example:

    case Bandwidth.Resources.Messages.list(client, from: "+12223334444") do
      {:ok, {200, messages, _}} -> IO.inspect messages
      {:error, reason} -> IO.puts "Error: #{reason}"
    end

[Bandwidth Docs](http://ap.bandwidth.com/docs/rest-api/messages/#resourceGETv1usersuserIdmessages)
"""
@spec list(Client.t, Client.params) :: Client.response
def list(client, params \\ []), do: get_user_resource(client, ["messages"], params)
@doc ~S"""
Send a new SMS/MMS message.

## Example:

    message = %{ from: "+12223334444", to: "+3335557777", text: "HEY!" }
    case Bandwidth.Resources.Messages.create(client, message) do
      {:ok, {201, _, _}} -> IO.puts "Message sent"
      {:error, reason} -> IO.puts "Error: #{reason}"
    end

[Bandwidth Docs](http://ap.bandwidth.com/docs/rest-api/messages/#resourcePOSTv1usersuserIdmessages)
"""
@spec create(Client.t, Map.t) :: Client.response
def create(client, message), do: post_user_resource(client, ["messages"], message)
@doc ~S"""
Look up a single message by its id.

## Example:

    case Bandwidth.Resources.Messages.find(client, "some-message-id") do
      {:ok, {200, message, _}} -> IO.inspect message
      {:error, reason} -> IO.puts "Error: #{reason}"
    end

[Bandwidth Docs](http://ap.bandwidth.com/docs/rest-api/messages/#resourceGETv1usersuserIdmessagesmessageId)
"""
@spec find(Client.t, binary) :: Client.response
def find(client, id), do: get_user_resource(client, ["messages", id])
end
| 31.071429 | 109 | 0.645057 |
1c2a627a63919568e2c033ab82df8fb8f68854ec | 683 | ex | Elixir | lib/plate_slate_web/schema/ordering_types.ex | conradwt/crafting-graphql-apis-in-elixir-with-absinthe | b26d050a350425ac189ea6d205834da1e47d4f90 | [
"MIT"
] | 1 | 2019-05-11T21:50:09.000Z | 2019-05-11T21:50:09.000Z | lib/plate_slate_web/schema/ordering_types.ex | conradwt/crafting-graphql-apis-in-elixir-with-absinthe | b26d050a350425ac189ea6d205834da1e47d4f90 | [
"MIT"
] | null | null | null | lib/plate_slate_web/schema/ordering_types.ex | conradwt/crafting-graphql-apis-in-elixir-with-absinthe | b26d050a350425ac189ea6d205834da1e47d4f90 | [
"MIT"
] | null | null | null | defmodule PlateSlateWeb.Schema.OrderingTypes do
use Absinthe.Schema.Notation
input_object :order_item_input do
field :menu_item_id, non_null(:id)
field :quantity, non_null(:integer)
end
input_object :place_order_input do
field :customer_number, :integer
field :items, non_null(list_of(non_null(:order_item_input)))
end
object :order_result do
field :order, :order
field :errors, list_of(:input_error)
end
object :order do
field :id, :id
field :customer_number, :integer
field :items, list_of(:order_item)
field :state, :string
end
object :order_item do
field :name, :string
field :quantity, :integer
end
end
| 22.032258 | 64 | 0.713031 |
1c2ab57315b023474545fb59d9f5c3f995558a34 | 7,576 | exs | Elixir | test/elixir/test/view_include_docs_test.exs | RGS-IT-Development/couchdb | e25ae03a75eafe5ecb286399da5186f2fac25835 | [
"Apache-2.0"
] | null | null | null | test/elixir/test/view_include_docs_test.exs | RGS-IT-Development/couchdb | e25ae03a75eafe5ecb286399da5186f2fac25835 | [
"Apache-2.0"
] | 1 | 2018-02-08T23:08:43.000Z | 2018-02-08T23:08:43.000Z | test/elixir/test/view_include_docs_test.exs | RGS-IT-Development/couchdb | e25ae03a75eafe5ecb286399da5186f2fac25835 | [
"Apache-2.0"
] | null | null | null | defmodule ViewIncludeDocsTest do
use CouchTestCase
@moduletag :view_include_docs
@moduletag kind: :single_node
# Design document installed once for the whole module: four views exercising
# plain emits, emitted {_rev: ...} values, emitted {_id: ...} links, and a
# reduce (summate) used by the reduce-related tests.
@ddoc %{
_id: "_design/test",
language: "javascript",
views: %{
all_docs: %{
map: "function(doc) { emit(doc.integer, doc.string) }"
},
with_prev: %{
map:
"function(doc){if(doc.prev) emit(doc._id,{'_rev':doc.prev}); else emit(doc._id,{'_rev':doc._rev});}"
},
with_id: %{
map:
"function(doc) {if(doc.link_id) { var value = {'_id':doc.link_id}; if (doc.link_rev) {value._rev = doc.link_rev}; emit(doc._id, value);}};"
},
summate: %{
map:
"function (doc) { if (typeof doc.integer === 'number') {emit(doc.integer, doc.integer)};}",
reduce: "function (keys, values) { return sum(values); };"
}
}
}
# Shared database seeded with 100 numbered docs plus the design doc above;
# dropped again when the module finishes.
setup_all do
db_name = random_db_name()
{:ok, _} = create_db(db_name)
on_exit(fn -> delete_db(db_name) end)
bulk_save(db_name, make_docs(0..99))
create_doc(db_name, @ddoc)
{:ok, [db_name: db_name]}
end
# include_docs=true on a view attaches the full document to each row.
test "include docs in view", %{conn: conn} when false do
end
test "include docs in view", context do
db_name = context[:db_name]
resp = view(db_name, "test/all_docs", %{include_docs: true, limit: 2})
assert length(resp.body["rows"]) == 2
row0 = Enum.at(resp.body["rows"], 0)
assert row0["id"] == "0"
assert row0["doc"]["_id"] == "0"
row1 = Enum.at(resp.body["rows"], 1)
assert row1["id"] == "1"
assert row1["doc"]["_id"] == "1"
resp = view(db_name, "test/all_docs", %{include_docs: true}, [29, 74])
assert length(resp.body["rows"]) == 2
row0 = Enum.at(resp.body["rows"], 0)
assert row0["doc"]["_id"] == "29"
row1 = Enum.at(resp.body["rows"], 1)
assert row1["doc"]["integer"] == 74
end
# include_docs works on _all_docs too; rows for missing keys carry no "doc".
test "include docs in all_docs", context do
db_name = context[:db_name]
resp =
Couch.get("/#{db_name}/_all_docs",
query: [limit: 2, skip: 1, include_docs: true]
)
assert length(resp.body["rows"]) == 2
row0 = Enum.at(resp.body["rows"], 0)
row1 = Enum.at(resp.body["rows"], 1)
assert row0["doc"]["integer"] == 1
assert row1["doc"]["integer"] == 10
resp =
Couch.post("/#{db_name}/_all_docs",
query: [include_docs: true],
headers: ["Content-Type": "application/json"],
body: %{"keys" => ["not_a_doc"]}
)
assert length(resp.body["rows"]) == 1
row0 = Enum.at(resp.body["rows"], 0)
assert not Map.has_key?(row0, "doc")
resp =
Couch.post("/#{db_name}/_all_docs",
query: [include_docs: true],
headers: ["Content-Type": "application/json"],
body: %{"keys" => ["1", "foo"]}
)
assert length(resp.body["rows"]) == 2
row0 = Enum.at(resp.body["rows"], 0)
row1 = Enum.at(resp.body["rows"], 1)
assert row0["doc"]["integer"] == 1
assert not Map.has_key?(row1, "doc")
resp =
Couch.get("/#{db_name}/_all_docs",
query: [limit: 0, include_docs: true]
)
assert Enum.empty?(resp.body["rows"])
end
# include_docs combined with a (default) reduce is a query parse error.
test "no reduce support", context do
db_name = context[:db_name]
resp =
Couch.get("/#{db_name}/_design/test/_view/summate", query: [include_docs: true])
assert resp.status_code == 400
assert resp.body["error"] == "query_parse_error"
end
# ...unless reduce is explicitly disabled, in which case map rows are returned.
test "Reduce support when reduce=false", context do
db_name = context[:db_name]
resp =
Couch.get("/#{db_name}/_design/test/_view/summate",
query: [reduce: false, include_docs: true]
)
assert length(resp.body["rows"]) == 100
end
# include_docs=false with reduce=true is fine: 0..99 sums to 4950.
test "Not an error with include_docs=false&reduce=true", context do
db_name = context[:db_name]
resp =
Couch.get("/#{db_name}/_design/test/_view/summate",
query: [reduce: true, include_docs: false]
)
assert length(resp.body["rows"]) == 1
row0 = Enum.at(resp.body["rows"], 0)
assert row0["value"] == 4950
end
# A view value of {_id: other} makes include_docs fetch the LINKED document.
@tag :with_db
test "link to another doc from a value", context do
db_name = context[:db_name]
bulk_save(db_name, make_docs(0..99))
create_doc(db_name, @ddoc)
doc_link = %{
_id: "link-to-10",
link_id: "10"
}
{:ok, _} = create_doc(db_name, doc_link)
resp = view(db_name, "test/with_id", %{key: ~s("link-to-10")})
assert length(resp.body["rows"]) == 1
row0 = Enum.at(resp.body["rows"], 0)
assert row0["key"] == "link-to-10"
assert row0["value"]["_id"] == "10"
resp = view(db_name, "test/with_id", %{key: ~s("link-to-10"), include_docs: true})
assert length(resp.body["rows"]) == 1
row0 = Enum.at(resp.body["rows"], 0)
assert row0["value"]["_id"] == "10"
assert row0["doc"]["_id"] == "10"
end
@tag :with_db
test "emitted _rev controls things", context do
  db_name = context[:db_name]
  bulk_save(db_name, make_docs(0..99))
  create_doc(db_name, @ddoc)

  # Snapshot doc "0" (via _all_docs) before it is updated.
  resp =
    Couch.post("/#{db_name}/_all_docs",
      query: [include_docs: true],
      headers: ["Content-Type": "application/json"],
      body: %{"keys" => ["0"]}
    )

  doc_before = Enum.at(resp.body["rows"], 0)["doc"]

  resp = Couch.get("/#{db_name}/0")
  assert resp.status_code == 200
  prev = resp.body["_rev"]

  # Update the doc, stashing its previous revision in the "prev" field
  # that the with_prev view emits as the row's _rev.
  doc_after =
    resp.body
    |> Map.put("integer", 100)
    |> Map.put("prev", prev)

  saved_doc = save(db_name, doc_after)

  resp = Couch.get("/#{db_name}/0")
  assert resp.status_code == 200
  doc_after = resp.body
  assert doc_after["_rev"] == saved_doc["_rev"]
  assert doc_after["_rev"] != doc_after["prev"]
  assert doc_after["integer"] == 100

  # Because the view emits {_rev: doc.prev}, include_docs must resolve to
  # the OLD revision of the document, not the current one.
  resp = view(db_name, "test/with_prev", %{include_docs: true}, ["0"])
  row0 = Enum.at(resp.body["rows"], 0)["doc"]
  assert row0["_id"] == "0"
  assert row0["_rev"] == doc_before["_rev"]
  assert not Map.has_key?(row0, "prev")
  # Fixed: this line was a duplicated `assert assert ...` typo.
  assert row0["integer"] == 0
end
# Regression for COUCHDB-549: include_docs=true combined with conflicts=true
# must attach the _conflicts array to docs that have conflicting revisions.
test "COUCHDB-549 - include_docs=true with conflicts=true" do
db_name_a = random_db_name()
db_name_b = random_db_name()
create_db(db_name_a)
create_db(db_name_b)
on_exit(fn -> delete_db(db_name_a) end)
on_exit(fn -> delete_db(db_name_b) end)
ddoc = %{
_id: "_design/mydesign",
language: "javascript",
views: %{
myview: %{
map: """
function(doc) {
emit(doc.value, 1);
}
"""
}
}
}
{:ok, _} = create_doc(db_name_a, ddoc)
# "foo" is created independently in both databases so that replication
# produces a conflict; "bar" exists only in A and stays conflict-free.
doc1a = %{_id: "foo", value: 1, str: "1"}
{:ok, _} = create_doc(db_name_a, doc1a)
doc1b = %{_id: "foo", value: 1, str: "666"}
{:ok, _} = create_doc(db_name_b, doc1b)
doc2 = %{_id: "bar", value: 2, str: "2"}
{:ok, _} = create_doc(db_name_a, doc2)
replicate(db_name_a, db_name_b)
resp = Couch.get("/#{db_name_b}/foo", query: [conflicts: true])
assert resp.status_code == 200
doc1b = resp.body
assert Map.has_key?(doc1b, "_conflicts")
assert length(doc1b["_conflicts"]) == 1
conflict_rev = Enum.at(doc1b["_conflicts"], 0)
resp = Couch.get("/#{db_name_b}/bar", query: [conflicts: true])
assert resp.status_code == 200
doc2 = resp.body
assert not Map.has_key?(doc2, "_conflicts")
# The view rows must mirror the same conflict information per document.
resp = view(db_name_b, "mydesign/myview", %{include_docs: true, conflicts: true})
assert length(resp.body["rows"]) == 2
row0 = Enum.at(resp.body["rows"], 0)["doc"]
assert length(row0["_conflicts"]) == 1
assert Enum.at(row0["_conflicts"], 0) == conflict_rev
row1 = Enum.at(resp.body["rows"], 1)["doc"]
assert not Map.has_key?(row1, "_conflicts")
end
end
| 28.588679 | 149 | 0.580913 |
1c2abd04f437580796b5a5e036c80d300872e220 | 821 | exs | Elixir | src/proto_app/apps/marketplace/mix.exs | cjimison/ethermint_prototype | 5d8924e2ddc167c59cf712c2496f43976bcbad00 | [
"MIT"
] | null | null | null | src/proto_app/apps/marketplace/mix.exs | cjimison/ethermint_prototype | 5d8924e2ddc167c59cf712c2496f43976bcbad00 | [
"MIT"
] | null | null | null | src/proto_app/apps/marketplace/mix.exs | cjimison/ethermint_prototype | 5d8924e2ddc167c59cf712c2496f43976bcbad00 | [
"MIT"
] | 1 | 2020-01-22T19:18:34.000Z | 2020-01-22T19:18:34.000Z | defmodule Marketplace.MixProject do
use Mix.Project
# Umbrella child app: build/config/deps/lockfile paths point at the
# umbrella root two levels up.
def project do
[
app: :marketplace,
version: "0.1.0",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.9",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger],
mod: {Marketplace.Application, []}
]
end
# Run "mix help deps" to learn about dependencies.
# Currently no dependencies — the examples below show the supported forms.
defp deps do
[
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"},
# {:sibling_app_in_umbrella, in_umbrella: true}
]
end
end
| 23.457143 | 88 | 0.572473 |
1c2ac8fe9602308b7182825b289515469f9cc45b | 2,581 | ex | Elixir | lib/teiserver_web/controllers/admin/tool_controller.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
] | 4 | 2021-07-29T16:23:20.000Z | 2022-02-23T05:34:36.000Z | lib/teiserver_web/controllers/admin/tool_controller.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
] | 14 | 2021-08-01T02:36:14.000Z | 2022-01-30T21:15:03.000Z | lib/teiserver_web/controllers/admin/tool_controller.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
] | 7 | 2021-05-13T12:55:28.000Z | 2022-01-14T06:39:06.000Z | defmodule TeiserverWeb.Admin.ToolController do
use CentralWeb, :controller
alias Teiserver.Telemetry
alias Central.Helpers.TimexHelper
plug(AssignPlug,
sidemenu_active: ["teiserver", "teiserver_admin"]
)
plug Bodyguard.Plug.Authorize,
policy: Teiserver.Account.Admin,
action: {Phoenix.Controller, :action_name},
user: {Central.Account.AuthLib, :current_user}
plug(:add_breadcrumb, name: 'Teiserver', url: '/teiserver')
plug(:add_breadcrumb, name: 'Admin', url: '/teiserver/admin')
@spec index(Plug.Conn.t(), map) :: Plug.Conn.t()
def index(conn, _params) do
render(conn, "index.html")
end
@spec convert_form(Plug.Conn.t(), map) :: Plug.Conn.t()
def convert_form(conn, _params) do
render(conn, "convert_form.html")
end
@spec convert_post(Plug.Conn.t(), map) :: Plug.Conn.t()
def convert_post(conn, %{"file_upload" => file_upload}) do
{:ok, _job} =
case File.read(file_upload.path) do
{:ok, body} ->
%{body: body}
|> Teiserver.UberserverConvert.new()
|> Oban.insert()
error ->
throw(error)
end
render(conn, "convert_post.html")
end
@spec day_metrics_list(Plug.Conn.t(), map) :: Plug.Conn.t()
def day_metrics_list(conn, _params) do
logs =
Telemetry.list_server_day_logs(
# search: [user_id: params["user_id"]],
# joins: [:user],
order: "Newest first",
limit: 31
)
conn
|> assign(:logs, logs)
|> render("day_metrics_list.html")
end
@spec day_metrics_show(Plug.Conn.t(), map) :: Plug.Conn.t()
def day_metrics_show(conn, %{"date" => date_str}) do
date = TimexHelper.parse_ymd(date_str)
log = Telemetry.get_server_day_log(date)
users =
[log]
|> Telemetry.user_lookup()
conn
|> assign(:date, date)
|> assign(:data, log.data)
|> assign(:users, users)
|> render("day_metrics_show.html")
end
@spec day_metrics_today(Plug.Conn.t(), map) :: Plug.Conn.t()
def day_metrics_today(conn, _params) do
data = Telemetry.get_todays_log()
users =
[%{data: data}]
|> Telemetry.user_lookup()
conn
|> assign(:date, Timex.today())
|> assign(:data, data)
|> assign(:users, users)
|> render("day_metrics_show.html")
end
@spec day_metrics_export(Plug.Conn.t(), map) :: Plug.Conn.t()
def day_metrics_export(conn, _params = %{"date" => _date}) do
# anonymous = params["anonymous"]
# log = date
# |> TimexHelper.parse_ymd
# |> Telemetry.get_server_day_log
conn
end
end
| 25.554455 | 63 | 0.627276 |
1c2accdf8b51f9157f5bdf557676c5c6873f500f | 1,088 | ex | Elixir | apps/grapevine/lib/web/controllers/event_controller.ex | oestrich/grapevine | 7fb745a3a6e4eb68bd761baa190b2df32fa1f73d | [
"MIT"
] | 107 | 2018-10-05T18:20:32.000Z | 2022-02-28T04:02:50.000Z | apps/grapevine/lib/web/controllers/event_controller.ex | oestrich/grapevine | 7fb745a3a6e4eb68bd761baa190b2df32fa1f73d | [
"MIT"
] | 33 | 2018-10-05T14:11:18.000Z | 2022-02-10T22:19:18.000Z | apps/grapevine/lib/web/controllers/event_controller.ex | oestrich/grapevine | 7fb745a3a6e4eb68bd761baa190b2df32fa1f73d | [
"MIT"
] | 18 | 2019-02-03T03:08:20.000Z | 2021-12-28T04:29:36.000Z | defmodule Web.EventController do
use Web, :controller
alias GrapevineData.Events
action_fallback(Web.FallbackController)
# GET /events — renders the upcoming-events listing with page title and
# Open Graph metadata assigned onto the connection.
def index(conn, _params) do
  assigns = [
    events: Events.next_month(),
    title: "Upcoming Events - Grapevine",
    open_graph_title: "Upcoming Events",
    open_graph_description: "See upcoming events for games on Grapevine.",
    open_graph_url: event_url(conn, :index)
  ]

  assigns
  |> Enum.reduce(conn, fn {key, value}, acc -> assign(acc, key, value) end)
  |> render("index.html")
end
# GET /events/:id — looks the event up by its public uid, bumps its view
# counter, and renders it with title/Open Graph assigns.
# A failed lookup falls through the `with` and is handled by the
# FallbackController declared above.
def show(conn, %{"id" => id}) do
with {:ok, event} <- Events.get_uid(id),
{:ok, _} <- Events.inc_view_count(event) do
conn
|> assign(:event, event)
|> assign(:game, event.game)
|> assign(:title, "#{event.title} - Grapevine")
|> assign(:open_graph_title, open_graph_title(event))
|> assign(:open_graph_url, Routes.event_url(conn, :show, event.uid))
|> render("show.html")
end
end
@doc """
Builds the Open Graph title for an event: just the event title when the
event has no associated game, otherwise "<game name> - <title>".
"""
def open_graph_title(%{game: game, title: title}) do
  if is_nil(game) do
    title
  else
    "#{game.name} - #{title}"
  end
end
end
| 31.085714 | 85 | 0.645221 |
1c2aebc9e5c2058d61395ce3c46963553e3238d2 | 18,286 | ex | Elixir | lib/bamboo/adapters/smtp_adapter.ex | steady-media/bamboo_smtp | 0b989da5be9b9c7c9d22dacf31ef58dbd06bb991 | [
"MIT"
] | null | null | null | lib/bamboo/adapters/smtp_adapter.ex | steady-media/bamboo_smtp | 0b989da5be9b9c7c9d22dacf31ef58dbd06bb991 | [
"MIT"
] | null | null | null | lib/bamboo/adapters/smtp_adapter.ex | steady-media/bamboo_smtp | 0b989da5be9b9c7c9d22dacf31ef58dbd06bb991 | [
"MIT"
] | null | null | null | defmodule Bamboo.SMTPAdapter do
@moduledoc """
Sends email using SMTP.
Use this adapter to send emails through SMTP. This adapter requires
that some settings are set in the config. See the example section below.
*Sensitive credentials should not be committed to source control and are best kept in environment variables.
Using `{:system, "ENV_NAME"}` configuration is read from the named environment variable at runtime.*
## Example config
# In config/config.exs, or config.prod.exs, etc.
config :my_app, MyApp.Mailer,
adapter: Bamboo.SMTPAdapter,
server: "smtp.domain",
hostname: "www.mydomain.com",
port: 1025,
username: "your.name@your.domain", # or {:system, "SMTP_USERNAME"}
password: "pa55word", # or {:system, "SMTP_PASSWORD"}
tls: :if_available, # can be `:always` or `:never`
allowed_tls_versions: [:"tlsv1", :"tlsv1.1", :"tlsv1.2"],
# or {":system", ALLOWED_TLS_VERSIONS"} w/ comma seprated values (e.g. "tlsv1.1,tlsv1.2")
tls_log_level: :error,
tls_verify: :verify_peer, # optional, can be `:verify_peer` or `:verify_none`
tls_cacertfile: "/somewhere/on/disk", # optional, path to the ca truststore
tls_cacerts: "…", # optional, DER-encoded trusted certificates
tls_depth: 3, # optional, tls certificate chain depth
tls_verify_fun: {&:ssl_verify_hostname.verify_fun/3, check_hostname: "example.com"}, # optional, tls verification function
ssl: false, # can be `true`,
retries: 1,
no_mx_lookups: false, # can be `true`
auth: :if_available # can be `:always`. If your smtp relay requires authentication set it to `:always`.
# Define a Mailer. Maybe in lib/my_app/mailer.ex
defmodule MyApp.Mailer do
use Bamboo.Mailer, otp_app: :my_app
end
"""
@behaviour Bamboo.Adapter
require Logger
# Config keys that must be present; validated by check_required_configuration.
@required_configuration [:server, :port]
# Defaults merged into user config by put_default_configuration.
@default_configuration %{
tls: :if_available,
ssl: false,
retries: 1,
transport: :gen_smtp_client,
auth: :if_available
}
# Accepted values for the corresponding config options.
# NOTE(review): these three attributes are not referenced in the code
# visible in this chunk — presumably used by config validation further
# down the module; confirm before removing.
@tls_versions ~w(tlsv1 tlsv1.1 tlsv1.2)
@log_levels [:critical, :error, :warning, :notice]
@tls_verify [:verify_peer, :verify_none]
defmodule SMTPError do
@moduledoc false
# Exception raised when SMTP delivery fails. `:raw` keeps the original
# {reason, detail} tuple for programmatic inspection.
defexception [:message, :raw]
# Builds the exception from a {reason, detail} tuple; any other shape
# raises FunctionClauseError.
def exception(raw = {reason, detail}) do
message = """
There was a problem sending the email through SMTP.
The error is #{inspect(reason)}
More detail below:
#{inspect(detail)}
"""
%SMTPError{message: message, raw: raw}
end
end
# Bamboo.Adapter callback: deliver `email` over SMTP using the configured
# transport (defaults to :gen_smtp_client). Thrown values from the
# transport are converted into SMTPError; error returns are normalized
# by handle_response/1.
def deliver(email, config) do
gen_smtp_config =
config
|> to_gen_smtp_server_config
response =
try do
email
|> Bamboo.Mailer.normalize_addresses()
|> to_gen_smtp_message
|> config[:transport].send_blocking(gen_smtp_config)
catch
# catches `throw`n values only; exceptions propagate unchanged
e ->
raise SMTPError, {:not_specified, e}
end
handle_response(response)
end
@doc false
# Bamboo.Adapter callback: validate required keys, then fill in defaults.
def handle_config(config) do
config
|> check_required_configuration
|> put_default_configuration
end
@doc false
# Bamboo.Adapter callback: this adapter can send attachments.
def supports_attachments?, do: true
# Normalize the transport's result into {:ok, _} | {:error, _}.
# Clause order matters: :no_credentials must be matched before the
# generic {:error, detail} clause.
defp handle_response({:error, :no_credentials}) do
{:error, "Username and password were not provided for authentication."}
end
defp handle_response({:error, _reason, detail}) do
{:error, detail}
end
defp handle_response({:error, detail}) do
{:error, detail}
end
# Anything that is not an error tuple is treated as success.
defp handle_response(response) do
{:ok, response}
end
# Appends the Bcc header unless the recipient list is empty.
defp add_bcc(body, %Bamboo.Email{bcc: []}) do
body
end
defp add_bcc(body, %Bamboo.Email{bcc: recipients}) do
add_smtp_header_line(body, :bcc, format_email_as_string(recipients, :bcc))
end
# Appends the Cc header unless the recipient list is empty.
defp add_cc(body, %Bamboo.Email{cc: []}) do
body
end
defp add_cc(body, %Bamboo.Email{cc: recipients}) do
add_smtp_header_line(body, :cc, format_email_as_string(recipients, :cc))
end
defp add_custom_header(body, {key, value}) do
add_smtp_header_line(body, key, value)
end
# Folds every user-supplied header from the email into the message.
defp add_custom_headers(body, %Bamboo.Email{headers: headers}) do
Enum.reduce(headers, body, &add_custom_header(&2, &1))
end
# A blank line terminates the header section of a message part.
defp add_ending_header(body) do
add_smtp_line(body, "")
end
# "--delimiter--" closes a MIME multipart section.
defp add_ending_multipart(body, delimiter) do
add_smtp_line(body, "--#{delimiter}--")
end
defp add_html_body(body, %Bamboo.Email{html_body: html_body}, _multi_part_delimiter)
when html_body == nil do
body
end
# HTML parts are base64-encoded and wrapped at 76 characters (RFC 2045).
defp add_html_body(body, %Bamboo.Email{html_body: html_body}, multi_part_delimiter) do
base64_html_body = base64_and_split(html_body)
body
|> add_multipart_delimiter(multi_part_delimiter)
|> add_smtp_header_line("Content-Type", "text/html;charset=UTF-8")
|> add_smtp_line("Content-Transfer-Encoding: base64")
|> add_smtp_line("")
|> add_smtp_line(base64_html_body)
end
defp add_from(body, %Bamboo.Email{from: from}) do
add_smtp_header_line(body, :from, format_email_as_string(from, :from))
end
defp add_mime_header(body) do
add_smtp_header_line(body, "MIME-Version", "1.0")
end
# "--delimiter" opens a new MIME multipart section.
defp add_multipart_delimiter(body, delimiter) do
add_smtp_line(body, "--#{delimiter}")
end
defp add_multipart_header(body, delimiter) do
add_smtp_header_line(body, "Content-Type", ~s(multipart/alternative; boundary="#{delimiter}"))
end
defp add_multipart_mixed_header(body, delimiter) do
add_smtp_header_line(body, "Content-Type", ~s(multipart/mixed; boundary="#{delimiter}"))
end
# A list of header values becomes one header line per element.
defp add_smtp_header_line(body, type, content) when is_list(content) do
Enum.reduce(content, body, &add_smtp_header_line(&2, type, &1))
end
# Atom header names are capitalized, e.g. :bcc -> "Bcc".
defp add_smtp_header_line(body, type, content) when is_atom(type) do
add_smtp_header_line(body, String.capitalize(to_string(type)), content)
end
defp add_smtp_header_line(body, type, content) when is_binary(type) do
add_smtp_line(body, "#{type}: #{content}")
end
# Every line of the message is CRLF-terminated.
defp add_smtp_line(body, content), do: body <> content <> "\r\n"
# A nil subject still produces an (empty) Subject header.
defp add_subject(body, %Bamboo.Email{subject: subject}) when is_nil(subject) do
add_smtp_header_line(body, :subject, "")
end
# Subjects are "encoded-word" encoded so non-ASCII text survives transport.
defp add_subject(body, %Bamboo.Email{subject: subject}) do
add_smtp_header_line(body, :subject, rfc822_encode(subject))
end
# RFC 2047 encoded-word form: =?UTF-8?B?<base64>?=
defp rfc822_encode(content) do
"=?UTF-8?B?#{Base.encode64(content)}?="
end
# RFC 2231 extended parameter value encoding, used for attachment filenames.
defp rfc2231_encode(content) do
"UTF-8''#{URI.encode(content)}"
end
# Base64-encodes `data` and wraps the output at 76 characters per line,
# joined with CRLF, as required for MIME transfer encoding. Returns "" for
# empty input. Base64 output is pure ASCII, so chunking by regex is safe.
def base64_and_split(data) do
  ~r/.{1,76}/
  |> Regex.scan(Base.encode64(data))
  |> Enum.map_join("\r\n", fn [chunk] -> chunk end)
end
defp add_text_body(body, %Bamboo.Email{text_body: text_body}, _multi_part_delimiter)
when text_body == nil do
body
end
# The plain-text part is emitted as-is (no transfer encoding applied).
defp add_text_body(body, %Bamboo.Email{text_body: text_body}, multi_part_delimiter) do
body
|> add_multipart_delimiter(multi_part_delimiter)
|> add_smtp_header_line("Content-Type", "text/plain;charset=UTF-8")
|> add_smtp_line("")
|> add_smtp_line(text_body)
end
# Emits the attachment's MIME headers, adding a Content-ID header when the
# attachment carries a content_id (i.e. is meant to be referenced inline).
defp add_attachment_header(body, attachment) do
case attachment.content_id do
nil ->
add_common_attachment_header(body, attachment)
cid ->
body
|> add_common_attachment_header(attachment)
|> add_smtp_line("Content-ID: <#{cid}>")
end
end
# message/rfc822 attachments (embedded emails) are written verbatim, so no
# Content-Transfer-Encoding header is emitted for them.
defp add_common_attachment_header(body, %{content_type: content_type} = attachment)
when content_type == "message/rfc822" do
<<random::size(32)>> = :crypto.strong_rand_bytes(4)
rfc2231_encoded_filename = rfc2231_encode(attachment.filename)
body
|> add_smtp_line(
"Content-Type: #{attachment.content_type}; name*=#{rfc2231_encoded_filename}"
)
|> add_smtp_line("Content-Disposition: attachment; filename*=#{rfc2231_encoded_filename}")
|> add_smtp_line("X-Attachment-Id: #{random}")
end
# All other attachments are base64 transfer-encoded.
defp add_common_attachment_header(body, attachment) do
<<random::size(32)>> = :crypto.strong_rand_bytes(4)
rfc2231_encoded_filename = rfc2231_encode(attachment.filename)
body
|> add_smtp_line(
"Content-Type: #{attachment.content_type}; name*=#{rfc2231_encoded_filename}"
)
|> add_smtp_line("Content-Disposition: attachment; filename*=#{rfc2231_encoded_filename}")
|> add_smtp_line("Content-Transfer-Encoding: base64")
|> add_smtp_line("X-Attachment-Id: #{random}")
end
# `body` here is the single attachment's own section built by add_attachment/2
# (it starts from ""), so this contains? check only inspects this
# attachment's headers, not the whole message.
defp add_attachment_body(body, data) do
data =
if String.contains?(body, "Content-Type: message/rfc822") do
data
else
base64_and_split(data)
end
add_smtp_line(body, data)
end
defp add_attachment(nil, _), do: ""
# Builds one complete multipart section (boundary + headers + body) for a
# single attachment.
defp add_attachment(attachment, multi_part_mixed_delimiter) do
""
|> add_multipart_delimiter(multi_part_mixed_delimiter)
|> add_attachment_header(attachment)
|> add_smtp_line("")
|> add_attachment_body(attachment.data)
end
defp add_attachments(body, %Bamboo.Email{attachments: nil}, _), do: body
# Appends one section per attachment; the list interpolates to a flat string.
defp add_attachments(body, %Bamboo.Email{attachments: attachments}, multi_part_mixed_delimiter) do
attachment_part =
attachments
|> Enum.map(fn attachment -> add_attachment(attachment, multi_part_mixed_delimiter) end)
"#{body}#{attachment_part}"
end
defp add_to(body, %Bamboo.Email{to: recipients}) do
add_smtp_header_line(body, :to, format_email_as_string(recipients, :to))
end
# Accumulates an error message for each required key missing from config.
defp aggregate_errors(config, key, errors) do
config
|> Map.fetch(key)
|> build_error(key, errors)
end
defp apply_default_configuration({:ok, value}, _default, config) when value != nil do
config
end
# NOTE(review): Map.put_new/3 does not replace an explicit nil value that is
# already present in config, so a key set to nil stays nil instead of
# receiving the default — confirm this is intended.
defp apply_default_configuration(_not_found_value, {key, default_value}, config) do
Map.put_new(config, key, default_value)
end
# Random MIME boundary string, freshly generated for each message part.
defp generate_multi_part_delimiter do
<<random1::size(32), random2::size(32), random3::size(32)>> = :crypto.strong_rand_bytes(12)
"----=_Part_#{random1}_#{random2}.#{random3}"
end
# Renders the full message: headers first, then a multipart/mixed envelope
# containing a multipart/alternative section (text + HTML) and attachments.
defp body(email = %Bamboo.Email{}) do
multi_part_delimiter = generate_multi_part_delimiter()
multi_part_mixed_delimiter = generate_multi_part_delimiter()
""
|> add_subject(email)
|> add_from(email)
|> add_bcc(email)
|> add_cc(email)
|> add_to(email)
|> add_custom_headers(email)
|> add_mime_header
|> add_multipart_mixed_header(multi_part_mixed_delimiter)
|> add_ending_header
|> add_multipart_delimiter(multi_part_mixed_delimiter)
|> add_multipart_header(multi_part_delimiter)
|> add_ending_header
|> add_text_body(email, multi_part_delimiter)
|> add_html_body(email, multi_part_delimiter)
|> add_ending_multipart(multi_part_delimiter)
|> add_attachments(email, multi_part_mixed_delimiter)
|> add_ending_multipart(multi_part_mixed_delimiter)
end
# A present, non-nil value produces no error; otherwise record the key.
defp build_error({:ok, value}, _key, errors) when value != nil, do: errors
defp build_error(_not_found_value, key, errors) do
["Key #{key} is required for SMTP Adapter" | errors]
end
defp check_required_configuration(config) do
@required_configuration
|> Enum.reduce([], &aggregate_errors(config, &1, &2))
|> raise_on_missing_configuration(config)
end
# Converts the domain part of an email address to its ASCII (IDNA/punycode)
# form for the SMTP envelope; the local part is left untouched.
#
# Fix: the previous version matched `[local, domain] = String.split(email, "@")`,
# which crashed with a MatchError on any address whose local part itself
# contains an "@" (e.g. quoted local parts). We now split on the *last* "@".
# Addresses with no "@" at all still raise, as before.
defp puny_encode(email) do
  {local_parts, [domain_part]} = email |> String.split("@") |> Enum.split(-1)
  # At least one local segment must exist — i.e. the address had an "@".
  [_ | _] = local_parts

  # :idna returns a charlist; Enum.join stringifies it, as the original did.
  Enum.join([Enum.join(local_parts, "@"), :idna.utf8_to_ascii(domain_part)], "@")
end
# Formats a {name, email} address pair. When `format` is true the display
# name is encoded-word encoded; otherwise only the bare (punycoded) address
# is returned, e.g. for the SMTP envelope.
defp format_email({nil, email}, _format), do: puny_encode(email)
defp format_email({name, email}, true), do: "#{rfc822_encode(name)} <#{puny_encode(email)}>"
defp format_email({_name, email}, false), do: puny_encode(email)
defp format_email(emails, format) when is_list(emails) do
Enum.map(emails, &format_email(&1, format))
end
# Normalizes any Bamboo address representation via the Formatter protocol
# before rendering it.
defp format_email(email, type, format \\ true) do
email
|> Bamboo.Formatter.format_email_address(type)
|> format_email(format)
end
defp format_email_as_string(emails) when is_list(emails) do
Enum.join(emails, ", ")
end
defp format_email_as_string(email) do
email
end
defp format_email_as_string(email, type) do
email
|> format_email(type)
|> format_email_as_string
end
# Bare sender address for the SMTP envelope (MAIL FROM).
defp from_without_format(%Bamboo.Email{from: from}) do
from
|> format_email(:from, false)
end
# Fills in every @default_configuration entry the user did not set.
defp put_default_configuration(config) do
@default_configuration
|> Enum.reduce(config, &put_default_configuration(&2, &1))
end
defp put_default_configuration(config, default = {key, _default_value}) do
config
|> Map.fetch(key)
|> apply_default_configuration(default, config)
end
# Returns the config untouched when validation produced no errors; otherwise
# raises an ArgumentError listing every missing required setting.
#
# Fix: the error message read "Here you configuration:" — corrected to
# "Here is your configuration:". Also collapses map |> join into map_join.
defp raise_on_missing_configuration([], config), do: config

defp raise_on_missing_configuration(errors, config) do
  formatted_errors = Enum.map_join(errors, "\n", &"* #{&1}")

  raise ArgumentError, """
  The following settings have not been found in your settings:
  #{formatted_errors}
  They are required to make the SMTP adapter work. Here is your configuration:
  #{inspect(config)}
  """
end
# Envelope recipients (RCPT TO): to ++ cc ++ bcc, bare addresses only.
defp to_without_format(email = %Bamboo.Email{}) do
email
|> Bamboo.Email.all_recipients()
|> format_email(:to, false)
end
# gen_smtp expects a {from, recipients, rendered_body} triple.
defp to_gen_smtp_message(email = %Bamboo.Email{}) do
{from_without_format(email), to_without_format(email), body(email)}
end
# Translates Bamboo's config map into the keyword list gen_smtp expects.
# Clause order is significant: the {:system, var} clause near the bottom
# resolves environment variables and re-dispatches, and the final catch-all
# silently drops unrecognized keys.
defp to_gen_smtp_server_config(config) do
Enum.reduce(config, [], &to_gen_smtp_server_config/2)
end
# Bamboo's :server key maps to gen_smtp's :relay.
defp to_gen_smtp_server_config({:server, value}, config) when is_binary(value) do
[{:relay, value} | config]
end
defp to_gen_smtp_server_config({:username, value}, config) when is_binary(value) do
[{:username, value} | config]
end
defp to_gen_smtp_server_config({:password, value}, config) when is_binary(value) do
[{:password, value} | config]
end
# String forms (e.g. from env vars) are converted to the expected atoms.
defp to_gen_smtp_server_config({:tls, "if_available"}, config) do
[{:tls, :if_available} | config]
end
defp to_gen_smtp_server_config({:tls, "always"}, config) do
[{:tls, :always} | config]
end
defp to_gen_smtp_server_config({:tls, "never"}, config) do
[{:tls, :never} | config]
end
defp to_gen_smtp_server_config({:tls, value}, config) when is_atom(value) do
[{:tls, value} | config]
end
# The tls_* keys below all accumulate into gen_smtp's nested :tls_options list.
defp to_gen_smtp_server_config({:allowed_tls_versions, value}, config) when is_binary(value) do
Keyword.update(config, :tls_options, [{:versions, string_to_tls_versions(value)}], fn c ->
[{:versions, string_to_tls_versions(value)} | c]
end)
end
defp to_gen_smtp_server_config({:allowed_tls_versions, value}, config) when is_list(value) do
Keyword.update(config, :tls_options, [{:versions, value}], fn c ->
[{:versions, value} | c]
end)
end
defp to_gen_smtp_server_config({:tls_log_level, value}, config)
when value in @log_levels do
Keyword.update(config, :tls_options, [{:log_level, value}], fn c ->
[{:log_level, value} | c]
end)
end
defp to_gen_smtp_server_config({:tls_verify, value}, config) when value in @tls_verify do
Keyword.update(config, :tls_options, [{:verify, value}], fn c -> [{:verify, value} | c] end)
end
defp to_gen_smtp_server_config({:tls_cacertfile, value}, config)
when is_binary(value) do
Keyword.update(config, :tls_options, [{:cacertfile, value}], fn c ->
[{:cacertfile, value} | c]
end)
end
defp to_gen_smtp_server_config({:tls_cacerts, value}, config)
when is_binary(value) do
Keyword.update(config, :tls_options, [{:cacerts, value}], fn c -> [{:cacerts, value} | c] end)
end
defp to_gen_smtp_server_config({:tls_depth, value}, config)
when is_integer(value) and value >= 0 do
Keyword.update(config, :tls_options, [{:depth, value}], fn c -> [{:depth, value} | c] end)
end
defp to_gen_smtp_server_config({:tls_verify_fun, value}, config) when is_tuple(value) do
Keyword.update(config, :tls_options, [{:verify_fun, value}], fn c ->
[{:verify_fun, value} | c]
end)
end
defp to_gen_smtp_server_config({:tls_customize_hostname_check, value}, config) do
Keyword.update(config, :tls_options, [{:customize_hostname_check, value}], fn c ->
[{:customize_hostname_check, value} | c]
end)
end
# Numeric options accept both strings (env vars) and integers.
defp to_gen_smtp_server_config({:port, value}, config) when is_binary(value) do
[{:port, String.to_integer(value)} | config]
end
defp to_gen_smtp_server_config({:port, value}, config) when is_integer(value) do
[{:port, value} | config]
end
# Boolean options accept "true"/"false" strings as well as booleans.
defp to_gen_smtp_server_config({:ssl, "true"}, config) do
[{:ssl, true} | config]
end
defp to_gen_smtp_server_config({:ssl, "false"}, config) do
[{:ssl, false} | config]
end
defp to_gen_smtp_server_config({:ssl, value}, config) when is_boolean(value) do
[{:ssl, value} | config]
end
defp to_gen_smtp_server_config({:retries, value}, config) when is_binary(value) do
[{:retries, String.to_integer(value)} | config]
end
defp to_gen_smtp_server_config({:retries, value}, config) when is_integer(value) do
[{:retries, value} | config]
end
defp to_gen_smtp_server_config({:hostname, value}, config) when is_binary(value) do
[{:hostname, value} | config]
end
defp to_gen_smtp_server_config({:no_mx_lookups, "true"}, config) do
[{:no_mx_lookups, true} | config]
end
defp to_gen_smtp_server_config({:no_mx_lookups, "false"}, config) do
[{:no_mx_lookups, false} | config]
end
defp to_gen_smtp_server_config({:no_mx_lookups, value}, config) when is_boolean(value) do
[{:no_mx_lookups, value} | config]
end
defp to_gen_smtp_server_config({:auth, "if_available"}, config) do
[{:auth, :if_available} | config]
end
defp to_gen_smtp_server_config({:auth, "always"}, config) do
[{:auth, :always} | config]
end
defp to_gen_smtp_server_config({:auth, value}, config) when is_atom(value) do
[{:auth, value} | config]
end
defp to_gen_smtp_server_config({:sockopts, value}, config) do
[{:sockopts, value} | config]
end
# {:system, var} values are resolved from the environment and re-dispatched
# through the clauses above (hence the string-accepting clauses).
defp to_gen_smtp_server_config({conf, {:system, var}}, config) do
to_gen_smtp_server_config({conf, System.get_env(var)}, config)
end
# Unknown keys are ignored.
defp to_gen_smtp_server_config({_key, _value}, config) do
config
end
# Parses a comma-separated version string (e.g. "tlsv1.1,tlsv1.2") into the
# list of atoms :ssl expects. Unknown entries are silently dropped.
#
# Improvement: entries are trimmed, so values with surrounding whitespace
# (e.g. "tlsv1.1, tlsv1.2") are now accepted instead of being discarded.
# String.to_atom/1 is safe here because input is filtered against the
# bounded @tls_versions whitelist first.
defp string_to_tls_versions(version_string) do
  version_string
  |> String.split(",", trim: true)
  |> Enum.map(&String.trim/1)
  |> Enum.filter(&(&1 in @tls_versions))
  |> Enum.map(&String.to_atom/1)
end
end
| 30.174917 | 130 | 0.690583 |
1c2afbc5308fb206397ce6413bc30e7bfa1ad00f | 1,971 | exs | Elixir | config/dev.exs | pdamoc/cowboy_issue | 3873ffed5cc2b15ead54b2d3e95575b09ba1833a | [
"MIT"
] | null | null | null | config/dev.exs | pdamoc/cowboy_issue | 3873ffed5cc2b15ead54b2d3e95575b09ba1833a | [
"MIT"
] | null | null | null | config/dev.exs | pdamoc/cowboy_issue | 3873ffed5cc2b15ead54b2d3e95575b09ba1833a | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :cowboy_issue, CowboyIssueWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [node: ["node_modules/brunch/bin/brunch", "watch", "--stdin",
cd: Path.expand("../assets", __DIR__)]]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# command from your terminal:
#
# openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" -keyout priv/server.key -out priv/server.pem
#
# The `http:` config above can be replaced with:
#
# https: [port: 4000, keyfile: "priv/server.key", certfile: "priv/server.pem"],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :cowboy_issue, CowboyIssueWeb.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{lib/cowboy_issue_web/views/.*(ex)$},
~r{lib/cowboy_issue_web/templates/.*(eex)$}
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Configure your database.
# These credentials are for local development only — never reuse them in
# production configuration.
config :cowboy_issue, CowboyIssue.Repo,
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "postgres",
database: "cowboy_issue_dev",
hostname: "localhost",
pool_size: 10
1c2b17a1d5cc888b4c79f701ba1a137a85b94560 | 2,195 | exs | Elixir | mix.exs | tonjo/oban | 9b1a6edfec2a4d225ab6969f8886dc021e2df34c | [
"Apache-2.0"
] | null | null | null | mix.exs | tonjo/oban | 9b1a6edfec2a4d225ab6969f8886dc021e2df34c | [
"Apache-2.0"
] | null | null | null | mix.exs | tonjo/oban | 9b1a6edfec2a4d225ab6969f8886dc021e2df34c | [
"Apache-2.0"
] | null | null | null | defmodule Oban.MixProject do
use Mix.Project
@version "1.2.0"
def project do
[
app: :oban,
version: @version,
elixir: "~> 1.8",
elixirc_paths: elixirc_paths(Mix.env()),
start_permanent: Mix.env() == :prod,
deps: deps(),
aliases: aliases(),
preferred_cli_env: [
bench: :test,
ci: :test,
"test.setup": :test
],
# Hex
package: package(),
description: """
Robust job processing, backed by modern PostgreSQL.
""",
# Dialyzer
dialyzer: [
plt_add_apps: [:ex_unit],
plt_core_path: "_build/#{Mix.env()}",
flags: [:error_handling, :race_conditions, :underspecs]
],
# Docs
name: "Oban",
docs: [
main: "Oban",
source_ref: "v#{@version}",
source_url: "https://github.com/sorentwo/oban",
extras: ["README.md", "CHANGELOG.md": [filename: "CHANGELOG.md", title: "CHANGELOG"]]
]
]
end
def application do
[
extra_applications: [:logger]
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_env), do: ["lib"]
def package do
[
maintainers: ["Parker Selbert"],
licenses: ["Apache-2.0"],
links: %{github: "https://github.com/sorentwo/oban"}
]
end
defp deps do
[
{:ecto_sql, "~> 3.1"},
{:jason, "~> 1.1"},
{:postgrex, "~> 0.14"},
{:telemetry, "~> 0.4"},
{:stream_data, "~> 0.4", only: [:test, :dev]},
{:tzdata, "~> 1.0", only: [:test, :dev]},
{:benchee, "~> 1.0", only: [:test, :dev], runtime: false},
{:credo, "~> 1.0", only: [:test, :dev], runtime: false},
{:dialyxir, "~> 0.5", only: [:test, :dev], runtime: false},
{:ex_doc, "~> 0.20", only: [:test, :dev], runtime: false},
{:nimble_parsec, "~> 0.5", only: [:test, :dev], runtime: false}
]
end
defp aliases do
[
bench: "run bench/bench_helper.exs",
"test.setup": ["ecto.create", "ecto.migrate"],
ci: [
"format --check-formatted",
"credo --strict",
"test --raise",
"dialyzer --halt-exit-status"
]
]
end
end
| 24.120879 | 93 | 0.512984 |
1c2b1ec0d95bafb92e65232424008fd5c6b6a8aa | 10,212 | exs | Elixir | test/course_planner_web/controllers/bulk_controller_test.exs | digitalnatives/course_planner | 27b1c8067edc262685e9c4dcbfcf82633bc8b8dc | [
"MIT"
] | 38 | 2017-04-11T13:37:38.000Z | 2021-05-22T19:35:36.000Z | test/course_planner_web/controllers/bulk_controller_test.exs | digitalnatives/course_planner | 27b1c8067edc262685e9c4dcbfcf82633bc8b8dc | [
"MIT"
] | 226 | 2017-04-07T13:14:14.000Z | 2018-03-08T16:50:11.000Z | test/course_planner_web/controllers/bulk_controller_test.exs | digitalnatives/course_planner | 27b1c8067edc262685e9c4dcbfcf82633bc8b8dc | [
"MIT"
] | 7 | 2017-08-30T23:58:13.000Z | 2021-03-28T11:50:45.000Z | defmodule CoursePlanner.BulkControllerTest do
use CoursePlannerWeb.ConnCase
alias CoursePlanner.{Repo, Accounts.User}
import CoursePlanner.Factory
setup(%{user_role: role}) do
conn =
role
|> insert()
|> guardian_login_html()
{:ok, conn: conn}
end
defp create_input_params(target, title, csv_data) do
path = Plug.Upload.random_file!("csv")
File.write!(path, csv_data)
%{input: %{csv_file: %Plug.Upload{path: path}, target: target, title: title}}
end
@moduletag user_role: :student
describe "settings functionality for student user" do
test "does not render new page", %{conn: conn} do
conn = get conn, bulk_path(conn, :new), target: "user", title: "Bulk Users"
assert html_response(conn, 403)
end
test "does not create bulk request for student user", %{conn: conn} do
params = create_input_params("user", "user bulk creation", "Aname,AFamile,Anickname,a@a.com,Student")
conn = post conn, bulk_path(conn, :create), params
assert html_response(conn, 403)
refute Repo.get_by(User, name: "Aname", family_name: "AFamile", role: "Student")
end
end
@moduletag user_role: :teacher
describe "settings functionality for teacher user" do
test "does not render new page", %{conn: conn} do
conn = get conn, bulk_path(conn, :new), target: "user", title: "Bulk Users"
html_response(conn, 403)
end
test "does not create bulk request for teacher user", %{conn: conn} do
params = create_input_params("user", "user bulk creation", "Aname,AFamile,Anickname,a@a.com,student")
conn = post conn, bulk_path(conn, :create), params
assert html_response(conn, 403)
refute Repo.get_by(User, name: "Aname", family_name: "AFamile", role: "Student")
end
end
@moduletag user_role: :volunteer
describe "settings functionality for volunteer user" do
test "does not render new page", %{conn: conn} do
conn = get conn, bulk_path(conn, :new), target: "user", title: "Bulk Users"
assert html_response(conn, 403)
end
test "does not create bulk request for volunteer user", %{conn: conn} do
params = create_input_params("user", "user bulk creation", "Aname,AFamile,Anickname,a@a.com,Student")
conn = post conn, bulk_path(conn, :create), params
assert html_response(conn, 403)
refute Repo.get_by(User, name: "Aname", family_name: "AFamile", role: "Student")
end
end
@moduletag user_role: :supervisor
describe "settings functionality for supervisor user" do
test "does not render new page", %{conn: conn} do
conn = get conn, bulk_path(conn, :new), target: "user", title: "Bulk Users"
assert html_response(conn, 403)
end
test "does not create bulk request for supervisor user", %{conn: conn} do
params = create_input_params("user", "user bulk creation", "Aname,AFamile,Anickname,a@a.com,Student")
conn = post conn, bulk_path(conn, :create), params
assert html_response(conn, 403)
refute Repo.get_by(User, name: "Aname", family_name: "AFamile", role: "Student")
end
end
@moduletag user_role: :coordinator
describe "settings functionality for coordinator user" do
test "render new page", %{conn: conn} do
conn = get conn, bulk_path(conn, :new), target: "user", title: "Bulk Users"
assert html_response(conn, 200) =~ "Bulk Users"
end
@tag user_role: :coordinator
test "creates bulk request with one row of data", %{conn: conn} do
params = create_input_params("user", "user bulk creation", "Aname,AFamile,Anickname,a@a.com,Student")
conn = post conn, bulk_path(conn, :create), params
assert redirected_to(conn) == user_path(conn, :index)
assert get_flash(conn, "info") == "All users are created and notified by"
assert Repo.get_by(User, name: "Aname", family_name: "AFamile", role: "Student")
end
test "creates bulk request with multiple rows of data", %{conn: conn} do
params = create_input_params("user", "user bulk creation",
"""
Aname,AFamile,Anickname,a@a.com,Student
Bname,BFamile,Bnickname,b@b.com,Teacher
Cname,CFamile,Cnickname,c@c.com,Volunteer
Dname,DFamile,Dnickname,d@d.com,Coordinator
""")
conn = post conn, bulk_path(conn, :create), params
assert redirected_to(conn) == user_path(conn, :index)
assert get_flash(conn, "info") == "All users are created and notified by"
assert Repo.get_by(User, email: "a@a.com")
assert Repo.get_by(User, email: "b@b.com")
assert Repo.get_by(User, email: "c@c.com")
assert Repo.get_by(User, email: "d@d.com")
end
test "creates bulk request with multiple rows of data even if role are downcased", %{conn: conn} do
params = create_input_params("user", "user bulk creation",
"""
Aname, AFamile,Anickname,a@a.com,student
Bname,BFamile,Bnickname,b@b.com,teacher
Cname,CFamile,Cnickname,c@c.com,volunteer
Dname,DFamile,Dnickname,d@d.com,coordinator
""")
conn = post conn, bulk_path(conn, :create), params
assert redirected_to(conn) == user_path(conn, :index)
assert get_flash(conn, "info") == "All users are created and notified by"
assert Repo.get_by(User, email: "a@a.com")
assert Repo.get_by(User, email: "b@b.com")
assert Repo.get_by(User, email: "c@c.com")
assert Repo.get_by(User, email: "d@d.com")
end
test "does not create bulk request if input file is missing", %{conn: conn} do
params = %{"input" => %{"target" => "user", "title" => "user bulk creation"}}
conn = post conn, bulk_path(conn, :create), params
assert html_response(conn, 200) =~ "You have to select a file"
end
test "does not create bulk request if input file is empty", %{conn: conn} do
params = create_input_params("user", "user bulk creation", "")
conn = post conn, bulk_path(conn, :create), params
assert html_response(conn, 200) =~ "Input can not be empty"
end
test "does not create bulk request if input file is just new lines", %{conn: conn} do
params = create_input_params("user", "user bulk creation",
"""
""")
conn = post conn, bulk_path(conn, :create), params
assert get_flash(conn, "error") == "Row has length 1 - expected length 5 on line 1"
end
test "does not create bulk request even if one of the lines is invalid when changeset fails", %{conn: conn} do
params = create_input_params("user", "user bulk creation",
"""
Aname,AFamile,Anickname,a@a.com,Student
Bname,BFamile,Bnickname,b@b.com,Teacher
Cname,CFamile,Cnickname,,Volunteer
Dname,DFamile,Dnickname,d@d.com,Coordinator
""")
conn = post conn, bulk_path(conn, :create), params
assert get_flash(conn, "error") == "email can't be blank"
refute Repo.get_by(User, email: "a@a.com")
refute Repo.get_by(User, email: "b@b.com")
refute Repo.get_by(User, email: "c@c.com")
refute Repo.get_by(User, email: "d@d.com")
end
test "does not create bulk request even if one of the lines is invalid when csv fails", %{conn: conn} do
params = create_input_params("user", "user bulk creation",
"""
Aname,AFamile,Anickname,a@a.com,Student
Bname,BFamile,Bnickname,b@b.com,Teacher
Cname,CFamile,Cnickname,Volunteer
Dname,DFamile,Dnickname,d@d.com,Coordinator
""")
conn = post conn, bulk_path(conn, :create), params
assert get_flash(conn, "error") == "Row has length 4 - expected length 5 on line 3"
refute Repo.get_by(User, email: "a@a.com")
refute Repo.get_by(User, email: "b@b.com")
refute Repo.get_by(User, email: "c@c.com")
refute Repo.get_by(User, email: "d@d.com")
end
test "does not create bulk request if input file does not have the correct format", %{conn: conn} do
params = create_input_params("user", "user bulk creation", "Aname,AFamile,,Anickname,a@a.com\nBname")
conn = post conn, bulk_path(conn, :create), params
assert html_response(conn, 200) =~ "Row has length 1 - expected length 5 on line 2"
refute Repo.get_by(User, name: "Aname", family_name: "AFamile", email: "a@a.com", role: "Student")
end
test "does not create bulk request if input fields are not enough", %{conn: conn} do
params = create_input_params("user", "user bulk creation", "Aname,AFamile,Anickname,a@a.com")
conn = post conn, bulk_path(conn, :create), params
assert html_response(conn, 200) =~ "Row has length 4 - expected length 5 on line 1"
refute Repo.get_by(User, name: "Aname", family_name: "AFamile", role: "Student")
end
test "does not create bulk request if role is unknown", %{conn: conn} do
params = create_input_params("user", "user bulk creation", "Aname,AFamile,Anickname,a@a.com,unknown")
conn = post conn, bulk_path(conn, :create), params
assert html_response(conn, 200)
assert get_flash(conn, "error") == "role is invalid"
refute Repo.get_by(User, name: "Aname", family_name: "AFamile", role: "Student")
end
test "does not create bulk request if email is already taken", %{conn: conn} do
insert(:student, email: "a@a.com")
params = create_input_params("user", "user bulk creation", "Aname,AFamile,Anickname,a@a.com,Student")
conn = post conn, bulk_path(conn, :create), params
assert html_response(conn, 200)
assert get_flash(conn, "error") == "email has already been taken"
refute Repo.get_by(User, name: "Aname", family_name: "AFamile", role: "Student")
end
test "returning error if no action is implemented for the requested target", %{conn: conn} do
params = create_input_params("invalid target", "user bulk creation", "Aname,AFamile,Anickname,a@a.com,Student")
conn = post conn, bulk_path(conn, :create), params
assert html_response(conn, 200)
assert get_flash(conn, "error") == "Something went wrong"
refute Repo.get_by(User, name: "Aname", family_name: "AFamile", role: "Student")
end
end
end
| 44.789474 | 117 | 0.6604 |
1c2b3edb54b5e1bcdd7e9b82b43a877b706ec3b3 | 47,693 | ex | Elixir | lib/axon/shape.ex | brodeuralexis/axon | 6897a49a6300c0e15924c4b9fd48104c3972b673 | [
"Apache-2.0"
] | null | null | null | lib/axon/shape.ex | brodeuralexis/axon | 6897a49a6300c0e15924c4b9fd48104c3972b673 | [
"Apache-2.0"
] | null | null | null | lib/axon/shape.ex | brodeuralexis/axon | 6897a49a6300c0e15924c4b9fd48104c3972b673 | [
"Apache-2.0"
] | null | null | null | defmodule Axon.Shape do
@moduledoc false
# Collection of shape calculations for calculating the
# output and trainable parameter shapes for high-level
# layers.
#
# `nil` is often used as a stand-in for unknown batch
# size, so each of these methods must account for that.
@doc """
Calculates the shape of an input layer.

## Examples

    iex> Axon.Shape.input({nil, 784})
    {nil, 784}

    iex> Axon.Shape.input({32, 784})
    {32, 784}

    iex> Axon.Shape.input({})
    {}

    iex> Axon.Shape.input({nil})
    {nil}

    iex> Axon.Shape.input({5})
    {5}

### Error cases

    iex> Axon.Shape.input(5)
    ** (ArgumentError) invalid input shape 5, input shape must be a tuple with only the leading dimension as nil, if any

    iex> Axon.Shape.input({32, nil, 28, 28})
    ** (ArgumentError) invalid input shape {32, nil, 28, 28}, input shape must be a tuple with only the leading dimension as nil, if any
"""
def input(input_shape)

def input({}), do: {}

def input(input_shape) when is_tuple(input_shape) do
  # Only the batch (leading) dimension may be nil; all others must be ints.
  [batch | rest] = Tuple.to_list(input_shape)

  valid? = (is_nil(batch) or is_integer(batch)) and Enum.all?(rest, &is_integer/1)

  unless valid? do
    raise ArgumentError,
          "invalid input shape #{inspect(input_shape)}, input" <>
            " shape must be a tuple with only the leading dimension" <>
            " as nil, if any"
  end

  input_shape
end

def input(input_shape) do
  raise ArgumentError,
        "invalid input shape #{inspect(input_shape)}, input" <>
          " shape must be a tuple with only the leading dimension" <>
          " as nil, if any"
end
@doc """
Determines if two shapes are compatible. Shapes are compatible
if they are equal, or if all non-nil dimensions are equal.

## Examples

    iex> Axon.Shape.compatible?({nil, 32}, {2, 32})
    true

    iex> Axon.Shape.compatible?({1, 32}, {2, 32})
    false

    iex> Axon.Shape.compatible?({1, 3, 2}, {3, 2})
    false

    iex> Axon.Shape.compatible?({1, nil}, {2, nil})
    false
"""
def compatible?(s1, s2) do
  # Bug fix: the previous reduce used
  # `(acc and d1 == d2) or d1 == nil or d2 == nil`, which let a later nil
  # dimension pair reset a false accumulator back to true — e.g.
  # compatible?({1, nil}, {2, nil}) wrongly returned true. Enum.all?/2
  # requires *every* dimension pair to either match or contain a nil.
  if Nx.rank(s1) == Nx.rank(s2) do
    s1
    |> Tuple.to_list()
    |> Enum.zip(Tuple.to_list(s2))
    |> Enum.all?(fn {d1, d2} -> d1 == d2 or is_nil(d1) or is_nil(d2) end)
  else
    false
  end
end
## Linear
@doc """
Calculates the shape of a dense kernel given the input
shape and output units.

## Examples

    iex> Axon.Shape.dense_kernel({nil, 784}, 128)
    {784, 128}

    iex> Axon.Shape.dense_kernel({nil, 128}, 256)
    {128, 256}

    iex> Axon.Shape.dense_kernel({nil, 3, 256, 256}, 128)
    {256, 128}
"""
def dense_kernel(input_shape, units) do
  rank = Nx.rank(input_shape)

  if rank < 2 do
    raise ArgumentError,
          "input shape must have at least rank 2, got rank #{rank}"
  end

  # Kernel maps the trailing feature dimension to the requested units.
  {elem(input_shape, rank - 1), units}
end
@doc """
Calculates the shape of a dense bias given the input
shape and output units.

## Examples

    iex> Axon.Shape.dense_bias({nil, 784}, 128)
    {128}

    iex> Axon.Shape.dense_bias({nil, 128}, 256)
    {256}

    iex> Axon.Shape.dense_bias({nil, 3, 256, 256}, 128)
    {128}
"""
def dense_bias(input_shape, units) do
  rank = Nx.rank(input_shape)

  # The bias shape depends only on `units`, but the input is still
  # validated so shape errors surface at build time.
  if rank < 2 do
    raise ArgumentError,
          "input shape must have at least rank 2, got rank #{rank}"
  end

  {units}
end
@doc """
Calculates the output shape of a dense layer given the
input shape and output units.

## Examples

    iex> Axon.Shape.dense({nil, 784}, 128)
    {nil, 128}

    iex> Axon.Shape.dense({nil, 256}, 512)
    {nil, 512}

    iex> Axon.Shape.dense({nil, 128}, 128)
    {nil, 128}

### Errors

    iex> Axon.Shape.dense({}, 32)
    ** (ArgumentError) input shape must have at least rank 2, got rank 0

    iex> Axon.Shape.dense({1}, 32)
    ** (ArgumentError) input shape must have at least rank 2, got rank 1
"""
def dense(input_shape, units) do
  rank = Nx.rank(input_shape)

  if rank < 2 do
    raise ArgumentError,
          "input shape must have at least rank 2, got rank #{rank}"
  end

  # Batch dimension is preserved; features become `units`.
  {elem(input_shape, 0), units}
end
@doc """
Calculates the shape of a bilinear kernel given both input
shapes and output units.

## Examples

    iex> Axon.Shape.bilinear_kernel({nil, 32}, {nil, 64}, 128)
    {128, 32, 64}

    iex> Axon.Shape.bilinear_kernel({nil, 32, 64}, {nil, 16}, 32)
    {32, 64, 16}
"""
def bilinear_kernel(parent1, parent2, units) do
  unless Nx.rank(parent1) >= 2 and Nx.rank(parent2) >= 2 do
    # Comma added for consistency with the message raised by bilinear/3.
    raise ArgumentError,
          "input shapes must both have at least rank 2," <>
            " got ranks #{Nx.rank(parent1)} and #{Nx.rank(parent2)}"
  end

  # Feature size of each input is its trailing dimension.
  parent1_features = elem(parent1, Nx.rank(parent1) - 1)
  parent2_features = elem(parent2, Nx.rank(parent2) - 1)
  {units, parent1_features, parent2_features}
end
@doc """
Calculates the shape of a bilinear bias given both input
shapes and output units.

## Examples

    iex> Axon.Shape.bilinear_bias({nil, 32}, {nil, 64}, 128)
    {128}

    iex> Axon.Shape.bilinear_bias({nil, 32, 64}, {nil, 32, 16}, 32)
    {32}
"""
def bilinear_bias(parent1, parent2, units) do
  unless Nx.rank(parent1) >= 2 and Nx.rank(parent2) >= 2 do
    # Comma added for consistency with the message raised by bilinear/3.
    raise ArgumentError,
          "input shapes must both have at least rank 2," <>
            " got ranks #{Nx.rank(parent1)} and #{Nx.rank(parent2)}"
  end

  {units}
end
@doc """
Calculates the output shape of a bilinear layer given both input
shapes and output units.

All dimensions except the last must be pairwise compatible
(equal, or one of them `nil`); `nil` dimensions are resolved
from the other input when possible.

## Examples

    iex> Axon.Shape.bilinear({nil, 32}, {nil, 64}, 128)
    {nil, 128}

    iex> Axon.Shape.bilinear({nil, 32, 64}, {nil, 32, 16}, 32)
    {nil, 32, 32}

    iex> Axon.Shape.bilinear({nil, 32, 64}, {16, 32, 16}, 32)
    {16, 32, 32}

  ### Errors

    iex> Axon.Shape.bilinear({32, 32}, {16, 16}, 32)
    ** (ArgumentError) all input dimensions but the last must match, got 32 and 16 for shapes {32, 32} and {16, 16}

    iex> Axon.Shape.bilinear({nil, 16, 32}, {nil, 16}, 32)
    ** (ArgumentError) input ranks must match, got 3 and 2

    iex> Axon.Shape.bilinear({nil, 16, 32}, {}, 32)
    ** (ArgumentError) input shapes must both have at least rank 2, got ranks 3 and 0

    iex> Axon.Shape.bilinear({nil}, {12}, 32)
    ** (ArgumentError) input shapes must both have at least rank 2, got ranks 1 and 1
"""
def bilinear(parent1, parent2, units) do
  rank1 = Nx.rank(parent1)
  rank2 = Nx.rank(parent2)

  unless rank1 >= 2 and rank2 >= 2 do
    raise ArgumentError,
          "input shapes must both have at least rank 2," <>
            " got ranks #{rank1} and #{rank2}"
  end

  unless rank1 == rank2 do
    raise ArgumentError,
          "input ranks must match, got #{inspect(rank1)}" <>
            " and #{inspect(rank2)}"
  end

  # Drop the trailing feature dimension of each input.
  leading1 = parent1 |> Tuple.delete_at(rank1 - 1) |> Tuple.to_list()
  leading2 = parent2 |> Tuple.delete_at(rank2 - 1) |> Tuple.to_list()

  # Merge leading dims pairwise, resolving nil from the other side.
  merged =
    Enum.zip_with(leading1, leading2, fn d1, d2 ->
      unless is_nil(d1) or is_nil(d2) or d1 == d2 do
        raise ArgumentError,
              "all input dimensions but the last must match, got #{inspect(d1)}" <>
                " and #{inspect(d2)} for shapes #{inspect(parent1)} and #{inspect(parent2)}"
      end

      d1 || d2
    end)

  List.to_tuple(merged ++ [units])
end
## Sparse
@doc """
Calculates the shape of an embedding kernel given input shape,
vocab size and embedding size.

The input shape does not affect the kernel shape; the kernel is
a `{vocab_size, embedding_size}` lookup table.

## Examples

    iex> Axon.Shape.embedding_kernel({nil, 10}, 128, 32)
    {128, 32}

    iex> Axon.Shape.embedding_kernel({nil, 32}, 10, 10)
    {10, 10}
"""
def embedding_kernel(_input_shape, vocab_size, embedding_size), do: {vocab_size, embedding_size}
@doc """
Calculates the output shape of an embedding layer given input shape,
vocab size and embedding size.

Each index in the input is replaced by an `embedding_size` vector,
so the output is the input shape with one extra trailing dimension.

## Examples

    iex> Axon.Shape.embedding({nil, 10}, 128, 32)
    {nil, 10, 32}

    iex> Axon.Shape.embedding({nil, 32}, 10, 10)
    {nil, 32, 10}
"""
def embedding(input_shape, _vocab_size, embedding_size) do
  Tuple.insert_at(input_shape, tuple_size(input_shape), embedding_size)
end
## Conv
@doc """
Calculates the shape of a convolution kernel given the
input shape, output filters, and kernel size.

Kernel size must match the number of spatial dimensions
in the input (input rank - 2).

## Examples

    iex> Axon.Shape.conv_kernel({nil, 3, 224, 224}, 32, {3, 3}, :first)
    {32, 3, 3, 3}

    iex> Axon.Shape.conv_kernel({nil, 3, 28}, 64, {2}, :first)
    {64, 3, 2}

    iex> Axon.Shape.conv_kernel({nil, 1, 32, 32, 10}, 32, {2, 1, 3}, :first)
    {32, 1, 2, 1, 3}

    iex> Axon.Shape.conv_kernel({nil, 28, 3}, 64, {2}, :last)
    {64, 3, 2}

  ### Error cases

    iex> Axon.Shape.conv_kernel({nil, 1, 28, 28}, 32, {2}, :first)
    ** (ArgumentError) kernel size must have same rank (1) as number of spatial dimensions in the input (2)
"""
def conv_kernel(input_shape, output_filters, kernel_size, channels) do
  spatial_rank = Nx.rank(input_shape) - 2
  kernel_rank = Nx.rank(kernel_size)

  unless kernel_rank == spatial_rank do
    raise ArgumentError,
          "kernel size must have same rank (#{kernel_rank})" <>
            " as number of spatial dimensions in the input (#{spatial_rank})"
  end

  # Channel axis sits right after batch for :first, at the end for :last.
  channel_idx = if channels == :first, do: 1, else: tuple_size(input_shape) - 1
  input_channels = elem(input_shape, channel_idx)

  List.to_tuple([output_filters, input_channels | Tuple.to_list(kernel_size)])
end
@doc """
Calculates the shape of a convolution bias given the
input shape, output filters, and kernel size.

Kernel size must match the number of spatial dimensions
in the input (input rank - 2).

## Examples

    iex> Axon.Shape.conv_bias({nil, 3, 224, 224}, 32, {3, 3}, :first)
    {32}

    iex> Axon.Shape.conv_bias({nil, 3, 28}, 64, {2}, :first)
    {64}

    iex> Axon.Shape.conv_bias({nil, 1, 32, 32, 10}, 32, {2, 1, 3}, :first)
    {32}

    iex> Axon.Shape.conv_bias({nil, 28, 3}, 64, {2}, :last)
    {64}

  ### Error cases

    iex> Axon.Shape.conv_bias({nil, 1, 28, 28}, 32, {2}, :first)
    ** (ArgumentError) kernel size must have same rank (1) as number of spatial dimensions in the input (2)
"""
def conv_bias(input_shape, output_filters, kernel_size, _channels) do
  spatial_rank = Nx.rank(input_shape) - 2
  kernel_rank = Nx.rank(kernel_size)

  unless kernel_rank == spatial_rank do
    raise ArgumentError,
          "kernel size must have same rank (#{kernel_rank})" <>
            " as number of spatial dimensions in the input (#{spatial_rank})"
  end

  # Bias has one entry per output filter regardless of channel layout.
  {output_filters}
end
@doc """
Calculates the shape after a convolution layer with
the given parent shape, kernel shape, strides, padding,
input dilation and kernel dilation.

## Examples

    iex> Axon.Shape.conv({nil, 3, 224, 224}, {64, 3, 7, 7}, [3, 3], :same, [1, 1], [1, 1], :first)
    {nil, 64, 75, 75}

    iex> Axon.Shape.conv({32, 3, 32, 32}, {64, 3, 2, 2}, [1, 1], :valid, [1, 2], [1, 1], :first)
    {32, 64, 31, 62}

    iex> Axon.Shape.conv({nil, 3, 32}, {32, 3, 2}, [1], :valid, [1], [2], :first)
    {nil, 32, 30}

    iex> Axon.Shape.conv({nil, 28, 28, 3}, {64, 3, 4, 4}, [1, 1], :valid, [1, 1], [2, 2], :last)
    {nil, 22, 22, 64}
"""
def conv(
      parent_shape,
      kernel_shape,
      strides,
      padding,
      input_dilation,
      kernel_dilation,
      channels
    ) do
  unless Nx.rank(parent_shape) >= 3 do
    raise ArgumentError,
          "input shape must be at least rank 3," <>
            " got rank #{Nx.rank(parent_shape)}"
  end

  # Identity permutation over all axes (also used for the kernel).
  permutation = for i <- 0..(Nx.rank(parent_shape) - 1), do: i

  # For channels-last inputs, move the channel axis (last) to
  # position 1 so Nx.Shape.conv sees a channels-first layout;
  # the same permutation is used for input and output.
  in_out_permutation =
    if channels == :first do
      permutation
    else
      rank = tuple_size(parent_shape) - 1
      spatial = Enum.to_list(1..(rank - 1)//1)
      [0, rank | spatial]
    end

  names = List.duplicate(nil, Nx.rank(parent_shape))

  # Account for possibly nil batch dimension
  input_shape =
    if elem(parent_shape, 0) do
      parent_shape
    else
      put_elem(parent_shape, 0, 1)
    end

  # NOTE(review): the two literal 1s appear to be feature/batch group
  # counts (dense convolution) -- confirm against Nx.Shape.conv.
  {shape, _, _} =
    Nx.Shape.conv(
      input_shape,
      names,
      kernel_shape,
      names,
      strides,
      padding,
      1,
      1,
      input_dilation,
      kernel_dilation,
      in_out_permutation,
      permutation,
      in_out_permutation
    )

  # Restore the (possibly nil) batch dimension.
  put_elem(shape, 0, elem(parent_shape, 0))
end
@doc """
Calculates the reshape needed to broadcast convolution bias
over the given input shape.

If the bias is a vector, it is expanded with singleton batch
and spatial dimensions so it can broadcast against the conv
output; scalars and higher-rank biases are passed through.
"""
def conv_bias_reshape(input_shape, spatial_rank, channels) do
  case input_shape do
    {} ->
      # Scalar bias broadcasts as-is.
      {}

    {num_channels} ->
      ones = List.duplicate(1, spatial_rank)

      case channels do
        :first -> List.to_tuple([1, num_channels | ones])
        _ -> List.to_tuple([1 | ones] ++ [num_channels])
      end

    higher_rank when is_tuple(higher_rank) ->
      # Let a pre-shaped bias broadcast on its own.
      higher_rank
  end
end
@doc """
Calculates the shape after a transposed convolution layer
with the given parent shape, kernel shape, strides, padding,
and kernel dilation.

## Examples

    iex> Axon.Shape.conv_transpose({nil, 3, 3}, {6, 3, 2}, [1], :valid, [1], :first)
    {nil, 6, 4}

    iex> Axon.Shape.conv_transpose({nil, 3, 3}, {6, 3, 2}, [1], :valid, [1], :last)
    {nil, 4, 6}
"""
def conv_transpose(parent_shape, kernel_shape, strides, padding, kernel_dilation, channels) do
  unless Nx.rank(parent_shape) >= 3 do
    raise ArgumentError,
          "input shape must be at least rank 3," <>
            " got rank #{Nx.rank(parent_shape)}"
  end

  # Identity permutation over all axes (also used for the kernel).
  permutation = for i <- 0..(Nx.rank(parent_shape) - 1), do: i

  # For channels-last inputs, move the channel axis (last) to
  # position 1 so Nx.Shape.conv sees a channels-first layout.
  in_out_permutation =
    if channels == :first do
      permutation
    else
      rank = tuple_size(parent_shape) - 1
      spatial = Enum.to_list(1..(rank - 1)//1)
      [0, rank | spatial]
    end

  names = List.duplicate(nil, Nx.rank(parent_shape))

  # Transposed conv is computed as a regular conv with the input
  # dilated by `strides`, unit window strides, and adjusted padding.
  input_dilation = strides
  one = List.duplicate(1, Nx.rank(parent_shape) - 2)
  padding = conv_transpose_padding(kernel_shape, kernel_dilation, strides, padding)

  # Account for possibly nil batch dimension.
  input_shape =
    if elem(parent_shape, 0) do
      parent_shape
    else
      put_elem(parent_shape, 0, 1)
    end

  {shape, _, _} =
    Nx.Shape.conv(
      input_shape,
      names,
      kernel_shape,
      names,
      one,
      padding,
      1,
      1,
      input_dilation,
      kernel_dilation,
      in_out_permutation,
      permutation,
      in_out_permutation
    )

  # Restore the (possibly nil) batch dimension.
  put_elem(shape, 0, elem(parent_shape, 0))
end
@doc """
Calculates the padding needed for a transposed convolution.

For `:valid` and `:same` the padding is derived from the
effective (dilated) kernel size and the strides; any other
padding value is returned unchanged.
"""
def conv_transpose_padding(kernel_shape, kernel_dilation, strides, padding)
    when padding in [:valid, :same] do
  # Spatial dims are everything after {out_channels, in_channels}.
  spatial = kernel_shape |> Tuple.to_list() |> Enum.drop(2)

  dilation =
    if is_list(kernel_dilation) do
      kernel_dilation
    else
      List.duplicate(kernel_dilation, length(spatial))
    end

  # Effective kernel extent once dilation is applied.
  effective = Enum.zip_with(spatial, dilation, fn k, d -> (k - 1) * d + 1 end)

  case padding do
    :valid ->
      Enum.zip_with(effective, strides, fn k, s ->
        total = k + s - 2 + max(k - s, 0)
        lo = k - 1
        {lo, total - lo}
      end)

    :same ->
      Enum.zip_with(effective, strides, fn k, s ->
        total = k + s - 2

        lo =
          if s > k - 1 do
            k - 1
          else
            ceil(total / 2)
          end

        {lo, total - lo}
      end)
  end
end

def conv_transpose_padding(_, _, _, padding), do: padding
@doc """
Calculates the shape of a depthwise convolution kernel given the
input shape, channel multiplier, and kernel size.

Kernel size must match the number of spatial dimensions
in the input (input rank - 2).

## Examples

    iex> Axon.Shape.depthwise_conv_kernel({nil, 3, 224, 224}, 3, {3, 3}, :first)
    {9, 1, 3, 3}

    iex> Axon.Shape.depthwise_conv_kernel({nil, 3, 28}, 2, {2}, :first)
    {6, 1, 2}

    iex> Axon.Shape.depthwise_conv_kernel({nil, 1, 32, 32, 10}, 1, {2, 1, 3}, :first)
    {1, 1, 2, 1, 3}

    iex> Axon.Shape.depthwise_conv_kernel({nil, 28, 3}, 2, {2}, :last)
    {6, 1, 2}

  ### Error cases

    iex> Axon.Shape.depthwise_conv_kernel({nil, 1, 28, 28}, 32, {2}, :first)
    ** (ArgumentError) kernel size must have same rank (1) as number of spatial dimensions in the input (2)
"""
def depthwise_conv_kernel(input_shape, channel_multiplier, kernel_size, channels) do
  spatial_rank = Nx.rank(input_shape) - 2
  kernel_rank = Nx.rank(kernel_size)

  unless kernel_rank == spatial_rank do
    raise ArgumentError,
          "kernel size must have same rank (#{kernel_rank})" <>
            " as number of spatial dimensions in the input (#{spatial_rank})"
  end

  channel_idx = if channels == :first, do: 1, else: tuple_size(input_shape) - 1
  input_channels = elem(input_shape, channel_idx)

  # Each input channel gets `channel_multiplier` filters of depth 1.
  List.to_tuple([input_channels * channel_multiplier, 1 | Tuple.to_list(kernel_size)])
end
@doc """
Calculates the shape of a depthwise convolution bias given the
input shape, channel multiplier, and kernel size.

Kernel size must match the number of spatial dimensions
in the input (input rank - 2).

## Examples

    iex> Axon.Shape.depthwise_conv_bias({nil, 3, 224, 224}, 3, {3, 3}, :first)
    {9}

    iex> Axon.Shape.depthwise_conv_bias({nil, 3, 28}, 2, {2}, :first)
    {6}

    iex> Axon.Shape.depthwise_conv_bias({nil, 1, 32, 32, 10}, 1, {2, 1, 3}, :first)
    {1}

    iex> Axon.Shape.depthwise_conv_bias({nil, 28, 3}, 2, {2}, :last)
    {6}

  ### Error cases

    iex> Axon.Shape.depthwise_conv_bias({nil, 1, 28, 28}, 2, {2}, :first)
    ** (ArgumentError) kernel size must have same rank (1) as number of spatial dimensions in the input (2)
"""
def depthwise_conv_bias(input_shape, channel_multiplier, kernel_size, channels) do
  spatial_rank = Nx.rank(input_shape) - 2
  kernel_rank = Nx.rank(kernel_size)

  unless kernel_rank == spatial_rank do
    raise ArgumentError,
          "kernel size must have same rank (#{kernel_rank})" <>
            " as number of spatial dimensions in the input (#{spatial_rank})"
  end

  channel_idx = if channels == :first, do: 1, else: tuple_size(input_shape) - 1

  # One bias entry per output channel (channels * multiplier).
  {elem(input_shape, channel_idx) * channel_multiplier}
end
@doc """
Calculates the shape after a depthwise convolution layer with
the given parent shape, kernel shape, strides, padding, input
dilation, and kernel dilation.

## Examples

    iex> Axon.Shape.depthwise_conv({nil, 3, 224, 224}, {9, 1, 7, 7}, [3, 3], :same, [1, 1], [1, 1], :first)
    {nil, 9, 75, 75}

    iex> Axon.Shape.depthwise_conv({32, 3, 32, 32}, {9, 1, 2, 2}, [1, 1], :valid, [1, 2], [1, 1], :first)
    {32, 9, 31, 62}

    iex> Axon.Shape.depthwise_conv({nil, 3, 32}, {9, 1, 2}, [1], :valid, [1], [2], :first)
    {nil, 9, 30}
"""
def depthwise_conv(
      parent_shape,
      kernel_shape,
      strides,
      padding,
      input_dilation,
      kernel_dilation,
      channels
    ) do
  unless Nx.rank(parent_shape) >= 3 do
    raise ArgumentError,
          "input shape must be at least rank 3," <>
            " got rank #{Nx.rank(parent_shape)}"
  end

  # Identity permutation over all axes (also used for the kernel).
  permutation = for i <- 0..(Nx.rank(parent_shape) - 1), do: i

  # For channels-last inputs, move the channel axis (last) to
  # position 1 so Nx.Shape.conv sees a channels-first layout.
  in_out_permutation =
    if channels == :first do
      permutation
    else
      rank = tuple_size(parent_shape) - 1
      spatial = Enum.to_list(1..(rank - 1)//1)
      [0, rank | spatial]
    end

  names = List.duplicate(nil, Nx.rank(parent_shape))

  # Account for possibly nil batch dimension
  input_shape =
    if elem(parent_shape, 0) do
      parent_shape
    else
      put_elem(parent_shape, 0, 1)
    end

  input_channels =
    if channels == :first do
      elem(parent_shape, 1)
    else
      elem(parent_shape, tuple_size(parent_shape) - 1)
    end

  # Depthwise behavior comes from grouping by input channel:
  # the channel count is passed where conv/7 passes 1.
  {shape, _, _} =
    Nx.Shape.conv(
      input_shape,
      names,
      kernel_shape,
      names,
      strides,
      padding,
      input_channels,
      1,
      input_dilation,
      kernel_dilation,
      in_out_permutation,
      permutation,
      in_out_permutation
    )

  # Restore the (possibly nil) batch dimension.
  put_elem(shape, 0, elem(parent_shape, 0))
end
@doc """
Calculates the shape of a 2d depthwise separable convolution
kernel given the input shape, channel multiplier, kernel size
and parameter number.

Kernel size must match the number of spatial dimensions
in the input (input rank - 2).

## Examples

    iex> Axon.Shape.separable_conv2d_kernel({nil, 3, 32, 32}, 3, {3, 3}, 1, :first)
    {9, 1, 3, 1}

    iex> Axon.Shape.separable_conv2d_kernel({nil, 3, 32, 32}, 3, {3, 3}, 2, :first)
    {9, 1, 1, 3}

  ### Error cases

    iex> Axon.Shape.separable_conv2d_kernel({nil, 1, 28, 28}, 2, {2}, 1, :first)
    ** (ArgumentError) kernel size must have same rank (1) as number of spatial dimensions in the input (2)

    iex> Axon.Shape.separable_conv2d_kernel({nil, 1, 28, 28}, 2, {2, 2}, 3, :first)
    ** (ArgumentError) invalid kernel number
"""
def separable_conv2d_kernel(input_shape, channel_multiplier, kernel_size, num, channels) do
  spatial_rank = Nx.rank(input_shape) - 2
  kernel_rank = Nx.rank(kernel_size)

  unless kernel_rank == spatial_rank do
    raise ArgumentError,
          "kernel size must have same rank (#{kernel_rank})" <>
            " as number of spatial dimensions in the input (#{spatial_rank})"
  end

  channel_idx = if channels == :first, do: 1, else: tuple_size(input_shape) - 1
  out_channels = elem(input_shape, channel_idx) * channel_multiplier

  # Kernel `num` keeps its own spatial extent; the other axis is 1.
  case num do
    1 -> {out_channels, 1, elem(kernel_size, 0), 1}
    2 -> {out_channels, 1, 1, elem(kernel_size, 1)}
    _ -> raise ArgumentError, "invalid kernel number"
  end
end
@doc """
Calculates the shape of a 2d depthwise separable convolution
bias given the input shape, channel multiplier and kernel size.

Kernel size must match the number of spatial dimensions
in the input (input rank - 2).

## Examples

    iex> Axon.Shape.separable_conv2d_bias({nil, 3, 32, 32}, 3, {3, 3}, :first)
    {9}

    iex> Axon.Shape.separable_conv2d_bias({nil, 3, 32, 32}, 4, {3, 3}, :first)
    {12}

  ### Error cases

    iex> Axon.Shape.separable_conv2d_bias({nil, 1, 28, 28}, 2, {2}, :first)
    ** (ArgumentError) kernel size must have same rank (1) as number of spatial dimensions in the input (2)
"""
def separable_conv2d_bias(input_shape, channel_multiplier, kernel_size, channels) do
  spatial_rank = Nx.rank(input_shape) - 2
  kernel_rank = Nx.rank(kernel_size)

  unless kernel_rank == spatial_rank do
    raise ArgumentError,
          "kernel size must have same rank (#{kernel_rank})" <>
            " as number of spatial dimensions in the input (#{spatial_rank})"
  end

  channel_idx = if channels == :first, do: 1, else: tuple_size(input_shape) - 1

  # One bias entry per output channel (channels * multiplier).
  {elem(input_shape, channel_idx) * channel_multiplier}
end
@doc """
Calculates the shape of a 3-d depthwise separable convolution
kernel given the input shape, channel multiplier, kernel size,
and parameter number.

Kernel size must match the number of spatial dimensions
in the input (input rank - 2).

## Examples

    iex> Axon.Shape.separable_conv3d_kernel({nil, 3, 32, 32, 3}, 3, {3, 3, 3}, 1, :first)
    {9, 1, 3, 1, 1}

    iex> Axon.Shape.separable_conv3d_kernel({nil, 3, 32, 32, 3}, 4, {3, 3, 3}, 2, :first)
    {12, 1, 1, 3, 1}

    iex> Axon.Shape.separable_conv3d_kernel({nil, 3, 32, 32, 3}, 4, {3, 3, 3}, 3, :first)
    {12, 1, 1, 1, 3}

  ### Error cases

    iex> Axon.Shape.separable_conv3d_kernel({nil, 1, 28, 28, 3}, 3, {2}, 1, :first)
    ** (ArgumentError) kernel size must have same rank (1) as number of spatial dimensions in the input (3)
"""
def separable_conv3d_kernel(input_shape, channel_multiplier, kernel_size, num, channels) do
  unless Nx.rank(kernel_size) == Nx.rank(input_shape) - 2 do
    raise ArgumentError,
          "kernel size must have same rank (#{Nx.rank(kernel_size)})" <>
            " as number of spatial dimensions in the input (#{Nx.rank(input_shape) - 2})"
  end

  idx =
    if channels == :first do
      1
    else
      tuple_size(input_shape) - 1
    end

  # Kernel `num` keeps its own spatial extent; the other axes are 1.
  cond do
    num == 1 ->
      {elem(input_shape, idx) * channel_multiplier, 1, elem(kernel_size, 0), 1, 1}

    num == 2 ->
      {elem(input_shape, idx) * channel_multiplier, 1, 1, elem(kernel_size, 1), 1}

    num == 3 ->
      {elem(input_shape, idx) * channel_multiplier, 1, 1, 1, elem(kernel_size, 2)}

    true ->
      # Mirror separable_conv2d_kernel/5: raise ArgumentError instead
      # of letting an invalid `num` crash with a CondClauseError.
      raise ArgumentError, "invalid kernel number"
  end
end
@doc """
Calculates the shape of a 3-d depthwise separable convolution
bias given the input shape, channel multiplier and kernel size.

## Examples

    iex> Axon.Shape.separable_conv3d_bias({nil, 3, 224, 224, 3}, 3, {3, 3, 2}, :first)
    {9}

    iex> Axon.Shape.separable_conv3d_bias({nil, 3, 32, 32, 3}, 2, {2, 3, 2}, :first)
    {6}

    iex> Axon.Shape.separable_conv3d_bias({nil, 1, 224, 224, 3}, 5, {3, 3, 1}, :first)
    {5}

  ### Error cases

    iex> Axon.Shape.separable_conv3d_bias({nil, 1, 224, 224, 3}, 2, {2, 2}, :first)
    ** (ArgumentError) kernel size must have same rank (2) as number of spatial dimensions in the input (3)
"""
def separable_conv3d_bias(input_shape, channel_multiplier, kernel_size, channels) do
  spatial_rank = Nx.rank(input_shape) - 2
  kernel_rank = Nx.rank(kernel_size)

  unless kernel_rank == spatial_rank do
    raise ArgumentError,
          "kernel size must have same rank (#{kernel_rank})" <>
            " as number of spatial dimensions in the input (#{spatial_rank})"
  end

  channel_idx = if channels == :first, do: 1, else: tuple_size(input_shape) - 1

  # One bias entry per output channel (channels * multiplier).
  {elem(input_shape, channel_idx) * channel_multiplier}
end
@doc """
Calculates the output shape after a pooling operation
with the given parent shape, kernel size, strides, and
padding.

## Examples

    iex> Axon.Shape.pool({nil, 3, 32, 32}, {2, 2}, [1, 2], :valid, [1, 1], :first)
    {nil, 3, 31, 16}

    iex> Axon.Shape.pool({32, 1, 28, 28}, {1, 2}, [1, 1], :same, [1, 1], :first)
    {32, 1, 28, 28}
"""
def pool(parent_shape, kernel_size, strides, padding, dilations, channels) do
  unless Nx.rank(parent_shape) >= 3 do
    raise ArgumentError,
          "input shape must be at least rank 3," <>
            " got rank #{Nx.rank(parent_shape)}"
  end

  # Account for possibly nil batch dimension
  input_shape =
    if elem(parent_shape, 0) do
      parent_shape
    else
      put_elem(parent_shape, 0, 1)
    end

  # Batch and channel axes are never pooled: prepend identity
  # dilation/padding/stride entries for them.
  # NOTE(review): these prepended [1, 1 | ...] / [{0, 0}, {0, 0} | ...]
  # entries assume the first two axes are batch and channel; for
  # channels == :last only the kernel window below is reordered --
  # confirm this is correct for channels-last pooling.
  kernel_dilation = [1, 1 | dilations]

  padding =
    if is_list(padding),
      do: [{0, 0}, {0, 0} | padding],
      else: padding

  # Expand the spatial window with a leading 1 (batch)...
  kernel_size =
    kernel_size
    |> Tuple.insert_at(0, 1)

  # ...then place the channel's 1 at the front or back depending
  # on the channel layout.
  kernel_size =
    if channels == :first do
      Tuple.insert_at(kernel_size, 0, 1)
    else
      Tuple.append(kernel_size, 1)
    end

  strides = [1, 1 | strides]

  {shape, _} =
    Nx.Shape.pool(
      input_shape,
      kernel_size,
      strides,
      padding,
      kernel_dilation
    )

  # Restore the (possibly nil) batch dimension.
  put_elem(shape, 0, elem(parent_shape, 0))
end
@doc """
Calculates the output shape after a global pooling operation with
the given parent shape and option to keep axes.

## Examples

    iex> Axon.Shape.global_pool({nil, 3, 2, 1, 1}, false, :first)
    {nil, 3}

    iex> Axon.Shape.global_pool({nil, 3, 1}, true, :first)
    {nil, 3, 1}

    iex> Axon.Shape.global_pool({nil, 1, 3, 3, 2, 4, 2}, true, :first)
    {nil, 1, 1, 1, 1, 1, 1}

    iex> Axon.Shape.global_pool({nil, 28, 28, 3}, true, :last)
    {nil, 1, 1, 3}

    iex> Axon.Shape.global_pool({nil, 28, 28, 3}, false, :last)
    {nil, 3}
"""
def global_pool(parent_shape, keep_axes, channels) do
  # One iteration per spatial dimension (rank - batch - channel).
  for i <- 1..(Nx.rank(parent_shape) - 2), reduce: parent_shape do
    new_shape ->
      # Delete last element or replace last element with 1
      # (recomputed every iteration since deletion shrinks the tuple).
      last_elem = tuple_size(new_shape)

      if channels == :first do
        if keep_axes do
          # Spatial dims are the trailing axes; squash the i-th from
          # the end to 1 each pass.
          put_elem(new_shape, last_elem - i, 1)
        else
          # Drop the trailing spatial axis each pass.
          Tuple.delete_at(new_shape, last_elem - 1)
        end
      else
        if keep_axes do
          # Spatial dims sit between batch (0) and channel (last).
          put_elem(new_shape, i, 1)
        else
          # Repeatedly drop axis 1 until only batch and channel remain.
          Tuple.delete_at(new_shape, 1)
        end
      end
  end
end
@doc """
Calculates the window size of a pooling operation based on given
kernel size and spatial rank of the input.

`window_x` functions expect a window which matches the
rank of the input shape. For basic pooling we don't pool
across batch or channel dimensions, so we just specify
a size of `1` for each of those.
"""
def pool_window_size(window, spatial_rank, channels) do
  spatial_dims =
    case window do
      x when is_integer(x) ->
        # Scalar window applies uniformly to every spatial dim.
        List.duplicate(x, spatial_rank)

      x when is_tuple(x) ->
        Tuple.to_list(x)

      x ->
        # Fixed message formatting: was "integer , got" (stray space
        # before the comma).
        raise ArgumentError,
              "expected pool window to be tuple or integer" <>
                ", got #{inspect(x)}"
    end

  if channels == :first do
    List.to_tuple([1, 1 | spatial_dims])
  else
    List.to_tuple([1 | spatial_dims] ++ [1])
  end
end
@doc """
Calculates the output shape after an adaptive pooling operation
with the given parent shape and output size.

## Examples

    iex> Axon.Shape.adaptive_pool({nil, 3, 32, 32}, {27, 27}, :first)
    {nil, 3, 27, 27}

    iex> Axon.Shape.adaptive_pool({nil, 1, 28, 28}, {25, 25}, :first)
    {nil, 1, 25, 25}

  ### Error cases

    iex> Axon.Shape.adaptive_pool({nil, 1, 28, 28}, {30, 30}, :first)
    ** (ArgumentError) invalid output size for adaptive pool operation for input with shape {nil, 1, 28, 28} and output size {30, 30} each dimension of output size must be greater than or equal to spatial dimension of input
"""
def adaptive_pool(parent_shape, output_size, channels) do
  unless Nx.rank(parent_shape) >= 3 do
    raise ArgumentError,
          "input shape must be at least rank 3," <>
            " got rank #{Nx.rank(parent_shape)}"
  end

  channel_idx = if channels == :first, do: 1, else: tuple_size(parent_shape) - 1

  # Strip batch and channel axes to obtain the spatial dims.
  spatial_dims =
    parent_shape
    |> Tuple.delete_at(0)
    |> Tuple.delete_at(channel_idx - 1)
    |> Tuple.to_list()

  out_dims = Tuple.to_list(output_size)

  # Adaptive pooling can only shrink (or keep) spatial dims.
  valid_output_size? =
    spatial_dims
    |> Enum.zip_with(out_dims, &(&1 >= &2))
    |> Enum.all?()

  unless valid_output_size? do
    raise ArgumentError,
          "invalid output size for adaptive pool operation for" <>
            " input with shape #{inspect(parent_shape)} and output" <>
            " size #{inspect(output_size)} each dimension" <>
            " of output size must be greater than or equal to spatial" <>
            " dimension of input"
  end

  batch = elem(parent_shape, 0)
  channel = elem(parent_shape, channel_idx)

  if channels == :first do
    List.to_tuple([batch, channel | out_dims])
  else
    List.to_tuple([batch | out_dims] ++ [channel])
  end
end
@doc """
Calculates strides needed for an adaptive pooling operation
with the given input shape, output spatial shape, and spatial
rank.

Adaptive pooling functions adapt the strides of the window
according to:

    stride = div(input, output)

This preserves the size of the channel/batch dimension.
"""
def adaptive_pool_window_strides(input_shape, output_spatial, spatial_rank, channels) do
  channel_idx = if channels == :first, do: 1, else: tuple_size(input_shape) - 1

  # Strip batch and channel axes to obtain the spatial dims.
  input_spatial =
    input_shape
    |> Tuple.delete_at(0)
    |> Tuple.delete_at(channel_idx - 1)
    |> Tuple.to_list()

  output_spatial =
    case output_spatial do
      x when is_integer(x) ->
        List.duplicate(x, spatial_rank)

      x when is_tuple(x) ->
        Tuple.to_list(x)

      x ->
        raise ArgumentError,
              "expected output spatial dimensions to be tuple" <>
                " or integer, got #{inspect(x)}"
    end

  strides = Enum.zip_with(input_spatial, output_spatial, &div/2)

  # Batch and channel axes always stride by 1.
  if channels == :first do
    [1, 1 | strides]
  else
    [1 | strides] ++ [1]
  end
end
@doc """
Calculates the window size for an adaptive pooling operation
given input shape, strides, output spatial dimensions, and spatial
rank.

Adaptive pooling functions adopt the size of the window
according to:

    size = input_size - (output_size - 1) * stride

This preserves the size of the channel/batch dimension.
"""
def adaptive_pool_window_size(
      input_shape,
      [_, _ | stride],
      output_spatial,
      spatial_rank,
      channels
    ) do
  channel_idx = if channels == :first, do: 1, else: tuple_size(input_shape) - 1

  # Strip batch and channel axes to obtain the spatial dims.
  input_spatial =
    input_shape
    |> Tuple.delete_at(0)
    |> Tuple.delete_at(channel_idx - 1)
    |> Tuple.to_list()

  output_spatial =
    case output_spatial do
      x when is_integer(x) ->
        List.duplicate(x, spatial_rank)

      x when is_tuple(x) ->
        Tuple.to_list(x)

      x ->
        raise ArgumentError,
              "expected output spatial dimensions to be tuple" <>
                " or integer, got #{inspect(x)}"
    end

  window =
    Enum.zip_with([input_spatial, output_spatial, stride], fn [input, output, s] ->
      input - (output - 1) * s
    end)

  # Batch and channel window extents are always 1.
  if channels == :first do
    List.to_tuple([1, 1 | window])
  else
    List.to_tuple([1 | window] ++ [1])
  end
end
@doc """
Calculates the gamma/beta shape of a normalization layer
given the input shape and channel index.

## Examples

    iex> Axon.Shape.norm_param({nil, 3, 28, 28}, 1)
    {3}

    iex> Axon.Shape.norm_param({nil, 28, 28, 3}, 3)
    {3}
"""
def norm_param(parent_shape, channel_index), do: {elem(parent_shape, channel_index)}
@doc """
Calculates the reduction axes for batch normalization.

All axes except the channel axis are reduced over.
"""
def batch_norm_axes(axes, channel_index) do
  Enum.reject(axes, &(&1 == channel_index))
end
@doc """
Calculates the reduction axes for instance normalization.

Reduces over every axis except batch (0) and channel; raises
if no axes remain (input rank below 3).
"""
def instance_norm_axes(axes, channel_index) do
  case axes -- [0, channel_index] do
    [] -> raise ArgumentError, "rank of input shape must be at least 3"
    reduction_axes -> reduction_axes
  end
end
@doc """
Calculates the reduction axes for group normalization.

All axes from 1 up to rank - 2, plus the final axis.
"""
def group_norm_axes(rank) do
  Enum.to_list(1..(rank - 2)) ++ [rank - 1]
end
@doc """
Calculates the reshape for group normalization.

Splits the channel axis of `shape` into `{num_groups, group_size}`,
where `num_groups` is `div(channels, group_size)`.
"""
def group_norm_shape(shape, group_size, channel_index) do
  # elem/2 is zero-based, replacing the 1-based :erlang.element/2 call.
  channels = elem(shape, channel_index)
  num_groups = div(channels, group_size)

  shape
  |> Tuple.delete_at(channel_index)
  |> Tuple.insert_at(channel_index, num_groups)
  |> Tuple.insert_at(channel_index + 1, group_size)
end
@doc """
Calculates the shape after a flatten layer, which
flattens the non-minibatch dimensions into a single
dimension.

## Examples

    iex> Axon.Shape.flatten({nil, 1, 28, 28})
    {nil, 784}

    iex> Axon.Shape.flatten({32, 128})
    {32, 128}

    iex> Axon.Shape.flatten({nil, 10, 10})
    {nil, 100}

  ### Error cases

    iex> Axon.Shape.flatten({nil})
    ** (ArgumentError) expected flatten input shape to have at least rank 2, got {nil} with rank 1
"""
def flatten(shape) do
  unless Nx.rank(shape) >= 2 do
    raise ArgumentError,
          "expected flatten input shape to have at least" <>
            " rank 2, got #{inspect(shape)} with rank #{Nx.rank(shape)}"
  end

  batch = elem(shape, 0)

  # Account for possibly `nil` batch dimension: a nil batch cannot
  # participate in Nx.size/1, so size the trailing dims directly.
  out_units =
    case batch do
      nil -> Nx.size(Tuple.delete_at(shape, 0))
      _ -> div(Nx.size(shape), batch)
    end

  {batch, out_units}
end
@doc """
Calculates the shape after a concatenate layer, which
concatenates inputs along the given dimension.

## Examples

    iex> Axon.Shape.concatenate([{nil, 32}, {nil, 12}], 1)
    {nil, 44}

    iex> Axon.Shape.concatenate([{nil, 24, 32}, {nil, 24, 15}, {nil, 24, 10}], 2)
    {nil, 24, 57}

  ### Error cases

    iex> Axon.Shape.concatenate([{10, 32}, {5, 32}], 1)
    ** (ArgumentError) non-concat dims must be equal got 5 and 10 while concatenating on axis 1
"""
def concatenate([first | _] = input_shapes, axis) do
  # Axon shapes carry no axis names; supply nil names for each input.
  names = List.duplicate(nil, Nx.rank(first))
  all_names = Enum.map(input_shapes, fn _ -> names end)
  {out_shape, _out_names} = Nx.Shape.concatenate(input_shapes, all_names, axis)
  out_shape
end
@doc """
Calculates the shape after a reshape layer, which
reshapes non-batch dimensions.

## Examples

    iex> Axon.Shape.reshape({nil, 8}, {4, 2}, false)
    {nil, 4, 2}

    iex> Axon.Shape.reshape({32, 8, 8}, {4, 4, 4}, false)
    {32, 4, 4, 4}

    iex> Axon.Shape.reshape({12, 2, 2}, {6, 2, 2, 2}, true)
    {6, 2, 2, 2}

  ### Error cases

    iex> Axon.Shape.reshape({nil, 4, 2}, {9}, false)
    ** (ArgumentError) new shape invalid for reshape operation, layer shape {nil, 4, 2} is incompatible with new shape {9}, new shape must have same size as non-batch dimensions of old shape
"""
def reshape(shape, new_shape, is_constant_reshape?) do
  # Constant reshapes compare the full shape; otherwise only the
  # non-batch dimensions must agree in size.
  compare_shape =
    if is_constant_reshape?, do: shape, else: Tuple.delete_at(shape, 0)

  unless Nx.size(compare_shape) == Nx.size(new_shape) do
    raise ArgumentError,
          "new shape invalid for reshape operation," <>
            " layer shape #{inspect(shape)} is incompatible" <>
            " with new shape #{inspect(new_shape)}, new shape" <>
            " must have same size as non-batch dimensions of old shape"
  end

  if is_constant_reshape? do
    new_shape
  else
    # Re-attach the (possibly nil) batch dimension.
    Tuple.insert_at(new_shape, 0, elem(shape, 0))
  end
end
@doc """
Calculates the shape after a transpose layer, which
transposes non-batch dimensions.

## Examples

    iex> Axon.Shape.transpose({nil, 64, 10}, [1, 0], true)
    {nil, 10, 64}

    iex> Axon.Shape.transpose({nil, 3, 224, 224}, [1, 0, 2], true)
    {nil, 224, 3, 224}

    iex> Axon.Shape.transpose({1, 2, 3}, [2, 1, 0], false)
    {3, 2, 1}
"""
def transpose(shape, permutation, ignore_batch?) do
  # With ignore_batch?, the permutation addresses only non-batch axes.
  working_shape = if ignore_batch?, do: Tuple.delete_at(shape, 0), else: shape

  names = List.duplicate(nil, Nx.rank(working_shape))
  {transposed, _names} = Nx.Shape.transpose(working_shape, permutation, names)

  if ignore_batch? do
    # Re-attach the (possibly nil) batch dimension.
    Tuple.insert_at(transposed, 0, elem(shape, 0))
  else
    transposed
  end
end
@doc """
Calculates the shape after a pad layer, which pads
the spatial dimensions of an input.

## Examples

    iex> Axon.Shape.pad({nil, 3, 28, 28}, [{0, 1}, {1, 1}])
    {nil, 3, 29, 30}

    iex> Axon.Shape.pad({nil, 3, 30, 30}, [{2, -1}, {1, 1}])
    {nil, 3, 31, 32}

  ### Error cases

    iex> Axon.Shape.pad({nil, 784}, [{0, 1}])
    ** (ArgumentError) invalid padding configuration [{0, 1}], length of padding configuration must be equal to the rank of the spatial dimensions of the input
"""
def pad(shape, config) do
  # The guard checks rank >= 1; the message previously claimed
  # "rank 3", which contradicted both the condition and the doctest
  # above (a rank-2 input must fall through to the config check).
  unless Nx.rank(shape) >= 1 do
    raise ArgumentError,
          "input shape must be at least rank 1," <>
            " got rank #{Nx.rank(shape)}"
  end

  unless length(config) == Nx.rank(shape) - 2 do
    raise ArgumentError,
          "invalid padding configuration #{inspect(config)}," <>
            " length of padding configuration must be equal" <>
            " to the rank of the spatial dimensions of the" <>
            " input"
  end

  # Account for possibly nil batch dimension.
  inp_shape =
    if elem(shape, 0) == nil do
      put_elem(shape, 0, 1)
    else
      shape
    end

  # No padding on batch/channel; interior padding (third element) is 0.
  padding_config = [{0, 0, 0}, {0, 0, 0} | Enum.map(config, fn {x, y} -> {x, y, 0} end)]
  output_shape = Nx.Shape.pad(inp_shape, padding_config)
  put_elem(output_shape, 0, elem(shape, 0))
end
@doc """
Calculates the noise shape from a spatial dropout operation
based on the input shape.

Spatial dropout shapes are broadcasted across feature
channels, so we set the channel size to 1 and preserve
the spatial dimensions.
"""
def spatial_dropout_noise_shape(input_shape) do
  # put_elem/3 is the idiomatic, zero-based equivalent of the previous
  # :erlang.setelement(2, ...) call. Assumes the channel axis is at
  # index 1 (channels-first) -- TODO confirm for channels-last inputs.
  put_elem(input_shape, 1, 1)
end
@doc """
Calculates output shape of RNN.
"""
def rnn(shape, units, type) do
  # A rank-3 {batch, sequence, features} tuple is required; anything
  # else takes the raise path.
  case shape do
    {batch, seq_len, _features} ->
      {batch, seq_len, units}

    _ ->
      raise ArgumentError,
            "#{inspect(type)} input shape must be rank 3 {batch_size, sequence_length, sequence_features}" <>
              " got #{inspect(shape)}"
  end
end
@doc """
Calculates the shape of RNN input kernel.
"""
def rnn_input_kernel(shape, units, type) do
unless Nx.rank(shape) == 3 do
raise ArgumentError,
"#{inspect(type)} input shape must be rank 3 {batch_size, sequence_length, sequence_features}" <>
" got #{inspect(shape)}"
end
{elem(shape, 2), units}
end
@doc """
Calculates the shape of RNN hidden kernel.
"""
def rnn_hidden_kernel(shape, units, type) do
unless Nx.rank(shape) == 3 do
raise ArgumentError,
"#{inspect(type)} input shape must be rank 3 {batch_size, sequence_length, sequence_features}" <>
" got #{inspect(shape)}"
end
{units, units}
end
@doc """
Calculates the shape of RNN bias.
"""
def rnn_bias(shape, units, type) do
unless Nx.rank(shape) == 3 do
raise ArgumentError,
"#{inspect(type)} input shape must be rank 3 {batch_size, sequence_length, sequence_features}" <>
" got #{inspect(shape)}"
end
{units}
end
@doc """
Calculates the shape of RNN hidden state.
"""
def rnn_hidden_state(shape, units, :conv_lstm) do
# input shape must be rank > 3 {batch_size, sequence_length, spacial_dimensions...}"
shape
|> put_elem(1, 1)
|> put_elem(2, units)
end
def rnn_hidden_state(shape, units, type) do
unless Nx.rank(shape) == 3 do
raise ArgumentError,
"#{inspect(type)} input shape must be rank 3 {batch_size, sequence_length, sequence_features}" <>
" got #{inspect(shape)}"
end
{elem(shape, 0), 1, units}
end
@doc """
Calculates the base shape and slice size of a split operation.
## Examples
iex> Axon.Shape.split({nil, 1, 10}, 2, -1)
{5, {nil, 1, 5}}
iex> Axon.Shape.split({32, 1, 10}, 16, 0)
{2, {2, 1, 10}}
### Error cases
iex> Axon.Shape.split({nil, 1, 10}, 2, 0)
** (ArgumentError) cannot split along batch dimension with dynamic batch size, please provide a static (non-nil) batch size
iex> Axon.Shape.split({nil, 5, 10}, 2, 1)
** (ArgumentError) unable to create 2 even splits along axis 1 of size 5
"""
def split(shape, n, axis) do
unless Nx.rank(shape) >= 1 do
raise ArgumentError,
"input shape must be at least rank 3," <>
" got rank #{Nx.rank(shape)}"
end
nil_names = List.duplicate(nil, Nx.rank(shape))
non_nil_shape = if elem(shape, 0) == nil, do: put_elem(shape, 0, 1), else: shape
axis = Nx.Shape.normalize_axis(non_nil_shape, axis, nil_names)
if axis == 0 and elem(shape, 0) == nil do
raise ArgumentError,
"cannot split along batch dimension with dynamic" <>
" batch size, please provide a static (non-nil)" <>
" batch size"
end
unless rem(elem(shape, axis), n) == 0 do
raise ArgumentError,
"unable to create #{n} even splits along axis #{axis}" <>
" of size #{elem(shape, axis)}"
end
slice_size = div(elem(shape, axis), n)
{slice_size, put_elem(shape, axis, slice_size)}
end
@doc """
Checks if input shapes are broadcast compatible and returns
the output shape of the element-wise operation.
## Examples
iex> Axon.Shape.element_wise([{1, 128}, {128, 128}])
{128, 128}
iex> Axon.Shape.element_wise([{1, 32, 1}, {28, 1, 1}, {28, 1, 14}])
{28, 32, 14}
iex> Axon.Shape.element_wise([{nil, 32}, {nil, 32}])
{nil, 32}
### Error cases
iex> Axon.Shape.element_wise([{128, 1}, {nil, 32}])
** (ArgumentError) cannot broadcast tensor of dimensions {nil, 32} to {128, 1}
"""
def element_wise([first | rest]) do
Enum.reduce(rest, first, fn shape, target_shape ->
lnames = List.duplicate(nil, tuple_size(shape))
rnames = List.duplicate(nil, tuple_size(target_shape))
# TODO(seanmor5): If this fails, I wonder if it's better to rescue
# and re-raise with Axon specific messages?
{out_shape, _} = Nx.Shape.binary_broadcast(shape, lnames, target_shape, rnames)
out_shape
end)
end
@doc """
Computes the output shape after a resize layer.
"""
def resize(input_shape, output_shape, channels) do
unless Nx.rank(input_shape) >= 3 do
raise ArgumentError, "input shape must be at least rank 3"
end
unless Nx.rank(output_shape) == Nx.rank(input_shape) - 2 do
raise ArgumentError,
"output shape must be equal to number of" <>
" spatial dimensions in the input"
end
spatial_dimensions =
case channels do
:first ->
Enum.to_list(2..(Nx.rank(input_shape) - 1))
:last ->
Enum.to_list(1..(Nx.rank(output_shape) - 2))
invalid ->
raise ArgumentError, "invalid channel configuration #{inspect(invalid)}"
end
for {d, i} <- Enum.with_index(spatial_dimensions), reduce: input_shape do
shape ->
put_elem(shape, d, elem(output_shape, i))
end
end
end
| 27.809329 | 225 | 0.599291 |
1c2b5d5b3300c2edefd8ac8f5dea7ec4d3b89008 | 866 | exs | Elixir | mix.exs | kentaro/neotomex | 3f334813e79d69d31d439acd76224ecf3f9554b2 | [
"BSD-3-Clause"
] | 58 | 2015-08-21T22:20:27.000Z | 2021-11-01T16:37:33.000Z | mix.exs | kentaro/neotomex | 3f334813e79d69d31d439acd76224ecf3f9554b2 | [
"BSD-3-Clause"
] | 10 | 2015-08-24T11:50:10.000Z | 2020-02-28T03:55:03.000Z | mix.exs | kentaro/neotomex | 3f334813e79d69d31d439acd76224ecf3f9554b2 | [
"BSD-3-Clause"
] | 14 | 2015-09-28T23:37:08.000Z | 2021-11-01T16:34:15.000Z | defmodule Neotomex.Mixfile do
use Mix.Project
use Mix.Config
@github "https://github.com/jtmoulia/neotomex"
def project do
[app: :neotomex,
version: "0.1.7",
elixir: ">= 1.3.0",
name: "Neotomex",
source_url: @github,
deps: deps(),
description: description(),
package: package()]
end
def application do
[applications: applications(Mix.env)]
end
defp applications(:dev) do
[:dbg]
end
defp applications(_) do
[]
end
defp deps do
[{:dbg, "~> 1.0", only: :dev},
{:earmark, "~> 1.2", only: :dev},
{:ex_doc, "~> 0.16", only: :dev}]
end
  # Hex package description shown on hex.pm (heredoc keeps trailing newline).
  defp description do
    """
    A PEG parser/transformer with a pleasant Elixir DSL.
    """
  end
defp package do
[maintainers: ["Thomas Moulia"],
licenses: ["BSD 3-Clause License"],
links: %{github: @github}]
end
end
| 18.425532 | 56 | 0.584296 |
1c2b73e0839ae3d90edc4feb7721fb3d3ce59ec4 | 2,403 | exs | Elixir | rel/config.exs | SmartColumbusOS/smartcitiesdata | c8553d34631c822b034945eebf396994bf1001ff | [
"Apache-2.0"
] | 1 | 2021-04-05T19:17:18.000Z | 2021-04-05T19:17:18.000Z | rel/config.exs | SmartColumbusOS/smartcitiesdata | c8553d34631c822b034945eebf396994bf1001ff | [
"Apache-2.0"
] | 11 | 2020-01-07T15:43:42.000Z | 2020-12-22T15:23:25.000Z | rel/config.exs | SmartColumbusOS/smartcitiesdata | c8553d34631c822b034945eebf396994bf1001ff | [
"Apache-2.0"
] | null | null | null | ~w(rel plugins *.exs)
|> Path.join()
|> Path.wildcard()
|> Enum.map(&Code.eval_file(&1))
# Bring in the Distillery release configuration DSL; the default
# environment follows the current Mix env.
use Distillery.Releases.Config,
    default_release: :default,
    default_environment: Mix.env()
# Development releases: compile in dev mode and skip bundling ERTS.
environment :dev do
  set dev_mode: true
  set include_erts: false
  # NOTE(review): a distribution cookie is committed to source control here;
  # cookies should normally be injected at deploy time instead.
  set cookie: :"_q24F/BBb@JBm)P%lZ~ign&WJ:]:TaT3i5M)ARX>^Sv$1@5!8vdEWb3uuTpj4pH<"
end
# Production releases: bundle ERTS, strip sources, and load runtime
# configuration from etc/runtime.exs at boot via the Elixir config provider.
environment :prod do
  set include_erts: true
  set include_src: false
  # NOTE(review): committed cookie — should be rotated/injected at deploy time.
  set cookie: :"UmFgS>>q;?q`kquWZ`G=c!4/Qrug]c]EuL}9koyB}a)=R)E|H>4Q(t$H`0f8)OZ~"
  set vm_args: "rel/vm.args"
  set config_providers: [{Distillery.Releases.Config.Providers.Elixir, ["${RELEASE_ROOT_DIR}/etc/runtime.exs"]}]
  set pre_configure_hooks: "rel/hooks/pre_configure.d"
end
# One release per umbrella service. Every entry follows the same pattern:
# version taken from the app's mix project, the app itself plus
# :runtime_tools (for remote observability), and the app's runtime.exs
# copied into etc/ where the prod config provider expects it.
release :andi do
  set version: current_version(:andi)
  set applications: [:runtime_tools, :andi]
  set overlays: [{:copy, "apps/andi/runtime.exs", "etc/runtime.exs"}]
end

release :discovery_streams do
  set version: current_version(:discovery_streams)
  set applications: [:runtime_tools, :discovery_streams]
  set overlays: [{:copy, "apps/discovery_streams/runtime.exs", "etc/runtime.exs"}]
end

release :estuary do
  set version: current_version(:estuary)
  set applications: [:runtime_tools, :estuary]
  set overlays: [{:copy, "apps/estuary/runtime.exs", "etc/runtime.exs"}]
end

release :flair do
  set version: current_version(:flair)
  set applications: [:runtime_tools, :flair]
  set overlays: [{:copy, "apps/flair/runtime.exs", "etc/runtime.exs"}]
end

release :forklift do
  set version: current_version(:forklift)
  set applications: [:runtime_tools, :forklift]
  set overlays: [{:copy, "apps/forklift/runtime.exs", "etc/runtime.exs"}]
end

release :odo do
  set version: current_version(:odo)
  set applications: [:runtime_tools, :odo]
  set overlays: [{:copy, "apps/odo/runtime.exs", "etc/runtime.exs"}]
end

release :reaper do
  set version: current_version(:reaper)
  set applications: [:runtime_tools, :reaper]
  set overlays: [{:copy, "apps/reaper/runtime.exs", "etc/runtime.exs"}]
end

release :valkyrie do
  set version: current_version(:valkyrie)
  set applications: [:runtime_tools, :valkyrie]
  set overlays: [{:copy, "apps/valkyrie/runtime.exs", "etc/runtime.exs"}]
end

release :discovery_api do
  set version: current_version(:discovery_api)
  set applications: [:runtime_tools, :discovery_api]
  set overlays: [{:copy, "apps/discovery_api/runtime.exs", "etc/runtime.exs"}]
end
| 30.807692 | 112 | 0.726176 |
1c2ba0ed1c0f0ece5c535461fb24449943057bb0 | 492 | ex | Elixir | apps/hefty/lib/hefty/algos/naive/symbol_supervisor.ex | Cinderella-Man/crypto-streamer | b1e990d375f7143c5149930be991249f0d9c3ee3 | [
"MIT"
] | 49 | 2019-10-28T22:27:28.000Z | 2021-10-11T06:40:29.000Z | apps/hefty/lib/hefty/algos/naive/symbol_supervisor.ex | Cinderella-Man/crypto-streamer | b1e990d375f7143c5149930be991249f0d9c3ee3 | [
"MIT"
] | 9 | 2019-08-30T13:15:36.000Z | 2019-10-10T21:25:14.000Z | apps/hefty/lib/hefty/algos/naive/symbol_supervisor.ex | Cinderella-Man/crypto-streamer | b1e990d375f7143c5149930be991249f0d9c3ee3 | [
"MIT"
] | 7 | 2019-10-31T06:19:26.000Z | 2021-09-30T04:20:58.000Z | defmodule Hefty.Algos.Naive.SymbolSupervisor do
use Supervisor
def start_link(symbol) do
Supervisor.start_link(
__MODULE__,
symbol,
name: :"#{__MODULE__}-#{symbol}"
)
end
def init(symbol) do
Supervisor.init(
[
{
DynamicSupervisor,
strategy: :one_for_one, name: :"Hefty.Algos.Naive.DynamicSupervisor-#{symbol}"
},
{Hefty.Algos.Naive.Leader, symbol}
],
strategy: :one_for_all
)
end
end
| 19.68 | 88 | 0.597561 |
1c2bbb1cec539996b6818fb51fb1012df2ae4581 | 1,869 | ex | Elixir | apps/tai/lib/tai/venue_adapters/bitmex/positions.ex | chrism2671/tai-1 | 847827bd23908adfad4a82c83d5295bdbc022796 | [
"MIT"
] | null | null | null | apps/tai/lib/tai/venue_adapters/bitmex/positions.ex | chrism2671/tai-1 | 847827bd23908adfad4a82c83d5295bdbc022796 | [
"MIT"
] | null | null | null | apps/tai/lib/tai/venue_adapters/bitmex/positions.ex | chrism2671/tai-1 | 847827bd23908adfad4a82c83d5295bdbc022796 | [
"MIT"
] | 1 | 2020-05-03T23:32:11.000Z | 2020-05-03T23:32:11.000Z | defmodule Tai.VenueAdapters.Bitmex.Positions do
def positions(venue_id, credential_id, credentials) do
venue_credentials = to_venue_credentials(credentials)
with {:ok, venue_positions, _rate_limit} <-
ExBitmex.Rest.Position.Index.get(venue_credentials) do
positions =
venue_positions
|> Enum.map(&build(&1, venue_id, credential_id))
|> Enum.filter(& &1)
{:ok, positions}
else
{:error, reason, _} ->
{:error, reason}
end
end
  # Converts a Tai credentials map into the struct shape ExBitmex expects.
  defdelegate to_venue_credentials(credentials),
    to: Tai.VenueAdapters.Bitmex.Credentials,
    as: :from
  # Flat positions (qty 0) map to nil and are filtered out by the caller.
  defp build(%ExBitmex.Position{current_qty: 0}, _, _), do: nil

  # Maps a BitMEX position to a Tai.Trading.Position.
  defp build(venue_position, venue_id, credential_id) do
    # TODO: This should come from products
    # NOTE(review): String.to_atom/1 on venue-supplied symbols creates atoms
    # dynamically; acceptable for a fixed instrument universe — see the TODO.
    product_symbol =
      venue_position.symbol
      |> String.downcase()
      |> String.to_atom()

    %Tai.Trading.Position{
      venue_id: venue_id,
      credential_id: credential_id,
      product_symbol: product_symbol,
      side: venue_position |> side(),
      qty: venue_position |> qty(),
      entry_price: venue_position |> entry_price(),
      leverage: venue_position |> leverage(),
      margin_mode: venue_position |> margin_mode()
    }
  end
  # Positive quantity is long, negative is short. Qty 0 never reaches these
  # helpers: build/3 returns nil for flat positions before calling them.
  defp side(%ExBitmex.Position{current_qty: qty}) when qty > 0, do: :long
  defp side(%ExBitmex.Position{current_qty: qty}) when qty < 0, do: :short

  # Quantity is reported as an absolute (unsigned) Decimal.
  defp qty(%ExBitmex.Position{current_qty: qty}) when qty > 0, do: Decimal.new(qty)
  defp qty(%ExBitmex.Position{current_qty: qty}) when qty < 0, do: Decimal.new(-qty)

  defp entry_price(%ExBitmex.Position{avg_entry_price: p}), do: Decimal.cast(p)
  defp leverage(%ExBitmex.Position{leverage: l}), do: Decimal.new(l)

  # BitMEX reports cross margin as a boolean flag.
  defp margin_mode(%ExBitmex.Position{cross_margin: true}), do: :crossed
  defp margin_mode(%ExBitmex.Position{cross_margin: false}), do: :fixed
end
| 32.789474 | 84 | 0.683253 |
1c2c551b561fcd9cddd5181717e9a00a51e21334 | 2,189 | ex | Elixir | lib/aoc2019_day4.ex | hvnsweeting/adventofcode2018 | 8e5a85ebb7b102361b844b0f92522c18148a672a | [
"BSD-3-Clause"
] | 1 | 2022-01-10T02:34:18.000Z | 2022-01-10T02:34:18.000Z | lib/aoc2019_day4.ex | hvnsweeting/adventofcode2018 | 8e5a85ebb7b102361b844b0f92522c18148a672a | [
"BSD-3-Clause"
] | null | null | null | lib/aoc2019_day4.ex | hvnsweeting/adventofcode2018 | 8e5a85ebb7b102361b844b0f92522c18148a672a | [
"BSD-3-Clause"
] | 1 | 2019-12-02T09:42:17.000Z | 2019-12-02T09:42:17.000Z | defmodule Aoc2019Day4 do
@moduledoc """
https://adventofcode.com/2019/day/4
"""
def has_at_least_to_same_adjacent_digits?([]) do
false
end
def has_at_least_to_same_adjacent_digits?([h | t]) do
next = List.first(t)
if next == nil do
false
else
if h == next do
true
else
has_at_least_to_same_adjacent_digits?(t)
end
end
end
def has_two_adjacent_digits?([h | t]) do
has_two_adjacent_digits?(t, h, 1, [])
end
defp has_two_adjacent_digits?([], tmp, count, acc) do
final = acc ++ [{tmp, count}]
good = final |> Enum.filter(fn {x, count} -> count == 2 end)
length(good) > 0
end
defp has_two_adjacent_digits?([h | t], tmp, count, acc) do
if h == tmp do
has_two_adjacent_digits?(t, h, count + 1, acc)
else
has_two_adjacent_digits?(t, h, 1, acc ++ [{tmp, count}])
end
end
def is_never_decrease?([h | t]) do
next = List.first(t)
if next == nil do
true
else
if h > next do
false
else
is_never_decrease?(t)
end
end
end
def is_within_range?(password, start, stop) do
n = String.to_integer(password)
start <= n && n <= stop
end
def valid_password?(password, start, stop) do
Enum.all?([
is_within_range?(password, start, stop),
has_at_least_to_same_adjacent_digits?(String.to_charlist(password)),
is_never_decrease?(String.to_charlist(password))
])
end
def valid_password2?(password, start, stop) do
Enum.all?([
is_within_range?(password, start, stop),
has_two_adjacent_digits?(String.to_charlist(password)),
is_never_decrease?(String.to_charlist(password))
])
end
def solve_part_1(start, stop) do
start..stop
|> Enum.map(&Integer.to_string/1)
|> Enum.reduce(0, fn x, acc ->
if valid_password?(x, start, stop) do
acc + 1
else
acc
end
end)
end
def solve_part_2(start, stop) do
start..stop
|> Enum.map(&Integer.to_string/1)
|> Enum.reduce(0, fn x, acc ->
if valid_password2?(x, start, stop) do
acc + 1
else
acc
end
end)
end
end
| 21.673267 | 74 | 0.604842 |
1c2c6e7ae4df58c3bd6ad28e766976bf5047883b | 1,699 | ex | Elixir | clients/docs/lib/google_api/docs/v1/model/named_ranges.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/docs/lib/google_api/docs/v1/model/named_ranges.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/docs/lib/google_api/docs/v1/model/named_ranges.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Docs.V1.Model.NamedRanges do
  @moduledoc """
  A collection of all the NamedRanges in the document that share a given name.

  ## Attributes

  *   `name` (*type:* `String.t`, *default:* `nil`) - The name that all the named ranges share.
  *   `namedRanges` (*type:* `list(GoogleApi.Docs.V1.Model.NamedRange.t)`, *default:* `nil`) - The NamedRanges that share the same name.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :name => String.t() | nil,
          :namedRanges => list(GoogleApi.Docs.V1.Model.NamedRange.t()) | nil
        }

  # Generated field definitions mirror the JSON wire format; each element of
  # :namedRanges decodes as a NamedRange model.
  field(:name)
  field(:namedRanges, as: GoogleApi.Docs.V1.Model.NamedRange, type: :list)
end
# Delegates Poison decoding to the generated model helpers.
defimpl Poison.Decoder, for: GoogleApi.Docs.V1.Model.NamedRanges do
  def decode(value, options) do
    GoogleApi.Docs.V1.Model.NamedRanges.decode(value, options)
  end
end
# Delegates Poison encoding to the shared Gax model base.
defimpl Poison.Encoder, for: GoogleApi.Docs.V1.Model.NamedRanges do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 33.98 | 136 | 0.720424 |
1c2c6f188aac452e272b0a5223d1f7908f64eb29 | 561 | ex | Elixir | test/support/simple_provider.ex | kianmeng/ueberauth | 4b8b1e9002ba91d9d4ec14d94bdfaf13221e1515 | [
"MIT"
] | null | null | null | test/support/simple_provider.ex | kianmeng/ueberauth | 4b8b1e9002ba91d9d4ec14d94bdfaf13221e1515 | [
"MIT"
] | null | null | null | test/support/simple_provider.ex | kianmeng/ueberauth | 4b8b1e9002ba91d9d4ec14d94bdfaf13221e1515 | [
"MIT"
] | null | null | null | defmodule Support.SimpleProvider do
@moduledoc false
use Ueberauth.Strategy
def uid(%{params: %{"id" => id}} = _conn), do: id
def credentials(%{params: %{"code" => code}} = conn) do
prefix = options(conn)[:token_prefix]
%Ueberauth.Auth.Credentials{
token: "#{prefix}#{code}"
}
end
def handle_request!(conn) do
callback = options(conn)[:callback_path]
conn
|> redirect!("#{callback}?code=#{uid(conn)}")
end
def handle_callback!(conn) do
conn
|> Plug.Conn.assign(:ueberauth_auth, auth(conn))
end
end
| 20.035714 | 57 | 0.632799 |
1c2ca182965556ceaa15943ba353a21277cab274 | 1,547 | ex | Elixir | apps/voyager/lib/voyager_web/views/error_helpers.ex | msk-access/seqosystem | 4d99c50a0b0bc74c7f9f899be4eda8eddf6e5a39 | [
"Apache-2.0"
] | 3 | 2020-11-24T07:45:26.000Z | 2021-07-29T13:37:02.000Z | apps/voyager/lib/voyager_web/views/error_helpers.ex | mskcc/seqosystem | 4d99c50a0b0bc74c7f9f899be4eda8eddf6e5a39 | [
"Apache-2.0"
] | 52 | 2020-10-21T19:47:59.000Z | 2021-09-09T18:42:33.000Z | apps/voyager/lib/voyager_web/views/error_helpers.ex | msk-access/seqosystem | 4d99c50a0b0bc74c7f9f899be4eda8eddf6e5a39 | [
"Apache-2.0"
] | 1 | 2020-12-15T03:33:31.000Z | 2020-12-15T03:33:31.000Z | defmodule VoyagerWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
Enum.map(Keyword.get_values(form.errors, field), fn error ->
content_tag(:span, translate_error(error),
class: "invalid-feedback",
phx_feedback_for: input_id(form, field)
)
end)
end
  @doc """
  Translates an error message using gettext.
  """
  def translate_error({msg, opts}) do
    # When using gettext, we typically pass the strings we want
    # to translate as a static argument:
    #
    #     # Translate "is invalid" in the "errors" domain
    #     dgettext("errors", "is invalid")
    #
    #     # Translate the number of files with plural rules
    #     dngettext("errors", "1 file", "%{count} files", count)
    #
    # Because the error messages we show in our forms and APIs
    # are defined inside Ecto, we need to translate them dynamically.
    # This requires us to call the Gettext module passing our gettext
    # backend as first argument.
    #
    # Note we use the "errors" domain, which means translations
    # should be written to the errors.po file. The :count option is
    # set by Ecto and indicates we should also apply plural rules.
    #
    # `opts[:count]` is nil when absent, so plural rules only apply when
    # Ecto provided a :count option.
    if count = opts[:count] do
      Gettext.dngettext(VoyagerWeb.Gettext, "errors", msg, msg, count, opts)
    else
      Gettext.dgettext(VoyagerWeb.Gettext, "errors", msg, opts)
    end
  end
end
| 32.229167 | 76 | 0.665158 |
1c2cc1dd78dd5c93cbc0f297ee2897d0dffab7b2 | 1,052 | ex | Elixir | lib/offer_hunters_web/controllers/comment_controler.ex | Ryandls/offer_hunters-backend | abedff162d8623e2fbaa4c5e4a518f1726bef436 | [
"MIT"
] | 3 | 2021-10-01T21:13:02.000Z | 2021-11-05T22:25:55.000Z | lib/offer_hunters_web/controllers/comment_controler.ex | Ryandls/offer_hunters-backend | abedff162d8623e2fbaa4c5e4a518f1726bef436 | [
"MIT"
] | null | null | null | lib/offer_hunters_web/controllers/comment_controler.ex | Ryandls/offer_hunters-backend | abedff162d8623e2fbaa4c5e4a518f1726bef436 | [
"MIT"
] | null | null | null | defmodule OfferHuntersWeb.CommentController do
use OfferHuntersWeb, :controller
alias OfferHunters.{Comment, Offer, User}
action_fallback OfferHuntersWeb.FallbackController
  # Creates a comment after verifying the referenced offer and user both
  # exist. On success renders 201 with the new comment's id; any failing
  # step falls through to the configured FallbackController.
  def create(
        conn,
        %{
          "comment" => comment,
          "name" => name,
          "offer_id" => offer_id,
          "user_id" => user_id,
          "created_date" => created_date
        } = params
      ) do
    with {:ok, %Offer{}} <- OfferHunters.get_offer_by_id(offer_id),
         {:ok, %User{}} <- OfferHunters.get_user_by_id(user_id),
         {:ok, %Comment{id: id}} <- OfferHunters.create_comment(params) do
      conn
      |> put_status(:created)
      |> render("created.json", comment: comment, name: name, created_date: created_date, id: id)
    end
  end
  # Deletes a comment by id, rendering 200 when the comment exists.
  # NOTE(review): fetch and delete are separate calls, so a comment removed
  # in between is silently ignored, and delete_comment_by_id/1's return
  # value is discarded — verify this is intentional.
  def delete(conn, %{"id" => id}) do
    with {:ok, _comment} <-
           OfferHunters.get_comment_by_id(id) do
      OfferHunters.delete_comment_by_id(id)

      conn
      |> put_status(:ok)
      |> render("comment.json", comment: "Comment deleted!")
    end
  end
end
| 27.684211 | 97 | 0.606464 |
1c2d28be7b7cf70d3af787cf3af891c286d3ca0f | 2,678 | ex | Elixir | example_applications/web_app/lib/web_app/prom_ex.ex | maartenvanvliet/prom_ex | 8eb4f86c169af3b184a1a45cf42e298af2b05816 | [
"MIT"
] | null | null | null | example_applications/web_app/lib/web_app/prom_ex.ex | maartenvanvliet/prom_ex | 8eb4f86c169af3b184a1a45cf42e298af2b05816 | [
"MIT"
] | null | null | null | example_applications/web_app/lib/web_app/prom_ex.ex | maartenvanvliet/prom_ex | 8eb4f86c169af3b184a1a45cf42e298af2b05816 | [
"MIT"
] | null | null | null | defmodule WebApp.PromEx do
@moduledoc """
Be sure to add the following to finish setting up PromEx:
1. Update your configuration (config.exs, dev.exs, prod.exs, releases.exs, etc) to
configure the necessary bit of PromEx. Be sure to check out `PromEx.Config` for
more details regarding configuring PromEx:
```
config :web_app, WebApp.PromEx,
manual_metrics_start_delay: :no_delay,
drop_metrics_groups: [],
grafana: :disabled,
metrics_server: :disabled
```
2. Add this module to your application supervision tree. It should be one of the first
things that is started so that no Telemetry events are missed. For example, if PromEx
  is started after your Repo module, you will miss Ecto's init events and the dashboards
will be missing some data points:
```
def start(_type, _args) do
children = [
WebApp.PromEx,
...
]
...
end
```
3. Update your `endpoint.ex` file to expose your metrics (or configure a standalone
server using the `:metrics_server` config options). Be sure to put this plug before
your `Plug.Telemetry` entry so that you can avoid having calls to your `/metrics`
endpoint create their own metrics and logs which can pollute your logs/metrics given
that Prometheus will scrape at a regular interval and that can get noisy:
```
defmodule WebAppWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :web_app
...
plug PromEx.Plug, prom_ex_module: WebApp.PromEx
...
end
```
"""
use PromEx, otp_app: :web_app
  @impl true
  # Telemetry plugins PromEx should activate; each built-in plugin pairs
  # with a dashboard definition returned by dashboards/0 below.
  def plugins do
    [
      # PromEx built in plugins
      PromEx.Plugins.Application,
      PromEx.Plugins.Beam,
      {PromEx.Plugins.Phoenix, router: WebAppWeb.Router},
      {PromEx.Plugins.Ecto, repos: [WebApp.Repo, WebApp.Repo2]},
      {PromEx.Plugins.Oban, oban_supervisors: [Oban, Oban.SuperSecret]},
      PromEx.Plugins.PhoenixLiveView

      # Add your own PromEx metrics plugins
      # WebApp.Users.PromEx
    ]
  end
@impl true
def dashboard_assigns do
[
datasource_id: "prometheus"
]
end
@impl true
def dashboards do
[
# PromEx built in dashboard definitions. Remove dashboards that you do not need
{:prom_ex, "application.json"},
{:prom_ex, "beam.json"},
{:prom_ex, "phoenix.json"},
{:prom_ex, "ecto.json"},
{:prom_ex, "oban.json"},
{:prom_ex, "phoenix_live_view.json"}
# Add your dashboard definitions here with the format: {:otp_app, "path_in_priv"}
# {:web_app, "/grafana_dashboards/user_metrics.json"}
]
end
end
| 29.428571 | 91 | 0.652353 |
1c2d60079f2517e3818b90d49af999c3d07ce9d4 | 1,667 | ex | Elixir | clients/container_analysis/lib/google_api/container_analysis/v1/model/batch_create_notes_request.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/container_analysis/lib/google_api/container_analysis/v1/model/batch_create_notes_request.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/container_analysis/lib/google_api/container_analysis/v1/model/batch_create_notes_request.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ContainerAnalysis.V1.Model.BatchCreateNotesRequest do
@moduledoc """
Request to create notes in batch.
## Attributes
* `notes` (*type:* `%{optional(String.t) => GoogleApi.ContainerAnalysis.V1.Model.Note.t}`, *default:* `nil`) - Required. The notes to create. Max allowed length is 1000.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:notes => %{optional(String.t()) => GoogleApi.ContainerAnalysis.V1.Model.Note.t()} | nil
}
field(:notes, as: GoogleApi.ContainerAnalysis.V1.Model.Note, type: :map)
end
defimpl Poison.Decoder, for: GoogleApi.ContainerAnalysis.V1.Model.BatchCreateNotesRequest do
def decode(value, options) do
GoogleApi.ContainerAnalysis.V1.Model.BatchCreateNotesRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ContainerAnalysis.V1.Model.BatchCreateNotesRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.468085 | 173 | 0.746251 |
1c2d88403ff365780b0d1070680c2eecb877044e | 7,481 | ex | Elixir | lib/inline_svg.ex | boydm/inline_svg | ffbb1bbc50d9e7a02c59b0ad74665c8ce834b7bd | [
"Apache-2.0"
] | 8 | 2021-01-04T06:33:37.000Z | 2021-08-06T21:37:05.000Z | lib/inline_svg.ex | boydm/inline_svg | ffbb1bbc50d9e7a02c59b0ad74665c8ce834b7bd | [
"Apache-2.0"
] | 1 | 2021-01-21T06:01:01.000Z | 2021-01-21T06:01:01.000Z | lib/inline_svg.ex | boydm/inline_svg | ffbb1bbc50d9e7a02c59b0ad74665c8ce834b7bd | [
"Apache-2.0"
] | null | null | null | defmodule InlineSvg do
require Logger
@moduledoc """
Simple and fast in-line SVG library and renderer for web applications.
SVG files are images that are formatted as very simple, and usually small, text
files. It is faster, and recommended, that you directly include the svg data
in-line with your web pages instead of asking the browser to make additional
calls to servers before it can render your pages. This makes your pages load faster.
`inline_svg` renders your svg files as quickly as possible. To do this, it reads
the svg files at compile-time and provides runtime access through a term
stored in your beamfile.
If you use `nimble_publisher`, this should be a familiar concept.
To use `inline_svg`, you create a module in your project that wraps it, providing
a compile-time place to build the library and runtime access to it. It also happens
to make your template svg rendering code very simple.
You do __not__ need to store your svg files in the "assets/static" directory. Those files
are copied into your application via a file based mechanism, whereas `inline_svg` compiles
them in directly. I recommend simply using "assets/svg".
Each `*.svg` file must contain a single valid `<svg></svg>` tag set with data as appropriate.
Anything before the `<svg>` tag or after the `</svg>` is treated as comment and stripped
from the text during compilation.
## Example wrapper module
defmodule MyAppWeb.Svg do
# Build the library at compile time
@library InlineSvg.compile( "assets/svg" )
# Accesses the library at run time
defp library(), do: @library
# Render an svg from the library
def render( key, opts \\ [] ) do
InlineSvg.render( library(), key, opts )
end
end
To use the library, you would `alias MyAppWeb.Svg` in a controller, live_view or
  your main app module. This allows your template code to call Svg.render directly.
## Example use in a template
<%= Svg.render( "heroicons/user", class: "h-5 w-5 inline" ) %>
### Live reloading
If you are using Phoenix, you can enable live reloading by simply telling Phoenix to watch the svgs directory.
Open up "config/dev.exs", search for `live_reload:` and add this to the list of patterns:
```elixir
live_reload: [
patterns: [
...,
~r"assets/svg/*/.*(svg)$"
]
]
```
"""
  defmodule Error do
    @moduledoc false
    # Raised for invalid SVG files (at compile time) and missing keys (at
    # render time); `:svg` carries the offending file path or library key.
    defexception message: nil, svg: nil
  end
#--------------------------------------------------------
@doc """
Compile a folder of `*.svg` files into a library you can render from.
The folder and it's subfolders will be traversed and all valid `*.svg` files will
be added to the library. Each svg will be added to the library with a key that is
relative path of the svg file, minus the .svg part. For example, if you compile
the folder "assets/svg" and it finds a file with the path "assets/svg/heroicons/calendar.svg",
then the key for that svg is `"heroicons/calendar"` in the library.
## Usage
The best way to use InlineSvg is to create a new module in your project that wraps
it, providing storage for the generated library term. This also allows you to customize
naming, rendering or compiling as required.
## Example
defmodule MyAppWeb.Svg do
# Build the library at compile time
@library InlineSvg.compile( "assets/svg" )
# Accesses the library at run time
defp library(), do: @library
# Render an svg from the library
def render( key, opts \\ [] ) do
InlineSvg.render( library(), key, opts )
end
end
Note that @library is accessed through a function. The library could become large,
so you want to wrap it with a function to ensure that it is only stored as a term
in your beam file once.
"""
@spec compile(map(), String.t()) :: map()
def compile( %{} = library \\ %{}, svg_root ) when is_bitstring(svg_root) do
svg_root
|> Kernel.<>( "/**/*.svg" )
|> Path.wildcard()
|> Enum.reduce( library, fn(path, acc) ->
with {:ok, key, svg} <- read_svg( path, svg_root ),
:ok <- unique_key( library, key, path ) do
Map.put( acc, key, svg <> "</svg>" )
else
{:file_error, err, path} ->
raise %Error{message: "SVG file #{inspect(path)} is invalid, err: #{err}", svg: path}
{:duplicate, key, path} ->
Logger.warn("SVG file: #{path} overwrites existing svg: #{key}")
end
end)
end
  # Reads one SVG file and extracts its library key and body.
  #
  # The key is the file's path relative to `root`, without the ".svg"
  # extension. The body is everything between the first "<svg" and the first
  # "</svg>" (exclusive) — text outside the tag pair is treated as comment and
  # discarded; compile/2 re-appends the "</svg>" closer.
  #
  # Returns {:ok, key, svg}, or {:file_error, reason, path} when the file is
  # unreadable, not valid UTF-8, or does not contain exactly one "<svg" and
  # one "</svg>" marker (String.split/2 then yields more than two parts and
  # the two-element match fails).
  defp read_svg( path, root ) do
    with {:ok, svg} <- File.read( path ),
         true <- String.valid?(svg),
         [_,svg] <- String.split(svg, "<svg"),
         [svg,_] <- String.split(svg, "</svg>") do
      {
        :ok,
        path # make the key
        |> String.trim(root)
        |> String.trim("/")
        |> String.trim_trailing(".svg"),
        svg
      }
    else
      err -> {:file_error, err, path}
    end
  end
defp unique_key(library, key, path) do
case Map.fetch( library, key ) do
{:ok, _} -> {:duplicate, key, path}
_ -> :ok
end
end
#--------------------------------------------------------
@doc """
Renders an svg into a safe string that can be inserted directly into a Phoenix template.
The named svg must be in the provided library, which should be build using the compile function.
_Optional_: pass in a keyword list of attributes to insert into the svg tag. This can be
used to add `class="something"` tag attributes, phoenix directives such as `phx-click`, or
even alpine directives such as `@click="some action"`. Note that key names containing
the underscore character `"_"` will be converted to the hyphen `"-"` character.
You don't normally call `InlineSvg.render()` directly, except in your wrapper module. Instead,
you would `alias MyAppWeb.Svg` in a controller, live view or
your your main app module. This allows your template code to call Svg.render directly, which
is simple and looks nice.
The following examples all use an aliased `MyAppWeb.Svg`, which wraps `InlineSvg`.
## Example use in a template
<%= Svg.render( "heroicons/menu" ) %>
<%= Svg.render( "heroicons/user", class: "h-5 w-5 inline" ) %>
## Other examples
Without attributes:
Svg.render( "heroicons/menu" )
{:safe, "<svg xmlns= ... </svg>"}
With options:
Svg.render( "heroicons/menu", class: "h-5 w-5" )
{:safe, "<svg class=\"h-5 w-5\" xmlns= ... </svg>"}
Svg.render( "heroicons/menu", phx_click: "action" )
{:safe, "<svg phx-click=\"action\" xmlns= ... </svg>"}
Svg.render( "heroicons/menu", "@click": "alpine_action" )
{:safe, "<svg @click=\"alpine_action\" xmlns= ... </svg>"}
"""
@spec render(map(), String.t(), list()) ::String.t()
def render( %{} = library, key, attrs \\ [] ) do
case Map.fetch( library, key ) do
{:ok, svg} -> {:safe, "<svg" <> render_attrs(attrs) <> svg}
_ -> raise %Error{message: "SVG #{inspect(key)} not found", svg: key}
end
end
#--------------------------------------------------------
# transform an opts list into a string of tag options
defp render_attrs( attrs ), do: do_render_attrs( attrs, "" )
defp do_render_attrs( [], acc ), do: acc
defp do_render_attrs( [{key,value} | tail ], acc ) do
key = to_string(key) |> String.replace("_", "-")
do_render_attrs( tail, "#{acc} #{key}=#{inspect(value)}" )
end
end
| 36.140097 | 112 | 0.633872 |
1c2de8c1b65bc685b3f4fa09fc0aac17f7a96ebc | 2,948 | ex | Elixir | apps/proxy/lib/proxy.ex | nicolasbaer/thirsty_throttler | 04d68f94d97fa2a661e6c00898b3b140cdba9b4a | [
"Apache-2.0"
] | null | null | null | apps/proxy/lib/proxy.ex | nicolasbaer/thirsty_throttler | 04d68f94d97fa2a661e6c00898b3b140cdba9b4a | [
"Apache-2.0"
] | null | null | null | apps/proxy/lib/proxy.ex | nicolasbaer/thirsty_throttler | 04d68f94d97fa2a661e6c00898b3b140cdba9b4a | [
"Apache-2.0"
] | null | null | null | defmodule Proxy do
@doc """
Start up a cowboy http server. The start_http method of cowboy takes
four arguments:
* The protocol of the server
* "NbAcceptors" - a non-negative-integer. This isn't further documented in
the cowboy docs. I used 100, from an Erlang example.
* TCP options for Ranch as a list of tuples. In this case the one one
we are using is :port, to set the server listening on port 8080.
You could also, for example, set ipv6, timeouts, and a number of other things here.
SEE ALSO: http://ninenines.eu/docs/en/ranch/1.2/manual/ranch_tcp/
* Protocol options for cowboy as a list of tuples. This can be a very big
structure because it includes you "middleware environment", which among
other things includes your entire routing table. Here that is the only option
we are specifying.
SEE ALSO: http://ninenines.eu/docs/en/cowboy/1.0/manual/cowboy_protocol/
SEE ALSO: http://ninenines.eu/docs/en/cowboy/1.0/guide/getting_started/
"""
def start(_type, _args) do
LruCache.start_link(:sessions, Proxy.ProxyHandler.cache_size)
dispatch_config = build_dispatch_config
{ :ok, _ } = :cowboy.start_http(:http,
100,
[{:port, 8080}],
[{ :env, [{:dispatch, dispatch_config}]}]
)
end
@doc """
The dispatch configuration specifies your routing table: how incoming
URLs are mapped to the Module and function that should get run for that
request. It's built with the `:cowboy_router.compile` function, which
takes a list of tuples. Each tuple specifies a hostname (or wildcard)
that can match, and the options -- including routes -- for that hostname.
Each route is a tuple of the form `{ PathMatch, Handler, Options}`.
Individual components of the configuration are documented in comments
line-by-line in the code below.
SEE ALSO: http://ninenines.eu/docs/en/cowboy/1.0/guide/routing/
"""
def build_dispatch_config do
# Compile takes as argument a list of tuples that represent hosts to
# match against.So, for example if your DNS routed two different
# hostnames to the same server, you could handle requests for those
# names with different sets of routes. See "Compilation" in:
# http://ninenines.eu/docs/en/cowboy/1.0/guide/routing/
:cowboy_router.compile([
# :_ causes a match on all hostnames. So, in this example we are treating
# all hostnames the same. You'll probably only be accessing this
# example with localhost:8080.
{ :_,
# The following list specifies all the routes for hosts matching the
# previous specification. The list takes the form of tuples, each one
# being { PathMatch, Handler, Options}
[
{"/[...]", Proxy.ProxyHandler, []}
]}
])
end
end
| 41.521127 | 89 | 0.663501 |
1c2e07875c33e4864ae68d869e52e975d7270363 | 257 | exs | Elixir | priv/repo/migrations/20160221045342_add_loglines_table.exs | avdi/loggex | 0a678148cbb5a9e4236cd633e7901f7014dee805 | [
"MIT"
] | 3 | 2016-03-12T09:40:58.000Z | 2020-05-25T13:39:31.000Z | priv/repo/migrations/20160221045342_add_loglines_table.exs | avdi/loggex | 0a678148cbb5a9e4236cd633e7901f7014dee805 | [
"MIT"
] | 1 | 2020-05-22T20:39:57.000Z | 2020-05-22T20:39:57.000Z | priv/repo/migrations/20160221045342_add_loglines_table.exs | avdi/loggex | 0a678148cbb5a9e4236cd633e7901f7014dee805 | [
"MIT"
] | 2 | 2019-12-01T14:08:35.000Z | 2020-05-22T20:36:51.000Z | defmodule Loggex.Repo.Migrations.AddLoglinesTable do
use Ecto.Migration
  # Reversible migration: creates the `loglines` table (Ecto derives the
  # matching `drop table` on rollback).
  def change do
    create table(:loglines) do
      add :sender, :string
      # NOTE(review): `:datetime` is a legacy Ecto column type; newer Ecto
      # versions use `:utc_datetime`/`:naive_datetime` — confirm the Ecto
      # version before changing.
      add :sendtime, :datetime
      # NOTE(review): camelCase column name is unconventional for
      # Ecto/Postgres (snake_case `response_code` is typical) — kept as-is.
      add :responseCode, :integer
      add :body, :string
    end
  end
end
| 19.769231 | 52 | 0.673152 |
1c2e07e76d381de2110d873f332c934f35679673 | 1,055 | exs | Elixir | test/oli_web/controllers/help_controller_test.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 1 | 2022-03-17T20:35:47.000Z | 2022-03-17T20:35:47.000Z | test/oli_web/controllers/help_controller_test.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 9 | 2021-11-02T16:52:09.000Z | 2022-03-25T15:14:01.000Z | test/oli_web/controllers/help_controller_test.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | null | null | null | defmodule OliWeb.HelpControllerTest do
use OliWeb.ConnCase
alias Oli.Test.MockHTTP
import Mox
describe "request_help" do
test "send help request", %{conn: conn} do
expect_recaptcha_http_post()
freshdesk_url = System.get_env("FRESHDESK_API_URL", "example.edu")
MockHTTP
|> expect(:post, fn ^freshdesk_url, _body, _headers, _opts ->
{:ok,
%HTTPoison.Response{
status_code: 200
}}
end)
conn =
post(
conn,
Routes.help_path(conn, :create),
help: %{
location: "https://localhost/project/philosophy",
cookies_enabled: "true",
full_name: "Help Me",
email: "help@example.edu",
subject: "help_login",
message: "Lorem Ipsum is simply dummy text of the printing and typesetting industry."
},
"g-recaptcha-response": "any"
)
assert keys = json_response(conn, 200)
assert Map.get(keys, "result") == "success"
end
end
end
| 25.119048 | 97 | 0.574408 |
1c2e7bf8307cf5e53dfc07806349446cb73cd3fd | 160 | ex | Elixir | lib/protobuf/error.ex | llxff/protobuf-elixir | 365b26833fce85f0f46759a7c50c8858bd3cc1c1 | [
"MIT"
] | 4 | 2018-03-19T13:34:35.000Z | 2018-07-11T19:20:34.000Z | lib/protobuf/error.ex | llxff/protobuf-elixir | 365b26833fce85f0f46759a7c50c8858bd3cc1c1 | [
"MIT"
] | 1 | 2019-07-08T09:57:25.000Z | 2019-07-08T09:57:25.000Z | lib/protobuf/error.ex | k10pr0f/exproto | 3ae86c1308527a723465a14236849c7ab9ba949c | [
"MIT"
] | 1 | 2019-05-27T11:46:01.000Z | 2019-05-27T11:46:01.000Z | defmodule Protobuf.DecodeError do
  # Default `:message` used when `raise Protobuf.DecodeError` is given no
  # custom message.
  defexception message: "something wrong when decoding"
end
defmodule Protobuf.InvalidError do
  @moduledoc """
  Exception raised with a caller-supplied `:message`.
  """
  defexception [:message]
end
| 20 | 55 | 0.81875 |
1c2e85c7dc2d9a8d36fc9c05139423c51c7d7557 | 1,630 | ex | Elixir | lib/tweet_grafana_img/grafana.ex | ringo156/tweet_grafana_img | 876e122a77f36e2b2e8d06c901b08b0cbaa51b3a | [
"MIT"
] | 1 | 2020-12-15T13:20:24.000Z | 2020-12-15T13:20:24.000Z | lib/tweet_grafana_img/grafana.ex | ringo156/tweet_grafana_img | 876e122a77f36e2b2e8d06c901b08b0cbaa51b3a | [
"MIT"
] | null | null | null | lib/tweet_grafana_img/grafana.ex | ringo156/tweet_grafana_img | 876e122a77f36e2b2e8d06c901b08b0cbaa51b3a | [
"MIT"
] | null | null | null | defmodule TweetGrafanaImg.Grafana do
use Application
@url ""
@token ""
# ToDoパネルの選択
# ToDo時間の間隔可変にする
def get_panel() do
headers = make_headers(@token)
# from = System.os_time - 86400 * 1000000000 |> div(1000000)
# params = make_params(from, div(System.os_time, 1000000))
params = System.os_time - 86400 * 1000000000
|> div(1000000)
|> make_params(System.os_time |> div(1000000))
options = make_options(8000)
request = make_request(:get, headers, options, params, @url)
HTTPoison.request(request)
end
  # Fetches the panel image and writes the raw response body to disk.
  # A failed HTTP request crashes with a MatchError — intentional
  # "let it crash" style.
  # NOTE(review): "./teset.png" looks like a typo for "./test.png" — confirm
  # before renaming; external consumers may expect the current name.
  def save_panel() do
    {:ok, resp} = get_panel()
    File.write!("./teset.png", resp.body)
  end
def make_request(method, headers, options, params, url) do
%HTTPoison.Request{
method: method,
headers: headers,
options: options,
params: params,
url: url
}
end
def make_headers(token) do
["Authorization": "Bearer #{token}"]
end
# 86400 = 24 * 60 * 60
# params = make_params(System.os_time - 86400 * 1000000000, System.os_time)
def make_params(from, to) do
[
{~s|orgId|, ~s|1|},
{~s|from|, ~s|#{from}|},
{~s|to|, ~s|#{to}|},
{~s|panelId|, ~s|2|},
{~s|width|, ~s|1000|},
{~s|height|, ~s|500|},
{~s|tz|, ~s|Asia/Tokyo|},
]
end
def make_options(timeout) do
[hackney: [:insecure], recv_timeout: timeout]
end
end
# 処理の流れ(イメージ)
# url
# headers
# params
# options: [hackney: [:insecure], recv_timeout: 8000]
# request = %HTTPoison.Request{method: :get, headers: headers, options: options, params: params, url: url}
# {:ok, resp} = HTTPoison.request(request)
| 23.285714 | 106 | 0.614724 |
1c2ebc967d22acc7991e901b1eb62036b993ccda | 806 | ex | Elixir | examples/phx_server/lib/phx_server_web/views/error_helpers.ex | hrzndhrn/json_rpc | c9a1d70b92abbf59260be6394bb5ecc41229a599 | [
"MIT"
] | null | null | null | examples/phx_server/lib/phx_server_web/views/error_helpers.ex | hrzndhrn/json_rpc | c9a1d70b92abbf59260be6394bb5ecc41229a599 | [
"MIT"
] | null | null | null | examples/phx_server/lib/phx_server_web/views/error_helpers.ex | hrzndhrn/json_rpc | c9a1d70b92abbf59260be6394bb5ecc41229a599 | [
"MIT"
] | null | null | null | defmodule PhxServerWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
  @doc """
  Generates tag for inlined form input errors.

  Emits one `<span class="invalid-feedback">` per error recorded for `field`
  on the form; `phx-feedback-for` ties the span to the input so LiveView can
  show/hide feedback appropriately.
  """
  def error_tag(form, field) do
    Enum.map(Keyword.get_values(form.errors, field), fn error ->
      content_tag(:span, translate_error(error),
        class: "invalid-feedback",
        phx_feedback_for: input_id(form, field)
      )
    end)
  end
@doc """
Translates an error message.
"""
def translate_error({msg, opts}) do
# Because the error messages we show in our forms and APIs
# are defined inside Ecto, we need to translate them dynamically.
Enum.reduce(opts, msg, fn {key, value}, acc ->
String.replace(acc, "%{#{key}}", to_string(value))
end)
end
end
| 26 | 69 | 0.663772 |
1c2ecbb0e4b75cf64e769456800787795ff3bfbe | 2,551 | ex | Elixir | lib/web/router.ex | fabianherrera/roger_ui_revamp | 7541148d6c24a5d3209e4eedccfb6a43f2b38fdf | [
"MIT"
] | 1 | 2020-01-20T19:42:20.000Z | 2020-01-20T19:42:20.000Z | lib/web/router.ex | fabianherrera/roger_ui_revamp | 7541148d6c24a5d3209e4eedccfb6a43f2b38fdf | [
"MIT"
] | 2 | 2018-02-19T20:07:29.000Z | 2018-03-06T17:18:18.000Z | lib/web/router.ex | fabianherrera/roger_ui_revamp | 7541148d6c24a5d3209e4eedccfb6a43f2b38fdf | [
"MIT"
] | 2 | 2018-03-02T00:00:08.000Z | 2018-03-02T13:35:49.000Z | defmodule RogerUI.Web.RouterPlug do
@moduledoc """
Plug implementation to expose RogerUI API
This module contains a Plug Router extensión, Plug ships
with many plugs that you can add to the router plug pipeline,
allowing you to plug something before a route matches or before a route is dispatched to
Note Plug.Router compiles all routes into a single function and relies on the Erlang VM to optimize the underlying routes into
a tree lookup, instead of a linear lookup that would instead match route-per-route
Catch all match is recommended to be defined, otherwise routing fails with a function clause
error (as it would in any regular Elixir function)
Each route needs to return the connection as per the Plug specification
See Plug.Router docs for more information
"""
require Logger
require EEx
alias RogerUI.Web.RouterPlug.Router
alias Plug.Conn
def init(opts), do: opts
def call(conn, opts) do
ns = opts[:namespace] || ""
conn = Conn.assign(conn, :namespace, ns)
case ns do
"" ->
Router.call(conn, Router.init(opts))
_ ->
namespace(conn, opts, ns)
end
end
  # Strips the leading `ns` segment from the request path before handing the
  # request to the inner Router. Note `ns` appears twice in the head: the
  # first path segment must equal the configured namespace for this clause
  # to match.
  defp namespace(%Conn{path_info: [ns | path]} = conn, opts, ns) do
    Router.call(%Conn{conn | path_info: path}, Router.init(opts))
  end

  # Requests outside the namespace pass through unchanged.
  defp namespace(conn, _opts, _ns), do: conn
defmodule Router do
@moduledoc """
Plug Router extension
"""
import Plug.Conn
alias RogerUI.Web.Helpers.Response
use Plug.Router
@roger_api Application.get_env(:roger_ui, :roger_api, RogerUI.RogerApi)
plug(
Plug.Static,
at: "/",
from: :roger_ui,
only: ~w(css js)
)
plug(:match)
plug(:dispatch)
forward("/api/jobs", to: RogerUI.Web.JobsPlug)
forward("/api/partitions", to: RogerUI.Web.PartitionsPlug)
forward("/api/queues", to: RogerUI.Web.QueuesPlug)
# {nodes: {:node_name_1 {partition_name_1: {queue_name_1: {...}}}}}}
get "/api/nodes" do
nodes =
@roger_api.partitions()
|> Enum.into(%{})
Response.json(conn, %{nodes: nodes})
end
index_path = Path.join([Application.app_dir(:roger_ui), "priv/static/index.html"])
EEx.function_from_file(:defp, :render_index, index_path, [:assigns])
match _ do
base =
case conn.assigns[:namespace] do
"" -> ""
namespace -> "#{namespace}"
end
conn
|> put_resp_header("content-type", "text/html")
|> send_resp(200, render_index(base: base))
|> halt()
end
end
end
| 26.852632 | 128 | 0.656213 |
1c2eccadc2e08c9e85a3345727b884cedb04bb1f | 1,252 | exs | Elixir | mix.exs | wsmoak/chargifyex | fae205d6ed7f227ac5eda451124d50c311447440 | [
"MIT"
] | 1 | 2015-07-16T14:17:17.000Z | 2015-07-16T14:17:17.000Z | mix.exs | wsmoak/chargify-elixir | fae205d6ed7f227ac5eda451124d50c311447440 | [
"MIT"
] | null | null | null | mix.exs | wsmoak/chargify-elixir | fae205d6ed7f227ac5eda451124d50c311447440 | [
"MIT"
] | null | null | null | defmodule Chargify.Mixfile do
use Mix.Project
  # Mix project definition. NOTE: bare `description` / `package` / `deps`
  # calls (no parentheses) are the pre-Elixir-1.4 style; they resolve to the
  # private functions below.
  def project do
    [app: :chargify,
     version: "0.0.1",
     elixir: "~> 1.0",
     description: description,
     package: package,
     deps: deps]
  end

  # Configuration for the OTP application
  #
  # Type `mix help compile.app` for more information
  def application do
    [
      mod: { Chargify, [] },
      applications: [:httpoison, :exjsx, :logger]
    ]
  end

  # Dependencies can be Hex packages:
  #
  #   {:mydep, "~> 0.3.0"}
  #
  # Or git/path repositories:
  #
  #   {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
  #
  # Type `mix help deps` for more examples and options
  defp deps do
    [
      { :httpoison, "~> 0.6" },
      { :exjsx, "~> 3.1.0", app: false },
      # Doc tooling only pulled in for the :docs environment.
      { :ex_doc, "~> 0.6.1", only: :docs },
      { :earmark, "~> 0.1.12", only: :docs }
    ]
  end

  # Short description shown on Hex.
  defp description do
    """
    A Chargify API wrapper for Elixir

    Requires an active account with Chargify (https://chargify.com).
    """
  end

  # Hex package metadata (files shipped, maintainers, license, links).
  defp package do
    [ files: [ "lib", "mix.exs", "README.md", "LICENSE" ],
      contributors: [ "Wendy Smoak" ],
      licenses: [ "MIT" ],
      links: %{ "GitHub" => "https://github.com/wsmoak/chargify-elixir" } ]
  end
end
| 23.185185 | 77 | 0.563099 |
1c2ecd9689a23570ce3d326cfaba7a01b407294c | 979 | exs | Elixir | mix.exs | zven21/turbo_html | 462a41d57d84a96e9e86dfa31c3a12040a02e712 | [
"MIT"
] | 4 | 2019-01-23T14:10:31.000Z | 2019-05-14T15:41:11.000Z | mix.exs | zven21/turbo_html | 462a41d57d84a96e9e86dfa31c3a12040a02e712 | [
"MIT"
] | 35 | 2019-02-19T02:11:33.000Z | 2021-09-01T07:09:42.000Z | mix.exs | zven21/turbo_html | 462a41d57d84a96e9e86dfa31c3a12040a02e712 | [
"MIT"
] | 4 | 2019-11-03T16:11:39.000Z | 2022-03-05T14:34:23.000Z | defmodule Turbo.HTML.MixProject do
use Mix.Project
  @version "0.3.0"
  @github "https://github.com/zven21/turbo_html"

  # Mix project definition for the turbo_html Hex package.
  # NOTE(review): the description string "A elixir lib ..." has a grammar
  # slip ("An Elixir lib") — it is published metadata, so left unchanged here.
  def project do
    [
      app: :turbo_html,
      description: "A elixir lib for search, sort, paginate.",
      version: @version,
      elixir: "~> 1.6",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      package: package()
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger]
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:ex_doc, ">= 0.0.0", only: :dev},
      {:credo, "~> 0.10.0", only: [:dev, :test], runtime: false},
      {:phoenix_html, "~> 2.13"},
      {:plug, "~> 1.7"},
      {:gettext, ">= 0.0.0"}
    ]
  end

  # Hex package metadata (files shipped, maintainers, license, links).
  defp package do
    [
      files: ["lib", "mix.exs", "README.md"],
      maintainers: ["zven21"],
      licenses: ["MIT"],
      links: %{"GitHub" => @github}
    ]
  end
end
| 21.282609 | 65 | 0.54239 |
1c2ed6a71f0a029f837d1a9c3fbd9f2ff72cf6be | 254 | exs | Elixir | test/doctests_test.exs | LegendsOfLearning/redix_pool | 2a7edabd0491567cdd8fadea6cc1085391efb95a | [
"MIT"
] | null | null | null | test/doctests_test.exs | LegendsOfLearning/redix_pool | 2a7edabd0491567cdd8fadea6cc1085391efb95a | [
"MIT"
] | null | null | null | test/doctests_test.exs | LegendsOfLearning/redix_pool | 2a7edabd0491567cdd8fadea6cc1085391efb95a | [
"MIT"
] | 1 | 2020-04-02T12:39:41.000Z | 2020-04-02T12:39:41.000Z | defmodule DoctestsTest do
  # Doctests must run with a clean db and asynchronously
  # NOTE(review): the comment above says "asynchronously", but `async: true`
  # is not passed to ExUnit.Case — confirm which is intended.
  use ExUnit.Case

  # Runs before every test (including each doctest): make sure the app is
  # started and wipe the default Redis database for a clean slate.
  setup do
    :ok = Application.ensure_started(:redix_pool)
    RedixPool.command(:redix_default, ["FLUSHDB"])
    :ok
  end

  doctest RedixPool
end
| 18.142857 | 56 | 0.724409 |
1c2ee92c217f0c5622d36ac526791385de36870f | 551 | ex | Elixir | lib/app/commands/outside.ex | johannesE/elixir-telegram-bot-boilerplate | af6ceb910183e1927d0c22f8d0f10805662326d9 | [
"MIT"
] | 15 | 2019-03-19T16:36:18.000Z | 2021-05-29T19:10:43.000Z | lib/app/commands/outside.ex | johannesE/elixir-telegram-bot-boilerplate | af6ceb910183e1927d0c22f8d0f10805662326d9 | [
"MIT"
] | 14 | 2018-04-11T02:55:52.000Z | 2020-03-07T17:00:22.000Z | lib/app/commands/outside.ex | johannesE/elixir-telegram-bot-boilerplate | af6ceb910183e1927d0c22f8d0f10805662326d9 | [
"MIT"
] | 4 | 2019-03-14T14:38:33.000Z | 2021-01-23T22:42:03.000Z | defmodule App.Commands.Outside do
# Notice that here we just `use` Commander. Router is only
# used to map commands to actions. It's best to keep routing
# only in App.Commands file. Commander gives us helpful
# macros to deal with Nadia functions.
use App.Commander
  # Functions must have as first parameter a variable named
  # update. Otherwise, macros (like `send_message`) will not
  # work as expected.
  #
  # Handles the /outside command: logs the command and replies through the
  # `send_message` macro, which implicitly reads the `update` binding — do
  # NOT rename or underscore-prefix the parameter even though it looks unused.
  def outside(update) do
    Logger.log :info, "Command /outside"
    send_message "This came from a separate module."
  end
end
| 32.411765 | 62 | 0.731397 |
1c2ee9c7e4030d14e8da22178a30de5eb54625c9 | 300 | ex | Elixir | lib/tweet_bot/repo.ex | chenxsan/telegram-bot-for-twitter | 892107c7609123028ac2375342cd7b2329931635 | [
"MIT"
] | 14 | 2018-03-23T04:13:29.000Z | 2021-07-26T06:15:50.000Z | lib/tweet_bot/repo.ex | chenxsan/telegram-bot-for-twitter | 892107c7609123028ac2375342cd7b2329931635 | [
"MIT"
] | 2 | 2021-03-08T18:20:20.000Z | 2021-05-06T22:47:09.000Z | lib/tweet_bot/repo.ex | chenxsan/telegram-bot-for-twitter | 892107c7609123028ac2375342cd7b2329931635 | [
"MIT"
] | 3 | 2020-11-04T08:11:47.000Z | 2022-01-13T17:41:42.000Z | defmodule TweetBot.Repo do
use Ecto.Repo, otp_app: :tweet_bot, adapter: Ecto.Adapters.Postgres
@doc """
Dynamically loads the repository url from the
DATABASE_URL environment variable.
"""
def init(_, opts) do
{:ok, Keyword.put(opts, :url, System.get_env("DATABASE_URL"))}
end
end
| 25 | 69 | 0.713333 |
1c2ef3cfd52a943e1397b4fdcebf33bdc44f8c15 | 22,724 | ex | Elixir | lib/ex_unit/lib/ex_unit/assertions.ex | saydulk/elixir | 17a0278a28a23d533bc012b5a61852fdb7b2fcc3 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/lib/ex_unit/assertions.ex | saydulk/elixir | 17a0278a28a23d533bc012b5a61852fdb7b2fcc3 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/lib/ex_unit/assertions.ex | saydulk/elixir | 17a0278a28a23d533bc012b5a61852fdb7b2fcc3 | [
"Apache-2.0"
] | 8 | 2018-02-20T18:30:53.000Z | 2019-06-18T14:23:31.000Z | defmodule ExUnit.AssertionError do
@moduledoc """
Raised to signal an assertion error.
"""
  # Sentinel meaning "this field carries no meaningful value"; formatters
  # check for it to decide which parts of a failure to print.
  @no_value :ex_unit_no_meaningful_value

  defexception left: @no_value, right: @no_value, message: @no_value, expr: @no_value

  @doc """
  Indicates no meaningful value for a field.
  """
  def no_value do
    @no_value
  end

  # Delegates rendering to the formatter; the leading blank lines separate
  # the failure body from the test header in terminal output.
  def message(exception) do
    "\n\n" <> ExUnit.Formatter.format_assertion_error(exception)
  end
end
defmodule ExUnit.MultiError do
  @moduledoc """
  Raised to signal multiple errors happened in a test case.
  """

  defexception errors: []

  # Formats every `{kind, error, stacktrace}` entry with its banner, joined
  # by blank lines, under a single heading.
  def message(%{errors: errors}) do
    formatted =
      errors
      |> Enum.map(fn {kind, error, stack} -> Exception.format_banner(kind, error, stack) end)
      |> Enum.join("\n\n")

    "got the following errors:\n\n" <> formatted
  end
end
defmodule ExUnit.Assertions do
@moduledoc """
This module contains a set of assertion functions that are
imported by default into your test cases.
In general, a developer will want to use the general
`assert` macro in tests. This macro introspects your code
and provides good reporting whenever there is a failure.
For example, `assert some_fun() == 10` will fail (assuming
`some_fun()` returns `13`):
Comparison (using ==) failed in:
code: some_fun() == 10
left: 13
right: 10
This module also provides other convenience functions
like `assert_in_delta` and `assert_raise` to easily handle
other common cases such as checking a floating-point number
or handling exceptions.
"""
@doc """
Asserts its argument is a truthy value.
`assert` introspects the underlying expression and provides
good reporting whenever there is a failure. For example,
if the expression uses the comparison operator, the message
will show the values of the two sides. The assertion
assert 1 + 2 + 3 + 4 > 15
will fail with the message:
Assertion with > failed
code: 1 + 2 + 3 + 4 > 15
left: 10
right: 15
Similarly, if a match expression is given, it will report
any failure in terms of that match. Given
assert [1] = [2]
you'll see:
match (=) failed
code: [1] = [2]
right: [2]
Keep in mind that `assert` does not change its semantics
based on the expression. In other words, the expression
is still required to return a truthy value. For example,
the following will fail:
assert nil = some_function_that_returns_nil()
Even though the match works, `assert` still expects a truth
value. In such cases, simply use `Kernel.==/2` or
`Kernel.match?/2`.
"""
  # `assert pattern = expr`: performs the match, re-exports the variables
  # bound by the pattern into the caller's scope, and still requires the
  # matched value to be truthy.
  defmacro assert({:=, _, [left, right]} = assertion) do
    code = escape_quoted(:assert, assertion)

    left = expand_pattern(left, __CALLER__)
    vars = collect_vars_from_pattern(left)
    pins = collect_pins_from_pattern(left, __CALLER__.vars)

    # If the match works, we need to check if the value
    # is not nil nor false. We need to rewrite the if
    # to avoid silly warnings though.
    return =
      suppress_warning(
        quote do
          case right do
            x when x in [nil, false] ->
              raise ExUnit.AssertionError,
                expr: expr,
                message: "Expected truthy, got #{inspect(right)}"

            _ ->
              :ok
          end
        end
      )

    # Runs the user's pattern against the value; on failure the error
    # includes the pinned variables to help diagnose the mismatch.
    match_expr =
      suppress_warning(
        quote do
          case right do
            unquote(left) ->
              unquote(return)
              unquote(vars)

            _ ->
              raise ExUnit.AssertionError,
                right: right,
                expr: expr,
                message: "match (=) failed" <> ExUnit.Assertions.__pins__(unquote(pins))
          end
        end
      )

    # The generated code rebinds the pattern's variables in the caller and
    # evaluates to the right-hand value, mirroring plain `=` semantics.
    quote do
      right = unquote(right)
      expr = unquote(code)
      unquote(vars) = unquote(match_expr)
      right
    end
  end
  # `assert match?(pattern, expr)`: evaluates the right side once, then
  # delegates to assert/2 with a match-specific failure message that also
  # reports any pinned variables from the pattern.
  defmacro assert({:match?, meta, [left, right]} = assertion) do
    code = escape_quoted(:assert, assertion)
    match? = {:match?, meta, [left, Macro.var(:right, __MODULE__)]}
    pins = collect_pins_from_pattern(left, __CALLER__.vars)

    quote do
      right = unquote(right)

      assert unquote(match?),
        right: right,
        expr: unquote(code),
        message: "match (match?) failed" <> ExUnit.Assertions.__pins__(unquote(pins))
    end
  end
  # Catch-all `assert expr`: operator expressions (==, <, =~, ...) are
  # rewritten by translate_assertion/3 into rich-diff failures; everything
  # else just checks truthiness and evaluates to the value.
  defmacro assert(assertion) do
    case translate_assertion(:assert, assertion, __CALLER__) do
      nil ->
        quote do
          value = unquote(assertion)

          unless value do
            raise ExUnit.AssertionError,
              expr: unquote(escape_quoted(:assert, assertion)),
              message: "Expected truthy, got #{inspect(value)}"
          end

          value
        end

      value ->
        value
    end
  end
@doc """
A negative assertion, expects the expression to be `false` or `nil`.
Keep in mind that `refute` does not change the semantics of
the given expression. In other words, the following will fail:
refute {:ok, _} = some_function_that_returns_error_tuple()
The code above will fail because the `=` operator always fails
when the sides do not match and `refute/2` does not change it.
The correct way to write the refutation above is to use
`Kernel.match?/2`:
refute match? {:ok, _}, some_function_that_returns_error_tuple()
## Examples
refute age < 0
"""
  # `refute match?(pattern, expr)`: like the assert counterpart but fails
  # when the match *succeeds*, again reporting pinned variables.
  defmacro refute({:match?, meta, [left, right]} = assertion) do
    code = escape_quoted(:refute, assertion)
    match? = {:match?, meta, [left, Macro.var(:right, __MODULE__)]}
    pins = collect_pins_from_pattern(left, __CALLER__.vars)

    quote do
      right = unquote(right)

      refute unquote(match?),
        right: right,
        expr: unquote(code),
        message:
          "match (match?) succeeded, but should have failed" <>
            ExUnit.Assertions.__pins__(unquote(pins))
    end
  end
  # Catch-all `refute expr`: operator expressions get the rich translated
  # form (negated via {:!, [], [value]}); everything else must be falsy and
  # evaluates to the value.
  defmacro refute(assertion) do
    case translate_assertion(:refute, assertion, __CALLER__) do
      nil ->
        quote do
          value = unquote(assertion)

          if value do
            raise ExUnit.AssertionError,
              expr: unquote(escape_quoted(:refute, assertion)),
              message: "Expected false or nil, got #{inspect(value)}"
          end

          value
        end

      value ->
        {:!, [], [value]}
    end
  end
  ## START HELPERS

  # Binary operators that get operator-aware failure reporting.
  @operator [:==, :<, :>, :<=, :>=, :===, :=~, :!==, :!=, :in]

  # Rewrites `assert left OP right` so both operands are evaluated once and
  # reported on failure. `equality_check?` is true for operators that can
  # never hold when both sides are identical, enabling a clearer message.
  defp translate_assertion(:assert, {operator, meta, [_, _]} = expr, caller)
       when operator in @operator do
    left = Macro.var(:left, __MODULE__)
    right = Macro.var(:right, __MODULE__)
    call = {operator, meta, [left, right]}
    equality_check? = operator in [:<, :>, :!==, :!=]
    message = "Assertion with #{operator} failed"
    translate_assertion(:assert, expr, call, message, equality_check?, caller)
  end

  # Same for `refute`, wrapping the comparison in a `not`; here the
  # equality-check operators are those that always hold for identical sides.
  defp translate_assertion(:refute, {operator, meta, [_, _]} = expr, caller)
       when operator in @operator do
    left = Macro.var(:left, __MODULE__)
    right = Macro.var(:right, __MODULE__)
    call = {:not, meta, [{operator, meta, [left, right]}]}
    equality_check? = operator in [:<=, :>=, :===, :==, :=~]
    message = "Refute with #{operator} failed"
    translate_assertion(:refute, expr, call, message, equality_check?, caller)
  end

  # Non-operator expressions: no translation; caller falls back to the
  # plain truthy/falsy check.
  defp translate_assertion(_kind, _expected, _caller) do
    nil
  end
  # equality_check? = true: if both sides are exactly equal (===), the
  # operator can never succeed/fail as expected, so report that directly
  # instead of a confusing identical-looking diff.
  defp translate_assertion(kind, {_, _, [left, right]} = expr, call, message, true, _caller) do
    expr = escape_quoted(kind, expr)

    quote do
      left = unquote(left)
      right = unquote(right)

      if ExUnit.Assertions.__equal__?(left, right) do
        assert false,
          left: left,
          expr: unquote(expr),
          message: unquote(message <> ", both sides are exactly equal")
      else
        assert unquote(call),
          left: left,
          right: right,
          expr: unquote(expr),
          message: unquote(message)
      end
    end
  end

  # equality_check? = false: evaluate both sides once and run the (possibly
  # negated) comparison, reporting both operands on failure.
  defp translate_assertion(kind, {_, _, [left, right]} = expr, call, message, false, _caller) do
    expr = escape_quoted(kind, expr)

    quote do
      left = unquote(left)
      right = unquote(right)

      assert unquote(call),
        left: left,
        right: right,
        expr: unquote(expr),
        message: unquote(message)
    end
  end
@doc false
def __equal__?(left, right) do
left === right
end
defp escape_quoted(kind, expr) do
Macro.escape({kind, [], [expr]})
end
## END HELPERS
@doc """
Asserts `value` is `true`, displaying the given `message` otherwise.
## Examples
assert false, "it will never be true"
"""
def assert(value, message) when is_binary(message) do
assert(value, message: message)
end
def assert(value, opts) when is_list(opts) do
unless value, do: raise(ExUnit.AssertionError, opts)
true
end
@doc """
Asserts that a message matching `pattern` was or is going to be received
within the `timeout` period, specified in milliseconds.
Unlike `assert_received`, it has a default `timeout`
of 100 milliseconds.
The `pattern` argument must be a match pattern. Flunks with `failure_message`
if a message matching `pattern` is not received.
## Examples
assert_receive :hello
Asserts against a larger timeout:
assert_receive :hello, 20_000
You can also match against specific patterns:
assert_receive {:hello, _}
x = 5
assert_receive {:count, ^x}
"""
  # Public entry point; shares the implementation with `assert_received` via
  # the private assert_receive/4. The default timeout expression is spliced
  # into the generated code (see `timeout = unquote(timeout)` below), so the
  # :ex_unit config is read at test runtime, not at compile time.
  defmacro assert_receive(
             pattern,
             timeout \\ Application.fetch_env!(:ex_unit, :assert_receive_timeout),
             failure_message \\ nil
           ) do
    assert_receive(pattern, timeout, failure_message, __CALLER__)
  end
@doc """
Asserts that a message matching `pattern` was received and is in the
current process' mailbox.
The `pattern` argument must be a match pattern. Flunks with `failure_message`
if a message matching `pattern` was not received.
Timeout is set to 0, so there is no waiting time.
## Examples
send self(), :hello
assert_received :hello
send self(), :bye
assert_received :hello, "Oh No!"
** (ExUnit.AssertionError) Oh No!
Process mailbox:
:bye
You can also match against specific patterns:
send self(), {:hello, "world"}
assert_received {:hello, _}
"""
  # Same as assert_receive/3 but with a zero timeout: only inspects messages
  # already sitting in the mailbox.
  defmacro assert_received(pattern, failure_message \\ nil) do
    assert_receive(pattern, 0, failure_message, __CALLER__)
  end
  # Builds the code generated by assert_receive/assert_received: a `receive`
  # on the user's pattern that, on timeout, distinguishes "a matching message
  # arrived too late" from "no matching message at all" and flunks with a
  # tailored error either way.
  defp assert_receive(pattern, timeout, failure_message, caller) do
    binary = Macro.to_string(pattern)

    # Expand before extracting metadata
    caller = Macro.Env.to_match(caller)
    expanded = expand_pattern(pattern, caller)
    vars = collect_vars_from_pattern(expanded)
    pins = collect_pins_from_pattern(expanded, caller.vars)

    # Alias the whole match as `received` so the generated receive can hand
    # the matched message back alongside the bound variables.
    pattern =
      case pattern do
        {:when, meta, [left, right]} ->
          {:when, meta, [quote(do: unquote(left) = received), right]}

        left ->
          quote(do: unquote(left) = received)
      end

    # Predicate form of the pattern, used on timeout to scan the mailbox for
    # a late-arriving match (`_ = vars` silences unused-variable warnings).
    quoted_pattern =
      quote do
        case message do
          unquote(pattern) ->
            _ = unquote(vars)
            true

          _ ->
            false
        end
      end

    pattern_finder =
      quote do
        fn message ->
          unquote(suppress_warning(quoted_pattern))
        end
      end

    # Message used when a matching message exists but arrived after the
    # timeout fired.
    failure_message_hit =
      failure_message ||
        quote do
          """
          Found message matching #{unquote(binary)} after #{timeout}ms.

          This means the message was delivered too close to the timeout value, you may want to either:

            1. Give an increased timeout to `assert_receive/2`
            2. Increase the default timeout to all `assert_receive` in your
               test_helper.exs by setting ExUnit.configure(assert_receive_timeout: ...)
          """
        end

    failure_message_miss =
      failure_message ||
        quote do
          "No message matching #{unquote(binary)} after #{timeout}ms."
        end

    # The generated code: on success, rebinds the pattern's variables in the
    # caller and evaluates to the received message; on timeout, inspects the
    # mailbox to produce the most helpful failure.
    quote do
      timeout = unquote(timeout)

      {received, unquote(vars)} =
        receive do
          unquote(pattern) ->
            {received, unquote(vars)}
        after
          timeout ->
            {:messages, messages} = Process.info(self(), :messages)

            if Enum.any?(messages, unquote(pattern_finder)) do
              flunk(unquote(failure_message_hit))
            else
              flunk(
                unquote(failure_message_miss) <>
                  ExUnit.Assertions.__pins__(unquote(pins)) <>
                  ExUnit.Assertions.__mailbox__(messages)
              )
            end
        end

      received
    end
  end
@indent "\n "
@max_mailbox_length 10
@doc false
def __mailbox__(messages) do
length = length(messages)
mailbox =
messages
|> Enum.take(@max_mailbox_length)
|> Enum.map_join(@indent, &inspect/1)
mailbox_message(length, @indent <> mailbox)
end
@doc false
# Formats the pinned variables (^var) and their values for failure output.
# Returns "" when nothing was pinned. Public only for use by generated code.
def __pins__([]), do: ""

def __pins__(pins) do
  content =
    pins
    # Pins are collected in reverse source order; restore it for display.
    |> Enum.reverse()
    |> Enum.map_join(@indent, fn {name, var} -> "#{name} = #{inspect(var)}" end)

  "\nThe following variables were pinned:" <> @indent <> content
end
# Builds the "Process mailbox:" section of a failure message.
# `length` is the total number of messages in the mailbox; `mailbox` is the
# already-formatted (and possibly truncated) listing produced by __mailbox__/1.
defp mailbox_message(0, _mailbox), do: "\nThe process mailbox is empty."

# Use the same constant __mailbox__/1 truncates with instead of a hard-coded
# 10, so the guard cannot silently drift out of sync with the truncation.
# (Module attributes are valid in guards.)
defp mailbox_message(length, mailbox) when length > @max_mailbox_length do
  "\nProcess mailbox:" <>
    mailbox <> "\nShowing only #{@max_mailbox_length} of #{length} messages."
end

defp mailbox_message(_length, mailbox) do
  "\nProcess mailbox:" <> mailbox
end
# Collects all pinned variables (^var) in `expr` that are also bound in the
# caller's environment (`vars`), as {name, var_ast} pairs, deduplicated by name.
# Used to display the pinned values when an assertion fails.
defp collect_pins_from_pattern(expr, vars) do
  {_, pins} =
    Macro.prewalk(expr, [], fn
      {:^, _, [{name, _, nil} = var]}, acc ->
        if {name, nil} in vars do
          # Returning :ok as the node prunes the subtree from further walking.
          {:ok, [{name, var} | acc]}
        else
          {:ok, acc}
        end

      form, acc ->
        {form, acc}
    end)

  Enum.uniq_by(pins, &elem(&1, 0))
end
# Collects the variables bound by a match pattern so the generated code can
# re-export them into the caller's scope.
#
# For a guarded pattern, variables referenced in the guard are kept only when
# they are also bound by the pattern itself (guard-only names are not bindings).
defp collect_vars_from_pattern({:when, _, [left, right]}) do
  pattern = collect_vars_from_pattern(left)

  for {name, _, context} = var <- collect_vars_from_pattern(right),
      Enum.any?(pattern, &match?({^name, _, ^context}, &1)),
      into: pattern,
      do: var
end

defp collect_vars_from_pattern(expr) do
  Macro.prewalk(expr, [], fn
    # `x :: type` in binaries: only the left side can bind a variable.
    {:::, _, [left, _]}, acc ->
      {[left], acc}

    # Skip pins and attribute reads — they read values, they don't bind.
    {skip, _, [_]}, acc when skip in [:^, :@] ->
      {:ok, acc}

    # `_` binds nothing.
    {:_, _, context}, acc when is_atom(context) ->
      {:ok, acc}

    # A plain variable: record it, marked as generated to silence warnings.
    {name, meta, context}, acc when is_atom(name) and is_atom(context) ->
      {:ok, [{name, [generated: true] ++ meta, context} | acc]}

    node, acc ->
      {node, acc}
  end)
  |> elem(1)
end
# Expands macros inside the pattern (in a match-context environment) while
# leaving plain variables untouched, so pin/var collection sees the final AST.
defp expand_pattern(expr, caller) do
  Macro.prewalk(expr, fn
    {var, _, context} = node when is_atom(var) and is_atom(context) ->
      node

    other ->
      Macro.expand(other, caller)
  end)
end
# Marks every `->` clause of a `case`-like quoted expression as compiler
# generated, so the compiler does not emit warnings (e.g. unused variables)
# for code we synthesized rather than the user wrote.
defp suppress_warning({name, meta, [expr, [do: clauses]]}) do
  annotate = fn {:->, clause_meta, clause_args} ->
    {:->, [generated: true] ++ clause_meta, clause_args}
  end

  {name, meta, [expr, [do: Enum.map(clauses, annotate)]]}
end
@doc """
Asserts the `exception` is raised during `function` execution with
the expected `message`, which can be a `Regex` or an exact `String`.
Returns the rescued exception, fails otherwise.

## Examples

    assert_raise ArithmeticError, "bad argument in arithmetic expression", fn ->
      1 + "test"
    end

    assert_raise RuntimeError, ~r/^today's lucky number is 0\.\d+!$/, fn ->
      raise "today's lucky number is \#{:rand.uniform()}!"
    end

"""
def assert_raise(exception, message, function) when is_function(function) do
  # Reuse assert_raise/2 to run the function and verify the exception type.
  error = assert_raise(exception, function)

  match? =
    cond do
      is_binary(message) -> Exception.message(error) == message
      Regex.regex?(message) -> Exception.message(error) =~ message
      # Fail loudly on an unsupported matcher instead of raising an opaque
      # CondClauseError.
      true ->
        raise ArgumentError,
              "assert_raise/3 expected the message to be a binary or a regex, " <>
                "got: #{inspect(message)}"
    end

  message =
    "Wrong message for #{inspect(exception)}\n" <>
      "expected:\n #{inspect(message)}\n" <>
      "actual:\n" <> " #{inspect(Exception.message(error))}"

  assert match?, message: message
  error
end
@doc """
Asserts the `exception` is raised during `function` execution.
Returns the rescued exception, fails otherwise.

## Examples

    assert_raise ArithmeticError, fn ->
      1 + "test"
    end

"""
def assert_raise(exception, function) when is_function(function) do
  try do
    function.()
  rescue
    error ->
      stacktrace = System.stacktrace()
      name = error.__struct__

      cond do
        # Expected exception: also check its message/1 callback works.
        name == exception ->
          check_error_message(name, error)
          error

        # A nested assertion failure must propagate unchanged.
        name == ExUnit.AssertionError ->
          reraise(error, stacktrace)

        # Wrong exception type: fail, preserving the original stacktrace.
        true ->
          message =
            "Expected exception #{inspect(exception)} " <>
              "but got #{inspect(name)} (#{Exception.message(error)})"

          reraise ExUnit.AssertionError, [message: message], stacktrace
      end
  else
    # Function returned normally: nothing was raised.
    _ -> flunk("Expected exception #{inspect(exception)} but nothing was raised")
  end
end
# Invokes the exception module's message/1 to make sure it does not itself
# crash (a buggy message/1 would otherwise surface as a confusing error later).
# Uses the implicit function-level `catch` to trap any kind of failure.
defp check_error_message(module, error) do
  module.message(error)
catch
  kind, reason ->
    stacktrace = System.stacktrace()

    message =
      "Got exception #{inspect(module)} but it failed to produce a message with:\n\n" <>
        Exception.format(kind, reason, stacktrace)

    flunk(message)
end
@doc """
Asserts that `value1` and `value2` differ by no more than `delta`.

This difference is inclusive, so the test will pass if the difference
and the `delta` are equal.

## Examples

    assert_in_delta 1.1, 1.5, 0.2
    assert_in_delta 10, 15, 2
    assert_in_delta 10, 15, 5

"""
def assert_in_delta(value1, value2, delta, message \\ nil)

def assert_in_delta(_, _, delta, _) when delta < 0 do
  raise ArgumentError, "delta must always be a positive number, got: #{inspect(delta)}"
end

def assert_in_delta(value1, value2, delta, message) do
  difference = abs(value1 - value2)

  default_message =
    "Expected the difference between #{inspect(value1)} and " <>
      "#{inspect(value2)} (#{inspect(difference)}) to be less than or equal to #{inspect(delta)}"

  assert difference <= delta, message || default_message
end
@doc """
Asserts `expression` will throw a value.
Returns the thrown value or fails otherwise.

## Examples

    assert catch_throw(throw 1) == 1

"""
defmacro catch_throw(expression) do
  do_catch(:throw, expression)
end

@doc """
Asserts `expression` will exit.
Returns the exit status/message or fails otherwise.

## Examples

    assert catch_exit(exit 1) == 1

"""
defmacro catch_exit(expression) do
  do_catch(:exit, expression)
end

@doc """
Asserts `expression` will cause an error.
Returns the error or fails otherwise.

## Examples

    assert catch_error(error 1) == 1

"""
defmacro catch_error(expression) do
  do_catch(:error, expression)
end

# Shared implementation of catch_throw/catch_exit/catch_error: evaluates the
# expression, flunks if it completes normally, re-raises nested assertion
# failures, and otherwise returns the caught value of the requested `kind`.
defp do_catch(kind, expr) do
  quote do
    try do
      _ = unquote(expr)
      flunk("Expected to catch #{unquote(kind)}, got nothing")
    rescue
      e in [ExUnit.AssertionError] ->
        reraise(e, System.stacktrace())
    catch
      unquote(kind), we_got -> we_got
    end
  end
end
@doc """
Asserts `value` is `nil` or `false` (that is, `value` is not truthy).

## Examples

    refute true, "This will obviously fail"

"""
def refute(value, message) do
  # assert/2 returns a truthy value on success, so refute returns false.
  not assert(!value, message)
end
@doc """
Asserts that a message matching `pattern` was not received (and won't be received)
within the `timeout` period, specified in milliseconds.
The `pattern` argument must be a match pattern. Flunks with `failure_message`
if a message matching `pattern` is received.
## Examples
refute_receive :bye
Refute received with an explicit timeout:
refute_receive :bye, 1000
"""
defmacro refute_receive(
pattern,
timeout \\ Application.fetch_env!(:ex_unit, :refute_receive_timeout),
failure_message \\ nil
) do
do_refute_receive(pattern, timeout, failure_message)
end
@doc """
Asserts a message matching `pattern` was not received (i.e. it is not in the
current process' mailbox).
The `pattern` argument must be a match pattern. Flunks with `failure_message`
if a message matching `pattern` was received.
Timeout is set to 0, so there is no waiting time.
## Examples
send self(), :hello
refute_received :bye
send self(), :hello
refute_received :hello, "Oh No!"
** (ExUnit.AssertionError) Oh No!
Process mailbox:
:bye
"""
defmacro refute_received(pattern, failure_message \\ nil) do
do_refute_receive(pattern, 0, failure_message)
end
# Builds the quoted `receive` used by refute_receive/refute_received:
# waits up to `timeout` ms; a matching message flunks, silence returns false.
defp do_refute_receive(pattern, timeout, failure_message) do
  receive_clause = refute_receive_clause(pattern, failure_message)

  quote do
    receive do
      unquote(receive_clause)
    after
      unquote(timeout) -> false
    end
  end
end

# Receive clause when no custom failure message was given: report the
# offending message together with the source form of the pattern it matched.
defp refute_receive_clause(pattern, nil) do
  binary = Macro.to_string(pattern)

  quote do
    unquote(pattern) = actual ->
      flunk(
        "Unexpectedly received message #{inspect(actual)} (which matched #{unquote(binary)})"
      )
  end
end

# Receive clause when the caller supplied its own failure message.
defp refute_receive_clause(pattern, failure_message) do
  quote do
    unquote(pattern) -> flunk(unquote(failure_message))
  end
end
@doc """
Asserts `value1` and `value2` are not within `delta`.

This difference is exclusive, so the test will fail if the difference
and the delta are equal.

If you supply `message`, information about the values will
automatically be appended to it.

## Examples

    refute_in_delta 1.1, 1.2, 0.2
    refute_in_delta 10, 11, 2

"""
def refute_in_delta(value1, value2, delta, message \\ nil) do
  difference = abs(value1 - value2)

  failure_message =
    case message do
      nil ->
        "Expected the difference between #{inspect(value1)} and " <>
          "#{inspect(value2)} (#{inspect(difference)}) to be more than #{inspect(delta)}"

      custom ->
        custom <>
          " (difference between #{inspect(value1)} " <>
          "and #{inspect(value2)} is less than #{inspect(delta)})"
    end

  refute difference < delta, failure_message
end
@doc """
Fails with a message.

## Examples

    flunk "This should raise an error"

"""
@spec flunk :: no_return
@spec flunk(String.t()) :: no_return
def flunk(message \\ "Flunked!") when is_binary(message) do
  # Delegates to assert/2 so the failure is reported as an AssertionError.
  assert false, message: message
end
end
| 25.59009 | 102 | 0.616397 |
1c2f2095732ead480b2dd0e276cf1e78c0a8022d | 446 | ex | Elixir | web/models/congregation.ex | melbystyle/jw_ministry_api | 2065d628a84e829b805c71b7e73fb0bee3d0dd44 | [
"Apache-2.0"
] | 1 | 2017-06-19T18:18:11.000Z | 2017-06-19T18:18:11.000Z | web/models/congregation.ex | melbystyle/jw_ministry_api | 2065d628a84e829b805c71b7e73fb0bee3d0dd44 | [
"Apache-2.0"
] | null | null | null | web/models/congregation.ex | melbystyle/jw_ministry_api | 2065d628a84e829b805c71b7e73fb0bee3d0dd44 | [
"Apache-2.0"
] | null | null | null | defmodule JwMinistryApi.Congregation do
use JwMinistryApi.Web, :model
schema "congregations" do
field :name, :string
belongs_to :coordinator, JwMinistryApi.Publisher
has_many :groups, JwMinistryApi.Group
timestamps()
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:name])
|> validate_required([:name])
end
end
| 21.238095 | 56 | 0.674888 |
1c2f287b24de6ce21becb40b2da45e7712272c89 | 8,788 | ex | Elixir | lib/telemetry_influx_db.ex | paranojik/telemetry_influxdb | aab1e7860b8cc8ce1a55a5c4f2b2496b323834ec | [
"MIT"
] | null | null | null | lib/telemetry_influx_db.ex | paranojik/telemetry_influxdb | aab1e7860b8cc8ce1a55a5c4f2b2496b323834ec | [
"MIT"
] | 3 | 2020-07-15T18:12:34.000Z | 2021-10-09T02:04:36.000Z | lib/telemetry_influx_db.ex | paranojik/telemetry_influxdb | aab1e7860b8cc8ce1a55a5c4f2b2496b323834ec | [
"MIT"
] | 4 | 2020-01-27T05:42:17.000Z | 2022-03-25T10:32:44.000Z | defmodule TelemetryInfluxDB do
alias TelemetryInfluxDB.BatchReporter
alias TelemetryInfluxDB.BatchHandler
alias TelemetryInfluxDB.EventHandler
alias TelemetryInfluxDB.HTTP
alias TelemetryInfluxDB.UDP
require Logger
@moduledoc """
`Telemetry` reporter for InfluxDB compatible events.
To use it, start the reporter with the `start_link/1` function, providing it a list of
`Telemetry` event names:
```elixir
TelemetryMetricsInfluxDB.start_link(
events: [
%{name: [:memory, :usage], metadata_tag_keys: [:host, :ip_address]},
%{name: [:http, :request]},
]
)
```
> Note that in the real project the reporter should be started under a supervisor, e.g. the main
> supervisor of your application.
By default, the reporter sends events through UDP to localhost:8089.
Note that the reporter doesn't aggregate events in-process - it sends updates to InfluxDB
whenever a relevant Telemetry event is emitted.
#### Configuration
Possible options for the reporter:
Options for any InfluxDB version:
* `:version` - :v1 or :v2. The version of InfluxDB to use; defaults to :v1 if not provided
* `:reporter_name` - unique name for the reporter. The purpose is to distinguish between different reporters running in the system.
* `:batch_size` - maximum number of events to send to InfluxDB in a single batch (default 1: no batching)
One can run separate independent InfluxDB reporters, with different configurations and goals.
* `:protocol` - :udp or :http. Which protocol to use for connecting to InfluxDB. Default option is :udp. InfluxDB v2 only supports :http for now.
* `:host` - host, where InfluxDB is running.
* `:port` - port, where InfluxDB is running.
* `:events` - list of `Telemetry` events' names that we want to send to InfluxDB.
Each event should be specified by the map with the field `name`, e.g. %{name: [:sample, :event, :name]}.
Event names should be compatible with `Telemetry` events' format.
It is also possible to specify an optional list of metadata keys that will be included in the event body and sent to InfluxDB as tags.
The list of metadata keys should be specified in the event data with the field `metadata_tag_keys`, e.g. %{name: [:sample, :event, :name], metadata_tag_keys: [:sample_meta, sample_meta2]}
* `:tags` - list of global static tags, that will be attached to each reported event. The format is a map,
where the key and the value are tag's name and value, respectively.
Both the tag's name and the value could be atoms or binaries.
V1 Only Options
* `:db` - name of the location where time series data is stored in InfluxDB v1
* `:username` - username of InfluxDB's user that has writes privileges. Only required in v1.
* `:password` - password for the user. Only required in v1.
V2 Only Options
* `:bucket` - name of the location where time series data is stored in InfluxDB v2
* `:org` - workspace in InfluxDB v2 where a bucket belongs
* `:token` - InfluxDB v2 authentication token used for authenticating requests. Must have write privileges to the bucket and org specified.
#### Notes
For the HTTP protocol, [worker_pool](https://github.com/inaka/worker_pool) is used for sending requests asynchronously.
Therefore the HTTP requests are sent in the context of the separate workers' pool, which does not block the client's application
(it is not sent in the critical path of the client's process).
The events are sent straightaway without any batching techniques.
On the other hand, UDP packets are sent in the context of the processes that execute the events.
However, the lightweight nature of UDP should not cause any bottlenecks in such a solution.
Once the reporter is started, it is attached to specified `Telemetry` events.
The events are detached when the reporter is shutdown.
"""
@default_port 8089
@type option ::
{:port, :inet.port_number()}
| {:host, String.t()}
| {:protocol, atom()}
| {:reporter_name, binary()}
| {:batch_size, non_neg_integer()}
| {:version, atom()}
| {:db, String.t()}
| {:org, String.t()}
| {:bucket, String.t()}
| {:username, String.t()}
| {:password, String.t()}
| {:token, String.t()}
| {:events, [event]}
| {:tags, tags}
| {:worker_pool_size, non_neg_integer()}
@type options :: [option]
@type event :: %{required(:name) => :telemetry.event_name()}
@type tags :: map()
@type event_spec() :: map()
@type event_name() :: [atom()]
@type event_measurements :: map()
@type event_metadata :: map()
@type config :: map()
@type handler_id() :: term()
@spec start_link(options) :: GenServer.on_start()
def start_link(options) do
config =
options
|> Enum.into(%{})
|> Map.put_new(:reporter_name, "default")
|> Map.put_new(:batch_size, 1)
|> Map.put_new(:protocol, :udp)
|> Map.put_new(:host, "localhost")
|> Map.put_new(:port, @default_port)
|> Map.put_new(:tags, %{})
|> Map.put_new(:worker_pool_size, 3)
|> Map.put_new(:version, :v1)
|> validate_required!([:events])
|> validate_event_fields!()
|> validate_protocol!()
|> validate_version_params!()
|> add_publisher()
create_ets(config.reporter_name)
specs = child_specs(config.protocol, config)
Supervisor.start_link(specs, strategy: :one_for_all)
end
defp add_publisher(%{protocol: :http} = config) do
Map.put(config, :publisher, HTTP.Publisher)
end
defp add_publisher(%{protocol: :udp} = config) do
Map.put(config, :publisher, UDP.Publisher)
end
defp create_ets(prefix) do
try do
:ets.new(table_name(prefix), [:set, :public, :named_table])
rescue
_ ->
:ok
end
end
defp table_name(prefix) do
:erlang.binary_to_atom(prefix <> "_influx_reporter", :utf8)
end
def stop(pid) do
Supervisor.stop(pid)
end
defp child_specs(protocol, config) do
publisher_child_specs(protocol, config) ++ common_child_specs(config)
end
defp publisher_child_specs(:http, config), do: [HTTP.Pool.child_spec(config)]
defp publisher_child_specs(:udp, config),
do: [%{id: UDP.Connector, start: {UDP.Connector, :start_link, [config]}}]
defp common_child_specs(config) do
[
%{id: EventHandler, start: {EventHandler, :start_link, [config]}},
%{id: BatchReporter, start: {BatchReporter, :start_link, [batch_reporter_options(config)]}}
]
end
def batch_reporter_options(config) do
[
name: BatchReporter.get_name(config),
batch_size: config.batch_size,
report_fn: &BatchHandler.handle_batch/1
]
end
defp validate_protocol!(%{protocol: :udp} = opts), do: opts
defp validate_protocol!(%{protocol: :http} = opts), do: opts
defp validate_protocol!(_) do
raise(ArgumentError, "protocol has to be :udp or :http")
end
defp validate_version_params!(%{version: :v2} = opts), do: validate_v2_params!(opts)
defp validate_version_params!(%{version: :v1} = opts), do: validate_v1_params!(opts)
defp validate_version_params!(_opts) do
raise(
ArgumentError,
"version must be :v1 or :v2"
)
end
defp validate_v2_params!(%{protocol: :http, org: _org, bucket: _bucket, token: _token} = opts),
do: opts
defp validate_v2_params!(%{protocol: :udp}) do
raise(
ArgumentError,
"the udp protocol is not currently supported for InfluxDB v2; please use http instead"
)
end
defp validate_v2_params!(_) do
raise(ArgumentError, "for InfluxDB v2 you need to specify :bucket, :org, and :token fields")
end
defp validate_v1_params!(%{protocol: :udp} = opts), do: opts
defp validate_v1_params!(%{protocol: :http, db: _db} = opts), do: opts
defp validate_v1_params!(_),
do: raise(ArgumentError, "for http protocol in v1 you need to specify :db field")
defp validate_event_fields!(%{events: []}) do
raise(ArgumentError, "you need to attach to at least one event")
end
defp validate_event_fields!(%{events: events} = opts) when is_list(events) do
Enum.map(events, &validate_required!(&1, :name))
opts
end
defp validate_event_fields!(%{events: _}) do
raise(ArgumentError, ":events needs to be list of events")
end
defp validate_required!(opts, fields) when is_list(fields) do
Enum.map(fields, &validate_required!(opts, &1))
opts
end
defp validate_required!(opts, field) do
case Map.has_key?(opts, field) do
true ->
opts
false ->
raise(ArgumentError, "#{inspect(field)} field needs to be specified")
end
end
end
| 36.31405 | 192 | 0.676149 |
1c2f2e80adba3af66e0fccda849da63f2c0ef639 | 2,595 | ex | Elixir | clients/dataproc/lib/google_api/dataproc/v1/model/software_config.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/dataproc/lib/google_api/dataproc/v1/model/software_config.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/dataproc/lib/google_api/dataproc/v1/model/software_config.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Dataproc.V1.Model.SoftwareConfig do
  @moduledoc """
  Specifies the selection and config of software inside the cluster.

  ## Attributes

  - imageVersion (String.t): Optional. The version of software inside the cluster. It must be one of the supported Cloud Dataproc Versions, such as \"1.2\" (including a subminor version, such as \"1.2.29\"), or the \"preview\" version. If unspecified, it defaults to the latest Debian version. Defaults to: `null`.
  - optionalComponents ([String.t]): The set of optional components to activate on the cluster. Defaults to: `null`.
    - Enum - one of
  - properties (%{optional(String.t) => String.t}): Optional. The properties to set on daemon config files.Property keys are specified in prefix:property format, for example core:hadoop.tmp.dir. The following are supported prefixes and their mappings: capacity-scheduler: capacity-scheduler.xml core: core-site.xml distcp: distcp-default.xml hdfs: hdfs-site.xml hive: hive-site.xml mapred: mapred-site.xml pig: pig.properties spark: spark-defaults.conf yarn: yarn-site.xmlFor more information, see Cluster properties. Defaults to: `null`.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :imageVersion => any(),
          :optionalComponents => list(any()),
          :properties => map()
        }

  field(:imageVersion)
  field(:optionalComponents, type: :list)
  field(:properties, type: :map)
end

# Poison protocol implementations delegate to the Gax ModelBase helpers so the
# struct (de)serializes with the field metadata declared above.
defimpl Poison.Decoder, for: GoogleApi.Dataproc.V1.Model.SoftwareConfig do
  def decode(value, options) do
    GoogleApi.Dataproc.V1.Model.SoftwareConfig.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.Dataproc.V1.Model.SoftwareConfig do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 47.181818 | 546 | 0.743738 |
1c2f3f1081f8fe2519b3271e70ca0b5a34b284f3 | 2,053 | exs | Elixir | apps/andi/test/integration/andi_web/live/user_live_view_test.exs | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | [
"Apache-2.0"
] | 18 | 2020-11-13T15:38:24.000Z | 2021-05-26T00:40:08.000Z | apps/andi/test/integration/andi_web/live/user_live_view_test.exs | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | [
"Apache-2.0"
] | 365 | 2020-09-21T12:31:40.000Z | 2021-09-25T14:54:21.000Z | apps/andi/test/integration/andi_web/live/user_live_view_test.exs | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | [
"Apache-2.0"
] | 3 | 2020-10-06T16:17:49.000Z | 2021-09-03T17:11:41.000Z | defmodule AndiWeb.UserLiveViewTest do
use ExUnit.Case
use Andi.DataCase
use AndiWeb.Test.AuthConnCase.IntegrationCase
@moduletag shared_data_connection: true
import Placebo
import Phoenix.LiveViewTest
import SmartCity.TestHelper, only: [eventually: 1]
import FlokiHelpers,
only: [
get_texts: 2
]
alias SmartCity.TestDataGenerator, as: TDG
alias Andi.Schemas.User
@instance_name Andi.instance_name()
@url_path "/users"
describe "public user access" do
setup do
user_one_subject_id = UUID.uuid4()
{:ok, user} =
User.create_or_update(user_one_subject_id, %{
subject_id: user_one_subject_id,
email: "blahblahblah@blah.com"
})
[user: user]
end
test "public users cannot view or edit organizations", %{public_conn: conn, user: user} do
assert {:error,
{
:redirect,
%{
to: "/auth/auth0?prompt=login&error_message=Unauthorized"
}
}} = live(conn, @url_path)
end
end
describe "curator users access" do
setup do
user_one_subject_id = UUID.uuid4()
user_two_subject_id = UUID.uuid4()
{:ok, user1} =
User.create_or_update(user_one_subject_id, %{
subject_id: user_one_subject_id,
email: "blah@blah.com"
})
{:ok, user2} =
User.create_or_update(user_two_subject_id, %{
subject_id: user_two_subject_id,
email: "foo@foo.com"
})
[user1: user1, user2: user2]
end
test "curators can view all the users", %{curator_conn: conn} do
assert {:ok, view, html} = live(conn, @url_path)
end
test "all users are presented in the users table", %{curator_conn: conn, user1: user1, user2: user2} do
assert {:ok, view, html} = live(conn, @url_path)
users = get_texts(html, ".users-table__cell--email")
assert Enum.member?(users, "foo@foo.com")
assert Enum.member?(users, "blah@blah.com")
end
end
end
| 25.345679 | 107 | 0.618607 |
1c2f50047226252293c781cf72433b3c209bd408 | 925 | ex | Elixir | lib/nys_etl/commcare/index_case.ex | RatioPBC/epi-viaduct-nys | 99fb637785ea207aee5449fa01fa59dd18ec8bf2 | [
"MIT"
] | 2 | 2021-06-22T21:01:49.000Z | 2021-11-04T18:36:48.000Z | lib/nys_etl/commcare/index_case.ex | RatioPBC/epi-viaduct-nys | 99fb637785ea207aee5449fa01fa59dd18ec8bf2 | [
"MIT"
] | null | null | null | lib/nys_etl/commcare/index_case.ex | RatioPBC/epi-viaduct-nys | 99fb637785ea207aee5449fa01fa59dd18ec8bf2 | [
"MIT"
] | null | null | null | defmodule NYSETL.Commcare.IndexCase do
use NYSETL, :schema
alias NYSETL.Commcare
alias NYSETL.ECLRS
schema "index_cases" do
field :case_id, :string, read_after_writes: true
field :closed, :boolean, default: false
field :data, :map
field :tid, :string
belongs_to :county, ECLRS.County
belongs_to :person, Commcare.Person
has_many :lab_results, Commcare.LabResult
has_many :index_case_events, Commcare.IndexCaseEvent
has_many :events, through: [:index_case_events, :event]
timestamps()
end
def changeset(struct \\ %__MODULE__{}, attrs) do
struct
|> cast(attrs, __schema__(:fields) -- [:id])
|> cast_closed()
|> validate_required([:data])
|> unique_constraint(:case_id)
end
defp cast_closed(changeset) do
if changeset |> get_field(:closed) |> is_nil() do
put_change(changeset, :closed, false)
else
changeset
end
end
end
| 24.342105 | 59 | 0.684324 |
1c2f55adbddaff3c4ac43ff7375202abed486872 | 218 | ex | Elixir | lib/iex/lib/iex/app.ex | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | 2 | 2018-11-15T06:38:14.000Z | 2018-11-17T18:03:14.000Z | lib/iex/lib/iex/app.ex | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | 1 | 2018-09-10T23:36:45.000Z | 2018-09-10T23:36:45.000Z | lib/iex/lib/iex/app.ex | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | 8 | 2018-02-20T18:30:53.000Z | 2019-06-18T14:23:31.000Z | defmodule IEx.App do
@moduledoc false
use Application
def start(_type, _args) do
children = [IEx.Config, IEx.Pry]
Supervisor.start_link(children, strategy: :one_for_one, name: IEx.Supervisor)
end
end
| 19.818182 | 81 | 0.724771 |
1c2f632005927b0a3d2f23e3ee3079fea7cf376b | 77 | ex | Elixir | backend/lib/functional_vote_web/views/layout_view.ex | maxrchung/FunctionalVote | 95c54c7614a74718e14c6fe74fd0bd4e84f85444 | [
"MIT"
] | 10 | 2020-03-13T12:56:06.000Z | 2021-06-28T22:13:27.000Z | backend/lib/functional_vote_web/views/layout_view.ex | maxrchung/FunctionalVote | 95c54c7614a74718e14c6fe74fd0bd4e84f85444 | [
"MIT"
] | 132 | 2020-02-08T02:01:03.000Z | 2022-02-18T20:38:38.000Z | backend/lib/functional_vote_web/views/layout_view.ex | maxrchung/FunctionalVote | 95c54c7614a74718e14c6fe74fd0bd4e84f85444 | [
"MIT"
] | 1 | 2021-03-17T06:22:55.000Z | 2021-03-17T06:22:55.000Z | defmodule FunctionalVoteWeb.LayoutView do
use FunctionalVoteWeb, :view
end
| 19.25 | 41 | 0.844156 |
1c2fa08cc605095c12eb26c455e723bbc7c0f258 | 491 | exs | Elixir | elixir/leap/leap.exs | jjdonov/Exercism | 3585420c5b0e74ea51a6fbd60e8263620061a1d9 | [
"MIT"
] | null | null | null | elixir/leap/leap.exs | jjdonov/Exercism | 3585420c5b0e74ea51a6fbd60e8263620061a1d9 | [
"MIT"
] | null | null | null | elixir/leap/leap.exs | jjdonov/Exercism | 3585420c5b0e74ea51a6fbd60e8263620061a1d9 | [
"MIT"
] | null | null | null | defmodule Year do
@doc """
Returns whether 'year' is a leap year.
A leap year occurs:
on every year that is evenly divisible by 4
except every year that is evenly divisible by 100
unless the year is also evenly divisible by 400
"""
@spec leap_year?(non_neg_integer) :: boolean
def leap_year?(year) when rem(year, 4) === 0 do
case rem(year, 100) do
0 ->
rem(year, 400) == 0
_ ->
true
end
end
def leap_year?(_), do: false
end
| 21.347826 | 53 | 0.621181 |
1c2fc48ca7f4fc2e513ba194329adf7cbb1ae92e | 4,807 | exs | Elixir | config/rig_tests/test.exs | prakriti07/reactive-interaction-gateway | c9d33064982b5ae12e9af7a300e90b3e7973d0b3 | [
"Apache-2.0"
] | null | null | null | config/rig_tests/test.exs | prakriti07/reactive-interaction-gateway | c9d33064982b5ae12e9af7a300e90b3e7973d0b3 | [
"Apache-2.0"
] | null | null | null | config/rig_tests/test.exs | prakriti07/reactive-interaction-gateway | c9d33064982b5ae12e9af7a300e90b3e7973d0b3 | [
"Apache-2.0"
] | 1 | 2020-07-17T05:17:32.000Z | 2020-07-17T05:17:32.000Z | use Mix.Config
config :rig, :systest_kafka_config, %{
consumer_topics: [],
ssl_enabled?: false
}
config :rig, RigTests.Proxy.ResponseFrom.KafkaTest,
server_id: :rig_proxy_responsefrom_kafkatest_genserver,
# The list of brokers, given by a comma-separated list of host:port items:
brokers: {:system, :list, "KAFKA_BROKERS", []},
serializer: {:system, "KAFKA_SERIALIZER", nil},
schema_registry_host: {:system, "KAFKA_SCHEMA_REGISTRY_HOST", nil},
# The list of topics to consume messages from:
consumer_topics: {:system, :list, "KAFKA_SOURCE_TOPICS", []},
# If KAFKA_SSL_ENABLED=0, the KAFKA_SSL_* settings are ignored; otherwise, they're required.
ssl_enabled?: {:system, :boolean, "KAFKA_SSL_ENABLED", false},
# If use_enabled?, the following paths are expected (relative to the `priv` directory):
ssl_ca_certfile: {:system, "KAFKA_SSL_CA_CERTFILE", "ca.crt.pem"},
ssl_certfile: {:system, "KAFKA_SSL_CERTFILE", "client.crt.pem"},
ssl_keyfile: {:system, "KAFKA_SSL_KEYFILE", "client.key.pem"},
# In case the private key is password protected:
ssl_keyfile_pass: {:system, "KAFKA_SSL_KEYFILE_PASS", ""},
# Credentials for SASL/Plain authentication. Example: "plain:myusername:mypassword"
sasl: {:system, "KAFKA_SASL", nil},
response_topic: "rig-proxy-response"
config :rig, RigTests.Proxy.PublishToEventStream.KafkaTest,
server_id: :rig_proxy_publish_kafkatest_genserver,
# The list of brokers, given by a comma-separated list of host:port items:
brokers: {:system, :list, "KAFKA_BROKERS", []},
serializer: {:system, "KAFKA_SERIALIZER", nil},
schema_registry_host: {:system, "KAFKA_SCHEMA_REGISTRY_HOST", nil},
# The list of topics to consume messages from:
consumer_topics: {:system, :list, "KAFKA_SOURCE_TOPICS", []},
# If KAFKA_SSL_ENABLED=0, the KAFKA_SSL_* settings are ignored; otherwise, they're required.
ssl_enabled?: {:system, :boolean, "KAFKA_SSL_ENABLED", false},
# If use_enabled?, the following paths are expected (relative to the `priv` directory):
ssl_ca_certfile: {:system, "KAFKA_SSL_CA_CERTFILE", "ca.crt.pem"},
ssl_certfile: {:system, "KAFKA_SSL_CERTFILE", "client.crt.pem"},
ssl_keyfile: {:system, "KAFKA_SSL_KEYFILE", "client.key.pem"},
# In case the private key is password protected:
ssl_keyfile_pass: {:system, "KAFKA_SSL_KEYFILE_PASS", ""},
# Credentials for SASL/Plain authentication. Example: "plain:myusername:mypassword"
sasl: {:system, "KAFKA_SASL", nil}
config :rig, RigTests.Avro.AvroTest,
server_id: :rig_avro_kafkatest_genserver,
# The list of brokers, given by a comma-separated list of host:port items:
brokers: {:system, :list, "KAFKA_BROKERS", []},
serializer: {:system, "KAFKA_SERIALIZER", nil},
schema_registry_host: {:system, "KAFKA_SCHEMA_REGISTRY_HOST", nil},
# The list of topics to consume messages from:
consumer_topics: {:system, :list, "KAFKA_SOURCE_TOPICS", []},
# If KAFKA_SSL_ENABLED=0, the KAFKA_SSL_* settings are ignored; otherwise, they're required.
ssl_enabled?: {:system, :boolean, "KAFKA_SSL_ENABLED", false},
# If use_enabled?, the following paths are expected (relative to the `priv` directory):
ssl_ca_certfile: {:system, "KAFKA_SSL_CA_CERTFILE", "ca.crt.pem"},
ssl_certfile: {:system, "KAFKA_SSL_CERTFILE", "client.crt.pem"},
ssl_keyfile: {:system, "KAFKA_SSL_KEYFILE", "client.key.pem"},
# In case the private key is password protected:
ssl_keyfile_pass: {:system, "KAFKA_SSL_KEYFILE_PASS", ""},
# Credentials for SASL/Plain authentication. Example: "plain:myusername:mypassword"
sasl: {:system, "KAFKA_SASL", nil}
config :rig, RigTests.Proxy.RequestLogger.KafkaTest,
server_id: :rig_proxy_logger_kafkatest_genserver,
# The list of brokers, given by a comma-separated list of host:port items:
brokers: {:system, :list, "KAFKA_BROKERS", []},
serializer: {:system, "KAFKA_SERIALIZER", nil},
schema_registry_host: {:system, "KAFKA_SCHEMA_REGISTRY_HOST", nil},
# The list of topics to consume messages from:
consumer_topics: ["rig-request-log"],
# If KAFKA_SSL_ENABLED=0, the KAFKA_SSL_* settings are ignored; otherwise, they're required.
ssl_enabled?: {:system, :boolean, "KAFKA_SSL_ENABLED", false},
# If use_enabled?, the following paths are expected (relative to the `priv` directory):
ssl_ca_certfile: {:system, "KAFKA_SSL_CA_CERTFILE", "ca.crt.pem"},
ssl_certfile: {:system, "KAFKA_SSL_CERTFILE", "client.crt.pem"},
ssl_keyfile: {:system, "KAFKA_SSL_KEYFILE", "client.key.pem"},
# In case the private key is password protected:
ssl_keyfile_pass: {:system, "KAFKA_SSL_KEYFILE_PASS", ""},
# Credentials for SASL/Plain authentication. Example: "plain:myusername:mypassword"
sasl: {:system, "KAFKA_SASL", nil}
config :rig, RigTests.Proxy.ResponseFrom.KinesisTest, response_topic: "rig-proxy-response"
| 55.895349 | 94 | 0.739546 |
1c2fe8b33ebb79916f2bcc76040201953ceacf2e | 1,919 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/url_rewrite.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-10-01T09:20:41.000Z | 2021-10-01T09:20:41.000Z | clients/compute/lib/google_api/compute/v1/model/url_rewrite.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/url_rewrite.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.UrlRewrite do
  @moduledoc """
  The spec for modifying the path before sending the request to the matched backend service.

  ## Attributes

  *   `hostRewrite` (*type:* `String.t`, *default:* `nil`) - Prior to forwarding the request to the selected service, the request's host header is replaced with contents of hostRewrite. The value must be between 1 and 255 characters.
  *   `pathPrefixRewrite` (*type:* `String.t`, *default:* `nil`) - Prior to forwarding the request to the selected backend service, the matching portion of the request's path is replaced by pathPrefixRewrite. The value must be between 1 and 1024 characters.
  """

  # `use ModelBase` supplies the struct, `field/1` macro, and encode/decode helpers.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :hostRewrite => String.t() | nil,
          :pathPrefixRewrite => String.t() | nil
        }

  # Field names stay camelCase on purpose: they mirror the JSON wire format.
  field(:hostRewrite)
  field(:pathPrefixRewrite)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.UrlRewrite do
  # Delegate to the model's own `decode/2` (presumably provided via
  # `use GoogleApi.Gax.ModelBase` — see the model module).
  def decode(struct, opts), do: GoogleApi.Compute.V1.Model.UrlRewrite.decode(struct, opts)
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.UrlRewrite do
  # Encoding is shared across all generated models, so it lives in ModelBase.
  def encode(struct, opts), do: GoogleApi.Gax.ModelBase.encode(struct, opts)
end
| 38.38 | 257 | 0.739448 |
1c2fef131f15f365803620818952aca05aba1940 | 6,824 | exs | Elixir | clients/gax/test/gax/connection_test.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/gax/test/gax/connection_test.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/gax/test/gax/connection_test.exs | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule Gax.ConnectionTest do
  # Exercises `GoogleApi.Gax.Connection.build_request/1`, which turns a
  # `Request` accumulator into a Tesla option keyword list. The tests inspect
  # the resulting :query, :url, :body and :headers entries.
  use ExUnit.Case
  doctest GoogleApi.Gax.Connection

  alias GoogleApi.Gax.{Connection, Request}

  test "builds a basic request with a query" do
    request =
      Request.new()
      |> Request.add_param(:query, :foo, "bar")
      |> Connection.build_request()

    # Query params are carried through verbatim as a keyword list.
    assert [foo: "bar"] == Keyword.get(request, :query)
  end

  test "builds a basic request with a url" do
    request =
      Request.new()
      |> Request.url("/foo/bar")
      |> Connection.build_request()

    assert "/foo/bar" == Keyword.get(request, :url)
  end

  test "builds a basic request with a body" do
    request =
      Request.new()
      |> Request.add_param(:body, :foo, "bar")
      |> Request.add_param(:body, :asdf, "qwer")
      |> Connection.build_request()

    # Body params become one multipart part each.
    body = %Tesla.Multipart{} = Keyword.get(request, :body)
    assert 2 == length(body.parts)
  end

  test "builds a request with a single file" do
    # This test file itself is used as the upload fixture.
    request =
      Request.new()
      |> Request.add_param(:file, Path.basename(__ENV__.file), __ENV__.file)
      |> Connection.build_request()

    body = %Tesla.Multipart{} = Keyword.get(request, :body)
    assert 1 == length(body.parts)
  end

  test "builds a request with multiple files" do
    request =
      Request.new()
      |> Request.add_param(:file, Path.basename(__ENV__.file), __ENV__.file)
      |> Request.add_param(:file, Path.basename(__ENV__.file), __ENV__.file)
      |> Connection.build_request()

    body = %Tesla.Multipart{} = Keyword.get(request, :body)
    assert 2 == length(body.parts)
  end

  test "builds a request with body and files" do
    # Body params and file params each contribute a part (2 + 2 = 4).
    request =
      Request.new()
      |> Request.add_param(:body, :foo, "bar")
      |> Request.add_param(:body, :asdf, "qwer")
      |> Request.add_param(:file, Path.basename(__ENV__.file), __ENV__.file)
      |> Request.add_param(:file, Path.basename(__ENV__.file), __ENV__.file)
      |> Connection.build_request()

    body = %Tesla.Multipart{} = Keyword.get(request, :body)
    assert 4 == length(body.parts)
  end

  test "builds a multipart upload request with iodata and content type" do
    # The data part's Content-Type is taken from metadata's `contentType`.
    metadata = %{contentType: "text/plain"}
    data = ["1", ["2"]]

    request =
      Request.new()
      |> Request.add_param(:body, :metadata, metadata)
      |> Request.add_param(:body, :data, data)
      |> Connection.build_request()

    body = %Tesla.Multipart{} = Keyword.get(request, :body)
    [part1, part2] = body.parts
    assert "{\"contentType\":\"text/plain\"}" == part1.body
    assert [{:"Content-Type", "application/json"}] == part1.headers
    # iodata is passed through without flattening.
    assert data == part2.body
    assert [{:"Content-Type", "text/plain"}] == part2.headers
  end

  test "builds a multipart upload request with iodata but no content type" do
    # Without `contentType` in metadata the data part falls back to octet-stream.
    metadata = %{foo: "bar"}
    data = ["1", ["2"]]

    request =
      Request.new()
      |> Request.add_param(:body, :metadata, metadata)
      |> Request.add_param(:body, :data, data)
      |> Connection.build_request()

    body = %Tesla.Multipart{} = Keyword.get(request, :body)
    [part1, part2] = body.parts
    assert "{\"foo\":\"bar\"}" == part1.body
    assert [{:"Content-Type", "application/json"}] == part1.headers
    assert data == part2.body
    assert [{:"Content-Type", "application/octet-stream"}] == part2.headers
  end

  test "builds a multipart upload request with a JSON decodable struct" do
    # Non-iodata data is JSON-encoded just like the metadata part.
    metadata = %{foo: "bar"}
    data = %{baz: "qux"}

    request =
      Request.new()
      |> Request.add_param(:body, :metadata, metadata)
      |> Request.add_param(:body, :data, data)
      |> Connection.build_request()

    body = %Tesla.Multipart{} = Keyword.get(request, :body)
    [part1, part2] = body.parts
    assert "{\"foo\":\"bar\"}" == part1.body
    assert [{:"Content-Type", "application/json"}] == part1.headers
    assert "{\"baz\":\"qux\"}" == part2.body
    assert [{:"Content-Type", "application/json"}] == part2.headers
  end

  test "builds a multipart upload request with a non-JSON struct" do
    # A tuple is not JSON-encodable, so building the request must raise.
    metadata = %{foo: "bar"}
    data = %{baz: {}}

    assert_raise(Poison.EncodeError, fn ->
      Request.new()
      |> Request.add_param(:body, :metadata, metadata)
      |> Request.add_param(:body, :data, data)
      |> Connection.build_request()
    end)
  end

  test "creates api client header without library version" do
    request =
      Request.new()
      |> Connection.build_request()

    # Header format: gl-elixir/<elixir> gax/<gax> gdcl/<lib>; lib is empty here.
    elixir_version = System.version()
    gax_version = Application.spec(:google_gax, :vsn)

    assert [{"x-goog-api-client", "gl-elixir/#{elixir_version} gax/#{gax_version} gdcl/"}] ==
             Keyword.get(request, :headers)
  end

  test "creates api client header with library version" do
    request =
      Request.new()
      |> Request.library_version("1.2.3")
      |> Connection.build_request()

    elixir_version = System.version()
    gax_version = Application.spec(:google_gax, :vsn)

    assert [{"x-goog-api-client", "gl-elixir/#{elixir_version} gax/#{gax_version} gdcl/1.2.3"}] ==
             Keyword.get(request, :headers)
  end

  test "Appends existing api client header" do
    # A pre-existing x-goog-api-client header is merged into (appended after)
    # the generated one; unrelated headers are preserved.
    request =
      Request.new()
      |> Map.put(:header, [{"user-agent", "hello"}, {"x-goog-api-client", "whoops/3.2.1"}])
      |> Connection.build_request()

    elixir_version = System.version()
    gax_version = Application.spec(:google_gax, :vsn)

    assert [
             {"x-goog-api-client",
              "gl-elixir/#{elixir_version} gax/#{gax_version} gdcl/ whoops/3.2.1"},
             {"user-agent", "hello"}
           ] == Keyword.get(request, :headers)
  end

  test "Appends multiple existing api client headers" do
    # Header-name matching is case-insensitive; both spellings are merged,
    # preserving their relative order.
    request =
      Request.new()
      |> Map.put(:header, [
        {"user-agent", "hello"},
        {"X-Goog-Api-Client", "foo/4.3.2"},
        {"x-goog-api-client", "whoops/3.2.1"}
      ])
      |> Request.library_version("0.1.1")
      |> Connection.build_request()

    elixir_version = System.version()
    gax_version = Application.spec(:google_gax, :vsn)

    assert [
             {"x-goog-api-client",
              "gl-elixir/#{elixir_version} gax/#{gax_version} gdcl/0.1.1 foo/4.3.2 whoops/3.2.1"},
             {"user-agent", "hello"}
           ] == Keyword.get(request, :headers)
  end
end
| 32.807692 | 98 | 0.629396 |
1c305e814f12ff58de44407f398ca4c9db8ae0ad | 1,601 | exs | Elixir | test/football_api/protobuf/match_test.exs | afatsini/football_api | c17af14733fc7a649c38516045867c72ecab469f | [
"MIT"
] | null | null | null | test/football_api/protobuf/match_test.exs | afatsini/football_api | c17af14733fc7a649c38516045867c72ecab469f | [
"MIT"
] | null | null | null | test/football_api/protobuf/match_test.exs | afatsini/football_api | c17af14733fc7a649c38516045867c72ecab469f | [
"MIT"
defmodule FootballApi.Protobuf.MatchTest do
  # Round-trip tests for the Match protobuf serializer: a decoded
  # `FootballApi.Schemas.Match` struct must encode to the protobuf binary
  # and decode back to the same struct.
  use ExUnit.Case, async: true

  alias FootballApi.Protobuf.Match
  alias FootballApi.Protobuf.Match.Match, as: ProtoMatch
  alias FootballApi.Schemas.Match, as: MatchObject

  setup do
    # One decoded match plus its protobuf-encoded binary, shared by all tests.
    decoded_protobuf = %MatchObject{
      AwayTeam: "Mallorca",
      Date: "22/08/15",
      Div: "SP2",
      FTAG: "0",
      FTHG: "2",
      FTR: "H",
      HTAG: "0",
      HTHG: "1",
      HTR: "H",
      HomeTeam: "Alcorcon",
      Season: "201516",
      id: "1"
    }

    encoded_protobuf =
      decoded_protobuf |> Map.from_struct() |> ProtoMatch.new() |> ProtoMatch.encode()

    %{
      decoded_protobuf: decoded_protobuf,
      encoded_protobuf: encoded_protobuf
    }
  end

  describe "encode/1" do
    test "encode map to protobuffer", %{
      decoded_protobuf: decoded_protobuf,
      encoded_protobuf: encoded_protobuf
    } do
      assert Match.encode(decoded_protobuf) == encoded_protobuf
    end

    test "encode list of maps to protobuffer", %{
      decoded_protobuf: decoded_protobuf,
      encoded_protobuf: encoded_protobuf
    } do
      list_of_maps = [decoded_protobuf, decoded_protobuf, decoded_protobuf]
      encoded = Match.encode(list_of_maps)

      # Bug fix: the original comparison discarded its result (no `assert`),
      # so this test could never fail. Assert each element and the count.
      assert length(encoded) == length(list_of_maps)
      Enum.each(encoded, fn result -> assert result == encoded_protobuf end)
    end
  end

  describe "decode/1" do
    test "decode protobuffer to map", %{
      decoded_protobuf: decoded_protobuf,
      encoded_protobuf: encoded_protobuf
    } do
      assert Match.decode(encoded_protobuf) == decoded_protobuf
    end
  end
end
| 24.630769 | 86 | 0.656465 |
1c30669bfbe50820e0515efa43072504193207bf | 2,263 | ex | Elixir | lib/absinthe/phase/schema/validation/no_circular_field_imports.ex | pulkit110/absinthe | fa2060307a401d0943bde72d08267602e4027889 | [
"MIT"
] | null | null | null | lib/absinthe/phase/schema/validation/no_circular_field_imports.ex | pulkit110/absinthe | fa2060307a401d0943bde72d08267602e4027889 | [
"MIT"
] | null | null | null | lib/absinthe/phase/schema/validation/no_circular_field_imports.ex | pulkit110/absinthe | fa2060307a401d0943bde72d08267602e4027889 | [
"MIT"
defmodule Absinthe.Phase.Schema.Validation.NoCircularFieldImports do
  # Schema-build phase that rejects `import_fields` cycles between type
  # definitions and, when there are none, reorders the types so every type
  # appears after the types it imports fields from.
  @moduledoc false

  use Absinthe.Phase
  alias Absinthe.Blueprint
  alias Absinthe.Blueprint.Schema

  # Phase entry point: rewrite each SchemaDefinition node in the blueprint.
  def run(blueprint, _opts) do
    blueprint = Blueprint.prewalk(blueprint, &validate_schema/1)
    {:ok, blueprint}
  end

  # Halt the prewalk at the schema definition; its type list is handled here.
  def validate_schema(%Schema.SchemaDefinition{type_definitions: types} = schema) do
    {:halt, %{schema | type_definitions: sort_and_validate_types(types)}}
  end

  def validate_schema(node), do: node

  def sort_and_validate_types(types) do
    # :digraph state lives in ETS, so it must be deleted (see `after`).
    graph = :digraph.new([:cyclic])

    try do
      _ = build_import_graph(types, graph)

      # Attach an error to every type that participates in an import cycle,
      # building an identifier => type map along the way.
      {types, cycles?} =
        Enum.reduce(types, {%{}, false}, fn type, {types, cycles?} ->
          if cycle = :digraph.get_cycle(graph, type.identifier) do
            type = type |> put_error(error(type, cycle))
            {Map.put(types, type.identifier, type), true}
          else
            {Map.put(types, type.identifier, type), cycles?}
          end
        end)

      if cycles? do
        # With cycles present, ordering is meaningless; return types as-is
        # (now carrying their errors).
        Map.values(types)
      else
        # No cycles: emit types in reverse topological order so that a type's
        # import targets come before the type that imports from them.
        graph
        |> :digraph_utils.topsort()
        |> Enum.reverse()
        |> Enum.flat_map(fn identifier ->
          # Vertices can exist for import targets with no matching definition
          # here (added in add_to_graph/2); drop those identifiers.
          case Map.fetch(types, identifier) do
            {:ok, type} -> [type]
            _ -> []
          end
        end)
      end
    after
      :digraph.delete(graph)
    end
  end

  # Builds the phase error placed on a type whose imports form a cycle.
  defp error(type, deps) do
    %Absinthe.Phase.Error{
      message:
        String.trim("""
        Field Import Cycle Error
        Field Import in object `#{type.identifier}' `import_fields(#{inspect(type.imports)}) forms a cycle via: (#{inspect(deps)})
        """),
      locations: [type.__reference__.location],
      phase: __MODULE__,
      extra: type.identifier
    }
  end

  defp build_import_graph(types, graph) do
    Enum.each(types, &add_to_graph(&1, graph))
  end

  # Adds a vertex for the type and an edge to each type it imports from.
  defp add_to_graph(type, graph) do
    :digraph.add_vertex(graph, type.identifier)

    # Only type structs that carry an `imports` field contribute edges;
    # `with` quietly skips the rest.
    with %{imports: imports} <- type do
      for {ident, _} <- imports do
        :digraph.add_vertex(graph, ident)

        case :digraph.add_edge(graph, type.identifier, ident) do
          {:error, _} ->
            raise "edge failed"

          _ ->
            :ok
        end
      end
    end
  end
end
| 25.426966 | 130 | 0.599205 |
1c306f9d8cd34c5f857834cae320c682b04797d7 | 1,918 | ex | Elixir | lib/console_web/controllers/v1/label_notification_webhooks_controller.ex | isabella232/console-2 | d4a4aca0e11c945c9698f46cb171d4645177038a | [
"Apache-2.0"
] | null | null | null | lib/console_web/controllers/v1/label_notification_webhooks_controller.ex | isabella232/console-2 | d4a4aca0e11c945c9698f46cb171d4645177038a | [
"Apache-2.0"
] | 1 | 2021-04-03T09:29:31.000Z | 2021-04-03T09:29:31.000Z | lib/console_web/controllers/v1/label_notification_webhooks_controller.ex | isabella232/console-2 | d4a4aca0e11c945c9698f46cb171d4645177038a | [
"Apache-2.0"
defmodule ConsoleWeb.V1.LabelNotificationWebhooksController do
  # Upserts/deletes a notification webhook for a label. A `value` of "0"
  # deletes the webhook; any other value upserts it.
  use ConsoleWeb, :controller

  alias Console.LabelNotificationWebhooks
  alias Console.Labels
  alias Console.Labels.Label

  action_fallback(ConsoleWeb.FallbackController)

  plug CORSPlug, origin: "*"

  # Single source of truth for the accepted webhook keys. Previously this
  # list was duplicated between the membership check and the error message.
  @valid_keys [
    "device_deleted",
    "device_join_otaa_first_time",
    "device_stops_transmitting",
    "integration_stops_working",
    "integration_receives_first_event",
    "downlink_unsuccessful",
    "integration_with_devices_deleted",
    "integration_with_devices_updated"
  ]

  def update(conn, %{"key" => key, "url" => url, "value" => value, "id" => label_id, "notes" => notes}) do
    do_update(conn, key, url, value, label_id, notes)
  end

  def update(conn, %{"key" => key, "url" => url, "value" => value, "id" => label_id}) do
    do_update(conn, key, url, value, label_id, nil)
  end

  # Validates the label and key, then applies the delete/upsert and renders
  # the accepted params back to the caller.
  defp do_update(conn, key, url, value, label_id, notes) do
    case Labels.get_label!(label_id) do
      nil ->
        {:error, :not_found, "Label not found"}

      %Label{} = _label ->
        if key in @valid_keys do
          webhook_params = %{"key" => key, "url" => url, "value" => value, "label_id" => label_id, "notes" => notes}

          # "0" means "turn this webhook off"; anything else creates/updates it.
          case value do
            "0" -> LabelNotificationWebhooks.delete(key, label_id)
            _ -> LabelNotificationWebhooks.upsert_webhook(webhook_params)
          end

          conn
          |> put_status(:accepted)
          |> render("label_notification_webhooks.json", label_notification_webhooks: webhook_params)
        else
          {:error, :forbidden, "Key must be: " <> valid_keys_sentence()}
        end
    end
  end

  # Renders @valid_keys as `"a", "b", ..., or "z"`, matching the original
  # error text exactly.
  defp valid_keys_sentence do
    quoted = Enum.map(@valid_keys, &"\"#{&1}\"")
    {last, rest} = List.pop_at(quoted, -1)
    Enum.join(rest, ", ") <> ", or " <> last
  end
end
1c309b01ed1d96f022d119175213f4faad61ff92 | 505 | exs | Elixir | test/coupled_to_model/delete_test.exs | Q2Worlds/ecto_shortcuts | 45e1373d2ade79b909f515314db060f0ca7fd547 | [
"MIT"
] | 32 | 2016-09-01T19:09:54.000Z | 2021-04-08T21:35:00.000Z | test/coupled_to_model/delete_test.exs | Q2Worlds/ecto_shortcuts | 45e1373d2ade79b909f515314db060f0ca7fd547 | [
"MIT"
] | 1 | 2016-09-28T18:06:40.000Z | 2016-09-28T18:06:40.000Z | test/coupled_to_model/delete_test.exs | Q2Worlds/ecto_shortcuts | 45e1373d2ade79b909f515314db060f0ca7fd547 | [
"MIT"
defmodule DeleteTest do
  # Destructive tests for the delete shortcuts generated by EctoShortcuts
  # (`delete_all/0` and `delete_by/1`) on the MyApp.User model; must not run
  # concurrently with other DB tests.
  use ExUnit.Case, async: false
  doctest EctoShortcuts

  setup do
    # Restore seed data after each test, since deletes are destructive.
    on_exit fn ->
      TestData.reset
    end
  end

  test "delete_all deletes all users" do
    # Guard that the fixture actually contains users before deleting.
    assert 0 < MyApp.User.count
    MyApp.User.delete_all
    assert 0 == MyApp.User.count
  end

  test "delete_by deletes all users with status 3" do
    assert 0 < MyApp.User.count_where user_status_id: 3
    MyApp.User.delete_by user_status_id: 3
    assert 0 == MyApp.User.count_where user_status_id: 3
  end
end
| 21.956522 | 56 | 0.708911 |
1c309e4084e9411c22d171daede5f175746e1082 | 580 | ex | Elixir | users_service/lib/command/repository/nickname_repository.ex | lcnascimento/pokerapp | d7928aebc867c49f01546611ed65a5befa3a6429 | [
"MIT"
] | 2 | 2020-04-20T16:31:01.000Z | 2020-04-20T16:45:03.000Z | users_service/lib/command/repository/nickname_repository.ex | lcnascimento/pokerapp | d7928aebc867c49f01546611ed65a5befa3a6429 | [
"MIT"
] | null | null | null | users_service/lib/command/repository/nickname_repository.ex | lcnascimento/pokerapp | d7928aebc867c49f01546611ed65a5befa3a6429 | [
"MIT"
defmodule UsersService.Command.NicknameRepository do
  @moduledoc false

  alias UsersService.Infra.Eventstore
  alias UsersService.Infra.MapHelper
  alias UsersService.Command.Nickname

  @doc """
  Rebuilds the `Nickname` aggregate for `nickname` by replaying its event
  stream. Returns `{:ok, aggregate}` (`{:ok, nil}` when no events exist).
  """
  @spec get(nickname :: String.t()) :: {:ok, Nickname.t()}
  def get(nickname) do
    result =
      Eventstore.stream("#{nickname}:NicknameAggregate")
      # Flatten the stream of event batches while PRESERVING stream order.
      # Bug fix: the previous `Enum.reduce([], &Enum.concat(&1, Enum.to_list(&2)))`
      # prepended each new batch, so batches replayed newest-first; single-batch
      # streams behave identically either way.
      |> Enum.flat_map(&Enum.to_list/1)
      |> Enum.map(&Map.put(&1, "payload", Poison.decode!(&1["payload"])))
      |> Enum.map(&MapHelper.atomize_keys/1)
      # Fold the events into the aggregate, starting from an empty (nil) state.
      |> Enum.reduce(nil, &Nickname.apply(&2, &1))

    {:ok, result}
  end
end
| 30.526316 | 73 | 0.662069 |
1c30a824fbac72da1f138b8fa285cdff7cc1c528 | 22,060 | exs | Elixir | test/lib/ex_aws/sns/parser_test.exs | bettyblocks/ex_aws | 0c9b43b5a1afbfb5fb26131be7f80f69de4431d1 | [
"MIT",
"Unlicense"
] | null | null | null | test/lib/ex_aws/sns/parser_test.exs | bettyblocks/ex_aws | 0c9b43b5a1afbfb5fb26131be7f80f69de4431d1 | [
"MIT",
"Unlicense"
] | null | null | null | test/lib/ex_aws/sns/parser_test.exs | bettyblocks/ex_aws | 0c9b43b5a1afbfb5fb26131be7f80f69de4431d1 | [
"MIT",
"Unlicense"
] | 1 | 2021-01-22T12:16:23.000Z | 2021-01-22T12:16:23.000Z | defmodule ExAws.SNS.ParserTest do
use ExUnit.Case, async: true
alias ExAws.SNS.Parsers
def to_success(doc) do
{:ok, %{body: doc}}
end
def to_error(doc) do
{:error, {:http_error, 403, %{body: doc}}}
end
  # ListTopics: topic ARNs are collected from the Topics/member elements.
  test "#parsing a list_topics response" do
    rsp = """
    <ListTopicsResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
      <ListTopicsResult>
        <Topics>
          <member>
            <TopicArn>arn:aws:sns:us-east-1:123456789012:My-Topic</TopicArn>
          </member>
        </Topics>
      </ListTopicsResult>
      <ResponseMetadata>
        <RequestId>3f1478c7-33a9-11df-9540-99d0768312d3</RequestId>
      </ResponseMetadata>
    </ListTopicsResponse>
    """
    |> to_success

    {:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :list_topics)
    assert parsed_doc[:topics] == ["arn:aws:sns:us-east-1:123456789012:My-Topic"]
    assert parsed_doc[:request_id] == "3f1478c7-33a9-11df-9540-99d0768312d3"
  end

  # CreateTopic: the new topic's ARN is returned alongside the request id.
  test "#parsing a create_topic response" do
    rsp = """
    <CreateTopicResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
      <CreateTopicResult>
        <TopicArn>arn:aws:sns:us-east-1:123456789012:My-Topic</TopicArn>
      </CreateTopicResult>
      <ResponseMetadata>
        <RequestId>a8dec8b3-33a4-11df-8963-01868b7c937a</RequestId>
      </ResponseMetadata>
    </CreateTopicResponse>
    """
    |> to_success

    {:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :create_topic)
    assert parsed_doc[:topic_arn] == "arn:aws:sns:us-east-1:123456789012:My-Topic"
    assert parsed_doc[:request_id] == "a8dec8b3-33a4-11df-8963-01868b7c937a"
  end
  # GetTopicAttributes: key/value entries are flattened into snake_case
  # fields; the Subscriptions* counters are parsed as integers.
  test "#parsing a get_topic_attributes response" do
    rsp = """
    <GetTopicAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
      <GetTopicAttributesResult>
        <Attributes>
          <entry>
            <key>Owner</key>
            <value>123456789012</value>
          </entry>
          <entry>
            <key>Policy</key>
            <value>some_json</value>
          </entry>
          <entry>
            <key>TopicArn</key>
            <value>arn:aws:sns:us-east-1:123456789012:My-Topic</value>
          </entry>
          <entry>
            <key>SubscriptionsPending</key>
            <value>12</value>
          </entry>
          <entry>
            <key>SubscriptionsConfirmed</key>
            <value>4</value>
          </entry>
          <entry>
            <key>SubscriptionsDeleted</key>
            <value>9</value>
          </entry>
        </Attributes>
      </GetTopicAttributesResult>
      <ResponseMetadata>
        <RequestId>057f074c-33a7-11df-9540-99d0768312d3</RequestId>
      </ResponseMetadata>
    </GetTopicAttributesResponse>
    """
    |> to_success

    {:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :get_topic_attributes)
    assert parsed_doc[:owner] == "123456789012"
    assert parsed_doc[:policy] == "some_json"
    assert parsed_doc[:topic_arn] == "arn:aws:sns:us-east-1:123456789012:My-Topic"
    assert parsed_doc[:subscriptions_pending] == 12
    assert parsed_doc[:subscriptions_confirmed] == 4
    assert parsed_doc[:subscriptions_deleted] == 9
    assert parsed_doc[:request_id] == "057f074c-33a7-11df-9540-99d0768312d3"
  end

  # SetTopicAttributes: only the request id is extracted.
  test "#parsing a set_topic_attributes response" do
    rsp = """
    <SetTopicAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
      <ResponseMetadata>
        <RequestId>a8763b99-33a7-11df-a9b7-05d48da6f042</RequestId>
      </ResponseMetadata>
    </SetTopicAttributesResponse>
    """
    |> to_success

    {:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :set_topic_attributes)
    assert parsed_doc[:request_id] == "a8763b99-33a7-11df-a9b7-05d48da6f042"
  end

  # DeleteTopic: only the request id is extracted.
  test "#parsing a delete_topic response" do
    rsp = """
    <DeleteTopicResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
      <ResponseMetadata>
        <RequestId>f3aa9ac9-3c3d-11df-8235-9dab105e9c32</RequestId>
      </ResponseMetadata>
    </DeleteTopicResponse>
    """
    |> to_success

    {:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :delete_topic)
    assert parsed_doc[:request_id] == "f3aa9ac9-3c3d-11df-8235-9dab105e9c32"
  end
  # Publish: the published message id is extracted.
  test "#parsing a publish response" do
    rsp = """
    <PublishResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
      <PublishResult>
        <MessageId>94f20ce6-13c5-43a0-9a9e-ca52d816e90b</MessageId>
      </PublishResult>
      <ResponseMetadata>
        <RequestId>f187a3c1-376f-11df-8963-01868b7c937a</RequestId>
      </ResponseMetadata>
    </PublishResponse>
    """
    |> to_success

    {:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :publish)
    assert parsed_doc[:message_id] == "94f20ce6-13c5-43a0-9a9e-ca52d816e90b"
    assert parsed_doc[:request_id] == "f187a3c1-376f-11df-8963-01868b7c937a"
  end

  # CreatePlatformApplication: the new application ARN is extracted.
  test "#parsing a create_platform_application response" do
    rsp = """
    <CreatePlatformApplicationResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
      <CreatePlatformApplicationResult>
        <PlatformApplicationArn>arn:aws:sns:us-west-2:123456789012:app/GCM/gcmpushapp</PlatformApplicationArn>
      </CreatePlatformApplicationResult>
      <ResponseMetadata>
        <RequestId>b6f0e78b-e9d4-5a0e-b973-adc04e8a4ff9</RequestId>
      </ResponseMetadata>
    </CreatePlatformApplicationResponse>
    """
    |> to_success

    {:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :create_platform_application)
    assert parsed_doc[:platform_application_arn] == "arn:aws:sns:us-west-2:123456789012:app/GCM/gcmpushapp"
    assert parsed_doc[:request_id] == "b6f0e78b-e9d4-5a0e-b973-adc04e8a4ff9"
  end

  # DeletePlatformApplication: only the request id is extracted.
  test "#parsing a delete_platform_application response" do
    rsp = """
    <DeletePlatformApplicationResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
      <ResponseMetadata>
        <RequestId>097dac18-7a77-5823-a8dd-e65476dcb037</RequestId>
      </ResponseMetadata>
    </DeletePlatformApplicationResponse>
    """
    |> to_success

    {:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :delete_platform_application)
    assert parsed_doc[:request_id] == "097dac18-7a77-5823-a8dd-e65476dcb037"
  end

  # CreatePlatformEndpoint: the new endpoint ARN is extracted.
  test "#parsing a create_platform_endpoint response" do
    rsp = """
    <CreatePlatformEndpointResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
      <CreatePlatformEndpointResult>
        <EndpointArn>arn:aws:sns:us-west-2:123456789012:endpoint/GCM/gcmpushapp/5e3e9847-3183-3f18-a7e8-671c3a57d4b3</EndpointArn>
      </CreatePlatformEndpointResult>
      <ResponseMetadata>
        <RequestId>6613341d-3e15-53f7-bf3c-7e56994ba278</RequestId>
      </ResponseMetadata>
    </CreatePlatformEndpointResponse>
    """
    |> to_success

    {:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :create_platform_endpoint)
    assert parsed_doc[:endpoint_arn] == "arn:aws:sns:us-west-2:123456789012:endpoint/GCM/gcmpushapp/5e3e9847-3183-3f18-a7e8-671c3a57d4b3"
    assert parsed_doc[:request_id] == "6613341d-3e15-53f7-bf3c-7e56994ba278"
  end
  # ListPlatformApplications: each member becomes a map carrying its ARN and
  # attribute entries; next_token defaults to "" when NextToken is absent.
  test "#parsing a list_platform_applications response" do
    rsp = """
    <ListPlatformApplicationsResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
      <ListPlatformApplicationsResult>
        <PlatformApplications>
          <member>
            <PlatformApplicationArn>arn:aws:sns:us-west-2:123456789012:app/APNS_SANDBOX/apnspushapp</PlatformApplicationArn>
            <Attributes>
              <entry>
                <key>AllowEndpointPolicies</key>
                <value>false</value>
              </entry>
            </Attributes>
          </member>
          <member>
            <PlatformApplicationArn>arn:aws:sns:us-west-2:123456789012:app/GCM/gcmpushapp</PlatformApplicationArn>
            <Attributes>
              <entry>
                <key>AllowEndpointPolicies</key>
                <value>true</value>
              </entry>
            </Attributes>
          </member>
        </PlatformApplications>
      </ListPlatformApplicationsResult>
      <ResponseMetadata>
        <RequestId>315a335e-85d8-52df-9349-791283cbb529</RequestId>
      </ResponseMetadata>
    </ListPlatformApplicationsResponse>
    """
    |> to_success

    {:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :list_platform_applications)
    assert parsed_doc[:applications] == [
      %{
        platform_application_arn: "arn:aws:sns:us-west-2:123456789012:app/APNS_SANDBOX/apnspushapp",
        attributes: [%{key: "AllowEndpointPolicies", value: "false"}]
      },
      %{
        platform_application_arn: "arn:aws:sns:us-west-2:123456789012:app/GCM/gcmpushapp",
        attributes: [%{key: "AllowEndpointPolicies", value: "true"}]
      }
    ]
    assert parsed_doc[:next_token] == ""
    assert parsed_doc[:request_id] == "315a335e-85d8-52df-9349-791283cbb529"
  end

  # Same as above, but a NextToken element is present and must be surfaced.
  test "#parsing a list_platform_applications response with next_token" do
    rsp = """
    <ListPlatformApplicationsResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
      <ListPlatformApplicationsResult>
        <PlatformApplications>
          <member>
            <PlatformApplicationArn>arn:aws:sns:us-west-2:123456789012:app/APNS_SANDBOX/apnspushapp</PlatformApplicationArn>
            <Attributes>
              <entry>
                <key>AllowEndpointPolicies</key>
                <value>false</value>
              </entry>
            </Attributes>
          </member>
          <member>
            <PlatformApplicationArn>arn:aws:sns:us-west-2:123456789012:app/GCM/gcmpushapp</PlatformApplicationArn>
            <Attributes>
              <entry>
                <key>AllowEndpointPolicies</key>
                <value>true</value>
              </entry>
            </Attributes>
          </member>
        </PlatformApplications>
        <NextToken>123456789</NextToken>
      </ListPlatformApplicationsResult>
      <ResponseMetadata>
        <RequestId>315a335e-85d8-52df-9349-791283cbb529</RequestId>
      </ResponseMetadata>
    </ListPlatformApplicationsResponse>
    """
    |> to_success

    {:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :list_platform_applications)
    assert parsed_doc[:applications] == [
      %{
        platform_application_arn: "arn:aws:sns:us-west-2:123456789012:app/APNS_SANDBOX/apnspushapp",
        attributes: [%{key: "AllowEndpointPolicies", value: "false"}]
      },
      %{
        platform_application_arn: "arn:aws:sns:us-west-2:123456789012:app/GCM/gcmpushapp",
        attributes: [%{key: "AllowEndpointPolicies", value: "true"}]
      }
    ]
    assert parsed_doc[:next_token] == "123456789"
    assert parsed_doc[:request_id] == "315a335e-85d8-52df-9349-791283cbb529"
  end
  # GetPlatformApplicationAttributes: each Event* entry maps to a snake_case
  # field holding the configured topic ARN.
  test "#parsing a get_platform_application_attributes response" do
    created_arn = "arn:aws:sns:us-east-1:123456789012:Event-Created"
    updated_arn = "arn:aws:sns:us-east-1:123456789012:Event-Updated"
    deleted_arn = "arn:aws:sns:us-east-1:123456789012:Event-Deleted"
    failure_arn = "arn:aws:sns:us-east-1:123456789012:Delivery-Failure"

    rsp = """
    <GetPlatformApplicationAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
      <GetPlatformApplicationAttributesResult>
        <Attributes>
          <entry>
            <key>EventEndpointCreated</key>
            <value>#{created_arn}</value>
          </entry>
          <entry>
            <key>EventEndpointUpdated</key>
            <value>#{updated_arn}</value>
          </entry>
          <entry>
            <key>EventEndpointDeleted</key>
            <value>#{deleted_arn}</value>
          </entry>
          <entry>
            <key>EventDeliveryFailure</key>
            <value>#{failure_arn}</value>
          </entry>
        </Attributes>
      </GetPlatformApplicationAttributesResult>
      <ResponseMetadata>
        <RequestId>74848df2-87f6-55ed-890c-c7be80442462</RequestId>
      </ResponseMetadata>
    </GetPlatformApplicationAttributesResponse>
    """
    |> to_success

    {:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :get_platform_application_attributes)
    assert parsed_doc[:event_endpoint_created] == created_arn
    assert parsed_doc[:event_endpoint_updated] == updated_arn
    assert parsed_doc[:event_endpoint_deleted] == deleted_arn
    assert parsed_doc[:event_delivery_failure] == failure_arn
  end

  # Subscribe: the new subscription ARN is extracted.
  test "#parsing a subscribe response" do
    rsp = """
    <SubscribeResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
      <SubscribeResult>
        <SubscriptionArn>arn:aws:sns:us-west-2:123456789012:MyTopic:6b0e71bd-7e97-4d97-80ce-4a0994e55286</SubscriptionArn>
      </SubscribeResult>
      <ResponseMetadata>
        <RequestId>c4407779-24a4-56fa-982c-3d927f93a775</RequestId>
      </ResponseMetadata>
    </SubscribeResponse>
    """
    |> to_success

    {:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :subscribe)
    assert parsed_doc[:subscription_arn] == "arn:aws:sns:us-west-2:123456789012:MyTopic:6b0e71bd-7e97-4d97-80ce-4a0994e55286"
    assert parsed_doc[:request_id] == "c4407779-24a4-56fa-982c-3d927f93a775"
  end
  # ListSubscriptions: each member becomes a subscription map; next_token is
  # "" when no NextToken element is present.
  test "#parsing a list_subscriptions response" do
    rsp = """
    <ListSubscriptionsResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
      <ListSubscriptionsResult>
        <Subscriptions>
          <member>
            <TopicArn>arn:aws:sns:us-east-1:698519295917:My-Topic</TopicArn>
            <Protocol>email</Protocol>
            <SubscriptionArn>arn:aws:sns:us-east-1:123456789012:My-Topic:80289ba6-0fd4-4079-afb4-ce8c8260f0ca</SubscriptionArn>
            <Owner>123456789012</Owner>
            <Endpoint>example@amazon.com</Endpoint>
          </member>
        </Subscriptions>
      </ListSubscriptionsResult>
      <ResponseMetadata>
        <RequestId>384ac68d-3775-11df-8963-01868b7c937a</RequestId>
      </ResponseMetadata>
    </ListSubscriptionsResponse>
    """
    |> to_success

    {:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :list_subscriptions)
    assert parsed_doc[:subscriptions] == [
      %{
        owner: "123456789012",
        endpoint: "example@amazon.com",
        protocol: "email",
        subscription_arn: "arn:aws:sns:us-east-1:123456789012:My-Topic:80289ba6-0fd4-4079-afb4-ce8c8260f0ca",
        topic_arn: "arn:aws:sns:us-east-1:698519295917:My-Topic"
      }
    ]
    assert parsed_doc[:next_token] == ""
    assert parsed_doc[:request_id] == "384ac68d-3775-11df-8963-01868b7c937a"
  end

  # Same as above, but with a NextToken element that must be surfaced.
  test "#parsing a list_subscriptions response with a next token" do
    rsp = """
    <ListSubscriptionsResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
      <ListSubscriptionsResult>
        <Subscriptions>
          <member>
            <TopicArn>arn:aws:sns:us-east-1:698519295917:My-Topic</TopicArn>
            <Protocol>email</Protocol>
            <SubscriptionArn>arn:aws:sns:us-east-1:123456789012:My-Topic:80289ba6-0fd4-4079-afb4-ce8c8260f0ca</SubscriptionArn>
            <Owner>123456789012</Owner>
            <Endpoint>example@amazon.com</Endpoint>
          </member>
        </Subscriptions>
        <NextToken>123456789</NextToken>
      </ListSubscriptionsResult>
      <ResponseMetadata>
        <RequestId>384ac68d-3775-11df-8963-01868b7c937a</RequestId>
      </ResponseMetadata>
    </ListSubscriptionsResponse>
    """
    |> to_success

    {:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :list_subscriptions)
    assert parsed_doc[:subscriptions] == [
      %{
        owner: "123456789012",
        endpoint: "example@amazon.com",
        protocol: "email",
        subscription_arn: "arn:aws:sns:us-east-1:123456789012:My-Topic:80289ba6-0fd4-4079-afb4-ce8c8260f0ca",
        topic_arn: "arn:aws:sns:us-east-1:698519295917:My-Topic"
      }
    ]
    assert parsed_doc[:next_token] == "123456789"
    assert parsed_doc[:request_id] == "384ac68d-3775-11df-8963-01868b7c937a"
  end
  # Unsubscribe: only the request id is extracted.
  test "#parsing an unsubscribe response" do
    rsp = """
    <UnsubscribeResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
      <ResponseMetadata>
        <RequestId>18e0ac39-3776-11df-84c0-b93cc1666b84</RequestId>
      </ResponseMetadata>
    </UnsubscribeResponse>
    """
    |> to_success

    {:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :unsubscribe)
    assert parsed_doc[:request_id] == "18e0ac39-3776-11df-84c0-b93cc1666b84"
  end

  # GetSubscriptionAttributes: JSON policy values stay raw strings, while
  # ConfirmationWasAuthenticated is parsed into a boolean.
  test "#parsing a get_subscription_attributes response" do
    rsp = """
    <GetSubscriptionAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
      <GetSubscriptionAttributesResult>
        <Attributes>
          <entry>
            <key>Owner</key>
            <value>123456789012</value>
          </entry>
          <entry>
            <key>DeliveryPolicy</key>
            <value>{"healthyRetryPolicy":{"numRetries":10}}</value>
          </entry>
          <entry>
            <key>EffectiveDeliveryPolicy</key>
            <value>{"healthyRetryPolicy":{"numRetries":10}}</value>
          </entry>
          <entry>
            <key>SubscriptionArn</key>
            <value>arn:aws:sns:us-east-1:123456789012:My-Topic:80289ba6-0fd4-4079-afb4-ce8c8260f0ca</value>
          </entry>
          <entry>
            <key>ConfirmationWasAuthenticated</key>
            <value>true</value>
          </entry>
        </Attributes>
      </GetSubscriptionAttributesResult>
      <ResponseMetadata>
        <RequestId>057f074c-33a7-11df-9540-99d0768312d3</RequestId>
      </ResponseMetadata>
    </GetSubscriptionAttributesResponse>
    """
    |> to_success

    {:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :get_subscription_attributes)
    assert parsed_doc[:owner] == "123456789012"
    assert parsed_doc[:delivery_policy] == ~s({"healthyRetryPolicy":{"numRetries":10}})
    assert parsed_doc[:effective_delivery_policy] == ~s({"healthyRetryPolicy":{"numRetries":10}})
    assert parsed_doc[:subscription_arn] == "arn:aws:sns:us-east-1:123456789012:My-Topic:80289ba6-0fd4-4079-afb4-ce8c8260f0ca"
    assert parsed_doc[:confirmation_was_authenticated] == true
  end

  # SetSubscriptionAttributes: only the request id is extracted.
  test "#parsing a set_subscription_attributes response" do
    rsp = """
    <SetSubscriptionAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
      <ResponseMetadata>
        <RequestId>a8763b99-33a7-11df-a9b7-05d48da6f042</RequestId>
      </ResponseMetadata>
    </SetSubscriptionAttributesResponse>
    """
    |> to_success

    {:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :set_subscription_attributes)
    assert parsed_doc[:request_id] == "a8763b99-33a7-11df-a9b7-05d48da6f042"
  end
  test "#parsing a get_endpoint_attributes response" do
    # Platform endpoint attributes: "Enabled" is parsed to a boolean while
    # CustomUserData and Token remain plain strings.
    rsp = """
    <GetEndpointAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
      <GetEndpointAttributesResult>
        <Attributes>
          <entry>
            <key>Enabled</key>
            <value>true</value>
          </entry>
          <entry>
            <key>CustomUserData</key>
            <value>UserId=01234567</value>
          </entry>
          <entry>
            <key>Token</key>
            <value>APA91bGi7fFachkC1xjlqT66VYEucGHochmf1VQAr9k...jsM0PKPxKhddCzx6paEsyay9Zn3D4wNUJb8m6HZrBEXAMPLE</value>
          </entry>
        </Attributes>
      </GetEndpointAttributesResult>
      <ResponseMetadata>
        <RequestId>6c725a19-a142-5b77-94f9-1055a9ea04e7</RequestId>
      </ResponseMetadata>
    </GetEndpointAttributesResponse>
    """
    |> to_success
    {:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :get_endpoint_attributes)
    assert parsed_doc[:enabled] == true
    assert parsed_doc[:custom_user_data] == "UserId=01234567"
    assert parsed_doc[:token] == "APA91bGi7fFachkC1xjlqT66VYEucGHochmf1VQAr9k...jsM0PKPxKhddCzx6paEsyay9Zn3D4wNUJb8m6HZrBEXAMPLE"
    # The request id comes from ResponseMetadata, not the Attributes list.
    assert parsed_doc[:request_id] == "6c725a19-a142-5b77-94f9-1055a9ea04e7"
  end
test "#parsing a set_endpoint_attributes response" do
rsp = """
<SetEndpointAttributesResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
<ResponseMetadata>
<RequestId>2fe0bfc7-3e85-5ee5-a9e2-f58b35e85f6a</RequestId>
</ResponseMetadata>
</SetEndpointAttributesResponse>
"""
|> to_success
{:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :set_endpoint_attributes)
assert parsed_doc[:request_id] == "2fe0bfc7-3e85-5ee5-a9e2-f58b35e85f6a"
end
test "#parsing a delete_endpoint response" do
rsp = """
<DeleteEndpointResponse xmlns="http://sns.amazonaws.com/doc/2010-03-31/">
<ResponseMetadata>
<RequestId>c1d2b191-353c-5a5f-8969-fbdd3900afa8</RequestId>
</ResponseMetadata>
</DeleteEndpointResponse>
"""
|> to_success
{:ok, %{body: parsed_doc}} = Parsers.parse(rsp, :delete_endpoint)
assert parsed_doc[:request_id] == "c1d2b191-353c-5a5f-8969-fbdd3900afa8"
end
  test "it should handle parsing an error" do
    # Error responses use the Query-API error envelope. The parser returns
    # an {:error, {:http_error, status, details}} tuple instead of {:ok, _}.
    # NOTE(review): the 403 status presumably comes from the `to_error`
    # helper wrapping the body — confirm in the shared test helpers.
    rsp = """
    <?xml version=\"1.0\"?>
    <ErrorResponse xmlns=\"http://queue.amazonaws.com/doc/2012-11-05/\">
      <Error>
        <Type>Sender</Type>
        <Code>ExpiredToken</Code>
        <Message>The security token included in the request is expired</Message>
        <Detail/>
      </Error>
      <RequestId>f7ac5905-2fb6-5529-a86d-09628dae67f4</RequestId>
    </ErrorResponse>
    """
    |> to_error
    {:error, {:http_error, 403, err}} = Parsers.parse(rsp, :set_endpoint_attributes)
    assert "f7ac5905-2fb6-5529-a86d-09628dae67f4" == err[:request_id]
    assert "Sender" == err[:type]
    assert "ExpiredToken" == err[:code]
    assert "The security token included in the request is expired" == err[:message]
  end
end
| 38.16609 | 137 | 0.635993 |
1c30d9835a903231b46f2daf9f119941e50a6711 | 336 | exs | Elixir | apps/ewallet_db/priv/repo/migrations/20190320060752_add_creator_user_uuid_and_creator_key_uuid_to_api_key.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/ewallet_db/priv/repo/migrations/20190320060752_add_creator_user_uuid_and_creator_key_uuid_to_api_key.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/ewallet_db/priv/repo/migrations/20190320060752_add_creator_user_uuid_and_creator_key_uuid_to_api_key.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | defmodule EWalletDB.Repo.Migrations.AddCreatorUserUuidAndCreatorKeyUuidToApiKey do
  use Ecto.Migration
  # Table altered by this migration.
  @table "api_key"
  # Adds nullable FK columns recording which user or key created the API
  # key. Both reference `uuid` primary-key columns rather than the default
  # integer ids.
  def change do
    alter table(@table) do
      add :creator_user_uuid, references(:user, type: :uuid, column: :uuid)
      add :creator_key_uuid, references(:key, type: :uuid, column: :uuid)
    end
  end
end
| 25.846154 | 82 | 0.72619 |
1c30ddf9312ecb7708b0790465c32b6e1c5167c9 | 917 | ex | Elixir | web/router.ex | Namuraid/backend | 04a10248bfeb156eb291207931621b40585d8f7e | [
"MIT"
] | null | null | null | web/router.ex | Namuraid/backend | 04a10248bfeb156eb291207931621b40585d8f7e | [
"MIT"
] | null | null | null | web/router.ex | Namuraid/backend | 04a10248bfeb156eb291207931621b40585d8f7e | [
"MIT"
] | null | null | null | defmodule Namuraid.Router do
  use Namuraid.Web, :router
  # Plug pipeline for browser requests: HTML responses, session/flash
  # support, CSRF protection and secure response headers.
  pipeline :browser do
    plug :accepts, ["html"]
    plug :fetch_session
    plug :fetch_flash
    plug :protect_from_forgery
    plug :put_secure_browser_headers
  end
  # JSON-only pipeline; currently unused (see the commented-out scope below).
  pipeline :api do
    plug :accepts, ["json"]
  end
  scope "/", Namuraid do
    pipe_through :browser # Use the default browser stack
    get "/", PageController, :index
    get "/live", LiveController, :index
    post "/live", LiveController, :create
    get "/start", StartController, :index
    post "/start", StartController, :create
    get "/results", ResultsController, :index
    post "/results", ResultsController, :create
    resources "/screens", ScreenController
    resources "/sitewalk", SiteWalkController
    resources "/sponsors", SponsorController
  end
  # Other scopes may use custom stacks.
  # scope "/api", Namuraid do
  #   pipe_through :api
  # end
end
| 22.925 | 57 | 0.684842 |
1c30e54d503aa658ea331b5656ba1747a382d8c6 | 2,684 | ex | Elixir | clients/analytics_data/lib/google_api/analytics_data/v1beta/model/filter.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/analytics_data/lib/google_api/analytics_data/v1beta/model/filter.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/analytics_data/lib/google_api/analytics_data/v1beta/model/filter.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AnalyticsData.V1beta.Model.Filter do
  @moduledoc """
  An expression to filter dimension or metric values.
  ## Attributes
  *   `betweenFilter` (*type:* `GoogleApi.AnalyticsData.V1beta.Model.BetweenFilter.t`, *default:* `nil`) - A filter for two values.
  *   `fieldName` (*type:* `String.t`, *default:* `nil`) - The dimension name or metric name. Must be a name defined in dimensions or metrics.
  *   `inListFilter` (*type:* `GoogleApi.AnalyticsData.V1beta.Model.InListFilter.t`, *default:* `nil`) - A filter for in list values.
  *   `numericFilter` (*type:* `GoogleApi.AnalyticsData.V1beta.Model.NumericFilter.t`, *default:* `nil`) - A filter for numeric or date values.
  *   `stringFilter` (*type:* `GoogleApi.AnalyticsData.V1beta.Model.StringFilter.t`, *default:* `nil`) - Strings related filter.
  """
  # NOTE: generated module (see file header) — change the generator, not this file.
  use GoogleApi.Gax.ModelBase
  # Struct type; every attribute is optional and defaults to nil.
  @type t :: %__MODULE__{
          :betweenFilter => GoogleApi.AnalyticsData.V1beta.Model.BetweenFilter.t() | nil,
          :fieldName => String.t() | nil,
          :inListFilter => GoogleApi.AnalyticsData.V1beta.Model.InListFilter.t() | nil,
          :numericFilter => GoogleApi.AnalyticsData.V1beta.Model.NumericFilter.t() | nil,
          :stringFilter => GoogleApi.AnalyticsData.V1beta.Model.StringFilter.t() | nil
        }
  # Field definitions mirror the attribute names documented in @moduledoc.
  field(:betweenFilter, as: GoogleApi.AnalyticsData.V1beta.Model.BetweenFilter)
  field(:fieldName)
  field(:inListFilter, as: GoogleApi.AnalyticsData.V1beta.Model.InListFilter)
  field(:numericFilter, as: GoogleApi.AnalyticsData.V1beta.Model.NumericFilter)
  field(:stringFilter, as: GoogleApi.AnalyticsData.V1beta.Model.StringFilter)
end
defimpl Poison.Decoder, for: GoogleApi.AnalyticsData.V1beta.Model.Filter do
  # Delegates JSON decoding to the generated model's own decode/2.
  def decode(value, options),
    do: GoogleApi.AnalyticsData.V1beta.Model.Filter.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.AnalyticsData.V1beta.Model.Filter do
  # All generated models share the Gax.ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 45.491525 | 143 | 0.736587 |
1c30ed92d483414f97bc7bce4f8f3a1cec3c6cf0 | 10,977 | ex | Elixir | lib/elixir/lib/module/types.ex | irisTa56/elixir | 0a953d75fb2cbdc6f3d33040aa60738d85512a1f | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/module/types.ex | irisTa56/elixir | 0a953d75fb2cbdc6f3d33040aa60738d85512a1f | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/module/types.ex | irisTa56/elixir | 0a953d75fb2cbdc6f3d33040aa60738d85512a1f | [
"Apache-2.0"
] | null | null | null | defmodule Module.Types do
@moduledoc false
import Module.Types.Helpers
alias Module.Types.{Expr, Pattern}
@doc """
Infer function definitions' types.
"""
def infer_definitions(file, module, defs) do
signatures_and_bodies = infer_signatures(file, module, defs)
infer_bodies(signatures_and_bodies)
end
  # Infers a type signature for every clause of every definition. Returns a
  # list of `{function, {:ok, [{signature, context, body}]} | {:error, reason}}`
  # where `context` carries the variable/type state accumulated from the
  # head so the body can be inferred against it later.
  defp infer_signatures(file, module, defs) do
    Enum.map(defs, fn {{fun, _arity} = function, kind, meta, clauses} ->
      stack = head_stack()
      context = head_context(file, module, function)
      signature_and_body =
        Enum.map(clauses, fn {_meta, params, guards, body} ->
          # Rebuild a `def ... when ...` expression; it is pushed on the
          # expression stack purely for error tracing.
          def_expr = {kind, meta, [guards_to_expr(guards, {fun, [], params})]}
          stack = push_expr_stack(def_expr, stack)
          case of_head(params, guards, stack, context) do
            {:ok, signature, context} -> {:ok, {signature, context, body}}
            {:error, reason} -> {:error, reason}
          end
        end)
      {function, oks_or_errors(signature_and_body)}
    end)
  end
  # Type checks each clause body against the context captured from its head.
  # First builds a map of all successfully inferred signatures (stored as
  # `local_funs` in the body context), then runs `Expr.of_expr/3` on every
  # body.
  defp infer_bodies(signatures_and_bodies) do
    signatures =
      Enum.flat_map(signatures_and_bodies, fn
        {function, {:ok, clauses}} ->
          signatures = Enum.map(clauses, fn {signature, _context, _body} -> signature end)
          [{function, signatures}]
        {_function, {:error, _reason}} ->
          # Definitions whose head failed inference are left out of the map.
          []
      end)
      |> Map.new()
    Enum.map(signatures_and_bodies, fn
      {function, {:ok, clauses}} ->
        signatures =
          Enum.map(clauses, fn {signature, head_context, body} ->
            stack = body_stack()
            context = body_context(head_context, signatures)
            case Expr.of_expr(body, stack, context) do
              # The clause's head signature is kept; the body's own type is
              # discarded once it checks successfully.
              {:ok, _type, _context} -> {:ok, signature}
              {:error, reason} -> {:error, reason}
            end
          end)
        {function, oks_or_errors(signatures)}
      {function, {:error, reason}} ->
        # Head inference already failed; propagate the error unchanged.
        {function, {:error, reason}}
    end)
  end
  @doc false
  # Infers the types of a clause head: unifies each parameter pattern, then
  # checks the guards (joined into a single `orelse` expression).
  # Returns `{:ok, lifted_param_types, context}` or `{:error, reason}`.
  def of_head(params, guards, stack, context) do
    with {:ok, types, context} <-
           map_reduce_ok(params, context, &Pattern.of_pattern(&1, stack, &2)),
         # TODO: Check that of_guard/3 returns a boolean
         {:ok, _, context} <- Pattern.of_guard(guards_to_or(guards), stack, context),
         do: {:ok, lift_types(types, context), context}
  end
  @doc false
  # Builds the fresh inference state used while checking a clause head.
  def head_context(file, module, function) do
    %{
      # File of module
      file: file,
      # Module of definitions
      module: module,
      # Current function
      function: function,
      # Expression variable to type variable
      vars: %{},
      # Type variable to expression variable
      types_to_vars: %{},
      # Type variable to type
      types: %{},
      # Trace of all variables that have been refined to a type,
      # including the type they were refined to, why, and where
      traces: %{},
      # Counter to give type variables unique names
      counter: 0,
      # Track if a variable was inferred from a type guard function such as is_tuple/1
      # or a guard function that fails such as elem/2, possible values are:
      # `:guarded` when `is_tuple(x)`
      # `:fail` when `elem(x, 0)`
      # `:guarded_fail` when `is_tuple and elem(x, 0)`
      guard_sources: %{}
    }
  end
  @doc false
  # Initial traversal stack for checking a clause head; in pattern context
  # unification is bi-directional (see the `context` key below).
  def head_stack() do
    %{
      # Stack of expression we have recursed through during inference,
      # used for tracing
      expr_stack: [],
      # When false do not add a trace when a type variable is refined,
      # useful when merging contexts where the variables already have traces
      trace: true,
      # Track if we are in a context where type guard functions should
      # affect inference
      type_guards_enabled?: true,
      # Context used to determine if unification is bi-directional, :expr
      # is directional, :pattern is bi-directional
      context: :pattern
    }
  end
  @doc false
  # Derives the state for checking a clause body from the head's final
  # context, adding the module's local function signatures.
  def body_context(head_context, signatures) do
    %{
      # File of module
      file: head_context.file,
      # Module of definitions
      module: head_context.module,
      # Current function
      function: head_context.function,
      # Expression variable to type variable
      vars: head_context.vars,
      # Type variable to expression variable
      types_to_vars: head_context.types_to_vars,
      # Type variable to type
      types: head_context.types,
      # Trace of all variables that have been refined to a type,
      # including the type they were refined to, why, and where
      traces: head_context.traces,
      # Counter to give type variables unique names
      counter: head_context.counter,
      # Local function signatures from the current module
      local_funs: signatures
    }
  end
  @doc false
  # Initial traversal stack for bodies; in expression context unification
  # is directional.
  def body_stack() do
    %{
      # Stack of expression we have recursed through during inference,
      # used for tracing
      expr_stack: [],
      # When false do not add a trace when a type variable is refined,
      # useful when merging contexts where the variables already have traces
      trace: true,
      # Context used to determine if unification is bi-directional, :expr
      # is directional, :pattern is bi-directional
      context: :expr
    }
  end
  @doc """
  Lifts type variables to their inferred types from the context.
  """
  def lift_types(types, context) do
    # A minimal lifting context: the resolved types plus bookkeeping for
    # renaming variables to fresh, consecutively numbered ones.
    context = %{
      types: context.types,
      lifted_types: %{},
      lifted_counter: 0
    }
    {types, _context} = Enum.map_reduce(types, context, &do_lift_type/2)
    types
  end
  @doc false
  # Same as `lift_types/2` but for a single type.
  def lift_type(type, context) do
    context = %{
      types: context.types,
      lifted_types: %{},
      lifted_counter: 0
    }
    {type, _context} = do_lift_type(type, context)
    type
  end
  ## GUARDS
  # TODO: Remove this and let multiple when be treated as multiple clauses,
  # meaning they will be intersection types
  # Joins a clause's guard list into a single guard by nesting
  # `:erlang.orelse/2` calls. The fold is left-to-right, so later guards
  # end up as the outer `orelse`. An empty guard list stays `[]`.
  defp guards_to_or([]) do
    []
  end
  defp guards_to_or(guards) do
    Enum.reduce(guards, fn guard, acc -> {{:., [], [:erlang, :orelse]}, [], [guard, acc]} end)
  end
  # Rebuilds a `left when g1 when g2 ...` AST from a guard list; used only
  # to reconstruct the original `def` expression for error tracing.
  defp guards_to_expr([], left) do
    left
  end
  defp guards_to_expr([guard | guards], left) do
    guards_to_expr(guards, {:when, [], [left, guard]})
  end
  ## VARIABLE LIFTING
  # Lift type variable to its inferred (hopefully concrete) types from the context
  # Resolves a type variable through the context. Already-lifted variables
  # reuse their assigned number; unbound or unknown variables get a fresh
  # lifted variable via `new_lifted_var/2`.
  defp do_lift_type({:var, var}, context) do
    case Map.fetch(context.lifted_types, var) do
      {:ok, lifted_var} ->
        {{:var, lifted_var}, context}
      :error ->
        case Map.fetch(context.types, var) do
          {:ok, :unbound} ->
            new_lifted_var(var, context)
          {:ok, type} ->
            # Remove visited types to avoid infinite loops
            # then restore after we are done recursing on vars
            types = context.types
            context = %{context | types: Map.delete(context.types, var)}
            {type, context} = do_lift_type(type, context)
            {type, %{context | types: types}}
          :error ->
            new_lifted_var(var, context)
        end
    end
  end
  # Composite types are lifted by recursing into their components.
  defp do_lift_type({:tuple, types}, context) do
    {types, context} = Enum.map_reduce(types, context, &do_lift_type/2)
    {{:tuple, types}, context}
  end
  defp do_lift_type({:map, pairs}, context) do
    {pairs, context} =
      Enum.map_reduce(pairs, context, fn {key, value}, context ->
        {key, context} = do_lift_type(key, context)
        {value, context} = do_lift_type(value, context)
        {{key, value}, context}
      end)
    {{:map, pairs}, context}
  end
  defp do_lift_type({:list, type}, context) do
    {type, context} = do_lift_type(type, context)
    {{:list, type}, context}
  end
  # Concrete leaf types are returned untouched.
  defp do_lift_type(other, context) do
    {other, context}
  end
  # Assigns the next sequential number to `original_var` and records the
  # mapping so later references lift to the same `{:var, n}`.
  defp new_lifted_var(original_var, context) do
    types = Map.put(context.lifted_types, original_var, context.lifted_counter)
    counter = context.lifted_counter + 1
    type = {:var, context.lifted_counter}
    context = %{context | lifted_types: types, lifted_counter: counter}
    {type, context}
  end
  ## ERROR FORMATTING
  # Builds the iodata for a unification failure warning: the two
  # incompatible types, the expression they clashed in, and the trace of
  # how each variable got its type.
  def format_warning({:unable_unify, left, right, expr, traces}) do
    [
      "function clause will never match, found incompatibility:\n\n    ",
      format_type(left),
      " !~ ",
      format_type(right),
      "\n\n",
      format_expr(expr),
      format_traces(traces),
      "Conflict found at"
    ]
  end
  # No expression available for this conflict.
  defp format_expr(nil) do
    []
  end
  defp format_expr(expr) do
    [
      "in expression:\n\n    ",
      expr_to_string(expr),
      "\n\n"
    ]
  end
  defp format_traces([]) do
    []
  end
  # Each trace explains either a variable refined to a concrete type or
  # two variables unified with each other, along with the source location.
  defp format_traces(traces) do
    Enum.map(traces, fn
      {var, {:type, type, expr, location}} ->
        [
          "where \"",
          Macro.to_string(var),
          "\" was given the type ",
          Module.Types.format_type(type),
          " in:\n\n    # ",
          format_location(location),
          "    ",
          expr_to_string(expr),
          "\n\n"
        ]
      {var1, {:var, var2, expr, location}} ->
        [
          "where \"",
          Macro.to_string(var1),
          "\" was given the same type as \"",
          Macro.to_string(var2),
          "\" in:\n\n    # ",
          format_location(location),
          "    ",
          expr_to_string(expr),
          "\n\n"
        ]
    end)
  end
  # Renders "relative/path:line\n"; `line` may be nil, in which case it is
  # omitted.
  defp format_location({file, line}) do
    file = Path.relative_to_cwd(file)
    line = if line, do: [Integer.to_string(line)], else: []
    [file, ?:, line, ?\n]
  end
@doc false
def format_type({:union, types}) do
"#{Enum.map_join(types, " | ", &format_type/1)}"
end
def format_type({:tuple, types}) do
"{#{Enum.map_join(types, ", ", &format_type/1)}}"
end
def format_type({:list, type}) do
"[#{format_type(type)}]"
end
def format_type({:map, pairs}) do
case List.keytake(pairs, :__struct__, 0) do
{{:__struct__, struct}, pairs} ->
"%#{inspect(struct)}{#{format_map_pairs(pairs)}}"
nil ->
"%{#{format_map_pairs(pairs)}}"
end
end
def format_type({:atom, literal}) do
inspect(literal)
end
def format_type(atom) when is_atom(atom) do
"#{atom}()"
end
def format_type({:var, index}) do
"var#{index}"
end
defp format_map_pairs(pairs) do
Enum.map_join(pairs, ", ", fn {left, right} ->
"#{format_type(left)} => #{format_type(right)}"
end)
end
  @doc false
  # Converts a (possibly Erlang-rewritten) guard or definition AST back
  # into readable Elixir source for warning messages.
  def expr_to_string(expr) do
    expr
    |> reverse_rewrite()
    |> Macro.to_string()
  end
  # Undoes the :erlang.* rewrites applied by the compiler, restoring the
  # Elixir operators/functions the user originally wrote.
  defp reverse_rewrite(guard) do
    Macro.prewalk(guard, fn
      {:., _, [:erlang, :orelse]} -> :or
      {:., _, [:erlang, :andalso]} -> :and
      {{:., _, [mod, fun]}, _, args} -> erl_to_ex(mod, fun, args)
      other -> other
    end)
  end
  # Maps an Erlang module/function back to its Elixir counterpart; Kernel
  # functions are rendered unqualified.
  defp erl_to_ex(mod, fun, args) do
    case :elixir_rewrite.erl_to_ex(mod, fun, args) do
      {Kernel, fun, args} -> {fun, [], args}
      {mod, fun, args} -> {{:., [], [mod, fun]}, [], args}
    end
  end
end
| 27.374065 | 94 | 0.599708 |
1c3115d41b04a07de3b76e86a5ecb326cd4296b0 | 3,383 | ex | Elixir | questions/aggregates/00075000-countmembers.ex | morenoh149/pgexercises | ae5b46d84d5e4cdb6af49047960d7abd76e80f30 | [
"BSD-2-Clause"
] | null | null | null | questions/aggregates/00075000-countmembers.ex | morenoh149/pgexercises | ae5b46d84d5e4cdb6af49047960d7abd76e80f30 | [
"BSD-2-Clause"
] | null | null | null | questions/aggregates/00075000-countmembers.ex | morenoh149/pgexercises | ae5b46d84d5e4cdb6af49047960d7abd76e80f30 | [
"BSD-2-Clause"
] | 1 | 2019-10-08T04:46:40.000Z | 2019-10-08T04:46:40.000Z | |QUESTIONNAME|
Produce a list of member names, with each row containing the total member count
|QUESTION|
Produce a list of member names, with each row containing the total member count. Order by join date.
|QUERY|
select count(*) over(), firstname, surname
from cd.members
order by joindate
|ANSWER|
<p>Using the knowledge we've built up so far, the most obvious answer to this is below. We use a subquery because otherwise SQL will require us to group by firstname and surname, producing a different result to what we're looking for.</p>
<sql>
select (select count(*) from cd.members) as count, firstname, surname
from cd.members
order by joindate
</sql>
<p>There's nothing at all wrong with this answer, but we've chosen a different approach to introduce a new concept called window functions. Window functions provide enormously powerful capabilities, in a form often more convenient than the standard aggregation functions. While this exercise is only a toy, we'll be working on more complicated examples in the near future.</p>
<p>Window functions operate on the result set of your (sub-)query, after the <c>WHERE</c> clause and all standard aggregation. They operate on a <i>window</i> of data. By default this is unrestricted: the entire result set, but it can be restricted to provide more useful results. For example, suppose instead of wanting the count of all members, we want the count of all members who joined in the same month as that member:</p>
<sql>
select count(*) over(partition by date_trunc('month',joindate)),
firstname, surname
from cd.members
order by joindate
</sql>
<p>In this example, we partition the data by month. For each row the window function operates over, the window is any rows that have a joindate in the same month. The window function thus produces a count of the number of members who joined in that month.</p>
<p>You can go further. Imagine if, instead of the total number of members who joined that month, you want to know each member's join order within that month (first, second, and so on). You can do this by adding an <c>ORDER BY</c> to the window function:</p>
<sql>
select count(*) over(partition by date_trunc('month',joindate) order by joindate),
firstname, surname
from cd.members
order by joindate
</sql>
<p>The <c>ORDER BY</c> changes the window again. Instead of the window for each row being the entire partition, the window goes from the start of the partition to the current row, and not beyond. Thus, for the first member who joins in a given month, the count is 1. For the second, the count is 2, and so on.</p>
<p>One final thing that's worth mentioning about window functions: you can have multiple unrelated ones in the same query. Try out the query below for an example - you'll see the numbers for the members going in opposite directions! This flexibility can lead to more concise, readable, and maintainable queries.</p>
<sql>
select count(*) over(partition by date_trunc('month',joindate) order by joindate asc),
count(*) over(partition by date_trunc('month',joindate) order by joindate desc),
firstname, surname
from cd.members
order by joindate
</sql>
<p>Window functions are extraordinarily powerful, and they will change the way you write and think about SQL. Make good use of them!</p>
|HINT|
Read up on the <c>COUNT</c> window function.
|SORTED|
1
|PAGEID|
1DAA4B87-48BB-49A3-AC62-EB2EC1A11FB2
| 56.383333 | 431 | 0.765593 |
1c3116bc5bca19b56fde89625e12f3249d2a7630 | 381 | exs | Elixir | priv/repo/migrations/20180712100738_repos_builders_join_table.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 240 | 2018-11-06T09:36:54.000Z | 2022-02-20T07:12:36.000Z | priv/repo/migrations/20180712100738_repos_builders_join_table.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 363 | 2018-07-11T03:38:14.000Z | 2021-12-14T01:42:40.000Z | priv/repo/migrations/20180712100738_repos_builders_join_table.exs | mydearxym/mastani_server | f24034a4a5449200165cf4a547964a0961793eab | [
"Apache-2.0"
] | 22 | 2019-01-27T11:47:56.000Z | 2021-02-28T13:17:52.000Z | defmodule GroupherServer.Repo.Migrations.ReposBuildersJoinTable do
  use Ecto.Migration
  # Creates the repos <-> builders join table. Rows are removed
  # automatically when either referenced record is deleted.
  def change do
    create table(:repos_builders) do
      add(:repo_id, references(:cms_repos, on_delete: :delete_all), null: false)
      add(:builder_id, references(:cms_repo_builders, on_delete: :delete_all), null: false)
    end
    # NOTE(review): only repo_id is indexed — confirm lookups by builder
    # are not needed, otherwise add an index on builder_id too.
    create(index(:repos_builders, [:repo_id]))
  end
end
| 29.307692 | 91 | 0.737533 |
1c31230846d57e29a7884c8fded4e813107e8883 | 4,828 | ex | Elixir | apps/tai/lib/tai/venue_adapters/okex/create_order.ex | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | null | null | null | apps/tai/lib/tai/venue_adapters/okex/create_order.ex | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | 78 | 2020-10-12T06:21:43.000Z | 2022-03-28T09:02:00.000Z | apps/tai/lib/tai/venue_adapters/okex/create_order.ex | yurikoval/tai | 94254b45d22fa0307b01577ff7c629c7280c0295 | [
"MIT"
] | null | null | null | defmodule Tai.VenueAdapters.OkEx.CreateOrder do
@moduledoc """
Create orders for the OkEx adapter
"""
alias Tai.VenueAdapters.OkEx.ClientId
alias Tai.Orders
@type credentials :: Tai.Venues.Adapter.credentials()
@type order :: Orders.Order.t()
@type response :: Orders.Responses.CreateAccepted.t()
@type reason :: :insufficient_balance | :insufficient_position
@spec create_order(order, credentials) :: {:ok, response} | {:error, reason}
  # Entry point: submits the order to OkEx and normalizes the venue reply.
  def create_order(%Orders.Order{} = order, credentials) do
    {order, credentials}
    |> send_to_venue()
    |> parse_response()
  end
  # Builds the venue payload and submits it via the product-specific ExOkex
  # client. Returns `{venue_response, order}` so the parser can branch on
  # the order's product type.
  def send_to_venue({order, credentials}) do
    venue_config = credentials |> to_venue_config
    params = order |> build_params()
    mod = order |> module_for()
    {mod.create_bulk_orders(params, venue_config), order}
  end
  # Each OkEx product line has its own private REST client module.
  defp module_for(%Orders.Order{product_type: :future}), do: ExOkex.Futures.Private
  defp module_for(%Orders.Order{product_type: :swap}), do: ExOkex.Swap.Private
  defp module_for(%Orders.Order{product_type: :spot}), do: ExOkex.Spot.Private
  # Futures bulk-create payload. NOTE(review): futures use the "orders_data"
  # key while swap uses "order_data" — this appears to mirror the respective
  # OkEx API field names; confirm against the venue docs. Leverage is hard
  # coded to 20x.
  defp build_params(%Orders.Order{product_type: :future} = order) do
    %{
      instrument_id: order.venue_product_symbol,
      leverage: 20,
      orders_data: [
        order |> build_order_params()
      ]
    }
  end
  # Swap bulk-create payload.
  defp build_params(%Orders.Order{product_type: :swap} = order) do
    %{
      instrument_id: order.venue_product_symbol,
      leverage: 20,
      order_data: [
        order |> build_order_params()
      ]
    }
  end
  # Spot orders carry side/type directly instead of the numeric open/close
  # codes used by futures and swap.
  defp build_params(%Orders.Order{product_type: :spot} = order) do
    [
      %{
        instrument_id: order.venue_product_symbol,
        client_oid: order.client_id |> ClientId.to_venue(),
        price: order.price |> to_decimal_string,
        size: order.qty |> to_decimal_string,
        type: order.type,
        side: order.side,
        order_type: order |> to_venue_order_type
      }
    ]
  end
  # Shared order entry for futures/swap bulk payloads.
  # NOTE(review): match_price: 0 presumably means "use the given price"
  # rather than best counterparty price — confirm with the OkEx API docs.
  defp build_order_params(order) do
    %{
      client_oid: order.client_id |> ClientId.to_venue(),
      price: order.price |> to_decimal_string,
      size: order.qty |> to_decimal_string,
      type: order |> to_venue_type,
      order_type: order |> to_venue_order_type,
      match_price: 0
    }
  end
  # Credentials struct -> ExOkex venue config.
  defdelegate to_venue_config(credentials),
    to: Tai.VenueAdapters.OkEx.Credentials,
    as: :from
  # Renders Decimal prices/sizes using :normal (non-scientific) notation.
  defp to_decimal_string(price), do: price |> Decimal.to_string(:normal)
  # OkEx numeric "type" codes for futures/swap orders.
  @open_long 1
  @open_short 2
  @close_long 3
  @close_short 4
  # Buying while closing reduces a short; selling while closing reduces a long.
  defp to_venue_type(%Orders.Order{side: :buy, close: true}), do: @close_short
  defp to_venue_type(%Orders.Order{side: :sell, close: true}), do: @close_long
  defp to_venue_type(%Orders.Order{side: :buy}), do: @open_long
  defp to_venue_type(%Orders.Order{side: :sell}), do: @open_short
  # Maps time in force / post only to OkEx's numeric order_type:
  # 1 = post only (gtc), 2 = fill or kill, 3 = immediate or cancel,
  # 0 = default/normal for everything else.
  defp to_venue_order_type(%Orders.Order{time_in_force: :gtc, post_only: true}), do: 1
  defp to_venue_order_type(%Orders.Order{time_in_force: :fok}), do: 2
  defp to_venue_order_type(%Orders.Order{time_in_force: :ioc}), do: 3
  defp to_venue_order_type(_), do: 0
  # Venue error codes mapped to normalized reasons.
  # Swap: 35008 = insufficient balance, 35010 = insufficient position.
  # Futures: 32015 = insufficient balance, 32019 = insufficient position.
  defp parse_response({
         {:ok, %{"order_info" => [%{"error_code" => "35008", "error_message" => _} | _]}},
         %Orders.Order{product_type: :swap}
       }),
       do: {:error, :insufficient_balance}
  defp parse_response({
         {:ok, %{"order_info" => [%{"error_code" => "35010", "error_message" => _} | _]}},
         %Orders.Order{product_type: :swap}
       }),
       do: {:error, :insufficient_position}
  defp parse_response({
         {:ok, %{"order_info" => [%{"error_code" => "32015", "error_message" => _} | _]}},
         %Orders.Order{product_type: :future}
       }),
       do: {:error, :insufficient_balance}
  defp parse_response({
         {:ok, %{"order_info" => [%{"error_code" => "32019", "error_message" => _} | _]}},
         %Orders.Order{product_type: :future}
       }),
       do: {:error, :insufficient_position}
  # Spot responses are keyed differently; flatten all values and inspect
  # the first entry.
  defp parse_response({{:ok, response}, %Orders.Order{product_type: :spot}}) do
    response
    |> Map.values()
    |> List.flatten()
    |> parse_spot_response()
  end
  # OkEx signals a rejected order with order_id "-1".
  @invalid_venue_order_id "-1"
  defp parse_response({
         {:ok, %{"order_info" => [%{"order_id" => venue_order_id} | _]}},
         _
       })
       when venue_order_id != @invalid_venue_order_id do
    received_at = Tai.Time.monotonic_time()
    response = %Orders.Responses.CreateAccepted{id: venue_order_id, received_at: received_at}
    {:ok, response}
  end
  # Spot error 33017 = insufficient balance.
  defp parse_spot_response([%{"error_code" => "33017"} | _]), do: {:error, :insufficient_balance}
  defp parse_spot_response([%{"order_id" => venue_order_id} | _])
       when venue_order_id != @invalid_venue_order_id do
    received_at = Tai.Time.monotonic_time()
    response = %Orders.Responses.CreateAccepted{id: venue_order_id, received_at: received_at}
    {:ok, response}
  end
end
| 32.621622 | 97 | 0.65493 |
1c31251430e9f2c0a8d908d6e7f74e018ffe0cc2 | 1,907 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/http_fault_delay.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/compute/lib/google_api/compute/v1/model/http_fault_delay.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/compute/lib/google_api/compute/v1/model/http_fault_delay.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.HttpFaultDelay do
  @moduledoc """
  Specifies the delay introduced by the load balancer before forwarding the request to the backend service as part of fault injection.
  ## Attributes
  *   `fixedDelay` (*type:* `GoogleApi.Compute.V1.Model.Duration.t`, *default:* `nil`) - Specifies the value of the fixed delay interval.
  *   `percentage` (*type:* `float()`, *default:* `nil`) - The percentage of traffic for connections, operations, or requests for which a delay is introduced as part of fault injection. The value must be from 0.0 to 100.0 inclusive.
  """
  # NOTE: generated module (see file header) — change the generator, not this file.
  use GoogleApi.Gax.ModelBase
  # Struct type; both attributes are optional and default to nil.
  @type t :: %__MODULE__{
          :fixedDelay => GoogleApi.Compute.V1.Model.Duration.t() | nil,
          :percentage => float() | nil
        }
  # Field definitions mirror the attribute names documented in @moduledoc.
  field(:fixedDelay, as: GoogleApi.Compute.V1.Model.Duration)
  field(:percentage)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.HttpFaultDelay do
  # Delegates JSON decoding to the generated model's own decode/2.
  def decode(value, options),
    do: GoogleApi.Compute.V1.Model.HttpFaultDelay.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.HttpFaultDelay do
  # All generated models share the Gax.ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 38.14 | 232 | 0.737808 |
1c316b57f487ffeecd46b48be3e60d1ad2517426 | 584 | exs | Elixir | mix.exs | ghost-in-the-shell/nerve | 71a4a79cfeddb27d487793e93d2532b0d085785f | [
"MIT"
] | null | null | null | mix.exs | ghost-in-the-shell/nerve | 71a4a79cfeddb27d487793e93d2532b0d085785f | [
"MIT"
] | null | null | null | mix.exs | ghost-in-the-shell/nerve | 71a4a79cfeddb27d487793e93d2532b0d085785f | [
"MIT"
defmodule Nerve.Mixfile do
  use Mix.Project

  # Mix project definition.
  #
  # `deps` is now called with explicit parentheses: the ambiguous parenless
  # zero-arity local call (`deps: deps`) is deprecated since Elixir 1.4 and
  # removed in modern releases, while `deps()` is valid on every version
  # this project supports (`~> 1.0`).
  def project do
    [app: :nerve,
     version: "0.0.1",
     elixir: "~> 1.0",
     deps: deps()]
  end
  # Configuration for the OTP application
  #
  # Type `mix help compile.app` for more information
  def application do
    [applications: [:logger]]
  end
  # Dependencies can be Hex packages:
  #
  #   {:mydep, "~> 0.3.0"}
  #
  # Or git/path repositories:
  #
  #   {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
  #
  # Type `mix help deps` for more examples and options
  defp deps do
    []
  end
end
| 18.83871 | 77 | 0.604452 |
1c31ab2d521d43e78cdc5a1db1633bdbb4239f98 | 1,133 | exs | Elixir | config/config.exs | takasehideki/logistic_map | a35bc295afea79eec10bb5ba18f2566702894258 | [
"Apache-2.0"
] | null | null | null | config/config.exs | takasehideki/logistic_map | a35bc295afea79eec10bb5ba18f2566702894258 | [
"Apache-2.0"
] | null | null | null | config/config.exs | takasehideki/logistic_map | a35bc295afea79eec10bb5ba18f2566702894258 | [
"Apache-2.0"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
# NOTE(review): `use Mix.Config` is deprecated since Elixir 1.9 in favor of
# `import Config` - switch once the project's minimum Elixir version allows it.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :logistic_map, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:logistic_map, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.548387 | 73 | 0.753751 |
1c31c32b2e477bd459bd85910ebebf55eb3f2b8c | 2,254 | exs | Elixir | mix.exs | pedrogh/pact_elixir | 50c7281137121e6ebfdfb900b4f702946013283f | [
"MIT"
] | null | null | null | mix.exs | pedrogh/pact_elixir | 50c7281137121e6ebfdfb900b4f702946013283f | [
"MIT"
] | null | null | null | mix.exs | pedrogh/pact_elixir | 50c7281137121e6ebfdfb900b4f702946013283f | [
"MIT"
] | null | null | null | defmodule PactElixir.MixProject do
@moduledoc false
use Mix.Project
  # Mix project configuration for the :pact_elixir package.
  def project do
    [
      app: :pact_elixir,
      version: "0.5.2",
      elixir: "~> 1.7",
      name: "PactElixir",
      start_permanent: Mix.env() == :prod,
      description: description(),
      package: package(),
      deps: deps(),
      # Coverage via ExCoveralls; preferred_cli_env makes the coveralls
      # tasks run in the :test environment.
      test_coverage: [tool: ExCoveralls],
      preferred_cli_env: [
        coveralls: :test,
        "coveralls.detail": :test,
        "coveralls.post": :test,
        "coveralls.html": :test
      ],
      # Compile the Rust NIF crate(s) before the regular Elixir compilers
      # (crates declared in rustler_crates/0 below).
      compilers: [:rustler] ++ Mix.compilers(),
      rustler_crates: rustler_crates(),
      source_url: "https://github.com/elitau/pact_elixir",
      homepage_url: "https://github.com/elitau/pact_elixir",
      # The main page in the docs
      docs: [main: "readme", extras: ["README.md"]]
    ]
  end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger],
mod: {PactElixir.Application, []}
]
end
# Run "mix help deps" to learn about dependencies.
  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      # NIF toolchain; also wired into :compilers/:rustler_crates in project/0.
      {:rustler, "~> 0.21"},
      {:poison, "~> 4.0"},
      # Documentation generation (see the :docs options in project/0).
      {:ex_doc, "~> 0.24.2", only: :dev, runtime: false},
      {:httpoison, "~> 1.0", only: :test},
      # Coverage reporting, configured via :test_coverage/:preferred_cli_env.
      {:excoveralls, "~> 0.12", only: :test},
      {:temp, "~> 0.4", only: :test},
      {:credo, "~> 1.0", only: [:dev, :test], runtime: false},
      {:inch_ex, "~> 2.0.0", only: :docs},
      {:dialyxir, "~> 1.1.0", only: [:dev, :test], runtime: false}
    ]
  end
def rustler_crates do
[
pactmockserver: [
path: "native/pactmockserver",
mode: if(Mix.env() == :prod, do: :release, else: :debug)
]
]
end
  # One-line package description published to Hex (referenced by project/0).
  defp description do
    """
    Elixir version of Pact. Enables consumer driven contract testing, providing a mock service and DSL for the consumer project.
    """

    # TODO Also provides interaction playback and verification for the service provider project.
  end
defp package do
[
maintainers: ["Eduard Litau"],
licenses: ["MIT"],
files: ["lib", "native", "mix.exs", "README.md", "LICENSE"],
links: %{"GitHub" => "https://github.com/elitau/pact_elixir"},
source_url: "https://github.com/elitau/pact_elixir"
]
end
end
| 27.82716 | 128 | 0.575865 |
1c31e53764a32a591d3ee35c7ea0b267cdde5360 | 16,708 | exs | Elixir | test/phoenix/test/conn_test.exs | Douvi/phoenix | 7832e45e056a48f9dd4bd8178d6c6b0569f19194 | [
"MIT"
] | null | null | null | test/phoenix/test/conn_test.exs | Douvi/phoenix | 7832e45e056a48f9dd4bd8178d6c6b0569f19194 | [
"MIT"
] | null | null | null | test/phoenix/test/conn_test.exs | Douvi/phoenix | 7832e45e056a48f9dd4bd8178d6c6b0569f19194 | [
"MIT"
] | null | null | null | defmodule Phoenix.Test.ConnTest.CatchAll do
def init(opts), do: opts
def call(conn, :stat), do: conn.params["action"].(conn)
def call(conn, _opts), do: Plug.Conn.assign(conn, :catch_all, true)
end
alias Phoenix.Test.ConnTest.CatchAll
defmodule Phoenix.Test.ConnTest.RedirRouter do
  use Phoenix.Router

  # Both routes dispatch to CatchAll; this router exists so tests can
  # exercise redirected_params/1 against a route with an :id segment.
  get "/", CatchAll, :foo
  get "/posts/:id", CatchAll, :some_action
end
defmodule Phoenix.Test.ConnTest.Router do
  use Phoenix.Router

  # Each pipeline records itself in conn.assigns[:bypassed] so tests can
  # assert which pipelines ran (see put_bypass/2 below).
  pipeline :browser do
    plug :put_bypass, :browser
  end

  pipeline :api do
    plug :put_bypass, :api
  end

  scope "/" do
    pipe_through :browser

    # /stat also tags the conn with private :route for route assertions.
    get "/stat", CatchAll, :stat, private: %{route: :stat}
    forward "/", CatchAll
  end

  # Appends `pipeline` to the :bypassed list kept in conn.assigns.
  def put_bypass(conn, pipeline) do
    bypassed = (conn.assigns[:bypassed] || []) ++ [pipeline]
    Plug.Conn.assign(conn, :bypassed, bypassed)
  end
end
defmodule Phoenix.Test.ConnTest do
use ExUnit.Case, async: true
use Phoenix.ConnTest
alias Phoenix.Test.ConnTest.{Router, RedirRouter}
@moduletag :capture_log
  # Minimal exception carrying a Plug-style :plug_status, used to simulate
  # errors with a specific HTTP status in the assert_error_sent/2 tests.
  defmodule ConnError do
    defexception [message: nil, plug_status: 500]
  end
Application.put_env(:phoenix, Phoenix.Test.ConnTest.Endpoint, [])
  defmodule Endpoint do
    use Phoenix.Endpoint, otp_app: :phoenix

    def init(opts), do: opts

    # `:set` short-circuits with a plain 200 response (no router dispatch).
    def call(conn, :set), do: resp(conn, 200, "ok")

    # Otherwise run the regular endpoint pipeline, record the endpoint opts
    # in conn.private[:endpoint], then hand off to the test Router.
    def call(conn, opts) do
      put_in(super(conn, opts).private[:endpoint], opts)
      |> Router.call(Router.init([]))
    end
  end
@endpoint Endpoint
  setup_all do
    # Silence logging for the whole suite and boot the test endpoint once.
    Logger.disable(self())
    Endpoint.start_link()
    :ok
  end
test "build_conn/0 returns a new connection" do
conn = build_conn()
assert conn.method == "GET"
assert conn.path_info == []
assert conn.private.plug_skip_csrf_protection
assert conn.private.phoenix_recycled
end
test "build_conn/2 returns a new connection" do
conn = build_conn(:post, "/hello")
assert conn.method == "POST"
assert conn.path_info == ["hello"]
assert conn.private.plug_skip_csrf_protection
assert conn.private.phoenix_recycled
end
test "dispatch/5 with path" do
conn = post build_conn(), "/hello", foo: "bar"
assert conn.method == "POST"
assert conn.path_info == ["hello"]
assert conn.params == %{"foo" => "bar"}
assert conn.private.endpoint == []
refute conn.private.phoenix_recycled
end
test "dispatch/5 with action" do
conn = post build_conn(), :hello, %{foo: "bar"}
assert conn.method == "POST"
assert conn.path_info == []
assert conn.params == %{"foo" => "bar"}
assert conn.private.endpoint == :hello
refute conn.private.phoenix_recycled
end
test "dispatch/5 with binary body" do
assert_raise ArgumentError, fn ->
post build_conn(), :hello, "foo=bar"
end
conn =
build_conn()
|> put_req_header("content-type", "application/json")
|> post(:hello, "[1, 2, 3]")
|> Plug.Parsers.call(Plug.Parsers.init(parsers: [:json], json_decoder: Phoenix.json_library()))
assert conn.method == "POST"
assert conn.path_info == []
assert conn.params == %{"_json" => [1, 2, 3]}
end
test "dispatch/5 with recycling" do
conn =
build_conn()
|> put_req_header("hello", "world")
|> post(:hello)
assert get_req_header(conn, "hello") == ["world"]
conn =
conn
|> put_req_header("hello", "skipped")
|> post(:hello)
assert get_req_header(conn, "hello") == []
conn =
conn
|> recycle()
|> put_req_header("hello", "world")
|> post(:hello)
assert get_req_header(conn, "hello") == ["world"]
end
test "dispatch/5 with :set state automatically sends" do
conn = get build_conn(), :set
assert conn.state == :sent
assert conn.status == 200
assert conn.resp_body == "ok"
refute conn.private.phoenix_recycled
end
describe "recycle/1" do
test "relevant request headers are persisted" do
conn =
build_conn()
|> get("/")
|> put_req_header("accept", "text/html")
|> put_req_header("authorization", "Bearer mytoken")
|> put_req_header("hello", "world")
conn = conn |> recycle()
assert get_req_header(conn, "accept") == ["text/html"]
assert get_req_header(conn, "authorization") == ["Bearer mytoken"]
assert get_req_header(conn, "hello") == []
end
test "host is persisted" do
conn =
build_conn(:get, "http://localhost/", nil)
|> recycle()
assert conn.host == "localhost"
end
test "cookies are persisted" do
conn =
build_conn()
|> get("/")
|> put_req_cookie("req_cookie", "req_cookie")
|> put_req_cookie("del_cookie", "del_cookie")
|> put_req_cookie("over_cookie", "pre_cookie")
|> put_resp_cookie("over_cookie", "pos_cookie")
|> put_resp_cookie("resp_cookie", "resp_cookie")
|> delete_resp_cookie("del_cookie")
conn = conn |> recycle() |> fetch_cookies()
assert conn.cookies == %{"req_cookie" => "req_cookie",
"over_cookie" => "pos_cookie",
"resp_cookie" => "resp_cookie"}
end
test "peer data is persisted" do
peer_data = %{
address: {127, 0, 0, 1},
port: 111317,
ssl_cert: <<1, 2, 3, 4>>
}
conn =
build_conn()
|> Plug.Test.put_peer_data(peer_data)
conn = conn |> recycle()
assert Plug.Conn.get_peer_data(conn) == peer_data
end
end
test "ensure_recycled/1" do
conn =
build_conn()
|> put_req_header("hello", "world")
|> ensure_recycled()
assert get_req_header(conn, "hello") == ["world"]
conn =
put_in(conn.private.phoenix_recycled, false)
|> ensure_recycled()
assert get_req_header(conn, "hello") == []
end
test "put_req_header/3 and delete_req_header/3" do
conn = build_conn(:get, "/")
assert get_req_header(conn, "foo") == []
conn = put_req_header(conn, "foo", "bar")
assert get_req_header(conn, "foo") == ["bar"]
conn = put_req_header(conn, "foo", "baz")
assert get_req_header(conn, "foo") == ["baz"]
conn = delete_req_header(conn, "foo")
assert get_req_header(conn, "foo") == []
end
test "put_req_cookie/3 and delete_req_cookie/2" do
conn = build_conn(:get, "/")
assert get_req_header(conn, "cookie") == []
conn = conn |> put_req_cookie("foo", "bar")
assert get_req_header(conn, "cookie") == ["foo=bar"]
conn = conn |> delete_req_cookie("foo")
assert get_req_header(conn, "cookie") == []
end
test "response/2" do
conn = build_conn(:get, "/")
assert conn |> resp(200, "ok") |> response(200) == "ok"
assert conn |> send_resp(200, "ok") |> response(200) == "ok"
assert conn |> send_resp(200, "ok") |> response(:ok) == "ok"
assert_raise RuntimeError,
~r"expected connection to have a response but no response was set/sent", fn ->
build_conn(:get, "/") |> response(200)
end
assert_raise RuntimeError,
"expected response with status 200, got: 404, with body:\noops", fn ->
build_conn(:get, "/") |> resp(404, "oops") |> response(200)
end
end
test "html_response/2" do
assert build_conn(:get, "/") |> put_resp_content_type("text/html")
|> resp(200, "ok") |> html_response(:ok) == "ok"
assert_raise RuntimeError,
"no content-type was set, expected a html response", fn ->
build_conn(:get, "/") |> resp(200, "ok") |> html_response(200)
end
end
test "json_response/2" do
assert build_conn(:get, "/") |> put_resp_content_type("application/json")
|> resp(200, "{}") |> json_response(:ok) == %{}
assert build_conn(:get, "/") |> put_resp_content_type("application/vnd.api+json")
|> resp(200, "{}") |> json_response(:ok) == %{}
assert build_conn(:get, "/") |> put_resp_content_type("application/vnd.collection+json")
|> resp(200, "{}") |> json_response(:ok) == %{}
assert build_conn(:get, "/") |> put_resp_content_type("application/vnd.hal+json")
|> resp(200, "{}") |> json_response(:ok) == %{}
assert build_conn(:get, "/") |> put_resp_content_type("application/ld+json")
|> resp(200, "{}") |> json_response(:ok) == %{}
assert_raise RuntimeError,
"no content-type was set, expected a json response", fn ->
build_conn(:get, "/") |> resp(200, "ok") |> json_response(200)
end
assert_raise Jason.DecodeError,
"unexpected byte at position 0: 0x6F ('o')", fn ->
build_conn(:get, "/") |> put_resp_content_type("application/json")
|> resp(200, "ok") |> json_response(200)
end
assert_raise Jason.DecodeError, ~r/unexpected end of input at position 0/, fn ->
build_conn(:get, "/")
|> put_resp_content_type("application/json")
|> resp(200, "")
|> json_response(200)
end
assert_raise RuntimeError, ~s(expected response with status 200, got: 400, with body:\n{"error": "oh oh"}), fn ->
build_conn(:get, "/")
|> put_resp_content_type("application/json")
|> resp(400, ~s({"error": "oh oh"}))
|> json_response(200)
end
end
test "text_response/2" do
assert build_conn(:get, "/") |> put_resp_content_type("text/plain")
|> resp(200, "ok") |> text_response(:ok) == "ok"
assert_raise RuntimeError,
"no content-type was set, expected a text response", fn ->
build_conn(:get, "/") |> resp(200, "ok") |> text_response(200)
end
end
test "response_content_type/2" do
conn = build_conn(:get, "/")
assert put_resp_content_type(conn, "text/html") |> response_content_type(:html) ==
"text/html; charset=utf-8"
assert put_resp_content_type(conn, "text/plain") |> response_content_type(:text) ==
"text/plain; charset=utf-8"
assert put_resp_content_type(conn, "application/json") |> response_content_type(:json) ==
"application/json; charset=utf-8"
assert_raise RuntimeError,
"no content-type was set, expected a html response", fn ->
conn |> response_content_type(:html)
end
assert_raise RuntimeError,
"expected content-type for html, got: \"text/plain; charset=utf-8\"", fn ->
put_resp_content_type(conn, "text/plain") |> response_content_type(:html)
end
end
test "redirected_to/1" do
conn =
build_conn(:get, "/")
|> put_resp_header("location", "new location")
|> send_resp(302, "foo")
assert redirected_to(conn) == "new location"
end
test "redirected_to/2" do
Enum.each 300..308, fn(status) ->
conn =
build_conn(:get, "/")
|> put_resp_header("location", "new location")
|> send_resp(status, "foo")
assert redirected_to(conn, status) == "new location"
end
end
test "redirected_to/2 with status atom" do
conn =
build_conn(:get, "/")
|> put_resp_header("location", "new location")
|> send_resp(301, "foo")
assert redirected_to(conn, :moved_permanently) == "new location"
end
test "redirected_to/2 without header" do
assert_raise RuntimeError,
"no location header was set on redirected_to", fn ->
assert build_conn(:get, "/")
|> send_resp(302, "ok")
|> redirected_to()
end
end
test "redirected_to/2 without redirection" do
assert_raise RuntimeError,
"expected redirection with status 302, got: 200", fn ->
build_conn(:get, "/")
|> put_resp_header("location", "new location")
|> send_resp(200, "ok")
|> redirected_to()
end
end
test "redirected_to/2 without response" do
assert_raise RuntimeError,
~r"expected connection to have redirected but no response was set/sent", fn ->
build_conn(:get, "/")
|> redirected_to()
end
end
describe "redirected_params/1" do
test "with matching route" do
conn =
build_conn(:get, "/")
|> RedirRouter.call(RedirRouter.init([]))
|> put_resp_header("location", "/posts/123")
|> send_resp(302, "foo")
assert redirected_params(conn) == %{id: "123"}
end
test "raises Phoenix.Router.NoRouteError for unmatched location" do
conn =
build_conn(:get, "/")
|> RedirRouter.call(RedirRouter.init([]))
|> put_resp_header("location", "/unmatched")
|> send_resp(302, "foo")
assert_raise Phoenix.Router.NoRouteError, fn ->
redirected_params(conn)
end
end
test "without redirection" do
assert_raise RuntimeError,
"expected redirection with status 302, got: 200", fn ->
build_conn(:get, "/")
|> RedirRouter.call(RedirRouter.init([]))
|> put_resp_header("location", "new location")
|> send_resp(200, "ok")
|> redirected_params()
end
end
end
test "bypass_through/3 bypasses route match and invokes pipeline" do
conn = get(build_conn(), "/")
assert conn.assigns[:catch_all]
conn =
build_conn()
|> bypass_through(Router, :browser)
|> get("/stat")
assert conn.assigns[:bypassed] == [:browser]
assert conn.private[:route] == :stat
refute conn.assigns[:catch_all]
conn =
build_conn()
|> bypass_through(Router, [:api])
|> get("/stat")
assert conn.assigns[:bypassed] == [:api]
assert conn.private[:route] == :stat
refute conn.assigns[:catch_all]
conn =
build_conn()
|> bypass_through(Router, [:browser, :api])
|> get("/stat")
assert conn.assigns[:bypassed] == [:browser, :api]
assert conn.private[:route] == :stat
refute conn.assigns[:catch_all]
end
test "bypass_through/3 with empty pipeline" do
conn =
build_conn()
|> bypass_through(Router, [])
|> get("/stat")
refute conn.assigns[:bypassed]
assert conn.private[:route] == :stat
refute conn.assigns[:catch_all]
end
test "bypass_through/2 with route pipeline" do
conn =
build_conn()
|> bypass_through(Router)
|> get("/stat")
assert conn.assigns[:bypassed] == [:browser]
assert conn.private[:route] == :stat
refute conn.assigns[:catch_all]
end
test "bypass_through/1 without router" do
conn =
build_conn()
|> bypass_through()
|> get("/stat")
refute conn.assigns[:bypassed]
assert conn.private[:route] == :stat
refute conn.assigns[:catch_all]
end
test "assert_error_sent/2 with expected error response" do
response = assert_error_sent :not_found, fn ->
get(build_conn(), "/stat", action: fn _ -> raise ConnError, plug_status: 404 end)
end
assert {404, [_h | _t], "404.html from Phoenix.ErrorView"} = response
response = assert_error_sent 400, fn ->
get(build_conn(), "/stat", action: fn _ -> raise ConnError, plug_status: 400 end)
end
assert {400, [_h | _t], "400.html from Phoenix.ErrorView"} = response
end
test "assert_error_sent/2 with status mismatch assertion" do
assert_raise ExUnit.AssertionError, ~r/expected error to be sent as 400 status, but got 500 from.*RuntimeError/s, fn ->
assert_error_sent 400, fn ->
get(build_conn(), "/stat", action: fn _conn -> raise RuntimeError end)
end
end
end
test "assert_error_sent/2 with no error" do
assert_raise ExUnit.AssertionError, ~r/expected error to be sent as 404 status, but no error happened/, fn ->
assert_error_sent 404, fn -> get(build_conn(), "/") end
end
end
test "assert_error_sent/2 with error but no response" do
assert_raise ExUnit.AssertionError, ~r/expected error to be sent as 404 status, but got an error with no response from.*RuntimeError/s, fn ->
assert_error_sent 404, fn -> raise "oops" end
end
end
test "assert_error_sent/2 with response but no error" do
assert_raise ExUnit.AssertionError, ~r/expected error to be sent as 400 status, but response sent 400 without error/, fn ->
assert_error_sent :bad_request, fn ->
get(build_conn(), "/stat", action: fn conn -> Plug.Conn.send_resp(conn, 400, "") end)
end
end
end
for method <- [:get, :post, :put, :delete] do
@method method
test "#{method} helper raises ArgumentError for mismatched conn" do
assert_raise ArgumentError, ~r/expected first argument to #{@method} to be/, fn ->
unquote(@method)("/foo/bar", %{baz: "baz"})
end
end
end
end
| 30.600733 | 145 | 0.612042 |
1c31eb10256b4ee8c440674c14f149479d59998d | 3,291 | ex | Elixir | lib/mix2nix.ex | ydlr/mix2nix | d7ecbf63b7e5e31c1aea4c1c60c21f2d0f69e9f6 | [
"MIT"
] | 3 | 2021-05-19T07:33:28.000Z | 2021-08-25T12:43:16.000Z | lib/mix2nix.ex | ydlr/mix2nix | d7ecbf63b7e5e31c1aea4c1c60c21f2d0f69e9f6 | [
"MIT"
] | 11 | 2021-05-01T03:44:41.000Z | 2021-08-09T13:47:56.000Z | lib/mix2nix.ex | ydlr/mix2nix | d7ecbf63b7e5e31c1aea4c1c60c21f2d0f69e9f6 | [
"MIT"
# ] | 1 | 2021-05-11T06:21:36.000Z | 2021-05-11T06:21:36.000Z |
defmodule Mix2nix do
  @moduledoc """
  Converts a `mix.lock` file into a Nix expression set for building Hex
  packages with `beamPackages` from nixpkgs.
  """

  @doc """
  Reads the lock file at `filename` and returns the generated Nix
  expression as a string.
  """
  def process(filename) do
    filename
    |> read
    |> expression_set
  end

  @doc """
  Builds the full Nix expression set from a lock map (as produced by
  evaluating `mix.lock`): one derivation per hexpm entry, wrapped in the
  standard `beamPackages` boilerplate.
  """
  def expression_set(deps) do
    deps
    |> Map.to_list()
    |> Enum.sort(:asc)
    |> Enum.map(fn {_, v} -> nix_expression(deps, v) end)
    |> Enum.reject(fn x -> x == "" end)
    |> Enum.join("\n")
    |> String.trim("\n")
    |> wrap
  end

  # Reads and evaluates the lock file, returning the lock map.
  # NOTE: this evaluates the file's contents as Elixir code
  # (Code.eval_quoted/2), so it must only be run on trusted lock files.
  defp read(filename) do
    opts = [file: filename, warn_on_unnecessary_quotes: false]

    with {:ok, contents} <- File.read(filename),
         {:ok, quoted} <- Code.string_to_quoted(contents, opts),
         {%{} = lock, _} <- Code.eval_quoted(quoted, opts) do
      lock
    else
      {:error, posix} when is_atom(posix) ->
        raise to_string(:file.format_error(posix))

      {:error, {line, error, token}} when is_integer(line) ->
        raise "Error on line #{line}: #{error} (#{inspect(token)})"
    end
  end

  @doc """
  Returns true when the dependency described by the hex options is needed:
  either it is non-optional, or some package in `allpkgs` provides it.
  """
  def is_required(allpkgs, [hex: name, repo: _, optional: optional]) do
    Map.has_key?(allpkgs, name) or not optional
  end

  @doc """
  Renders the Nix list of build dependencies (e.g. `"[ foo bar ]"`) for the
  given lock `deps` entries, or `"[]"` when none are required.
  """
  def dep_string(allpkgs, deps) do
    dep_str =
      deps
      |> Enum.filter(fn x -> is_required(allpkgs, elem(x, 2)) end)
      |> Enum.map(fn x -> Atom.to_string(elem(x, 0)) end)
      |> Enum.join(" ")

    # Idiomatic emptiness check instead of O(n) String.length/1 > 0.
    if dep_str == "" do
      "[]"
    else
      "[ " <> dep_str <> " ]"
    end
  end

  @doc """
  Hard-coded builder overrides for packages whose lock metadata does not
  select the right nixpkgs builder. Returns `false` when none applies.
  """
  def specific_workaround(pkg) do
    case pkg do
      "cowboy" -> "buildErlangMk"
      "ssl_verify_fun" -> "buildRebar3"
      "jose" -> "buildMix"
      _ -> false
    end
  end

  @doc """
  Picks the nixpkgs builder function for a package from its declared build
  tools, honouring `specific_workaround/1` first; defaults to `"buildMix"`.
  """
  def get_build_env(builders, pkgname) do
    cond do
      specific_workaround(pkgname) ->
        specific_workaround(pkgname)

      Enum.member?(builders, :mix) ->
        "buildMix"

      Enum.member?(builders, :rebar3) or Enum.member?(builders, :rebar) ->
        "buildRebar3"

      Enum.member?(builders, :make) ->
        "buildErlangMk"

      true ->
        "buildMix"
    end
  end

  @doc """
  Prefetches the package tarball from hex.pm via `nix-prefetch-url` and
  returns its Nix sha256. Raises if the command exits non-zero.
  """
  def get_hash(name, version) do
    url = "https://repo.hex.pm/tarballs/#{name}-#{version}.tar"
    {result, status} = System.cmd("nix-prefetch-url", [url])

    case status do
      0 -> String.trim(result)
      _ -> raise "Use of nix-prefetch-url failed."
    end
  end

  @doc """
  Renders the Nix derivation snippet for a single lock entry. Entries that
  are not hexpm packages render as the empty string.
  """
  def nix_expression(
        allpkgs,
        {:hex, name, version, _hash, builders, deps, "hexpm", _hash2}
      ),
      do: get_hexpm_expression(allpkgs, name, version, builders, deps)

  def nix_expression(
        allpkgs,
        {:hex, name, version, _hash, builders, deps, "hexpm"}
      ),
      do: get_hexpm_expression(allpkgs, name, version, builders, deps)

  def nix_expression(_allpkgs, _pkg), do: ""

  # Renders one `name = builder rec { ... };` attribute for a hexpm entry.
  defp get_hexpm_expression(allpkgs, name, version, builders, deps) do
    name = Atom.to_string(name)
    build_env = get_build_env(builders, name)
    sha256 = get_hash(name, version)
    deps = dep_string(allpkgs, deps)

    """
    #{name} = #{build_env} rec {
      name = "#{name}";
      version = "#{version}";
      src = fetchHex {
        pkg = "${name}";
        version = "${version}";
        sha256 = "#{sha256}";
      };
      beamDeps = #{deps};
    };
    """
  end

  # Wraps the generated package attributes in the standard
  # `{ lib, beamPackages, overrides }` boilerplate expected by nixpkgs.
  defp wrap(pkgs) do
    """
    { lib, beamPackages, overrides ? (x: y: {}) }:
    let
      buildRebar3 = lib.makeOverridable beamPackages.buildRebar3;
      buildMix = lib.makeOverridable beamPackages.buildMix;
      buildErlangMk = lib.makeOverridable beamPackages.buildErlangMk;
      self = packages // (overrides self packages);
      packages = with beamPackages; with self; {
    #{pkgs}
      };
    in self
    """
  end
end
| 23.340426 | 72 | 0.625646 |
1c3212341c292c5d71b6a3e0443c26c78434eb4b | 3,574 | ex | Elixir | lib/k8s/client/runner/watch.ex | iautom8things/k8s | 6184c70581b754eb560701ba7954900089403939 | [
"MIT"
] | null | null | null | lib/k8s/client/runner/watch.ex | iautom8things/k8s | 6184c70581b754eb560701ba7954900089403939 | [
"MIT"
] | null | null | null | lib/k8s/client/runner/watch.ex | iautom8things/k8s | 6184c70581b754eb560701ba7954900089403939 | [
"MIT"
] | null | null | null | defmodule K8s.Client.Runner.Watch do
@moduledoc """
`K8s.Client` runner that will watch a resource or resources and stream results back to a process.
"""
@resource_version_json_path ~w(metadata resourceVersion)
alias K8s.Client.Runner.Base
alias K8s.Operation
@doc """
Watch a resource or list of resources. Provide the `stream_to` option or results will be stream to `self()`.
Note: Current resource version will be looked up automatically.
## Examples
```elixir
{:ok, conn} = K8s.Conn.lookup(:test)
operation = K8s.Client.list("v1", "Namespace")
{:ok, reference} = Watch.run(operation, conn, stream_to: self())
```
```elixir
{:ok, conn} = K8s.Conn.lookup(:test)
operation = K8s.Client.get("v1", "Namespace", [name: "test"])
{:ok, reference} = Watch.run(operation, conn, stream_to: self())
```
"""
@spec run(Operation.t(), K8s.Conn.t(), keyword(atom)) :: Base.result_t()
def run(%Operation{method: :get} = operation, conn, opts) do
case get_resource_version(operation, conn) do
{:ok, rv} -> run(operation, conn, rv, opts)
error -> error
end
end
def run(op, _, _),
do: {:error, "Only HTTP GET operations (list, get) are supported. #{inspect(op)}"}
@doc """
Watch a resource or list of resources from a specific resource version. Provide the `stream_to` option or results will be stream to `self()`.
## Examples
```elixir
{:ok, conn} = K8s.Conn.lookup(:test)
operation = K8s.Client.list("v1", "Namespace")
resource_version = 3003
{:ok, reference} = Watch.run(operation, conn, resource_version, stream_to: self())
```
```elixir
{:ok, conn} = K8s.Conn.lookup(:test)
operation = K8s.Client.get("v1", "Namespace", [name: "test"])
resource_version = 3003
{:ok, reference} = Watch.run(operation, conn, resource_version, stream_to: self())
```
"""
  @spec run(Operation.t(), K8s.Conn.t(), binary, keyword(atom)) :: Base.result_t()
  # List operations: attach watch=true plus the resource version and run.
  def run(%Operation{method: :get, verb: verb} = operation, conn, rv, opts)
      when verb in [:list, :list_all_namespaces] do
    opts_w_watch_params = add_watch_params_to_opts(opts, rv)
    Base.run(operation, conn, opts_w_watch_params)
  end

  # Single-resource GET: rewrite it as a LIST narrowed by a fieldSelector
  # (see get_to_list/1) and watch that list instead.
  def run(%Operation{method: :get, verb: :get} = operation, conn, rv, opts) do
    {list_op, field_selector_param} = get_to_list(operation)

    # Note merge order: the generated fieldSelector overrides any
    # caller-supplied fieldSelector in opts[:params].
    params = Map.merge(opts[:params] || %{}, field_selector_param)
    opts = Keyword.put(opts, :params, params)
    run(list_op, conn, rv, opts)
  end

  def run(op, _, _, _),
    do: {:error, "Only HTTP GET operations (list, get) are supported. #{inspect(op)}"}
@spec get_resource_version(Operation.t(), K8s.Conn.t()) :: {:ok, binary} | {:error, binary}
defp get_resource_version(%Operation{} = operation, conn) do
case Base.run(operation, conn) do
{:ok, payload} ->
rv = parse_resource_version(payload)
{:ok, rv}
error ->
error
end
end
@spec add_watch_params_to_opts(keyword, binary) :: keyword
defp add_watch_params_to_opts(opts, rv) do
params = Map.merge(opts[:params] || %{}, %{"resourceVersion" => rv, "watch" => true})
Keyword.put(opts, :params, params)
end
@spec parse_resource_version(any) :: binary
defp parse_resource_version(%{} = payload),
do: get_in(payload, @resource_version_json_path) || "0"
defp parse_resource_version(_), do: "0"
  # Converts a GET-single-resource operation into the equivalent LIST
  # operation plus a fieldSelector narrowing the list to that one name,
  # so a single resource can be watched via the list endpoint.
  defp get_to_list(get_op) do
    list_op = %{get_op | verb: :list, path_params: []}
    name = get_op.path_params[:name]
    # NOTE(review): "%3D" is a URL-encoded "=". Presumably the selector is
    # pre-encoded deliberately - confirm the HTTP layer does not encode it
    # a second time.
    params = %{"fieldSelector" => "metadata.name%3D#{name}"}
    {list_op, params}
  end
end
| 32.788991 | 143 | 0.659765 |
1c322488cb029f117f03400cfd54aeeccecb355b | 3,914 | exs | Elixir | test/club/support/validators_test.exs | vheathen/club.wallprint.pro | d58d2409d8879d23ed4d60fe3b9c2e1bd82e924d | [
"MIT"
] | null | null | null | test/club/support/validators_test.exs | vheathen/club.wallprint.pro | d58d2409d8879d23ed4d60fe3b9c2e1bd82e924d | [
"MIT"
] | 34 | 2019-11-10T11:31:37.000Z | 2019-11-27T21:26:48.000Z | test/club/support/validators_test.exs | vheathen/club.wallprint.pro | d58d2409d8879d23ed4d60fe3b9c2e1bd82e924d | [
"MIT"
] | null | null | null | defmodule Club.Support.ValidatorsTest do
use ExUnit.Case
import Club.Support.Validators
describe "validate_url" do
    setup context do
      # Builds an Ecto changeset with the :url param cast when the test
      # declares a `@tag url: ...`; tests without the tag get a nil dataset.
      dataset =
        case context do
          %{url: url} -> Ecto.Changeset.cast({%{}, %{url: :string}}, %{url: url}, [:url])
          _ -> nil
        end

      {:ok, dataset: dataset}
    end
@tag unit: true, url: "http://microsoft.com/some_path"
test "must be valid on a proper url", %{dataset: dataset} do
assert %{valid?: true, errors: []} = validate_url(dataset, :url)
end
@tag unit: true, url: "//microsoft.com/some_path"
test "must return error on scheme absence", %{dataset: dataset} do
assert %{valid?: false, errors: [url: {"doesn't have scheme", [validation: :url]}]} =
validate_url(dataset, :url)
end
@tag unit: true, url: "//microsoft.com/some_path"
test "must return custom error on scheme absence", %{dataset: dataset} do
custom_error = "no scheme"
assert %{valid?: false, errors: [url: {^custom_error, [validation: :url]}]} =
validate_url(dataset, :url, no_scheme_message: custom_error)
end
@tag unit: true, url: "ftp://microsoft.com/some_path"
test "must return error when scheme is not on the list allowed_schemes: []", %{
dataset: dataset
} do
assert %{valid?: false, errors: [url: {"scheme not allowed", [validation: :url]}]} =
validate_url(dataset, :url, allowed_schemes: ["http", "https"])
end
@tag unit: true, url: "ftp://microsoft.com/some_path"
test "must return custom error when scheme is not on the list allowed_schemes: []", %{
dataset: dataset
} do
custom_error = "scheme is not in the allowed list"
assert %{valid?: false, errors: [url: {^custom_error, [validation: :url]}]} =
validate_url(
dataset,
:url,
allowed_schemes: ["http", "https"],
scheme_not_allowed_message: custom_error
)
end
@tag unit: true, url: "http://microsoft.com/some_path"
test "must be valid on a proper url with resolvable host name if resolve: true set", %{
dataset: dataset
} do
assert %{valid?: true, errors: []} = validate_url(dataset, :url, resolve: true)
end
@tag unit: true, url: "smb://#{UUID.uuid4()}.com/some_path"
test "must return error on a proper url with unresolvable host name if resolve: true set", %{
dataset: dataset
} do
assert %{valid?: false, errors: [url: {"hostname unknown: NXDOMAIN", [validation: :url]}]} =
validate_url(dataset, :url, resolve: true)
end
@tag unit: true, url: "smb://#{UUID.uuid4()}.com/some_path"
test "must return custom error on a proper url with unresolvable host name if resolve: true set",
%{dataset: dataset} do
#
custom_error = "can't resolve domain"
assert %{valid?: false, errors: [url: {^custom_error, [validation: :url]}]} =
validate_url(dataset, :url, resolve: true, unresolvable_message: custom_error)
end
@tag unit: true, url: "smb://#{UUID.uuid4()}.com/some_path"
test "must return a list of errors", %{dataset: dataset} do
unresolvable_message = "can't resolve hostname"
scheme_not_allowed_message = "scheme not allowed"
validation_result =
validate_url(
dataset,
:url,
resolve: true,
unresolvable_message: unresolvable_message,
allowed_schemes: ["http", "https"],
scheme_not_allowed_message: scheme_not_allowed_message
)
assert %{valid?: false, errors: errors} = validation_result
assert {:url, {unresolvable_message, [validation: :url]}} in errors
assert {:url, {scheme_not_allowed_message, [validation: :url]}} in errors
assert length(errors) == 2
end
end
end
| 36.924528 | 101 | 0.612928 |
1c324647374dc3217e89c3b95d894424df4ef7dd | 493 | ex | Elixir | test/support/path_helpers.ex | gabheadz/annon.api | 9921d98e44c8472c133554dd55ea649c0f55726c | [
"MIT"
] | 328 | 2017-05-05T15:19:46.000Z | 2022-03-11T10:52:45.000Z | test/support/path_helpers.ex | gabheadz/annon.api | 9921d98e44c8472c133554dd55ea649c0f55726c | [
"MIT"
] | 83 | 2017-04-30T10:36:15.000Z | 2019-10-14T13:14:34.000Z | test/support/path_helpers.ex | gabheadz/annon.api | 9921d98e44c8472c133554dd55ea649c0f55726c | [
"MIT"
] | 29 | 2017-05-02T14:36:50.000Z | 2021-09-03T13:36:17.000Z | defmodule Annon.PathHelpers do
@moduledoc """
This module provider path generation helpers for various entities in Management API.
"""
def plugins_path(api_id),
do: "apis/#{api_id}/plugins"
def plugin_path(api_id, name),
do: "#{plugins_path(api_id)}/#{name}"
def apis_path,
do: "apis"
def api_path(api_id),
do: "#{apis_path()}/#{api_id}"
def requests_path,
do: "requests"
def request_path(request_id),
do: "#{requests_path()}/#{request_id}"
end
| 20.541667 | 86 | 0.663286 |
1c3253ce9d7925811a77fabd620e3c42b3ccca35 | 47,369 | ex | Elixir | lib/livebook/session.ex | apoorv-2204/elixir_learn_work_notebook_setup | 7ea75c1b53e0e9ce7ae89d386cc16b7277a1d86e | [
"Apache-2.0"
] | null | null | null | lib/livebook/session.ex | apoorv-2204/elixir_learn_work_notebook_setup | 7ea75c1b53e0e9ce7ae89d386cc16b7277a1d86e | [
"Apache-2.0"
] | null | null | null | lib/livebook/session.ex | apoorv-2204/elixir_learn_work_notebook_setup | 7ea75c1b53e0e9ce7ae89d386cc16b7277a1d86e | [
"Apache-2.0"
] | null | null | null | defmodule Livebook.Session do
@moduledoc false
# Server corresponding to a single notebook session.
#
# The process keeps the current notebook state and serves
# as a source of truth that multiple clients talk to.
# Receives update requests from the clients and notifies
# them of any changes applied to the notebook.
#
# ## Collaborative state
#
# The core concept is the `Livebook.Session.Data` structure
# to which we can apply reproducible operations.
# See `Livebook.Session.Data` for more information.
#
# ## Evaluation
#
# All regular sections are evaluated in the same process
# (the :main_flow evaluation container). On the other hand,
# each branching section is evaluated in its own process
# and thus runs concurrently.
#
# ### Implementation considerations
#
# In practice, every evaluation container is a `Livebook.Runtime.Evaluator`
# process, so we have one such process for the main flow and one
# for each branching section. Since a branching section inherits
# the evaluation context from the parent section, the last context
# needs to be copied from the main flow evaluator to the branching
# section evaluator. The latter synchronously asks the former for
# that context using `Livebook.Runtime.Evaluator.fetch_evaluation_context/3`.
# Consequently, in order to evaluate the first cell in a branching
# section, the main flow needs to be free of work, otherwise we wait.
# This assumptions are mirrored in by `Livebook.Session.Data` when
# determining cells for evaluation.
#
# Note: the context could be copied asynchronously if evaluator
# kept the contexts in its process dictionary, however the other
# evaluator could only read the whole process dictionary, thus
# allocating a lot of memory unnecessarily, which would be unacceptable
# for large data. By making a synchronous request to the evalutor
# for a single specific evaluation context we make sure to copy
# as little memory as necessary.
# The struct holds the basic session information that we track
# and pass around. The notebook and evaluation state is kept
# within the process state.
defstruct [:id, :pid, :origin, :notebook_name, :file, :images_dir, :created_at, :memory_usage]
use GenServer, restart: :temporary
alias Livebook.Session.{Data, FileGuard}
alias Livebook.{Utils, Notebook, Delta, Runtime, LiveMarkdown, FileSystem}
alias Livebook.Users.User
alias Livebook.Notebook.{Cell, Section}
@timeout :infinity
@main_container_ref :main_flow
@type t :: %__MODULE__{
id: id(),
pid: pid(),
origin: Notebook.ContentLoader.location() | nil,
notebook_name: String.t(),
file: FileSystem.File.t() | nil,
images_dir: FileSystem.File.t(),
created_at: DateTime.t(),
memory_usage: memory_usage()
}
@type state :: %{
session_id: id(),
data: Data.t(),
created_at: DateTime.t(),
runtime_monitor_ref: reference() | nil,
autosave_timer_ref: reference() | nil,
save_task_pid: pid() | nil,
saved_default_file: FileSystem.File.t() | nil,
memory_usage: memory_usage()
}
@type memory_usage ::
%{
runtime: Livebook.Runtime.runtime_memory() | nil,
system: Livebook.SystemResources.memory()
}
@typedoc """
An id assigned to every running session process.
"""
@type id :: Utils.id()
## API
@doc """
Starts a session server process.
## Options
  * `:id` (**required**) - a unique session identifier
  * `:notebook` - the initial `Notebook` structure (e.g. imported from a file)
  * `:origin` - location from where the notebook was obtained, can be either
    `{:file, file}`, a remote `{:url, url}`, or `nil`
  * `:file` - the file to which the notebook should be saved
  * `:copy_images_from` - a directory file to copy notebook images from
  * `:images` - a map from image name to its binary content, an alternative
    to `:copy_images_from` when the images are in memory
  * `:autosave_path` - a local directory to save notebooks without a file into.
    Defaults to `Livebook.Settings.autosave_path/0`
"""
@spec start_link(keyword()) :: {:ok, pid} | {:error, any()}
def start_link(opts) do
  # All options are validated and consumed in init/1.
  GenServer.start_link(__MODULE__, opts)
end
@doc """
Fetches session information from the session server.

Returns the `%__MODULE__{}` summary struct built by the server
(see `handle_call(:describe_self, ...)`).
"""
# Fix: the spec previously referenced `Session.t()`, but `Session` is not
# aliased in this module, so the type did not resolve. Use `t()` instead.
@spec get_by_pid(pid()) :: t()
def get_by_pid(pid) do
  GenServer.call(pid, :describe_self, @timeout)
end
@doc """
Registers a session client, so that the session is aware of it.

The client process is automatically unregistered when it terminates.
Returns the current session data, which the client can then keep in
sync with the server by subscribing to the `sessions:id` topic and
receiving operations to apply.
"""
@spec register_client(pid(), pid(), User.t()) :: Data.t()
def register_client(pid, client_pid, user) do
  request = {:register_client, client_pid, user}
  GenServer.call(pid, request, @timeout)
end
@doc """
Returns data of the given session.
"""
@spec get_data(pid()) :: Data.t()
def get_data(pid) do
  request = :get_data
  GenServer.call(pid, request, @timeout)
end
@doc """
Returns the current notebook structure.
"""
@spec get_notebook(pid()) :: Notebook.t()
def get_notebook(pid) do
  request = :get_notebook
  GenServer.call(pid, request, @timeout)
end
@doc """
Computes the file name for download.

Note that the name doesn't have any extension.
If the notebook has an associated file, the same name is used,
otherwise it is computed from the notebook title.
"""
@spec file_name_for_download(t()) :: String.t()
def file_name_for_download(session)

def file_name_for_download(%{file: nil} = session) do
  # No backing file, so derive the name from the notebook title
  notebook_name_to_file_name(session.notebook_name)
end

def file_name_for_download(session) do
  basename = FileSystem.File.name(session.file)
  Path.rootname(basename)
end
@doc """
Fetches assets matching the given hash.
The assets are cached locally and fetched from the runtime
only once.
See `local_asset_path/2` for locating a specific asset.
"""
@spec fetch_assets(pid(), String.t()) :: :ok | {:error, String.t()}
def fetch_assets(pid, hash) do
  local_assets_path = local_assets_path(hash)
  if File.exists?(local_assets_path) do
    # Already cached locally, nothing to do
    :ok
  else
    with {:ok, runtime, archive_path} <-
           GenServer.call(pid, {:get_runtime_and_archive_path, hash}, @timeout) do
      fun = fn ->
        # Make sure the file hasn't been fetched by this point
        unless File.exists?(local_assets_path) do
          {:ok, archive_binary} = Runtime.read_file(runtime, archive_path)
          extract_archive!(archive_binary, local_assets_path)
        end
      end
      # Fetch assets in a separate process and avoid several
      # simultaneous fetches of the same assets
      case Livebook.Utils.UniqueTask.run(hash, fun) do
        :ok -> :ok
        :error -> {:error, "failed to fetch assets"}
      end
    end
  end
end
@doc """
Asynchronously asks the server to update the notebook attributes.
"""
@spec set_notebook_attributes(pid(), map()) :: :ok
def set_notebook_attributes(pid, attrs) do
  message = {:set_notebook_attributes, self(), attrs}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to insert a section at the given index.
"""
@spec insert_section(pid(), non_neg_integer()) :: :ok
def insert_section(pid, index) do
  message = {:insert_section, self(), index}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to insert a section at the given index,
branching out of the specified section.
"""
@spec insert_section_into(pid(), Section.id(), non_neg_integer()) :: :ok
def insert_section_into(pid, section_id, index) do
  message = {:insert_section_into, self(), section_id, index}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to set the parent of the given section.
"""
@spec set_section_parent(pid(), Section.id(), Section.id()) :: :ok
def set_section_parent(pid, section_id, parent_id) do
  message = {:set_section_parent, self(), section_id, parent_id}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to clear the parent of the given section.
"""
@spec unset_section_parent(pid(), Section.id()) :: :ok
def unset_section_parent(pid, section_id) do
  message = {:unset_section_parent, self(), section_id}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to insert a cell of the given type
into the section at the given index.
"""
@spec insert_cell(pid(), Section.id(), non_neg_integer(), Cell.type(), map()) :: :ok
def insert_cell(pid, section_id, index, type, attrs \\ %{}) do
  message = {:insert_cell, self(), section_id, index, type, attrs}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to delete the given section,
optionally deleting the cells it contains.
"""
@spec delete_section(pid(), Section.id(), boolean()) :: :ok
def delete_section(pid, section_id, delete_cells) do
  message = {:delete_section, self(), section_id, delete_cells}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to delete the given cell.
"""
@spec delete_cell(pid(), Cell.id()) :: :ok
def delete_cell(pid, cell_id) do
  message = {:delete_cell, self(), cell_id}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to restore a previously deleted cell.
"""
@spec restore_cell(pid(), Cell.id()) :: :ok
def restore_cell(pid, cell_id) do
  message = {:restore_cell, self(), cell_id}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to move the given cell by the offset.
"""
@spec move_cell(pid(), Cell.id(), integer()) :: :ok
def move_cell(pid, cell_id, offset) do
  message = {:move_cell, self(), cell_id, offset}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to move the given section by the offset.
"""
@spec move_section(pid(), Section.id(), integer()) :: :ok
def move_section(pid, section_id, offset) do
  message = {:move_section, self(), section_id, offset}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to convert the given smart cell
into a regular code cell.
"""
@spec convert_smart_cell(pid(), Cell.id()) :: :ok
def convert_smart_cell(pid, cell_id) do
  message = {:convert_smart_cell, self(), cell_id}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to queue the given cell for evaluation.
"""
@spec queue_cell_evaluation(pid(), Cell.id()) :: :ok
def queue_cell_evaluation(pid, cell_id) do
  message = {:queue_cell_evaluation, self(), cell_id}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to queue all cells in the given
section for evaluation.
"""
@spec queue_section_evaluation(pid(), Section.id()) :: :ok
def queue_section_evaluation(pid, section_id) do
  message = {:queue_section_evaluation, self(), section_id}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to queue evaluation of all cells
bound to the given input.
"""
@spec queue_bound_cells_evaluation(pid(), Data.input_id()) :: :ok
def queue_bound_cells_evaluation(pid, input_id) do
  message = {:queue_bound_cells_evaluation, self(), input_id}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to evaluate the whole notebook.

All outdated (new/stale/changed) cells, as well as cells given
as `forced_cell_ids` are scheduled for evaluation.
"""
@spec queue_full_evaluation(pid(), list(Cell.id())) :: :ok
def queue_full_evaluation(pid, forced_cell_ids) do
  message = {:queue_full_evaluation, self(), forced_cell_ids}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to cancel evaluation of the given cell.
"""
@spec cancel_cell_evaluation(pid(), Cell.id()) :: :ok
def cancel_cell_evaluation(pid, cell_id) do
  message = {:cancel_cell_evaluation, self(), cell_id}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to erase all cell outputs.
"""
@spec erase_outputs(pid()) :: :ok
def erase_outputs(pid) do
  message = {:erase_outputs, self()}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to rename the notebook.
"""
@spec set_notebook_name(pid(), String.t()) :: :ok
def set_notebook_name(pid, name) do
  message = {:set_notebook_name, self(), name}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to rename the given section.
"""
@spec set_section_name(pid(), Section.id(), String.t()) :: :ok
def set_section_name(pid, section_id, name) do
  message = {:set_section_name, self(), section_id, name}
  GenServer.cast(pid, message)
end
@doc """
Sends a cell delta to apply to the server.
"""
@spec apply_cell_delta(
        pid(),
        Cell.id(),
        Data.cell_source_tag(),
        Delta.t(),
        Data.cell_revision()
      ) :: :ok
def apply_cell_delta(pid, cell_id, tag, delta, revision) do
  message = {:apply_cell_delta, self(), cell_id, tag, delta, revision}
  GenServer.cast(pid, message)
end
@doc """
Informs at what revision the given client is.

This helps to remove old deltas that are no longer necessary.
"""
@spec report_cell_revision(
        pid(),
        Cell.id(),
        Data.cell_source_tag(),
        Data.cell_revision()
      ) :: :ok
def report_cell_revision(pid, cell_id, tag, revision) do
  message = {:report_cell_revision, self(), cell_id, tag, revision}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to update the given cell's attributes.
"""
@spec set_cell_attributes(pid(), Cell.id(), map()) :: :ok
def set_cell_attributes(pid, cell_id, attrs) do
  message = {:set_cell_attributes, self(), cell_id, attrs}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to set a new value for the given input.
"""
@spec set_input_value(pid(), Data.input_id(), term()) :: :ok
def set_input_value(pid, input_id, value) do
  message = {:set_input_value, self(), input_id, value}
  GenServer.cast(pid, message)
end
@doc """
Connects to the given runtime.

Note that this results in initializing the corresponding remote node
with modules and processes required for evaluation.
"""
@spec connect_runtime(pid(), Runtime.t()) :: :ok
def connect_runtime(pid, runtime) do
  message = {:connect_runtime, self(), runtime}
  GenServer.cast(pid, message)
end
@doc """
Asynchronously asks the server to save the notebook to a new file
(or detach it when `nil` is given).
"""
@spec set_file(pid(), FileSystem.File.t() | nil) :: :ok
def set_file(pid, file) do
  message = {:set_file, self(), file}
  GenServer.cast(pid, message)
end
@doc """
Sends save request to the server.

If there's a file set and the notebook changed since the last save,
it will be persisted to said file.
Note that notebooks are automatically persisted every @autosave_interval
milliseconds.
"""
@spec save(pid()) :: :ok
def save(pid) do
  message = :save
  GenServer.cast(pid, message)
end
@doc """
Synchronous version of `save/1`, blocking until the save completes.
"""
@spec save_sync(pid()) :: :ok
def save_sync(pid) do
  request = :save_sync
  GenServer.call(pid, request, @timeout)
end
@doc """
Closes one or more sessions.

This results in saving the file and broadcasting
a :closed message to the session topic.
"""
@spec close(pid() | [pid()]) :: :ok
def close(pid) do
  # Dispatch the close request to all given sessions concurrently
  pids = List.wrap(pid)
  _ = call_many(pids, :close)
  Livebook.SystemResources.update()
  :ok
end
@doc """
Disconnects one or more sessions from the current runtime.

Note that this results in clearing the evaluation state.
"""
@spec disconnect_runtime(pid() | [pid()]) :: :ok
def disconnect_runtime(pid) do
  pids = List.wrap(pid)
  request = {:disconnect_runtime, self()}
  _ = call_many(pids, request)
  Livebook.SystemResources.update()
  :ok
end
# Issues the same call to many servers concurrently: all requests are
# sent first, then the replies are awaited, so total latency is bounded
# by the slowest server rather than the sum.
defp call_many(list, request) do
  refs = Enum.map(list, &:gen_server.send_request(&1, request))
  for ref <- refs, do: :gen_server.wait_response(ref, :infinity)
end
## Callbacks
@impl true
def init(opts) do
id = Keyword.fetch!(opts, :id)
{:ok, worker_pid} = Livebook.Session.Worker.start_link(id)
with {:ok, state} <- init_state(id, worker_pid, opts),
:ok <-
if(copy_images_from = opts[:copy_images_from],
do: copy_images(state, copy_images_from),
else: :ok
),
:ok <-
if(images = opts[:images],
do: dump_images(state, images),
else: :ok
) do
state = schedule_autosave(state)
{:ok, state}
else
{:error, error} ->
{:stop, error}
end
end
# Builds the initial server state map.
#
# NOTE(review): this map carries :autosave_path and :worker_pid, which are
# not listed in the @type state above — consider updating the typespec.
defp init_state(id, worker_pid, opts) do
  with {:ok, data} <- init_data(opts) do
    state = %{
      session_id: id,
      data: data,
      created_at: DateTime.utc_now(),
      runtime_monitor_ref: nil,
      autosave_timer_ref: nil,
      autosave_path: opts[:autosave_path],
      save_task_pid: nil,
      saved_default_file: nil,
      memory_usage: %{runtime: nil, system: Livebook.SystemResources.memory()},
      worker_pid: worker_pid
    }
    {:ok, state}
  end
end
# Builds the initial `Livebook.Session.Data` from options.
#
# When a file is given, it is locked via FileGuard so that no other
# session can write to the same notebook file concurrently.
defp init_data(opts) do
  notebook = Keyword.get_lazy(opts, :notebook, &default_notebook/0)
  file = opts[:file]
  origin = opts[:origin]
  data = Data.new(notebook)
  data = %{data | origin: origin}
  if file do
    case FileGuard.lock(file, self()) do
      :ok ->
        {:ok, %{data | file: file}}
      {:error, :already_in_use} ->
        {:error, "the given file is already in use"}
    end
  else
    {:ok, data}
  end
end
# Builds the default notebook: a single section containing one empty code cell.
defp default_notebook() do
  cell = Cell.new(:code)
  section = %{Section.new() | cells: [cell]}
  %{Notebook.new() | sections: [section]}
end
# Arms the autosave timer according to the notebook setting and stores
# the timer reference; when autosave is disabled (nil interval) the
# reference is cleared.
defp schedule_autosave(state) do
  interval_s = state.data.notebook.autosave_interval_s
  timer_ref =
    if interval_s do
      Process.send_after(self(), :autosave, interval_s * 1000)
    end
  %{state | autosave_timer_ref: timer_ref}
end
# Cancels the pending autosave timer, if any.
defp unschedule_autosave(%{autosave_timer_ref: nil} = state), do: state
defp unschedule_autosave(state) do
  # cancel_timer returning false means the timer already fired, so the
  # :autosave message is (or will be) in our mailbox - drain it to avoid
  # a spurious save after rescheduling
  if Process.cancel_timer(state.autosave_timer_ref) == false do
    receive do
      :autosave -> :ok
    end
  end
  %{state | autosave_timer_ref: nil}
end
# Handlers for the synchronous client API defined above.
@impl true
def handle_call(:describe_self, _from, state) do
  {:reply, self_from_state(state), state}
end
def handle_call({:register_client, client_pid, user}, _from, state) do
  # Monitor the client so we can unregister it when it terminates
  # (handled by the :DOWN clause in handle_info/2)
  Process.monitor(client_pid)
  state = handle_operation(state, {:client_join, client_pid, user})
  {:reply, state.data, state}
end
def handle_call(:get_data, _from, state) do
  {:reply, state.data, state}
end
def handle_call({:get_runtime_and_archive_path, hash}, _from, state) do
  # Used by fetch_assets/2 to locate the archive within the runtime
  assets_info = Notebook.find_asset_info(state.data.notebook, hash)
  runtime = state.data.runtime
  reply =
    cond do
      assets_info == nil ->
        {:error, "unknown hash"}
      runtime == nil ->
        {:error, "no runtime"}
      true ->
        {:ok, runtime, assets_info.archive_path}
    end
  {:reply, reply, state}
end
def handle_call(:get_notebook, _from, state) do
  {:reply, state.data.notebook, state}
end
def handle_call(:save_sync, _from, state) do
  {:reply, :ok, maybe_save_notebook_sync(state)}
end
def handle_call(:close, _from, state) do
  # Persist the notebook one last time before stopping
  maybe_save_notebook_sync(state)
  broadcast_message(state.session_id, :session_closed)
  {:stop, :shutdown, :ok, state}
end
def handle_call({:disconnect_runtime, client_pid}, _from, state) do
  if old_runtime = state.data.runtime do
    Runtime.disconnect(old_runtime)
  end
  {:reply, :ok,
   %{state | runtime_monitor_ref: nil}
   |> handle_operation({:set_runtime, client_pid, nil})}
end
# Handlers for the asynchronous client API defined above. Most clauses
# simply turn the request into a `Livebook.Session.Data` operation and
# route it through handle_operation/2.
@impl true
def handle_cast({:set_notebook_attributes, client_pid, attrs}, state) do
  operation = {:set_notebook_attributes, client_pid, attrs}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:insert_section, client_pid, index}, state) do
  # Include new id in the operation, so it's reproducible
  operation = {:insert_section, client_pid, index, Utils.random_id()}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:insert_section_into, client_pid, section_id, index}, state) do
  # Include new id in the operation, so it's reproducible
  operation = {:insert_section_into, client_pid, section_id, index, Utils.random_id()}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:set_section_parent, client_pid, section_id, parent_id}, state) do
  operation = {:set_section_parent, client_pid, section_id, parent_id}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:unset_section_parent, client_pid, section_id}, state) do
  operation = {:unset_section_parent, client_pid, section_id}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:insert_cell, client_pid, section_id, index, type, attrs}, state) do
  # Include new id in the operation, so it's reproducible
  operation = {:insert_cell, client_pid, section_id, index, type, Utils.random_id(), attrs}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:delete_section, client_pid, section_id, delete_cells}, state) do
  operation = {:delete_section, client_pid, section_id, delete_cells}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:delete_cell, client_pid, cell_id}, state) do
  operation = {:delete_cell, client_pid, cell_id}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:restore_cell, client_pid, cell_id}, state) do
  operation = {:restore_cell, client_pid, cell_id}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:move_cell, client_pid, cell_id, offset}, state) do
  operation = {:move_cell, client_pid, cell_id, offset}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:move_section, client_pid, section_id, offset}, state) do
  operation = {:move_section, client_pid, section_id, offset}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:convert_smart_cell, client_pid, cell_id}, state) do
  # Converting replaces the smart cell with a code cell at the same
  # position, carrying over the source and outputs
  state =
    with {:ok, %Cell.Smart{} = cell, section} <-
           Notebook.fetch_cell_and_section(state.data.notebook, cell_id) do
      index = Enum.find_index(section.cells, &(&1 == cell))
      attrs = Map.take(cell, [:source, :outputs])
      state
      |> handle_operation({:delete_cell, client_pid, cell.id})
      |> handle_operation(
        {:insert_cell, client_pid, section.id, index, :code, Utils.random_id(), attrs}
      )
    else
      _ -> state
    end
  {:noreply, state}
end
def handle_cast({:queue_cell_evaluation, client_pid, cell_id}, state) do
  operation = {:queue_cells_evaluation, client_pid, [cell_id]}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:queue_section_evaluation, client_pid, section_id}, state) do
  case Notebook.fetch_section(state.data.notebook, section_id) do
    {:ok, section} ->
      # Only evaluable cells (not markdown, etc.) are queued
      cell_ids = for cell <- section.cells, Cell.evaluable?(cell), do: cell.id
      operation = {:queue_cells_evaluation, client_pid, cell_ids}
      {:noreply, handle_operation(state, operation)}
    :error ->
      {:noreply, state}
  end
end
def handle_cast({:queue_bound_cells_evaluation, client_pid, input_id}, state) do
  cell_ids =
    for {bound_cell, _} <- Data.bound_cells_with_section(state.data, input_id),
        do: bound_cell.id
  operation = {:queue_cells_evaluation, client_pid, cell_ids}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:queue_full_evaluation, client_pid, forced_cell_ids}, state) do
  cell_ids = Data.cell_ids_for_full_evaluation(state.data, forced_cell_ids)
  operation = {:queue_cells_evaluation, client_pid, cell_ids}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:cancel_cell_evaluation, client_pid, cell_id}, state) do
  operation = {:cancel_cell_evaluation, client_pid, cell_id}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:erase_outputs, client_pid}, state) do
  operation = {:erase_outputs, client_pid}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:set_notebook_name, client_pid, name}, state) do
  operation = {:set_notebook_name, client_pid, name}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:set_section_name, client_pid, section_id, name}, state) do
  operation = {:set_section_name, client_pid, section_id, name}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:apply_cell_delta, client_pid, cell_id, tag, delta, revision}, state) do
  operation = {:apply_cell_delta, client_pid, cell_id, tag, delta, revision}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:report_cell_revision, client_pid, cell_id, tag, revision}, state) do
  operation = {:report_cell_revision, client_pid, cell_id, tag, revision}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:set_cell_attributes, client_pid, cell_id, attrs}, state) do
  operation = {:set_cell_attributes, client_pid, cell_id, attrs}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:set_input_value, client_pid, input_id, value}, state) do
  operation = {:set_input_value, client_pid, input_id, value}
  {:noreply, handle_operation(state, operation)}
end
def handle_cast({:connect_runtime, client_pid, runtime}, state) do
  # Disconnect any previously connected runtime first
  if old_runtime = state.data.runtime do
    Runtime.disconnect(old_runtime)
  end
  state = do_connect_runtime(runtime, state)
  {:noreply, handle_operation(state, {:set_runtime, client_pid, runtime})}
end
def handle_cast({:set_file, client_pid, file}, state) do
  # Acquire a lock on the new file before releasing the old one, so that
  # we never end up without a lock on the file we are writing to
  if file do
    FileGuard.lock(file, self())
  else
    :ok
  end
  |> case do
    :ok ->
      if state.data.file do
        FileGuard.unlock(state.data.file)
      end
      {:noreply, handle_operation(state, {:set_file, client_pid, file})}
    {:error, :already_in_use} ->
      broadcast_error(state.session_id, "failed to set new file because it is already in use")
      {:noreply, state}
  end
end
def handle_cast(:save, state) do
  {:noreply, maybe_save_notebook_async(state)}
end
# Handlers for monitor notifications, runtime messages, timers and
# internal task completions.
@impl true
def handle_info({:DOWN, ref, :process, _, _}, %{runtime_monitor_ref: ref} = state) do
  # The monitored runtime went down - clear it from the session data
  broadcast_info(state.session_id, "runtime node terminated unexpectedly")
  {:noreply,
   %{state | runtime_monitor_ref: nil}
   |> handle_operation({:set_runtime, self(), nil})}
end
def handle_info({:DOWN, _, :process, pid, _}, state) do
  # A registered client terminated - unregister it
  state =
    if Map.has_key?(state.data.clients_map, pid) do
      handle_operation(state, {:client_leave, pid})
    else
      state
    end
  {:noreply, state}
end
def handle_info({:runtime_evaluation_output, cell_id, output}, state) do
  operation = {:add_cell_evaluation_output, self(), cell_id, output}
  {:noreply, handle_operation(state, operation)}
end
def handle_info({:runtime_evaluation_response, cell_id, response, metadata}, state) do
  # Memory usage piggybacks on the evaluation response; strip it from the
  # metadata before building the operation
  {memory_usage, metadata} = Map.pop(metadata, :memory_usage)
  operation = {:add_cell_evaluation_response, self(), cell_id, response, metadata}
  {:noreply,
   state
   |> put_memory_usage(memory_usage)
   |> handle_operation(operation)
   |> notify_update()}
end
def handle_info({:runtime_evaluation_input, cell_id, reply_to, input_id}, state) do
  # The runtime asks for an input value; also record that the cell is
  # bound to the input, so changing the input marks the cell stale
  {reply, state} =
    with {:ok, cell, _section} <- Notebook.fetch_cell_and_section(state.data.notebook, cell_id),
         {:ok, value} <- Map.fetch(state.data.input_values, input_id) do
      state = handle_operation(state, {:bind_input, self(), cell.id, input_id})
      {{:ok, value}, state}
    else
      _ -> {:error, state}
    end
  send(reply_to, {:runtime_evaluation_input_reply, reply})
  {:noreply, state}
end
def handle_info({:runtime_container_down, container_ref, message}, state) do
  broadcast_error(state.session_id, "evaluation process terminated - #{message}")
  # Either the main flow or a specific branching section failed
  operation =
    case container_ref do
      @main_container_ref -> {:reflect_main_evaluation_failure, self()}
      section_id -> {:reflect_evaluation_failure, self(), section_id}
    end
  {:noreply, handle_operation(state, operation)}
end
def handle_info(:autosave, state) do
  {:noreply, state |> maybe_save_notebook_async() |> schedule_autosave()}
end
def handle_info({:user_change, user}, state) do
  operation = {:update_user, self(), user}
  {:noreply, handle_operation(state, operation)}
end
def handle_info({:save_finished, pid, result, file, default?}, %{save_task_pid: pid} = state) do
  # Only the currently tracked save task is honoured (matched via pid)
  state = %{state | save_task_pid: nil}
  {:noreply, handle_save_finished(state, result, file, default?)}
end
def handle_info({:runtime_memory_usage, runtime_memory}, state) do
  {:noreply, state |> put_memory_usage(runtime_memory) |> notify_update()}
end
def handle_info({:runtime_smart_cell_definitions, definitions}, state) do
  operation = {:set_smart_cell_definitions, self(), definitions}
  {:noreply, handle_operation(state, operation)}
end
def handle_info({:runtime_smart_cell_started, id, info}, state) do
  case Notebook.fetch_cell_and_section(state.data.notebook, id) do
    {:ok, cell, _section} ->
      # Reconcile the runtime-provided source with the current one via a delta
      delta = Livebook.JSInterop.diff(cell.source, info.source)
      operation = {:smart_cell_started, self(), id, delta, info.js_view, info.editor}
      {:noreply, handle_operation(state, operation)}
    :error ->
      {:noreply, state}
  end
end
def handle_info({:runtime_smart_cell_update, id, attrs, source}, state) do
  case Notebook.fetch_cell_and_section(state.data.notebook, id) do
    {:ok, cell, _section} ->
      delta = Livebook.JSInterop.diff(cell.source, source)
      operation = {:update_smart_cell, self(), id, attrs, delta}
      {:noreply, handle_operation(state, operation)}
    :error ->
      {:noreply, state}
  end
end
# Ignore any other messages to keep the mailbox drained
def handle_info(_message, state), do: {:noreply, state}
@impl true
def terminate(_reason, state) do
  # Best-effort removal of the session's temporary directory
  _ = cleanup_tmp_dir(state.session_id)
  :ok
end
# ---
# Builds the public session summary struct from the internal state.
defp self_from_state(state) do
  %__MODULE__{
    id: state.session_id,
    pid: self(),
    origin: state.data.origin,
    notebook_name: state.data.notebook.name,
    file: state.data.file,
    images_dir: images_dir_from_state(state),
    created_at: state.created_at,
    memory_usage: state.memory_usage
  }
end
# Returns the images directory: next to the notebook file when one is
# set, otherwise inside the session's temporary directory.
defp images_dir_from_state(%{data: %{file: nil}, session_id: id}) do
  tmp_dir = session_tmp_dir(id)
  FileSystem.File.resolve(tmp_dir, "images/")
end
defp images_dir_from_state(%{data: %{file: file}}) do
  images_dir_for_notebook(file)
end
@doc """
Returns images directory corresponding to the given notebook file.
"""
@spec images_dir_for_notebook(FileSystem.File.t()) :: FileSystem.File.t()
def images_dir_for_notebook(file) do
  dir = FileSystem.File.containing_dir(file)
  FileSystem.File.resolve(dir, "images/")
end
# Returns the session-specific temporary directory as a local file handle.
defp session_tmp_dir(session_id) do
  dir_path =
    livebook_tmp_path()
    |> Path.join("sessions/#{session_id}")
    |> FileSystem.Utils.ensure_dir_path()
  FileSystem.File.local(dir_path)
end
# Removes the session's temporary directory and everything in it.
defp cleanup_tmp_dir(session_id) do
  session_id
  |> session_tmp_dir()
  |> FileSystem.File.remove()
end
# Returns the local cache directory for assets with the given hash.
defp local_assets_path(hash) do
  livebook_tmp_path()
  |> Path.join("assets")
  |> Path.join(encode_path_component(hash))
end
@doc """
Returns a local path to asset matching the given
hash and path.
The file is not guaranteed to exist. See `fetch_assets/2`
for fetching assets through a particular session.
The path is expected to be a simple relative path
within the assets directory, otherwise an error is
returned.
"""
@spec local_asset_path(String.t(), String.t()) :: {:ok, String.t()} | :error
def local_asset_path(hash, asset_path) do
  assets_path = local_assets_path(hash)
  local_asset_path = Path.expand(asset_path, assets_path)
  # Path.expand resolves ".." segments, so this prefix check rejects
  # any path traversal escaping the assets directory
  if String.starts_with?(local_asset_path, assets_path <> "/") do
    {:ok, local_asset_path}
  else
    :error
  end
end
# Replaces filesystem-significant characters so the value can be used
# safely as a single path segment.
defp encode_path_component(component) do
  unsafe_characters = [".", "/", "\\", ":"]
  String.replace(component, unsafe_characters, "_")
end
# Returns the root temporary directory used by Livebook on this machine.
defp livebook_tmp_path() do
  System.tmp_dir!()
  |> Path.expand()
  |> Path.join("livebook")
end
# Copies images from the source directory into the session images
# directory. A missing source directory is not an error.
defp copy_images(state, source) do
  images_dir = images_dir_from_state(state)
  with {:ok, source_exists?} <- FileSystem.File.exists?(source) do
    if source_exists? do
      FileSystem.File.copy(source, images_dir)
    else
      :ok
    end
  end
end
# Moves images from the source directory into the session images
# directory, preferring a cheap rename when the destination does not
# exist yet. A missing source directory is not an error.
defp move_images(state, source) do
  images_dir = images_dir_from_state(state)
  with {:ok, source_exists?} <- FileSystem.File.exists?(source) do
    if source_exists? do
      with {:ok, destination_exists?} <- FileSystem.File.exists?(images_dir) do
        if not destination_exists? do
          # If the directory doesn't exist, we can just change
          # the directory name, which is more efficient if
          # available in the given file system
          FileSystem.File.rename(source, images_dir)
        else
          # If the directory exists, we use copy to place
          # the images there
          with :ok <- FileSystem.File.copy(source, images_dir) do
            FileSystem.File.remove(source)
          end
        end
      end
    else
      :ok
    end
  end
end
# Writes the given in-memory images into the session images directory.
#
# `images` enumerates `{filename, binary_content}` pairs. Returns `:ok`,
# or the first non-`:ok` result from `FileSystem.File.write/2`.
#
# Improvement: uses `Enum.reduce_while/3` to stop at the first error
# instead of continuing to iterate over the remaining (skipped) entries;
# the returned value is identical to the previous `Enum.reduce/3` version.
defp dump_images(state, images) do
  images_dir = images_dir_from_state(state)
  Enum.reduce_while(images, :ok, fn {filename, content}, :ok ->
    file = FileSystem.File.resolve(images_dir, filename)
    case FileSystem.File.write(file, content) do
      :ok -> {:cont, :ok}
      error -> {:halt, error}
    end
  end)
end
# Connects to the runtime and keeps the monitor reference, so that the
# :DOWN clause in handle_info/2 can detect the runtime terminating.
# Runtime broadcasts are routed through the session worker process.
defp do_connect_runtime(runtime, state) do
  runtime_monitor_ref = Runtime.connect(runtime, runtime_broadcast_to: state.worker_pid)
  %{state | runtime_monitor_ref: runtime_monitor_ref}
end
# Given any operation on `Livebook.Session.Data`, the process
# does the following:
#
#   * broadcasts the operation to all clients immediately,
#     so that they can update their local `Livebook.Session.Data`
#
#   * applies the operation to own local `Livebook.Session.Data`
#
#   * if necessary, performs the relevant actions (e.g. starts cell evaluation),
#     to reflect the new `Livebook.Session.Data`
#
# Note that the operation is broadcast before being validated locally;
# an invalid operation is a no-op here and, presumably, on the clients
# applying the same deterministic logic.
defp handle_operation(state, operation) do
  broadcast_operation(state.session_id, operation)
  case Data.apply_operation(state.data, operation) do
    {:ok, new_data, actions} ->
      %{state | data: new_data}
      |> after_operation(state, operation)
      |> handle_actions(actions)
    :error ->
      # Invalid operation for the current data - ignore it
      state
  end
end
# Side effects to run after an operation has been successfully applied.
# Receives both the new and the previous state, since some effects need
# data that the operation just removed.
defp after_operation(state, _prev_state, {:set_notebook_name, _pid, _name}) do
  notify_update(state)
end
defp after_operation(state, _prev_state, {:set_runtime, _pid, runtime}) do
  if runtime do
    state
  else
    # Runtime went away - reset the runtime part of memory usage
    state
    |> put_memory_usage(nil)
    |> notify_update()
  end
end
defp after_operation(state, prev_state, {:set_file, _pid, _file}) do
  # Carry the images over to the new images location; when the previous
  # location was the temporary session dir we can move instead of copy
  prev_images_dir = images_dir_from_state(prev_state)
  if prev_state.data.file do
    copy_images(state, prev_images_dir)
  else
    move_images(state, prev_images_dir)
  end
  |> case do
    :ok ->
      :ok
    {:error, message} ->
      broadcast_error(state.session_id, "failed to copy images - #{message}")
  end
  notify_update(state)
end
defp after_operation(
       state,
       _prev_state,
       {:set_notebook_attributes, _client_pid, %{autosave_interval_s: _}}
     ) do
  # The autosave interval changed - re-arm the timer
  state
  |> unschedule_autosave()
  |> schedule_autosave()
end
defp after_operation(state, prev_state, {:client_join, _client_pid, user}) do
  # Subscribe to user updates on first join of this user
  unless Map.has_key?(prev_state.data.users_map, user.id) do
    Phoenix.PubSub.subscribe(Livebook.PubSub, "users:#{user.id}")
  end
  state
end
defp after_operation(state, prev_state, {:client_leave, client_pid}) do
  # Unsubscribe once the user's last client is gone
  user_id = prev_state.data.clients_map[client_pid]
  unless Map.has_key?(state.data.users_map, user_id) do
    Phoenix.PubSub.unsubscribe(Livebook.PubSub, "users:#{user_id}")
  end
  state
end
defp after_operation(state, _prev_state, {:delete_cell, _client_pid, cell_id}) do
  entry = Enum.find(state.data.bin_entries, fn entry -> entry.cell.id == cell_id end)
  # The session LV drops cell's source, so we send them
  # the complete bin entry to override
  broadcast_message(state.session_id, {:hydrate_bin_entries, [entry]})
  state
end
defp after_operation(state, prev_state, {:delete_section, _client_pid, section_id, true}) do
  # The section is already gone from the new state, so look it up in the
  # previous one to find which cells moved to the bin
  {:ok, section} = Notebook.fetch_section(prev_state.data.notebook, section_id)
  cell_ids = Enum.map(section.cells, & &1.id)
  entries = Enum.filter(state.data.bin_entries, fn entry -> entry.cell.id in cell_ids end)
  broadcast_message(state.session_id, {:hydrate_bin_entries, entries})
  state
end
defp after_operation(
       state,
       _prev_state,
       {:apply_cell_delta, _client_pid, cell_id, tag, _delta, _revision}
     ) do
  # Changes to a smart cell's secondary (editor) source are forwarded
  # to the cell's JS view process
  with :secondary <- tag,
       {:ok, %Cell.Smart{} = cell, _section} <-
         Notebook.fetch_cell_and_section(state.data.notebook, cell_id) do
    send(cell.js_view.pid, {:editor_source, cell.editor.source})
  end
  state
end
# No side effects for any other operation
defp after_operation(state, _prev_state, _operation), do: state
# Applies each action returned by Data.apply_operation/2, in order,
# threading the state through.
defp handle_actions(state, actions) do
  Enum.reduce(actions, state, fn action, acc -> handle_action(acc, action) end)
end
# Executes a single action emitted by Data.apply_operation/2.
defp handle_action(state, :start_runtime) do
  # Initialize and connect the configured default runtime
  {runtime_module, args} = Livebook.Config.default_runtime()
  case apply(runtime_module, :init, args) do
    {:ok, runtime} ->
      state = do_connect_runtime(runtime, state)
      handle_operation(state, {:set_runtime, self(), runtime})
    {:error, error} ->
      broadcast_error(state.session_id, "failed to setup runtime - #{error}")
      handle_operation(state, {:set_runtime, self(), nil})
  end
end
defp handle_action(state, {:start_evaluation, cell, section}) do
  # The evaluation "file" is the notebook path with a #cell suffix,
  # used for error reporting in stacktraces
  path =
    case state.data.file do
      nil -> ""
      file -> file.path
    end
  file = path <> "#cell"
  smart_cell_ref =
    case cell do
      %Cell.Smart{} -> cell.id
      _ -> nil
    end
  opts = [file: file, smart_cell_ref: smart_cell_ref]
  locator = {container_ref_for_section(section), cell.id}
  base_locator = find_base_locator(state.data, cell, section)
  Runtime.evaluate_code(state.data.runtime, cell.source, locator, base_locator, opts)
  # The digest lets us later tell whether the evaluated source is stale
  evaluation_digest = :erlang.md5(cell.source)
  handle_operation(state, {:evaluation_started, self(), cell.id, evaluation_digest})
end
defp handle_action(state, {:stop_evaluation, section}) do
  if state.data.runtime do
    Runtime.drop_container(state.data.runtime, container_ref_for_section(section))
  end
  state
end
defp handle_action(state, {:forget_evaluation, cell, section}) do
  if state.data.runtime do
    Runtime.forget_evaluation(state.data.runtime, {container_ref_for_section(section), cell.id})
  end
  state
end
defp handle_action(state, {:start_smart_cell, cell, section}) do
  if state.data.runtime do
    base_locator = find_base_locator(state.data, cell, section, existing: true)
    Runtime.start_smart_cell(state.data.runtime, cell.kind, cell.id, cell.attrs, base_locator)
  end
  state
end
defp handle_action(state, {:set_smart_cell_base, cell, section, parent}) do
  if state.data.runtime do
    # nil parent means the section start (no preceding cell)
    base_locator =
      case parent do
        nil ->
          {container_ref_for_section(section), nil}
        {parent_cell, parent_section} ->
          {container_ref_for_section(parent_section), parent_cell.id}
      end
    Runtime.set_smart_cell_base_locator(state.data.runtime, cell.id, base_locator)
  end
  state
end
defp handle_action(state, {:stop_smart_cell, cell}) do
  if state.data.runtime do
    Runtime.stop_smart_cell(state.data.runtime, cell.id)
  end
  state
end
# Other actions require no session-side work
defp handle_action(state, _action), do: state
# Convenience wrappers that tag a payload before publishing it on the
# session's PubSub topic.
defp broadcast_operation(session_id, operation),
  do: broadcast_message(session_id, {:operation, operation})

defp broadcast_error(session_id, error), do: broadcast_message(session_id, {:error, error})

defp broadcast_info(session_id, info), do: broadcast_message(session_id, {:info, info})

# Publishes `message` to every subscriber of this session's topic.
defp broadcast_message(session_id, message) do
  Phoenix.PubSub.broadcast(Livebook.PubSub, "sessions:#{session_id}", message)
end
# Stores the latest runtime memory reading alongside a fresh system
# memory snapshot.
defp put_memory_usage(state, runtime) do
  put_in(state.memory_usage, %{runtime: runtime, system: Livebook.SystemResources.memory()})
end

# Publishes the current session snapshot to the sessions tracker and to
# topic subscribers, then returns the state unchanged.
defp notify_update(state) do
  updated_session = self_from_state(state)
  Livebook.Sessions.update_session(updated_session)
  broadcast_message(state.session_id, {:session_updated, updated_session})
  state
end
# Saves the notebook to its autosave file in a background task, unless the
# notebook is clean or a save is already in flight.
#
# The task renders the notebook to Live Markdown, writes it, and reports
# back to the session process with a `{:save_finished, ...}` message.
# Returns the state with `save_task_pid` pointing at the running task.
#
# Fix: the original rebound `pid` (first the session pid, then the task
# pid), which only worked because the closure captured the first binding.
# Distinct names make the data flow explicit.
defp maybe_save_notebook_async(state) do
  {file, default?} = notebook_autosave_file(state)

  if file && should_save_notebook?(state) do
    session_pid = self()
    notebook = state.data.notebook

    {:ok, task_pid} =
      Task.start(fn ->
        content = LiveMarkdown.notebook_to_livemd(notebook)
        result = FileSystem.File.write(file, content)
        send(session_pid, {:save_finished, self(), result, file, default?})
      end)

    %{state | save_task_pid: task_pid}
  else
    state
  end
end
# Synchronous counterpart of the async autosave: renders and writes the
# notebook inline and processes the save result immediately.
defp maybe_save_notebook_sync(state) do
  {file, default?} = notebook_autosave_file(state)

  if file && should_save_notebook?(state) do
    livemd = LiveMarkdown.notebook_to_livemd(state.data.notebook)
    write_result = FileSystem.File.write(file, livemd)
    handle_save_finished(state, write_result, file, default?)
  else
    state
  end
end

# A save makes sense only when there are unsaved changes and no save task
# is already running.
defp should_save_notebook?(state) do
  state.data.dirty and is_nil(state.save_task_pid)
end

# Returns `{file, default?}` - the autosave destination and whether it is
# the generated default location (i.e. no explicit file is set).
defp notebook_autosave_file(state) do
  case state.data.file do
    nil -> {default_notebook_file(state), true}
    file -> {file, false}
  end
end
# Resolves the default autosave file for the session, or `nil` when no
# autosave directory is configured.
defp default_notebook_file(state) do
  if path = state.autosave_path || Livebook.Settings.autosave_path() do
    dir = path |> FileSystem.Utils.ensure_dir_path() |> FileSystem.File.local()
    notebook_rel_path = default_notebook_path(state)
    FileSystem.File.resolve(dir, notebook_rel_path)
  end
end

# Builds a relative path like "2021_01_01/12_00_00_my_notebook_ab12.livemd".
defp default_notebook_path(state) do
  title_str = notebook_name_to_file_name(state.data.notebook.name)

  # We want a random, but deterministic part, so we
  # use a few trailing characters from the session id,
  # which are random already
  random_str = String.slice(state.session_id, -4..-1)

  # `DateTime.to_iso8601/1` omits the fractional seconds for precision-0
  # datetimes, in which case the split yields only two parts - a fixed
  # three-element match would crash, so we match on a tail pattern instead.
  [date_str, time_str | _] =
    state.created_at
    |> DateTime.to_iso8601()
    |> String.trim_trailing("Z")
    |> String.replace(["-", ":"], "_")
    |> String.split(["T", "."])

  "#{date_str}/#{time_str}_#{title_str}_#{random_str}.livemd"
end
# Sanitizes a notebook title into a file-name fragment: lowercased,
# whitespace collapsed into underscores, all other non-word characters
# stripped. Falls back to "untitled_notebook" when nothing remains.
defp notebook_name_to_file_name(notebook_name) do
  sanitized =
    notebook_name
    |> String.downcase()
    |> String.replace(~r/\s+/, "_")
    |> String.replace(~r/[^\w]/, "")

  if sanitized == "", do: "untitled_notebook", else: sanitized
end
# Finalizes a save. When saving to the default (auto-generated) location,
# removes the previously saved default file if the destination changed and
# remembers the new one. Then either clears the dirty flag (on success) or
# broadcasts the save error.
defp handle_save_finished(state, result, file, default?) do
state =
if default? do
# A rename/retitle may move the default path; drop the stale file
if state.saved_default_file && state.saved_default_file != file do
FileSystem.File.remove(state.saved_default_file)
end
%{state | saved_default_file: file}
else
state
end
case result do
:ok ->
handle_operation(state, {:mark_as_not_dirty, self()})
{:error, message} ->
broadcast_error(state.session_id, "failed to save notebook - #{message}")
state
end
end
# Extracts a gzipped tarball, given as an in-memory binary, into `path`.
# On failure the partially extracted directory is removed and an error
# is raised.
defp extract_archive!(binary, path) do
  extract_opts = [:compressed, {:cwd, String.to_charlist(path)}]

  with {:error, reason} <- :erl_tar.extract({:binary, binary}, extract_opts) do
    File.rm_rf!(path)
    raise "failed to extract archive to #{path}, reason: #{inspect(reason)}"
  end
end
@doc """
Subscribes the caller to runtime messages under the given topic.

Events published to the topic are first encoded with `encoder`; on
success the encoded message goes straight to `receiver_pid`, otherwise
an `{:encoding_error, error, message}` tuple is sent to the caller.
"""
@spec subscribe_to_runtime_events(
        id(),
        String.t(),
        String.t(),
        (term() -> {:ok, term()} | {:error, term()}),
        pid()
      ) :: :ok | {:error, term()}
def subscribe_to_runtime_events(session_id, topic, subtopic, encoder, receiver_pid) do
  Phoenix.PubSub.subscribe(
    Livebook.PubSub,
    runtime_messages_topic(session_id, topic, subtopic),
    metadata: {encoder, receiver_pid}
  )
end

@doc """
Reverts a subscription made with `subscribe_to_runtime_events/5`.
"""
@spec unsubscribe_from_runtime_events(id(), String.t(), String.t()) :: :ok | {:error, term()}
def unsubscribe_from_runtime_events(session_id, topic, subtopic) do
  Phoenix.PubSub.unsubscribe(
    Livebook.PubSub,
    runtime_messages_topic(session_id, topic, subtopic)
  )
end

@doc false
def broadcast_runtime_event(session_id, topic, subtopic, message) do
  # Passing __MODULE__ makes PubSub use our custom dispatch/3 below
  Phoenix.PubSub.broadcast(
    Livebook.PubSub,
    runtime_messages_topic(session_id, topic, subtopic),
    message,
    __MODULE__
  )
end
# Builds the per-session PubSub topic for runtime messages.
defp runtime_messages_topic(session_id, topic, subtopic) do
  Enum.join(["sessions:#{session_id}", "runtime_messages", topic, subtopic], ":")
end
@doc false
# Custom dispatcher for broadcasting runtime events
#
# Subscribers registered with `{encoder, receiver_pid}` metadata receive
# the encoded message; each distinct encoder runs at most once per
# broadcast (results are memoized in `cache`, keyed by the encoder fun).
# The broadcasting process itself (`from`) is skipped, and subscribers
# without encoder metadata receive the raw message.
def dispatch(subscribers, from, message) do
Enum.reduce(subscribers, %{}, fn
{pid, _}, cache when pid == from ->
cache
{pid, {encoder, receiver_pid}}, cache ->
case cache do
%{^encoder => encoded_message} ->
# Encoder already ran for this broadcast - reuse the result
send(receiver_pid, encoded_message)
cache
%{} ->
case encoder.(message) do
{:ok, encoded_message} ->
send(receiver_pid, encoded_message)
Map.put(cache, encoder, encoded_message)
{:error, error} ->
# On encoding failure notify the subscriber itself, not the receiver
send(pid, {:encoding_error, error, message})
cache
end
end
{pid, _}, cache ->
send(pid, message)
cache
end)
end
@doc """
Finds evaluation locator that the given cell depends on.

By default looks up the direct evaluation parent.

## Options

  * `:existing` - considers only cells that have been evaluated
    as evaluation parents. Defaults to `false`
"""
@spec find_base_locator(Data.t(), Cell.t(), Section.t(), keyword()) :: Runtime.locator()
def find_base_locator(data, cell, section, opts \\ []) do
  evaluated_only? = Keyword.get(opts, :existing, false)

  # Decides whether a candidate cell may serve as an evaluation parent
  parent_filter = fn candidate ->
    if evaluated_only? do
      info = data.cell_infos[candidate.id]
      Cell.evaluable?(candidate) and info.eval.validity in [:evaluated, :stale]
    else
      Cell.evaluable?(candidate)
    end
  end

  # With no matching parent, anchor at the section container itself
  default = {container_ref_for_section(section), nil}

  data.notebook
  |> Notebook.parent_cells_with_section(cell.id)
  |> Enum.find_value(default, fn {parent_cell, parent_section} ->
    parent_filter.(parent_cell) && {container_ref_for_section(parent_section), parent_cell.id}
  end)
end
# Sections without a parent share the main container; sections with a
# parent get a container keyed by their own id.
defp container_ref_for_section(section) do
  case section do
    %{parent_id: nil} -> @main_container_ref
    %{id: id} -> id
  end
end
end
| 30.600129 | 98 | 0.671051 |
1c32554553c5e46256af2fb2d4ab656a3f5eb9dd | 1,217 | ex | Elixir | lib/codes/codes_g50.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_g50.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_g50.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_G50 do
alias IcdCode.ICDCode
def _G500 do
%ICDCode{full_code: "G500",
category_code: "G50",
short_code: "0",
full_name: "Trigeminal neuralgia",
short_name: "Trigeminal neuralgia",
category_name: "Trigeminal neuralgia"
}
end
def _G501 do
%ICDCode{full_code: "G501",
category_code: "G50",
short_code: "1",
full_name: "Atypical facial pain",
short_name: "Atypical facial pain",
category_name: "Atypical facial pain"
}
end
def _G508 do
%ICDCode{full_code: "G508",
category_code: "G50",
short_code: "8",
full_name: "Other disorders of trigeminal nerve",
short_name: "Other disorders of trigeminal nerve",
category_name: "Other disorders of trigeminal nerve"
}
end
def _G509 do
%ICDCode{full_code: "G509",
category_code: "G50",
short_code: "9",
full_name: "Disorder of trigeminal nerve, unspecified",
short_name: "Disorder of trigeminal nerve, unspecified",
category_name: "Disorder of trigeminal nerve, unspecified"
}
end
end
| 28.302326 | 68 | 0.604766 |
1c3285958f3e747a82149c5a2bfaf805aa1e68ca | 3,254 | ex | Elixir | lib/changelog_web/views/time_view.ex | joebew42/changelog.com | da4ec68d15f3a2b4b6c29033443d7e7afe814d18 | [
"MIT"
] | 1 | 2018-01-22T20:07:10.000Z | 2018-01-22T20:07:10.000Z | lib/changelog_web/views/time_view.ex | joebew42/changelog.com | da4ec68d15f3a2b4b6c29033443d7e7afe814d18 | [
"MIT"
] | null | null | null | lib/changelog_web/views/time_view.ex | joebew42/changelog.com | da4ec68d15f3a2b4b6c29033443d7e7afe814d18 | [
"MIT"
] | null | null | null | defmodule ChangelogWeb.TimeView do
alias Timex.Duration
def closest_monday_to(date) do
offset = case Timex.weekday(date) do
1 -> 0
2 -> -1
3 -> -2
4 -> -3
5 -> 3
6 -> 2
7 -> 1
end
Timex.shift(date, days: offset)
end
def duration(seconds) when is_nil(seconds), do: duration(0)
def duration(seconds) when seconds < 3600 do
minutes = div(seconds, 60)
seconds = rem(seconds, 60)
"#{leading_zero(minutes)}:#{leading_zero(seconds)}"
end
def duration(seconds) when seconds >= 3600 do
hours = div(seconds, 3600)
remaining = rem(seconds, 3600)
"#{hours}:#{duration(remaining)}"
end
def hacker_date(ts) when is_nil(ts), do: ""
def hacker_date(ts) when is_binary(ts) do
{:ok, result} = Timex.parse(ts, "{YYYY}-{0M}-{0D} {h24}:{m}:{s}")
hacker_date(result)
end
def hacker_date(ts) do
{:ok, result} = Timex.format(ts, "{YYYY}-{0M}-{0D}")
result
end
def hours_ago(hours) do
Timex.subtract(Timex.now, Duration.from_hours(hours))
end
def hours_from_now(hours) do
Timex.add(Timex.now, Duration.from_hours(hours))
end
def pretty_date(ts) when is_nil(ts), do: ""
def pretty_date(ts) when is_binary(ts) do
{:ok, result} = Timex.parse(ts, "{YYYY}-{0M}-{0D} {h24}:{m}:{s}")
pretty_date(result)
end
def pretty_date(ts) do
{:ok, result} = Timex.format(ts, "{Mshort} {D}, {YYYY}")
result
end
def rounded_minutes(seconds) when is_nil(seconds), do: rounded_minutes(0)
def rounded_minutes(seconds) do
(seconds / 60) |> round
end
def rss(ts) when is_nil(ts), do: ""
def rss(ts) do
{:ok, result} =
ts
|> Timex.format("{RFC1123}")
result
end
def seconds(duration) when not is_binary(duration), do: seconds("00")
def seconds(duration) do
case String.split(duration, ":") do
[h, m, s] -> to_seconds(:hours, h) + to_seconds(:minutes, m) + to_seconds(s)
[m, s] -> to_seconds(:minutes, m) + to_seconds(s)
[s] -> to_seconds(s)
_ -> 0
end
end
def ts(ts, style \\ "admin")
def ts(ts, _style) when is_nil(ts), do: ""
def ts(ts, style) do
{:ok, formatted} = Timex.format(ts, "{ISO:Extended:Z}")
{:safe, "<span class='time' data-style='#{style}'>#{formatted}</span>"}
end
def weeks(start_date \\ Timex.today, count \\ 8) do
Timex.Interval.new(from: Timex.beginning_of_week(start_date), until: [weeks: count], step: [weeks: 1])
end
def week_start_end(date) do
start_date = Timex.beginning_of_week(date)
end_date = Timex.end_of_week(date)
{:ok, pretty_start} = Timex.format(start_date, "{Mshort} {0D}")
{:ok, pretty_end} = Timex.format(end_date, "{Mshort} {0D}")
"#{pretty_start} - #{pretty_end}"
end
defp to_seconds(:hours, str), do: string_to_rounded_integer(str) * 3600
defp to_seconds(:minutes, str), do: string_to_rounded_integer(str) * 60
defp to_seconds(str), do: string_to_rounded_integer(str)
defp string_to_rounded_integer(str) do
if String.contains?(str, ".") do
round(String.to_float(str))
else
String.to_integer(str)
end
end
defp leading_zero(integer) do
if integer < 10 do
"0#{integer}"
else
"#{integer}"
end
end
end
| 26.892562 | 106 | 0.626921 |
1c3287e99f7c321e4ec589e919140abd20c60055 | 156 | exs | Elixir | test/mnesia_vs_redis_test.exs | rescircuit/mnesia-vs-redis | 0ef28382d1b858b8a04d4e304240a46e364d91dd | [
"MIT"
] | null | null | null | test/mnesia_vs_redis_test.exs | rescircuit/mnesia-vs-redis | 0ef28382d1b858b8a04d4e304240a46e364d91dd | [
"MIT"
] | null | null | null | test/mnesia_vs_redis_test.exs | rescircuit/mnesia-vs-redis | 0ef28382d1b858b8a04d4e304240a46e364d91dd | [
"MIT"
] | null | null | null | defmodule MnesiaVsRedisTest do
use ExUnit.Case
doctest MnesiaVsRedis
test "greets the world" do
assert MnesiaVsRedis.hello() == :world
end
end
| 17.333333 | 42 | 0.74359 |
1c329c06fa61691cfd10b73c2a1542bfddb6dd1c | 1,218 | exs | Elixir | config/config.exs | konti-kun/Raoreq | 55d09688503189b0e735702da457c68b6b10a24d | [
"MIT"
] | null | null | null | config/config.exs | konti-kun/Raoreq | 55d09688503189b0e735702da457c68b6b10a24d | [
"MIT"
] | null | null | null | config/config.exs | konti-kun/Raoreq | 55d09688503189b0e735702da457c68b6b10a24d | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
config :raoreq, cowboy_port: 8086
config :plug, :mimes, %{
"application/json" => ["json"]
}
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :raoreq, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:raoreq, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env()}.exs"
| 33.833333 | 73 | 0.741379 |
1c329deaeab2040d90261a3f8dc16814e249081e | 580 | exs | Elixir | languages/elixir/exercises/concept/take-a-number/mix.exs | AlexLeSang/v3 | 3d35961a961b5a2129b1d42f1d118972d9665357 | [
"MIT"
] | 3 | 2020-07-25T06:24:00.000Z | 2020-09-14T17:39:11.000Z | languages/elixir/exercises/concept/take-a-number/mix.exs | AlexLeSang/v3 | 3d35961a961b5a2129b1d42f1d118972d9665357 | [
"MIT"
] | 1 | 2020-01-26T20:08:06.000Z | 2020-01-26T20:08:06.000Z | languages/elixir/exercises/concept/take-a-number/mix.exs | AlexLeSang/v3 | 3d35961a961b5a2129b1d42f1d118972d9665357 | [
"MIT"
] | null | null | null | defmodule TakeANumber.MixProject do
use Mix.Project
def project do
[
app: :processes,
version: "0.1.0",
# elixir: "~> 1.10",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
]
end
end
| 20 | 87 | 0.57931 |
1c32b29fb0178c9190b64f4ccacda1cc7eef7a5f | 116 | exs | Elixir | .formatter.exs | btedev/pair2 | eb7d8f78a13046461f12ff16372e7dc03175c2cc | [
"MIT"
] | null | null | null | .formatter.exs | btedev/pair2 | eb7d8f78a13046461f12ff16372e7dc03175c2cc | [
"MIT"
] | null | null | null | .formatter.exs | btedev/pair2 | eb7d8f78a13046461f12ff16372e7dc03175c2cc | [
"MIT"
] | null | null | null | [
inputs: [
".credo.exs",
".formatter.exs",
"*.exs",
"{apps,config,features}/**/*.{ex,exs}"
]
]
| 12.888889 | 42 | 0.448276 |
1c32c4732aa7f3f7623aad69de211bc3023d5e34 | 1,602 | ex | Elixir | lib/ueberauth_example_web/models/user_from_auth.ex | timwis/ueberauth_example | 7e4d8b41c08d344a606a59554265d385555000a1 | [
"MIT"
] | null | null | null | lib/ueberauth_example_web/models/user_from_auth.ex | timwis/ueberauth_example | 7e4d8b41c08d344a606a59554265d385555000a1 | [
"MIT"
] | null | null | null | lib/ueberauth_example_web/models/user_from_auth.ex | timwis/ueberauth_example | 7e4d8b41c08d344a606a59554265d385555000a1 | [
"MIT"
] | null | null | null | defmodule UserFromAuth do
@moduledoc """
Retrieve the user information from an auth request
"""
require Logger
require Poison
alias Ueberauth.Auth
def find_or_create(%Auth{provider: :identity} = auth) do
case validate_pass(auth.credentials) do
:ok ->
{:ok, basic_info(auth)}
{:error, reason} -> {:error, reason}
end
end
def find_or_create(%Auth{} = auth) do
{:ok, basic_info(auth)}
end
# github does it this way
defp avatar_from_auth( %{info: %{urls: %{avatar_url: image}} }), do: image
#facebook does it this way
defp avatar_from_auth( %{info: %{image: image} }), do: image
# default case if nothing matches
defp avatar_from_auth( auth ) do
Logger.warn auth.provider <> " needs to find an avatar URL!"
Logger.debug(Poison.encode!(auth))
nil
end
defp basic_info(auth) do
%{id: auth.uid, name: name_from_auth(auth), avatar: avatar_from_auth(auth)}
end
defp name_from_auth(auth) do
if auth.info.name do
auth.info.name
else
name = [auth.info.first_name, auth.info.last_name]
|> Enum.filter(&(&1 != nil and &1 != ""))
cond do
length(name) == 0 -> auth.info.nickname
true -> Enum.join(name, " ")
end
end
end
defp validate_pass(%{other: %{password: ""}}) do
{:error, "Password required"}
end
defp validate_pass(%{other: %{password: pw, password_confirmation: pw}}) do
:ok
end
defp validate_pass(%{other: %{password: _}}) do
{:error, "Passwords do not match"}
end
defp validate_pass(_), do: {:error, "Password Required"}
end
| 25.03125 | 79 | 0.637953 |
1c32c4c0aa4aaf2f1a992ad4ba91ba639efece59 | 97 | exs | Elixir | test/day8_test.exs | anamba/adventofcode2019 | a5de43ddce8b40f67c3017f349d8563c73c94e20 | [
"MIT"
] | null | null | null | test/day8_test.exs | anamba/adventofcode2019 | a5de43ddce8b40f67c3017f349d8563c73c94e20 | [
"MIT"
] | null | null | null | test/day8_test.exs | anamba/adventofcode2019 | a5de43ddce8b40f67c3017f349d8563c73c94e20 | [
"MIT"
] | null | null | null | defmodule Day8Test do
use ExUnit.Case
doctest Day8.SIFDecoder
doctest Day8.SIFDecoder2
end
| 16.166667 | 26 | 0.804124 |
1c32c631a6b74c733f0ca213c39c2a3f575ff075 | 3,233 | ex | Elixir | lib/enforce.ex | adamzaninovich/policy_wonk | e372ce25462b4693de36ba673d17de4e7318fd5f | [
"MIT"
] | 178 | 2016-09-15T11:46:23.000Z | 2022-01-09T18:11:02.000Z | lib/enforce.ex | adamzaninovich/policy_wonk | e372ce25462b4693de36ba673d17de4e7318fd5f | [
"MIT"
] | 10 | 2016-09-22T14:49:54.000Z | 2021-02-05T18:10:59.000Z | lib/enforce.ex | adamzaninovich/policy_wonk | e372ce25462b4693de36ba673d17de4e7318fd5f | [
"MIT"
] | 12 | 2016-09-16T11:21:14.000Z | 2021-12-20T17:59:02.000Z | defmodule PolicyWonk.Enforce do
@moduledoc """
This turns your policy module into a plug that can be used in a router.
## Usage
The only time you should directly use the `PolicyWonk.Enforce` module is to call
`use PolicyWonk.Enforce` when defining your policy module.
Example policy module:
defmodule MyAppWeb.Policies do
use PolicyWonk.Policy # set up support for policies
use PolicyWonk.Enforce # turn this module into an enforcement plug
def policy( assigns, :current_user ) do
case assigns[:current_user] do
%MyApp.Account.User{} ->
:ok
_ ->
{:error, :current_user}
end
end
def policy_error(conn, :current_user) do
MyAppWeb.ErrorHandlers.unauthenticated(conn, "Must be logged in")
end
end
To enforce your policies as a plug, you can just use the new module you created.
Enforce policies in a router:
pipeline :browser_session do
plug MyAppWeb.Policies, :current_user
plug MyAppWeb.Policies, [:policy_a, :policy_b]
end
"""
# ===========================================================================
defmacro __using__(use_opts) do
quote do
@doc false
def init(policies_or_opts) do
case Keyword.keyword?(policies_or_opts) do
true ->
policies_or_opts
|> Keyword.put_new(:policy_module, unquote(use_opts[:policy_module]) || __MODULE__)
|> PolicyWonk.Enforce.init()
false ->
PolicyWonk.Enforce.init(policy_module: __MODULE__, policies: policies_or_opts)
end
end
@doc false
def call(conn, opts), do: PolicyWonk.Enforce.call(conn, opts)
end
# quote
end
# defmacro
# ===========================================================================
# define a policy enforcement error here
defmodule Error do
@moduledoc false
defexception message: "#{IO.ANSI.red()}Policy endforcement failed#{IO.ANSI.default_color()}\n"
end
# ===========================================================================
@doc false
def init(opts) when is_list(opts), do: do_init(opts[:policy_module], opts[:policies])
defp do_init(nil, _),
do:
raise(
Error,
message: "#{IO.ANSI.red()}Must supply a valid :policy_module#{IO.ANSI.default_color()}"
)
defp do_init(_, []),
do:
raise(
Error,
message:
"#{IO.ANSI.red()}Must supply at least one policy to enforce#{IO.ANSI.default_color()}"
)
defp do_init(policy_module, policies) when is_atom(policy_module) and is_list(policies) do
%{
policy_module: policy_module,
policies: policies
}
end
defp do_init(policy_module, policy) do
do_init(policy_module, [policy])
end
# ----------------------------------------------------------------------------
# ------------------------------------------------------------------------
@doc false
def call(conn, %{policy_module: policy_module, policies: policies}) do
PolicyWonk.Policy.enforce(conn, policy_module, policies)
end
end
| 28.610619 | 98 | 0.551191 |
1c32f9fdcfd34afe2a3d47599e286b4a4c7e6100 | 10,737 | ex | Elixir | lib/nostrum/shard/dispatch.ex | ushitora-anqou/nostrum | 93ffcf9ed04380e06e6bffa21a1f51b5193a56d4 | [
"MIT"
] | null | null | null | lib/nostrum/shard/dispatch.ex | ushitora-anqou/nostrum | 93ffcf9ed04380e06e6bffa21a1f51b5193a56d4 | [
"MIT"
] | null | null | null | lib/nostrum/shard/dispatch.ex | ushitora-anqou/nostrum | 93ffcf9ed04380e06e6bffa21a1f51b5193a56d4 | [
"MIT"
] | null | null | null | defmodule Nostrum.Shard.Dispatch do
@moduledoc false
alias Nostrum.Cache.{ChannelCache, GuildCache, PresenceCache, UserCache}
alias Nostrum.Cache.Me
alias Nostrum.Shard.{Intents, Session}
alias Nostrum.Struct.Event.{
ChannelPinsUpdate,
GuildBanAdd,
GuildBanRemove,
GuildIntegrationsUpdate,
InviteCreate,
InviteDelete,
MessageDelete,
MessageDeleteBulk,
MessageReactionAdd,
MessageReactionRemove,
MessageReactionRemoveAll,
MessageReactionRemoveEmoji,
SpeakingUpdate,
TypingStart,
VoiceServerUpdate,
VoiceState
}
alias Nostrum.Struct.{Guild, Interaction, Message, User}
alias Nostrum.Struct.Guild.UnavailableGuild
alias Nostrum.Util
alias Nostrum.Voice
alias Nostrum.Voice.Session, as: VoiceSession
require Logger
@large_threshold 250
def handle({payload, state}) do
if Application.get_env(:nostrum, :log_full_events),
do: Logger.debug(inspect(payload.d, pretty: true))
payload.t
|> handle_event(payload.d, state)
|> format_event
end
defp format_event(events) when is_list(events),
do: for(event <- events, do: format_event(event))
# Handles the case of not finding users in the user cache
defp format_event({_name, :noop, _state}), do: :noop
defp format_event({_name, event_info, _state} = event) when is_tuple(event_info), do: event
defp format_event({name, event_info, state}), do: {name, event_info, state}
defp format_event(:noop), do: :noop
defp check_new_or_unavailable(guild_id) do
case :ets.lookup(:unavailable_guilds, guild_id) do
[] -> :GUILD_CREATE
[_] -> :GUILD_AVAILABLE
end
end
def handle_event(:CHANNEL_CREATE = event, %{type: 1} = p, state) do
{event, ChannelCache.create(p), state}
end
def handle_event(:CHANNEL_CREATE = event, %{type: t} = p, state) when t in [0, 2] do
:ets.insert(:channel_guild_map, {p.id, p.guild_id})
{event, GuildCache.channel_create(p.guild_id, p), state}
end
# Ignore group channels
def handle_event(:CHANNEL_CREATE, _p, _state) do
:noop
end
def handle_event(:CHANNEL_DELETE = event, %{type: 1} = p, state) do
{event, ChannelCache.delete(p.id), state}
end
def handle_event(:CHANNEL_DELETE = event, %{type: t} = p, state) when t in [0, 2] do
:ets.delete(:channel_guild_map, p.id)
{event, GuildCache.channel_delete(p.guild_id, p.id), state}
end
def handle_event(:CHANNEL_UPDATE = event, p, state) do
{event, GuildCache.channel_update(p.guild_id, p), state}
end
def handle_event(:CHANNEL_DELETE, _p, _state) do
# Ignore group channels
:noop
end
def handle_event(:CHANNEL_PINS_ACK = event, p, state), do: {event, p, state}
def handle_event(:CHANNEL_PINS_UPDATE = event, p, state) do
{event, ChannelPinsUpdate.to_struct(p), state}
end
def handle_event(:GUILD_BAN_ADD = event, p, state) do
{event, GuildBanAdd.to_struct(p), state}
end
def handle_event(:GUILD_BAN_REMOVE = event, p, state) do
{event, GuildBanRemove.to_struct(p), state}
end
def handle_event(:GUILD_CREATE, %{unavailable: true} = guild, state) do
:ets.insert(:unavailable_guilds, {guild.id, guild})
{:GUILD_UNAVAILABLE, UnavailableGuild.to_struct(guild), state}
end
def handle_event(:GUILD_CREATE, p, state) do
# Ensures every channel will have an associated guild_id
channels_with_guild_id =
p.channels
|> Enum.map(fn channel -> Map.put(channel, :guild_id, p.id) end)
guild = %{p | channels: channels_with_guild_id}
guild.members
|> Enum.each(fn member -> UserCache.create(member.user) end)
:ets.insert(:guild_shard_map, {guild.id, state.shard_num})
Enum.each(guild.channels, fn channel ->
:ets.insert(:channel_guild_map, {channel.id, guild.id})
end)
has_members = Intents.has_intent?(:guild_members)
has_presences = Intents.has_intent?(:guild_presences)
intents_should_request? = has_members and not has_presences
large_server? = guild.member_count >= @large_threshold
should_request? = large_server? or intents_should_request?
if should_request? and Application.get_env(:nostrum, :request_guild_members, false) do
Session.request_guild_members(state.conn_pid, guild.id)
end
{presences, guild} = Map.pop(guild, :presences, [])
PresenceCache.bulk_create(guild.id, presences)
guild = Util.cast(guild, {:struct, Guild})
true = GuildCache.create(guild)
{check_new_or_unavailable(guild.id), guild, state}
end
def handle_event(:GUILD_UPDATE = event, p, state), do: {event, GuildCache.update(p), state}
def handle_event(:GUILD_DELETE = event, p, state) do
:ets.delete(:guild_shard_map, p.id)
{event, {GuildCache.delete(p.id), Map.get(p, :unavailable, false)}, state}
end
def handle_event(:GUILD_EMOJIS_UPDATE = event, p, state),
do: {event, GuildCache.emoji_update(p.guild_id, p.emojis), state}
def handle_event(:GUILD_INTEGRATIONS_UPDATE = event, p, state) do
{event, GuildIntegrationsUpdate.to_struct(p), state}
end
def handle_event(:GUILD_MEMBER_ADD = event, p, state) do
UserCache.create(p.user)
{event, GuildCache.member_add(p.guild_id, p), state}
end
def handle_event(:GUILD_MEMBERS_CHUNK = event, p, state) do
UserCache.bulk_create(p.members)
GuildCache.member_chunk(p.guild_id, p.members)
# note: not casted at the moment, deemed mostly internal
{event, p, state}
end
def handle_event(:GUILD_MEMBER_REMOVE = event, p, state),
do: {event, GuildCache.member_remove(p.guild_id, p.user), state}
def handle_event(:GUILD_MEMBER_UPDATE = event, %{guild_id: guild_id} = p, state) do
{event, GuildCache.member_update(guild_id, p), state}
end
def handle_event(:GUILD_ROLE_CREATE = event, p, state),
do: {event, GuildCache.role_create(p.guild_id, p.role), state}
def handle_event(:GUILD_ROLE_DELETE = event, p, state),
do: {event, GuildCache.role_delete(p.guild_id, p.role_id), state}
def handle_event(:GUILD_ROLE_UPDATE = event, %{guild_id: guild_id} = p, state),
do: {event, GuildCache.role_update(guild_id, p.role), state}
def handle_event(:INVITE_CREATE = event, p, state),
do: {event, InviteCreate.to_struct(p), state}
def handle_event(:INVITE_DELETE = event, p, state),
do: {event, InviteDelete.to_struct(p), state}
def handle_event(:MESSAGE_CREATE = event, p, state), do: {event, Message.to_struct(p), state}
def handle_event(:MESSAGE_DELETE = event, p, state),
do: {event, MessageDelete.to_struct(p), state}
def handle_event(:MESSAGE_DELETE_BULK = event, p, state),
do: {event, MessageDeleteBulk.to_struct(p), state}
def handle_event(:MESSAGE_UPDATE = event, p, state), do: {event, Message.to_struct(p), state}
def handle_event(:MESSAGE_REACTION_ADD = event, p, state) do
{event, MessageReactionAdd.to_struct(p), state}
end
def handle_event(:MESSAGE_REACTION_REMOVE = event, p, state) do
{event, MessageReactionRemove.to_struct(p), state}
end
def handle_event(:MESSAGE_REACTION_REMOVE_ALL = event, p, state) do
{event, MessageReactionRemoveAll.to_struct(p), state}
end
def handle_event(:MESSAGE_REACTION_REMOVE_EMOJI = event, p, state) do
{event, MessageReactionRemoveEmoji.to_struct(p), state}
end
def handle_event(:MESSAGE_ACK = event, p, state), do: {event, p, state}
def handle_event(:PRESENCE_UPDATE = event, p, state) do
[
{event, PresenceCache.update(p), state}
| [handle_event(:USER_UPDATE, p.user, state)]
]
end
def handle_event(:READY = event, p, state) do
p.private_channels
|> Enum.each(fn dm_channel -> ChannelCache.create(dm_channel) end)
ready_guilds =
p.guilds
|> Enum.map(fn guild -> handle_event(:GUILD_CREATE, guild, state) end)
current_user = Util.cast(p.user, {:struct, User})
Me.put(current_user)
[{event, p, state}] ++ ready_guilds
end
def handle_event(:RESUMED = event, p, state), do: {event, p, state}
def handle_event(:TYPING_START = event, p, state) do
{event, TypingStart.to_struct(p), state}
end
def handle_event(:USER_SETTINGS_UPDATE = event, p, state), do: {event, p, state}
def handle_event(:USER_UPDATE = event, p, state) do
if Me.get().id === p.id do
Me.update(p)
end
{event, UserCache.update(p), state}
end
def handle_event(:VOICE_SPEAKING_UPDATE = event, p, state),
do: {event, SpeakingUpdate.to_struct(p), state}
def handle_event(:VOICE_STATE_UPDATE = event, p, state) do
if Me.get().id === p.user_id do
if p.channel_id do
# Joining Channel
voice = Voice.get_voice(p.guild_id)
cond do
# Not yet in a channel:
is_nil(voice) or is_nil(voice.session) ->
Voice.update_voice(p.guild_id,
channel_id: p.channel_id,
session: p.session_id,
self_mute: p.self_mute,
self_deaf: p.self_deaf
)
# Already in different channel:
voice.channel_id != p.channel_id and is_pid(voice.session_pid) ->
v_ws = VoiceSession.get_ws_state(voice.session_pid)
# On the off-chance that we receive Voice Server Update first:
{new_token, new_gateway} =
if voice.token == v_ws.token do
# Need to reset
{nil, nil}
else
# Already updated
{voice.token, voice.gateway}
end
Voice.remove_voice(p.guild_id)
Voice.update_voice(p.guild_id,
channel_id: p.channel_id,
session: p.session_id,
self_mute: p.self_mute,
self_deaf: p.self_deaf,
token: new_token,
gateway: new_gateway
)
# Already in this channel:
true ->
Voice.update_voice(p.guild_id)
end
else
# Leaving Channel:
Voice.remove_voice(p.guild_id)
end
end
GuildCache.voice_state_update(p.guild_id, p)
{event, VoiceState.to_struct(p), state}
end
def handle_event(:VOICE_SERVER_UPDATE = event, p, state) do
Voice.update_voice(p.guild_id,
token: p.token,
gateway: p.endpoint
)
{event, VoiceServerUpdate.to_struct(p), state}
end
def handle_event(:WEBHOOKS_UPDATE = event, p, state), do: {event, p, state}
def handle_event(:INTERACTION_CREATE = event, p, state) do
{event, Interaction.to_struct(p), state}
end
def handle_event(:VOICE_READY = event, p, state) do
{event, p, state}
end
def handle_event(event, p, state) do
Logger.warn("UNHANDLED GATEWAY DISPATCH EVENT TYPE: #{event}, #{inspect(p)}")
{event, p, state}
end
end
| 31.031792 | 95 | 0.675887 |
1c332ac98f2f5b39cddd9c79212dda51a431b7fa | 1,812 | ex | Elixir | lib/ex_saga/utils.ex | naramore/ex_saga | 66c6b64867f28a1bbfb8ec2b6a786469b5f84e47 | [
"MIT"
] | null | null | null | lib/ex_saga/utils.ex | naramore/ex_saga | 66c6b64867f28a1bbfb8ec2b6a786469b5f84e47 | [
"MIT"
] | 17 | 2019-02-06T03:51:03.000Z | 2019-10-22T10:15:48.000Z | lib/ex_saga/utils.ex | naramore/ex_saga | 66c6b64867f28a1bbfb8ec2b6a786469b5f84e47 | [
"MIT"
] | null | null | null | defmodule ExSaga.Utils do
@moduledoc """
"""
import Kernel, except: [get_in: 2]
alias ExSaga.Stage
@doc false
# Compile-time shim: Elixir >= 1.7 exposes the stacktrace via the
# __STACKTRACE__ special form inside rescue/catch blocks, while older
# releases used System.stacktrace/0. The branch is decided when the
# caller is compiled, based on the compiling Elixir's version.
defmacro get_stacktrace() do
  if Version.match?(System.version(), "~> 1.7") do
    quote do: __STACKTRACE__
  else
    quote do: System.stacktrace()
  end
end
@doc """
Fetches the effect (or sub-tree of effects) stored at `path` inside the
nested `effects` map.

An empty path returns the whole map. Missing intermediate keys are
treated as empty maps (see `put_or_insert_map/1`), so the result is
`nil` only when the leaf itself is absent.
"""
@spec get_effects(Stage.effects(), Stage.full_name()) :: Stage.effects() | Stage.effect() | nil
def get_effects(effects, []), do: effects

def get_effects(effects, path) do
  # Kernel.get_in/2 must be fully qualified: get_in/2 is excluded from
  # this module's Kernel import because the module defines its own.
  Kernel.get_in(effects, Enum.map(path, fn x -> put_or_insert_map(x) end))
end
@doc """
Writes `effect` into the nested `effects` map at `path`, creating any
missing intermediate maps along the way.
"""
@spec insert_effect(Stage.effects(), Stage.full_name(), Stage.effect()) :: Stage.effects()
def insert_effect(effects, path, effect) do
  # Build one map-defaulting accessor per path segment, then write.
  accessors = Enum.map(path, &put_or_insert_map/1)
  put_in(effects, accessors, effect)
end
@doc """
Builds an `Access` accessor for `name` that treats a missing key as an
empty map, so nested reads and writes can traverse paths that do not
fully exist yet.
"""
@spec put_or_insert_map(Stage.name()) :: Access.access_fun(map(), term())
def put_or_insert_map(name) do
  fn
    # Read: continue with the stored value, defaulting to %{}.
    :get, data, next ->
      next.(Map.get(data, name, %{}))

    # Write: apply the continuation to the (possibly defaulted) value and
    # either store the update or pop the key, per the Access contract.
    :get_and_update, data, next ->
      value = Map.get(data, name, %{})

      case next.(value) do
        {get, update} -> {get, Map.put(data, name, update)}
        :pop -> Map.pop(data, name)
      end
  end
end
@doc """
Returns a keyword list describing the current execution site (OTP
application, module, function, file/line and pid), suitable for use as
logger or event metadata.
"""
@spec get_local_metadata(Macro.Env.t()) :: Keyword.t()
def get_local_metadata(env \\ __ENV__) do
  [
    application:
      # NOTE(review): :application.get_application/0 returns {:ok, app} on
      # success, so `otherwise` stores the whole tuple — confirm whether
      # the bare app name was intended here.
      case :application.get_application() do
        # Not running under any OTP application (e.g. a plain script).
        :undefined -> nil
        otherwise -> otherwise
      end,
    module: env.module,
    function: env.function,
    line: env.line,
    file: env.file,
    pid: self()
  ]
end
@doc """
Fetches the value at `path` inside nested `data`.

Returns `data` unchanged when `path` is empty; otherwise delegates to
`Kernel.get_in/2`.
"""
@spec get_in(term, [term]) :: term
def get_in(data, []), do: data
# BUG FIX: the original body called `get_in(data, path)`, which resolves
# to this local function (locals shadow Kernel auto-imports) and loops
# forever. Qualify the call so it reaches Kernel.get_in/2.
def get_in(data, path), do: Kernel.get_in(data, path)
end
| 22.936709 | 97 | 0.583885 |
1c338887a8bf03034eefad0cae4e3a815fe0c52c | 163 | ex | Elixir | lib/mix/tasks/compare.ex | UrbanOS-Examples/Xip | 237a12a633af0b11a1cdebef564d200a27e50fb5 | [
"Apache-2.0"
] | null | null | null | lib/mix/tasks/compare.ex | UrbanOS-Examples/Xip | 237a12a633af0b11a1cdebef564d200a27e50fb5 | [
"Apache-2.0"
] | null | null | null | lib/mix/tasks/compare.ex | UrbanOS-Examples/Xip | 237a12a633af0b11a1cdebef564d200a27e50fb5 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Compare do
use Mix.Task
import Logger
  # Mix task entry point: starts the Faker application (used for
  # generating sample data) and logs the result of the message
  # compression comparison. CLI arguments are ignored.
  def run(_) do
    Faker.start
    Xip.compare_message_compress |> inspect |> Logger.info
  end
end
| 18.111111 | 58 | 0.717791 |
1c3398931551882b28efbd9175d1acdb83fa0af4 | 9,077 | ex | Elixir | lib/mix/lib/mix/rebar.ex | gsphanikumar/elixir | 6ca225da4e016200a462888348ff1c3feb625b78 | [
"Apache-2.0"
] | 4 | 2015-12-22T02:46:39.000Z | 2016-04-26T06:11:09.000Z | lib/mix/lib/mix/rebar.ex | alco/elixir | 4407170349aa12c58664cab2122374167e827f5e | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/rebar.ex | alco/elixir | 4407170349aa12c58664cab2122374167e827f5e | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Rebar do
@moduledoc false
@doc """
Returns the path supposed to host the local copy of rebar.
"""
# The manager atom (:rebar or :rebar3) doubles as the file name under
# the Mix home directory.
def local_rebar_path(manager) do
  Path.join(Mix.Utils.mix_home, Atom.to_string(manager))
end

@doc """
Returns the path to the global copy of `rebar`, if one exists.
"""
# Looks the executable up on PATH; nil when not installed system-wide.
def global_rebar_cmd(manager) do
  wrap_cmd System.find_executable(Atom.to_string(manager))
end

@doc """
Returns the path to the local copy of `rebar`, if one exists.
"""
def local_rebar_cmd(manager) do
  cmd = local_rebar_path(manager)
  # Only report the local copy when it is an actual regular file.
  wrap_cmd(if File.regular?(cmd), do: cmd)
end

@doc """
Returns the path to the available `rebar` command.
"""
# Preference order: a system-wide install wins over the Mix-managed copy.
def rebar_cmd(manager) do
  global_rebar_cmd(manager) || local_rebar_cmd(manager)
end
@doc """
Loads `rebar.config` and evaluates `rebar.config.script` if it
exists in the given directory.
"""
def load_config(dir) do
  config_path = Path.join(dir, "rebar.config")
  script_path = Path.join(dir, "rebar.config.script")

  config = case :file.consult(config_path) do
    {:ok, config} ->
      config
    {:error, :enoent} ->
      # A missing rebar.config is legitimate — treat as an empty config.
      []
    {:error, error} ->
      reason = :file.format_error(error)
      Mix.raise "Error consulting rebar config #{inspect config_path}: #{reason}"
  end

  # The script, when present, receives the consulted config and may
  # transform it arbitrarily (rebar's dynamic-config mechanism).
  if File.exists?(script_path) do
    eval_script(script_path, config)
  else
    config
  end
end
@doc """
Merges a rebar3 parent config with a child config.
"""
# Mirrors rebar3's own option-merging rules: deps from the parent win,
# plugins from the child win, profiles merge recursively, and other
# lists get tuple-aware union semantics via tuple_merge/2.
# From https://github.com/rebar/rebar3/blob/b1da2ec0674df89599564252734bd4d794436425/src/rebar_opts.erl#L103
def merge_config(old, new) do
  Keyword.merge(old, new, fn
    :deps, old, _new -> old
    {:deps, _}, _old, new -> new
    :plugins, _old, new -> new
    {:plugins, _}, _old, new -> new
    :profiles, old, new -> merge_config(old, new)
    :mib_first_files, value, value -> value
    :mib_first_files, old, new -> old ++ new
    :relx, old, new -> tuple_merge(new, old)
    _key, old, new when is_list(new) ->
      # Printable charlists are treated as opaque strings rather than
      # lists to be merged; an empty "string" only replaces another
      # string, not arbitrary data.
      case :io_lib.printable_list(new) do
        true when new == [] ->
          if :io_lib.printable_list(old), do: new, else: old
        true ->
          new
        false ->
          tuple_merge(old, new)
      end
    _key, _old, new -> new
  end)
end
# Union-merge of two lists that may mix plain values and keyed tuples.
# Elements are compared by key (first tuple element, or the value itself
# for non-tuples); entries from `new` take priority over `old` on ties.
# From https://github.com/rebar/rebar3/blob/b1da2ec0674df89599564252734bd4d794436425/src/rebar_utils.erl#L282
defp tuple_merge(old, new),
  do: do_tuple_merge(tuple_sort(old), tuple_sort(new))

defp do_tuple_merge(old, []),
  do: old

defp do_tuple_merge(olds, [new|news]),
  do: do_tuple_umerge_dedup(umerge(:new, olds, [], news, new), [])

# Sorted-merge worker. `current` is the element under consideration; the
# leading atom records which list it came from (and therefore which list
# advances next). The accumulator is built in reverse order.
defp umerge(_, [], [], acc, current),
  do: [current|acc]
defp umerge(:new, [], news, acc, current),
  do: Enum.reverse(news, [current|acc])
defp umerge(:old, olds, [], acc, current),
  do: Enum.reverse(olds, [current|acc])
defp umerge(:new, [old|olds], news, acc, current) do
  {dir, merged, new_current} = compare({:new, current}, {:old, old})
  umerge(dir, olds, news, [merged|acc], new_current)
end
defp umerge(:old, olds, [new|news], acc, current) do
  {dir, merged, new_current} = compare({:new, new}, {:old, current})
  umerge(dir, olds, news, [merged|acc], new_current)
end

# Orders two candidates by key, handling tuple/non-tuple combinations.
# Returns {next_source, element_to_emit, element_to_keep}; on equal keys
# the :new-side element is emitted first, which is what gives `new`
# entries priority after deduplication.
defp compare({priority, a}, {secondary, b}) when is_tuple(a) and is_tuple(b) do
  ka = elem(a, 0)
  kb = elem(b, 0)
  cond do
    ka == kb -> {secondary, a, b}
    ka < kb -> {secondary, a, b}
    ka > kb -> {priority, b, a}
  end
end

defp compare({priority, a}, {secondary, b}) when not is_tuple(a) and not is_tuple(b) do
  cond do
    a == b -> {secondary, a, b}
    a < b -> {secondary, a, b}
    a > b -> {priority, b, a}
  end
end

defp compare({priority, a}, {secondary, b}) when is_tuple(a) and not is_tuple(b) do
  ka = elem(a, 0)
  cond do
    ka == b -> {secondary, a, b}
    ka < b -> {secondary, a, b}
    ka > b -> {priority, b, a}
  end
end

defp compare({priority, a}, {secondary, b}) when not is_tuple(a) and is_tuple(b) do
  kb = elem(b, 0)
  cond do
    a == kb -> {secondary, a, b}
    a < kb -> {secondary, a, b}
    a > kb -> {priority, b, a}
  end
end

# Removes duplicates while reversing the accumulator back into order;
# membership is checked against the remaining tail, so the later copy of
# a duplicated element is the one kept.
defp do_tuple_umerge_dedup([], acc), do: acc
defp do_tuple_umerge_dedup([h|t], acc) do
  if h in t do
    do_tuple_umerge_dedup(t, acc)
  else
    do_tuple_umerge_dedup(t, [h|acc])
  end
end

# Sorts by key so the merge above can run as a linear sorted-merge.
defp tuple_sort(list) do
  Enum.sort(list, fn
    a, b when is_tuple(a) and is_tuple(b) -> elem(a, 0) <= elem(b, 0)
    a, b when is_tuple(a) -> elem(a, 0) <= b
    a, b when is_tuple(b) -> a <= elem(b, 0)
    a, b -> a <= b
  end)
end
@doc """
Serializes a rebar config to a term file.
"""
# Each top-level term is pretty-printed via :io_lib.print/1 and
# terminated with a dot and newline, yielding iodata in the format
# consumed by Erlang's file:consult/1.
def serialize_config(config) do
  for term <- config, do: [:io_lib.print(term) | ".\n"]
end
@doc """
Parses the dependencies in given `rebar.config` to Mix's dependency format.
"""
def deps(app, config, overrides) do
  # We don't have to handle rebar3 profiles because dependencies
  # are always in the default profile which cannot be customized
  config = apply_overrides(app, config, overrides)
  if deps = config[:deps] do
    Enum.map(deps, &parse_dep/1)
  else
    []
  end
end

@doc """
Runs `fun` for the given config and for each `sub_dirs` in the
given rebar config.
"""
def recur(config, fun) when is_binary(config) do
  recur(load_config(config), fun)
end

def recur(config, fun) do
  # Expand each sub_dirs entry as a filesystem wildcard, keep only real
  # directories, and recurse into each, collecting results depth-first.
  subs =
    (config[:sub_dirs] || [])
    |> Enum.map(&Path.wildcard(&1))
    |> Enum.concat
    |> Enum.filter(&File.dir?(&1))
    |> Enum.map(&recur(&1, fun))
    |> Enum.concat

  [fun.(config)|subs]
end
# Normalizes the various rebar dependency notations into Mix's
# {app, requirement, opts} shape.

# Bare atom: any version, no source info.
defp parse_dep(app) when is_atom(app) do
  parse_dep({app, nil})
end

# {app, 'version'} — requirement given as a charlist.
defp parse_dep({app, req}) when is_list(req) do
  {app, List.to_string(req)}
end

# {app, {scm, url, ...}} — source given without a version requirement.
defp parse_dep({app, source}) when is_tuple(source) do
  parse_dep({app, nil, source, []})
end

defp parse_dep({app, req, source}) do
  parse_dep({app, req, source, []})
end

defp parse_dep({app, req, source, opts}) do
  [scm, url | source] = Tuple.to_list(source)

  # Translate rebar's git ref notation into Mix's branch/tag/ref options;
  # an empty ref string means the default branch.
  ref =
    case source do
      [""|_] -> [branch: "HEAD"]
      [{:branch, branch}|_] -> [branch: to_string(branch)]
      [{:tag, tag}|_] -> [tag: to_string(tag)]
      [{:ref, ref}|_] -> [ref: to_string(ref)]
      [ref|_] -> [ref: to_string(ref)]
      _ -> []
    end

  # rebar's :raw option means "fetch but do not compile this dependency".
  compile =
    if :proplists.get_value(:raw, opts, false),
      do: [compile: false],
      else: []

  mix_opts = [{scm, to_string(url)}] ++ ref ++ compile

  {app, compile_req(req), mix_opts}
end

# rebar version requirements are regexes; nil means "any version".
defp compile_req(nil) do
  ">= 0.0.0"
end

defp compile_req(req) do
  case Regex.compile(List.to_string(req)) do
    {:ok, re} ->
      re
    {:error, reason} ->
      Mix.raise "Unable to compile version regex: #{inspect req}, #{reason}"
  end
end
# Evaluates a `rebar.config.script` from inside the directory that
# contains it, binding CONFIG (the consulted rebar config) and SCRIPT
# (the script file name) as rebar scripts expect. Returns the evaluated
# config, or the original `config` (with a warning) when evaluation fails.
defp eval_script(script_path, config) do
  # String.to_char_list/1 is deprecated; to_charlist/1 (Elixir >= 1.3)
  # is the supported spelling.
  script = script_path |> Path.basename() |> String.to_charlist()

  result =
    File.cd!(Path.dirname(script_path), fn ->
      :file.script(script, eval_binds(CONFIG: config, SCRIPT: script))
    end)

  case result do
    {:ok, config} ->
      config

    {:error, error} ->
      reason = :file.format_error(error)
      Mix.shell.error("Error evaluating rebar config script #{script_path}:#{reason}")

      Mix.shell.error(
        "Any dependency defined in the script won't be available " <>
          "unless you add them to your Mix project"
      )

      config
  end
end
# Builds :erl_eval bindings from a keyword list; the keys become Erlang
# variable names (e.g. CONFIG and SCRIPT for config scripts).
defp eval_binds(binds) do
  Enum.reduce(binds, :erl_eval.new_bindings, fn ({k, v}, binds) ->
    :erl_eval.add_binding(k, v, binds)
  end)
end

defp wrap_cmd(nil), do: nil

# On Windows, rebar is an escript and must be launched through
# escript.exe unless a .cmd wrapper is already in place.
defp wrap_cmd(rebar) do
  if match?({:win32, _}, :os.type) and not String.ends_with?(rebar, ".cmd") do
    "escript.exe \"#{rebar}\""
  else
    rebar
  end
end
# Applies rebar override directives to an app's config, in rebar's order:
# global overrides first, then app-specific overrides, then app-specific
# additions. Each pass scans `overrides` in the order given, preserving
# the original precedence semantics (the original implementation ran the
# same three reduces inline; they are split into named helpers here).
defp apply_overrides(app, config, overrides) do
  config
  |> apply_global_overrides(overrides)
  |> apply_app_overrides(app, overrides)
  |> apply_app_additions(app, overrides)
end

# `{:override, overrides}` — replace keys regardless of app.
defp apply_global_overrides(config, overrides) do
  Enum.reduce(overrides, config, fn
    {:override, kvs}, config -> put_all(config, kvs)
    _other, config -> config
  end)
end

# `{:override, app, overrides}` — replace keys for the given app only.
defp apply_app_overrides(config, app, overrides) do
  Enum.reduce(overrides, config, fn
    {:override, ^app, kvs}, config -> put_all(config, kvs)
    _other, config -> config
  end)
end

# `{:add, app, overrides}` — prepend the override value to the existing
# value (which defaults to []) for the given app only.
defp apply_app_additions(config, app, overrides) do
  Enum.reduce(overrides, config, fn
    {:add, ^app, kvs}, config ->
      Enum.reduce(kvs, config, fn {key, value}, config ->
        Keyword.update(config, key, value, &(value ++ &1))
      end)

    _other, config ->
      config
  end)
end

# Replaces each key in `config` with the corresponding value from `kvs`.
defp put_all(config, kvs) do
  Enum.reduce(kvs, config, fn {key, value}, config ->
    Keyword.put(config, key, value)
  end)
end
end
| 28.365625 | 111 | 0.585105 |
1c33fd5cf5913adc88daa508b6f2320d0771f337 | 1,068 | ex | Elixir | lib/koans/11_structs.ex | wee911/elixir-koans-excerise | 71bd62fc3e2be91b654b69d3d791c39aaef9244b | [
"MIT"
] | null | null | null | lib/koans/11_structs.ex | wee911/elixir-koans-excerise | 71bd62fc3e2be91b654b69d3d791c39aaef9244b | [
"MIT"
] | null | null | null | lib/koans/11_structs.ex | wee911/elixir-koans-excerise | 71bd62fc3e2be91b654b69d3d791c39aaef9244b | [
"MIT"
] | null | null | null | defmodule Structs do
use Koans
@intro "Structs"
defmodule Person do
defstruct [:name, :age]
end
koan "Structs are defined and named after a module" do
person = %Person{}
assert person == ___
end
koan "Unless previously defined, fields begin as nil" do
nobody = %Person{}
assert nobody.age == ___
end
koan "You can pass initial values to structs" do
joe = %Person{name: "Joe", age: 23}
assert joe.name == ___
end
koan "Update fields with the cons '|' operator" do
joe = %Person{name: "Joe", age: 23}
older = %{joe | age: joe.age + 10}
assert older.age == ___
end
defmodule Plane do
defstruct passengers: 0, maker: :boeing
end
def plane?(%Plane{}), do: true
def plane?(_), do: false
koan "Or onto the type of the struct itself" do
assert plane?(%Plane{passengers: 417, maker: :boeing}) == ___
assert plane?(%Person{}) == ___
end
koan "Struct can be treated like maps" do
silvia = %Person{age: 22, name: "Silvia"}
assert Map.fetch(silvia, :age) == ___
end
end
| 21.795918 | 65 | 0.634831 |
1c340f7575d43c4cb8ae8edc4774dfb2315ddca0 | 2,226 | exs | Elixir | test/console_web/controllers/v1/v1_label_notification_webhooks_controller_test.exs | isabella232/console-2 | d4a4aca0e11c945c9698f46cb171d4645177038a | [
"Apache-2.0"
] | null | null | null | test/console_web/controllers/v1/v1_label_notification_webhooks_controller_test.exs | isabella232/console-2 | d4a4aca0e11c945c9698f46cb171d4645177038a | [
"Apache-2.0"
] | 1 | 2021-04-03T09:29:31.000Z | 2021-04-03T09:29:31.000Z | test/console_web/controllers/v1/v1_label_notification_webhooks_controller_test.exs | isabella232/console-2 | d4a4aca0e11c945c9698f46cb171d4645177038a | [
"Apache-2.0"
] | null | null | null | defmodule ConsoleWeb.V1LabelNotificationWebhooksControllerTest do
use ConsoleWeb.ConnCase
import Console.Factory
describe "label notification webhooks" do
test "inactive api keys do not work", %{conn: _conn} do
key = "upWpTb/J1mCsZupZTFL52tB27QJ2hFNWtT6PvwriQgs"
organization = insert(:organization)
api_key = insert(:api_key, %{
organization_id: organization.id,
key: :crypto.hash(:sha256, key)
})
assert api_key.active == false
resp_conn = build_conn() |> put_req_header("key", key) |> post("/api/v1/labels/some_long_id/notification_webhook")
assert response(resp_conn, 401) == "{\"message\":\"api_key_needs_email_verification\"}"
end
end
test "update works properly", %{conn: _conn} do
key = "upWpTb/J1mCsZupZTFL52tB27QJ2hFNWtT6PvwriQgs"
organization = insert(:organization)
insert(:api_key, %{
organization_id: organization.id,
key: :crypto.hash(:sha256, key),
active: true
})
assert_error_sent 500, fn ->
build_conn() |> post("/api/v1/labels/some_long_id/notification_webhook")
end # no api key attached
assert_error_sent 400, fn ->
build_conn() |> put_req_header("key", key) |> post("/api/v1/labels/some_long_id/notification_webhook", %{})
end # no attrs in body
label_1 = insert(:label)
resp_conn = build_conn() |> put_req_header("key", key) |> post("/api/v1/labels/#{label_1.id}/notification_webhook", %{ "key" => "device_stops_transmitting", "value" => "30", "url" => "http://hello.com", "notes" => "hi" })
assert response(resp_conn, 202) # valid with notes field
label_2 = insert(:label)
resp_conn = build_conn() |> put_req_header("key", key) |> post("/api/v1/labels/#{label_2.id}/notification_webhook", %{ "key" => "invalid_key", "value" => "30", "url" => "http://hi.com" })
assert response(resp_conn, 403) # invalid key not allowed
label_3 = insert(:label)
resp_conn = build_conn() |> put_req_header("key", key) |> post("/api/v1/labels/#{label_3.id}/notification_webhook", %{ "key" => "device_stops_transmitting", "value" => "30", "url" => "http://hello.com" })
assert response(resp_conn, 202) # valid without notes field
end
end | 44.52 | 225 | 0.668464 |
1c3430b96f01f37eaf6d3684e10dbd60d44432fb | 2,358 | ex | Elixir | lib/espec/allow_to.ex | MeneDev/espec | ec4b3d579c5192999e930224a8a2650bb1fdf0bc | [
"Apache-2.0"
] | null | null | null | lib/espec/allow_to.ex | MeneDev/espec | ec4b3d579c5192999e930224a8a2650bb1fdf0bc | [
"Apache-2.0"
] | null | null | null | lib/espec/allow_to.ex | MeneDev/espec | ec4b3d579c5192999e930224a8a2650bb1fdf0bc | [
"Apache-2.0"
] | null | null | null | defmodule ESpec.AllowTo do
@moduledoc """
Defines `to/2` function which makes the mock.
"""
alias ESpec.Mock
@doc "Makes specific mock with ESpec.Mock.expect/3."
def to({:accept, name, function}, {__MODULE__, module}) when is_atom(name) and is_atom(module) do
Mock.expect(module, name, function, [])
end
def to({:accept, name, function}, {__MODULE__, {__MODULE__, module}}) when is_atom(name) and is_atom(module) do
Mock.expect(module, name, function, [])
end
def to({:accept, name, function, meck_options}, {__MODULE__, module})
when is_atom(name) and is_atom(module) and is_list(meck_options) do
Mock.expect(module, name, function, meck_options)
end
def to({:accept, name, function, meck_options}, {__MODULE__, {__MODULE__, module}})
when is_atom(name) and is_atom(module) and is_list(meck_options) do
Mock.expect(module, name, function, meck_options)
end
def to({:accept, list}, {__MODULE__, module}) when is_list(list) and is_atom(module) do
mock_list(module, list)
end
def to({:accept, list}, {__MODULE__, {__MODULE__, module}}) when is_list(list) and is_atom(module) do
mock_list(module, list)
end
def to({:accept, list, meck_options}, {__MODULE__, module})
when is_list(list) and is_list(meck_options) and is_atom(module) do
mock_list(module, list, meck_options)
end
def to({:accept, list, meck_options}, {__MODULE__, {__MODULE__, module}})
when is_list(list) and is_list(meck_options) and is_atom(module) do
mock_list(module, list, meck_options)
end
def to({:accept, name}, {__MODULE__, module}) when is_atom(name) and is_atom(module) do
Mock.expect(module, name, fn -> nil end, [])
Mock.expect(module, name, fn _ -> nil end, [])
end
def to({:accept, name}, {__MODULE__, {__MODULE__, module}}) when is_atom(name) and is_atom(module) do
Mock.expect(module, name, fn -> nil end, [])
Mock.expect(module, name, fn _ -> nil end, [])
end
defp mock_list(module, list, meck_options \\ []) do
if Keyword.keyword?(list) do
Enum.each(list, fn {name, function} ->
Mock.expect(module, name, function, meck_options)
end)
else
Enum.each(list, &Mock.expect(module, &1, fn -> nil end, meck_options))
Enum.each(list, &Mock.expect(module, &1, fn _ -> nil end, meck_options))
end
end
end
| 35.727273 | 113 | 0.678117 |
1c34431de630281b0e178ecd0d5659a8e8371fb5 | 23 | ex | Elixir | apps/world/lib/world.ex | smartlogic/elixir-node-balancing | ddf1ae7ff71f93b7decafa679b2fa4b5fb1937c4 | [
"MIT"
] | 1 | 2017-12-01T23:09:10.000Z | 2017-12-01T23:09:10.000Z | apps/world/lib/world.ex | smartlogic/elixir-node-balancing | ddf1ae7ff71f93b7decafa679b2fa4b5fb1937c4 | [
"MIT"
] | null | null | null | apps/world/lib/world.ex | smartlogic/elixir-node-balancing | ddf1ae7ff71f93b7decafa679b2fa4b5fb1937c4 | [
"MIT"
] | null | null | null | defmodule World do
end
| 7.666667 | 18 | 0.826087 |
1c344d9419d434b0587b667db891def38d36899d | 1,355 | ex | Elixir | lib/basehangul/encode.ex | Dalgona/basehangul | 8cb19429554a532bd1c991977f33e026487a7795 | [
"WTFPL"
] | 1 | 2019-02-09T04:51:42.000Z | 2019-02-09T04:51:42.000Z | lib/basehangul/encode.ex | Dalgona/basehangul | 8cb19429554a532bd1c991977f33e026487a7795 | [
"WTFPL"
] | null | null | null | lib/basehangul/encode.ex | Dalgona/basehangul | 8cb19429554a532bd1c991977f33e026487a7795 | [
"WTFPL"
] | null | null | null | defmodule BaseHangul.Encode do
@moduledoc false
use Bitwise
@padchr [0xC8, 0xE5]
# Encodes up to 5 input bytes into a flat list of EUC-KR byte values
# (two bytes per output syllable or pad marker).
#
# SPEC FIX: the original spec claimed a binary() return, but to_euclist/3
# ends with `Enum.reverse |> List.flatten`, so the result is a list of
# integers (iodata), not a binary.
@spec encode_chunk(binary()) :: [byte()]
def encode_chunk(chunk) do
  {tbits, n_bytes} = repack_8to10(chunk)
  to_euclist(tbits, n_bytes, [])
end
# Right-pads the chunk with zero bytes to 5 bytes (40 bits) and re-reads
# it as four 10-bit integers. Returns {ten_bit_values, original_byte_count}
# so the encoder knows which trailing values are pure padding.
@spec repack_8to10(binary()) :: {[integer()], integer()}
defp repack_8to10(bin) when byte_size(bin) <= 5 do
  size = byte_size(bin)
  pad_size = 8 * (5 - size)
  padded = bin <> <<0::size(pad_size)>>
  # Binary comprehension: slice the padded 40 bits into 10-bit groups.
  tbit_packed = for <<tbit::10 <- padded>>, do: tbit
  {tbit_packed, size}
end
# Converts the 10-bit values into a flat list of EUC-KR bytes, emitting
# the module's pad marker bytes (@padchr) for trailing values that are
# entirely padding. `n_bytes` is the original chunk length, which
# determines how many of the four values carry real data. For a 4-byte
# chunk the final 10-bit group holds only 2 real bits: the >>> 8 drops
# the 8 padding bits and ||| 1024 sets a marker bit on the ordinal.
@spec to_euclist([integer()], integer(), [integer()]) :: [integer()]
defp to_euclist(ords, n_bytes, acc)

defp to_euclist([], _n_bytes, acc) do
  # Accumulated in reverse; flatten the per-syllable byte pairs.
  acc |> Enum.reverse() |> List.flatten()
end

# Last 10-bit value of the chunk.
defp to_euclist([ord], n_bytes, acc) do
  euc =
    cond do
      ord == 0 and n_bytes < 4 -> @padchr
      n_bytes == 4 -> get_euc(ord >>> 8 ||| 1024)
      :else -> get_euc(ord)
    end

  to_euclist([], n_bytes, [euc | acc])
end

defp to_euclist([ord | ords] = list, n_bytes, acc) do
  # A zero value counts as padding only when the original chunk was too
  # short to have produced data at this position.
  euc =
    if ord == 0 and n_bytes <= 4 - length(list) do
      @padchr
    else
      get_euc(ord)
    end

  to_euclist(ords, n_bytes, [euc | acc])
end
# Maps an ordinal (10-bit value, or up to 1027 when the 1024 marker bit
# is set) to a two-byte EUC-KR code point: base-94 over the block that
# starts at lead byte 0xB0, trail byte 0xA1.
@spec get_euc(integer()) :: [integer()]
defp get_euc(ord) when ord <= 1027 do
  [0xB0 + div(ord, 94), 0xA1 + rem(ord, 94)]
end
end
| 23.362069 | 70 | 0.583764 |
1c34596505296b39e3f35fafe219ad3a805b1cd7 | 1,372 | ex | Elixir | lib/lens/lensable.ex | alexdeleon/focus | 9212e552b725053d0763cee80baf4882c1f80b99 | [
"BSD-2-Clause"
] | null | null | null | lib/lens/lensable.ex | alexdeleon/focus | 9212e552b725053d0763cee80baf4882c1f80b99 | [
"BSD-2-Clause"
] | null | null | null | lib/lens/lensable.ex | alexdeleon/focus | 9212e552b725053d0763cee80baf4882c1f80b99 | [
"BSD-2-Clause"
] | null | null | null | defprotocol Lensable do
@fallback_to_any true
@doc "A function to get a value out of a data structure"
def getter(structure, view)
@doc "A function to set a value out of a data structure"
def setter(structure, view, func)
end
defimpl Lensable, for: Map do
  # Fetch `x` from the map, tagging a missing key as a lens error rather
  # than returning nil (so callers can distinguish "absent" from "nil").
  def getter(s, x), do: Access.get(s, x, {:error, {:lens, :bad_path}})

  # NOTE: the original also defined a one-argument `setter/1` clause as an
  # error pass-through, but the protocol only declares setter/3 — that
  # clause was unreachable via protocol dispatch and triggered a compiler
  # warning, so it has been removed.

  # Replace the value at key `x` only when the key already exists —
  # setting through a lens never creates new keys.
  def setter(s, x, f) do
    if Map.has_key?(s, x) do
      Map.put(s, x, f)
    else
      s
    end
  end
end
defimpl Lensable, for: Tuple do
  # Error tuples produced by earlier lens steps pass through untouched so
  # a failure anywhere in a composed lens propagates to the caller.
  def getter({:error, _e} = error, _x), do: error
  def getter(s, x), do: elem(s, x)

  # "Setting" index x rebuilds the tuple: drop the old element and insert
  # the new value at the same position.
  def setter(s, x, f) do
    s
    |> Tuple.delete_at(x)
    |> Tuple.insert_at(x, f)
  end
end
defimpl Lensable, for: List do
  # Non-empty keyword lists are accessed by key; other non-empty lists by
  # numeric index; anything else (including []) is a bad path.
  def getter(s, x) do
    if Keyword.keyword?(s) && !Enum.empty?(s) do
      Keyword.get(s, x)
    else
      if is_number(x) && !Enum.empty?(s) do
        get_in(s, [Access.at(x)])
      else
        {:error, {:lens, :bad_path}}
      end
    end
  end

  # List.replace_at/3 on [] is a no-op, so setting into an empty list
  # silently returns [].
  def setter([] = s, x, f), do: List.replace_at(s, x, f)

  def setter(s, x, f) do
    if Keyword.keyword?(s) do
      Keyword.put(s, x, f)
    else
      List.replace_at(s, x, f)
    end
  end
end
defimpl Lensable, for: Any do
  # Fallback for unsupported containers: both operations report the same
  # lens error instead of raising.
  def getter(_structure, _view), do: {:error, {:lens, :bad_data_structure}}
  def setter(_structure, _view, _func), do: {:error, {:lens, :bad_data_structure}}
end
| 22.866667 | 70 | 0.595481 |
1c34796234d480517694e4804727f2b21d8e8075 | 1,079 | ex | Elixir | test/process_managers/support/example_command_handler.ex | edwardzhou/commanded | f104cbf5ff3a37a6e9b637bc07ccde1d79c0725d | [
"MIT"
] | 1,220 | 2017-10-31T10:56:40.000Z | 2022-03-31T17:40:19.000Z | test/process_managers/support/example_command_handler.ex | edwardzhou/commanded | f104cbf5ff3a37a6e9b637bc07ccde1d79c0725d | [
"MIT"
] | 294 | 2017-11-03T10:33:41.000Z | 2022-03-24T08:36:42.000Z | test/process_managers/support/example_command_handler.ex | edwardzhou/commanded | f104cbf5ff3a37a6e9b637bc07ccde1d79c0725d | [
"MIT"
] | 208 | 2017-11-03T10:56:47.000Z | 2022-03-14T05:49:38.000Z | defmodule Commanded.ProcessManagers.ExampleCommandHandler do
@moduledoc false
@behaviour Commanded.Commands.Handler
alias Commanded.ProcessManagers.ExampleAggregate
alias Commanded.ProcessManagers.ExampleAggregate.Commands.{
Error,
Pause,
Publish,
Raise,
Start,
Stop
}
def handle(%ExampleAggregate{} = aggregate, %Start{aggregate_uuid: aggregate_uuid}),
do: ExampleAggregate.start(aggregate, aggregate_uuid)
def handle(%ExampleAggregate{} = aggregate, %Publish{} = command) do
%Publish{interesting: interesting, uninteresting: uninteresting} = command
ExampleAggregate.publish(aggregate, interesting, uninteresting)
end
def handle(%ExampleAggregate{} = aggregate, %Pause{}),
do: ExampleAggregate.pause(aggregate)
def handle(%ExampleAggregate{} = aggregate, %Stop{}),
do: ExampleAggregate.stop(aggregate)
def handle(%ExampleAggregate{} = aggregate, %Error{}),
do: ExampleAggregate.error(aggregate)
def handle(%ExampleAggregate{} = aggregate, %Raise{}),
do: ExampleAggregate.raise(aggregate)
end
| 29.162162 | 86 | 0.742354 |
1c347bb23eb739e3e1f2531e2f6cf27c890846f8 | 61 | ex | Elixir | lib/minty/repo.ex | wsmoak/minty | dbafd707cc5ce6d0bc251698cefe2c5d5a6eca56 | [
"MIT"
] | 4 | 2016-02-17T03:13:03.000Z | 2017-04-18T20:53:03.000Z | lib/minty/repo.ex | wsmoak/minty | dbafd707cc5ce6d0bc251698cefe2c5d5a6eca56 | [
"MIT"
] | null | null | null | lib/minty/repo.ex | wsmoak/minty | dbafd707cc5ce6d0bc251698cefe2c5d5a6eca56 | [
"MIT"
] | null | null | null | defmodule Minty.Repo do
use Ecto.Repo, otp_app: :minty
end
| 15.25 | 32 | 0.754098 |
1c347e08cae5d8d791b729ad790516a014cb094f | 2,274 | ex | Elixir | apps/astarte_housekeeping_api/lib/astarte_housekeeping_api_web/endpoint.ex | matt-mazzucato/astarte | 34d84941a5019efc42321052f7f34b7d907a38f2 | [
"Apache-2.0"
] | null | null | null | apps/astarte_housekeeping_api/lib/astarte_housekeeping_api_web/endpoint.ex | matt-mazzucato/astarte | 34d84941a5019efc42321052f7f34b7d907a38f2 | [
"Apache-2.0"
] | 5 | 2019-11-18T17:06:34.000Z | 2019-12-19T10:03:59.000Z | apps/astarte_housekeeping_api/lib/astarte_housekeeping_api_web/endpoint.ex | matt-mazzucato/astarte | 34d84941a5019efc42321052f7f34b7d907a38f2 | [
"Apache-2.0"
] | 2 | 2018-02-05T19:23:18.000Z | 2019-11-19T11:44:40.000Z | #
# This file is part of Astarte.
#
# Copyright 2017 Ispirata Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule Astarte.Housekeeping.APIWeb.Endpoint do
  use Phoenix.Endpoint, otp_app: :astarte_housekeeping_api

  socket "/socket", Astarte.Housekeeping.APIWeb.UserSocket, websocket: true

  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phoenix.digest
  # when deploying your static files in production.
  plug Plug.Static,
    at: "/",
    from: :astarte_housekeeping_api,
    gzip: false,
    only: ~w(css fonts images js favicon.ico robots.txt)

  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    plug Phoenix.CodeReloader
  end

  plug Plug.RequestId
  plug Plug.Logger

  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Phoenix.json_library()

  plug Plug.MethodOverride
  plug Plug.Head

  # The session will be stored in the cookie and signed,
  # this means its contents can be read but not tampered with.
  # Set :encryption_salt if you would also like to encrypt it.
  plug Plug.Session,
    store: :cookie,
    key: "_astarte_housekeeping_api_key",
    signing_salt: "DtYzPxzr"

  # Allow cross-origin requests before handing off to the router.
  plug CORSPlug
  plug Astarte.Housekeeping.APIWeb.Router

  @doc """
  Dynamically loads configuration from the system environment
  on startup.

  It receives the endpoint configuration from the config files
  and must return the updated configuration.
  """
  def load_from_system_env(config) do
    # Fail fast when PORT is missing rather than booting a broken endpoint.
    port = System.get_env("PORT") || raise "expected the PORT environment variable to be set"
    {:ok, Keyword.put(config, :http, [:inet6, port: port])}
  end
end
| 30.72973 | 93 | 0.733509 |
1c34956a806120d18fda10974b19e2fc68566cae | 1,209 | ex | Elixir | clients/games/lib/google_api/games/v1/connection.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/games/lib/google_api/games/v1/connection.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/games/lib/google_api/games/v1/connection.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# Auto-generated module (see the generator notice above); edits here
# would be overwritten on regeneration.
defmodule GoogleApi.Games.V1.Connection do
  @moduledoc """
  Handle Tesla connections for GoogleApi.Games.V1.
  """

  # A connection is just a configured Tesla client.
  @type t :: Tesla.Env.client()

  use GoogleApi.Gax.Connection,
    scopes: [
      # View and manage its own configuration data in your Google Drive
      "https://www.googleapis.com/auth/drive.appdata",

      # Create, edit, and delete your Google Play Games activity
      "https://www.googleapis.com/auth/games"
    ],
    otp_app: :google_api_games,
    base_url: "https://games.googleapis.com/"
end
| 33.583333 | 74 | 0.728701 |
1c34a253044878ca0c718d4700bc35c58f5cd9d7 | 243 | ex | Elixir | lib/freaking_awesome_elixir.ex | hvnsweeting/freaking_awesome_elixir | 222223e86a3ad01558780dd92dcab33086e3ef08 | [
"BSD-2-Clause"
] | null | null | null | lib/freaking_awesome_elixir.ex | hvnsweeting/freaking_awesome_elixir | 222223e86a3ad01558780dd92dcab33086e3ef08 | [
"BSD-2-Clause"
] | null | null | null | lib/freaking_awesome_elixir.ex | hvnsweeting/freaking_awesome_elixir | 222223e86a3ad01558780dd92dcab33086e3ef08 | [
"BSD-2-Clause"
] | null | null | null | defmodule FreakingAwesomeElixir do
@moduledoc """
Documentation for `FreakingAwesomeElixir`.
"""
@doc """
Hello world.
## Examples
iex> FreakingAwesomeElixir.hello()
:world
"""
def hello do
:world
end
end
| 12.789474 | 44 | 0.633745 |
1c34b45929d1d77176723a4378fffa2b8e3be22d | 1,410 | exs | Elixir | test/hexpm/accounts/auth_test.exs | findmypast/hexfmp | 38a50f5e1057833fd98748faac230bf4b9cc26a3 | [
"Apache-2.0"
] | null | null | null | test/hexpm/accounts/auth_test.exs | findmypast/hexfmp | 38a50f5e1057833fd98748faac230bf4b9cc26a3 | [
"Apache-2.0"
] | null | null | null | test/hexpm/accounts/auth_test.exs | findmypast/hexfmp | 38a50f5e1057833fd98748faac230bf4b9cc26a3 | [
"Apache-2.0"
] | null | null | null | defmodule Hexpm.Accounts.AuthTest do
use Hexpm.DataCase, async: true
alias Hexpm.Accounts.Auth
setup do
user = insert(:user, password: Auth.gen_password("password"))
%{user: user, password: "password"}
end
describe "password_auth/2" do
test "authorizes correct password", %{user: user, password: password} do
assert {:ok, {auth_user, nil, email}} = Auth.password_auth(user.username, password)
assert auth_user.id == user.id
assert email.id == hd(user.emails).id
end
test "does not authorize wrong password", %{user: user, password: password} do
assert Auth.password_auth("some_invalid_username", password) == :error
assert Auth.password_auth(user.username, "some_wrong_password") == :error
end
end
describe "key_auth/2" do
test "authorizes correct key", %{user: user} do
key = insert(:key, user: user)
assert {:ok, {auth_user, auth_key, email}} = Auth.key_auth(key.user_secret)
assert auth_key.id == key.id
assert auth_user.id == user.id
assert email.id == hd(user.emails).id
end
test "does not authorize wrong key" do
assert Auth.key_auth("0123456789abcdef") == :error
end
test "does not authorize revoked key", %{user: user} do
key = insert(:key, user: user, revoked_at: ~N"2017-01-01 00:00:00")
assert Auth.key_auth(key.user_secret) == :revoked
end
end
end
| 32.045455 | 89 | 0.668085 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.