hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ffd91451ae02fc3c76ad5fe22ea0d6bdb44b5b49 | 101 | exs | Elixir | test/splitwise/models/expense_test.exs | nathanbegbie/ex_splitwise | 6de8b9f59db9834b342b86dfcd5c41354f349e5d | [
"MIT"
] | 3 | 2019-09-29T04:15:29.000Z | 2021-04-02T14:52:04.000Z | test/splitwise/models/expense_test.exs | nathanbegbie/ex_splitwise | 6de8b9f59db9834b342b86dfcd5c41354f349e5d | [
"MIT"
] | null | null | null | test/splitwise/models/expense_test.exs | nathanbegbie/ex_splitwise | 6de8b9f59db9834b342b86dfcd5c41354f349e5d | [
"MIT"
] | 1 | 2022-02-22T15:32:16.000Z | 2022-02-22T15:32:16.000Z | defmodule Splitwise.Models.ExpenseTest do
use ExUnit.Case
doctest ExSplitwise.Models.Expense
end
| 20.2 | 41 | 0.831683 |
ffd917f7c869fdc98ccaaf64aa03fec31f1028bd | 175 | exs | Elixir | priv/repo/migrations/20180205101949_add_project_description.exs | sitedata/sanbase2 | 8da5e44a343288fbc41b68668c6c80ae8547d557 | [
"MIT"
] | 81 | 2017-11-20T01:20:22.000Z | 2022-03-05T12:04:25.000Z | priv/repo/migrations/20180205101949_add_project_description.exs | rmoorman/sanbase2 | 226784ab43a24219e7332c49156b198d09a6dd85 | [
"MIT"
] | 359 | 2017-10-15T14:40:53.000Z | 2022-01-25T13:34:20.000Z | priv/repo/migrations/20180205101949_add_project_description.exs | sitedata/sanbase2 | 8da5e44a343288fbc41b68668c6c80ae8547d557 | [
"MIT"
] | 16 | 2017-11-19T13:57:40.000Z | 2022-02-07T08:13:02.000Z | defmodule Sanbase.Repo.Migrations.AddProjectDescription do
use Ecto.Migration
def change do
alter table(:project) do
add(:description, :text)
end
end
end
| 17.5 | 58 | 0.72 |
ffd97ca6e7b3f94183c22cc491cd8228a2ba51a7 | 1,622 | ex | Elixir | core/metrics/aggregate_strategy/request_count.ex | wses-yoshida/antikythera | e108e59d2339edd0b0fad31ad4f41f56df45be55 | [
"Apache-2.0"
] | null | null | null | core/metrics/aggregate_strategy/request_count.ex | wses-yoshida/antikythera | e108e59d2339edd0b0fad31ad4f41f56df45be55 | [
"Apache-2.0"
] | null | null | null | core/metrics/aggregate_strategy/request_count.ex | wses-yoshida/antikythera | e108e59d2339edd0b0fad31ad4f41f56df45be55 | [
"Apache-2.0"
] | null | null | null | # Copyright(c) 2015-2019 ACCESS CO., LTD. All rights reserved.
use Croma
alias AntikytheraCore.Metrics.AggregateStrategy, as: Strategy
defmodule Strategy.RequestCount do
@moduledoc """
Aggregate strategy for request counts.
This calculates the following values from HTTP status codes of responses within a time window:
- total number of requests
- number of requests that result in 4XX response (client error)
- number of requests that result in 5XX response (server error)
Note that, strictly speaking, numbers generated by this aggreagate strategy do not include invalid requests
that are rejected by antikythera before arriving at gear's controller action,
i.e. requests which do not match any of the defined routes, requests with malformed body, etc.
"""
@behaviour Strategy.Behaviour
alias Antikythera.Http.Status
@typep data_t :: {pos_integer, non_neg_integer, non_neg_integer}
@impl true
defun init(status :: v[Status.Int.t]) :: data_t do
case div(status, 100) do
4 -> {1, 1, 0}
5 -> {1, 0, 1}
_ -> {1, 0, 0}
end
end
@impl true
defun merge({count_total, count_4xx, count_5xx} :: data_t, status :: v[Status.Int.t]) :: data_t do
case div(status, 100) do
4 -> {count_total + 1, count_4xx + 1, count_5xx }
5 -> {count_total + 1, count_4xx , count_5xx + 1}
_ -> {count_total + 1, count_4xx , count_5xx }
end
end
@impl true
defun results({count_total, count_4xx, count_5xx} :: data_t) :: Strategy.results_t do
[
total: count_total,
"4XX": count_4xx,
"5XX": count_5xx,
]
end
end
| 31.192308 | 109 | 0.68434 |
ffd9a96d252a62cfb395fe1c2d32cf29f7bf5e55 | 968 | ex | Elixir | clients/tpu/lib/google_api/tpu/v1/deserializer.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/tpu/lib/google_api/tpu/v1/deserializer.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/tpu/lib/google_api/tpu/v1/deserializer.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.TPU.V1.Deserializer do
@moduledoc """
Helper functions for deserializing responses into models.
This module is no longer used. Please use GoogleApi.Gax.ModelBase instead.
"""
end
| 37.230769 | 77 | 0.764463 |
ffd9be15d69a8c2cba722e03c6bce2beafdb2c98 | 1,731 | ex | Elixir | lib/server_web/live/file_explorer_live.ex | akoutmos/ex_server | 9daffd543e67c27c7c5b5a1a55bade925ab416b3 | [
"MIT"
] | 24 | 2022-02-18T01:13:32.000Z | 2022-03-18T17:57:57.000Z | lib/server_web/live/file_explorer_live.ex | akoutmos/ex_server | 9daffd543e67c27c7c5b5a1a55bade925ab416b3 | [
"MIT"
] | null | null | null | lib/server_web/live/file_explorer_live.ex | akoutmos/ex_server | 9daffd543e67c27c7c5b5a1a55bade925ab416b3 | [
"MIT"
] | 1 | 2022-02-25T20:31:39.000Z | 2022-02-25T20:31:39.000Z | defmodule ExServerWeb.FileExplorerLive do
@moduledoc """
This LiveView module is used to
"""
use ExServerWeb, :live_view
on_mount {__MODULE__, :contains_index_file}
def mount(%{"path" => requested_path}, _session, socket) do
assigns = [
entries: get_files_from_path(requested_path),
current_path: requested_path
]
{:ok, assign(socket, assigns)}
end
defp get_files_from_path([]) do
get_files_from_path("/")
end
defp get_files_from_path(requested_path) when is_list(requested_path) do
requested_path
|> Path.join()
|> get_files_from_path()
end
defp get_files_from_path(requested_path) do
cwd =
File.cwd!()
|> Path.join(requested_path)
cwd
|> File.ls!()
|> Enum.sort(&(String.downcase(&1) <= String.downcase(&2)))
|> Enum.map(fn entry ->
file_stat =
cwd
|> Path.join(entry)
|> File.stat!()
{entry, file_stat}
end)
end
@doc false
def on_mount(:contains_index_file, %{"path" => []}, session, socket) do
on_mount(:contains_index_file, %{"path" => [""]}, session, socket)
end
def on_mount(:contains_index_file, %{"path" => requested_path}, _session, socket) do
cond do
requested_path |> Path.join() |> Path.join("index.html") |> File.exists?() ->
redirect_path = requested_path |> Path.join() |> Path.join("index.html")
{:halt, redirect(socket, to: "/#{redirect_path}")}
requested_path |> Path.join() |> Path.join("index.htm") |> File.exists?() ->
redirect_path = requested_path |> Path.join() |> Path.join("index.htm")
{:halt, redirect(socket, to: "/#{redirect_path}")}
true ->
{:cont, socket}
end
end
end
| 25.835821 | 86 | 0.621028 |
ffd9cbb02ac20c0ca60aa09b38422a9d05d5cf83 | 454 | ex | Elixir | lib/ambry/first_time_setup.ex | doughsay/ambry | c04e855bf06a6b00b8053c6eacb2eac14a56a37c | [
"MIT"
] | 12 | 2021-09-30T20:51:49.000Z | 2022-01-27T04:09:32.000Z | lib/ambry/first_time_setup.ex | doughsay/ambry | c04e855bf06a6b00b8053c6eacb2eac14a56a37c | [
"MIT"
] | 76 | 2021-10-01T05:45:11.000Z | 2022-03-28T04:12:39.000Z | lib/ambry/first_time_setup.ex | doughsay/ambry | c04e855bf06a6b00b8053c6eacb2eac14a56a37c | [
"MIT"
] | 2 | 2021-10-04T19:27:28.000Z | 2022-01-13T22:36:38.000Z | defmodule Ambry.FirstTimeSetup do
@moduledoc """
Context to handle first-time-setup related tasks.
"""
alias Ambry.Paths
@contents """
This file is created once first-time-setup has been completed.
Please don't delete it.
"""
@doc """
Disables first-time-setup, so that the redirect no longer happens.
"""
def disable!(contents \\ @contents) do
File.write!(Paths.uploads_folder_disk_path("setup.lock"), contents)
end
end
| 22.7 | 71 | 0.700441 |
ffd9e231b6dc79a8bf793255ba3ced4494c70f56 | 246 | ex | Elixir | lib/cadet/auth/empty_guardian.ex | seanlowjk/cadet | 52b55cc0f777cb0d55a78fe1693f762085ab36c2 | [
"Apache-2.0"
] | null | null | null | lib/cadet/auth/empty_guardian.ex | seanlowjk/cadet | 52b55cc0f777cb0d55a78fe1693f762085ab36c2 | [
"Apache-2.0"
] | null | null | null | lib/cadet/auth/empty_guardian.ex | seanlowjk/cadet | 52b55cc0f777cb0d55a78fe1693f762085ab36c2 | [
"Apache-2.0"
] | null | null | null | defmodule Cadet.Auth.EmptyGuardian do
@moduledoc """
This module just provides an empty Guardian configuration, sufficient
to use Guardian.Token.Jwt.Verify.verify_claims.
"""
def config(_a), do: nil
def config(_a, def), do: def
end
| 22.363636 | 71 | 0.731707 |
ffd9e69f11933bc5a0a06498fb35c353bd54ac8f | 1,704 | ex | Elixir | clients/tag_manager/lib/google_api/tag_manager/v2/model/revert_folder_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/tag_manager/lib/google_api/tag_manager/v2/model/revert_folder_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/tag_manager/lib/google_api/tag_manager/v2/model/revert_folder_response.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.TagManager.V2.Model.RevertFolderResponse do
@moduledoc """
The result of reverting folder changes in a workspace.
## Attributes
* `folder` (*type:* `GoogleApi.TagManager.V2.Model.Folder.t`, *default:* `nil`) - Folder as it appears in the latest container version since the last workspace synchronization operation. If no folder is present, that means the folder was deleted in the latest container version.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:folder => GoogleApi.TagManager.V2.Model.Folder.t()
}
field(:folder, as: GoogleApi.TagManager.V2.Model.Folder)
end
defimpl Poison.Decoder, for: GoogleApi.TagManager.V2.Model.RevertFolderResponse do
def decode(value, options) do
GoogleApi.TagManager.V2.Model.RevertFolderResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.TagManager.V2.Model.RevertFolderResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.255319 | 282 | 0.755282 |
ffd9fcda69b2addddf75156da958b3471a0074e9 | 2,039 | ex | Elixir | solomon/web/controllers/role_controller.ex | FoxComm/highlander | 1aaf8f9e5353b94c34d574c2a92206a1c363b5be | [
"MIT"
] | 10 | 2018-04-12T22:29:52.000Z | 2021-10-18T17:07:45.000Z | solomon/web/controllers/role_controller.ex | FoxComm/highlander | 1aaf8f9e5353b94c34d574c2a92206a1c363b5be | [
"MIT"
] | null | null | null | solomon/web/controllers/role_controller.ex | FoxComm/highlander | 1aaf8f9e5353b94c34d574c2a92206a1c363b5be | [
"MIT"
] | 1 | 2018-07-06T18:42:05.000Z | 2018-07-06T18:42:05.000Z | defmodule Solomon.RoleController do
use Solomon.Web, :controller
alias Solomon.Repo
alias Solomon.Role
alias Solomon.ScopeService
def index(conn, _params) do
roles = ScopeService.scoped_index(conn, Role)
|> Repo.preload(:permissions)
render(conn, "index.json", roles: roles)
end
def create(conn, %{"role" => role_params}) do
# scope_id = Map.get(role_params, "scope_id")
changeset = Role.changeset(%Role{}, role_params)
# |> ScopeService.validate_scoped_changeset(conn, scope_id)
case Repo.insert(changeset) do
{:ok, role} ->
conn
|> put_status(:created)
|> put_resp_header("location", role_path(conn, :show, role))
|> render("show.json", role: role)
{:error, changeset} ->
conn
|> put_status(:unprocessable_entity)
|> render(Solomon.ChangesetView, "errors.json", changeset: changeset)
end
end
def show(conn, %{"id" => id}) do
role = ScopeService.scoped_show(conn, Role, id)
case role do
{:error, changeset} ->
conn
|> put_status(:unauthorized)
|> render(Solomon.ChangesetView, "errors.json", changeset: changeset)
_ ->
role_with_permissions =
role
|> Repo.preload(:permissions)
conn
|> render("show_with_permissions.json", role: role_with_permissions)
end
end
def update(conn, %{"id" => id, "role" => role_params}) do
role = Repo.get!(Role, id)
scope_id = Map.get(role_params, "scope_id", role.scope_id)
changeset = Role.update_changeset(role, role_params)
|> ScopeService.validate_scoped_changeset(conn, role.scope_id)
|> ScopeService.validate_scoped_changeset(conn, scope_id)
case Repo.update(changeset) do
{:ok, role} ->
conn
|> render("show.json", role: role)
{:error, changeset} ->
conn
|> put_status(:unprocessable_entity)
|> render(Solomon.ChangesetView, "errors.json", changeset: changeset)
end
end
end
| 30.893939 | 78 | 0.627759 |
ffda1574c9597faa23c8df7f872769f0358139ee | 3,425 | exs | Elixir | mix.exs | itsUnsmart/glimesh.tv | 22c532184bb5046f6c6d8232e8bd66ba534c01c1 | [
"MIT"
] | null | null | null | mix.exs | itsUnsmart/glimesh.tv | 22c532184bb5046f6c6d8232e8bd66ba534c01c1 | [
"MIT"
] | null | null | null | mix.exs | itsUnsmart/glimesh.tv | 22c532184bb5046f6c6d8232e8bd66ba534c01c1 | [
"MIT"
] | null | null | null | defmodule Glimesh.MixProject do
use Mix.Project
def project do
[
app: :glimesh,
version: "0.1.0",
elixir: "~> 1.7",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps(),
test_coverage: [tool: ExCoveralls]
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {Glimesh.Application, []},
extra_applications: [:logger, :runtime_tools, :os_mon]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
# Dev & Test Libs
{:phx_gen_auth, "~> 0.4.0", only: :dev, runtime: false},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:faker, "~> 0.14", only: :dev},
{:credo, "~> 1.5", only: [:dev, :test], runtime: false},
{:floki, ">= 0.0.0", only: :test},
{:excoveralls, "~> 0.13.1", only: :test},
{:stripe_mock, "~> 0.1.0", only: :test},
# Core
{:bcrypt_elixir, "~> 2.0"},
{:phoenix, "~> 1.5.6"},
{:phoenix_ecto, "~> 4.2.1"},
{:ecto_sql, "~> 3.4"},
{:postgrex, ">= 0.0.0"},
{:phoenix_live_view, "~> 0.14.8"},
{:phoenix_html, "~> 2.11"},
{:phoenix_live_dashboard, "~> 0.2.7"},
{:telemetry_metrics, "~> 0.4"},
{:telemetry_poller, "~> 0.4"},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.0"},
# Authentication & Authorization
{:comeonin, "~> 5.3"},
{:bodyguard, "~> 2.4"},
# Email
{:bamboo, "~> 1.5"},
# GraphQL API
{:absinthe, "~> 1.5"},
{:absinthe_plug, "~> 1.5"},
{:absinthe_phoenix, "~> 2.0"},
{:dataloader, "~> 1.0.0"},
# HTTP Helpers
{:plug_canonical_host, "~> 2.0"},
{:ex_oauth2_provider, "~> 0.5.6"},
{:slugify, "~> 1.3"},
{:phoenix_markdown, "~> 1.0"},
{:html_sanitize_ex, "~> 1.4.1"},
{:earmark, "~> 1.4"},
{:oauther, "~> 1.1"},
{:oauth2, "~> 2.0"},
{:extwitter, "~> 0.12.2"},
# Uploads
{:waffle, "~> 1.1"},
{:waffle_ecto, "~> 0.0.9"},
{:ex_aws, "~> 2.1.2"},
{:ex_aws_s3, "~> 2.0"},
{:hackney, "~> 1.9"},
{:sweet_xml, "~> 0.6"},
# Other
{:hcaptcha, "~> 0.0.1"},
{:stripity_stripe, "~> 2.0"},
{:eqrcode, "~> 0.1.7"},
{:scrivener_ecto, "~> 2.0"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to install project dependencies and perform other setup tasks, run:
#
# $ mix setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
setup: ["deps.get", "ecto.setup", "cmd npm install --prefix assets"],
"ecto.seed": ["run priv/repo/seeds.#{Mix.env()}.exs"],
"ecto.setup": ["ecto.create", "ecto.migrate", "ecto.seed"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: [
"ecto.create --quiet",
"ecto.migrate --quiet",
"run priv/repo/seeds/categories.exs",
"test"
],
code_quality: ["format", "credo --strict"]
]
end
end
| 29.525862 | 84 | 0.513869 |
ffda4fc7433369ad443a28eba11e14119cf2791e | 2,456 | exs | Elixir | test/exchange_test.exs | lainor/amqpx | a1327978ba000eeba9d9badb354833ebe23f2a36 | [
"MIT"
] | null | null | null | test/exchange_test.exs | lainor/amqpx | a1327978ba000eeba9d9badb354833ebe23f2a36 | [
"MIT"
] | null | null | null | test/exchange_test.exs | lainor/amqpx | a1327978ba000eeba9d9badb354833ebe23f2a36 | [
"MIT"
] | null | null | null | defmodule ExchangeTest do
use ExUnit.Case
alias Amqpx.{Channel, Connection, Exchange}
setup do
{:ok, conn} = Connection.open(Application.get_env(:amqpx, :amqp_connection))
{:ok, chan} = Channel.open(conn)
on_exit(fn -> :ok = Connection.close(conn) end)
{:ok, conn: conn, chan: chan}
end
test "declare exchange with defaults", meta do
name = rand_name()
assert :ok = Exchange.declare(meta[:chan], name)
assert :ok = Exchange.delete(meta[:chan], name)
end
test "declare exchange with type direct", meta do
name = rand_name()
assert :ok = Exchange.declare(meta[:chan], name, :direct)
assert :ok = Exchange.delete(meta[:chan], name)
end
test "declare exchange with type topic", meta do
name = rand_name()
assert :ok = Exchange.declare(meta[:chan], name, :topic)
assert :ok = Exchange.delete(meta[:chan], name)
end
test "declare exchange with type fanout", meta do
name = rand_name()
assert :ok = Exchange.declare(meta[:chan], name, :fanout)
assert :ok = Exchange.delete(meta[:chan], name)
end
test "declare exchange with type headers", meta do
name = rand_name()
assert :ok = Exchange.declare(meta[:chan], name, :headers)
assert :ok = Exchange.delete(meta[:chan], name)
end
test "declare direct exchange with convenience function", meta do
name = rand_name()
assert :ok = Exchange.direct(meta[:chan], name)
assert :ok = Exchange.delete(meta[:chan], name)
end
test "declare fanout exchange with convenience function", meta do
name = rand_name()
assert :ok = Exchange.fanout(meta[:chan], name)
assert :ok = Exchange.delete(meta[:chan], name)
end
test "declare topic exchange with convenience function", meta do
name = rand_name()
assert :ok = Exchange.topic(meta[:chan], name)
assert :ok = Exchange.delete(meta[:chan], name)
end
test "bind and unbind exchange to/from another exchange", meta do
destination = rand_name()
source = rand_name()
assert :ok = Exchange.fanout(meta[:chan], destination)
assert :ok = Exchange.fanout(meta[:chan], source)
assert :ok = Exchange.bind(meta[:chan], destination, source)
assert :ok = Exchange.unbind(meta[:chan], destination, source)
assert :ok = Exchange.delete(meta[:chan], destination)
assert :ok = Exchange.delete(meta[:chan], source)
end
defp rand_name do
:crypto.strong_rand_bytes(8) |> Base.encode64()
end
end
| 32.315789 | 80 | 0.675081 |
ffda5afaa03108a62829b3b1b6aaaf0fa564a455 | 4,270 | ex | Elixir | lib/chat_api_web/controllers/slack_controller.ex | raditya3/papercups | 4657b258ee381ac0b7517e57e4d6261ce94b5871 | [
"MIT"
] | null | null | null | lib/chat_api_web/controllers/slack_controller.ex | raditya3/papercups | 4657b258ee381ac0b7517e57e4d6261ce94b5871 | [
"MIT"
] | null | null | null | lib/chat_api_web/controllers/slack_controller.ex | raditya3/papercups | 4657b258ee381ac0b7517e57e4d6261ce94b5871 | [
"MIT"
] | null | null | null | defmodule ChatApiWeb.SlackController do
use ChatApiWeb, :controller
require Logger
alias ChatApi.{
Messages,
Slack,
SlackAuthorizations,
SlackConversationThreads
}
action_fallback ChatApiWeb.FallbackController
@spec oauth(Plug.Conn.t(), map()) :: Plug.Conn.t()
def oauth(conn, %{"code" => code}) do
Logger.info("Code from Slack OAuth: #{inspect(code)}")
# TODO: improve error handling!
{:ok, response} = Slack.get_access_token(code)
Logger.info("Slack OAuth response: #{inspect(response)}")
%{body: body} = response
if Map.get(body, "ok") do
with %{account_id: account_id} <- conn.assigns.current_user,
%{
"access_token" => access_token,
"app_id" => app_id,
"bot_user_id" => bot_user_id,
"scope" => scope,
"token_type" => token_type,
"authed_user" => authed_user,
"team" => team,
"incoming_webhook" => incoming_webhook
} <- body,
%{"id" => authed_user_id} <- authed_user,
%{"id" => team_id, "name" => team_name} <- team,
%{
"channel" => channel,
"channel_id" => channel_id,
"configuration_url" => configuration_url,
"url" => webhook_url
} <- incoming_webhook do
params = %{
account_id: account_id,
access_token: access_token,
app_id: app_id,
authed_user_id: authed_user_id,
bot_user_id: bot_user_id,
scope: scope,
token_type: token_type,
channel: channel,
channel_id: channel_id,
configuration_url: configuration_url,
team_id: team_id,
team_name: team_name,
webhook_url: webhook_url
}
SlackAuthorizations.create_or_update(account_id, params)
json(conn, %{data: %{ok: true}})
else
_ ->
raise "Unrecognized OAuth response"
end
else
raise "OAuth access denied"
end
end
@spec authorization(Plug.Conn.t(), map()) :: Plug.Conn.t()
def authorization(conn, _payload) do
with %{account_id: account_id} <- conn.assigns.current_user do
auth = SlackAuthorizations.get_authorization_by_account(account_id)
case auth do
nil ->
json(conn, %{data: nil})
_ ->
json(conn, %{
data: %{
created_at: auth.inserted_at,
channel: auth.channel,
configuration_url: auth.configuration_url,
team_name: auth.team_name
}
})
end
end
end
@spec webhook(Plug.Conn.t(), map()) :: Plug.Conn.t()
def webhook(conn, payload) do
Logger.debug("Payload from Slack webhook: #{inspect(payload)}")
case payload do
%{"event" => event} ->
handle_event(event)
send_resp(conn, 200, "")
%{"challenge" => challenge} ->
send_resp(conn, 200, challenge)
_ ->
send_resp(conn, 200, "")
end
end
defp handle_event(%{"bot_id" => _bot_id} = _event) do
# Don't do anything on bot events for now
nil
end
defp handle_event(
%{
"type" => "message",
"text" => text,
"thread_ts" => thread_ts,
"channel" => channel,
"user" => user_id
} = event
) do
Logger.debug("Handling Slack event: #{inspect(event)}")
with {:ok, conversation} <- get_thread_conversation(thread_ts, channel) do
%{id: conversation_id, account_id: account_id} = conversation
sender_id = Slack.get_sender_id(conversation, user_id)
params = %{
"body" => text,
"conversation_id" => conversation_id,
"account_id" => account_id,
"user_id" => sender_id
}
params
|> Messages.create_and_fetch!()
|> Messages.broadcast_to_conversation!()
|> Messages.notify(:webhooks)
end
end
defp handle_event(_), do: nil
defp get_thread_conversation(thread_ts, channel) do
case SlackConversationThreads.get_by_slack_thread_ts(thread_ts, channel) do
%{conversation: conversation} -> {:ok, conversation}
_ -> {:error, "Not found"}
end
end
end
| 27.371795 | 79 | 0.571897 |
ffda7f094b9dc4b9464653ea735118d04e148ce1 | 411 | exs | Elixir | priv/repo/migrations/20170402032240_create_event.exs | itmaster921/crowdfunding-api | 994d75d6c28356b45531b71251eff39a309d414b | [
"MIT"
] | 1 | 2020-05-24T15:18:36.000Z | 2020-05-24T15:18:36.000Z | priv/repo/migrations/20170402032240_create_event.exs | itmaster921/crowdfunding-api | 994d75d6c28356b45531b71251eff39a309d414b | [
"MIT"
] | null | null | null | priv/repo/migrations/20170402032240_create_event.exs | itmaster921/crowdfunding-api | 994d75d6c28356b45531b71251eff39a309d414b | [
"MIT"
] | null | null | null | defmodule CrowdfundingApi.Repo.Migrations.CreateEvent do
use Ecto.Migration
def change do
create table(:events) do
add :title, :string
add :country, :string
add :date, :datetime
add :image_url, :string
add :description, :text
add :project_id, references(:projects, on_delete: :nothing)
timestamps()
end
create index(:events, [:project_id])
end
end
| 21.631579 | 65 | 0.659367 |
ffda91dbe28f093b5ae5791b0b34b7ec2e037c4f | 584 | exs | Elixir | exercises/practice/series/mix.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | exercises/practice/series/mix.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 583 | 2017-06-19T10:48:40.000Z | 2022-03-28T21:43:12.000Z | exercises/practice/series/mix.exs | devtayls/elixir | 67824de8209ff1b6ed2f736deedfb5bd815130ca | [
"MIT"
] | 228 | 2017-07-05T07:09:32.000Z | 2022-03-27T08:59:08.000Z | defmodule StringSeries.MixProject do
use Mix.Project
def project do
[
app: :string_series,
version: "0.1.0",
# elixir: "~> 1.8",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
]
end
end
| 20.137931 | 87 | 0.582192 |
ffdab1e8ba21d2cb3c449d84e3fb6d9e9b27c79b | 517 | exs | Elixir | test/fixtures/with_path_dep/mix.exs | TraceyOnim/sbom | 55ee8867d4c7f53b6e282a8aad584435c82d5379 | [
"BSD-3-Clause"
] | 21 | 2019-10-24T16:19:21.000Z | 2021-07-21T07:40:53.000Z | test/fixtures/with_path_dep/mix.exs | TraceyOnim/sbom | 55ee8867d4c7f53b6e282a8aad584435c82d5379 | [
"BSD-3-Clause"
] | 5 | 2020-05-31T05:30:51.000Z | 2022-03-21T14:31:18.000Z | test/fixtures/with_path_dep/mix.exs | TraceyOnim/sbom | 55ee8867d4c7f53b6e282a8aad584435c82d5379 | [
"BSD-3-Clause"
] | 8 | 2019-11-02T08:08:57.000Z | 2022-03-02T07:19:24.000Z | defmodule WithPathDep.MixProject do
use Mix.Project
def project do
[
app: :with_path_dep,
version: "0.1.0",
elixir: "~> 1.8",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:sample1, path: "../sample1"},
{:jason, "~> 1.1"}
]
end
end
| 17.827586 | 59 | 0.566731 |
ffdad0ebf51e982916001622ee268080876bf66e | 4,701 | ex | Elixir | lib/lfu_cache.ex | SteffenBauer/arc_cache | a95c24d415ce4050f53ae948a6cc8c70dd193c36 | [
"MIT"
] | 1 | 2016-12-10T18:56:40.000Z | 2016-12-10T18:56:40.000Z | lib/lfu_cache.ex | SteffenBauer/arc_cache | a95c24d415ce4050f53ae948a6cc8c70dd193c36 | [
"MIT"
] | null | null | null | lib/lfu_cache.ex | SteffenBauer/arc_cache | a95c24d415ce4050f53ae948a6cc8c70dd193c36 | [
"MIT"
] | null | null | null | defmodule LfuCache do
@moduledoc """
This modules implements a simple LRU cache, using 2 ets tables for it.
For using it, you need to start it:
iex> LruCache.start_link(:my_cache, 1000)
Or add it to your supervisor tree, like: `worker(LruCache, [:my_cache, 1000])`
## Using
iex> LruCache.start_link(:my_cache, 1000)
{:ok, #PID<0.60.0>}
iex> LruCache.put(:my_cache, "id", "value")
:ok
iex> LruCache.get(:my_cache, "id", touch = false)
"value"
## Design
First ets table save the key values pairs, the second save order of inserted elements.
"""
use GenServer
@table LfuCache
defstruct table: nil, freq_table: nil, size: 0
@doc """
Creates an LRU of the given size as part of a supervision tree with a registered name
"""
def start_link(name, size) do
Agent.start_link(__MODULE__, :init, [name, size], [name: name])
end
@doc """
Stores the given `value` under `key` in `cache`. If `cache` already has `key`, the stored
`value` is replaced by the new one. This updates the order of LFU cache.
"""
def put(name, key, value), do: Agent.get(name, __MODULE__, :handle_put, [key, value])
@doc """
Updates a `value` in `cache`. If `key` is not present in `cache` then nothing is done.
`touch` defines, if the order in LFU should be actualized. The function assumes, that
the element exists in a cache.
"""
def update(name, key, value, touch \\ true) do
if :ets.update_element(name, key, {3, value}) do
touch && Agent.get(name, __MODULE__, :handle_touch, [key])
end
:ok
end
@doc """
Returns the `value` associated with `key` in `cache`. If `cache` does not contain `key`,
returns nil. `touch` defines, if the order in LFU should be actualized.
"""
def get(name, key, touch \\ true) do
case :ets.lookup(name, key) do
[{_, _, value}] ->
touch && Agent.get(name, __MODULE__, :handle_touch, [key])
value
[] ->
nil
end
end
@doc """
Removes the entry stored under the given `key` from cache.
"""
def delete(name, key), do: Agent.get(name, __MODULE__, :handle_delete, [key])
@doc """
Returns the contents of the LFU Cache.
Only for debugging / testing uses.
"""
def debug(name) do
Agent.get(name, __MODULE__, :handle_debug, [])
end
def handle_debug(state) do
get_all(state, :ets.first(state.freq_table), [])
end
@doc false
defp get_all(state, lastresult, result) do
case lastresult do
:"$end_of_table" -> result
uniq ->
[{^uniq, key}] = :ets.lookup(state.freq_table, uniq)
case :ets.lookup(state.table, key) do
[{^key, ^uniq, value}] ->
get_all(state, :ets.next(state.freq_table, uniq), result ++ [{key, value}])
end
end
end
@doc false
def init(name, size) do
freq_table = :"#{name}_freq"
:ets.new(freq_table, [:named_table, :ordered_set])
:ets.new(name, [:named_table, :public, {:read_concurrency, true}])
%LfuCache{freq_table: freq_table, table: name, size: size}
end
@doc false
def handle_put(state, key, value) do
case :ets.lookup(state.table, key) do
[{_, old_freq, _}] ->
update_entry(state, key, old_freq, value)
_ ->
new_entry(state, key, value)
clean_oversize(state)
end
:ok
end
defp new_entry(state, key, value) do
freq = :rand.uniform
:ets.insert(state.freq_table, {freq, key})
:ets.insert(state.table, {key, freq, value})
end
defp update_entry(state, key, freq, value) do
:ets.delete(state.freq_table, freq)
newfreq = freq + 1.0
:ets.insert(state.freq_table, {newfreq, key})
:ets.update_element(state.table, key, [{2, newfreq}, {3, value}])
end
@doc false
def handle_touch(state, key) do
case :ets.lookup(state.table, key) do
[{_, old_freq, _}] ->
new_freq = old_freq + 1.0
:ets.delete(state.freq_table, old_freq)
:ets.insert(state.freq_table, {new_freq, key})
:ets.update_element(state.table, key, [{2, new_freq}])
_ ->
nil
end
:ok
end
@doc false
def handle_delete(state, key) do
  # Drops the key from both the main table and the frequency index; a miss
  # is a silent no-op. Always replies :ok.
  with [{_key, freq, _value}] <- :ets.lookup(state.table, key) do
    :ets.delete(state.freq_table, freq)
    :ets.delete(state.table, key)
  end

  :ok
end
# Evicts the single least-frequently-used entry when the cache has grown past
# its configured size. Returns true on eviction, nil otherwise — callers
# (handle_put/3) ignore the result.
defp clean_oversize(%{freq_table: freq_table, table: table, size: size}) do
  if :ets.info(table, :size) > size do
    lowest_freq = :ets.first(freq_table)
    [{_freq, victim}] = :ets.lookup(freq_table, lowest_freq)
    :ets.delete(freq_table, lowest_freq)
    :ets.delete(table, victim)
    true
  else
    nil
  end
end
end
| 27.816568 | 91 | 0.623697 |
ffdb024793a8d1b087f8f0fa45ce7632eb6d1127 | 525 | ex | Elixir | lib/tarantool/api.ex | xronos-i-am/tarantool.ex | 929ee9dd3da1b6dd3e38961870e758fba3e6aa8c | [
"MIT"
] | 27 | 2016-02-11T08:38:46.000Z | 2022-02-23T21:48:13.000Z | lib/tarantool/api.ex | xronos-i-am/tarantool.ex | 929ee9dd3da1b6dd3e38961870e758fba3e6aa8c | [
"MIT"
] | 5 | 2016-06-04T18:32:00.000Z | 2019-04-14T08:14:46.000Z | lib/tarantool/api.ex | xronos-i-am/tarantool.ex | 929ee9dd3da1b6dd3e38961870e758fba3e6aa8c | [
"MIT"
] | 6 | 2016-03-01T08:34:09.000Z | 2019-02-09T17:29:56.000Z | defmodule Tarantool.Api do
  # `defrequest` comes from this helper; it presumably generates one client
  # function per declared Tarantool IPROTO request, taking the listed
  # parameters — confirm the exact generated signature in Tarantool.Api.Helper.
  use Tarantool.Api.Helper

  # Connection liveness check (no parameters).
  defrequest :ping
  # Authentication against the Tarantool instance.
  defrequest :auth, [:username, :password]
  # Data access / mutation requests, keyed by space (and index where needed).
  defrequest :select, [:space_id, :limit, :key, :index_id, :offset, :iterator]
  defrequest :insert, [:space_id, :tuple ]
  defrequest :replace, [:space_id, :tuple]
  defrequest :update, [:space_id, :index_id, :key, :tuple]
  defrequest :delete, [:space_id, :index_id, :key]
  # Server-side execution: stored function call and Lua expression eval.
  defrequest :call, [:function_name, :tuple]
  defrequest :eval, [:expr, :tuple]
  defrequest :upsert, [:space_id, :tuple, :ops]
end
| 35 | 78 | 0.697143 |
ffdb0bd4f8e79a8b10bfc7ad6f7997b2aff9f808 | 1,045 | ex | Elixir | lib/scenic/primitive/style/scissor.ex | bruceme/scenic | bd8a1e63c122c44cc263e1fb5dfab2547ce8ef43 | [
"Apache-2.0"
] | null | null | null | lib/scenic/primitive/style/scissor.ex | bruceme/scenic | bd8a1e63c122c44cc263e1fb5dfab2547ce8ef43 | [
"Apache-2.0"
] | null | null | null | lib/scenic/primitive/style/scissor.ex | bruceme/scenic | bd8a1e63c122c44cc263e1fb5dfab2547ce8ef43 | [
"Apache-2.0"
] | null | null | null | #
# Created by Boyd Multerer on 2018-06-06.
# Copyright © 2018-2021 Kry10 Limited. All rights reserved.
#
defmodule Scenic.Primitive.Style.Scissor do
  @moduledoc """
  Style that clips all drawing to a "scissor rectangle".

  ## Example

  ```elixir
  graph
  |> triangle( {{0,40},{40,40},{40,0}}
    miter_limit: 2,
    fill: :green,
    scissor: {20, 40}
  )
  ```

  ## Data Format

  `{width, height}`

  * `width` - Width of the scissor rectangle.
  * `height` - Height of the scissor rectangle.
  """
  use Scenic.Primitive.Style

  # ============================================================================
  # data verification and serialization

  # Accepts any {number, number} pair; everything else yields a tagged error
  # with a human-readable, ANSI-colored explanation.
  @doc false
  def validate({width, height}) when is_number(width) and is_number(height),
    do: {:ok, {width, height}}

  def validate(data) do
    {
      :error,
      """
      #{IO.ANSI.red()}Invalid Scissor specification
      Received: #{inspect(data)}
      #{IO.ANSI.yellow()}
      The :scissor style must be {width, height}#{IO.ANSI.default_color()}
      """
    }
  end
end
| 20.490196 | 80 | 0.558852 |
ffdb111757edc6729cd36ebc760a565140cd46ab | 25,424 | ex | Elixir | lib/aws/generated/mturk.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/mturk.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/mturk.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.MTurk do
@moduledoc """
Amazon Mechanical Turk API Reference
"""
@doc """
The `AcceptQualificationRequest` operation approves a Worker's request for a
Qualification.
Only the owner of the Qualification type can grant a Qualification request for
that type.
A successful request for the `AcceptQualificationRequest` operation returns with
no errors and an empty body.
"""
def accept_qualification_request(client, input, options \\ []) do
request(client, "AcceptQualificationRequest", input, options)
end
@doc """
The `ApproveAssignment` operation approves the results of a completed
assignment.
Approving an assignment initiates two payments from the Requester's Amazon.com
account
* The Worker who submitted the results is paid the reward specified
in the HIT.
* Amazon Mechanical Turk fees are debited.
If the Requester's account does not have adequate funds for these payments, the
call to ApproveAssignment returns an exception, and the approval is not
processed. You can include an optional feedback message with the approval, which
the Worker can see in the Status section of the web site.
You can also call this operation for assignments that were previous rejected and
approve them by explicitly overriding the previous rejection. This only works on
rejected assignments that were submitted within the previous 30 days and only if
the assignment's related HIT has not been deleted.
"""
def approve_assignment(client, input, options \\ []) do
request(client, "ApproveAssignment", input, options)
end
@doc """
The `AssociateQualificationWithWorker` operation gives a Worker a Qualification.
`AssociateQualificationWithWorker` does not require that the Worker submit a
Qualification request. It gives the Qualification directly to the Worker.
You can only assign a Qualification of a Qualification type that you created
(using the `CreateQualificationType` operation).
Note: `AssociateQualificationWithWorker` does not affect any pending
Qualification requests for the Qualification by the Worker. If you assign a
Qualification to a Worker, then later grant a Qualification request made by the
Worker, the granting of the request may modify the Qualification score. To
resolve a pending Qualification request without affecting the Qualification the
Worker already has, reject the request with the `RejectQualificationRequest`
operation.
"""
def associate_qualification_with_worker(client, input, options \\ []) do
request(client, "AssociateQualificationWithWorker", input, options)
end
@doc """
The `CreateAdditionalAssignmentsForHIT` operation increases the maximum number
of assignments of an existing HIT.
To extend the maximum number of assignments, specify the number of additional
assignments.
HITs created with fewer than 10 assignments cannot be extended to
have 10 or more assignments. Attempting to add assignments in a way that brings
the total number of assignments for a HIT from fewer than 10 assignments to 10
or more assignments will result in an
`AWS.MechanicalTurk.InvalidMaximumAssignmentsIncrease` exception.
HITs that were created before July 22, 2015 cannot be extended.
Attempting to extend HITs that were created before July 22, 2015 will result in
an `AWS.MechanicalTurk.HITTooOldForExtension` exception.
"""
def create_additional_assignments_for_h_i_t(client, input, options \\ []) do
request(client, "CreateAdditionalAssignmentsForHIT", input, options)
end
@doc """
The `CreateHIT` operation creates a new Human Intelligence Task (HIT).
The new HIT is made available for Workers to find and accept on the Amazon
Mechanical Turk website.
This operation allows you to specify a new HIT by passing in values for the
properties of the HIT, such as its title, reward amount and number of
assignments. When you pass these values to `CreateHIT`, a new HIT is created for
you, with a new `HITTypeID`. The HITTypeID can be used to create additional HITs
in the future without needing to specify common parameters such as the title,
description and reward amount each time.
An alternative way to create HITs is to first generate a HITTypeID using the
`CreateHITType` operation and then call the `CreateHITWithHITType` operation.
This is the recommended best practice for Requesters who are creating large
numbers of HITs.
CreateHIT also supports several ways to provide question data: by providing a
value for the `Question` parameter that fully specifies the contents of the HIT,
or by providing a `HitLayoutId` and associated `HitLayoutParameters`.
If a HIT is created with 10 or more maximum assignments, there is an additional
fee. For more information, see [Amazon Mechanical Turk Pricing](https://requester.mturk.com/pricing).
"""
def create_h_i_t(client, input, options \\ []) do
request(client, "CreateHIT", input, options)
end
@doc """
The `CreateHITType` operation creates a new HIT type.
This operation allows you to define a standard set of HIT properties to use when
creating HITs. If you register a HIT type with values that match an existing HIT
type, the HIT type ID of the existing type will be returned.
"""
def create_h_i_t_type(client, input, options \\ []) do
request(client, "CreateHITType", input, options)
end
@doc """
The `CreateHITWithHITType` operation creates a new Human Intelligence Task (HIT)
using an existing HITTypeID generated by the `CreateHITType` operation.
This is an alternative way to create HITs from the `CreateHIT` operation. This
is the recommended best practice for Requesters who are creating large numbers
of HITs.
CreateHITWithHITType also supports several ways to provide question data: by
providing a value for the `Question` parameter that fully specifies the contents
of the HIT, or by providing a `HitLayoutId` and associated
`HitLayoutParameters`.
If a HIT is created with 10 or more maximum assignments, there is an additional
fee. For more information, see [Amazon Mechanical Turk Pricing](https://requester.mturk.com/pricing).
"""
def create_h_i_t_with_h_i_t_type(client, input, options \\ []) do
request(client, "CreateHITWithHITType", input, options)
end
@doc """
The `CreateQualificationType` operation creates a new Qualification type, which
is represented by a `QualificationType` data structure.
"""
def create_qualification_type(client, input, options \\ []) do
request(client, "CreateQualificationType", input, options)
end
@doc """
The `CreateWorkerBlock` operation allows you to prevent a Worker from working on
your HITs.
For example, you can block a Worker who is producing poor quality work. You can
block up to 100,000 Workers.
"""
def create_worker_block(client, input, options \\ []) do
request(client, "CreateWorkerBlock", input, options)
end
@doc """
The `DeleteHIT` operation is used to delete HIT that is no longer needed.
Only the Requester who created the HIT can delete it.
You can only dispose of HITs that are in the `Reviewable` state, with all of
their submitted assignments already either approved or rejected. If you call the
DeleteHIT operation on a HIT that is not in the `Reviewable` state (for example,
that has not expired, or still has active assignments), or on a HIT that is
Reviewable but without all of its submitted assignments already approved or
rejected, the service will return an error.
HITs are automatically disposed of after 120 days.
After you dispose of a HIT, you can no longer approve the HIT's
rejected assignments.
Disposed HITs are not returned in results for the ListHITs
operation.
Disposing HITs can improve the performance of operations such as
ListReviewableHITs and ListHITs.
"""
def delete_h_i_t(client, input, options \\ []) do
request(client, "DeleteHIT", input, options)
end
@doc """
The `DeleteQualificationType` deletes a Qualification type and deletes any HIT
types that are associated with the Qualification type.
This operation does not revoke Qualifications already assigned to Workers
because the Qualifications might be needed for active HITs. If there are any
pending requests for the Qualification type, Amazon Mechanical Turk rejects
those requests. After you delete a Qualification type, you can no longer use it
to create HITs or HIT types.
DeleteQualificationType must wait for all the HITs that use the deleted
Qualification type to be deleted before completing. It may take up to 48 hours
before DeleteQualificationType completes and the unique name of the
Qualification type is available for reuse with CreateQualificationType.
"""
def delete_qualification_type(client, input, options \\ []) do
request(client, "DeleteQualificationType", input, options)
end
@doc """
The `DeleteWorkerBlock` operation allows you to reinstate a blocked Worker to
work on your HITs.
This operation reverses the effects of the CreateWorkerBlock operation. You need
the Worker ID to use this operation. If the Worker ID is missing or invalid,
this operation fails and returns the message “WorkerId is invalid.” If the
specified Worker is not blocked, this operation returns successfully.
"""
def delete_worker_block(client, input, options \\ []) do
request(client, "DeleteWorkerBlock", input, options)
end
@doc """
The `DisassociateQualificationFromWorker` revokes a previously granted
Qualification from a user.
You can provide a text message explaining why the Qualification was revoked. The
user who had the Qualification can see this message.
"""
def disassociate_qualification_from_worker(client, input, options \\ []) do
request(client, "DisassociateQualificationFromWorker", input, options)
end
@doc """
The `GetAccountBalance` operation retrieves the amount of money in your Amazon
Mechanical Turk account.
"""
def get_account_balance(client, input, options \\ []) do
request(client, "GetAccountBalance", input, options)
end
@doc """
The `GetAssignment` operation retrieves the details of the specified Assignment.
"""
def get_assignment(client, input, options \\ []) do
request(client, "GetAssignment", input, options)
end
@doc """
The `GetFileUploadURL` operation generates and returns a temporary URL.
You use the temporary URL to retrieve a file uploaded by a Worker as an answer
to a FileUploadAnswer question for a HIT. The temporary URL is generated the
instant the GetFileUploadURL operation is called, and is valid for 60 seconds.
You can get a temporary file upload URL any time until the HIT is disposed.
After the HIT is disposed, any uploaded files are deleted, and cannot be
retrieved. Pending Deprecation on December 12, 2017. The Answer Specification
structure will no longer support the `FileUploadAnswer` element to be used for
the QuestionForm data structure. Instead, we recommend that Requesters who want
to create HITs asking Workers to upload files to use Amazon S3.
"""
def get_file_upload_u_r_l(client, input, options \\ []) do
request(client, "GetFileUploadURL", input, options)
end
@doc """
The `GetHIT` operation retrieves the details of the specified HIT.
"""
def get_h_i_t(client, input, options \\ []) do
request(client, "GetHIT", input, options)
end
@doc """
The `GetQualificationScore` operation returns the value of a Worker's
Qualification for a given Qualification type.
To get a Worker's Qualification, you must know the Worker's ID. The Worker's ID
is included in the assignment data returned by the `ListAssignmentsForHIT`
operation.
Only the owner of a Qualification type can query the value of a Worker's
Qualification of that type.
"""
def get_qualification_score(client, input, options \\ []) do
request(client, "GetQualificationScore", input, options)
end
@doc """
The `GetQualificationType`operation retrieves information about a Qualification
type using its ID.
"""
def get_qualification_type(client, input, options \\ []) do
request(client, "GetQualificationType", input, options)
end
@doc """
The `ListAssignmentsForHIT` operation retrieves completed assignments for a HIT.
You can use this operation to retrieve the results for a HIT.
You can get assignments for a HIT at any time, even if the HIT is not yet
Reviewable. If a HIT requested multiple assignments, and has received some
results but has not yet become Reviewable, you can still retrieve the partial
results with this operation.
Use the AssignmentStatus parameter to control which set of assignments for a HIT
are returned. The ListAssignmentsForHIT operation can return submitted
assignments awaiting approval, or it can return assignments that have already
been approved or rejected. You can set AssignmentStatus=Approved,Rejected to get
assignments that have already been approved and rejected together in one result
set.
Only the Requester who created the HIT can retrieve the assignments for that
HIT.
Results are sorted and divided into numbered pages and the operation returns a
single page of results. You can use the parameters of the operation to control
sorting and pagination.
"""
def list_assignments_for_h_i_t(client, input, options \\ []) do
request(client, "ListAssignmentsForHIT", input, options)
end
@doc """
The `ListBonusPayments` operation retrieves the amounts of bonuses you have paid
to Workers for a given HIT or assignment.
"""
def list_bonus_payments(client, input, options \\ []) do
request(client, "ListBonusPayments", input, options)
end
@doc """
The `ListHITs` operation returns all of a Requester's HITs.
The operation returns HITs of any status, except for HITs that have been deleted
of with the DeleteHIT operation or that have been auto-deleted.
"""
def list_h_i_ts(client, input, options \\ []) do
request(client, "ListHITs", input, options)
end
@doc """
The `ListHITsForQualificationType` operation returns the HITs that use the given
Qualification type for a Qualification requirement.
The operation returns HITs of any status, except for HITs that have been deleted
with the `DeleteHIT` operation or that have been auto-deleted.
"""
def list_h_i_ts_for_qualification_type(client, input, options \\ []) do
request(client, "ListHITsForQualificationType", input, options)
end
@doc """
The `ListQualificationRequests` operation retrieves requests for Qualifications
of a particular Qualification type.
The owner of the Qualification type calls this operation to poll for pending
requests, and accepts them using the AcceptQualification operation.
"""
def list_qualification_requests(client, input, options \\ []) do
request(client, "ListQualificationRequests", input, options)
end
@doc """
The `ListQualificationTypes` operation returns a list of Qualification types,
filtered by an optional search term.
"""
def list_qualification_types(client, input, options \\ []) do
request(client, "ListQualificationTypes", input, options)
end
@doc """
The `ListReviewPolicyResultsForHIT` operation retrieves the computed results and
the actions taken in the course of executing your Review Policies for a given
HIT.
For information about how to specify Review Policies when you call CreateHIT,
see Review Policies. The ListReviewPolicyResultsForHIT operation can return
results for both Assignment-level and HIT-level review results.
"""
def list_review_policy_results_for_h_i_t(client, input, options \\ []) do
request(client, "ListReviewPolicyResultsForHIT", input, options)
end
@doc """
The `ListReviewableHITs` operation retrieves the HITs with Status equal to
Reviewable or Status equal to Reviewing that belong to the Requester calling the
operation.
"""
def list_reviewable_h_i_ts(client, input, options \\ []) do
request(client, "ListReviewableHITs", input, options)
end
@doc """
The `ListWorkersBlocks` operation retrieves a list of Workers who are blocked
from working on your HITs.
"""
def list_worker_blocks(client, input, options \\ []) do
request(client, "ListWorkerBlocks", input, options)
end
@doc """
The `ListWorkersWithQualificationType` operation returns all of the Workers that
have been associated with a given Qualification type.
"""
def list_workers_with_qualification_type(client, input, options \\ []) do
request(client, "ListWorkersWithQualificationType", input, options)
end
@doc """
The `NotifyWorkers` operation sends an email to one or more Workers that you
specify with the Worker ID.
You can specify up to 100 Worker IDs to send the same message with a single call
to the NotifyWorkers operation. The NotifyWorkers operation will send a
notification email to a Worker only if you have previously approved or rejected
work from the Worker.
"""
def notify_workers(client, input, options \\ []) do
request(client, "NotifyWorkers", input, options)
end
@doc """
The `RejectAssignment` operation rejects the results of a completed assignment.
You can include an optional feedback message with the rejection, which the
Worker can see in the Status section of the web site. When you include a
feedback message with the rejection, it helps the Worker understand why the
assignment was rejected, and can improve the quality of the results the Worker
submits in the future.
Only the Requester who created the HIT can reject an assignment for the HIT.
"""
def reject_assignment(client, input, options \\ []) do
request(client, "RejectAssignment", input, options)
end
@doc """
The `RejectQualificationRequest` operation rejects a user's request for a
Qualification.
You can provide a text message explaining why the request was rejected. The
Worker who made the request can see this message.
"""
def reject_qualification_request(client, input, options \\ []) do
request(client, "RejectQualificationRequest", input, options)
end
@doc """
The `SendBonus` operation issues a payment of money from your account to a
Worker.
This payment happens separately from the reward you pay to the Worker when you
approve the Worker's assignment. The SendBonus operation requires the Worker's
ID and the assignment ID as parameters to initiate payment of the bonus. You
must include a message that explains the reason for the bonus payment, as the
Worker may not be expecting the payment. Amazon Mechanical Turk collects a fee
for bonus payments, similar to the HIT listing fee. This operation fails if your
account does not have enough funds to pay for both the bonus and the fees.
"""
def send_bonus(client, input, options \\ []) do
request(client, "SendBonus", input, options)
end
@doc """
The `SendTestEventNotification` operation causes Amazon Mechanical Turk to send
a notification message as if a HIT event occurred, according to the provided
notification specification.
This allows you to test notifications without setting up notifications for a
real HIT type and trying to trigger them using the website. When you call this
operation, the service attempts to send the test notification immediately.
"""
def send_test_event_notification(client, input, options \\ []) do
request(client, "SendTestEventNotification", input, options)
end
@doc """
The `UpdateExpirationForHIT` operation allows you update the expiration time of
a HIT.
If you update it to a time in the past, the HIT will be immediately expired.
"""
def update_expiration_for_h_i_t(client, input, options \\ []) do
request(client, "UpdateExpirationForHIT", input, options)
end
@doc """
The `UpdateHITReviewStatus` operation updates the status of a HIT.
If the status is Reviewable, this operation can update the status to Reviewing,
or it can revert a Reviewing HIT back to the Reviewable status.
"""
def update_h_i_t_review_status(client, input, options \\ []) do
request(client, "UpdateHITReviewStatus", input, options)
end
@doc """
The `UpdateHITTypeOfHIT` operation allows you to change the HITType properties
of a HIT.
This operation disassociates the HIT from its old HITType properties and
associates it with the new HITType properties. The HIT takes on the properties
of the new HITType in place of the old ones.
"""
def update_h_i_t_type_of_h_i_t(client, input, options \\ []) do
request(client, "UpdateHITTypeOfHIT", input, options)
end
@doc """
The `UpdateNotificationSettings` operation creates, updates, disables or
re-enables notifications for a HIT type.
If you call the UpdateNotificationSettings operation for a HIT type that already
has a notification specification, the operation replaces the old specification
with a new one. You can call the UpdateNotificationSettings operation to enable
or disable notifications for the HIT type, without having to modify the
notification specification itself by providing updates to the Active status
without specifying a new notification specification. To change the Active status
of a HIT type's notifications, the HIT type must already have a notification
specification, or one must be provided in the same call to
`UpdateNotificationSettings`.
"""
def update_notification_settings(client, input, options \\ []) do
request(client, "UpdateNotificationSettings", input, options)
end
@doc """
The `UpdateQualificationType` operation modifies the attributes of an existing
Qualification type, which is represented by a QualificationType data structure.
Only the owner of a Qualification type can modify its attributes.
Most attributes of a Qualification type can be changed after the type has been
created. However, the Name and Keywords fields cannot be modified. The
RetryDelayInSeconds parameter can be modified or added to change the delay or to
enable retries, but RetryDelayInSeconds cannot be used to disable retries.
You can use this operation to update the test for a Qualification type. The test
is updated based on the values specified for the Test, TestDurationInSeconds and
AnswerKey parameters. All three parameters specify the updated test. If you are
updating the test for a type, you must specify the Test and
TestDurationInSeconds parameters. The AnswerKey parameter is optional; omitting
it specifies that the updated test does not have an answer key.
If you omit the Test parameter, the test for the Qualification type is
unchanged. There is no way to remove a test from a Qualification type that has
one. If the type already has a test, you cannot update it to be AutoGranted. If
the Qualification type does not have a test and one is provided by an update,
the type will henceforth have a test.
If you want to update the test duration or answer key for an existing test
without changing the questions, you must specify a Test parameter with the
original questions, along with the updated values.
If you provide an updated Test but no AnswerKey, the new test will not have an
answer key. Requests for such Qualifications must be granted manually.
You can also update the AutoGranted and AutoGrantedValue attributes of the
Qualification type.
"""
def update_qualification_type(client, input, options \\ []) do
request(client, "UpdateQualificationType", input, options)
end
# Central dispatcher used by every public API function above: builds a
# SigV4-signed AWS JSON-1.1 POST for the given MTurk `action` and executes it.
@spec request(AWS.Client.t(), binary(), map(), list()) ::
        {:ok, map() | nil, map()}
        | {:error, term()}
defp request(client, action, input, options) do
  # NOTE(review): region is hard-coded to "" by the code generator —
  # presumably sign_v4/build_host treat MTurk as a non-regional service;
  # confirm against the generator templates before changing.
  client = %{client | service: "mturk-requester",
    region: ""}

  host = build_host("mturk-requester", client)
  url = build_url(host, client)

  # X-Amz-Target selects the operation within the JSON-1.1 protocol.
  headers = [
    {"Host", host},
    {"Content-Type", "application/x-amz-json-1.1"},
    {"X-Amz-Target", "MTurkRequesterServiceV20170117.#{action}"}
  ]

  # Encode before signing: the payload is part of the SigV4 signature input.
  payload = encode!(client, input)
  headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
  post(client, url, payload, headers, options)
end
# Executes the signed POST and normalizes the response:
#   200 with a body    -> {:ok, decoded_map, raw_response}
#   200 with "" body   -> {:ok, nil, raw_response}  (the `if` without else)
#   any other status   -> {:error, {:unexpected_response, response}}
#   transport failure  -> {:error, reason} passed through unchanged
defp post(client, url, payload, headers, options) do
  case AWS.Client.request(client, :post, url, payload, headers, options) do
    {:ok, %{status_code: 200, body: body} = response} ->
      body = if body != "", do: decode!(client, body)
      {:ok, body, response}

    {:ok, response} ->
      {:error, {:unexpected_response, response}}

    error = {:error, _reason} -> error
  end
end
# Resolves the host to contact, in clause order:
#   region "local" + explicit endpoint -> use the endpoint verbatim
#   region "local"                     -> "localhost"
#   otherwise                          -> "<service-prefix>.<endpoint>"
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
  endpoint
end

defp build_host(_endpoint_prefix, %{region: "local"}) do
  "localhost"
end

defp build_host(endpoint_prefix, %{endpoint: endpoint}) do
  "#{endpoint_prefix}.#{endpoint}"
end
# Builds the request URL from the client's configured protocol and port plus
# the host resolved by build_host/2.
defp build_url(host, %{:proto => proto, :port => port}) do
  "#{proto}://#{host}:#{port}/"
end
# JSON (de)serialization delegated to the codec configured on the client.
defp encode!(client, payload) do
  AWS.Client.encode!(client, payload, :json)
end

defp decode!(client, payload) do
  AWS.Client.decode!(client, payload, :json)
end
end
| 40.548644 | 103 | 0.751259 |
ffdb1c69a96356d32f799023327594ce8c9fd751 | 800 | ex | Elixir | chapter_9/todo_superdatabase/lib/todo/server/store.ex | librity/elixir_in_action | d2df441ceb7e6a0d3f18bc3ab3c59570125fcdec | [
"MIT"
] | 3 | 2021-04-22T11:55:58.000Z | 2021-08-22T13:19:56.000Z | chapter_7/todo_persistent/lib/todo/server/store.ex | librity/elixir_in_action | d2df441ceb7e6a0d3f18bc3ab3c59570125fcdec | [
"MIT"
] | null | null | null | chapter_7/todo_persistent/lib/todo/server/store.ex | librity/elixir_in_action | d2df441ceb7e6a0d3f18bc3ab3c59570125fcdec | [
"MIT"
] | 3 | 2021-04-22T21:19:45.000Z | 2021-08-22T13:20:03.000Z | defmodule Todo.Server.Store do
def start, do: GenServer.start(Todo.Server, nil, name: __MODULE__)
def all(), do: GenServer.call(__MODULE__, {:all})
def by_date(date), do: GenServer.call(__MODULE__, {:by_date, date})
def entries(date), do: GenServer.call(__MODULE__, {:entries, date})
def by_title(title), do: GenServer.call(__MODULE__, {:by_title, title})
def by_id(id), do: GenServer.call(__MODULE__, {:by_id, id})
def add_entry(entry), do: GenServer.cast(__MODULE__, {:add_entry, entry})
def update_entry(entry), do: GenServer.cast(__MODULE__, {:update_entry, entry})
def update_entry(entry_id, updater_fun),
do: GenServer.cast(__MODULE__, {:update_entry, entry_id, updater_fun})
def delete_entry(entry_id), do: GenServer.cast(__MODULE__, {:delete_entry, entry_id})
end
| 44.444444 | 87 | 0.7325 |
ffdb31094152867c3a37cc308dac494e77adc832 | 769 | ex | Elixir | lib/rankings_web/controllers/session_controller.ex | spkane31/cc-rankings | 0acda9f3ca35abd4874ab06478ad22aa473811bf | [
"MIT"
] | 1 | 2020-06-28T19:31:07.000Z | 2020-06-28T19:31:07.000Z | lib/rankings_web/controllers/session_controller.ex | spkane31/rankings | 0acda9f3ca35abd4874ab06478ad22aa473811bf | [
"MIT"
] | 5 | 2019-07-25T17:00:39.000Z | 2019-07-25T17:12:07.000Z | lib/rankings_web/controllers/session_controller.ex | spkane31/rankings | 0acda9f3ca35abd4874ab06478ad22aa473811bf | [
"MIT"
] | null | null | null | defmodule RankingsWeb.SessionController do
use RankingsWeb, :controller
def new(conn, _) do
render(conn, "new.html")
end
def create(
conn,
%{"session" => %{"username" => username, "password" => pass}}
) do
case Rankings.Accounts.authenticate_by_username_and_pass(username, pass) do
{:ok, user} ->
conn
|> RankingsWeb.Auth.login(user)
|> put_flash(:info, "Welcome back!")
|> redirect(to: Routes.page_path(conn, :index))
{:error, _reason} ->
conn
|> put_flash(:error, "Invalid username/password combination")
|> render("new.html")
end
end
def delete(conn, _) do
conn
|> RankingsWeb.Auth.logout()
|> redirect(to: Routes.page_path(conn, :index))
end
end
| 24.806452 | 79 | 0.609883 |
ffdb479427be3d5a43c8dec3f27f32c1ebdf3d16 | 122 | ex | Elixir | test/support/list/repo_found.ex | fartek/barna | cdcc7a89fa3e66459568863cf7713651abb0c688 | [
"MIT"
] | null | null | null | test/support/list/repo_found.ex | fartek/barna | cdcc7a89fa3e66459568863cf7713651abb0c688 | [
"MIT"
] | null | null | null | test/support/list/repo_found.ex | fartek/barna | cdcc7a89fa3e66459568863cf7713651abb0c688 | [
"MIT"
] | null | null | null | defmodule Barna.List.RepoFound do
def all(query) do
send(self(), {:list_repo_found, query})
[:result]
end
end
| 17.428571 | 43 | 0.672131 |
ffdb576f30ef09efdcfeb869206500db00f1c1f3 | 2,381 | ex | Elixir | clients/slides/lib/google_api/slides/v1/model/group_objects_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/slides/lib/google_api/slides/v1/model/group_objects_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/slides/lib/google_api/slides/v1/model/group_objects_request.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Slides.V1.Model.GroupObjectsRequest do
@moduledoc """
Groups objects to create an object group. For example, groups PageElements to create a Group on the same page as all the children.
## Attributes
* `childrenObjectIds` (*type:* `list(String.t)`, *default:* `nil`) - The object IDs of the objects to group. Only page elements can be grouped. There should be at least two page elements on the same page that are not already in another group. Some page elements, such as videos, tables and placeholders cannot be grouped.
* `groupObjectId` (*type:* `String.t`, *default:* `nil`) - A user-supplied object ID for the group to be created. If you specify an ID, it must be unique among all pages and page elements in the presentation. The ID must start with an alphanumeric character or an underscore (matches regex `[a-zA-Z0-9_]`); remaining characters may include those as well as a hyphen or colon (matches regex `[a-zA-Z0-9_-:]`). The length of the ID must not be less than 5 or greater than 50. If you don't specify an ID, a unique one is generated.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:childrenObjectIds => list(String.t()) | nil,
:groupObjectId => String.t() | nil
}
field(:childrenObjectIds, type: :list)
field(:groupObjectId)
end
defimpl Poison.Decoder, for: GoogleApi.Slides.V1.Model.GroupObjectsRequest do
def decode(value, options) do
GoogleApi.Slides.V1.Model.GroupObjectsRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Slides.V1.Model.GroupObjectsRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 47.62 | 532 | 0.740865 |
ffdb814468bfd01b4490122088cb0bb87dc362f7 | 3,311 | ex | Elixir | clients/display_video/lib/google_api/display_video/v1/model/advertiser_general_config.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/display_video/lib/google_api/display_video/v1/model/advertiser_general_config.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/display_video/lib/google_api/display_video/v1/model/advertiser_general_config.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DisplayVideo.V1.Model.AdvertiserGeneralConfig do
@moduledoc """
General settings of an advertiser.
## Attributes
* `currencyCode` (*type:* `String.t`, *default:* `nil`) - Required. Immutable. Advertiser's currency in ISO 4217 format. Accepted codes and the currencies they represent are: Currency Code : Currency Name * `ARS` : Argentine Peso * `AUD` : Australian Dollar * `BRL` : Brazilian Real * `CAD` : Canadian Dollar * `CHF` : Swiss Franc * `CLP` : Chilean Peso * `CNY` : Chinese Yuan * `COP` : Colombian Peso * `CZK` : Czech Koruna * `DKK` : Danish Krone * `EGP` : Egyption Pound * `EUR` : Euro * `GBP` : British Pound * `HKD` : Hong Kong Dollar * `HUF` : Hungarian Forint * `IDR` : Indonesian Rupiah * `ILS` : Israeli Shekel * `INR` : Indian Rupee * `JPY` : Japanese Yen * `KRW` : South Korean Won * `MXN` : Mexican Pesos * `MYR` : Malaysian Ringgit * `NGN` : Nigerian Naira * `NOK` : Norwegian Krone * `NZD` : New Zealand Dollar * `PEN` : Peruvian Nuevo Sol * `PLN` : Polish Zloty * `RON` : New Romanian Leu * `RUB` : Russian Ruble * `SEK` : Swedish Krona * `TRY` : Turkish Lira * `TWD` : New Taiwan Dollar * `USD` : US Dollar * `ZAR` : South African Rand
* `domainUrl` (*type:* `String.t`, *default:* `nil`) - Required. The domain URL of the advertiser's primary website. The system will send this information to publishers that require website URL to associate a campaign with an advertiser. Provide a URL with no path or query string, beginning with `http:` or `https:`. For example, http://www.example.com
* `timeZone` (*type:* `String.t`, *default:* `nil`) - Output only. The standard TZ database name of the advertiser's time zone. For example, `America/New_York`. See more at: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones For CM360 hybrid advertisers, the time zone is the same as that of the associated CM360 account; for third-party only advertisers, the time zone is the same as that of the parent partner.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:currencyCode => String.t(),
:domainUrl => String.t(),
:timeZone => String.t()
}
field(:currencyCode)
field(:domainUrl)
field(:timeZone)
end
defimpl Poison.Decoder, for: GoogleApi.DisplayVideo.V1.Model.AdvertiserGeneralConfig do
def decode(value, options) do
GoogleApi.DisplayVideo.V1.Model.AdvertiserGeneralConfig.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DisplayVideo.V1.Model.AdvertiserGeneralConfig do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 62.471698 | 1,054 | 0.713682 |
ffdb8b7f441391e75bd860bbdebb7f55fdbe49e0 | 555 | ex | Elixir | lib/gexbot_web/views/changeset_view.ex | jakewilkins/gexbot | b92cd5bae794becaedbee9c8884e4b66942198b7 | [
"WTFPL"
] | null | null | null | lib/gexbot_web/views/changeset_view.ex | jakewilkins/gexbot | b92cd5bae794becaedbee9c8884e4b66942198b7 | [
"WTFPL"
] | null | null | null | lib/gexbot_web/views/changeset_view.ex | jakewilkins/gexbot | b92cd5bae794becaedbee9c8884e4b66942198b7 | [
"WTFPL"
] | null | null | null | defmodule GexbotWeb.ChangesetView do
use GexbotWeb, :view
@doc """
Traverses and translates changeset errors.
See `Ecto.Changeset.traverse_errors/2` and
`GexbotWeb.ErrorHelpers.translate_error/1` for more details.
"""
def translate_errors(changeset) do
Ecto.Changeset.traverse_errors(changeset, &translate_error/1)
end
def render("error.json", %{changeset: changeset}) do
# When encoded, the changeset returns its errors
# as a JSON object. So we just pass it forward.
%{errors: translate_errors(changeset)}
end
end
| 27.75 | 65 | 0.736937 |
ffdb8beac37944cf49d8ffa9de1b8c10db7260b0 | 17,039 | exs | Elixir | test/mgp/accounts_test.exs | imprest/mgp | 61457315243d0e0c26713601b9930ca34a116a16 | [
"MIT"
] | null | null | null | test/mgp/accounts_test.exs | imprest/mgp | 61457315243d0e0c26713601b9930ca34a116a16 | [
"MIT"
] | 2 | 2020-12-22T12:30:58.000Z | 2021-05-19T10:07:26.000Z | test/mgp/accounts_test.exs | imprest/mgp | 61457315243d0e0c26713601b9930ca34a116a16 | [
"MIT"
] | null | null | null | defmodule Mgp.AccountsTest do
use Mgp.DataCase
alias Mgp.Accounts
import Mgp.AccountsFixtures
alias Mgp.Accounts.{User, UserToken}
describe "get_user_by_email/1" do
test "does not return the user if the email does not exist" do
refute Accounts.get_user_by_email("unknown@example.com")
end
test "returns the user if the email exists" do
%{id: id} = user = user_fixture()
assert %User{id: ^id} = Accounts.get_user_by_email(user.email)
end
end
describe "get_user_by_email_and_password/2" do
test "does not return the user if the email does not exist" do
refute Accounts.get_user_by_email_and_password("unknown@example.com", "hello world!")
end
test "does not return the user if the password is not valid" do
user = user_fixture()
refute Accounts.get_user_by_email_and_password(user.email, "invalid")
end
test "returns the user if the email and password are valid" do
%{id: id} = user = user_fixture()
assert %User{id: ^id} =
Accounts.get_user_by_email_and_password(user.email, valid_user_password())
end
end
describe "get_user!/1" do
test "raises if id is invalid" do
assert_raise Ecto.NoResultsError, fn ->
Accounts.get_user!(-1)
end
end
test "returns the user with the given id" do
%{id: id} = user = user_fixture()
assert %User{id: ^id} = Accounts.get_user!(user.id)
end
end
describe "register_user/1" do
test "requires email and password to be set" do
{:error, changeset} = Accounts.register_user(%{})
assert %{
password: ["can't be blank"],
email: ["can't be blank"]
} = errors_on(changeset)
end
test "validates email and password when given" do
{:error, changeset} = Accounts.register_user(%{email: "not valid", password: "not valid"})
assert %{
email: ["must have the @ sign and no spaces"],
password: ["should be at least 12 character(s)"]
} = errors_on(changeset)
end
test "validates maximum values for email and password for security" do
too_long = String.duplicate("db", 100)
{:error, changeset} = Accounts.register_user(%{email: too_long, password: too_long})
assert "should be at most 160 character(s)" in errors_on(changeset).email
assert "should be at most 80 character(s)" in errors_on(changeset).password
end
test "validates email uniqueness" do
%{email: email} = user_fixture()
{:error, changeset} = Accounts.register_user(%{email: email})
assert "has already been taken" in errors_on(changeset).email
# Now try with the upper cased email too, to check that email case is ignored.
{:error, changeset} = Accounts.register_user(%{email: String.upcase(email)})
assert "has already been taken" in errors_on(changeset).email
end
test "registers users with a hashed password" do
email = unique_user_email()
{:ok, user} = Accounts.register_user(%{email: email, password: valid_user_password()})
assert user.email == email
assert is_binary(user.hashed_password)
assert is_nil(user.confirmed_at)
assert is_nil(user.password)
end
end
describe "change_user_registration/2" do
test "returns a changeset" do
assert %Ecto.Changeset{} = changeset = Accounts.change_user_registration(%User{})
assert changeset.required == [:password, :email]
end
test "allows fields to be set" do
email = unique_user_email()
password = valid_user_password()
changeset =
Accounts.change_user_registration(%User{}, %{"email" => email, "password" => password})
assert changeset.valid?
assert get_change(changeset, :email) == email
assert get_change(changeset, :password) == password
assert is_nil(get_change(changeset, :hashed_password))
end
end
describe "change_user_email/2" do
test "returns a user changeset" do
assert %Ecto.Changeset{} = changeset = Accounts.change_user_email(%User{})
assert changeset.required == [:email]
end
end
describe "apply_user_email/3" do
setup do
%{user: user_fixture()}
end
test "requires email to change", %{user: user} do
{:error, changeset} = Accounts.apply_user_email(user, valid_user_password(), %{})
assert %{email: ["did not change"]} = errors_on(changeset)
end
test "validates email", %{user: user} do
{:error, changeset} =
Accounts.apply_user_email(user, valid_user_password(), %{email: "not valid"})
assert %{email: ["must have the @ sign and no spaces"]} = errors_on(changeset)
end
test "validates maximum value for email for security", %{user: user} do
too_long = String.duplicate("db", 100)
{:error, changeset} =
Accounts.apply_user_email(user, valid_user_password(), %{email: too_long})
assert "should be at most 160 character(s)" in errors_on(changeset).email
end
test "validates email uniqueness", %{user: user} do
%{email: email} = user_fixture()
{:error, changeset} =
Accounts.apply_user_email(user, valid_user_password(), %{email: email})
assert "has already been taken" in errors_on(changeset).email
end
test "validates current password", %{user: user} do
{:error, changeset} =
Accounts.apply_user_email(user, "invalid", %{email: unique_user_email()})
assert %{current_password: ["is not valid"]} = errors_on(changeset)
end
test "applies the email without persisting it", %{user: user} do
email = unique_user_email()
{:ok, user} = Accounts.apply_user_email(user, valid_user_password(), %{email: email})
assert user.email == email
assert Accounts.get_user!(user.id).email != email
end
end
describe "deliver_update_email_instructions/3" do
setup do
%{user: user_fixture()}
end
test "sends token through notification", %{user: user} do
token =
extract_user_token(fn url ->
Accounts.deliver_update_email_instructions(user, "current@example.com", url)
end)
{:ok, token} = Base.url_decode64(token, padding: false)
assert user_token = Repo.get_by(UserToken, token: :crypto.hash(:sha256, token))
assert user_token.user_id == user.id
assert user_token.sent_to == user.email
assert user_token.context == "change:current@example.com"
end
end
describe "update_user_email/2" do
setup do
user = user_fixture()
email = unique_user_email()
token =
extract_user_token(fn url ->
Accounts.deliver_update_email_instructions(%{user | email: email}, user.email, url)
end)
%{user: user, token: token, email: email}
end
test "updates the email with a valid token", %{user: user, token: token, email: email} do
assert Accounts.update_user_email(user, token) == :ok
changed_user = Repo.get!(User, user.id)
assert changed_user.email != user.email
assert changed_user.email == email
assert changed_user.confirmed_at
assert changed_user.confirmed_at != user.confirmed_at
refute Repo.get_by(UserToken, user_id: user.id)
end
test "does not update email with invalid token", %{user: user} do
assert Accounts.update_user_email(user, "oops") == :error
assert Repo.get!(User, user.id).email == user.email
assert Repo.get_by(UserToken, user_id: user.id)
end
test "does not update email if user email changed", %{user: user, token: token} do
assert Accounts.update_user_email(%{user | email: "current@example.com"}, token) == :error
assert Repo.get!(User, user.id).email == user.email
assert Repo.get_by(UserToken, user_id: user.id)
end
test "does not update email if token expired", %{user: user, token: token} do
{1, nil} = Repo.update_all(UserToken, set: [inserted_at: ~N[2020-01-01 00:00:00]])
assert Accounts.update_user_email(user, token) == :error
assert Repo.get!(User, user.id).email == user.email
assert Repo.get_by(UserToken, user_id: user.id)
end
end
describe "change_user_password/2" do
test "returns a user changeset" do
assert %Ecto.Changeset{} = changeset = Accounts.change_user_password(%User{})
assert changeset.required == [:password]
end
test "allows fields to be set" do
changeset =
Accounts.change_user_password(%User{}, %{
"password" => "new valid password"
})
assert changeset.valid?
assert get_change(changeset, :password) == "new valid password"
assert is_nil(get_change(changeset, :hashed_password))
end
end
describe "update_user_password/3" do
setup do
%{user: user_fixture()}
end
test "validates password", %{user: user} do
{:error, changeset} =
Accounts.update_user_password(user, valid_user_password(), %{
password: "not valid",
password_confirmation: "another"
})
assert %{
password: ["should be at least 12 character(s)"],
password_confirmation: ["does not match password"]
} = errors_on(changeset)
end
test "validates maximum values for password for security", %{user: user} do
too_long = String.duplicate("db", 100)
{:error, changeset} =
Accounts.update_user_password(user, valid_user_password(), %{password: too_long})
assert "should be at most 80 character(s)" in errors_on(changeset).password
end
test "validates current password", %{user: user} do
{:error, changeset} =
Accounts.update_user_password(user, "invalid", %{password: valid_user_password()})
assert %{current_password: ["is not valid"]} = errors_on(changeset)
end
test "updates the password", %{user: user} do
{:ok, user} =
Accounts.update_user_password(user, valid_user_password(), %{
password: "new valid password"
})
assert is_nil(user.password)
assert Accounts.get_user_by_email_and_password(user.email, "new valid password")
end
test "deletes all tokens for the given user", %{user: user} do
_ = Accounts.generate_user_session_token(user)
{:ok, _} =
Accounts.update_user_password(user, valid_user_password(), %{
password: "new valid password"
})
refute Repo.get_by(UserToken, user_id: user.id)
end
end
describe "generate_user_session_token/1" do
setup do
%{user: user_fixture()}
end
test "generates a token", %{user: user} do
token = Accounts.generate_user_session_token(user)
assert user_token = Repo.get_by(UserToken, token: token)
assert user_token.context == "session"
# Creating the same token for another user should fail
assert_raise Ecto.ConstraintError, fn ->
Repo.insert!(%UserToken{
token: user_token.token,
user_id: user_fixture().id,
context: "session"
})
end
end
end
describe "get_user_by_session_token/1" do
setup do
user = user_fixture()
token = Accounts.generate_user_session_token(user)
%{user: user, token: token}
end
test "returns user by token", %{user: user, token: token} do
assert session_user = Accounts.get_user_by_session_token(token)
assert session_user.id == user.id
end
test "does not return user for invalid token" do
refute Accounts.get_user_by_session_token("oops")
end
test "does not return user for expired token", %{token: token} do
{1, nil} = Repo.update_all(UserToken, set: [inserted_at: ~N[2020-01-01 00:00:00]])
refute Accounts.get_user_by_session_token(token)
end
end
describe "delete_session_token/1" do
test "deletes the token" do
user = user_fixture()
token = Accounts.generate_user_session_token(user)
assert Accounts.delete_session_token(token) == :ok
refute Accounts.get_user_by_session_token(token)
end
end
describe "deliver_user_confirmation_instructions/2" do
setup do
%{user: user_fixture()}
end
test "sends token through notification", %{user: user} do
token =
extract_user_token(fn url ->
Accounts.deliver_user_confirmation_instructions(user, url)
end)
{:ok, token} = Base.url_decode64(token, padding: false)
assert user_token = Repo.get_by(UserToken, token: :crypto.hash(:sha256, token))
assert user_token.user_id == user.id
assert user_token.sent_to == user.email
assert user_token.context == "confirm"
end
end
describe "confirm_user/2" do
setup do
user = user_fixture()
token =
extract_user_token(fn url ->
Accounts.deliver_user_confirmation_instructions(user, url)
end)
%{user: user, token: token}
end
test "confirms the email with a valid token", %{user: user, token: token} do
assert {:ok, confirmed_user} = Accounts.confirm_user(token)
assert confirmed_user.confirmed_at
assert confirmed_user.confirmed_at != user.confirmed_at
assert Repo.get!(User, user.id).confirmed_at
refute Repo.get_by(UserToken, user_id: user.id)
end
test "does not confirm with invalid token", %{user: user} do
assert Accounts.confirm_user("oops") == :error
refute Repo.get!(User, user.id).confirmed_at
assert Repo.get_by(UserToken, user_id: user.id)
end
test "does not confirm email if token expired", %{user: user, token: token} do
{1, nil} = Repo.update_all(UserToken, set: [inserted_at: ~N[2020-01-01 00:00:00]])
assert Accounts.confirm_user(token) == :error
refute Repo.get!(User, user.id).confirmed_at
assert Repo.get_by(UserToken, user_id: user.id)
end
end
describe "deliver_user_reset_password_instructions/2" do
setup do
%{user: user_fixture()}
end
test "sends token through notification", %{user: user} do
token =
extract_user_token(fn url ->
Accounts.deliver_user_reset_password_instructions(user, url)
end)
{:ok, token} = Base.url_decode64(token, padding: false)
assert user_token = Repo.get_by(UserToken, token: :crypto.hash(:sha256, token))
assert user_token.user_id == user.id
assert user_token.sent_to == user.email
assert user_token.context == "reset_password"
end
end
describe "get_user_by_reset_password_token/1" do
setup do
user = user_fixture()
token =
extract_user_token(fn url ->
Accounts.deliver_user_reset_password_instructions(user, url)
end)
%{user: user, token: token}
end
test "returns the user with valid token", %{user: %{id: id}, token: token} do
assert %User{id: ^id} = Accounts.get_user_by_reset_password_token(token)
assert Repo.get_by(UserToken, user_id: id)
end
test "does not return the user with invalid token", %{user: user} do
refute Accounts.get_user_by_reset_password_token("oops")
assert Repo.get_by(UserToken, user_id: user.id)
end
test "does not return the user if token expired", %{user: user, token: token} do
{1, nil} = Repo.update_all(UserToken, set: [inserted_at: ~N[2020-01-01 00:00:00]])
refute Accounts.get_user_by_reset_password_token(token)
assert Repo.get_by(UserToken, user_id: user.id)
end
end
describe "reset_user_password/2" do
setup do
%{user: user_fixture()}
end
test "validates password", %{user: user} do
{:error, changeset} =
Accounts.reset_user_password(user, %{
password: "not valid",
password_confirmation: "another"
})
assert %{
password: ["should be at least 12 character(s)"],
password_confirmation: ["does not match password"]
} = errors_on(changeset)
end
test "validates maximum values for password for security", %{user: user} do
too_long = String.duplicate("db", 100)
{:error, changeset} = Accounts.reset_user_password(user, %{password: too_long})
assert "should be at most 80 character(s)" in errors_on(changeset).password
end
test "updates the password", %{user: user} do
{:ok, updated_user} = Accounts.reset_user_password(user, %{password: "new valid password"})
assert is_nil(updated_user.password)
assert Accounts.get_user_by_email_and_password(user.email, "new valid password")
end
test "deletes all tokens for the given user", %{user: user} do
_ = Accounts.generate_user_session_token(user)
{:ok, _} = Accounts.reset_user_password(user, %{password: "new valid password"})
refute Repo.get_by(UserToken, user_id: user.id)
end
end
describe "inspect/2" do
test "does not include password" do
refute inspect(%User{password: "123456"}) =~ "password: \"123456\""
end
end
end
| 33.740594 | 97 | 0.659076 |
ffdb99253d6dd85bfd369cb829d74006612c20c9 | 37,500 | ex | Elixir | lib/aws/generated/efs.ex | andrewhr/aws-elixir | 861dc2fafca50a2b2f83badba4cdcb44b5b0c171 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/efs.ex | andrewhr/aws-elixir | 861dc2fafca50a2b2f83badba4cdcb44b5b0c171 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/efs.ex | andrewhr/aws-elixir | 861dc2fafca50a2b2f83badba4cdcb44b5b0c171 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.EFS do
@moduledoc """
Amazon Elastic File System
Amazon Elastic File System (Amazon EFS) provides simple, scalable file storage
for use with Amazon EC2 instances in the Amazon Web Services Cloud.
With Amazon EFS, storage capacity is elastic, growing and shrinking
automatically as you add and remove files, so your applications have the storage
they need, when they need it. For more information, see the [Amazon Elastic File System API
Reference](https://docs.aws.amazon.com/efs/latest/ug/api-reference.html) and the
[Amazon Elastic File System User Guide](https://docs.aws.amazon.com/efs/latest/ug/whatisefs.html).
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2015-02-01",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "elasticfilesystem",
global?: false,
protocol: "rest-json",
service_id: "EFS",
signature_version: "v4",
signing_name: "elasticfilesystem",
target_prefix: nil
}
end
@doc """
Creates an EFS access point.
An access point is an application-specific view into an EFS file system that
applies an operating system user and group, and a file system path, to any file
system request made through the access point. The operating system user and
group override any identity information provided by the NFS client. The file
system path is exposed as the access point's root directory. Applications using
the access point can only access data in its own directory and below. To learn
more, see [Mounting a file system using EFS access points](https://docs.aws.amazon.com/efs/latest/ug/efs-access-points.html).
This operation requires permissions for the
`elasticfilesystem:CreateAccessPoint` action.
"""
def create_access_point(%Client{} = client, input, options \\ []) do
url_path = "/2015-02-01/access-points"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Creates a new, empty file system.
The operation requires a creation token in the request that Amazon EFS uses to
ensure idempotent creation (calling the operation with same creation token has
no effect). If a file system does not currently exist that is owned by the
caller's Amazon Web Services account with the specified creation token, this
operation does the following:
* Creates a new, empty file system. The file system will have an
Amazon EFS assigned ID, and an initial lifecycle state `creating`.
* Returns with the description of the created file system.
Otherwise, this operation returns a `FileSystemAlreadyExists` error with the ID
of the existing file system.
For basic use cases, you can use a randomly generated UUID for the creation
token.
The idempotent operation allows you to retry a `CreateFileSystem` call without
risk of creating an extra file system. This can happen when an initial call
fails in a way that leaves it uncertain whether or not a file system was
actually created. An example might be that a transport level timeout occurred or
your connection was reset. As long as you use the same creation token, if the
initial call had succeeded in creating a file system, the client can learn of
its existence from the `FileSystemAlreadyExists` error.
For more information, see [Creating a file system](https://docs.aws.amazon.com/efs/latest/ug/creating-using-create-fs.html#creating-using-create-fs-part1)
in the *Amazon EFS User Guide*.
The `CreateFileSystem` call returns while the file system's lifecycle state is
still `creating`. You can check the file system creation status by calling the
`DescribeFileSystems` operation, which among other things returns the file
system state.
This operation accepts an optional `PerformanceMode` parameter that you choose
for your file system. We recommend `generalPurpose` performance mode for most
file systems. File systems using the `maxIO` performance mode can scale to
higher levels of aggregate throughput and operations per second with a tradeoff
of slightly higher latencies for most file operations. The performance mode
can't be changed after the file system has been created. For more information,
see [Amazon EFS performance modes](https://docs.aws.amazon.com/efs/latest/ug/performance.html#performancemodes.html).
You can set the throughput mode for the file system using the `ThroughputMode`
parameter.
After the file system is fully created, Amazon EFS sets its lifecycle state to
`available`, at which point you can create one or more mount targets for the
file system in your VPC. For more information, see `CreateMountTarget`. You
mount your Amazon EFS file system on an EC2 instances in your VPC by using the
mount target. For more information, see [Amazon EFS: How it Works](https://docs.aws.amazon.com/efs/latest/ug/how-it-works.html).
This operation requires permissions for the `elasticfilesystem:CreateFileSystem`
action.
"""
def create_file_system(%Client{} = client, input, options \\ []) do
url_path = "/2015-02-01/file-systems"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
@doc """
Creates a mount target for a file system.
You can then mount the file system on EC2 instances by using the mount target.
You can create one mount target in each Availability Zone in your VPC. All EC2
instances in a VPC within a given Availability Zone share a single mount target
for a given file system. If you have multiple subnets in an Availability Zone,
you create a mount target in one of the subnets. EC2 instances do not need to be
in the same subnet as the mount target in order to access their file system.
You can create only one mount target for an EFS file system using One Zone
storage classes. You must create that mount target in the same Availability Zone
in which the file system is located. Use the `AvailabilityZoneName` and
`AvailabiltyZoneId` properties in the `DescribeFileSystems` response object to
get this information. Use the `subnetId` associated with the file system's
Availability Zone when creating the mount target.
For more information, see [Amazon EFS: How it Works](https://docs.aws.amazon.com/efs/latest/ug/how-it-works.html).
To create a mount target for a file system, the file system's lifecycle state
must be `available`. For more information, see `DescribeFileSystems`.
In the request, provide the following:
* The file system ID for which you are creating the mount target.
* A subnet ID, which determines the following:
* The VPC in which Amazon EFS creates the mount target
* The Availability Zone in which Amazon EFS creates the
mount target
* The IP address range from which Amazon EFS selects the
IP address of the mount target (if you don't specify an IP address in the
request)
After creating the mount target, Amazon EFS returns a response that includes, a
`MountTargetId` and an `IpAddress`. You use this IP address when mounting the
file system in an EC2 instance. You can also use the mount target's DNS name
when mounting the file system. The EC2 instance on which you mount the file
system by using the mount target can resolve the mount target's DNS name to its
IP address. For more information, see [How it Works: Implementation Overview](https://docs.aws.amazon.com/efs/latest/ug/how-it-works.html#how-it-works-implementation).
Note that you can create mount targets for a file system in only one VPC, and
there can be only one mount target per Availability Zone. That is, if the file
system already has one or more mount targets created for it, the subnet
specified in the request to add another mount target must meet the following
requirements:
* Must belong to the same VPC as the subnets of the existing mount
targets
* Must not be in the same Availability Zone as any of the subnets of
the existing mount targets
If the request satisfies the requirements, Amazon EFS does the following:
* Creates a new mount target in the specified subnet.
* Also creates a new network interface in the subnet as follows:
* If the request provides an `IpAddress`, Amazon EFS
assigns that IP address to the network interface. Otherwise, Amazon EFS assigns
a free address in the subnet (in the same way that the Amazon EC2
`CreateNetworkInterface` call does when a request does not specify a primary
private IP address).
* If the request provides `SecurityGroups`, this network
interface is associated with those security groups. Otherwise, it belongs to the
default security group for the subnet's VPC.
* Assigns the description `Mount target *fsmt-id* for
file system *fs-id* ` where ` *fsmt-id* ` is the mount target ID, and ` *fs-id*
` is the `FileSystemId`.
* Sets the `requesterManaged` property of the network
interface to `true`, and the `requesterId` value to `EFS`.
Each Amazon EFS mount target has one corresponding requester-managed EC2 network
interface. After the network interface is created, Amazon EFS sets the
`NetworkInterfaceId` field in the mount target's description to the network
interface ID, and the `IpAddress` field to its address. If network interface
creation fails, the entire `CreateMountTarget` operation fails.
The `CreateMountTarget` call returns only after creating the network interface,
but while the mount target state is still `creating`, you can check the mount
target creation status by calling the `DescribeMountTargets` operation, which
among other things returns the mount target state.
We recommend that you create a mount target in each of the Availability Zones.
There are cost considerations for using a file system in an Availability Zone
through a mount target created in another Availability Zone. For more
information, see [Amazon EFS](http://aws.amazon.com/efs/). In addition, by
always using a mount target local to the instance's Availability Zone, you
eliminate a partial failure scenario. If the Availability Zone in which your
mount target is created goes down, then you can't access your file system
through that mount target.
This operation requires permissions for the following action on the file system:
* `elasticfilesystem:CreateMountTarget`
This operation also requires permissions for the following Amazon EC2 actions:
* `ec2:DescribeSubnets`
* `ec2:DescribeNetworkInterfaces`
* `ec2:CreateNetworkInterface`
"""
def create_mount_target(%Client{} = client, input, options \\ []) do
url_path = "/2015-02-01/mount-targets"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
DEPRECATED - CreateTags is deprecated and not maintained.
Please use the API action to create tags for EFS resources.
Creates or overwrites tags associated with a file system. Each tag is a
key-value pair. If a tag key specified in the request already exists on the file
system, this operation overwrites its value with the value provided in the
request. If you add the `Name` tag to your file system, Amazon EFS returns it in
the response to the `DescribeFileSystems` operation.
This operation requires permission for the `elasticfilesystem:CreateTags`
action.
"""
def create_tags(%Client{} = client, file_system_id, input, options \\ []) do
url_path = "/2015-02-01/create-tags/#{AWS.Util.encode_uri(file_system_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Deletes the specified access point.
After deletion is complete, new clients can no longer connect to the access
points. Clients connected to the access point at the time of deletion will
continue to function until they terminate their connection.
This operation requires permissions for the
`elasticfilesystem:DeleteAccessPoint` action.
"""
def delete_access_point(%Client{} = client, access_point_id, input, options \\ []) do
url_path = "/2015-02-01/access-points/#{AWS.Util.encode_uri(access_point_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Deletes a file system, permanently severing access to its contents.
Upon return, the file system no longer exists and you can't access any contents
of the deleted file system.
You can't delete a file system that is in use. That is, if the file system has
any mount targets, you must first delete them. For more information, see
`DescribeMountTargets` and `DeleteMountTarget`.
The `DeleteFileSystem` call returns while the file system state is still
`deleting`. You can check the file system deletion status by calling the
`DescribeFileSystems` operation, which returns a list of file systems in your
account. If you pass file system ID or creation token for the deleted file
system, the `DescribeFileSystems` returns a `404 FileSystemNotFound` error.
This operation requires permissions for the `elasticfilesystem:DeleteFileSystem`
action.
"""
def delete_file_system(%Client{} = client, file_system_id, input, options \\ []) do
url_path = "/2015-02-01/file-systems/#{AWS.Util.encode_uri(file_system_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Deletes the `FileSystemPolicy` for the specified file system.
The default `FileSystemPolicy` goes into effect once the existing policy is
deleted. For more information about the default file system policy, see [Using Resource-based Policies with
EFS](https://docs.aws.amazon.com/efs/latest/ug/res-based-policies-efs.html).
This operation requires permissions for the
`elasticfilesystem:DeleteFileSystemPolicy` action.
"""
def delete_file_system_policy(%Client{} = client, file_system_id, input, options \\ []) do
url_path = "/2015-02-01/file-systems/#{AWS.Util.encode_uri(file_system_id)}/policy"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Deletes the specified mount target.
This operation forcibly breaks any mounts of the file system by using the mount
target that is being deleted, which might disrupt instances or applications
using those mounts. To avoid applications getting cut off abruptly, you might
consider unmounting any mounts of the mount target, if feasible. The operation
also deletes the associated network interface. Uncommitted writes might be lost,
but breaking a mount target using this operation does not corrupt the file
system itself. The file system you created remains. You can mount an EC2
instance in your VPC by using another mount target.
This operation requires permissions for the following action on the file system:
* `elasticfilesystem:DeleteMountTarget`
The `DeleteMountTarget` call returns while the mount target state is still
`deleting`. You can check the mount target deletion by calling the
`DescribeMountTargets` operation, which returns a list of mount target
descriptions for the given file system.
The operation also requires permissions for the following Amazon EC2 action on
the mount target's network interface:
* `ec2:DeleteNetworkInterface`
"""
def delete_mount_target(%Client{} = client, mount_target_id, input, options \\ []) do
url_path = "/2015-02-01/mount-targets/#{AWS.Util.encode_uri(mount_target_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
DEPRECATED - DeleteTags is deprecated and not maintained.
Please use the API action to remove tags from EFS resources.
Deletes the specified tags from a file system. If the `DeleteTags` request
includes a tag key that doesn't exist, Amazon EFS ignores it and doesn't cause
an error. For more information about tags and related restrictions, see [Tag restrictions](https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/cost-alloc-tags.html)
in the *Billing and Cost Management User Guide*.
This operation requires permissions for the `elasticfilesystem:DeleteTags`
action.
"""
def delete_tags(%Client{} = client, file_system_id, input, options \\ []) do
url_path = "/2015-02-01/delete-tags/#{AWS.Util.encode_uri(file_system_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Returns the description of a specific Amazon EFS access point if the
`AccessPointId` is provided.
If you provide an EFS `FileSystemId`, it returns descriptions of all access
points for that file system. You can provide either an `AccessPointId` or a
`FileSystemId` in the request, but not both.
This operation requires permissions for the
`elasticfilesystem:DescribeAccessPoints` action.
"""
def describe_access_points(
%Client{} = client,
access_point_id \\ nil,
file_system_id \\ nil,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/2015-02-01/access-points"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"NextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"MaxResults", max_results} | query_params]
else
query_params
end
query_params =
if !is_nil(file_system_id) do
[{"FileSystemId", file_system_id} | query_params]
else
query_params
end
query_params =
if !is_nil(access_point_id) do
[{"AccessPointId", access_point_id} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Returns the account preferences settings for the Amazon Web Services account
associated with the user making the request, in the current Amazon Web Services
Region.
For more information, see [Managing Amazon EFS resource IDs](efs/latest/ug/manage-efs-resource-ids.html).
"""
def describe_account_preferences(%Client{} = client, options \\ []) do
url_path = "/2015-02-01/account-preferences"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Returns the backup policy for the specified EFS file system.
"""
def describe_backup_policy(%Client{} = client, file_system_id, options \\ []) do
url_path = "/2015-02-01/file-systems/#{AWS.Util.encode_uri(file_system_id)}/backup-policy"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Returns the `FileSystemPolicy` for the specified EFS file system.
This operation requires permissions for the
`elasticfilesystem:DescribeFileSystemPolicy` action.
"""
def describe_file_system_policy(%Client{} = client, file_system_id, options \\ []) do
url_path = "/2015-02-01/file-systems/#{AWS.Util.encode_uri(file_system_id)}/policy"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Returns the description of a specific Amazon EFS file system if either the file
system `CreationToken` or the `FileSystemId` is provided.
Otherwise, it returns descriptions of all file systems owned by the caller's
Amazon Web Services account in the Amazon Web Services Region of the endpoint
that you're calling.
When retrieving all file system descriptions, you can optionally specify the
`MaxItems` parameter to limit the number of descriptions in a response.
Currently, this number is automatically set to 10. If more file system
descriptions remain, Amazon EFS returns a `NextMarker`, an opaque token, in the
response. In this case, you should send a subsequent request with the `Marker`
request parameter set to the value of `NextMarker`.
To retrieve a list of your file system descriptions, this operation is used in
an iterative process, where `DescribeFileSystems` is called first without the
`Marker` and then the operation continues to call it with the `Marker` parameter
set to the value of the `NextMarker` from the previous response until the
response has no `NextMarker`.
The order of file systems returned in the response of one `DescribeFileSystems`
call and the order of file systems returned across the responses of a multi-call
iteration is unspecified.
This operation requires permissions for the
`elasticfilesystem:DescribeFileSystems` action.
"""
def describe_file_systems(
%Client{} = client,
creation_token \\ nil,
file_system_id \\ nil,
marker \\ nil,
max_items \\ nil,
options \\ []
) do
url_path = "/2015-02-01/file-systems"
headers = []
query_params = []
query_params =
if !is_nil(max_items) do
[{"MaxItems", max_items} | query_params]
else
query_params
end
query_params =
if !is_nil(marker) do
[{"Marker", marker} | query_params]
else
query_params
end
query_params =
if !is_nil(file_system_id) do
[{"FileSystemId", file_system_id} | query_params]
else
query_params
end
query_params =
if !is_nil(creation_token) do
[{"CreationToken", creation_token} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Returns the current `LifecycleConfiguration` object for the specified Amazon EFS
file system.
EFS lifecycle management uses the `LifecycleConfiguration` object to identify
which files to move to the EFS Infrequent Access (IA) storage class. For a file
system without a `LifecycleConfiguration` object, the call returns an empty
array in the response.
When EFS Intelligent Tiering is enabled, `TransitionToPrimaryStorageClass` has a
value of `AFTER_1_ACCESS`.
This operation requires permissions for the
`elasticfilesystem:DescribeLifecycleConfiguration` operation.
"""
def describe_lifecycle_configuration(%Client{} = client, file_system_id, options \\ []) do
url_path =
"/2015-02-01/file-systems/#{AWS.Util.encode_uri(file_system_id)}/lifecycle-configuration"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Returns the security groups currently in effect for a mount target.
This operation requires that the network interface of the mount target has been
created and the lifecycle state of the mount target is not `deleted`.
This operation requires permissions for the following actions:
* `elasticfilesystem:DescribeMountTargetSecurityGroups` action on
the mount target's file system.
* `ec2:DescribeNetworkInterfaceAttribute` action on the mount
target's network interface.
"""
def describe_mount_target_security_groups(%Client{} = client, mount_target_id, options \\ []) do
url_path = "/2015-02-01/mount-targets/#{AWS.Util.encode_uri(mount_target_id)}/security-groups"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Returns the descriptions of all the current mount targets, or a specific mount
target, for a file system.
When requesting all of the current mount targets, the order of mount targets
returned in the response is unspecified.
This operation requires permissions for the
`elasticfilesystem:DescribeMountTargets` action, on either the file system ID
that you specify in `FileSystemId`, or on the file system of the mount target
that you specify in `MountTargetId`.
"""
def describe_mount_targets(
%Client{} = client,
access_point_id \\ nil,
file_system_id \\ nil,
marker \\ nil,
max_items \\ nil,
mount_target_id \\ nil,
options \\ []
) do
url_path = "/2015-02-01/mount-targets"
headers = []
query_params = []
query_params =
if !is_nil(mount_target_id) do
[{"MountTargetId", mount_target_id} | query_params]
else
query_params
end
query_params =
if !is_nil(max_items) do
[{"MaxItems", max_items} | query_params]
else
query_params
end
query_params =
if !is_nil(marker) do
[{"Marker", marker} | query_params]
else
query_params
end
query_params =
if !is_nil(file_system_id) do
[{"FileSystemId", file_system_id} | query_params]
else
query_params
end
query_params =
if !is_nil(access_point_id) do
[{"AccessPointId", access_point_id} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
DEPRECATED - The DeleteTags action is deprecated and not maintained.
Please use the API action to remove tags from EFS resources.
Returns the tags associated with a file system. The order of tags returned in
the response of one `DescribeTags` call and the order of tags returned across
the responses of a multiple-call iteration (when using pagination) is
unspecified.
This operation requires permissions for the `elasticfilesystem:DescribeTags`
action.
"""
def describe_tags(
%Client{} = client,
file_system_id,
marker \\ nil,
max_items \\ nil,
options \\ []
) do
url_path = "/2015-02-01/tags/#{AWS.Util.encode_uri(file_system_id)}/"
headers = []
query_params = []
query_params =
if !is_nil(max_items) do
[{"MaxItems", max_items} | query_params]
else
query_params
end
query_params =
if !is_nil(marker) do
[{"Marker", marker} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Lists all tags for a top-level EFS resource.
You must provide the ID of the resource that you want to retrieve the tags for.
This operation requires permissions for the
`elasticfilesystem:DescribeAccessPoints` action.
"""
def list_tags_for_resource(
%Client{} = client,
resource_id,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/2015-02-01/resource-tags/#{AWS.Util.encode_uri(resource_id)}"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"NextToken", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"MaxResults", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Modifies the set of security groups in effect for a mount target.
When you create a mount target, Amazon EFS also creates a new network interface.
For more information, see `CreateMountTarget`. This operation replaces the
security groups in effect for the network interface associated with a mount
target, with the `SecurityGroups` provided in the request. This operation
requires that the network interface of the mount target has been created and the
lifecycle state of the mount target is not `deleted`.
The operation requires permissions for the following actions:
* `elasticfilesystem:ModifyMountTargetSecurityGroups` action on the
mount target's file system.
* `ec2:ModifyNetworkInterfaceAttribute` action on the mount target's
network interface.
"""
def modify_mount_target_security_groups(
%Client{} = client,
mount_target_id,
input,
options \\ []
) do
url_path = "/2015-02-01/mount-targets/#{AWS.Util.encode_uri(mount_target_id)}/security-groups"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Use this operation to set the account preference in the current Amazon Web
Services Region to use long 17 character (63 bit) or short 8 character (32 bit)
resource IDs for new EFS file system and mount target resources.
All existing resource IDs are not affected by any changes you make. You can set
the ID preference during the opt-in period as EFS transitions to long resource
IDs. For more information, see [Managing Amazon EFS resource IDs](https://docs.aws.amazon.com/efs/latest/ug/manage-efs-resource-ids.html).
Starting in October, 2021, you will receive an error if you try to set the
account preference to use the short 8 character format resource ID. Contact
Amazon Web Services support if you receive an error and need to use short IDs
for file system and mount target resources.
"""
def put_account_preferences(%Client{} = client, input, options \\ []) do
url_path = "/2015-02-01/account-preferences"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Updates the file system's backup policy.
Use this action to start or stop automatic backups of the file system.
"""
def put_backup_policy(%Client{} = client, file_system_id, input, options \\ []) do
url_path = "/2015-02-01/file-systems/#{AWS.Util.encode_uri(file_system_id)}/backup-policy"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Applies an Amazon EFS `FileSystemPolicy` to an Amazon EFS file system.
A file system policy is an IAM resource-based policy and can contain multiple
policy statements. A file system always has exactly one file system policy,
which can be the default policy or an explicit policy set or updated using this
API operation. EFS file system policies have a 20,000 character limit. When an
explicit policy is set, it overrides the default policy. For more information
about the default file system policy, see [Default EFS File System Policy](https://docs.aws.amazon.com/efs/latest/ug/iam-access-control-nfs-efs.html#default-filesystempolicy).
EFS file system policies have a 20,000 character limit.
This operation requires permissions for the
`elasticfilesystem:PutFileSystemPolicy` action.
"""
def put_file_system_policy(%Client{} = client, file_system_id, input, options \\ []) do
url_path = "/2015-02-01/file-systems/#{AWS.Util.encode_uri(file_system_id)}/policy"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Enables lifecycle management by creating a new `LifecycleConfiguration` object.
A `LifecycleConfiguration` object defines when files in an Amazon EFS file
system are automatically transitioned to the lower-cost EFS Infrequent Access
(IA) storage class. To enable EFS Intelligent Tiering, set the value of
`TransitionToPrimaryStorageClass` to `AFTER_1_ACCESS`. For more information, see
[EFS Lifecycle Management](https://docs.aws.amazon.com/efs/latest/ug/lifecycle-management-efs.html).
Each Amazon EFS file system supports one lifecycle configuration, which applies
to all files in the file system. If a `LifecycleConfiguration` object already
exists for the specified file system, a `PutLifecycleConfiguration` call
modifies the existing configuration. A `PutLifecycleConfiguration` call with an
empty `LifecyclePolicies` array in the request body deletes any existing
`LifecycleConfiguration` and turns off lifecycle management for the file system.
In the request, specify the following:
* The ID for the file system for which you are enabling, disabling,
or modifying lifecycle management.
* A `LifecyclePolicies` array of `LifecyclePolicy` objects that
define when files are moved to the IA storage class. Amazon EFS requires that
each `LifecyclePolicy` object have only have a single transition, so the
`LifecyclePolicies` array needs to be structured with separate `LifecyclePolicy`
objects. See the example requests in the following section for more information.
This operation requires permissions for the
`elasticfilesystem:PutLifecycleConfiguration` operation.
To apply a `LifecycleConfiguration` object to an encrypted file system, you need
the same Key Management Service permissions as when you created the encrypted
file system.
"""
def put_lifecycle_configuration(%Client{} = client, file_system_id, input, options \\ []) do
url_path =
"/2015-02-01/file-systems/#{AWS.Util.encode_uri(file_system_id)}/lifecycle-configuration"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Creates a tag for an EFS resource.
You can create tags for EFS file systems and access points using this API
operation.
This operation requires permissions for the `elasticfilesystem:TagResource`
action.
"""
def tag_resource(%Client{} = client, resource_id, input, options \\ []) do
url_path = "/2015-02-01/resource-tags/#{AWS.Util.encode_uri(resource_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Removes tags from an EFS resource.
You can remove tags from EFS file systems and access points using this API
operation.
This operation requires permissions for the `elasticfilesystem:UntagResource`
action.
"""
def untag_resource(%Client{} = client, resource_id, input, options \\ []) do
url_path = "/2015-02-01/resource-tags/#{AWS.Util.encode_uri(resource_id)}"
headers = []
{query_params, input} =
[
{"TagKeys", "tagKeys"}
]
|> Request.build_params(input)
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Updates the throughput mode or the amount of provisioned throughput of an
existing file system.
"""
def update_file_system(%Client{} = client, file_system_id, input, options \\ []) do
url_path = "/2015-02-01/file-systems/#{AWS.Util.encode_uri(file_system_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
202
)
end
end
| 31.539108 | 177 | 0.695307 |
ffdbad76c1b3c66d403e4459585fc38bbd03287a | 858 | ex | Elixir | lib/relax_lib/image_magick/image.ex | gialib/relax_lib | 0a94b1fe38d00b51832ef947a4c3a1c2956583ca | [
"MIT"
] | null | null | null | lib/relax_lib/image_magick/image.ex | gialib/relax_lib | 0a94b1fe38d00b51832ef947a4c3a1c2956583ca | [
"MIT"
] | null | null | null | lib/relax_lib/image_magick/image.ex | gialib/relax_lib | 0a94b1fe38d00b51832ef947a4c3a1c2956583ca | [
"MIT"
] | null | null | null | defmodule RelaxLib.ImageMagick.Image do
@type path :: binary
@type ext :: binary
@type format :: binary
@type width :: integer
@type height :: integer
@type animated :: boolean
@type frame_count :: integer
@type operations :: Keyword.t
@type dirty :: %{atom => any}
@type t :: %__MODULE__{
path: path,
ext: ext,
format: format,
width: width,
height: height,
animated: animated,
frame_count: frame_count,
operations: operations,
dirty: dirty
}
defstruct path: nil,
ext: nil,
format: nil,
width: nil,
height: nil,
animated: false,
frame_count: 1,
operations: [],
dirty: %{}
end
| 24.514286 | 39 | 0.480186 |
ffdbc6ec360be23c33dbdc1e545f103722c863b6 | 836 | ex | Elixir | lib/system/definitions/animate_mods.ex | doawoo/elixir_rpg | 4dcd0eb717bd1d654b3e6a06be31aba4c3254fb3 | [
"MIT"
] | 23 | 2021-10-24T00:21:13.000Z | 2022-03-13T12:33:38.000Z | lib/system/definitions/animate_mods.ex | doawoo/elixir_rpg | 4dcd0eb717bd1d654b3e6a06be31aba4c3254fb3 | [
"MIT"
] | null | null | null | lib/system/definitions/animate_mods.ex | doawoo/elixir_rpg | 4dcd0eb717bd1d654b3e6a06be31aba4c3254fb3 | [
"MIT"
] | 3 | 2021-11-04T02:42:25.000Z | 2022-02-02T14:22:52.000Z | use ElixirRPG.DSL.System
# NOTE(review): `defsystem`, `name`, `wants`, `on_tick`, and the bindings used
# inside `on_tick` are provided by the ElixirRPG.DSL.System macros — confirm
# their contracts against the DSL module.
defsystem AnimateModSystem do
  require Logger
  alias ElixirRPG.Entity
  name "AnimateModSystem"
  # Run each tick for every entity carrying an AnimationMod component.
  wants AnimationMod
  on_tick do
    # Macro-injected bindings this system does not use; matching them to `_`
    # silences unused-variable warnings.
    _ = delta_time
    _ = frontend_pid
    _ = world_name
    # Each active mod is an {animation, remaining_ticks} pair: decrement every
    # counter and keep only the mods whose time has not yet run out.
    current_anims = get_component_data(AnimationMod, :active_mods)
    updated_values =
      Enum.map(current_anims, fn {anim, tick_count} ->
        {anim, tick_count - 1}
      end)
      |> Enum.filter(fn {_, tick_count} -> tick_count > 0 end)
    set_component_data(AnimationMod, :active_mods, updated_values)
  end
  # Attaches `animation_class` to `entity` for `len` ticks (default 5) by
  # prepending it to the entity's list of active animation mods.
  def add_animation(entity, animation_class, len \\ 5) do
    anim_data = Entity.get_component(entity, AnimationMod)
    new_anim = {animation_class, len}
    new_data = [new_anim | anim_data.active_mods]
    Entity.set_component_data(entity, AnimationMod, :active_mods, new_data)
  end
end
| 24.588235 | 75 | 0.714115 |
ffdc02445342bee0bc7902ba7bb0c3aaa6916243 | 820 | ex | Elixir | lib/rocketpay_web/views/accounts_view.ex | willianns/rocketpay | 34c882b47ab1cb2a83b51c6bb17eeceb7714ab92 | [
"Unlicense"
] | 2 | 2021-03-01T09:15:57.000Z | 2021-03-02T23:30:57.000Z | lib/rocketpay_web/views/accounts_view.ex | willianns/rocketpay | 34c882b47ab1cb2a83b51c6bb17eeceb7714ab92 | [
"Unlicense"
] | null | null | null | lib/rocketpay_web/views/accounts_view.ex | willianns/rocketpay | 34c882b47ab1cb2a83b51c6bb17eeceb7714ab92 | [
"Unlicense"
] | null | null | null | defmodule RocketpayWeb.AccountsView do
alias Rocketpay.{Account}
alias Rocketpay.Accounts.Transactions.Response, as: TransactionResponse
def render("update.json", %{ account: %Account{id: account_id, balance: balance}}) do
%{
message: "Balance changed successfully!",
account: %{
id: account_id,
balance: balance
}
}
end
def render("transaction.json", %{
transaction: %TransactionResponse{to_account: to_account, from_account: from_account }
}) do
%{
message: "Transaction done successfully",
transaction: %{
to_account: %{
id: to_account.id,
balance: to_account.balance
},
from_account: %{
id: from_account.id,
balance: from_account.balance
}
}
}
end
end
| 24.848485 | 92 | 0.612195 |
ffdc23366a35e6a3fedc1bf6d15903f6ae409ace | 27,621 | ex | Elixir | lib/codes/codes_v59.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_v59.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_v59.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_V59 do
alias IcdCode.ICDCode
def _V5900XA do
%ICDCode{full_code: "V5900XA",
category_code: "V59",
short_code: "00XA",
full_name: "Driver of pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, initial encounter",
short_name: "Driver of pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, initial encounter",
category_name: "Driver of pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, initial encounter"
}
end
def _V5900XD do
%ICDCode{full_code: "V5900XD",
category_code: "V59",
short_code: "00XD",
full_name: "Driver of pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, subsequent encounter",
short_name: "Driver of pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, subsequent encounter",
category_name: "Driver of pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, subsequent encounter"
}
end
def _V5900XS do
%ICDCode{full_code: "V5900XS",
category_code: "V59",
short_code: "00XS",
full_name: "Driver of pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, sequela",
short_name: "Driver of pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, sequela",
category_name: "Driver of pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, sequela"
}
end
def _V5909XA do
%ICDCode{full_code: "V5909XA",
category_code: "V59",
short_code: "09XA",
full_name: "Driver of pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, initial encounter",
short_name: "Driver of pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, initial encounter",
category_name: "Driver of pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, initial encounter"
}
end
def _V5909XD do
%ICDCode{full_code: "V5909XD",
category_code: "V59",
short_code: "09XD",
full_name: "Driver of pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, subsequent encounter",
short_name: "Driver of pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, subsequent encounter",
category_name: "Driver of pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, subsequent encounter"
}
end
def _V5909XS do
%ICDCode{full_code: "V5909XS",
category_code: "V59",
short_code: "09XS",
full_name: "Driver of pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, sequela",
short_name: "Driver of pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, sequela",
category_name: "Driver of pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, sequela"
}
end
def _V5910XA do
%ICDCode{full_code: "V5910XA",
category_code: "V59",
short_code: "10XA",
full_name: "Passenger in pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, initial encounter",
short_name: "Passenger in pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, initial encounter",
category_name: "Passenger in pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, initial encounter"
}
end
def _V5910XD do
%ICDCode{full_code: "V5910XD",
category_code: "V59",
short_code: "10XD",
full_name: "Passenger in pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, subsequent encounter",
short_name: "Passenger in pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, subsequent encounter",
category_name: "Passenger in pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, subsequent encounter"
}
end
def _V5910XS do
%ICDCode{full_code: "V5910XS",
category_code: "V59",
short_code: "10XS",
full_name: "Passenger in pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, sequela",
short_name: "Passenger in pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, sequela",
category_name: "Passenger in pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, sequela"
}
end
def _V5919XA do
%ICDCode{full_code: "V5919XA",
category_code: "V59",
short_code: "19XA",
full_name: "Passenger in pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, initial encounter",
short_name: "Passenger in pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, initial encounter",
category_name: "Passenger in pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, initial encounter"
}
end
def _V5919XD do
%ICDCode{full_code: "V5919XD",
category_code: "V59",
short_code: "19XD",
full_name: "Passenger in pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, subsequent encounter",
short_name: "Passenger in pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, subsequent encounter",
category_name: "Passenger in pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, subsequent encounter"
}
end
def _V5919XS do
%ICDCode{full_code: "V5919XS",
category_code: "V59",
short_code: "19XS",
full_name: "Passenger in pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, sequela",
short_name: "Passenger in pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, sequela",
category_name: "Passenger in pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, sequela"
}
end
def _V5920XA do
%ICDCode{full_code: "V5920XA",
category_code: "V59",
short_code: "20XA",
full_name: "Unspecified occupant of pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, initial encounter",
short_name: "Unspecified occupant of pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, initial encounter",
category_name: "Unspecified occupant of pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, initial encounter"
}
end
def _V5920XD do
%ICDCode{full_code: "V5920XD",
category_code: "V59",
short_code: "20XD",
full_name: "Unspecified occupant of pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, subsequent encounter",
short_name: "Unspecified occupant of pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, subsequent encounter",
category_name: "Unspecified occupant of pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, subsequent encounter"
}
end
def _V5920XS do
%ICDCode{full_code: "V5920XS",
category_code: "V59",
short_code: "20XS",
full_name: "Unspecified occupant of pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, sequela",
short_name: "Unspecified occupant of pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, sequela",
category_name: "Unspecified occupant of pick-up truck or van injured in collision with unspecified motor vehicles in nontraffic accident, sequela"
}
end
def _V5929XA do
%ICDCode{full_code: "V5929XA",
category_code: "V59",
short_code: "29XA",
full_name: "Unspecified occupant of pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, initial encounter",
short_name: "Unspecified occupant of pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, initial encounter",
category_name: "Unspecified occupant of pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, initial encounter"
}
end
def _V5929XD do
%ICDCode{full_code: "V5929XD",
category_code: "V59",
short_code: "29XD",
full_name: "Unspecified occupant of pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, subsequent encounter",
short_name: "Unspecified occupant of pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, subsequent encounter",
category_name: "Unspecified occupant of pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, subsequent encounter"
}
end
def _V5929XS do
%ICDCode{full_code: "V5929XS",
category_code: "V59",
short_code: "29XS",
full_name: "Unspecified occupant of pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, sequela",
short_name: "Unspecified occupant of pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, sequela",
category_name: "Unspecified occupant of pick-up truck or van injured in collision with other motor vehicles in nontraffic accident, sequela"
}
end
def _V593XXA do
%ICDCode{full_code: "V593XXA",
category_code: "V59",
short_code: "3XXA",
full_name: "Occupant (driver) (passenger) of pick-up truck or van injured in unspecified nontraffic accident, initial encounter",
short_name: "Occupant (driver) (passenger) of pick-up truck or van injured in unspecified nontraffic accident, initial encounter",
category_name: "Occupant (driver) (passenger) of pick-up truck or van injured in unspecified nontraffic accident, initial encounter"
}
end
def _V593XXD do
%ICDCode{full_code: "V593XXD",
category_code: "V59",
short_code: "3XXD",
full_name: "Occupant (driver) (passenger) of pick-up truck or van injured in unspecified nontraffic accident, subsequent encounter",
short_name: "Occupant (driver) (passenger) of pick-up truck or van injured in unspecified nontraffic accident, subsequent encounter",
category_name: "Occupant (driver) (passenger) of pick-up truck or van injured in unspecified nontraffic accident, subsequent encounter"
}
end
def _V593XXS do
%ICDCode{full_code: "V593XXS",
category_code: "V59",
short_code: "3XXS",
full_name: "Occupant (driver) (passenger) of pick-up truck or van injured in unspecified nontraffic accident, sequela",
short_name: "Occupant (driver) (passenger) of pick-up truck or van injured in unspecified nontraffic accident, sequela",
category_name: "Occupant (driver) (passenger) of pick-up truck or van injured in unspecified nontraffic accident, sequela"
}
end
def _V5940XA do
%ICDCode{full_code: "V5940XA",
category_code: "V59",
short_code: "40XA",
full_name: "Driver of pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, initial encounter",
short_name: "Driver of pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, initial encounter",
category_name: "Driver of pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, initial encounter"
}
end
def _V5940XD do
%ICDCode{full_code: "V5940XD",
category_code: "V59",
short_code: "40XD",
full_name: "Driver of pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, subsequent encounter",
short_name: "Driver of pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, subsequent encounter",
category_name: "Driver of pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, subsequent encounter"
}
end
def _V5940XS do
%ICDCode{full_code: "V5940XS",
category_code: "V59",
short_code: "40XS",
full_name: "Driver of pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, sequela",
short_name: "Driver of pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, sequela",
category_name: "Driver of pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, sequela"
}
end
def _V5949XA do
%ICDCode{full_code: "V5949XA",
category_code: "V59",
short_code: "49XA",
full_name: "Driver of pick-up truck or van injured in collision with other motor vehicles in traffic accident, initial encounter",
short_name: "Driver of pick-up truck or van injured in collision with other motor vehicles in traffic accident, initial encounter",
category_name: "Driver of pick-up truck or van injured in collision with other motor vehicles in traffic accident, initial encounter"
}
end
def _V5949XD do
%ICDCode{full_code: "V5949XD",
category_code: "V59",
short_code: "49XD",
full_name: "Driver of pick-up truck or van injured in collision with other motor vehicles in traffic accident, subsequent encounter",
short_name: "Driver of pick-up truck or van injured in collision with other motor vehicles in traffic accident, subsequent encounter",
category_name: "Driver of pick-up truck or van injured in collision with other motor vehicles in traffic accident, subsequent encounter"
}
end
def _V5949XS do
%ICDCode{full_code: "V5949XS",
category_code: "V59",
short_code: "49XS",
full_name: "Driver of pick-up truck or van injured in collision with other motor vehicles in traffic accident, sequela",
short_name: "Driver of pick-up truck or van injured in collision with other motor vehicles in traffic accident, sequela",
category_name: "Driver of pick-up truck or van injured in collision with other motor vehicles in traffic accident, sequela"
}
end
def _V5950XA do
%ICDCode{full_code: "V5950XA",
category_code: "V59",
short_code: "50XA",
full_name: "Passenger in pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, initial encounter",
short_name: "Passenger in pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, initial encounter",
category_name: "Passenger in pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, initial encounter"
}
end
def _V5950XD do
%ICDCode{full_code: "V5950XD",
category_code: "V59",
short_code: "50XD",
full_name: "Passenger in pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, subsequent encounter",
short_name: "Passenger in pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, subsequent encounter",
category_name: "Passenger in pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, subsequent encounter"
}
end
def _V5950XS do
%ICDCode{full_code: "V5950XS",
category_code: "V59",
short_code: "50XS",
full_name: "Passenger in pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, sequela",
short_name: "Passenger in pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, sequela",
category_name: "Passenger in pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, sequela"
}
end
def _V5959XA do
%ICDCode{full_code: "V5959XA",
category_code: "V59",
short_code: "59XA",
full_name: "Passenger in pick-up truck or van injured in collision with other motor vehicles in traffic accident, initial encounter",
short_name: "Passenger in pick-up truck or van injured in collision with other motor vehicles in traffic accident, initial encounter",
category_name: "Passenger in pick-up truck or van injured in collision with other motor vehicles in traffic accident, initial encounter"
}
end
def _V5959XD do
%ICDCode{full_code: "V5959XD",
category_code: "V59",
short_code: "59XD",
full_name: "Passenger in pick-up truck or van injured in collision with other motor vehicles in traffic accident, subsequent encounter",
short_name: "Passenger in pick-up truck or van injured in collision with other motor vehicles in traffic accident, subsequent encounter",
category_name: "Passenger in pick-up truck or van injured in collision with other motor vehicles in traffic accident, subsequent encounter"
}
end
def _V5959XS do
%ICDCode{full_code: "V5959XS",
category_code: "V59",
short_code: "59XS",
full_name: "Passenger in pick-up truck or van injured in collision with other motor vehicles in traffic accident, sequela",
short_name: "Passenger in pick-up truck or van injured in collision with other motor vehicles in traffic accident, sequela",
category_name: "Passenger in pick-up truck or van injured in collision with other motor vehicles in traffic accident, sequela"
}
end
def _V5960XA do
%ICDCode{full_code: "V5960XA",
category_code: "V59",
short_code: "60XA",
full_name: "Unspecified occupant of pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, initial encounter",
short_name: "Unspecified occupant of pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, initial encounter",
category_name: "Unspecified occupant of pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, initial encounter"
}
end
def _V5960XD do
%ICDCode{full_code: "V5960XD",
category_code: "V59",
short_code: "60XD",
full_name: "Unspecified occupant of pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, subsequent encounter",
short_name: "Unspecified occupant of pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, subsequent encounter",
category_name: "Unspecified occupant of pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, subsequent encounter"
}
end
def _V5960XS do
%ICDCode{full_code: "V5960XS",
category_code: "V59",
short_code: "60XS",
full_name: "Unspecified occupant of pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, sequela",
short_name: "Unspecified occupant of pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, sequela",
category_name: "Unspecified occupant of pick-up truck or van injured in collision with unspecified motor vehicles in traffic accident, sequela"
}
end
def _V5969XA do
%ICDCode{full_code: "V5969XA",
category_code: "V59",
short_code: "69XA",
full_name: "Unspecified occupant of pick-up truck or van injured in collision with other motor vehicles in traffic accident, initial encounter",
short_name: "Unspecified occupant of pick-up truck or van injured in collision with other motor vehicles in traffic accident, initial encounter",
category_name: "Unspecified occupant of pick-up truck or van injured in collision with other motor vehicles in traffic accident, initial encounter"
}
end
def _V5969XD do
%ICDCode{full_code: "V5969XD",
category_code: "V59",
short_code: "69XD",
full_name: "Unspecified occupant of pick-up truck or van injured in collision with other motor vehicles in traffic accident, subsequent encounter",
short_name: "Unspecified occupant of pick-up truck or van injured in collision with other motor vehicles in traffic accident, subsequent encounter",
category_name: "Unspecified occupant of pick-up truck or van injured in collision with other motor vehicles in traffic accident, subsequent encounter"
}
end
def _V5969XS do
%ICDCode{full_code: "V5969XS",
category_code: "V59",
short_code: "69XS",
full_name: "Unspecified occupant of pick-up truck or van injured in collision with other motor vehicles in traffic accident, sequela",
short_name: "Unspecified occupant of pick-up truck or van injured in collision with other motor vehicles in traffic accident, sequela",
category_name: "Unspecified occupant of pick-up truck or van injured in collision with other motor vehicles in traffic accident, sequela"
}
end
def _V5981XA do
%ICDCode{full_code: "V5981XA",
category_code: "V59",
short_code: "81XA",
full_name: "Occupant (driver) (passenger) of pick-up truck or van injured in transport accident with military vehicle, initial encounter",
short_name: "Occupant (driver) (passenger) of pick-up truck or van injured in transport accident with military vehicle, initial encounter",
category_name: "Occupant (driver) (passenger) of pick-up truck or van injured in transport accident with military vehicle, initial encounter"
}
end
def _V5981XD do
%ICDCode{full_code: "V5981XD",
category_code: "V59",
short_code: "81XD",
full_name: "Occupant (driver) (passenger) of pick-up truck or van injured in transport accident with military vehicle, subsequent encounter",
short_name: "Occupant (driver) (passenger) of pick-up truck or van injured in transport accident with military vehicle, subsequent encounter",
category_name: "Occupant (driver) (passenger) of pick-up truck or van injured in transport accident with military vehicle, subsequent encounter"
}
end
def _V5981XS do
%ICDCode{full_code: "V5981XS",
category_code: "V59",
short_code: "81XS",
full_name: "Occupant (driver) (passenger) of pick-up truck or van injured in transport accident with military vehicle, sequela",
short_name: "Occupant (driver) (passenger) of pick-up truck or van injured in transport accident with military vehicle, sequela",
category_name: "Occupant (driver) (passenger) of pick-up truck or van injured in transport accident with military vehicle, sequela"
}
end
def _V5988XA do
%ICDCode{full_code: "V5988XA",
category_code: "V59",
short_code: "88XA",
full_name: "Occupant (driver) (passenger) of pick-up truck or van injured in other specified transport accidents, initial encounter",
short_name: "Occupant (driver) (passenger) of pick-up truck or van injured in other specified transport accidents, initial encounter",
category_name: "Occupant (driver) (passenger) of pick-up truck or van injured in other specified transport accidents, initial encounter"
}
end
def _V5988XD do
%ICDCode{full_code: "V5988XD",
category_code: "V59",
short_code: "88XD",
full_name: "Occupant (driver) (passenger) of pick-up truck or van injured in other specified transport accidents, subsequent encounter",
short_name: "Occupant (driver) (passenger) of pick-up truck or van injured in other specified transport accidents, subsequent encounter",
category_name: "Occupant (driver) (passenger) of pick-up truck or van injured in other specified transport accidents, subsequent encounter"
}
end
def _V5988XS do
%ICDCode{full_code: "V5988XS",
category_code: "V59",
short_code: "88XS",
full_name: "Occupant (driver) (passenger) of pick-up truck or van injured in other specified transport accidents, sequela",
short_name: "Occupant (driver) (passenger) of pick-up truck or van injured in other specified transport accidents, sequela",
category_name: "Occupant (driver) (passenger) of pick-up truck or van injured in other specified transport accidents, sequela"
}
end
def _V599XXA do
%ICDCode{full_code: "V599XXA",
category_code: "V59",
short_code: "9XXA",
full_name: "Occupant (driver) (passenger) of pick-up truck or van injured in unspecified traffic accident, initial encounter",
short_name: "Occupant (driver) (passenger) of pick-up truck or van injured in unspecified traffic accident, initial encounter",
category_name: "Occupant (driver) (passenger) of pick-up truck or van injured in unspecified traffic accident, initial encounter"
}
end
def _V599XXD do
%ICDCode{full_code: "V599XXD",
category_code: "V59",
short_code: "9XXD",
full_name: "Occupant (driver) (passenger) of pick-up truck or van injured in unspecified traffic accident, subsequent encounter",
short_name: "Occupant (driver) (passenger) of pick-up truck or van injured in unspecified traffic accident, subsequent encounter",
category_name: "Occupant (driver) (passenger) of pick-up truck or van injured in unspecified traffic accident, subsequent encounter"
}
end
def _V599XXS do
%ICDCode{full_code: "V599XXS",
category_code: "V59",
short_code: "9XXS",
full_name: "Occupant (driver) (passenger) of pick-up truck or van injured in unspecified traffic accident, sequela",
short_name: "Occupant (driver) (passenger) of pick-up truck or van injured in unspecified traffic accident, sequela",
category_name: "Occupant (driver) (passenger) of pick-up truck or van injured in unspecified traffic accident, sequela"
}
end
end
| 62.917995 | 169 | 0.710945 |
ffdc25dfb31ccc26fe1dafaade561731ecf7dc68 | 758 | exs | Elixir | config/test.exs | media-io/ex_step_flow | 07b6fe98399836679728547319c55137a1b5c933 | [
"MIT"
] | 4 | 2019-12-07T05:18:26.000Z | 2020-11-06T23:28:43.000Z | config/test.exs | media-io/ex_step_flow | 07b6fe98399836679728547319c55137a1b5c933 | [
"MIT"
] | 53 | 2020-01-06T11:23:09.000Z | 2021-06-25T15:30:07.000Z | config/test.exs | media-io/ex_step_flow | 07b6fe98399836679728547319c55137a1b5c933 | [
"MIT"
] | 3 | 2020-01-30T15:37:40.000Z | 2020-10-27T14:10:02.000Z | use Mix.Config
config :logger, :console, format: "[$level] $message\n"
config :logger, level: :error
config :plug, :validate_header_keys_during_test, true
config :step_flow, StepFlow,
workers_work_directory: "/test_work_dir",
workflow_definition: "./test/definitions",
enable_metrics: true
config :step_flow, StepFlow.Metrics,
scale: "day",
delta: -1
config :step_flow, StepFlow.Repo,
hostname: "postgres",
port: 5432,
username: "postgres",
password: "postgres",
database: "step_flow_test",
pool_size: 10,
pool: Ecto.Adapters.SQL.Sandbox
config :step_flow, StepFlow.Amqp,
hostname: "rabbitmq",
port: 5672,
username: "guest",
password: "guest",
virtual_host: ""
config :step_flow, StepFlow.Workflows, time_interval: 1
| 22.294118 | 55 | 0.724274 |
ffdc449434128c1fd2dc3ae64226f83145db1b3f | 170 | ex | Elixir | web/views/tp_destination_view.ex | zombalo/cgrates_web_jsonapi | 47845be4311839fe180cc9f2c7c6795649da4430 | [
"MIT"
] | null | null | null | web/views/tp_destination_view.ex | zombalo/cgrates_web_jsonapi | 47845be4311839fe180cc9f2c7c6795649da4430 | [
"MIT"
] | null | null | null | web/views/tp_destination_view.ex | zombalo/cgrates_web_jsonapi | 47845be4311839fe180cc9f2c7c6795649da4430 | [
"MIT"
] | null | null | null | defmodule CgratesWebJsonapi.TpDestinationView do
use CgratesWebJsonapi.Web, :view
use JaSerializer.PhoenixView
attributes [:tpid, :tag, :prefix, :created_at]
end
| 21.25 | 48 | 0.788235 |
ffdc460a8f7d08287b3b0af729a6a0f929543563 | 992 | ex | Elixir | lib/hello/application.ex | michaelroudnitski/hello-phoenix | 3fee75784948b37732a0feba1a31e22b83281e7f | [
"Apache-2.0"
] | 3 | 2020-12-21T07:31:53.000Z | 2021-07-07T05:11:32.000Z | lib/hello/application.ex | michaelroudnitski/hello-phoenix | 3fee75784948b37732a0feba1a31e22b83281e7f | [
"Apache-2.0"
] | 1 | 2021-12-31T20:45:35.000Z | 2021-12-31T20:45:35.000Z | lib/hello/application.ex | michaelroudnitski/hello-phoenix | 3fee75784948b37732a0feba1a31e22b83281e7f | [
"Apache-2.0"
] | 1 | 2020-11-17T07:06:17.000Z | 2020-11-17T07:06:17.000Z | defmodule Hello.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
def start(_type, _args) do
children = [
# Start the Ecto repository
Hello.Repo,
# Start the Telemetry supervisor
HelloWeb.Telemetry,
# Start the PubSub system
{Phoenix.PubSub, name: Hello.PubSub},
# Start the Endpoint (http/https)
HelloWeb.Endpoint
# Start a worker by calling: Hello.Worker.start_link(arg)
# {Hello.Worker, arg}
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Hello.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
HelloWeb.Endpoint.config_change(changed, removed)
:ok
end
end
| 28.342857 | 63 | 0.696573 |
ffdc849e3298463cc39937c471a8aef3f355954a | 2,890 | ex | Elixir | lib/hexpm/web/controllers/api/key_controller.ex | lau/hexpm | beee80f5358a356530debfea35ee65c3a0aa9b25 | [
"Apache-2.0"
] | null | null | null | lib/hexpm/web/controllers/api/key_controller.ex | lau/hexpm | beee80f5358a356530debfea35ee65c3a0aa9b25 | [
"Apache-2.0"
] | null | null | null | lib/hexpm/web/controllers/api/key_controller.ex | lau/hexpm | beee80f5358a356530debfea35ee65c3a0aa9b25 | [
"Apache-2.0"
] | null | null | null | defmodule Hexpm.Web.API.KeyController do
use Hexpm.Web, :controller
plug :fetch_organization
plug :authorize,
[
domain: "api",
resource: "write",
allow_unconfirmed: true,
fun: &maybe_organization_access_write/2
]
when action == :create
plug :authorize,
[domain: "api", resource: "write", fun: &maybe_organization_access_write/2]
when action in [:delete, :delete_all]
plug :authorize,
[domain: "api", resource: "read", fun: &maybe_organization_access/2]
when action in [:index, :show]
plug :require_organization_path
def index(conn, _params) do
user_or_organization = conn.assigns.organization || conn.assigns.current_user
authing_key = conn.assigns.key
keys = Keys.all(user_or_organization)
conn
|> api_cache(:private)
|> render(:index, keys: keys, authing_key: authing_key)
end
def show(conn, %{"name" => name}) do
user_or_organization = conn.assigns.organization || conn.assigns.current_user
authing_key = conn.assigns.key
key = Keys.get(user_or_organization, name)
if key do
when_stale(conn, key, fn conn ->
conn
|> api_cache(:private)
|> render(:show, key: key, authing_key: authing_key)
end)
else
not_found(conn)
end
end
def create(conn, params) do
user_or_organization = conn.assigns.organization || conn.assigns.current_user
authing_key = conn.assigns.key
case Keys.create(user_or_organization, params, audit: audit_data(conn)) do
{:ok, %{key: key}} ->
location = Routes.api_key_url(conn, :show, params["name"])
conn
|> put_resp_header("location", location)
|> api_cache(:private)
|> put_status(201)
|> render(:show, key: key, authing_key: authing_key)
{:error, :key, changeset, _} ->
validation_failed(conn, changeset)
end
end
def delete(conn, %{"name" => name}) do
user_or_organization = conn.assigns.organization || conn.assigns.current_user
authing_key = conn.assigns.key
case Keys.revoke(user_or_organization, name, audit: audit_data(conn)) do
{:ok, %{key: key}} ->
conn
|> api_cache(:private)
|> put_status(200)
|> render(:delete, key: key, authing_key: authing_key)
_ ->
not_found(conn)
end
end
def delete_all(conn, _params) do
user_or_organization = conn.assigns.organization || conn.assigns.current_user
key = conn.assigns.key
{:ok, _} = Keys.revoke_all(user_or_organization, audit: audit_data(conn))
conn
|> put_status(200)
|> render(:delete, key: Keys.get(key.id), authing_key: key)
end
defp require_organization_path(conn, _opts) do
if conn.assigns.current_organization && !conn.assigns.organization do
not_found(conn)
else
conn
end
end
end
| 27.788462 | 82 | 0.647405 |
ffdc97793f076987aec396d38c6c836cd740a64d | 1,858 | ex | Elixir | lib/cgrates_web_jsonapi/tariff_plans/tp_attribute.ex | max-konin/cgrates_web_jsonapi | e82690e343d790b0f77dea6699483fcb6fd8a162 | [
"MIT"
] | 2 | 2018-10-03T07:41:32.000Z | 2021-03-21T11:27:27.000Z | lib/cgrates_web_jsonapi/tariff_plans/tp_attribute.ex | max-konin/cgrates_web_jsonapi | e82690e343d790b0f77dea6699483fcb6fd8a162 | [
"MIT"
] | 1 | 2018-10-31T04:55:59.000Z | 2018-10-31T04:55:59.000Z | lib/cgrates_web_jsonapi/tariff_plans/tp_attribute.ex | max-konin/cgrates_web_jsonapi | e82690e343d790b0f77dea6699483fcb6fd8a162 | [
"MIT"
] | 5 | 2018-09-27T11:30:44.000Z | 2021-01-16T08:28:58.000Z | defmodule CgratesWebJsonapi.TariffPlans.TpAttribute do
use Ecto.Schema
import Ecto.Changeset
use EctoConditionals, repo: CgratesWebJsonapi.Repo
@attributes ~w[tpid tenant custom_id contexts filter_ids activation_interval blocker weight
attribute_filter_ids path value cg_type]a
use CgratesWebJsonapi.CsvImport, module: __MODULE__, attributes: @attributes
@primary_key {:pk, :id, autogenerate: true}
@derive {Phoenix.Param, key: :pk}
schema "tp_attributes" do
field :id, :integer, source: :pk
field :tpid, :string
field :tenant, :string
field :custom_id, :string, source: :id
field :contexts, :string, default: ""
field :filter_ids, :string, default: ""
field :activation_interval, :string, default: ""
field :blocker, :boolean
field :weight, :decimal, default: 0
field :attribute_filter_ids, :string, default: ""
field :path, :string
field :cg_type, :string, source: :type
field :value, :string
timestamps(
inserted_at: :created_at,
updated_at: false,
inserted_at_source: :created_at,
updated_at_source: false,
type: :utc_datetime
)
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def changeset(struct, params \\ %{}) do
struct
|> cast(params, @attributes)
|> validate_required([:tpid, :tenant, :custom_id, :path, :value, :cg_type])
|> validate_length(:tpid, max: 64)
|> validate_length(:tenant, max: 64)
|> validate_length(:custom_id, max: 64)
|> validate_length(:contexts, max: 64)
|> validate_length(:filter_ids, max: 64)
|> validate_length(:activation_interval, max: 64)
|> validate_length(:attribute_filter_ids, max: 64)
|> validate_length(:path, max: 64)
|> validate_length(:cg_type, max: 64)
|> validate_length(:value, max: 64)
end
end
| 33.781818 | 93 | 0.67761 |
ffdce312f7f9247d86710efd1f5082330b214144 | 15,001 | ex | Elixir | lib/elixir/lib/io.ex | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/io.ex | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/io.ex | xtian/elixir | c680eb1a3992309c272e8f808e15990ea5318d6e | [
"Apache-2.0"
] | null | null | null | defmodule IO do
@moduledoc """
Functions handling input/output (IO).
Many functions in this module expect an IO device as an argument.
An IO device must be a PID or an atom representing a process.
For convenience, Elixir provides `:stdio` and `:stderr` as
shortcuts to Erlang's `:standard_io` and `:standard_error`.
The majority of the functions expect chardata, i.e. strings or
lists of characters and strings. In case another type is given,
functions will convert to string via the `String.Chars` protocol
(as shown in typespecs).
The functions starting with `bin` expect iodata as an argument,
i.e. binaries or lists of bytes and binaries.
## IO devices
An IO device may be an atom or a PID. In case it is an atom,
the atom must be the name of a registered process. In addition,
Elixir provides two shortcuts:
* `:stdio` - a shortcut for `:standard_io`, which maps to
the current `Process.group_leader/0` in Erlang
* `:stderr` - a shortcut for the named process `:standard_error`
provided in Erlang
IO devices maintain their position, that means subsequent calls to any
reading or writing functions will start from the place when the device
was last accessed. Position of files can be changed using the
`:file.position/2` function.
"""
@type device :: atom | pid
@type nodata :: {:error, term} | :eof
@type chardata() :: :unicode.chardata()
  # Guard-safe check that `data` is iodata (a list or a binary); expands
  # inline so it can be used in `when` clauses. Presumably used by the
  # binary-oriented functions later in this module — not visible here.
  defmacrop is_iodata(data) do
    quote do
      is_list(unquote(data)) or is_binary(unquote(data))
    end
  end
  @doc """
  Reads from the IO `device`.

  The `device` is iterated by the given number of characters or line by line if
  `:line` is given.
  Alternatively, if `:all` is given, then whole `device` is returned.

  It returns:

    * `data` - the output characters

    * `:eof` - end of file was encountered

    * `{:error, reason}` - other (rare) error condition;
      for instance, `{:error, :estale}` if reading from an
      NFS volume

  If `:all` is given, `:eof` is never returned, but an
  empty string in case the device has reached EOF.
  """
  @spec read(device, :all | :line | non_neg_integer) :: chardata | nodata
  def read(device \\ :stdio, line_or_chars)

  def read(device, :all) do
    do_read_all(map_dev(device), "")
  end

  def read(device, :line) do
    # The second argument is the prompt; an empty charlist prints nothing.
    :io.get_line(map_dev(device), '')
  end

  def read(device, count) when is_integer(count) and count >= 0 do
    :io.get_chars(map_dev(device), '', count)
  end

  # Accumulates lines until EOF and returns the concatenated binary.
  # Any non-binary, non-:eof result (i.e. an error tuple) is returned as-is.
  defp do_read_all(mapped_dev, acc) do
    case :io.get_line(mapped_dev, "") do
      line when is_binary(line) -> do_read_all(mapped_dev, acc <> line)
      :eof -> acc
      other -> other
    end
  end
  @doc """
  Reads from the IO `device`. The operation is Unicode unsafe.

  The `device` is iterated by the given number of bytes or line by line if
  `:line` is given.
  Alternatively, if `:all` is given, then whole `device` is returned.

  It returns:

    * `data` - the output bytes

    * `:eof` - end of file was encountered

    * `{:error, reason}` - other (rare) error condition;
      for instance, `{:error, :estale}` if reading from an
      NFS volume

  If `:all` is given, `:eof` is never returned, but an
  empty string in case the device has reached EOF.

  Note: do not use this function on IO devices in Unicode mode
  as it will return the wrong result.
  """
  @spec binread(device, :all | :line | non_neg_integer) :: iodata | nodata
  def binread(device \\ :stdio, line_or_chars)

  def binread(device, :all) do
    do_binread_all(map_dev(device), "")
  end

  def binread(device, :line) do
    # :file.read_line/1 works on raw bytes, bypassing Unicode translation.
    case :file.read_line(map_dev(device)) do
      {:ok, data} -> data
      other -> other
    end
  end

  def binread(device, count) when is_integer(count) and count >= 0 do
    case :file.read(map_dev(device), count) do
      {:ok, data} -> data
      other -> other
    end
  end

  # Chunk size used when draining a device with `:all`.
  @read_all_size 4096

  # Reads fixed-size chunks until EOF, concatenating into `acc`.
  # Errors from :file.read/2 are propagated unchanged.
  defp do_binread_all(mapped_dev, acc) do
    case :file.read(mapped_dev, @read_all_size) do
      {:ok, data} -> do_binread_all(mapped_dev, acc <> data)
      :eof -> acc
      other -> other
    end
  end
@doc """
Writes `item` to the given `device`.
By default the `device` is the standard output.
It returns `:ok` if it succeeds.
## Examples
IO.write "sample"
#=> sample
IO.write :stderr, "error"
#=> error
"""
@spec write(device, chardata | String.Chars.t) :: :ok
def write(device \\ :stdio, item) do
:io.put_chars map_dev(device), to_chardata(item)
end
@doc """
Writes `item` as a binary to the given `device`.
No Unicode conversion happens.
The operation is Unicode unsafe.
Check `write/2` for more information.
Note: do not use this function on IO devices in Unicode mode
as it will return the wrong result.
"""
@spec binwrite(device, iodata) :: :ok | {:error, term}
def binwrite(device \\ :stdio, item) when is_iodata(item) do
:file.write map_dev(device), item
end
@doc """
Writes `item` to the given `device`, similar to `write/2`,
but adds a newline at the end.
"""
@spec puts(device, chardata | String.Chars.t) :: :ok
def puts(device \\ :stdio, item) do
:io.put_chars map_dev(device), [to_chardata(item), ?\n]
end
  @doc """
  Writes a `message` to stderr, along with the given `stacktrace`.

  This function also notifies the compiler a warning was printed
  (in case --warnings-as-errors was enabled). It returns `:ok`
  if it succeeds.

  An empty list can be passed to avoid stacktrace printing.

  ## Examples

      stacktrace = [{MyApp, :main, 1, [file: 'my_app.ex', line: 4]}]
      IO.warn "variable bar is unused", stacktrace
      #=> warning: variable bar is unused
      #=>   my_app.ex:4: MyApp.main/1

  """
  @spec warn(chardata | String.Chars.t, Exception.stacktrace) :: :ok
  def warn(message, []) do
    :elixir_errors.warn([to_chardata(message), ?\n])
  end

  def warn(message, stacktrace) when is_list(stacktrace) do
    # Render each stacktrace entry on its own indented line below the message.
    formatted = Enum.map_join(stacktrace, "\n  ", &Exception.format_stacktrace_entry(&1))
    :elixir_errors.warn([to_chardata(message), ?\n, "  ", formatted, ?\n])
  end

  @doc """
  Writes a `message` to stderr, along with the current stacktrace.

  It returns `:ok` if it succeeds.

  ## Examples

      IO.warn "variable bar is unused"
      #=> warning: variable bar is unused
      #=>   (iex) evaluator.ex:108: IEx.Evaluator.eval/4

  """
  @spec warn(chardata | String.Chars.t) :: :ok
  def warn(message) do
    {:current_stacktrace, stacktrace} = Process.info(self(), :current_stacktrace)
    # Drop the first two frames (Process.info/2 and this function) so the
    # reported stacktrace starts at the caller.
    warn(message, Enum.drop(stacktrace, 2))
  end
  @doc """
  Inspects and writes the given `item` to the device.

  It's important to note that it returns the given `item` unchanged.
  This makes it possible to "spy" on values by inserting an
  `IO.inspect/2` call almost anywhere in your code, for example,
  in the middle of a pipeline.

  It enables pretty printing by default with width of
  80 characters. The width can be changed by explicitly
  passing the `:width` option.

  The output can be decorated with a label, by providing the `:label`
  option to easily distinguish it from other `IO.inspect/2` calls.
  The label will be printed before the inspected `item`.

  See `Inspect.Opts` for a full list of remaining formatting options.

  ## Examples

      IO.inspect <<0, 1, 2>>, width: 40

  Prints:

      <<0, 1, 2>>

  We can use the `:label` option to decorate the output:

      IO.inspect 1..100, label: "a wonderful range"

  Prints:

      a wonderful range: 1..100

  The `:label` option is especially useful with pipelines:

      [1, 2, 3]
      |> IO.inspect(label: "before")
      |> Enum.map(&(&1 * 2))
      |> IO.inspect(label: "after")
      |> Enum.sum

  Prints:

      before: [1, 2, 3]
      after: [2, 4, 6]

  """
  @spec inspect(item, Keyword.t) :: item when item: var
  def inspect(item, opts \\ []) do
    inspect :stdio, item, opts
  end

  @doc """
  Inspects `item` according to the given options using the IO `device`.

  See `inspect/2` for a full list of options.
  """
  @spec inspect(device, item, Keyword.t) :: item when item: var
  def inspect(device, item, opts) when is_list(opts) do
    # When a :label option is given, prefix the output with "label: ".
    label = if (label = opts[:label]), do: [to_chardata(label), ": "], else: []
    # Remaining options are converted into an Inspect.Opts struct; unknown
    # keys (such as :label itself) are simply ignored by struct/2.
    opts = struct(Inspect.Opts, opts)
    chardata = Inspect.Algebra.format(Inspect.Algebra.to_doc(item, opts), opts.width)
    puts device, [label, chardata]
    # Return the original item so this call can be inserted into pipelines.
    item
  end
  @doc """
  Gets a number of bytes from IO device `:stdio`.

  If `:stdio` is a Unicode device, `count` implies
  the number of Unicode codepoints to be retrieved.
  Otherwise, `count` is the number of raw bytes to be retrieved.

  See `IO.getn/3` for a description of return values.
  """
  # The two arity-2 variants below are disambiguated at runtime: a second
  # argument that is a positive integer is a count (prompt on :stdio),
  # anything else is treated as a prompt for the given device.
  @spec getn(chardata | String.Chars.t, pos_integer) :: chardata | nodata
  @spec getn(device, chardata | String.Chars.t) :: chardata | nodata
  def getn(prompt, count \\ 1)

  def getn(prompt, count) when is_integer(count) and count > 0 do
    getn(:stdio, prompt, count)
  end

  def getn(device, prompt) when not is_integer(prompt) do
    getn(device, prompt, 1)
  end

  @doc """
  Gets a number of bytes from the IO `device`.

  If the IO `device` is a Unicode device, `count` implies
  the number of Unicode codepoints to be retrieved.
  Otherwise, `count` is the number of raw bytes to be retrieved.

  It returns:

    * `data` - the input characters

    * `:eof` - end of file was encountered

    * `{:error, reason}` - other (rare) error condition;
      for instance, `{:error, :estale}` if reading from an
      NFS volume

  """
  @spec getn(device, chardata | String.Chars.t, pos_integer) :: chardata | nodata
  def getn(device, prompt, count) when is_integer(count) and count > 0 do
    :io.get_chars(map_dev(device), to_chardata(prompt), count)
  end
@doc ~S"""
Reads a line from the IO `device`.
It returns:
* `data` - the characters in the line terminated
by a line-feed (LF) or end of file (EOF)
* `:eof` - end of file was encountered
* `{:error, reason}` - other (rare) error condition;
for instance, `{:error, :estale}` if reading from an
NFS volume
## Examples
To display "What is your name?" as a prompt and await user input:
IO.gets "What is your name?\n"
"""
@spec gets(device, chardata | String.Chars.t) :: chardata | nodata
def gets(device \\ :stdio, prompt) do
:io.get_line(map_dev(device), to_chardata(prompt))
end
  @doc """
  Converts the IO `device` into an `IO.Stream`.

  An `IO.Stream` implements both `Enumerable` and
  `Collectable`, allowing it to be used for both read
  and write.

  The `device` is iterated by the given number of characters or line by line if
  `:line` is given.

  This reads from the IO as UTF-8. Check out
  `IO.binstream/2` to handle the IO as a raw binary.

  Note that an IO stream has side effects and every time
  you go over the stream you may get different results.

  ## Examples

  Here is an example on how we mimic an echo server
  from the command line:

      Enum.each IO.stream(:stdio, :line), &IO.write(&1)

  """
  @spec stream(device, :line | pos_integer) :: Enumerable.t
  def stream(device, line_or_codepoints)
      when line_or_codepoints == :line
      when is_integer(line_or_codepoints) and line_or_codepoints > 0 do
    # Second argument presumably toggles raw/binary mode (false = UTF-8
    # chardata, true = raw bytes) — see binstream/2 below; confirm in
    # IO.Stream.__build__/3.
    IO.Stream.__build__(map_dev(device), false, line_or_codepoints)
  end

  @doc """
  Converts the IO `device` into an `IO.Stream`. The operation is Unicode unsafe.

  An `IO.Stream` implements both `Enumerable` and
  `Collectable`, allowing it to be used for both read
  and write.

  The `device` is iterated by the given number of bytes or line by line if
  `:line` is given.

  This reads from the IO device as a raw binary.

  Note that an IO stream has side effects and every time
  you go over the stream you may get different results.

  Finally, do not use this function on IO devices in Unicode
  mode as it will return the wrong result.
  """
  @spec binstream(device, :line | pos_integer) :: Enumerable.t
  def binstream(device, line_or_bytes)
      when line_or_bytes == :line
      when is_integer(line_or_bytes) and line_or_bytes > 0 do
    IO.Stream.__build__(map_dev(device), true, line_or_bytes)
  end
@doc """
Converts chardata (a list of integers representing codepoints,
lists and strings) into a string.
In case the conversion fails, it raises an `UnicodeConversionError`.
If a string is given, it returns the string itself.
## Examples
iex> IO.chardata_to_string([0x00E6, 0x00DF])
"æß"
iex> IO.chardata_to_string([0x0061, "bc"])
"abc"
iex> IO.chardata_to_string("string")
"string"
"""
@spec chardata_to_string(chardata) :: String.t | no_return
def chardata_to_string(string) when is_binary(string) do
string
end
def chardata_to_string(list) when is_list(list) do
List.to_string(list)
end
@doc """
Converts iodata (a list of integers representing bytes, lists
and binaries) into a binary.
The operation is Unicode unsafe.
Notice that this function treats lists of integers as raw bytes
and does not perform any kind of encoding conversion. If you want
to convert from a charlist to a string (UTF-8 encoded), please
use `chardata_to_string/1` instead.
If this function receives a binary, the same binary is returned.
Inlined by the compiler.
## Examples
iex> bin1 = <<1, 2, 3>>
iex> bin2 = <<4, 5>>
iex> bin3 = <<6>>
iex> IO.iodata_to_binary([bin1, 1, [2, 3, bin2], 4 | bin3])
<<1, 2, 3, 1, 2, 3, 4, 5, 4, 6>>
iex> bin = <<1, 2, 3>>
iex> IO.iodata_to_binary(bin)
<<1, 2, 3>>
"""
@spec iodata_to_binary(iodata) :: binary
def iodata_to_binary(item) do
:erlang.iolist_to_binary(item)
end
@doc """
Returns the size of an iodata.
Inlined by the compiler.
## Examples
iex> IO.iodata_length([1, 2 | <<3, 4>>])
4
"""
@spec iodata_length(iodata) :: non_neg_integer
def iodata_length(item) do
:erlang.iolist_size(item)
end
@doc false
def each_stream(device, line_or_codepoints) do
case read(device, line_or_codepoints) do
:eof ->
{:halt, device}
{:error, reason} ->
raise IO.StreamError, reason: reason
data ->
{[data], device}
end
end
@doc false
def each_binstream(device, line_or_chars) do
case binread(device, line_or_chars) do
:eof ->
{:halt, device}
{:error, reason} ->
raise IO.StreamError, reason: reason
data ->
{[data], device}
end
end
  # Inline these tiny helpers at every call site.
  @compile {:inline, map_dev: 1, to_chardata: 1}

  # Map the Elixir names for standard IO and error to Erlang names
  defp map_dev(:stdio), do: :standard_io
  defp map_dev(:stderr), do: :standard_error
  # Tuples are also let through — presumably for remote/composite device
  # references accepted by the :io module; TODO confirm.
  defp map_dev(other) when is_atom(other) or is_pid(other) or is_tuple(other), do: other

  # Lists are assumed to already be chardata; anything else goes through
  # the String.Chars protocol.
  defp to_chardata(list) when is_list(list), do: list
  defp to_chardata(other), do: to_string(other)
end
| 28.039252 | 89 | 0.663489 |
ffdce416ee43499983feeefaff014dbecf4c9330 | 2,087 | exs | Elixir | catcasts/config/dev.exs | cashmann/phoenix-tutorial | ea37b9d54a79df9bc1351a948eb8f8400c5e62ff | [
"MIT"
] | null | null | null | catcasts/config/dev.exs | cashmann/phoenix-tutorial | ea37b9d54a79df9bc1351a948eb8f8400c5e62ff | [
"MIT"
] | 3 | 2021-03-09T20:36:45.000Z | 2021-05-10T17:47:02.000Z | catcasts/config/dev.exs | cashmann/phoenix-tutorial | ea37b9d54a79df9bc1351a948eb8f8400c5e62ff | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :catcasts, CatcastsWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [
node: [
"node_modules/webpack/bin/webpack.js",
"--mode",
"development",
"--watch-stdin",
cd: Path.expand("../assets", __DIR__)
]
]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :catcasts, CatcastsWeb.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{lib/catcasts_web/views/.*(ex)$},
~r{lib/catcasts_web/templates/.*(eex)$}
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
# Configure your database
config :catcasts, Catcasts.Repo,
username: "nathan",
password: "postgres",
database: "catcasts_dev",
hostname: "localhost",
pool_size: 10
| 27.460526 | 68 | 0.68759 |
ffdce8915b48e831898f5c82b33d396c97f72713 | 389 | ex | Elixir | lib/new_relic/telemetry/ecto/supervisor.ex | backdoer/elixir_agent | b42f891ed667fa799d0b82244fb465ce54429127 | [
"Apache-2.0"
] | null | null | null | lib/new_relic/telemetry/ecto/supervisor.ex | backdoer/elixir_agent | b42f891ed667fa799d0b82244fb465ce54429127 | [
"Apache-2.0"
] | null | null | null | lib/new_relic/telemetry/ecto/supervisor.ex | backdoer/elixir_agent | b42f891ed667fa799d0b82244fb465ce54429127 | [
"Apache-2.0"
] | null | null | null | defmodule NewRelic.Telemetry.Ecto.Supervisor do
@moduledoc false
use DynamicSupervisor
def start_link() do
DynamicSupervisor.start_link(__MODULE__, :ok, name: __MODULE__)
end
def start_child(otp_app) do
DynamicSupervisor.start_child(__MODULE__, {NewRelic.Telemetry.Ecto, otp_app})
end
def init(:ok) do
DynamicSupervisor.init(strategy: :one_for_one)
end
end
| 21.611111 | 81 | 0.760925 |
ffdcf7b4f6beebf5042f8915939427f620bea9ce | 1,625 | ex | Elixir | haha/elixir/lib/haha_web/endpoint.ex | oasis-open/openc2-lycan-beam | 0be49bb26f4489260f3d8c9c64545c0008c52e1c | [
"MIT"
] | 4 | 2018-04-27T19:40:56.000Z | 2019-07-02T14:12:30.000Z | haha/elixir/lib/haha_web/endpoint.ex | oasis-open/openc2-lycan-beam | 0be49bb26f4489260f3d8c9c64545c0008c52e1c | [
"MIT"
] | 8 | 2018-10-23T20:12:51.000Z | 2021-05-11T01:27:53.000Z | haha/elixir/lib/haha_web/endpoint.ex | oasis-open/openc2-lycan-beam | 0be49bb26f4489260f3d8c9c64545c0008c52e1c | [
"MIT"
] | 3 | 2018-03-23T20:37:40.000Z | 2018-06-13T01:12:31.000Z | defmodule HaHaWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :haha
socket "/socket", HaHaWeb.UserSocket
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phoenix.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/", from: :haha, gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
end
plug Plug.Logger
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Jason
plug Plug.MethodOverride
plug Plug.Head
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
plug Plug.Session,
store: :cookie,
key: "_haha_key",
signing_salt: "Hg/Pkzja"
plug HaHaWeb.Router
@doc """
Callback invoked for dynamically configuring the endpoint.
It receives the endpoint configuration and checks if
configuration should be loaded from the system environment.
"""
def init(_key, config) do
if config[:load_from_system_env] do
port = System.get_env("PORT") || raise "expected the PORT environment variable to be set"
{:ok, Keyword.put(config, :http, [:inet6, port: port])}
else
{:ok, config}
end
end
end
| 28.508772 | 95 | 0.702769 |
ffdd14d4c48a5a5962d3cef70d6764d0531573a1 | 2,787 | ex | Elixir | lib/api/real.ex | hectorperez/amplitude_ex | 4df519f2e8edfefcc61e5732da1f3b1190bec293 | [
"BSD-3-Clause"
] | null | null | null | lib/api/real.ex | hectorperez/amplitude_ex | 4df519f2e8edfefcc61e5732da1f3b1190bec293 | [
"BSD-3-Clause"
] | null | null | null | lib/api/real.ex | hectorperez/amplitude_ex | 4df519f2e8edfefcc61e5732da1f3b1190bec293 | [
"BSD-3-Clause"
] | 2 | 2020-04-26T22:17:11.000Z | 2021-08-16T14:38:44.000Z | defmodule Amplitude.API.Real do
use HTTPoison.Base
require Logger
@track_api_url "https://api.amplitude.com/httpapi"
@identify_api_url "https://api.amplitude.com/identify"
defp track_api_url do
Application.get_env(:amplitude, :api_host) || @track_api_url
end
defp identify_api_url, do: @identify_api_url
defp json_header, do: ["Content-Type": "application/json"]
def api_track(params, opts \\ []) do
api_key =
Keyword.get_lazy(opts, :api_key, fn ->
Application.get_env(:amplitude, :api_key)
end)
case params |> Poison.encode() do
{:ok, params} ->
Task.start(fn ->
start()
case get(track_api_url(), json_header(), params: %{api_key: api_key, event: params}) do
{:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
Logger.debug("Amplitude Track: #{body}")
{:ok, %HTTPoison.Response{status_code: 404, body: body}} ->
Logger.debug("Amplitude Track: #{body}")
{:ok, %HTTPoison.Response{status_code: 400, body: body}} ->
Logger.debug("Amplitude Track Failed: #{body}")
{:error, %HTTPoison.Error{reason: reason}} ->
Logger.debug("Amplitude Track Failed: #{reason}")
end
end)
{:ok, "success"}
{:error, message} ->
{:error, "Unable to serialize params to JSON: #{inspect(message)}"}
end
end
def api_identify(params, opts \\ []) do
api_key =
Keyword.get_lazy(opts, :api_key, fn ->
Application.get_env(:amplitude, :api_key)
end)
case params |> Poison.encode() do
{:ok, params} ->
Task.start(fn ->
start()
case get(identify_api_url(), json_header(),
params: %{api_key: api_key, insert_id: UUID.uuid4(), identification: params}
) do
{:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
Logger.debug("Amplitude Track: #{body}")
{:ok, %HTTPoison.Response{status_code: 404, body: body}} ->
Logger.debug("Amplitude Track: #{body}")
{:ok, %HTTPoison.Response{status_code: 400, body: body}} ->
Logger.debug("Amplitude Track Failed: #{body}")
{:error, %HTTPoison.Error{reason: reason}} ->
Logger.debug("Amplitude Track Failed: #{reason}")
end
end)
{:ok, "success"}
{:error, message} ->
{:error, "Unable to serialize params to JSON: #{inspect(message)}"}
end
end
# validate Poison response and strip out json value
def verify_json({:ok, json}), do: json
def verify_json({_, response}), do: "#{inspect(response)}"
def process_request_headers(headers), do: headers ++ json_header()
end
| 30.966667 | 97 | 0.586294 |
ffdd2dc0bfe02d73638d28de710afedf2cc273bb | 1,406 | ex | Elixir | test/support/data_case.ex | LunarLogic/timetracker | 5c2ffffc9023c7b099d7af8de7f21225352483ca | [
"MIT"
] | 1 | 2019-06-26T06:53:01.000Z | 2019-06-26T06:53:01.000Z | test/support/data_case.ex | LunarLogic/timetracker | 5c2ffffc9023c7b099d7af8de7f21225352483ca | [
"MIT"
] | null | null | null | test/support/data_case.ex | LunarLogic/timetracker | 5c2ffffc9023c7b099d7af8de7f21225352483ca | [
"MIT"
] | null | null | null | defmodule Timetracker.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
alias Timetracker.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import Timetracker.DataCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Timetracker.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Timetracker.Repo, {:shared, self()})
end
:ok
end
@doc """
A helper that transforms changeset errors into a map of messages.
assert {:error, changeset} = Accounts.create_user(%{password: "short"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
Enum.reduce(opts, message, fn {key, value}, acc ->
String.replace(acc, "%{#{key}}", to_string(value))
end)
end)
end
end
| 26.037037 | 77 | 0.684922 |
ffdd2eda24afdd710063975eafa908ef8e82ed5d | 5,118 | ex | Elixir | lib/swoosh/adapters/mailgun.ex | DavidOliver/swoosh | ddbb4c5afb7cdb3416e2b8713e7a5a3cda824179 | [
"MIT"
] | null | null | null | lib/swoosh/adapters/mailgun.ex | DavidOliver/swoosh | ddbb4c5afb7cdb3416e2b8713e7a5a3cda824179 | [
"MIT"
] | null | null | null | lib/swoosh/adapters/mailgun.ex | DavidOliver/swoosh | ddbb4c5afb7cdb3416e2b8713e7a5a3cda824179 | [
"MIT"
] | null | null | null | defmodule Swoosh.Adapters.Mailgun do
@moduledoc ~S"""
An adapter that sends email using the Mailgun API.
For reference: [Mailgun API docs](https://documentation.mailgun.com/api-sending.html#sending)
## Dependency
Mailgun adapter requires `Plug` to work properly.
## Example
# config/config.exs
config :sample, Sample.Mailer,
adapter: Swoosh.Adapters.Mailgun,
api_key: "my-api-key",
domain: "avengers.com"
# lib/sample/mailer.ex
defmodule Sample.Mailer do
use Swoosh.Mailer, otp_app: :sample
end
"""
use Swoosh.Adapter, required_config: [:api_key, :domain], required_deps: [plug: Plug.Conn.Query]
alias Swoosh.Email
import Swoosh.Email.Render
@base_url "https://api.mailgun.net/v3"
@api_endpoint "/messages"
def deliver(%Email{} = email, config \\ []) do
headers = prepare_headers(email, config)
url = [base_url(config), "/", config[:domain], @api_endpoint]
case :hackney.post(url, headers, prepare_body(email), [:with_body]) do
{:ok, 200, _headers, body} ->
{:ok, %{id: Swoosh.json_library().decode!(body)["id"]}}
{:ok, 401, _headers, body} ->
{:error, {401, body}}
{:ok, code, _headers, ""} when code > 399 ->
{:error, {code, ""}}
{:ok, code, _headers, body} when code > 399 ->
{:error, {code, Swoosh.json_library().decode!(body)}}
{:error, reason} ->
{:error, reason}
end
end
defp base_url(config), do: config[:base_url] || @base_url
defp prepare_headers(email, config) do
[
{"User-Agent", "swoosh/#{Swoosh.version()}"},
{"Authorization", "Basic #{auth(config)}"},
{"Content-Type", content_type(email)}
]
end
defp auth(config), do: Base.encode64("api:#{config[:api_key]}")
defp content_type(%{attachments: []}), do: "application/x-www-form-urlencoded"
defp content_type(%{}), do: "multipart/form-data"
defp prepare_body(email) do
%{}
|> prepare_from(email)
|> prepare_to(email)
|> prepare_subject(email)
|> prepare_html(email)
|> prepare_text(email)
|> prepare_cc(email)
|> prepare_bcc(email)
|> prepare_reply_to(email)
|> prepare_attachments(email)
|> prepare_custom_vars(email)
|> prepare_recipient_vars(email)
|> prepare_custom_headers(email)
|> encode_body
end
# example custom_vars
#
# %{"my_var" => %{"my_message_id": 123},
# "my_other_var" => %{"my_other_id": 1, "stuff": 2}}
defp prepare_custom_vars(body, %{provider_options: %{custom_vars: custom_vars}}) do
Enum.reduce(custom_vars, body, fn {k, v}, body ->
Map.put(body, "v:#{k}", Swoosh.json_library().encode!(v))
end)
end
defp prepare_custom_vars(body, _email), do: body
defp prepare_recipient_vars(body, %{provider_options: %{recipient_vars: recipient_vars}}) do
Map.put(body, "recipient-variables", Swoosh.json_library().encode!(recipient_vars))
end
defp prepare_recipient_vars(body, _email), do: body
defp prepare_custom_headers(body, %{headers: headers}) do
Enum.reduce(headers, body, fn {k, v}, body -> Map.put(body, "h:#{k}", v) end)
end
defp prepare_attachments(body, %{attachments: []}), do: body
defp prepare_attachments(body, %{attachments: attachments}) do
{normal_attachments, inline_attachments} =
Enum.split_with(attachments, fn %{type: type} -> type == :attachment end)
body
|> Map.put(:attachments, Enum.map(normal_attachments, &prepare_file(&1, "attachment")))
|> Map.put(:inline, Enum.map(inline_attachments, &prepare_file(&1, "inline")))
end
defp prepare_file(attachment, type) do
{:file, attachment.path,
{"form-data", [{~s/"name"/, ~s/"#{type}"/}, {~s/"filename"/, ~s/"#{attachment.filename}"/}]},
[]}
end
defp prepare_from(body, %{from: from}), do: Map.put(body, :from, render_recipient(from))
defp prepare_to(body, %{to: to}), do: Map.put(body, :to, render_recipient(to))
defp prepare_reply_to(body, %{reply_to: nil}), do: body
defp prepare_reply_to(body, %{reply_to: {_name, address}}),
do: Map.put(body, "h:Reply-To", address)
defp prepare_cc(body, %{cc: []}), do: body
defp prepare_cc(body, %{cc: cc}), do: Map.put(body, :cc, render_recipient(cc))
defp prepare_bcc(body, %{bcc: []}), do: body
defp prepare_bcc(body, %{bcc: bcc}), do: Map.put(body, :bcc, render_recipient(bcc))
defp prepare_subject(body, %{subject: subject}), do: Map.put(body, :subject, subject)
defp prepare_text(body, %{text_body: nil}), do: body
defp prepare_text(body, %{text_body: text_body}), do: Map.put(body, :text, text_body)
defp prepare_html(body, %{html_body: nil}), do: body
defp prepare_html(body, %{html_body: html_body}), do: Map.put(body, :html, html_body)
defp encode_body(%{attachments: attachments, inline: inline} = params) do
{:multipart,
params
|> Map.drop([:attachments, :inline])
|> Enum.map(fn {k, v} -> {to_string(k), v} end)
|> Kernel.++(attachments)
|> Kernel.++(inline)}
end
defp encode_body(no_attachments), do: Plug.Conn.Query.encode(no_attachments)
end
| 31.9875 | 98 | 0.646542 |
ffdd3395942b2d9e1bcd75b38d34e201a0154310 | 66 | ex | Elixir | lib/operation.ex | mpichette/hippy | 46d7140b6374426812b2c21e7369345fbdf67b17 | [
"MIT"
] | 8 | 2018-04-02T14:14:28.000Z | 2021-05-04T19:39:48.000Z | lib/operation.ex | mpichette/hippy | 46d7140b6374426812b2c21e7369345fbdf67b17 | [
"MIT"
] | 1 | 2018-04-06T01:55:04.000Z | 2018-04-06T05:15:47.000Z | lib/operation.ex | mpichette/hippy | 46d7140b6374426812b2c21e7369345fbdf67b17 | [
"MIT"
] | 3 | 2018-04-02T23:45:51.000Z | 2021-04-04T20:14:49.000Z | defprotocol Hippy.Operation do
def build_request(operation)
end
| 16.5 | 30 | 0.833333 |
ffdd33aeb483fd1814145579f1f1b831c5cdb877 | 2,409 | exs | Elixir | bench_pg/Elixir/retailixir/config/dev.exs | cassinius/api-bench | d6f578c2096b5057de28541c5c527a2f2fe9ae99 | [
"MIT"
] | null | null | null | bench_pg/Elixir/retailixir/config/dev.exs | cassinius/api-bench | d6f578c2096b5057de28541c5c527a2f2fe9ae99 | [
"MIT"
] | 27 | 2020-04-20T06:05:50.000Z | 2021-12-09T22:06:37.000Z | bench_pg/Elixir/retailixir/config/dev.exs | cassinius/api-bench | d6f578c2096b5057de28541c5c527a2f2fe9ae99 | [
"MIT"
] | null | null | null | import Config
# Configure your database
config :retailixir, Retailixir.Repo,
username: "retailer",
password: "retailer",
database: "retailer_api",
hostname: "localhost",
show_sensitive_data_on_connection_error: true,
pool_size: 90
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with esbuild to bundle .js and .css sources.
config :retailixir, RetailixirWeb.Endpoint,
# Binding to loopback ipv4 address prevents access from other machines.
# Change to `ip: {0, 0, 0, 0}` to allow access from other machines.
http: [ip: {127, 0, 0, 1}, port: 8000],
check_origin: false,
code_reloader: true,
debug_errors: true,
secret_key_base: "bO2gpfjEke7d+KwnhPhJ1ov1l5XpPGDk+SvYueAQc0MQ6Yr0xh6DZE1IOhRf8Q48",
watchers: [
# Start the esbuild watcher by calling Esbuild.install_and_run(:default, args)
esbuild: {Esbuild, :install_and_run, [:default, ~w(--sourcemap=inline --watch)]}
]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :retailixir, RetailixirWeb.Endpoint,
live_reload: [
patterns: [
~r"priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$",
~r"priv/gettext/.*(po)$",
~r"lib/retailixir_web/(live|views)/.*(ex)$",
~r"lib/retailixir_web/templates/.*(eex)$"
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
| 32.12 | 86 | 0.714404 |
ffdd4ea187a511e02f41a33ec7cbb79b7bc16d5d | 1,133 | exs | Elixir | apps/order_parser/config/config.exs | qwertystop/dominions5.analyst | 7c39ea14640b3a2d2c70df249688bc501f2f2184 | [
"MIT"
] | 1 | 2019-07-04T00:39:31.000Z | 2019-07-04T00:39:31.000Z | apps/order_parser/config/config.exs | qwertystop/dominions5.analyst | 7c39ea14640b3a2d2c70df249688bc501f2f2184 | [
"MIT"
] | null | null | null | apps/order_parser/config/config.exs | qwertystop/dominions5.analyst | 7c39ea14640b3a2d2c70df249688bc501f2f2184 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :order_parser, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:order_parser, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.548387 | 73 | 0.753751 |
ffdd4feca58c1012b40efcaaf9325ab28491d233 | 172 | ex | Elixir | lib/oli_web/live/manual_grading/score_feedback.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 1 | 2022-03-17T20:35:47.000Z | 2022-03-17T20:35:47.000Z | lib/oli_web/live/manual_grading/score_feedback.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 9 | 2021-11-02T16:52:09.000Z | 2022-03-25T15:14:01.000Z | lib/oli_web/live/manual_grading/score_feedback.ex | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | null | null | null | defmodule OliWeb.ManualGrading.ScoreFeedback do
@enforce_keys [
:score,
:out_of,
:feedback
]
defstruct [
:score,
:out_of,
:feedback
]
end
| 11.466667 | 47 | 0.610465 |
ffdd5ec3cdeaa813dd02b07873abb00a0280ecf5 | 906 | exs | Elixir | test/id_token/certificate_store_test.exs | Joe-noh/id_token | 8486b97dc45f871222a57ba97eca1ef85061daea | [
"MIT"
] | null | null | null | test/id_token/certificate_store_test.exs | Joe-noh/id_token | 8486b97dc45f871222a57ba97eca1ef85061daea | [
"MIT"
] | null | null | null | test/id_token/certificate_store_test.exs | Joe-noh/id_token | 8486b97dc45f871222a57ba97eca1ef85061daea | [
"MIT"
] | null | null | null | defmodule IDToken.CertificateStoreTest do
use ExUnit.Case, async: true
alias IDToken.{CertificateStore, Certificate}
setup do
now = DateTime.utc_now()
%{
expired_cert: Certificate.new("secret", DateTime.add(now, -60, :second)),
unexpired_cert: Certificate.new("secret", DateTime.add(now, 60, :second))
}
end
test "put unexpired certificate then get it", %{test: key, unexpired_cert: cert} do
assert :ok == CertificateStore.put(key, cert)
assert cert == CertificateStore.get(key)
end
test "returns nil if expired", %{test: key, expired_cert: cert} do
assert :ok == CertificateStore.put(key, cert)
assert nil == CertificateStore.get(key)
end
test "erase from store", %{test: key, unexpired_cert: cert} do
assert :ok == CertificateStore.put(key, cert)
CertificateStore.erase(key)
assert nil == CertificateStore.get(key)
end
end
| 27.454545 | 85 | 0.689845 |
ffdda0bd47d947191188ee255b82d082535156fa | 5,194 | ex | Elixir | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/options.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/options.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/options.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudAsset.V1.Model.Options do
@moduledoc """
Contains query options.
## Attributes
* `analyzeServiceAccountImpersonation` (*type:* `boolean()`, *default:* `nil`) - Optional. If true, the response will include access analysis from identities to resources via service account impersonation. This is a very expensive operation, because many derived queries will be executed. We highly recommend you use AssetService.AnalyzeIamPolicyLongrunning rpc instead. For example, if the request analyzes for which resources user A has permission P, and there's an IAM policy states user A has iam.serviceAccounts.getAccessToken permission to a service account SA, and there's another IAM policy states service account SA has permission P to a GCP folder F, then user A potentially has access to the GCP folder F. And those advanced analysis results will be included in AnalyzeIamPolicyResponse.service_account_impersonation_analysis. Another example, if the request analyzes for who has permission P to a GCP folder F, and there's an IAM policy states user A has iam.serviceAccounts.actAs permission to a service account SA, and there's another IAM policy states service account SA has permission P to the GCP folder F, then user A potentially has access to the GCP folder F. And those advanced analysis results will be included in AnalyzeIamPolicyResponse.service_account_impersonation_analysis. Default is false.
* `expandGroups` (*type:* `boolean()`, *default:* `nil`) - Optional. If true, the identities section of the result will expand any Google groups appearing in an IAM policy binding. If IamPolicyAnalysisQuery.identity_selector is specified, the identity in the result will be determined by the selector, and this flag is not allowed to set. Default is false.
* `expandResources` (*type:* `boolean()`, *default:* `nil`) - Optional. If true and IamPolicyAnalysisQuery.resource_selector is not specified, the resource section of the result will expand any resource attached to an IAM policy to include resources lower in the resource hierarchy. For example, if the request analyzes for which resources user A has permission P, and the results include an IAM policy with P on a GCP folder, the results will also include resources in that folder with permission P. If true and IamPolicyAnalysisQuery.resource_selector is specified, the resource section of the result will expand the specified resource to include resources lower in the resource hierarchy. Only project or lower resources are supported. Folder and organization resource cannot be used together with this option. For example, if the request analyzes for which users have permission P on a GCP project with this option enabled, the results will include all users who have permission P on that project or any lower resource. Default is false.
* `expandRoles` (*type:* `boolean()`, *default:* `nil`) - Optional. If true, the access section of result will expand any roles appearing in IAM policy bindings to include their permissions. If IamPolicyAnalysisQuery.access_selector is specified, the access section of the result will be determined by the selector, and this flag is not allowed to set. Default is false.
* `outputGroupEdges` (*type:* `boolean()`, *default:* `nil`) - Optional. If true, the result will output group identity edges, starting from the binding's group members, to any expanded identities. Default is false.
* `outputResourceEdges` (*type:* `boolean()`, *default:* `nil`) - Optional. If true, the result will output resource edges, starting from the policy attached resource, to any expanded resources. Default is false.
"""
  use GoogleApi.Gax.ModelBase

  # Struct type mirrors the generated `field/1` declarations below.
  # Per the moduledoc, every option is an optional boolean whose default
  # is nil (the API treats an unset option as false).
  @type t :: %__MODULE__{
          :analyzeServiceAccountImpersonation => boolean(),
          :expandGroups => boolean(),
          :expandResources => boolean(),
          :expandRoles => boolean(),
          :outputGroupEdges => boolean(),
          :outputResourceEdges => boolean()
        }

  # Auto-generated field declarations (camelCase matches the wire format).
  field(:analyzeServiceAccountImpersonation)
  field(:expandGroups)
  field(:expandResources)
  field(:expandRoles)
  field(:outputGroupEdges)
  field(:outputResourceEdges)
end
defimpl Poison.Decoder, for: GoogleApi.CloudAsset.V1.Model.Options do
  # Decoding is delegated to the model's generated decode/2.
  def decode(value, options), do: GoogleApi.CloudAsset.V1.Model.Options.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.CloudAsset.V1.Model.Options do
  # Encoding is shared across all generated models via ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 83.774194 | 1,322 | 0.772237 |
ffdda1594c8c5bf3939669bee4fdc480975c3c19 | 360 | ex | Elixir | test/support/test_helpers.ex | patricklafleur/phoenix-live-props | a93e364cf1d83fba276fa000da0bd11d145c6a32 | [
"MIT"
] | 11 | 2020-07-02T17:02:18.000Z | 2021-10-31T11:27:53.000Z | test/support/test_helpers.ex | patricklafleur/phoenix-live-props | a93e364cf1d83fba276fa000da0bd11d145c6a32 | [
"MIT"
] | null | null | null | test/support/test_helpers.ex | patricklafleur/phoenix-live-props | a93e364cf1d83fba276fa000da0bd11d145c6a32 | [
"MIT"
] | 1 | 2021-01-04T20:29:07.000Z | 2021-01-04T20:29:07.000Z | defmodule LiveProps.TestHelpers do
@moduledoc false
  # Asserts that compiling the LiveProps definitions in `do_block` raises
  # `error` with `message`. The block is injected into a throwaway module
  # that brings in the :prop and :state DSL, so the LiveProps compile-time
  # hooks run inside the `assert_raise/3` closure.
  #
  # NOTE(review): each expansion names the temporary module `Error`
  # (relative to the caller), so invoking this macro more than once in the
  # same test module redefines the same module — confirm that is acceptable.
  defmacro assert_no_compile(error, message, do: do_block) do
    quote do
      assert_raise unquote(error), unquote(message), fn ->
        defmodule Error do
          @moduledoc false
          use LiveProps, include: [:prop, :state]
          unquote(do_block)
        end
      end
    end
  end
end
| 24 | 61 | 0.636111 |
ffdda6c1daa3e0d6ac0c1837caf68307e77b337a | 9,333 | exs | Elixir | test/blue_jet_web/controllers/catalogue/product_collection_membership_controller_test.exs | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 44 | 2018-05-09T01:08:57.000Z | 2021-01-19T07:25:26.000Z | test/blue_jet_web/controllers/catalogue/product_collection_membership_controller_test.exs | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 36 | 2018-05-08T23:59:54.000Z | 2018-09-28T13:50:30.000Z | test/blue_jet_web/controllers/catalogue/product_collection_membership_controller_test.exs | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 9 | 2018-05-09T14:09:19.000Z | 2021-03-21T21:04:04.000Z | defmodule BlueJetWeb.ProductCollectionMembershipControllerTest do
use BlueJetWeb.ConnCase
import BlueJet.Identity.TestHelper
import BlueJet.Catalogue.TestHelper
setup do
conn =
build_conn()
|> put_req_header("accept", "application/vnd.api+json")
|> put_req_header("content-type", "application/vnd.api+json")
%{conn: conn}
end
# List product collection membership
  describe "GET /v1/product_collections/:id/memberships" do
    # Listing memberships requires authentication.
    test "without access token", %{conn: conn} do
      conn = get(conn, "/v1/product_collections/#{UUID.generate()}/memberships")

      assert conn.status == 401
    end

    test "with PAT", %{conn: conn} do
      account1 = account_fixture()
      account2 = account_fixture()
      collection1 = product_collection_fixture(account1)
      collection2 = product_collection_fixture(account2)
      product11 = product_fixture(account1)
      product12 = product_fixture(account1, %{status: "active"})
      product21 = product_fixture(account2)

      product_collection_membership_fixture(account1, collection1, product11)
      product_collection_membership_fixture(account1, collection1, product12)
      product_collection_membership_fixture(account2, collection2, product21)

      pat = get_pat(account1)

      conn = put_req_header(conn, "authorization", "Bearer #{pat}")
      conn = get(conn, "/v1/product_collections/#{collection1.id}/memberships")

      response = json_response(conn, 200)

      # Only one of collection1's two memberships comes back with a public
      # access token — presumably the one whose product is "active";
      # TODO confirm the visibility rule against the controller.
      assert length(response["data"]) == 1
    end

    test "with UAT", %{conn: conn} do
      user1 = standard_user_fixture()
      user2 = standard_user_fixture()
      account1 = user1.default_account
      account2 = user2.default_account
      collection1 = product_collection_fixture(account1)
      collection2 = product_collection_fixture(account2)
      product11 = product_fixture(account1)
      product12 = product_fixture(account1, %{status: "active"})
      product21 = product_fixture(account2)

      product_collection_membership_fixture(account1, collection1, product11)
      product_collection_membership_fixture(account1, collection1, product12)
      product_collection_membership_fixture(account2, collection2, product21)

      uat = get_uat(account1, user1)

      conn = put_req_header(conn, "authorization", "Bearer #{uat}")
      conn = get(conn, "/v1/product_collections/#{collection1.id}/memberships")

      response = json_response(conn, 200)

      # A user access token sees both of its own account's memberships;
      # the other account's membership is never included.
      assert length(response["data"]) == 2
    end
  end
# Create a product collection membership
  describe "POST /v1/product_collections/:id/memberships" do
    test "without access token", %{conn: conn} do
      conn = post(conn, "/v1/product_collections/#{UUID.generate()}/memberships", %{
        "data" => %{
          "type" => "ProductCollectionMembership"
        }
      })

      assert conn.status == 401
    end

    # Creating a membership is a user-level action: a publishable access
    # token is rejected with 403.
    test "with PAT", %{conn: conn} do
      account = account_fixture()
      pat = get_pat(account)

      conn = put_req_header(conn, "authorization", "Bearer #{pat}")

      conn = post(conn, "/v1/product_collections/#{UUID.generate()}/memberships", %{
        "data" => %{
          "type" => "ProductCollectionMembership"
        }
      })

      assert conn.status == 403
    end

    test "with no attributes", %{conn: conn} do
      user = standard_user_fixture()
      collection = product_collection_fixture(user.default_account)
      uat = get_uat(user.default_account, user)

      conn = put_req_header(conn, "authorization", "Bearer #{uat}")

      conn = post(conn, "/v1/product_collections/#{collection.id}/memberships", %{
        "data" => %{
          "type" => "ProductCollectionMembership"
        }
      })

      response = json_response(conn, 422)

      # Two validation errors — presumably the missing collection and
      # product relationships; TODO confirm against the changeset.
      assert length(response["errors"]) == 2
    end

    test "with valid attributes", %{conn: conn} do
      user = standard_user_fixture()
      collection = product_collection_fixture(user.default_account)
      product = product_fixture(user.default_account)
      uat = get_uat(user.default_account, user)

      conn = put_req_header(conn, "authorization", "Bearer #{uat}")

      conn = post(conn, "/v1/product_collections/#{collection.id}/memberships", %{
        "data" => %{
          "type" => "ProductCollectionMembership",
          "relationships" => %{
            "collection" => %{
              "data" => %{
                "id" => collection.id,
                "type" => "ProductCollection"
              }
            },
            "product" => %{
              "data" => %{
                "id" => product.id,
                "type" => "Product"
              }
            }
          }
        }
      })

      assert json_response(conn, 201)
    end
  end
# Retrieve a product collection membership
  describe "GET /v1/product_collection_memberships/:id" do
    test "without access token", %{conn: conn} do
      conn = get(conn, "/v1/product_collection_memberships/#{UUID.generate()}")

      assert conn.status == 401
    end

    # Retrieving a single membership is not available to publishable
    # access tokens — expect 403.
    test "with PAT", %{conn: conn} do
      account = account_fixture()
      collection = product_collection_fixture(account)
      product = product_fixture(account, %{status: "active"})
      membership = product_collection_membership_fixture(account, collection, product)
      pat = get_pat(account)

      conn = put_req_header(conn, "authorization", "Bearer #{pat}")
      conn = get(conn, "/v1/product_collection_memberships/#{membership.id}")

      assert conn.status == 403
    end

    test "with UAT", %{conn: conn} do
      user = standard_user_fixture()
      collection = product_collection_fixture(user.default_account)
      product = product_fixture(user.default_account)
      membership = product_collection_membership_fixture(user.default_account, collection, product)
      uat = get_uat(user.default_account, user)

      conn = put_req_header(conn, "authorization", "Bearer #{uat}")
      conn = get(conn, "/v1/product_collection_memberships/#{membership.id}")

      assert json_response(conn, 200)
    end
  end
# Update a product collection membership
  describe "PATCH /v1/product_collection_memberships/:id" do
    test "without access token", %{conn: conn} do
      conn = patch(conn, "/v1/product_collection_memberships/#{UUID.generate()}", %{
        "data" => %{
          "type" => "ProductCollectionMembership",
          "attributes" => %{
            "sortIndex" => System.unique_integer([:positive])
          }
        }
      })

      assert conn.status == 401
    end

    # Updating a membership is a user-level action: PAT gets 403.
    test "with PAT", %{conn: conn} do
      account = account_fixture()
      collection = product_collection_fixture(account)
      product = product_fixture(account, %{status: "active"})
      membership = product_collection_membership_fixture(account, collection, product)
      pat = get_pat(account)

      conn = put_req_header(conn, "authorization", "Bearer #{pat}")

      conn = patch(conn, "/v1/product_collection_memberships/#{membership.id}", %{
        "data" => %{
          "id" => membership.id,
          "type" => "ProductCollectionMembership",
          "attributes" => %{
            "sortIndex" => System.unique_integer([:positive])
          }
        }
      })

      assert conn.status == 403
    end

    test "with UAT", %{conn: conn} do
      user = standard_user_fixture()
      collection = product_collection_fixture(user.default_account)
      product = product_fixture(user.default_account)
      membership = product_collection_membership_fixture(user.default_account, collection, product)
      uat = get_uat(user.default_account, user)

      conn = put_req_header(conn, "authorization", "Bearer #{uat}")

      conn = patch(conn, "/v1/product_collection_memberships/#{membership.id}", %{
        "data" => %{
          "id" => membership.id,
          "type" => "ProductCollectionMembership",
          "attributes" => %{
            "sortIndex" => System.unique_integer([:positive])
          }
        }
      })

      assert json_response(conn, 200)
    end
  end
# Delete a product collection membership
describe "DELETE /v1/product_collection_memberships/:id" do
test "without access token", %{conn: conn} do
conn = delete(conn, "/v1/products/#{UUID.generate()}")
assert conn.status == 401
end
test "with PAT", %{conn: conn} do
account = account_fixture()
collection = product_collection_fixture(account)
product = product_fixture(account, %{status: "active"})
membership = product_collection_membership_fixture(account, collection, product)
pat = get_pat(account)
conn = put_req_header(conn, "authorization", "Bearer #{pat}")
conn = delete(conn, "/v1/product_collection_memberships/#{membership.id}")
assert conn.status == 403
end
test "with UAT", %{conn: conn} do
user = standard_user_fixture()
collection = product_collection_fixture(user.default_account)
product = product_fixture(user.default_account)
membership = product_collection_membership_fixture(user.default_account, collection, product)
uat = get_uat(user.default_account, user)
conn = put_req_header(conn, "authorization", "Bearer #{uat}")
conn = delete(conn, "/v1/product_collection_memberships/#{membership.id}")
assert conn.status == 204
end
end
end
| 33.815217 | 99 | 0.646523 |
ffddaa4e4e956fa772ef5c09725613dd1203adaf | 4,362 | exs | Elixir | machine_translation/MorpHIN/Learned/Resources/Set1/TrainingInstances/68.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | machine_translation/MorpHIN/Learned/Resources/Set1/TrainingInstances/68.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | machine_translation/MorpHIN/Learned/Resources/Set1/TrainingInstances/68.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | **EXAMPLE FILE**
cm pn conj quantifier cm;
adjective noun demonstrative noun cm;
pn noun adjective verb cm;
adjective verb particle noun cm;
adjective verb noun verb cm;
SYM pnoun pnoun cm cm;
cm noun noun verb cm;
cardinal noun verb verb_aux cm;
cm noun adjective verb cm;
cm noun verb verb_aux cm;
cm pn verb verb_aux cm;
cm noun cardinal noun cm;
conj noun adjective verb cm;
adjective noun adjective noun cm;
cardinal noun noun verb cm;
cm noun noun cm cm;
noun verb noun verb cm;
adjective noun pnoun noun cm;
verb verb adjective verb cm;
pnoun pnoun pnoun pnoun cm;
verb verb_aux noun cm cm;
verb_aux pn pnoun cm cm;
cm noun pnoun cm cm;
cm noun noun verb cm;
ordinal noun verb SYM cm;
ordinal noun verb SYM cm;
ordinal noun verb SYM cm;
cardinal noun pnoun cm cm;
demonstrative noun adjective noun cm;
cm noun noun verb cm;
cm noun verb noun cm;
cm noun adjective verb cm;
pnoun noun pn noun cm;
adjective noun pnoun pnoun cm;
adjective noun pnoun cm cm;
cm noun pnoun cm cm;
cm noun noun verb cm;
noun noun verb verb_aux cm;
cm pnoun noun verb cm;
quantifier verb pn verb cm;
pn verb demonstrative noun cm;
demonstrative noun noun verb cm;
noun noun noun verb cm;
noun SYM noun verb cm;
demonstrative noun cardinal cardinal cm;
demonstrative noun SYM adjective cm;
adjective noun quantifier adjective cm;
pn noun cardinal cm cm;
cm noun pnoun cm cm;
adjective noun particle noun cm;
cardinal noun noun verb cm;
cm noun cardinal noun cm;
adjective noun adjective cardinal cm;
cardinal noun adverb adjective cm;
pn noun quantifier noun cm;
demonstrative noun noun verb cm;
demonstrative noun cardinal quantifier cm;
cm noun noun pnoun cm;
pn verb adjective noun cm;
particle noun verb verb_aux cm;
cm noun verb verb_aux cm;
SYM pnoun pnoun cm cm;
quantifier noun pnoun cm cm;
cm pnoun noun quantifier cm;
cm noun verb verb_aux cm;
cm pnoun pn noun cm;
cm noun quantifier adjective cm;
adjective verb adjective verb cm;
adjective verb adjective verb cm;
demonstrative noun adjective noun cm;
adjective noun noun verb cm;
cm noun noun verb cm;
noun noun particle noun cm;
pnoun pnoun cardinal demonstrative cm;
demonstrative noun noun neg cm;
cm noun verb verb_aux cm;
demonstrative noun pnoun cm cm;
cm noun pnoun verb cm;
cm noun pn adjective cm;
cm noun noun SYM cm;
adjective verb P_wh verb cm;
ordinal noun adjective pnoun cm;
cm noun pn noun cm;
pn noun noun cm cm;
cm noun cardinal noun cm;
conj noun noun verb cm;
demonstrative noun quantifier adjective cm;
demonstrative noun adjective noun cm;
noun noun noun cm cm;
pnoun noun verb conj cm;
ordinal noun verb verb_aux cm;
verb noun verb conj cm;
adverb noun noun verb cm;
adjective noun pn noun cm;
particle noun verb verb_aux cm;
conj pn noun cm cm;
pn noun noun adjective cm;
cm pnoun pnoun cm cm;
cm pnoun pnoun cm cm;
adjective noun pn SYM cm;
adjective noun adjective noun cm;
cm noun noun verb cm;
cm noun demonstrative noun cm;
conj noun pn noun cm;
cm pnoun cardinal noun cm;
cm noun verb SYM cm;
cm verb noun verb cm;
cm verb noun verb cm;
cm noun adverb cardinal cm;
noun verb verb verb_aux cm;
noun noun noun verb cm;
adjective noun particle noun cm;
cm verb noun verb cm;
cm verb noun verb cm;
pnoun noun verb noun cm;
cardinal noun cardinal noun cm;
cardinal noun cardinal noun cm;
cardinal noun pn ordinal cm;
cardinal noun ordinal noun cm;
pnoun pnoun noun verb cm;
ordinal noun verb verb_aux cm;
verb pn pn cm cm;
adjective noun adjective noun cm;
adjective noun noun verb cm;
adjective noun pnoun cm cm;
pnoun noun pnoun SYM cm;
particle pnoun pnoun SYM cm;
cm noun pnoun cm cm;
cm noun verb verb_aux cm;
cm noun pnoun pnoun cm;
noun noun verb verb_aux cm;
SYM noun noun noun cm;
conj noun verb verb_aux cm;
SYM noun verb cm cm;
demonstrative noun demonstrative noun cm;
cm noun noun verb cm;
conj noun pnoun cm cm;
adjective noun noun neg cm;
demonstrative noun conj noun cm;
cardinal noun pnoun cm cm;
cm noun noun P_wh cm;
noun noun pn ordinal cm;
conj pn cardinal noun cm;
conj noun cardinal cardinal cm;
SYM noun noun conj cm;
cm noun noun cm cm;
demonstrative noun noun verb cm;
adjective noun noun cm cm;
noun noun verb verb_aux cm;
pn noun noun cardinal cm;
adjective noun pnoun cm cm;
noun noun adjective adjective cm;
| 28.141935 | 44 | 0.75447 |
ffddd5269368dd3081849d6166d1369ef0ecb95b | 96 | exs | Elixir | module.exs | pferreirafabricio/elixir-hellowWorld | a6d4d409148ec2e43f1c4fea36ab515d210878c4 | [
"MIT"
] | null | null | null | module.exs | pferreirafabricio/elixir-hellowWorld | a6d4d409148ec2e43f1c4fea36ab515d210878c4 | [
"MIT"
] | null | null | null | module.exs | pferreirafabricio/elixir-hellowWorld | a6d4d409148ec2e43f1c4fea36ab515d210878c4 | [
"MIT"
] | null | null | null | defmodule HelloWorld do
def sayHelloWorld() do
IO.puts("Hello World with a module")
end
end
| 16 | 38 | 0.75 |
ffde29a79cdf26e123245e4eb767588bb905106f | 5,171 | ex | Elixir | lib/symbelix.ex | kek/symbelix | 590f0ca3f0eb5b2e6a4f2c2167232caacc2a2b2d | [
"Apache-2.0"
] | null | null | null | lib/symbelix.ex | kek/symbelix | 590f0ca3f0eb5b2e6a4f2c2167232caacc2a2b2d | [
"Apache-2.0"
] | null | null | null | lib/symbelix.ex | kek/symbelix | 590f0ca3f0eb5b2e6a4f2c2167232caacc2a2b2d | [
"Apache-2.0"
] | null | null | null | defmodule Symbelix do
alias Symbelix.Expression
@moduledoc """
Expression parser and evaluator.
"""
@spec run(source :: String.t(), library :: Module.t()) :: any()
@doc """
Runs a program specified by the source code `source` together with
the function library `library`. Returns the result of the program,
or an {:error, message} tuple in case of an error.
## Examples:
iex> Symbelix.run("(add 1 2)", Mathematician)
3
iex> Symbelix.run("(sub 1 2)", Mathematician)
{:error, "Unknown function (atom) 'sub' at line 1 with 2 parameter(s): (1 2)"}
iex> Symbelix.run("(sub (add 1 2) 1)", Mathematician)
{:error, "Unknown function (atom) 'sub' at line 1 with 2 parameter(s): ((add 1 2) 1)"}
iex> Symbelix.run("(first [1 2])", ListProcessor)
1
"""
def run(source, library, debug \\ fn x, _ -> x end) do
with {:ok, code} <- Expression.parse(source),
^code <- debug.(code, label: "code"),
{:ok, ast} <- compile(code, library),
^ast <- debug.(ast, label: "ast") do
case ast do
{:proc, proc} ->
{:proc, proc} |> debug.([])
_ ->
{result, _binding} = Code.eval_quoted(ast)
result |> debug.([])
end
else
error -> error |> debug.([])
end
end
  @doc """
  Compiles a symbolic expression to Elixir AST.

  ## Examples:

      iex> compile([{:atom, 1, 'add'}, {:number, 1, 1}, {:number, 1, 2}], Mathematician)
      {:ok, {:apply, [context: Symbelix.Library, import: Kernel], [Symbelix.TestHelpers.Libraries.Mathematician, :add, [[1, 2]]]}}

      iex> compile([{:atom, 1, 'aliens'}, {:atom, 1, 'built'}, {:atom, 1, 'it'}], Mathematician)
      {:error, "Unknown function (atom) 'aliens' at line 1 with 2 parameter(s): (built it)"}

      iex> compile([{:atom, 1, '<?php'}], PHP)
      {:error, "The module PHP doesn't exist"}

      iex> compile([{:atom, 1, 'public'}], Java)
      {:error, "The module Symbelix.TestHelpers.Libraries.Java doesn't implement Symbelix.Library behaviour"}

      iex> compile([{:atom, 1, 'proc'}, {:atom, 1, 'add'}, {:number, 1, 1}, {:number, 1, 2}], Mathematician)
      {:ok, {:proc, [{:atom, 1, 'add'}, {:number, 1, 1}, {:number, 1, 2}]}}
  """
  # `(eval (proc ...))` — the proc literal's body is compiled directly.
  # The assertive `{:ok, ast} =` match raises MatchError on compile failure.
  def compile([{:atom, _line, 'eval'} | [[{:atom, _, 'proc'} | code]]], library) do
    {:ok, ast} = compile(code, library)
    {:ok, ast}
  end

  # `(eval expr)` — compile and run `expr`, which must yield a {:proc, code}
  # value, then compile and run the stored code. Both steps match
  # assertively and raise on any failure.
  def compile([{:atom, _line, 'eval'} | [params]], library) do
    {:ok, ast} = compile(params, library)
    {{:proc, code}, []} = Code.eval_quoted(ast)
    {{:ok, result}, []} = compile(code, library) |> Code.eval_quoted()
    {:ok, result}
  end

  # `(proc ...)` — keep the raw parsed expression around as data.
  def compile([{:atom, _, 'proc'} | params], _) do
    {:ok, {:proc, params}}
  end

  # General call form: look up `name` in the library and build the apply AST.
  def compile([{type, line, name} | params], library) do
    if Code.ensure_compiled?(library) do
      # Arguments are lowered first (nested expressions compile recursively).
      values = Enum.map(params, &value_of(&1, library))

      try do
        case library.generate_ast([name] ++ values) do
          {:ok, ast} ->
            {:ok, ast}

          {:error, :no_such_implementation} ->
            # Render the original (uncompiled) params for the error message.
            values_description =
              params
              |> Enum.map(&show/1)
              |> Enum.join(" ")

            {:error,
             "Unknown function (#{type}) '#{name}' at line #{line} with #{Enum.count(values)} parameter(s): (#{
               values_description
             })"}
        end
      rescue
        # Raised when the module exists but exports no generate_ast/1.
        UndefinedFunctionError ->
          {:error, "The module #{inspect(library)} doesn't implement Symbelix.Library behaviour"}
      end
    else
      {:error, "The module #{inspect(library)} doesn't exist"}
    end
  end
  # Interactive read-eval-print loop over `run/3`.
  #
  # Commands: "q" quits, "r" recompiles the project (IEx only), EOF exits.
  # Anything else is evaluated against `library` with IO.inspect/2 as the
  # debug hook, so intermediate code/AST are printed.
  def repl(library \\ Symbelix.Library.Standard) do
    # Lazily boot and register the Memory process on first use so `set`/`get`
    # style library calls have a place to keep state between inputs.
    unless Process.whereis(Symbelix.Library.Memory) do
      {:ok, memory} = Symbelix.Library.Memory.start_link()
      Process.register(memory, Symbelix.Library.Memory)
    end

    source = IO.gets("> ")

    case source do
      :eof ->
        IO.puts("\nGoodbye")

      "q\n" ->
        IO.puts("Goodbye")

      "r\n" ->
        IO.inspect(IEx.Helpers.recompile(), label: "recompile")
        repl(library)

      source ->
        # Broad rescue is deliberate here: a bad expression should print the
        # error and keep the loop alive rather than crash the REPL.
        try do
          run(source, library, &IO.inspect/2)
        rescue
          exception ->
            IO.puts("Error: #{inspect(exception)}")
            IO.inspect(__STACKTRACE__, label: "stacktrace")
        end

        repl(library)
    end
  end
defp show({:number, _, value}), do: "#{value}"
defp show({:atom, _, value}), do: "#{value}"
defp show({:string, _, value}), do: "#{value}"
defp show({:list, value}), do: inspect(value)
defp show([x | tail]), do: "(" <> show(x) <> show_tail(tail)
defp show_tail([x | tail]), do: " " <> show(x) <> show_tail(tail)
defp show_tail([]), do: ")"
defp value_of({:number, _, value}, _), do: value
defp value_of({:atom, _, value}, _), do: value
defp value_of({:string, _, value}, _), do: value
defp value_of({:list, value}, binding), do: Enum.map(value, &value_of(&1, binding))
defp value_of([{:atom, _line, 'proc'} | proc], _) do
{:proc, Macro.escape(proc)}
end
defp value_of(expression, library) when is_list(expression) do
{:ok, ast} = compile(expression, library)
ast
end
end
| 30.417647 | 130 | 0.562174 |
ffde2e7fb658db3665f3ce2ab64f0e6a9fa443f1 | 103 | exs | Elixir | config/list.exs | kelostrada/peerage | 1bc83b2662129161d36901a063553c1650bcbf4e | [
"MIT"
] | 389 | 2016-10-29T01:40:47.000Z | 2022-03-19T09:49:26.000Z | config/list.exs | kelostrada/peerage | 1bc83b2662129161d36901a063553c1650bcbf4e | [
"MIT"
] | 18 | 2017-01-11T13:27:36.000Z | 2019-04-09T20:17:15.000Z | config/list.exs | kelostrada/peerage | 1bc83b2662129161d36901a063553c1650bcbf4e | [
"MIT"
] | 23 | 2017-01-11T12:50:18.000Z | 2021-04-12T01:42:54.000Z | use Mix.Config
config :peerage, via: Peerage.Via.List, node_list: [
:"a@127.0.0.1", :"b@127.0.0.1"
]
| 20.6 | 52 | 0.631068 |
ffde34e5f4f45075ac0f67a7a6f05b605abda79d | 4,413 | exs | Elixir | test/glimesh/chat_effects_test.exs | LittleToonCat/glimesh.tv | 97b0eb7881888faeb525e6a99cc73ebb559e8eba | [
"MIT"
] | null | null | null | test/glimesh/chat_effects_test.exs | LittleToonCat/glimesh.tv | 97b0eb7881888faeb525e6a99cc73ebb559e8eba | [
"MIT"
] | null | null | null | test/glimesh/chat_effects_test.exs | LittleToonCat/glimesh.tv | 97b0eb7881888faeb525e6a99cc73ebb559e8eba | [
"MIT"
] | null | null | null | defmodule Glimesh.ChatEffectsTest do
use Glimesh.DataCase
import Glimesh.AccountsFixtures
import Glimesh.PaymentsFixtures
import Phoenix.HTML, only: [safe_to_string: 1]
alias Glimesh.Chat.ChatMessage
alias Glimesh.Chat.Effects
alias Glimesh.StreamModeration
  describe "chat rendering" do
    # Shared fixtures: a streamer with a channel, a user promoted to
    # moderator on that channel (with timeout/ban powers), and one plain
    # viewer.
    setup do
      streamer = streamer_fixture()
      moderator = user_fixture()

      {:ok, _} =
        StreamModeration.create_channel_moderator(streamer, streamer.channel, moderator, %{
          can_short_timeout: true,
          can_long_timeout: true,
          can_ban: true
        })

      %{
        channel: streamer.channel,
        streamer: streamer,
        moderator: moderator,
        user: user_fixture()
      }
    end

    # Admins get the staff tooltip and admin avatar ring, but no
    # channel-level badge.
    test "renders appropriate tags for admins", %{channel: channel} do
      admin = admin_fixture()

      assert safe_to_string(Effects.render_username(admin)) =~ "Glimesh Staff"

      assert safe_to_string(Effects.render_avatar(admin)) =~
               "avatar-ring platform-admin-ring"

      assert Effects.render_channel_badge(channel, admin) == ""
    end

    test "renders appropriate tags for moderator", %{
      channel: channel,
      streamer: streamer,
      moderator: moderator
    } do
      {:ok, _} = StreamModeration.create_channel_moderator(streamer, channel, moderator, %{})

      assert safe_to_string(Effects.render_channel_badge(channel, moderator)) =~
               "Mod"

      assert safe_to_string(Effects.render_channel_badge(channel, moderator)) =~
               "badge badge-primary"
    end

    test "renders appropriate tags for platform founder subscribers", %{
      user: user
    } do
      {:ok, _sub} = platform_founder_subscription_fixture(user)

      rendered_username = safe_to_string(Effects.render_username(user))
      rendered_avatar = safe_to_string(Effects.render_avatar(user))

      assert rendered_username =~ "text-warning"
      assert rendered_username =~ "Glimesh Founder Subscriber"
      assert rendered_avatar =~ "avatar-ring avatar-animated-ring platform-founder-ring"
    end

    test "renders appropriate tags for platform supporter subscribers", %{user: user} do
      {:ok, _sub} = platform_supporter_subscription_fixture(user)

      rendered_username = safe_to_string(Effects.render_username(user))
      rendered_avatar = safe_to_string(Effects.render_avatar(user))

      assert rendered_username =~ "text-white"
      assert rendered_username =~ "Glimesh Supporter Subscriber"
      assert rendered_avatar =~ "avatar-ring platform-supporter-ring"
    end

    test "renders appropriate tags for channel subscribers", %{
      channel: channel,
      streamer: streamer,
      user: user
    } do
      {:ok, _sub} = channel_subscription_fixture(streamer, user)

      assert safe_to_string(Effects.render_channel_badge(channel, user)) =~
               "Channel Subscriber"

      assert safe_to_string(Effects.render_channel_badge(channel, user)) =~
               "badge badge-secondary"
    end

    test "renders appropriate tags for streamer", %{channel: channel, streamer: streamer} do
      assert safe_to_string(Effects.render_channel_badge(channel, streamer)) =~
               "Streamer"

      assert safe_to_string(Effects.render_channel_badge(channel, streamer)) =~
               "badge badge-primary"
    end

    # Plain viewers get the default white username and bare avatar ring.
    test "renders appropriate tags for regular viewer", %{user: user} do
      rendered_username = safe_to_string(Effects.render_username(user))
      rendered_avatar = safe_to_string(Effects.render_avatar(user))

      assert rendered_username =~ "text-white"
      assert rendered_username =~ user.displayname
      assert rendered_avatar =~ "avatar-ring"
    end

    test "user_in_message/2 doesn't trigger with no user" do
      assert Effects.user_in_message(nil, %ChatMessage{}) == false
    end

    # Mentions match both bare usernames and "@username" form.
    test "user_in_message/2 detects username in message", %{
      user: user,
      streamer: streamer,
      channel: channel
    } do
      {:ok, message} =
        Glimesh.Chat.create_chat_message(streamer, channel, %{
          message: "hello #{user.username} world"
        })

      assert Effects.user_in_message(user, message)

      {:ok, message} =
        Glimesh.Chat.create_chat_message(streamer, channel, %{
          message: "hello @#{user.username} world"
        })

      assert Effects.user_in_message(user, message)
    end
  end
end
| 32.688889 | 93 | 0.678677 |
ffde3dd7b5dd488f7dbeb6335071075189f41ca6 | 184 | ex | Elixir | lib/nubank_api/feature.ex | jeffhsta/nubank_api | c4ca246f6e6ff6e77e349293daaff3741d3975ff | [
"MIT"
] | 14 | 2019-02-19T16:59:16.000Z | 2021-10-30T14:57:10.000Z | lib/nubank_api/feature.ex | fernandozoomp/API_NuBank | cb05de00d48275f3eae94fa3cb4ea55684289937 | [
"MIT"
] | 32 | 2019-02-21T02:40:03.000Z | 2020-06-09T00:04:03.000Z | lib/nubank_api/feature.ex | fernandozoomp/API_NuBank | cb05de00d48275f3eae94fa3cb4ea55684289937 | [
"MIT"
] | 3 | 2019-02-22T00:20:34.000Z | 2021-04-08T20:35:07.000Z | defmodule NubankAPI.Feature do
  @moduledoc false

  # Injects an `@http` module attribute into the using module, pointing at
  # the configured HTTP adapter and defaulting to NubankAPI.HTTPWrapper.
  # Because the `Application.get_env/3` call lives inside the quoted block,
  # it is evaluated when the *using* module compiles — the adapter is frozen
  # at compile time. NOTE(review): presumably intentional so tests can swap
  # the adapter via config; confirm runtime reconfiguration is never needed.
  defmacro __using__(_) do
    quote do
      @http Application.get_env(:nubank_api, :http, NubankAPI.HTTPWrapper)
    end
  end
end
| 18.4 | 74 | 0.722826 |
ffde50fd928b2f4d889d824558b702f94b2e4e5f | 421 | exs | Elixir | server/test/blogsley_web/views/error_view_test.exs | blogsley/blogsley-absinthe | 7266f80a0638f8c30c37b37296ba5d78dcf8fb8b | [
"MIT"
] | 3 | 2020-01-23T06:37:18.000Z | 2020-12-05T08:31:00.000Z | server/test/blogsley_web/views/error_view_test.exs | blogsley/blogsley-absinthe | 7266f80a0638f8c30c37b37296ba5d78dcf8fb8b | [
"MIT"
] | 16 | 2020-04-04T05:13:52.000Z | 2022-03-31T11:19:20.000Z | server/test/blogsley_web/views/error_view_test.exs | blogsley/blogsley | 7266f80a0638f8c30c37b37296ba5d78dcf8fb8b | [
"MIT"
] | null | null | null | defmodule BlogsleyWeb.ErrorViewTest do
use BlogsleyWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(BlogsleyWeb.ErrorView, "404.html", []) == "Not Found"
end
test "renders 500.html" do
assert render_to_string(BlogsleyWeb.ErrorView, "500.html", []) == "Internal Server Error"
end
end
| 28.066667 | 93 | 0.736342 |
ffde8aa251481e5e4aed6bcc91c0f98daea2003f | 388 | exs | Elixir | elixir/hello_tasks/priv/repo/migrations/20210526230556_add_parameter.exs | ivan-iver/goophers_mx | c2342d9513e07e77f84dee82eaf74ac1ed1ae38f | [
"MIT"
] | null | null | null | elixir/hello_tasks/priv/repo/migrations/20210526230556_add_parameter.exs | ivan-iver/goophers_mx | c2342d9513e07e77f84dee82eaf74ac1ed1ae38f | [
"MIT"
] | null | null | null | elixir/hello_tasks/priv/repo/migrations/20210526230556_add_parameter.exs | ivan-iver/goophers_mx | c2342d9513e07e77f84dee82eaf74ac1ed1ae38f | [
"MIT"
] | null | null | null | defmodule HelloTasks.Repo.Migrations.AddParameter do
use Ecto.Migration
@comment "Stores the configuration of parameters"
def change do
create table(:parameter, comment: @comment) do
add(:name, :string)
add(:data, :string)
add(:data_type, :string)
add(:description, :string)
timestamps()
end
create(index(:parameter, [:name]))
end
end
| 21.555556 | 52 | 0.664948 |
ffde8c44ec6c85c3799b30b507345f4de5137a52 | 2,299 | exs | Elixir | mix.exs | buoy49/zcash-explorer | 3774ef15a46ef13379d5a808f7cea198b76c589a | [
"Apache-2.0"
] | 5 | 2021-11-04T20:19:35.000Z | 2022-02-15T06:55:49.000Z | mix.exs | buoy49/zcash-explorer | 3774ef15a46ef13379d5a808f7cea198b76c589a | [
"Apache-2.0"
] | 5 | 2021-09-12T01:36:25.000Z | 2022-02-18T07:28:42.000Z | mix.exs | buoy49/zcash-explorer | 3774ef15a46ef13379d5a808f7cea198b76c589a | [
"Apache-2.0"
] | 8 | 2021-07-23T17:11:41.000Z | 2022-03-17T17:07:55.000Z | defmodule ZcashExplorer.MixProject do
  use Mix.Project

  # Project definition: compiled with the Phoenix/gettext compilers in
  # addition to the defaults; release builds run in permanent mode.
  def project do
    [
      app: :zcash_explorer,
      version: "0.1.0",
      elixir: "~> 1.7",
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: [:phoenix, :gettext] ++ Mix.compilers(),
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      deps: deps()
    ]
  end

  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      mod: {ZcashExplorer.Application, []},
      # :os_mon and :cachex must be running before the app starts.
      extra_applications: [:logger, :runtime_tools, :os_mon, :cachex]
    ]
  end

  # Specifies which paths to compile per environment.
  # Tests additionally compile the support helpers under test/support.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # Specifies your project dependencies.
  #
  # Type `mix help deps` for examples and options.
  defp deps do
    [
      {:phoenix, "~> 1.5.8"},
      {:phoenix_ecto, "~> 4.1"},
      {:ecto_sql, "~> 3.4"},
      {:postgrex, ">= 0.0.0"},
      {:ecto_psql_extras, "~> 0.2"},
      {:phoenix_html, "~> 2.11"},
      {:phoenix_live_reload, "~> 1.2", only: :dev},
      {:phoenix_live_dashboard, "~> 0.4"},
      {:telemetry_metrics, "~> 0.4"},
      {:telemetry_poller, "~> 0.4"},
      {:gettext, "~> 0.11"},
      {:jason, "~> 1.0"},
      {:plug_cowboy, "~> 2.0"},
      {:httpoison, "~> 1.8"},
      {:poison, "~> 3.1"},
      {:observer_cli, "~> 1.6"},
      {:cachex, "~> 3.3"},
      {:phoenix_live_view, "~> 0.15.4"},
      # Test-only HTML parsing for LiveView tests.
      {:floki, ">= 0.27.0", only: :test},
      # Git dependency — pinned to the default branch, not a tag.
      {:zcashex, github: "nighthawk-apps/zcashex"},
      {:timex, "~> 3.0"},
      {:sizeable, "~> 1.0"},
      {:eqrcode, "~> 0.1.8"},
      {:contex, "~> 0.3.0"},
      {:muontrap, "~> 0.6.1"}
    ]
  end

  # Aliases are shortcuts or tasks specific to the current project.
  # For example, to install project dependencies and perform other setup tasks, run:
  #
  #     $ mix setup
  #
  # See the documentation for `Mix` for more info on aliases.
  defp aliases do
    [
      setup: ["deps.get", "ecto.setup", "cmd npm install --prefix assets"],
      "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"]
    ]
  end
end
| 29.101266 | 84 | 0.551544 |
ffde9151bab5d6e5f43b42a80aff9fe76fc44f66 | 1,891 | exs | Elixir | test/ex_oauth2_provider/plug/ensure_scopes_test.exs | loopsocial/ex_oauth2_provider | 59d177f1c7581e1d794823279067022b1598f5f2 | [
"MIT"
] | null | null | null | test/ex_oauth2_provider/plug/ensure_scopes_test.exs | loopsocial/ex_oauth2_provider | 59d177f1c7581e1d794823279067022b1598f5f2 | [
"MIT"
] | null | null | null | test/ex_oauth2_provider/plug/ensure_scopes_test.exs | loopsocial/ex_oauth2_provider | 59d177f1c7581e1d794823279067022b1598f5f2 | [
"MIT"
] | null | null | null | defmodule ExOauth2Provider.Plug.EnsureScopesTest do
@moduledoc false
use ExOauth2Provider.ConnCase
alias ExOauth2Provider.{Plug, Plug.EnsureScopes}
alias Dummy.OauthAccessTokens.OauthAccessToken
@default_scopes "read write"
defmodule TestHandler do
@moduledoc false
def unauthorized(conn, _) do
assert conn.halted
:forbidden
end
end
test "is valid when there's no scopes", %{conn: conn} do
conn = run_plug(conn, @default_scopes, scopes: ~w())
refute conn == :forbidden
end
test "is valid when all scopes are present", %{conn: conn} do
conn = run_plug(conn, @default_scopes, scopes: ~w(read write))
refute conn == :forbidden
end
test "is valid when the scope is present", %{conn: conn} do
conn = run_plug(conn, @default_scopes, scopes: ~w(read))
refute conn == :forbidden
end
test "is invalid when all scopes are not present", %{conn: conn} do
conn = run_plug(conn, "read", scopes: ~w(read write))
assert conn == :forbidden
end
test "is invalid when access token doesn't have any required scopes", %{conn: conn} do
conn = run_plug(conn, "other_read", scopes: ~w(read write))
assert conn == :forbidden
end
test "is invalid when none of the one_of scopes is present", %{conn: conn} do
conn = run_plug(conn, "other_read", one_of: [~w(other_write), ~w(read write)])
assert conn == :forbidden
end
test "is valid when at least one_of the scopes is present", %{conn: conn} do
conn = run_plug(conn, "other_read", one_of: [~w(other_read), ~w(read write)])
refute conn == :forbidden
end
defp run_plug(conn, scopes, opts) do
access_token = %OauthAccessToken{token: "secret", scopes: scopes}
opts = Keyword.merge([handler: TestHandler], opts)
conn
|> Plug.set_current_access_token({:ok, access_token})
|> EnsureScopes.call(opts)
end
end
| 26.633803 | 88 | 0.681121 |
ffde934cb9d3816d5ec1353b43ab78a8362d4cbc | 4,847 | ex | Elixir | clients/vm_migration/lib/google_api/vm_migration/v1/model/compute_engine_target_details.ex | renovate-bot/elixir-google-api | 1da34cd39b670c99f067011e05ab90af93fef1f6 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/vm_migration/lib/google_api/vm_migration/v1/model/compute_engine_target_details.ex | swansoffiee/elixir-google-api | 9ea6d39f273fb430634788c258b3189d3613dde0 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/vm_migration/lib/google_api/vm_migration/v1/model/compute_engine_target_details.ex | dazuma/elixir-google-api | 6a9897168008efe07a6081d2326735fe332e522c | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.VMMigration.V1.Model.ComputeEngineTargetDetails do
  @moduledoc """
  ComputeEngineTargetDetails is a collection of details for creating a VM in a target Compute Engine project.

  ## Attributes

  *   `additionalLicenses` (*type:* `list(String.t)`, *default:* `nil`) - Additional licenses to assign to the VM.
  *   `appliedLicense` (*type:* `GoogleApi.VMMigration.V1.Model.AppliedLicense.t`, *default:* `nil`) - The OS license returned from the adaptation module report.
  *   `bootOption` (*type:* `String.t`, *default:* `nil`) - The VM Boot Option, as set in the source vm.
  *   `computeScheduling` (*type:* `GoogleApi.VMMigration.V1.Model.ComputeScheduling.t`, *default:* `nil`) - Compute instance scheduling information (if empty default is used).
  *   `diskType` (*type:* `String.t`, *default:* `nil`) - The disk type to use in the VM.
  *   `labels` (*type:* `map()`, *default:* `nil`) - A map of labels to associate with the VM.
  *   `licenseType` (*type:* `String.t`, *default:* `nil`) - The license type to use in OS adaptation.
  *   `machineType` (*type:* `String.t`, *default:* `nil`) - The machine type to create the VM with.
  *   `machineTypeSeries` (*type:* `String.t`, *default:* `nil`) - The machine type series to create the VM with.
  *   `metadata` (*type:* `map()`, *default:* `nil`) - The metadata key/value pairs to assign to the VM.
  *   `networkInterfaces` (*type:* `list(GoogleApi.VMMigration.V1.Model.NetworkInterface.t)`, *default:* `nil`) - List of NICs connected to this VM.
  *   `networkTags` (*type:* `list(String.t)`, *default:* `nil`) - A map of network tags to associate with the VM.
  *   `project` (*type:* `String.t`, *default:* `nil`) - The GCP target project ID or project name.
  *   `secureBoot` (*type:* `boolean()`, *default:* `nil`) - Defines whether the instance has Secure Boot enabled. This can be set to true only if the vm boot option is EFI.
  *   `serviceAccount` (*type:* `String.t`, *default:* `nil`) - The service account to associate the VM with.
  *   `vmName` (*type:* `String.t`, *default:* `nil`) - The name of the VM to create.
  *   `zone` (*type:* `String.t`, *default:* `nil`) - The zone in which to create the VM.
  """

  # ModelBase supplies the `field/2` macro plus JSON (de)serialization
  # plumbing shared by all generated model modules.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :additionalLicenses => list(String.t()) | nil,
          :appliedLicense => GoogleApi.VMMigration.V1.Model.AppliedLicense.t() | nil,
          :bootOption => String.t() | nil,
          :computeScheduling => GoogleApi.VMMigration.V1.Model.ComputeScheduling.t() | nil,
          :diskType => String.t() | nil,
          :labels => map() | nil,
          :licenseType => String.t() | nil,
          :machineType => String.t() | nil,
          :machineTypeSeries => String.t() | nil,
          :metadata => map() | nil,
          :networkInterfaces => list(GoogleApi.VMMigration.V1.Model.NetworkInterface.t()) | nil,
          :networkTags => list(String.t()) | nil,
          :project => String.t() | nil,
          :secureBoot => boolean() | nil,
          :serviceAccount => String.t() | nil,
          :vmName => String.t() | nil,
          :zone => String.t() | nil
        }

  # One `field/1-2` declaration per attribute; `as:` nests another model,
  # `type: :list`/`type: :map` select the container decoding.
  field(:additionalLicenses, type: :list)
  field(:appliedLicense, as: GoogleApi.VMMigration.V1.Model.AppliedLicense)
  field(:bootOption)
  field(:computeScheduling, as: GoogleApi.VMMigration.V1.Model.ComputeScheduling)
  field(:diskType)
  field(:labels, type: :map)
  field(:licenseType)
  field(:machineType)
  field(:machineTypeSeries)
  field(:metadata, type: :map)
  field(:networkInterfaces, as: GoogleApi.VMMigration.V1.Model.NetworkInterface, type: :list)
  field(:networkTags, type: :list)
  field(:project)
  field(:secureBoot)
  field(:serviceAccount)
  field(:vmName)
  field(:zone)
end
defimpl Poison.Decoder, for: GoogleApi.VMMigration.V1.Model.ComputeEngineTargetDetails do
def decode(value, options) do
GoogleApi.VMMigration.V1.Model.ComputeEngineTargetDetails.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.VMMigration.V1.Model.ComputeEngineTargetDetails do
  # All generated models share the generic ModelBase encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 51.021053 | 176 | 0.676501 |
ffdeb14f3f621cdd27da50fafd95c61893d99bcd | 2,145 | ex | Elixir | test/support/fixtures/events_fixtures.ex | cristineguadelupe/tapebas | 5f8c70d5ac36b2a606fe4630cc659161b2f4d7bf | [
"MIT"
] | 3 | 2022-03-24T16:48:38.000Z | 2022-03-24T16:50:04.000Z | test/support/fixtures/events_fixtures.ex | cristineguadelupe/tapebas | 5f8c70d5ac36b2a606fe4630cc659161b2f4d7bf | [
"MIT"
] | null | null | null | test/support/fixtures/events_fixtures.ex | cristineguadelupe/tapebas | 5f8c70d5ac36b2a606fe4630cc659161b2f4d7bf | [
"MIT"
] | 1 | 2022-03-20T01:11:12.000Z | 2022-03-20T01:11:12.000Z | defmodule Tapebas.EventsFixtures do
@moduledoc """
This module defines test helpers for creating
entities via the `Tapebas.Events` context.
"""
@doc """
Generate a event description.
"""
def event_description, do: "some description#{System.unique_integer([:positive])}"
@doc """
Generate a unique event title.
"""
def unique_event_title, do: "some title#{System.unique_integer([:positive])}"
@doc """
Generate a random talk type.
"""
def random_talk_type, do: Enum.random(~w(keynote general beginner advanced))
@doc """
Generate a event.
"""
def event_fixture(attrs \\ %{}) do
%{id: user_id} = Tapebas.AccountsFixtures.user_fixture()
title = unique_event_title()
{:ok, event} =
attrs
|> Enum.into(%{
title: title,
slug: Slug.slugify(title),
description: event_description(),
user_id: user_id
})
|> Tapebas.Events.create_event()
event
end
@doc """
Generate a talk.
"""
def talk_fixture(attrs \\ %{}) do
%{id: event_id} = event_fixture()
{:ok, talk} =
attrs
|> Enum.into(%{
speaker: "some speaker",
title: "some title",
type: random_talk_type(),
event_id: event_id
})
|> Tapebas.Events.create_talk()
talk
end
@doc """
Generate a question.
"""
def question_fixture(attrs \\ %{}) do
%{id: user_id} = Tapebas.AccountsFixtures.user_fixture()
%{id: talk_id} = talk_fixture()
{:ok, question} =
attrs
|> Enum.into(%{
answered: Enum.random([true, false]),
title: "some title",
user_id: user_id,
talk_id: talk_id
})
|> Tapebas.Events.create_question()
question
end
@doc """
Generate a comment.
"""
def comment_fixture(attrs \\ %{}) do
%{id: user_id} = Tapebas.AccountsFixtures.user_fixture()
%{id: question_id} = question_fixture()
{:ok, comment} =
attrs
|> Enum.into(%{
message: "some message",
user_id: user_id,
question_id: question_id
})
|> Tapebas.Events.create_comment()
comment
end
end
| 21.45 | 84 | 0.591608 |
ffded37171bc3499dbf7f63c0d8fa0e366abd2c2 | 966 | ex | Elixir | lib/square_up/resources/v2/catalog_items.ex | beaver21/SquareUp | c9791d96ed9335926933403a966eba5076fbc15b | [
"MIT"
] | 4 | 2020-10-21T18:34:50.000Z | 2022-03-16T06:25:44.000Z | lib/square_up/resources/v2/catalog_items.ex | beaver21/SquareUp | c9791d96ed9335926933403a966eba5076fbc15b | [
"MIT"
] | 5 | 2020-10-21T23:16:32.000Z | 2021-05-13T13:42:44.000Z | lib/square_up/resources/v2/catalog_items.ex | beaver21/SquareUp | c9791d96ed9335926933403a966eba5076fbc15b | [
"MIT"
] | 3 | 2020-10-21T21:20:36.000Z | 2021-03-15T18:00:30.000Z | defmodule SquareUp.V2.CatalogItems do
import Norm
import SquareUp.Client, only: [call: 2]
@spec search(SquareUp.Client.t(), %{}, SquareUp.TypeSpecs.search_catalog_items_request(), %{}) ::
SquareUp.Client.response(SquareUp.TypeSpecs.search_catalog_items_response())
def search(client, path_params \\ %{}, params \\ %{}, query_params \\ %{}) do
path_params_spec = schema(%{})
params_spec = Norm.Delegate.delegate(&SquareUp.NormSchema.search_catalog_items_request/0)
query_params_spec = schema(%{})
response_spec = {:delegate, &SquareUp.ResponseSchema.search_catalog_items_response/0}
call(client, %{
method: :post,
path_params: path_params,
params: params,
query_params: query_params,
path_params_spec: path_params_spec,
params_spec: params_spec,
query_params_spec: query_params_spec,
response_spec: response_spec,
path: "/v2/catalog/search-catalog-items"
})
end
end
| 35.777778 | 99 | 0.704969 |
ffdeda6767a74c51c0148ae6a4c91e2b3fdcf00e | 979 | exs | Elixir | exercism/elixir/leap/test/year_test.exs | Tyyagoo/studies | f8fcc3a539cfb6d04a149174c88bf2208e220b96 | [
"Unlicense"
] | null | null | null | exercism/elixir/leap/test/year_test.exs | Tyyagoo/studies | f8fcc3a539cfb6d04a149174c88bf2208e220b96 | [
"Unlicense"
] | null | null | null | exercism/elixir/leap/test/year_test.exs | Tyyagoo/studies | f8fcc3a539cfb6d04a149174c88bf2208e220b96 | [
"Unlicense"
] | null | null | null | defmodule YearTest do
  use ExUnit.Case

  # Gregorian leap-year rules exercised below:
  #   divisible by 4   -> leap,
  #   unless divisible by 100 -> common,
  #   unless divisible by 400 -> leap.

  test "year not divisible by 4 is common year" do
    refute Year.leap_year?(2015)
  end

  test "year divisible by 2, not divisible by 4 is common year" do
    refute Year.leap_year?(1970)
  end

  test "year divisible by 4, not divisible by 100 is leap year" do
    assert Year.leap_year?(1996)
  end

  test "year divisible by 4 and 5 is still a leap year" do
    # Guards against implementations that special-case multiples of 5.
    assert Year.leap_year?(1960)
  end

  test "year divisible by 100, not divisible by 400 is common year" do
    refute Year.leap_year?(2100)
  end

  test "year divisible by 100 but not by 3 is still not a leap year" do
    refute Year.leap_year?(1900)
  end

  test "year divisible by 400 is leap year" do
    assert Year.leap_year?(2000)
  end

  test "year divisible by 400 but not by 125 is still a leap year" do
    assert Year.leap_year?(2400)
  end

  test "year divisible by 200, not divisible by 400 in common year" do
    refute Year.leap_year?(1800)
  end
end
| 24.475 | 71 | 0.703779 |
ffdeef95632100ae3a217b1c0c8dda931bb3ef2f | 3,263 | ex | Elixir | lib/scripts/brainfuck.ex | Inouju/mambo | facc3b4bca4ce88ae1e65219d9342105108d97f3 | [
"MIT"
] | 15 | 2016-01-24T00:08:40.000Z | 2017-09-04T05:40:48.000Z | lib/scripts/brainfuck.ex | Inouju/mambo | facc3b4bca4ce88ae1e65219d9342105108d97f3 | [
"MIT"
] | 7 | 2016-02-07T07:14:16.000Z | 2020-01-26T23:57:09.000Z | lib/scripts/brainfuck.ex | Inouju/mambo | facc3b4bca4ce88ae1e65219d9342105108d97f3 | [
"MIT"
] | 2 | 2016-03-23T14:16:44.000Z | 2020-03-22T13:46:59.000Z | defmodule Brainfuck do
@moduledoc """
Brainfuck interpreter.
Examples
.bf ++++++++++[>++++++++>++++++<<-]>---.>+++++.<.>+.<++.
"""
use GenEvent
@tape <<0 :: size(30000) - unit(8)>>
@data_pointer 0
def init(_) do
{:ok, []}
end
def handle_event({:msg, {".help brainfuck", _, {cid,_,_}}}, _) do
Mambo.Bot.send_msg(<<?\n, @moduledoc>>, cid)
{:ok, []}
end
def handle_event({:privmsg, {".help brainfuck", _, {clid,_}}}, _) do
Mambo.Bot.send_privmsg(<<?\n, @moduledoc>>, clid)
{:ok, []}
end
def handle_event({:msg, {<<".bf ", l :: binary>>, _, {cid,_,_}}}, _) do
answer = fn(x) -> Mambo.Bot.send_msg(x, cid) end
spawn(fn -> run(l, answer) end)
{:ok, []}
end
def handle_event({:privmsg, {<<".bf ", l :: binary>>, _, {clid,_}}}, _) do
answer = fn(x) -> Mambo.Bot.send_privmsg(x, clid) end
spawn(fn -> run(l, answer) end)
{:ok, []}
end
def handle_event(_, _) do
{:ok, []}
end
# Helpers
def run(instructions, callback) do
{_, _, out} = parse(instructions, @data_pointer, @tape, [])
callback.("#{Enum.join(Enum.reverse(out), "")}")
end
# parse brainfuck code
defp parse(<< ?>, ins :: binary>>, dpointer, tape, out) do
parse(ins, dpointer + 1, tape, out)
end
defp parse(<< ?<, ins :: binary>>, dpointer, tape, out) do
parse(ins, dpointer - 1, tape, out)
end
defp parse(<< ?+, ins :: binary>>, dpointer, tape, out) do
<<prev :: binary - size(dpointer), x :: integer, next :: binary>> = tape
parse(ins, dpointer, <<prev :: binary, (x + 1) :: integer, next :: binary>>, out)
end
defp parse(<< ?-, ins :: binary>>, dpointer, tape, out) do
<<prev :: binary - size(dpointer), x :: integer, next :: binary>> = tape
parse(ins, dpointer, <<prev :: binary, (x - 1) :: integer, next :: binary>>, out)
end
# user input is disabled
defp parse(<< ?,, ins :: binary>>, dpointer, tape, out) do
parse(ins, dpointer, tape, out)
end
defp parse(<< ?., ins :: binary>>, dpointer, tape, out) do
<<_ :: binary - size(dpointer), x :: integer, _ :: binary>> = tape
parse(ins, dpointer, tape, [<<x>> | out])
end
defp parse(<< ?[, ins :: binary>>, dpointer, tape, out) do
match = find_matching(ins)
<<bf_loop :: binary - size(match), next :: binary>> = ins
<<_ :: binary - size(dpointer), x :: integer, _:: binary>> = tape
if x === 0 do
parse(next, dpointer, tape, out)
else
{ndpointer, ntape, nout} = parse(bf_loop, dpointer, tape, out)
parse(<< ?[, bf_loop :: binary, ?], next :: binary>>, ndpointer, ntape, nout)
end
end
defp parse(<< _, ins :: binary>>, dpointer, tape, out) do
parse(ins, dpointer, tape, out)
end
defp parse(<<>>, dpointer, tape, out) do
{dpointer, tape, out}
end
# find matching bracket
defp find_matching(ins) do
find_matching(ins, 0, 0)
end
defp find_matching(<<?], _ :: binary>>, 0, pos) do
pos + 1
end
defp find_matching(<<?], ins :: binary>>, n, pos) do
find_matching(ins, n - 1, pos + 1)
end
defp find_matching(<<?[, ins :: binary>>, n, pos) do
find_matching(ins, n + 1, pos + 1)
end
defp find_matching(<<_, ins :: binary>>, n, pos) do
find_matching(ins, n, pos + 1)
end
end
| 26.745902 | 85 | 0.557769 |
ffdf2e6ce8bf82b6c0f2b6f6480a41589a428b1d | 1,546 | ex | Elixir | test/support/rem/test/factory.ex | pyzlnar/rem-bot | 100b71949026eb191c1ebf80d64270406f237958 | [
"MIT"
] | 4 | 2022-02-20T13:33:48.000Z | 2022-03-31T00:48:52.000Z | test/support/rem/test/factory.ex | pyzlnar/rem-bot | 100b71949026eb191c1ebf80d64270406f237958 | [
"MIT"
] | 1 | 2022-02-22T05:42:05.000Z | 2022-02-22T05:42:05.000Z | test/support/rem/test/factory.ex | pyzlnar/rem-bot | 100b71949026eb191c1ebf80d64270406f237958 | [
"MIT"
] | null | null | null | defmodule Rem.Test.Factory do
@moduledoc """
This module handles ex-machina's factory declaration.
It is intended for factory declaration _only_, if you want to create cases that
require insertions or multiple build, write it in Rem.Test.Scenario instead.
"""
use ExMachina.Ecto,
repo: Rem.Repo
# --- Database Models --- #
def wordle_game_factory do
%Schema.Wordle.Game{
discord_user_id: sequence(:discord_user_id, &int/1),
number: sequence(:number, &int/1),
solution: sequence(:solution, &int_to_word/1, start_at: 10_000),
mode: :normal,
state: :active,
attempts: [],
evaluations: []
}
end
def wordle_solution_factory do
%Schema.Wordle.Solution{
number: sequence(:number, &int/1),
name: sequence(:name, &int_to_word/1, start_at: 10_000)
}
end
def wordle_word_factory do
%Schema.Wordle.Word{
name: sequence(:name, &int_to_word/1, start_at: 10_000)
}
end
# --- Virtual Models --- #
def virtual_game_factory do
%Wordle.Game{
number: sequence(:number, &int/1),
solution: sequence(:solution, &int_to_word/1, start_at: 10_000),
mode: :normal,
state: :active,
attempts: [],
evaluations: []
}
end
# --- Helpers --- #
defp int(int), do: int
defp int_to_word(int) do
int
|> Integer.digits
|> Enum.map(&(&1 + ?a))
|> List.to_string
end
end
| 24.935484 | 84 | 0.582147 |
ffdf3e6b114b2491eb5ba92367b03228099c81b4 | 1,126 | ex | Elixir | test/fixtures/host_tool/lib/host_tool/platform.ex | RickCarlino/nerves | 9b80531a4f84868e4811b81aaed84f7fdef8cddd | [
"Apache-2.0"
] | 1,944 | 2016-01-25T14:04:50.000Z | 2022-03-30T13:25:53.000Z | test/fixtures/host_tool/lib/host_tool/platform.ex | RickCarlino/nerves | 9b80531a4f84868e4811b81aaed84f7fdef8cddd | [
"Apache-2.0"
] | 397 | 2016-05-19T21:17:51.000Z | 2022-03-30T18:02:49.000Z | test/fixtures/host_tool/lib/host_tool/platform.ex | RickCarlino/nerves | 9b80531a4f84868e4811b81aaed84f7fdef8cddd | [
"Apache-2.0"
] | 191 | 2016-01-30T01:56:25.000Z | 2022-03-30T17:58:57.000Z | defmodule HostTool.Platform do
  use Nerves.Package.Platform

  alias Nerves.Artifact

  # Prepends the package's cached artifact directory to PATH so the host
  # tools inside it shadow any system-wide versions.
  def bootstrap(pkg) do
    path = System.get_env("PATH")
    System.put_env("PATH", "#{Nerves.Artifact.Cache.get(pkg)}:#{path}")
    :ok
  end

  # Builds the artifact: recreates the build path as a symlink to the
  # package's priv/ directory and runs `make` in the current directory.
  # NOTE(review): the result of `Nerves.Port.cmd("make", [])` is discarded,
  # so a failing make still returns {:ok, build_path} — confirm intended.
  def build(pkg, _toolchain, _opts) do
    build_path = Artifact.build_path(pkg)
    # Remove any previous symlink/dir; rm_rf result intentionally ignored.
    File.rm_rf(build_path)

    priv_dir =
      pkg.path
      |> Path.join("priv")
      |> Path.expand()

    # Ensure the parent of the build path exists before creating the link.
    build_path
    |> Path.dirname()
    |> File.mkdir_p!()

    File.mkdir_p!(priv_dir)
    # The build path is a symlink into priv/, so make output lands in priv/.
    File.ln_s!(priv_dir, build_path)
    Nerves.Port.cmd("make", [])
    {:ok, build_path}
  end

  # The artifact cache links directly to the build path.
  def build_path_link(pkg) do
    Artifact.build_path(pkg)
  end

  # Tars the build output into <download_name><ext> in the current working
  # directory and returns its absolute path.
  def archive(pkg, _toolchain, _opts) do
    build_path = Artifact.build_path(pkg)
    name = Artifact.download_name(pkg) <> Artifact.ext(pkg)
    Nerves.Utils.File.tar(build_path, name)
    {:ok, Path.join(File.cwd!(), name)}
  end

  # Removes both the build path and the priv/ directory it links to.
  def clean(pkg) do
    build_path = Artifact.build_path(pkg)
    File.rm_rf(build_path)

    priv_dir =
      pkg.path
      |> Path.join("priv")
      |> Path.expand()

    File.rm_rf(priv_dir)
    :ok
  end
end
| 19.413793 | 71 | 0.634103 |
ffdf76a95a21b71a5a2baf8e7dd5a6efed93f316 | 4,836 | exs | Elixir | test/gen_websocket_test.exs | ejscunha/gen_websocket | e8d88e00cce7e07079898868e6a10edfb0e3973a | [
"MIT"
] | null | null | null | test/gen_websocket_test.exs | ejscunha/gen_websocket | e8d88e00cce7e07079898868e6a10edfb0e3973a | [
"MIT"
] | null | null | null | test/gen_websocket_test.exs | ejscunha/gen_websocket | e8d88e00cce7e07079898868e6a10edfb0e3973a | [
"MIT"
] | null | null | null | defmodule GenWebsocketTest do
  use ExUnit.Case

  alias GenWebsocket.Support.WebsocketServer

  @host 'localhost'

  # Each test gets a fresh WebsocketServer on a random ephemeral port.
  # Per-test options (e.g. a connect delay) are passed via the @tag :opts.
  # NOTE(review): a random port may rarely collide with one already in use,
  # and the `port - 1` test below assumes the adjacent port is closed.
  setup context do
    port = Enum.random(49152..65535)

    opts =
      context
      |> Map.get(:opts, [])
      |> Keyword.put(:port, port)

    {:ok, server_pid} = start_supervised({WebsocketServer, opts})
    {:ok, port: port, server_pid: server_pid}
  end

  test "#connect it connects to the server and returns {:ok, pid}", %{port: port} do
    assert {:ok, pid} = GenWebsocket.connect(@host, port)
    assert is_pid(pid)
  end

  test "#connect if the server is not available in the given port it returns {:error, :connection_refused}",
       %{
         port: port
       } do
    assert {:error, :connection_refused} = GenWebsocket.connect(@host, port - 1)
  end

  # The server is configured to delay accepting by 2ms; connecting with a
  # shorter timeout must fail with :timeout.
  @tag opts: [delay: 2]
  test "#connect if the connection exceeds the timeout it returns {:error, :timeout}", %{
    port: port,
    opts: opts
  } do
    assert {:error, :timeout} = GenWebsocket.connect(@host, port, [], opts[:delay] - 1)
  end

  test "#connect if the host can not be resolved it retuns {:error, :host_not_found}", %{
    port: port
  } do
    assert {:error, :host_not_found} = GenWebsocket.connect('invalid', port)
  end

  test "#send it sends binary data to the websocket server and returns :ok", %{
    port: port,
    server_pid: server_pid
  } do
    data = "data"
    {:ok, pid} = GenWebsocket.connect(@host, port)
    assert :ok = GenWebsocket.send(pid, data)
    # Give the frame time to arrive before asking the server.
    Process.sleep(10)
    assert WebsocketServer.received?(server_pid, data)
  end

  test "#recv it receives binary data with the provided length from the websocket server and returns {:ok, data}",
       %{
         port: port,
         server_pid: server_pid
       } do
    data = "data"
    {:ok, pid} = GenWebsocket.connect(@host, port)
    WebsocketServer.send(server_pid, data)
    assert {:ok, ^data} = GenWebsocket.recv(pid, byte_size(data))
  end

  # recv/2 must return exactly `length` bytes, buffering the remainder for
  # the next call.
  test "#recv it returns binary data equivalent to the provided length in bytes",
       %{
         port: port,
         server_pid: server_pid
       } do
    data = "datadata"
    expected_received_data = "data"
    expected_received_data_length = byte_size(expected_received_data)
    {:ok, pid} = GenWebsocket.connect(@host, port)
    WebsocketServer.send(server_pid, data)
    assert {:ok, ^expected_received_data} = GenWebsocket.recv(pid, expected_received_data_length)
    assert {:ok, ^expected_received_data} = GenWebsocket.recv(pid, expected_received_data_length)
  end

  test "#recv if the provided length is 0 it returns all data present in the server",
       %{
         port: port,
         server_pid: server_pid
       } do
    data = "data"
    {:ok, pid} = GenWebsocket.connect(@host, port)
    WebsocketServer.send(server_pid, data)
    # Let the data reach the client buffer before draining it.
    Process.sleep(10)
    assert {:ok, ^data} = GenWebsocket.recv(pid, 0)
  end

  test "#recv if the provided timeout is exceeded it returns {:error, :timeout}", %{port: port} do
    {:ok, pid} = GenWebsocket.connect(@host, port)
    assert {:error, :timeout} = GenWebsocket.recv(pid, 1, 10)
  end

  # After controlling_process/2, active-mode messages go to the new owner;
  # the spawned proxy relays the first message back to the test process.
  test "#controlling_process it changes the owning process of the client and returns :ok", %{
    port: port,
    server_pid: server_pid
  } do
    {:ok, pid} = GenWebsocket.connect(@host, port, active: true)
    test_pid = self()
    data = "data"

    new_owner =
      spawn(fn ->
        receive do
          msg -> send(test_pid, msg)
        end
      end)

    assert :ok = GenWebsocket.controlling_process(pid, new_owner)
    WebsocketServer.send(server_pid, data)
    assert_receive({:websocket, ^pid, ^data})
  end

  test "#close it closes the connection to the server and stops the client and returns :ok", %{
    port: port
  } do
    {:ok, pid} = GenWebsocket.connect(@host, port)
    assert :ok = GenWebsocket.close(pid)
    assert {:error, :closed} = GenWebsocket.send(pid, "data")
  end

  # Exercises all three active modes: true (push every message), false
  # (buffer until recv), and :once (push one message, then buffer).
  test "#set_active it defines the client active mode and returns :ok", %{
    port: port,
    server_pid: server_pid
  } do
    data = "data"
    {:ok, pid} = GenWebsocket.connect(@host, port, active: false)
    assert :ok = GenWebsocket.set_active(pid, true)
    WebsocketServer.send(server_pid, data)
    assert_receive({:websocket, ^pid, ^data})
    WebsocketServer.send(server_pid, data)
    assert_receive({:websocket, ^pid, ^data})
    assert :ok = GenWebsocket.set_active(pid, false)
    WebsocketServer.send(server_pid, data)
    refute_receive({:websocket, ^pid, ^data}, 10)
    assert {:ok, ^data} = GenWebsocket.recv(pid, 0)
    assert :ok = GenWebsocket.set_active(pid, :once)
    WebsocketServer.send(server_pid, data)
    assert_receive({:websocket, ^pid, ^data})
    WebsocketServer.send(server_pid, data)
    refute_receive({:websocket, ^pid, ^data}, 10)
    assert {:ok, ^data} = GenWebsocket.recv(pid, 0)
  end
end
| 31.607843 | 114 | 0.656741 |
ffdff95e262031641a4855d99700886bfaf798af | 563 | exs | Elixir | test/views/error_view_test.exs | Wilo/mediax | 0f3ceda0662bae5bd13f08cada5e8ad34715dd0a | [
"MIT"
] | null | null | null | test/views/error_view_test.exs | Wilo/mediax | 0f3ceda0662bae5bd13f08cada5e8ad34715dd0a | [
"MIT"
] | null | null | null | test/views/error_view_test.exs | Wilo/mediax | 0f3ceda0662bae5bd13f08cada5e8ad34715dd0a | [
"MIT"
] | null | null | null | defmodule Mediax.ErrorViewTest do
use Mediax.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(Mediax.ErrorView, "404.html", []) ==
"Page not found"
end
test "render 500.html" do
assert render_to_string(Mediax.ErrorView, "500.html", []) ==
"Internal server error"
end
test "render any other" do
assert render_to_string(Mediax.ErrorView, "505.html", []) ==
"Internal server error"
end
end
| 25.590909 | 66 | 0.673179 |
ffdffe2843c0163d91388fd9d6564b375b1b328e | 882 | exs | Elixir | mix.exs | Ziinc/phoenix_ts_interface | bde31c6a348301907eaf5a264030ed9b45a25f43 | [
"MIT"
] | null | null | null | mix.exs | Ziinc/phoenix_ts_interface | bde31c6a348301907eaf5a264030ed9b45a25f43 | [
"MIT"
] | 1 | 2020-04-22T15:10:28.000Z | 2020-04-22T15:10:28.000Z | mix.exs | Ziinc/phoenix_ts_interface | bde31c6a348301907eaf5a264030ed9b45a25f43 | [
"MIT"
] | null | null | null | defmodule PhoenixTsInterface.Mixfile do
  use Mix.Project

  # Hex package definition for the phoenix_ts_interface library.
  def project do
    [
      app: :phoenix_ts_interface,
      version: "1.0.0",
      elixir: "~> 1.10",
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      description: description(),
      package: package(),
      deps: deps()
    ]
  end

  def application do
    [applications: [:logger]]
  end

  # Phoenix is only needed to compile the test suite; ex_doc only for docs.
  defp deps do
    [
      {:phoenix, ">= 1.4.0", only: :test},
      {:ex_doc, ">= 0.0.0", only: :dev, runtime: false}
    ]
  end

  # One-line summary shown on hex.pm.
  defp description do
    """
    TS interface for Phoenix APIs
    """
  end

  # Hex package metadata (files shipped, license, maintainers, links).
  defp package do
    [
      name: :phoenix_ts_interface,
      files: ["lib", "priv", "mix.exs", "README*", "LICENSE*"],
      licenses: ["MIT"],
      maintainers: ["ziinc"],
      links: %{"GitHub" => "https://github.com/Ziinc/phoenix-ts-interface"}
    ]
  end
end
| 20.045455 | 75 | 0.554422 |
ffe02a2f317d37ba1a4916a2bac4f755c48307dd | 320 | ex | Elixir | test/support/test_convenience_helpers.ex | GrantJamesPowell/battle_box | 301091955b68cd4672f6513d645eca4e3c4e17d0 | [
"Apache-2.0"
] | 2 | 2020-10-17T05:48:49.000Z | 2020-11-11T02:34:15.000Z | test/support/test_convenience_helpers.ex | FlyingDutchmanGames/battle_box | 301091955b68cd4672f6513d645eca4e3c4e17d0 | [
"Apache-2.0"
] | 3 | 2020-05-18T05:52:21.000Z | 2020-06-09T07:24:14.000Z | test/support/test_convenience_helpers.ex | FlyingDutchmanGames/battle_box | 301091955b68cd4672f6513d645eca4e3c4e17d0 | [
"Apache-2.0"
] | null | null | null | defmodule BattleBox.TestConvenienceHelpers do
@doc """
Spawns a linked proxy process that forwards every message it receives back to
the caller, tagged as `{name, message}`.

`init_func`, when given, runs inside the proxy before forwarding starts —
useful for subscriptions/registrations that must happen from the proxy's own
pid. Returns the proxy pid.
"""
def named_proxy(name, init_func \\ nil) do
  me = self()

  spawn_link(fn ->
    if init_func, do: init_func.()
    forward_loop(name, me)
  end)
end

# Tail-recursive receive loop: relay each incoming message to `target`,
# tagged with the proxy's name, forever. Replaces the original
# `Stream.iterate(0, & &1) |> Enum.each(...)` infinite-loop idiom with the
# conventional explicit recursion.
defp forward_loop(name, target) do
  receive do
    message -> send(target, {name, message})
  end

  forward_loop(name, target)
end
end
| 18.823529 | 45 | 0.55 |
ffe0322ba84f8672c0023834e64cead04a22cde9 | 67 | ex | Elixir | create_fun_umbrella/apps/create_fun_cms/lib/create_fun_cms/views/artist_view.ex | Vorzious/CreateFun | 5744c913ef706bc29062fa90a8ec5de12d267dab | [
"MIT"
] | null | null | null | create_fun_umbrella/apps/create_fun_cms/lib/create_fun_cms/views/artist_view.ex | Vorzious/CreateFun | 5744c913ef706bc29062fa90a8ec5de12d267dab | [
"MIT"
] | 9 | 2018-06-17T09:54:03.000Z | 2018-06-17T09:55:20.000Z | create_fun_umbrella/apps/create_fun_cms/lib/create_fun_cms/views/artist_view.ex | Vorzious/CreateFun | 5744c913ef706bc29062fa90a8ec5de12d267dab | [
"MIT"
] | 1 | 2018-06-05T18:38:01.000Z | 2018-06-05T18:38:01.000Z | defmodule CreateFunCms.ArtistView do
use CreateFunCms, :view
end
| 16.75 | 36 | 0.820896 |
ffe04eeaec8987c23a7a52c301a2fc7cd503a01a | 4,617 | ex | Elixir | chromoid_web/lib/chromoid_discord/fake_discord_source.ex | ConnorRigby/chromoid | 6424a9234227d18d7c287ded869caeb31511bb97 | [
"Apache-2.0"
] | 7 | 2020-11-18T11:29:20.000Z | 2022-01-16T03:16:14.000Z | chromoid_web/lib/chromoid_discord/fake_discord_source.ex | ConnorRigby/chromoid | 6424a9234227d18d7c287ded869caeb31511bb97 | [
"Apache-2.0"
] | null | null | null | chromoid_web/lib/chromoid_discord/fake_discord_source.ex | ConnorRigby/chromoid | 6424a9234227d18d7c287ded869caeb31511bb97 | [
"Apache-2.0"
] | 1 | 2021-01-06T15:40:46.000Z | 2021-01-06T15:40:46.000Z | defmodule ChromoidDiscord.FakeDiscordSource do
@moduledoc """
Stub interface for dispatching Discord events
"""
use GenServer
require Logger
# Dispatches a MESSAGE_CREATE event for an already-built Nostrum message;
# the target guild struct is reconstructed from the message's guild_id.
def message_create(%Nostrum.Struct.Message{guild_id: guild_id} = message) do
  guild = %Nostrum.Struct.Guild{id: guild_id}
  ChromoidDiscord.Guild.EventDispatcher.dispatch(guild, {:MESSAGE_CREATE, message})
end

# Convenience: dispatch `content` as a message in the fixture guild/channel.
def message_create(content) do
  guild = default_guild()
  channel = default_channel()
  message_create(guild, channel, content)
end

# Builds a fixture message carrying `content` and dispatches it to the given
# guild's event dispatcher.
def message_create(guild, channel, content) do
  message = default_message(guild, channel, content)
  ChromoidDiscord.Guild.EventDispatcher.dispatch(guild, {:MESSAGE_CREATE, message})
end
# Singleton stub process, registered under the module name.
@doc false
def start_link(args) do
  GenServer.start_link(__MODULE__, args, name: __MODULE__)
end
# State is an empty map; no handle_* callbacks are defined in this module.
@impl GenServer
def init(_) do
  {:ok, %{}}
end
# Boots the guild supervision tree the way the real consumer does on
# GUILD_AVAILABLE. An already-started guild counts as success; any other
# failure is logged and returned as-is.
def init_guild(%Nostrum.Struct.Guild{} = guild, %Nostrum.Struct.User{} = current_user) do
  Logger.info("GUILD_AVAILABLE: #{guild.name}")
  config = ChromoidDiscord.NostrumConsumer.get_or_create_config(guild)

  case ChromoidDiscord.GuildSupervisor.start_guild(guild, config, current_user) do
    {:ok, pid} ->
      {:ok, pid}

    {:error, {:already_started, pid}} ->
      {:ok, pid}

    error ->
      Logger.error("Could not start guild: #{guild.name}: #{inspect(error)}")
      error
  end
end

# Boots the fixture guild with the fixture bot user.
def init_guild() do
  init_guild(default_guild(), default_user())
end
# Canned bot user fixture (used as the "current user") with all public flags
# populated so flag-reading code sees a realistic struct.
def default_user() do
  %Nostrum.Struct.User{
    avatar: nil,
    bot: true,
    discriminator: "4588",
    email: nil,
    id: 755_805_360_123_805_987,
    mfa_enabled: true,
    public_flags: %Nostrum.Struct.User.Flags{
      bug_hunter_level_1: false,
      bug_hunter_level_2: false,
      early_supporter: false,
      hypesquad_balance: false,
      hypesquad_bravery: false,
      hypesquad_brilliance: false,
      hypesquad_events: false,
      partner: false,
      staff: false,
      system: false,
      team_user: false,
      verified_bot: false,
      verified_developer: false
    },
    username: "Chromoid",
    verified: true
  }
end
# Message-author fixture: identical to the bot user fixture except for the id,
# so author and current user are distinguishable in tests.
def default_author do
  %{default_user() | id: 805_755_805_360_123_987}
end
# Delegates to the same config lookup the real consumer uses (see init_guild/2).
def default_config(guild) do
  ChromoidDiscord.NostrumConsumer.get_or_create_config(guild)
end
# Canned guild fixture ("miata") with one emoji, one @everyone role, and the
# widget/system channel ids pointing at the same channel.
def default_guild do
  %Nostrum.Struct.Guild{
    afk_channel_id: nil,
    afk_timeout: 300,
    application_id: nil,
    channels: nil,
    default_message_notifications: 1,
    embed_channel_id: "643947340453118019",
    embed_enabled: true,
    emojis: [
      %Nostrum.Struct.Emoji{
        animated: false,
        id: 644_017_512_102_494_209,
        managed: false,
        name: "ping",
        require_colons: true,
        roles: [],
        user: nil
      }
    ],
    explicit_content_filter: 0,
    features: [],
    icon: "0c8e107405e53f0923deaa2a69cb504f",
    id: 643_947_339_895_013_416,
    joined_at: nil,
    large: nil,
    member_count: nil,
    members: nil,
    mfa_level: 0,
    name: "miata",
    owner_id: 316_741_621_498_511_363,
    region: "us-central",
    roles: %{
      643_947_339_895_013_416 => %Nostrum.Struct.Guild.Role{
        color: 0,
        hoist: false,
        id: 643_947_339_895_013_416,
        managed: false,
        mentionable: false,
        name: "@everyone",
        permissions: 104_324_689,
        position: 0
      }
    },
    splash: nil,
    system_channel_id: 643_947_340_453_118_019,
    unavailable: nil,
    verification_level: 2,
    voice_states: nil,
    widget_channel_id: 643_947_340_453_118_019,
    widget_enabled: true
  }
end
# Canned text channel fixture ("deleteme") belonging to the fixture guild.
def default_channel do
  %Nostrum.Struct.Channel{
    application_id: nil,
    bitrate: nil,
    guild_id: 643_947_339_895_013_416,
    icon: nil,
    id: 644_744_557_166_329_420,
    last_message_id: nil,
    last_pin_timestamp: nil,
    name: "deleteme",
    nsfw: false,
    owner_id: nil,
    parent_id: 644_744_557_166_329_857,
    permission_overwrites: [],
    position: 53,
    recipients: nil,
    topic: nil,
    type: 0,
    user_limit: nil
  }
end
# Builds a message fixture in the given guild/channel, authored by the
# fixture author and carrying a guild member whose user is the fixture user.
def default_message(guild, channel, content) do
  %Nostrum.Struct.Message{
    guild_id: guild.id,
    content: content,
    channel_id: channel.id,
    author: default_author(),
    member: %Nostrum.Struct.Guild.Member{
      nick: "asdf",
      user: default_user()
    }
  }
end
end
| 25.368132 | 91 | 0.62963 |
ffe07eeb72899b7bd8bf8e9c8a3ad55f6481da06 | 184 | exs | Elixir | app/priv/repo/migrations/20201102214719_add_recording_type.exs | nathanjohnson320/noodl | 2e449aab15b54fc5a1dc45ebf4b79e7b64b7c967 | [
"MIT"
] | 1 | 2021-01-20T20:00:50.000Z | 2021-01-20T20:00:50.000Z | app/priv/repo/migrations/20201102214719_add_recording_type.exs | nathanjohnson320/noodl | 2e449aab15b54fc5a1dc45ebf4b79e7b64b7c967 | [
"MIT"
] | null | null | null | app/priv/repo/migrations/20201102214719_add_recording_type.exs | nathanjohnson320/noodl | 2e449aab15b54fc5a1dc45ebf4b79e7b64b7c967 | [
"MIT"
] | null | null | null | defmodule Noodl.Repo.Migrations.AddRecordingType do
use Ecto.Migration
# Adds a :type string column (default "system") to the recordings table;
# reversible, so rollback drops the column again.
def change do
  alter table(:recordings) do
    add :type, :string, default: "system"
  end
end
end
| 18.4 | 51 | 0.706522 |
ffe0aa528fb0fd2f9f15a4d2f72e0d60be38e808 | 2,899 | exs | Elixir | machine_translation/MorpHIN/Learned/Resources/Set1/TrainingInstances/83.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | machine_translation/MorpHIN/Learned/Resources/Set1/TrainingInstances/83.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | machine_translation/MorpHIN/Learned/Resources/Set1/TrainingInstances/83.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | **EXAMPLE FILE**
pnoun cm cm verb noun;
nst adjective cm pnoun noun;
verb_aux SYM cm noun noun;
cardinal noun pnoun cm pnoun;
pnoun cm pnoun cm pnoun;
pn nst pnoun cardinal pnoun;
noun verb pnoun cm pnoun;
pnoun cm pnoun cm pnoun;
conj pnoun pnoun cm pnoun;
adjective noun pnoun cm pnoun;
SYM adjective cm adjective pnoun;
noun pnoun particle adjective pnoun;
SYM pnoun cm noun pnoun;
cm adjective cm noun noun;
pnoun pnoun cm cardinal pnoun;
pnoun pnoun cm noun pnoun;
conj pnoun cm pn pnoun;
conj pnoun cm quantifier pnoun;
cm pnoun cm adjective pnoun;
noun pnoun cm noun pnoun;
pn pnoun cm noun pnoun;
pn pnoun cm quantifier pnoun;
cm pnoun cm noun pnoun;
cm pn adjective verb noun;
cm pnoun conj quantifier pnoun;
noun adjective cm pn noun;
noun pnoun SYM adjective pnoun;
cm pnoun noun pnoun pnoun;
noun cardinal cm pnoun pnoun;
noun conj particle pnoun noun;
nst pnoun cm noun pnoun;
SYM pnoun cm demonstrative pnoun;
noun pnoun cm pnoun pnoun;
SYM pnoun cm adjective pnoun;
particle noun cm noun pnoun;
noun cm cm noun pnoun;
verb_aux SYM cm adjective pnoun;
noun pnoun conj pnoun pnoun;
pnoun cm cm noun pnoun;
verb conj cm noun pnoun;
pnoun cm cm noun pnoun;
cm nst pnoun pnoun pnoun;
nst pnoun pnoun adjective pnoun;
demonstrative noun noun verb noun;
cm pnoun noun noun pnoun;
pnoun cm cm pnoun pnoun;
noun cm pnoun conj pnoun;
verb_aux SYM pnoun noun pnoun;
pnoun cm pnoun cm pnoun;
nst adjective cm cardinal noun;
cm adjective pnoun cm pnoun;
conj conj pnoun noun pnoun;
noun cm pnoun conj pnoun;
conj conj pnoun cm pnoun;
verb SYM pnoun noun pnoun;
cm adjective pnoun cm pnoun;
pnoun cm pnoun cm pnoun;
nst adjective cm cardinal noun;
conj conj pnoun noun pnoun;
adjective noun cm noun pnoun;
noun pnoun cm noun pnoun;
noun pnoun cm cm pnoun;
adjective noun pnoun conj pnoun;
verb_aux pnoun cm cm pnoun;
neg pnoun pn particle pnoun;
particle pnoun noun cm pnoun;
cardinal adjective cardinal cm pnoun;
verb SYM cm noun pnoun;
cm noun pnoun cm pnoun;
verb_aux conj pnoun pnoun pnoun;
noun pnoun cm nst pnoun;
noun pnoun cm pn pnoun;
pn pnoun cm adjective pnoun;
noun pnoun cm pnoun pnoun;
cm pnoun pnoun cm pnoun;
SYM pnoun cm pnoun pnoun;
cm pnoun cm cardinal pnoun;
noun pnoun cm noun pnoun;
SYM pnoun pnoun cm pnoun;
noun pnoun cm noun pnoun;
cm particle cm verb noun;
SYM pnoun pnoun cm pnoun;
conj pnoun pnoun conj pnoun;
SYM pnoun cm noun pnoun;
noun cm pnoun cm pnoun;
cm pnoun cm noun pnoun;
pn pnoun cm noun pnoun;
cm pnoun noun noun pnoun;
cm pnoun noun verb pnoun;
adjective noun pnoun conj pnoun;
noun pnoun pnoun SYM pnoun;
noun pnoun pnoun verb pnoun;
noun pnoun pnoun cm pnoun;
SYM pnoun pnoun cardinal pnoun;
SYM pnoun pnoun cm pnoun;
noun pnoun conj adjective pnoun;
cm pnoun cm noun pnoun;
cm pnoun cm cm pnoun;
pnoun cardinal cm noun pnoun;
noun SYM noun noun pnoun;
| 28.145631 | 38 | 0.756123 |
ffe0ae7804fc2cddea49d595a8712e548f1ba59c | 6,129 | exs | Elixir | lib/eex/test/eex/tokenizer_test.exs | sjBao/elixir | 52ee3ac163098f91f15dcb1360ae801a98453a77 | [
"Apache-2.0"
] | 1 | 2020-01-14T18:44:56.000Z | 2020-01-14T18:44:56.000Z | lib/eex/test/eex/tokenizer_test.exs | sjBao/elixir | 52ee3ac163098f91f15dcb1360ae801a98453a77 | [
"Apache-2.0"
] | null | null | null | lib/eex/test/eex/tokenizer_test.exs | sjBao/elixir | 52ee3ac163098f91f15dcb1360ae801a98453a77 | [
"Apache-2.0"
] | null | null | null | Code.require_file("../test_helper.exs", __DIR__)
defmodule EEx.TokenizerTest do
  use ExUnit.Case, async: true
  require EEx.Tokenizer, as: T

  # Token shapes asserted throughout: {:text, charlist} for literal template
  # text and {tag, line, column, marker, expr_charlist, trimmed?} for code,
  # where tag is :expr / :start_expr / :middle_expr / :end_expr and marker is
  # the character after `<%` ('' / '=' / '/' / '|').

  test "simple chars lists" do
    assert T.tokenize('foo', 1, 1) == {:ok, [{:text, 'foo'}]}
  end

  test "simple strings" do
    assert T.tokenize("foo", 1, 1) == {:ok, [{:text, 'foo'}]}
  end

  test "strings with embedded code" do
    assert T.tokenize('foo <% bar %>', 1, 1) ==
             {:ok, [{:text, 'foo '}, {:expr, 1, 5, '', ' bar ', false}]}
  end

  test "strings with embedded equals code" do
    assert T.tokenize('foo <%= bar %>', 1, 1) ==
             {:ok, [{:text, 'foo '}, {:expr, 1, 5, '=', ' bar ', false}]}
  end

  test "strings with embedded slash code" do
    assert T.tokenize('foo <%/ bar %>', 1, 1) ==
             {:ok, [{:text, 'foo '}, {:expr, 1, 5, '/', ' bar ', false}]}
  end

  test "strings with embedded pipe code" do
    assert T.tokenize('foo <%| bar %>', 1, 1) ==
             {:ok, [{:text, 'foo '}, {:expr, 1, 5, '|', ' bar ', false}]}
  end

  test "strings with more than one line" do
    assert T.tokenize('foo\n<%= bar %>', 1, 1) ==
             {:ok, [{:text, 'foo\n'}, {:expr, 2, 1, '=', ' bar ', false}]}
  end

  test "strings with more than one line and expression with more than one line" do
    string = '''
    foo <%= bar

    baz %>
    <% foo %>
    '''

    exprs = [
      {:text, 'foo '},
      {:expr, 1, 5, '=', ' bar\n\nbaz ', false},
      {:text, '\n'},
      {:expr, 4, 1, '', ' foo ', false},
      {:text, '\n'}
    ]

    assert T.tokenize(string, 1, 1) == {:ok, exprs}
  end

  # <%% is the escape that produces a literal <% in the output text.
  test "quotation" do
    assert T.tokenize('foo <%% true %>', 1, 1) == {:ok, [{:text, 'foo <% true %>'}]}
  end

  test "quotation with do/end" do
    assert T.tokenize('foo <%% true do %>bar<%% end %>', 1, 1) ==
             {:ok, [{:text, 'foo <% true do %>bar<% end %>'}]}
  end

  test "quotation with interpolation" do
    exprs = [
      {:text, 'a <% b '},
      {:expr, 1, 9, '=', ' c ', false},
      {:text, ' '},
      {:expr, 1, 18, '=', ' d ', false},
      {:text, ' e %> f'}
    ]

    assert T.tokenize('a <%% b <%= c %> <%= d %> e %> f', 1, 1) == {:ok, exprs}
  end

  test "improperly formatted quotation with interpolation" do
    exprs = [
      {:text, '<%% a <%= b %> c %>'}
    ]

    assert T.tokenize('<%%% a <%%= b %> c %>', 1, 1) == {:ok, exprs}
  end

  # <%# ... %> comments are dropped from the token stream entirely.
  test "comments" do
    exprs = [
      {:text, 'foo '}
    ]

    assert T.tokenize('foo <%# true %>', 1, 1) == {:ok, exprs}
  end

  test "comments with do/end" do
    exprs = [
      {:text, 'foo bar'}
    ]

    assert T.tokenize('foo <%# true do %>bar<%# end %>', 1, 1) == {:ok, exprs}
  end

  # Blocks split into start/middle/end expression tokens.
  test "strings with embedded do end" do
    exprs = [
      {:text, 'foo '},
      {:start_expr, 1, 5, '', ' if true do ', false},
      {:text, 'bar'},
      {:end_expr, 1, 24, '', ' end ', false}
    ]

    assert T.tokenize('foo <% if true do %>bar<% end %>', 1, 1) == {:ok, exprs}
  end

  test "strings with embedded -> end" do
    exprs = [
      {:text, 'foo '},
      {:start_expr, 1, 5, '', ' cond do ', false},
      {:middle_expr, 1, 18, '', ' false -> ', false},
      {:text, 'bar'},
      {:middle_expr, 1, 35, '', ' true -> ', false},
      {:text, 'baz'},
      {:end_expr, 1, 51, '', ' end ', false}
    ]

    assert T.tokenize('foo <% cond do %><% false -> %>bar<% true -> %>baz<% end %>', 1, 1) ==
             {:ok, exprs}
  end

  test "strings with multiple callbacks" do
    exprs = [
      {:start_expr, 1, 1, '=', ' a fn -> ', false},
      {:text, 'foo'},
      {:middle_expr, 1, 18, '', ' end, fn -> ', false},
      {:text, 'bar'},
      {:end_expr, 1, 37, '', ' end ', false}
    ]

    assert T.tokenize('<%= a fn -> %>foo<% end, fn -> %>bar<% end %>', 1, 1) == {:ok, exprs}
  end

  test "strings with callback followed by do block" do
    exprs = [
      {:start_expr, 1, 1, '=', ' a fn -> ', false},
      {:text, 'foo'},
      {:middle_expr, 1, 18, '', ' end do ', false},
      {:text, 'bar'},
      {:end_expr, 1, 33, '', ' end ', false}
    ]

    assert T.tokenize('<%= a fn -> %>foo<% end do %>bar<% end %>', 1, 1) == {:ok, exprs}
  end

  test "strings with embedded keywords blocks" do
    exprs = [
      {:text, 'foo '},
      {:start_expr, 1, 5, '', ' if true do ', false},
      {:text, 'bar'},
      {:middle_expr, 1, 24, '', ' else ', false},
      {:text, 'baz'},
      {:end_expr, 1, 37, '', ' end ', false}
    ]

    assert T.tokenize('foo <% if true do %>bar<% else %>baz<% end %>', 1, 1) == {:ok, exprs}
  end

  # trim: true strips whitespace around expression-only lines and flips the
  # affected tokens' trailing boolean to true.
  test "trim mode" do
    template = '\t<%= if true do %> \n TRUE \n  <% else %>\n FALSE \n  <% end %>  '

    exprs = [
      {:start_expr, 1, 2, '=', ' if true do ', true},
      {:text, ' TRUE \n'},
      {:middle_expr, 3, 3, '', ' else ', true},
      {:text, ' FALSE \n'},
      {:end_expr, 5, 3, '', ' end ', true}
    ]

    assert T.tokenize(template, 1, 1, trim: true) == {:ok, exprs}
  end

  test "trim mode with comment" do
    exprs = [
      {:text, '123'}
    ]

    assert T.tokenize('  <%# comment %>  \n123', 1, 1, trim: true) == {:ok, exprs}
  end

  test "trim mode with CRLF" do
    exprs = [
      {:text, '0\r\n'},
      {:expr, 2, 3, '=', ' 12 ', true},
      {:text, '34'}
    ]

    assert T.tokenize('0\r\n  <%= 12 %>  \r\n34', 1, 1, trim: true) == {:ok, exprs}
  end

  test "trim mode set to false" do
    exprs = [
      {:text, ' '},
      {:expr, 1, 2, '=', ' 12 ', false},
      {:text, ' \n'}
    ]

    assert T.tokenize(' <%= 12 %> \n', 1, 1, trim: false) == {:ok, exprs}
  end

  # Lines that mix text and expressions must not be trimmed.
  test "trim mode no false positives" do
    assert_not_trimmed = fn x -> assert T.tokenize(x, 1, 1, trim: true) == T.tokenize(x, 1, 1) end

    assert_not_trimmed.('foo <%= "bar" %>  ')
    assert_not_trimmed.('\n  <%= "foo" %>bar')
    assert_not_trimmed.('  <%% hello %>  ')
    assert_not_trimmed.('  <%= 01 %><%= 23 %>\n')
  end

  test "raise syntax error when there is start mark and no end mark" do
    assert T.tokenize('foo <% :bar', 1, 1) == {:error, 1, "missing token '%>'"}
    assert T.tokenize('<%# true ', 1, 1) == {:error, 1, "missing token '%>'"}
  end
end
| 27.484305 | 98 | 0.472508 |
ffe0c557bbef810c3a8fdd6e3bb45e33a038653d | 832 | ex | Elixir | elixir/basic/programming_elixir/ch5/ex_functions_1/lib/ex_functions1.ex | TGITS/programming-workouts | 799e805ccf3fd0936ec8ac2417f7193b8e9bcb55 | [
"MIT"
] | null | null | null | elixir/basic/programming_elixir/ch5/ex_functions_1/lib/ex_functions1.ex | TGITS/programming-workouts | 799e805ccf3fd0936ec8ac2417f7193b8e9bcb55 | [
"MIT"
] | 16 | 2020-05-30T12:38:13.000Z | 2022-02-19T09:23:31.000Z | elixir/basic/programming_elixir/ch5/ex_functions_1/lib/ex_functions1.ex | TGITS/programming-workouts | 799e805ccf3fd0936ec8ac2417f7193b8e9bcb55 | [
"MIT"
] | null | null | null | defmodule ExFunctions1 do
@moduledoc """
Documentation for ExFunctions1.
"""
@doc """
Concatenates two lists, keeping every element of the first before the second.

## Examples

    iex> ExFunctions1.list_concat([:a, :b], [:c, :d])
    [:a, :b, :c, :d]
"""
@spec list_concat(list(), list()) :: list()
def list_concat(left, right) do
  left ++ right
end
@doc """
Adds three numbers together.

## Examples

    iex> ExFunctions1.sum(1,2,3)
    6
"""
@spec sum(number(), number(), number()) :: number()
def sum(a, b, c), do: a + b + c
@doc """
Converts a pair (two-element tuple) into a two-element list.

Only pairs match this clause; any other tuple raises `FunctionClauseError`,
exactly as before.

## Examples

    iex> ExFunctions1.pair_tuple_to_list({ 1234, 5678 })
    [1234, 5678]
"""
@spec pair_tuple_to_list(tuple()) :: list()
def pair_tuple_to_list({_, _} = pair) do
  Tuple.to_list(pair)
end
end
| 16.313725 | 67 | 0.585337 |
ffe0c5e659e759f6160c1051c233b08386571b27 | 2,119 | ex | Elixir | lib/consumer/error_consumer.ex | MajAhd/elx_logger | b0bd1596ba6d8973f9eece523228984a2ca77d0e | [
"MIT"
] | 1 | 2021-07-03T08:27:50.000Z | 2021-07-03T08:27:50.000Z | lib/consumer/error_consumer.ex | MajAhd/elx_logger | b0bd1596ba6d8973f9eece523228984a2ca77d0e | [
"MIT"
] | null | null | null | lib/consumer/error_consumer.ex | MajAhd/elx_logger | b0bd1596ba6d8973f9eece523228984a2ca77d0e | [
"MIT"
] | null | null | null | defmodule ElxLogger.ErrorConsumer do
@moduledoc """
Error Consumer
- create Error Consumer via genserver
- Exchange type : Topic
- incoming topics:
- error.db : save log to database
- error.file : save log to file
- error.mail : publish log via email
"""
use GenServer
use AMQP
# Starts the consumer registered under the fixed local name :elx_logger.
def start_link(arg \\ []) do
  GenServer.start_link(__MODULE__, arg, name: :elx_logger)
end
# Exchange name is read from app config at compile time; queue name is fixed.
@exchange Application.compile_env!(:elx_logger, :error_exchange)
@queue "error"
# @queue_error "#{@queue}_error"

# Opens the AMQP connection and channel, declares the durable queue and topic
# exchange, binds "error.*" routing keys to the queue, and subscribes with
# no_ack: true (the broker will not redeliver on failure). The open channel
# becomes the GenServer state.
def init(_opts) do
  {:ok, conn} = Connection.open()
  {:ok, chan} = Channel.open(conn)
  Queue.declare(chan, @queue, durable: true)
  Exchange.topic(chan, @exchange, durable: true)
  Queue.bind(chan, @queue, @exchange, routing_key: "error.*")
  {:ok, _consumer_tag} = Basic.consume(chan, @queue, nil, no_ack: true)
  {:ok, chan}
end
# Confirmation sent by the broker after registering this process as a consumer
def handle_info({:basic_consume_ok, %{consumer_tag: _consumer_tag}}, chan) do
  {:noreply, chan}
end

# Sent by the broker when the consumer is unexpectedly cancelled (such as after a queue deletion)
def handle_info({:basic_cancel, %{consumer_tag: _consumer_tag}}, chan) do
  {:stop, :normal, chan}
end

# Confirmation sent by the broker to the consumer process after a Basic.cancel
def handle_info({:basic_cancel_ok, %{consumer_tag: _consumer_tag}}, chan) do
  {:noreply, chan}
end

# A delivered message: handled in a spawned (unlinked) process so a slow or
# crashing sink cannot block or take down the consumer loop.
def handle_info(
      {:basic_deliver, payload,
       %{routing_key: routing_key, delivery_tag: _tag, redelivered: _redelivered}},
      chan
    ) do
  spawn(fn -> consume(payload, routing_key) end)
  {:noreply, chan}
end
# Routes a delivered payload to the sink selected by its routing key.
# An unknown routing key raises (no matching branch) and, like any sink
# failure, is swallowed by the rescue so a bad message never crashes the
# worker process.
defp consume(payload, action) do
  IO.puts("action: #{action} log: #{payload}")

  case action do
    "error.db" -> ElxLogger.make(:db, :error, payload)
    "error.file" -> ElxLogger.make(:file, :error, payload)
    "error.mail" -> ElxLogger.make(:mail, :error, payload)
  end
rescue
  _ ->
    IO.puts("Error Consumer did not completed")
end
| 30.271429 | 99 | 0.662577 |
ffe0ecc36bab3c90e79733470704c29cbd5ae35d | 10,103 | ex | Elixir | lib/oban/query.ex | andyl/oban | 90af5ae5e999a46b912a60407f1f6396ddc06469 | [
"Apache-2.0"
] | 1 | 2021-11-20T19:16:24.000Z | 2021-11-20T19:16:24.000Z | lib/oban/query.ex | andyl/oban | 90af5ae5e999a46b912a60407f1f6396ddc06469 | [
"Apache-2.0"
] | null | null | null | lib/oban/query.ex | andyl/oban | 90af5ae5e999a46b912a60407f1f6396ddc06469 | [
"Apache-2.0"
] | null | null | null | defmodule Oban.Query do
@moduledoc false
import Ecto.Query
import DateTime, only: [utc_now: 0]
alias Ecto.{Changeset, Multi}
alias Oban.{Beat, Config, Job}
# Atomically claims up to `demand` available jobs in `queue`: the inner query
# selects candidate ids ordered by priority/scheduled_at/id under
# FOR UPDATE SKIP LOCKED (so concurrent fetchers never block or double-claim),
# and the outer update marks them executing, bumps attempt, and stamps
# attempted_by with [node, queue, nonce]. Returns {count, claimed_jobs}.
@spec fetch_available_jobs(Config.t(), binary(), binary(), pos_integer()) ::
        {integer(), nil | [Job.t()]}
def fetch_available_jobs(%Config{} = conf, queue, nonce, demand) do
  %Config{node: node, prefix: prefix, repo: repo, verbose: verbose} = conf

  subquery =
    Job
    |> select([:id])
    |> where([j], j.state == "available")
    |> where([j], j.queue == ^queue)
    |> order_by([j], asc: j.priority, asc: j.scheduled_at, asc: j.id)
    |> limit(^demand)
    |> lock("FOR UPDATE SKIP LOCKED")

  updates = [
    set: [state: "executing", attempted_at: utc_now(), attempted_by: [node, queue, nonce]],
    inc: [attempt: 1]
  ]

  Job
  |> join(:inner, [j], x in subquery(subquery, prefix: prefix), on: j.id == x.id)
  |> select([j, _], j)
  |> repo.update_all(updates, log: verbose, prefix: prefix)
end
# Inserts a job inside a transaction; insert_unique/2 enforces any unique
# option on the changeset via an advisory lock before inserting.
@spec fetch_or_insert_job(Config.t(), Changeset.t()) :: {:ok, Job.t()} | {:error, Changeset.t()}
def fetch_or_insert_job(%Config{repo: repo, verbose: verbose} = conf, changeset) do
  fun = fn -> insert_unique(conf, changeset) end

  with {:ok, result} <- repo.transaction(fun, log: verbose), do: result
end

# Ecto.Multi variant: the same unique insert as a named step, using the repo
# the Multi runs in rather than the configured one.
@spec fetch_or_insert_job(Config.t(), Multi.t(), term(), Changeset.t()) :: Multi.t()
def fetch_or_insert_job(config, multi, name, changeset) do
  Multi.run(multi, name, fn repo, _changes ->
    insert_unique(%{config | repo: repo}, changeset)
  end)
end
# Bulk-inserts job changesets with on_conflict: :nothing, returning the
# inserted rows (conflicting rows are silently skipped; an all-conflict batch
# yields []).
@spec insert_all_jobs(Config.t(), [Changeset.t(Job.t())]) :: [Job.t()]
def insert_all_jobs(%Config{} = conf, changesets) when is_list(changesets) do
  %Config{prefix: prefix, repo: repo, verbose: verbose} = conf

  entries = Enum.map(changesets, &Job.to_map/1)
  opts = [log: verbose, on_conflict: :nothing, prefix: prefix, returning: true]

  case repo.insert_all(Job, entries, opts) do
    {0, _} -> []
    {_count, jobs} -> jobs
  end
end

# Ecto.Multi variant of the bulk insert above.
@spec insert_all_jobs(Config.t(), Multi.t(), atom(), [Changeset.t()]) :: Multi.t()
def insert_all_jobs(config, multi, name, changesets) when is_list(changesets) do
  Multi.run(multi, name, fn repo, _changes ->
    {:ok, insert_all_jobs(%{config | repo: repo}, changesets)}
  end)
end
# Flips scheduled/retryable jobs in `queue` whose scheduled_at has passed
# (relative to :max_scheduled_at, defaulting to now) back to available.
@spec stage_scheduled_jobs(Config.t(), binary(), opts :: keyword()) :: {integer(), nil}
def stage_scheduled_jobs(%Config{} = config, queue, opts \\ []) do
  %Config{prefix: prefix, repo: repo, verbose: verbose} = config

  max_scheduled_at = Keyword.get(opts, :max_scheduled_at, utc_now())

  Job
  |> where([j], j.state in ["scheduled", "retryable"])
  |> where([j], j.queue == ^queue)
  |> where([j], j.scheduled_at <= ^max_scheduled_at)
  |> repo.update_all([set: [state: "available"]], log: verbose, prefix: prefix)
end
# Records a producer heartbeat row; beats are what rescue_orphaned_jobs/2
# consults to decide whether a producer is still alive.
@spec insert_beat(Config.t(), map()) :: {:ok, Beat.t()} | {:error, Changeset.t()}
def insert_beat(%Config{prefix: prefix, repo: repo, verbose: verbose}, params) do
  params
  |> Beat.new()
  |> repo.insert(log: verbose, prefix: prefix)
end
# Finds executing jobs in `queue` whose producer (matched on attempted_by ==
# [node, queue, nonce]) has produced no Beat within the last rescue_after
# seconds, then makes them available again — or discards them when attempts
# are exhausted. Returns {available_count, discarded_count}.
@spec rescue_orphaned_jobs(Config.t(), binary()) :: {integer(), integer()}
def rescue_orphaned_jobs(%Config{} = conf, queue) do
  %Config{prefix: prefix, repo: repo, rescue_after: seconds, verbose: verbose} = conf

  orphaned_at = DateTime.add(utc_now(), -seconds)

  subquery =
    Job
    |> where([j], j.state == "executing" and j.queue == ^queue)
    |> join(:left, [j], b in Beat,
      on: j.attempted_by == [b.node, b.queue, b.nonce] and b.inserted_at > ^orphaned_at
    )
    |> where([_, b], is_nil(b.node))
    |> select([:id])

  requery = join(Job, :inner, [j], x in subquery(subquery, prefix: prefix), on: j.id == x.id)
  query_opts = [log: verbose, prefix: prefix]

  # This is done using two queries rather than a single update with a case statement because we
  # can't cast to the `oban_job_state` enum easily outside of the public schema.
  {available_count, _} =
    requery
    |> where([j], j.attempt < j.max_attempts)
    |> repo.update_all([set: [state: "available"]], query_opts)

  {discarded_count, _} =
    requery
    |> where([j], j.attempt >= j.max_attempts)
    |> repo.update_all([set: [state: "discarded", discarded_at: utc_now()]], query_opts)

  {available_count, discarded_count}
end
# Deleting truncated or outdated jobs needs to use the same index. We force the queries to use
# the same partial composite index by providing an `attempted_at` value for both.

# Keeps only the newest `length` finished (completed/discarded) jobs, deleting
# up to `limit` older ones per call.
@spec delete_truncated_jobs(Config.t(), pos_integer(), pos_integer()) :: {integer(), nil}
def delete_truncated_jobs(%Config{prefix: prefix, repo: repo, verbose: verbose}, length, limit) do
  subquery =
    Job
    |> where([j], j.state in ["completed", "discarded"])
    |> where([j], j.attempted_at < ^utc_now())
    |> order_by(desc: :attempted_at)
    |> select([:id])
    |> offset(^length)
    |> limit(^limit)

  Job
  |> join(:inner, [j], x in subquery(subquery, prefix: prefix), on: j.id == x.id)
  |> repo.delete_all(log: verbose, prefix: prefix)
end
# Deletes up to `limit` finished jobs whose last attempt is older than
# `seconds` ago.
@spec delete_outdated_jobs(Config.t(), pos_integer(), pos_integer()) :: {integer(), nil}
def delete_outdated_jobs(%Config{prefix: prefix, repo: repo, verbose: verbose}, seconds, limit) do
  outdated_at = DateTime.add(utc_now(), -seconds)

  subquery =
    Job
    |> where([j], j.state in ["completed", "discarded"])
    |> where([j], j.attempted_at < ^outdated_at)
    |> select([:id])
    |> limit(^limit)

  Job
  |> join(:inner, [j], x in subquery(subquery, prefix: prefix), on: j.id == x.id)
  |> repo.delete_all(log: verbose, prefix: prefix)
end
# Deletes up to `limit` heartbeat rows older than `seconds` ago. Note the
# self-join matches on inserted_at (not a primary key), so all beats sharing a
# selected timestamp are removed together.
@spec delete_outdated_beats(Config.t(), pos_integer(), pos_integer()) :: {integer(), nil}
def delete_outdated_beats(%Config{prefix: prefix, repo: repo, verbose: verbose}, seconds, limit) do
  outdated_at = DateTime.add(utc_now(), -seconds)

  subquery =
    Beat
    |> where([b], b.inserted_at < ^outdated_at)
    |> order_by(asc: :inserted_at)
    |> limit(^limit)

  Beat
  |> join(:inner, [b], x in subquery(subquery, prefix: prefix),
    on: b.inserted_at == x.inserted_at
  )
  |> repo.delete_all(log: verbose, prefix: prefix)
end
# Marks a single job completed, stamping completed_at.
@spec complete_job(Config.t(), Job.t()) :: :ok
def complete_job(%Config{prefix: prefix, repo: repo, verbose: verbose}, %Job{id: id}) do
  repo.update_all(
    where(Job, id: ^id),
    [set: [state: "completed", completed_at: utc_now()]],
    log: verbose,
    prefix: prefix
  )

  :ok
end
# Marks a single job discarded, stamping discarded_at.
@spec discard_job(Config.t(), Job.t()) :: :ok
def discard_job(%Config{prefix: prefix, repo: repo, verbose: verbose}, %Job{id: id}) do
  repo.update_all(
    where(Job, id: ^id),
    [set: [state: "discarded", discarded_at: utc_now()]],
    log: verbose,
    prefix: prefix
  )

  :ok
end
# Records a failed attempt: appends the formatted error to the job's errors
# array, then either discards the job (attempts exhausted) or schedules a
# retry `backoff` seconds from now.
@spec retry_job(Config.t(), Job.t(), pos_integer(), binary()) :: :ok
def retry_job(%Config{repo: repo} = config, %Job{} = job, backoff, formatted_error) do
  %Job{attempt: attempt, id: id, max_attempts: max_attempts} = job

  set =
    if attempt >= max_attempts do
      # Stamp discarded_at for consistency with discard_job/2 and
      # rescue_orphaned_jobs/2; completed_at is kept as well so callers that
      # read it on finished jobs keep working.
      [state: "discarded", completed_at: utc_now(), discarded_at: utc_now()]
    else
      [state: "retryable", completed_at: utc_now(), scheduled_at: next_attempt_at(backoff)]
    end

  updates = [
    set: set,
    push: [errors: %{attempt: attempt, at: utc_now(), error: formatted_error}]
  ]

  repo.update_all(
    where(Job, id: ^id),
    updates,
    log: config.verbose,
    prefix: config.prefix
  )

  # Return :ok to honor the declared @spec and match complete_job/discard_job,
  # instead of leaking the {count, nil} tuple from update_all.
  :ok
end
# Publishes a JSON payload over Postgres NOTIFY on "<prefix>.<channel>".
@spec notify(Config.t(), binary(), map()) :: :ok
def notify(%Config{} = conf, channel, %{} = payload) when is_binary(channel) do
  %Config{prefix: prefix, repo: repo, verbose: verbose} = conf

  repo.query(
    "SELECT pg_notify($1, $2)",
    ["#{prefix}.#{channel}", Jason.encode!(payload)],
    log: verbose
  )

  :ok
end
# Helpers

# Time of the next attempt: `backoff` seconds from now.
defp next_attempt_at(backoff), do: DateTime.add(utc_now(), backoff, :second)
# Unique-aware insert. When the changeset carries unique options: build the
# dedup query, take a transaction-scoped advisory lock keyed on the unique
# fields/states, and look for an existing matching job. Fallthroughs:
# {:error, :locked} (another tx holds the lock) returns the applied changeset
# WITHOUT inserting; nil (no unique option, or no existing job found) inserts
# normally.
defp insert_unique(%Config{prefix: prefix, repo: repo, verbose: verbose}, changeset) do
  query_opts = [log: verbose, on_conflict: :nothing, prefix: prefix]

  with {:ok, query, lock_key} <- unique_query(changeset),
       :ok <- acquire_lock(repo, lock_key, query_opts),
       {:ok, job} <- unprepared_one(repo, query, query_opts) do
    {:ok, job}
  else
    {:error, :locked} ->
      {:ok, Changeset.apply_changes(changeset)}

    nil ->
      repo.insert(changeset, query_opts)
  end
end
# Builds the dedup query for a changeset with unique options: newest job whose
# unique fields match, whose state is in the configured set, and which was
# inserted within the last `period` seconds. The advisory-lock key is a hash
# of the resolved field values and states. Returns nil when no unique option
# is present.
defp unique_query(%{changes: %{unique: %{} = unique}} = changeset) do
  %{fields: fields, period: period, states: states} = unique

  since = DateTime.add(utc_now(), period * -1, :second)
  fields = for field <- fields, do: {field, Changeset.get_field(changeset, field)}
  states = for state <- states, do: to_string(state)
  lock_key = :erlang.phash2([fields, states])

  query =
    Job
    |> where([j], j.state in ^states)
    |> where([j], j.inserted_at > ^since)
    |> where(^fields)
    |> order_by(desc: :id)
    |> limit(1)

  {:ok, query, lock_key}
end

defp unique_query(_changeset), do: nil
# Tries to take a transaction-scoped advisory lock (released automatically at
# commit/rollback). Anything but a [[true]] row means somebody else holds it.
defp acquire_lock(repo, lock_key, opts) do
  case repo.query("SELECT pg_try_advisory_xact_lock($1)", [lock_key], opts) do
    {:ok, %{rows: [[true]]}} ->
      :ok

    _ ->
      {:error, :locked}
  end
end
# With certain unique option combinations Postgres will decide to use a `generic plan` instead
# of a `custom plan`, which *drastically* impacts the unique query performance. Ecto doesn't
# provide a way to opt out of prepared statements for a single query, so this function works
# around the issue by forcing a raw SQL query.
#
# Returns {:ok, job} for exactly one row, nil otherwise (zero or many rows,
# or a query error).
defp unprepared_one(repo, query, opts) do
  {raw_sql, bindings} = repo.to_sql(:all, query)

  case repo.query(raw_sql, bindings, opts) do
    {:ok, %{columns: columns, rows: [rows]}} -> {:ok, repo.load(Job, {columns, rows})}
    _ -> nil
  end
end
end
| 33.789298 | 101 | 0.623775 |
ffe0ede87bb4198d8b44a7f8b174c832d166feaa | 3,255 | exs | Elixir | test/logger_logstash_backend_test.exs | EasyMile/logger_logstash_backend | 59df57a59108e6d29d00cf941428eb01fd8bea46 | [
"Apache-2.0"
] | null | null | null | test/logger_logstash_backend_test.exs | EasyMile/logger_logstash_backend | 59df57a59108e6d29d00cf941428eb01fd8bea46 | [
"Apache-2.0"
] | null | null | null | test/logger_logstash_backend_test.exs | EasyMile/logger_logstash_backend | 59df57a59108e6d29d00cf941428eb01fd8bea46 | [
"Apache-2.0"
] | 1 | 2020-04-28T10:14:04.000Z | 2020-04-28T10:14:04.000Z | ################################################################################
# Copyright 2015 Marcelo Gornstein <marcelog@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
defmodule LoggerLogstashBackendTest do # NOTE(review): tests below hardcode source LINE numbers (47/72) — only same-line comments are safe in this file; added lines would shift them.
  use ExUnit.Case, async: false # global state: backend config, UDP port, env var
  require Logger

  @env_var "42"
  @backend {LoggerLogstashBackend, :test}
  Logger.add_backend @backend # registered once, at compile/load of this module

  setup do
    System.put_env("TEST_ENV_VAR", @env_var)
    Logger.configure_backend @backend, [
      host: "127.0.0.1",
      port: 10001, # must match the UDP socket opened below
      level: :info,
      type: "some_app",
      version: "some_version",
      metadata: [
        some_metadata: "go here",
        env: {:system, "TEST_ENV_VAR"}, # resolved from the env var at log time
      ]
    ]
    {:ok, socket} = :gen_udp.open 10001, [:binary, {:active, true}] # this test process plays logstash

    on_exit fn ->
      :ok = :gen_udp.close socket
    end
    :ok
  end

  test "can log" do
    Logger.info "hello world", [key1: "field1"]
    json = get_log()
    {:ok, data} = Poison.decode json
    assert data["type"] === "some_app"
    assert data["version"] === "some_version"
    assert data["message"] === "hello world"

    expected = %{
      "function" => "test can log/1",
      "level" => "info",
      "module" => "LoggerLogstashBackendTest",
      "pid" => (inspect self()),
      "some_metadata" => "go here",
      "env" => @env_var,
      "line" => 47, # source line of the Logger.info call above — fragile
      "key1" => "field1",
    }
    assert contains?(data["fields"], expected)
    {:ok, ts} = NaiveDateTime.from_iso8601(data["@timestamp"])
    now = NaiveDateTime.utc_now
    assert NaiveDateTime.diff(now, ts) < 1000 # seconds; generous clock-skew allowance
  end

  test "can log pids" do
    Logger.info "pid", [pid_key: self()]
    json = get_log()
    {:ok, data} = Poison.decode json
    assert data["type"] === "some_app"
    assert data["version"] === "some_version"
    assert data["message"] === "pid"

    expected = %{
      "function" => "test can log pids/1",
      "level" => "info",
      "module" => "LoggerLogstashBackendTest",
      "pid" => inspect(self()),
      "pid_key" => inspect(self()), # pid metadata must be serialized via inspect
      "some_metadata" => "go here",
      "env" => @env_var,
      "line" => 72, # source line of the Logger.info call above — fragile
    }
    assert contains?(data["fields"], expected)
    {:ok, ts} = NaiveDateTime.from_iso8601(data["@timestamp"])
    now = NaiveDateTime.utc_now
    assert NaiveDateTime.diff(now, ts) < 1000
  end

  # Backend level is :info, so debug messages must never reach the socket.
  test "cant log when minor levels" do
    Logger.debug "hello world", [key1: "field1"]
    :nothing_received = get_log()
  end

  # Waits up to 500ms for one UDP datagram from the backend; returns the raw
  # JSON payload or :nothing_received on timeout.
  defp get_log do
    receive do
      {:udp, _, _, _, json} -> json
    after 500 -> :nothing_received
    end
  end

  # True when every key/value pair of map2 is present (and equal) in map1;
  # Map.fetch! raises on a missing key, surfacing the field name in failures.
  defp contains?(map1, map2) do
    Enum.all?(Map.to_list(map2), fn {key, value} ->
      Map.fetch!(map1, key) == value
    end)
  end
end
| 28.552632 | 80 | 0.586482 |
ffe101575e978a42905d6e1e9473c57fd2ca705e | 4,165 | ex | Elixir | geronimo/lib/geronimo/utils/crud.ex | FoxComm/highlander | 1aaf8f9e5353b94c34d574c2a92206a1c363b5be | [
"MIT"
] | 10 | 2018-04-12T22:29:52.000Z | 2021-10-18T17:07:45.000Z | geronimo/lib/geronimo/utils/crud.ex | FoxComm/highlander | 1aaf8f9e5353b94c34d574c2a92206a1c363b5be | [
"MIT"
] | null | null | null | geronimo/lib/geronimo/utils/crud.ex | FoxComm/highlander | 1aaf8f9e5353b94c34d574c2a92206a1c363b5be | [
"MIT"
] | 1 | 2018-07-06T18:42:05.000Z | 2018-07-06T18:42:05.000Z | defmodule Geronimo.Crud do
defmacro __using__(_) do
quote do
alias Geronimo.Repo
def get_all(scope) do
(from m in __MODULE__, where: fragment("scope @> ?", ^scope))
|> Repo.all
end
def get(id, scope \\ "1") do
try do
case Repo.one(from m in __MODULE__, where: fragment("scope @> ?", ^scope), where: m.id == ^id) do
nil -> {:error, "Not found"}
x -> {:ok, Map.merge(x, %{versions: get_versions(x.id)})}
end
rescue Ecto.NoResultsError ->
raise %NotFoundError{message: "Entity with id #{id} not found"}
end
end
def get_versions(id) do
q = "select sys_period from #{history_table()} where id=$1::integer"
case Ecto.Adapters.SQL.query(Repo, q, [id]) do
{:ok, vers} ->
Enum.map(vers.rows, fn r -> hd(r).upper |> Timex.to_datetime end)
_ -> []
end
end
def create(params, user = %Geronimo.User{}) when is_map(params) do
payload = Map.merge(params, %{created_by: user.id, scope: user.scope})
Repo.transaction(fn ->
case Repo.insert(changeset(apply(__MODULE__, :__struct__, []), payload)) do
{:ok, record} ->
Geronimo.Kafka.Worker.push_async(table(), record)
record
{_, changes} -> Repo.rollback(changes)
end
end)
end
def get_specific_version(id, version, scope) do
datetime = Timex.parse!(version, "%FT%T.%fZ", :strftime)
|> Timex.shift(seconds: -1)
|> Timex.to_datetime
q = """
SELECT #{version_fields()} FROM #{table()}
WHERE id = $1::integer AND sys_period @> $2::timestamptz AND scope @> $3::ltree
UNION ALL
SELECT #{version_fields()} from #{history_table()}
WHERE id = $1::integer AND sys_period @> $2::timestamptz AND scope @> $3::ltree;
"""
case Ecto.Adapters.SQL.query(Repo, q, [id, datetime, scope]) do
{:ok, res} -> format_version(res)
{:error, _} -> raise %NotFoundError{message: "Version #{version} not found"}
end
end
def restore_version(id, version, scope) do
{:ok,row} = get(id, scope)
{:ok, version} = get_specific_version(id, version, scope)
fld = content_field()
changeset(row, %{fld => Map.get(version, fld)})
|> apply_change()
end
def update(id, params, scope) when is_map(params) do
case get(id, scope) do
{:ok, row} -> changeset(row, params)
|> apply_change()
{:error, _} ->
raise %NotFoundError{message: "#{Inflex.singularize(table()) |> Inflex.camelize} with id #{id} not found"}
end
end
def delete(id) do
Repo.transaction(fn ->
case Repo.delete(__MODULE__, id) do
{:error, err} -> err
_ -> nil
end
end)
end
defp apply_change(changeset) do
Repo.transaction(fn ->
case Repo.update(changeset) do
{:ok, record} -> Map.merge(record, %{versions: get_versions(record.id)})
{:error, changeset} ->
Repo.rollback(changeset)
{:error, changeset}
end
end)
end
def format_version(data) do
cols = Enum.map(data.columns, &(String.to_atom(&1)))
v = Enum.flat_map(data.rows, fn(r) ->
Enum.zip(cols, r)
end)
|> Enum.into(%{})
|> Map.update!(:inserted_at, &(Timex.format!(&1, "%FT%T.%fZ", :strftime)))
|> Map.update!(:updated_at, &(Timex.format!(&1, "%FT%T.%fZ", :strftime)))
d = if Map.has_key?(v, :schema_version) do
Map.update!(v, :schema_version, &(Timex.format!(&1, "%FT%T.%fZ", :strftime)))
else
v
end
{:ok, d}
end
def table do
[m, t] = Inflex.underscore(__MODULE__) |> String.split(".")
Inflex.pluralize(t)
end
def history_table do
"#{table()}_history"
end
end
end
end
| 33.055556 | 118 | 0.52461 |
ffe14fea9dce9f2233274f9dc3d3e8f11b6d0b78 | 399 | exs | Elixir | priv/repo/migrations/20181101051243_create_replies.exs | The-Vikings/phoenix-and-elm | c6c6bcc2415a479b81cae09cd62872873c7b7e78 | [
"MIT"
] | 1 | 2018-10-27T13:52:51.000Z | 2018-10-27T13:52:51.000Z | priv/repo/migrations/20181101051243_create_replies.exs | The-Vikings/phoenix-and-elm | c6c6bcc2415a479b81cae09cd62872873c7b7e78 | [
"MIT"
] | 1 | 2018-11-01T08:12:02.000Z | 2018-11-01T09:37:56.000Z | priv/repo/migrations/20181101051243_create_replies.exs | The-Vikings/phoenix-and-elm | c6c6bcc2415a479b81cae09cd62872873c7b7e78 | [
"MIT"
] | null | null | null | defmodule PhoenixAndElm.Repo.Migrations.CreateReplies do
use Ecto.Migration
def change do
create table(:replies) do
add :body, :string
add :user_id, references(:users, on_delete: :nothing)
add :question_id, references(:questions, on_delete: :nothing)
timestamps()
end
create index(:replies, [:user_id])
create index(:replies, [:question_id])
end
end
| 23.470588 | 67 | 0.686717 |
ffe15f345f57a555e3401d04bb2037f0ad2524cf | 116 | ex | Elixir | {{cookiecutter.project_underscored}}/lib/{{cookiecutter.project_underscored}}/web/html/page/page_view.ex | ijcd/cookiecutter-screaming-phoenix | f34ca6704f8f8b0581e91b4f6e3e75239fb79cb1 | [
"Apache-2.0"
] | null | null | null | {{cookiecutter.project_underscored}}/lib/{{cookiecutter.project_underscored}}/web/html/page/page_view.ex | ijcd/cookiecutter-screaming-phoenix | f34ca6704f8f8b0581e91b4f6e3e75239fb79cb1 | [
"Apache-2.0"
] | null | null | null | {{cookiecutter.project_underscored}}/lib/{{cookiecutter.project_underscored}}/web/html/page/page_view.ex | ijcd/cookiecutter-screaming-phoenix | f34ca6704f8f8b0581e91b4f6e3e75239fb79cb1 | [
"Apache-2.0"
] | null | null | null | defmodule {{cookiecutter.project_module}}.Web.HTML.PageView do
use {{cookiecutter.project_module}}.Web, :view
end
| 29 | 62 | 0.784483 |
ffe15f7e78d0cb47c81cbc32cb2f39fad6f59f06 | 15,622 | ex | Elixir | lib/elasticsearch.ex | LetThereBeDwight/elasticsearch-elixir | a24a198fe38aae7886fb0765ccc487ed65e995e5 | [
"MIT"
] | 215 | 2019-01-28T23:31:14.000Z | 2022-03-31T16:03:30.000Z | lib/elasticsearch.ex | LetThereBeDwight/elasticsearch-elixir | a24a198fe38aae7886fb0765ccc487ed65e995e5 | [
"MIT"
] | 56 | 2018-01-02T17:52:54.000Z | 2019-01-19T16:05:47.000Z | lib/elasticsearch.ex | LetThereBeDwight/elasticsearch-elixir | a24a198fe38aae7886fb0765ccc487ed65e995e5 | [
"MIT"
] | 39 | 2019-03-12T20:35:03.000Z | 2022-03-11T16:49:45.000Z | defmodule Elasticsearch do
@moduledoc """
Entry-point for interacting with your Elasticsearch cluster(s).
You should configure at least one `Elasticsearch.Cluster` in order to
use the functions in this module, or else you'll need to pass all the
configuration for the cluster into each function call.
"""
alias Elasticsearch.{
Document,
Cluster,
Cluster.Config
}
@type index_name :: String.t()
@type url :: Path.t()
@type opts :: Keyword.t()
@type data :: map | String.t()
@type response :: {:ok, map} | {:error, Elasticsearch.Exception.t()}
@doc """
Creates or updates a document in a given index.
The document must implement the `Elasticsearch.Document` protocol.
## Example
iex> Index.create_from_file(Cluster, "posts-1", "test/support/settings/posts.json")
...> struct = %Post{id: 123, title: "Post", author: "Author"}
...> Elasticsearch.put_document(Cluster, struct, "posts-1")
{:ok,
%{
"_id" => "123",
"_index" => "posts-1",
"_primary_term" => 1,
"_seq_no" => 0,
"_shards" => %{"failed" => 0, "successful" => 1, "total" => 2},
"_type" => "_doc",
"_version" => 1,
"result" => "created"
}}
"""
@spec put_document(Cluster.t(), Document.t(), index_name) :: response
def put_document(cluster, document, index) do
put(cluster, document_url(document, index), Document.encode(document))
end
@doc """
Creates a document in a given index. Use this function when your documents
do not have IDs or you want to use Elasticsearch's automatic ID generation.
https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html#_automatic_id_generation
The document must implement the `Elasticsearch.Document`. protocol.
## Example
Index.create_from_file(Cluster, "posts-1", "test/support/settings/posts.json")
Elasticsearch.post_document(Cluster, %Post{title: "Post"}, "posts-1")
# => {:ok,
# => %{
# => "_id" => "W0tpsmIBdwcYyG50zbta",
# => "_index" => "posts-1",
# => "_primary_term" => 1,
# => "_seq_no" => 0,
# => "_shards" => %{"failed" => 0, "successful" => 1, "total" => 2},
# => "_type" => "_doc",
# => "_version" => 1,
# => "result" => "created"
# => }}
"""
@spec post_document(Cluster.t(), Document.t(), index_name) :: response
def post_document(cluster, document, index) do
post(cluster, document_url(document, index), Document.encode(document))
end
@doc """
Same as `put_document/2`, but raises on errors.
## Example
iex> Index.create_from_file(Cluster, "posts-1", "test/support/settings/posts.json")
...> struct = %Post{id: 123, title: "Post", author: "Author"}
...> Elasticsearch.put_document!(Cluster, struct, "posts-1")
%{
"_id" => "123",
"_index" => "posts-1",
"_primary_term" => 1,
"_seq_no" => 0,
"_shards" => %{"failed" => 0, "successful" => 1, "total" => 2},
"_type" => "_doc",
"_version" => 1,
"result" => "created"
}
"""
@spec put_document!(Cluster.t(), Document.t(), index_name) :: map | no_return
def put_document!(cluster, document, index) do
put!(cluster, document_url(document, index), Document.encode(document))
end
@doc """
Deletes a document from a given index.
The document must implement the `Elasticsearch.Document` protocol.
## Example
iex> Index.create_from_file(Cluster, "posts-1", "test/support/settings/posts.json")
...> struct = %Post{id: 123, title: "Post", author: "Author"}
...> Elasticsearch.put_document!(Cluster, struct, "posts-1")
...> Elasticsearch.delete_document(Cluster, struct, "posts-1")
{:ok,
%{
"_id" => "123",
"_index" => "posts-1",
"_primary_term" => 1,
"_seq_no" => 1,
"_shards" => %{"failed" => 0, "successful" => 1, "total" => 2},
"_type" => "_doc",
"_version" => 2,
"result" => "deleted"
}}
"""
@spec delete_document(Cluster.t(), Document.t(), index_name) :: response
def delete_document(cluster, document, index) do
delete(cluster, document_url(document, index))
end
@doc """
Same as `delete_document/2`, but raises on errors.
## Example
iex> Index.create_from_file(Cluster, "posts-1", "test/support/settings/posts.json")
...> struct = %Post{id: 123, title: "Post", author: "Author"}
...> Elasticsearch.put_document!(Cluster, struct, "posts-1")
...> Elasticsearch.delete_document!(Cluster, struct, "posts-1")
%{
"_id" => "123",
"_index" => "posts-1",
"_primary_term" => 1,
"_seq_no" => 1,
"_shards" => %{"failed" => 0, "successful" => 1, "total" => 2},
"_type" => "_doc",
"_version" => 2,
"result" => "deleted"
}
"""
@spec delete_document!(Cluster.t(), Document.t(), index_name) :: map | no_return
def delete_document!(cluster, document, index) do
delete!(cluster, document_url(document, index))
end
defp document_url(document, index) do
url = "/#{index}/_doc/#{Document.id(document)}"
if routing = Document.routing(document) do
document_url_with_routing(url, routing)
else
url
end
end
defp document_url_with_routing(url, routing) do
url <>
if url =~ ~r/\?/ do
"&"
else
"?"
end <> URI.encode_query(%{routing: routing})
end
@doc """
Waits for a given Elasticsearch cluster to be available.
It will try a given number of times, with 1sec delay between tries.
"""
@spec wait_for_boot(Cluster.t(), integer) ::
{:ok, map}
| {:error, RuntimeError.t()}
| {:error, Elasticsearch.Exception.t()}
  # Bodiless head supplying the default attempt counter.
  def wait_for_boot(cluster, tries, count \\ 0)

  # Attempts exhausted: give up with a descriptive RuntimeError.
  def wait_for_boot(_cluster, tries, count) when count == tries do
    {
      :error,
      RuntimeError.exception("""
      Elasticsearch could not be found after #{count} tries. Make sure it's running?
      """)
    }
  end

  # Poll the health endpoint. On {:error, _} sleep 1s and retry; on success
  # the `with` falls through, returning the {:ok, _} response unchanged.
  def wait_for_boot(cluster, tries, count) do
    with {:error, _} <- get(cluster, "/_cat/health?format=json") do
      :timer.sleep(1000)
      wait_for_boot(cluster, tries, count + 1)
    end
  end
@doc """
Gets the contents of a path from the Elasticsearch API.
## Examples
iex> {:ok, resp} = Elasticsearch.get(Cluster, "/_cat/health?format=json")
...> is_list(resp)
true
iex> Elasticsearch.get(Cluster, "/nonexistent")
{:error,
%Elasticsearch.Exception{col: nil, line: nil,
message: "no such index", query: nil,
raw: %{"error" => %{"index" => "nonexistent",
"index_uuid" => "_na_", "reason" => "no such index",
"resource.id" => "nonexistent",
"resource.type" => "index_or_alias",
"root_cause" => [%{"index" => "nonexistent",
"index_uuid" => "_na_", "reason" => "no such index",
"resource.id" => "nonexistent",
"resource.type" => "index_or_alias",
"type" => "index_not_found_exception"}],
"type" => "index_not_found_exception"}, "status" => 404},
status: 404, type: "index_not_found_exception"}}
"""
@spec get(Cluster.t(), url) :: response
@spec get(Cluster.t(), url, opts) :: response
def get(cluster, url, opts \\ []) do
config = Config.get(cluster)
config
|> config.api.request(:get, url, "", opts)
|> format()
end
@doc """
The same as `get/1`, but returns the response instead of a tuple. Raises on
errors.
## Examples
iex> resp = Elasticsearch.get!(Cluster, "/_cat/health?format=json")
...> is_list(resp)
true
iex> Elasticsearch.get!(Cluster, "/nonexistent")
** (Elasticsearch.Exception) (index_not_found_exception) no such index
"""
@spec get!(Cluster.t(), url) :: map | no_return
@spec get!(Cluster.t(), url, opts) :: map | no_return
def get!(cluster, url, opts \\ []) do
cluster
|> get(url, opts)
|> unwrap!()
end
@doc """
Puts data to a given Elasticsearch API path.
## Examples
iex> Index.create_from_file(Cluster, "posts-1", "test/support/settings/posts.json")
...> Elasticsearch.put(Cluster, "/posts-1/_doc/id", %{"title" => "title", "author" => "author"})
{:ok,
%{
"_id" => "id",
"_index" => "posts-1",
"_primary_term" => 1,
"_seq_no" => 0,
"_shards" => %{"failed" => 0, "successful" => 1, "total" => 2},
"_type" => "_doc",
"_version" => 1,
"result" => "created"
}}
iex> Elasticsearch.put(Cluster, "/bad/url", %{"title" => "title", "author" => "author"})
{:error,
%Elasticsearch.Exception{col: nil, line: nil,
message: "Incorrect HTTP method for uri [/bad/url] and method [PUT], allowed: [POST]",
query: nil, raw: nil, status: nil, type: nil}}
"""
@spec put(Cluster.t(), url, data) :: response
@spec put(Cluster.t(), url, data, opts) :: response
def put(cluster, url, data, opts \\ []) do
config = Config.get(cluster)
config
|> config.api.request(:put, url, data, opts)
|> format()
end
@doc """
The same as `put/2`, but returns the response instead of a tuple. Raises on
errors.
## Examples
iex> Index.create_from_file(Cluster, "posts", "test/support/settings/posts.json")
...> Elasticsearch.put!(Cluster, "/posts/_doc/id", %{"name" => "name", "author" => "author"})
%{
"_id" => "id",
"_index" => "posts",
"_primary_term" => 1,
"_seq_no" => 0,
"_shards" => %{"failed" => 0, "successful" => 1, "total" => 2},
"_type" => "_doc",
"_version" => 1,
"result" => "created"
}
iex> Elasticsearch.put!(Cluster, "/bad/url", %{"data" => "here"})
** (Elasticsearch.Exception) Incorrect HTTP method for uri [/bad/url] and method [PUT], allowed: [POST]
"""
@spec put!(Cluster.t(), url, data) :: map | no_return
@spec put!(Cluster.t(), url, data, opts) :: map | no_return
def put!(cluster, url, data, opts \\ []) do
cluster
|> put(url, data, opts)
|> unwrap!()
end
@doc """
Posts data or queries to a given Elasticsearch path.
## Examples
iex> Index.create_from_file(Cluster, "posts", "test/support/settings/posts.json")
...> query = %{"query" => %{"match_all" => %{}}}
...> {:ok, resp} = Elasticsearch.post(Cluster, "/posts/_search", query)
...> resp["hits"]["hits"]
[]
"""
@spec post(Cluster.t(), url, data) :: response
@spec post(Cluster.t(), url, data, opts) :: response
def post(cluster, url, data, opts \\ []) do
config = Config.get(cluster)
config
|> config.api.request(:post, url, data, opts)
|> format()
end
@doc """
The same as `post/1`, but returns the response. Raises on errors.
## Examples
iex> Index.create_from_file(Cluster, "posts", "test/support/settings/posts.json")
...> query = %{"query" => %{"match_all" => %{}}}
...> resp = Elasticsearch.post!(Cluster, "/posts/_search", query)
...> is_map(resp)
true
Raises an error if the path is invalid or another error occurs:
iex> query = %{"query" => %{"match_all" => %{}}}
...> Elasticsearch.post!(Cluster, "/nonexistent/_search", query)
** (Elasticsearch.Exception) (index_not_found_exception) no such index
"""
@spec post!(Cluster.t(), url, data) :: map | no_return
@spec post!(Cluster.t(), url, data, opts) :: map | no_return
def post!(cluster, url, data, opts \\ []) do
cluster
|> post(url, data, opts)
|> unwrap!()
end
@doc """
Deletes data at a given Elasticsearch URL.
## Examples
iex> Index.create_from_file(Cluster, "posts", "test/support/settings/posts.json")
...> Elasticsearch.delete(Cluster, "/posts")
{:ok, %{"acknowledged" => true}}
It returns an error if the given resource does not exist.
iex> Elasticsearch.delete(Cluster, "/nonexistent")
{:error,
%Elasticsearch.Exception{col: nil, line: nil,
message: "no such index", query: nil,
raw: %{"error" => %{"index" => "nonexistent",
"index_uuid" => "_na_", "reason" => "no such index",
"resource.id" => "nonexistent",
"resource.type" => "index_or_alias",
"root_cause" => [%{"index" => "nonexistent",
"index_uuid" => "_na_", "reason" => "no such index",
"resource.id" => "nonexistent",
"resource.type" => "index_or_alias",
"type" => "index_not_found_exception"}],
"type" => "index_not_found_exception"}, "status" => 404},
status: 404, type: "index_not_found_exception"}}
"""
@spec delete(Cluster.t(), url) :: response
@spec delete(Cluster.t(), url, opts) :: response
def delete(cluster, url, opts \\ []) do
config = Config.get(cluster)
config
|> config.api.request(:delete, url, "", opts)
|> format()
end
@doc """
Same as `delete/1`, but returns the response and raises errors.
## Examples
iex> Index.create_from_file(Cluster, "posts", "test/support/settings/posts.json")
...> Elasticsearch.delete!(Cluster, "/posts")
%{"acknowledged" => true}
Raises an error if the resource is invalid.
iex> Elasticsearch.delete!(Cluster, "/nonexistent")
** (Elasticsearch.Exception) (index_not_found_exception) no such index
"""
@spec delete!(Cluster.t(), url) :: map | no_return
@spec delete!(Cluster.t(), url, opts) :: map | no_return
def delete!(cluster, url, opts \\ []) do
cluster
|> delete(url, opts)
|> unwrap!()
end
@doc """
Determines whether a resource exists at a given Elasticsearch path
## Examples
iex> Index.create_from_file(Cluster, "posts", "test/support/settings/posts.json")
...> Elasticsearch.head(Cluster, "/posts")
{:ok, ""}
It returns an error if the given resource does not exist.
iex> Elasticsearch.head(Cluster, "/nonexistent")
{:error,
%Elasticsearch.Exception{
col: nil,
line: nil,
message: "",
query: nil,
raw: nil,
status: nil,
type: nil
}}
"""
@spec head(Cluster.t(), url) :: response
@spec head(Cluster.t(), url, opts) :: response
def head(cluster, url, opts \\ []) do
config = Config.get(cluster)
config
|> config.api.request(:head, url, "", opts)
|> format()
end
@doc """
Same as `head/1`, but returns the response and raises errors.
## Examples
iex> Index.create_from_file(Cluster, "posts", "test/support/settings/posts.json")
...> Elasticsearch.head!(Cluster, "/posts")
""
Raises an error if the resource is invalid.
iex> Elasticsearch.head!(Cluster, "/nonexistent")
** (Elasticsearch.Exception)
"""
@spec head!(Cluster.t(), url) :: map | no_return
@spec head!(Cluster.t(), url, opts) :: map | no_return
def head!(cluster, url, opts \\ []) do
cluster
|> head(url, opts)
|> unwrap!()
end
defp format({:ok, %{status_code: code, body: body}})
when code >= 200 and code < 300 do
{:ok, body}
end
defp format({:ok, %{body: body}}) do
error = Elasticsearch.Exception.exception(response: body)
{:error, error}
end
defp format(error), do: error
defp unwrap!({:ok, value}), do: value
defp unwrap!({:error, exception}), do: raise(exception)
end
| 31.369478 | 109 | 0.579951 |
ffe1719e4465e76c1519946fd0a2213119658ec1 | 384 | exs | Elixir | programming_elixir_1.3_snippets/protocols/is_collection.exs | benjohns1/elixer-app | 6e866ec084c5e75442c0b70f66e35f61b5b74d34 | [
"MIT"
] | null | null | null | programming_elixir_1.3_snippets/protocols/is_collection.exs | benjohns1/elixer-app | 6e866ec084c5e75442c0b70f66e35f61b5b74d34 | [
"MIT"
] | null | null | null | programming_elixir_1.3_snippets/protocols/is_collection.exs | benjohns1/elixer-app | 6e866ec084c5e75442c0b70f66e35f61b5b74d34 | [
"MIT"
] | null | null | null | defprotocol Collection do
@fallback_to_any true
def is_collection?(value)
end
# The built-in collection-like types all answer true.
defimpl Collection, for: [List, Tuple, BitString, Map] do
  def is_collection?(_), do: true
end
# Catch-all (reached via @fallback_to_any): everything else is not a collection.
defimpl Collection, for: Any do
  def is_collection?(_), do: false
end
# Demo: report whether each sample value is considered a collection.
[1, 1.0, [1, 2], {1, 2}, %{}, "cat", 'cat']
|> Enum.each(fn value -> IO.puts "#{inspect value}: #{Collection.is_collection?(value)}" end)
ffe181dc78a34ff40921cd92b8662142322d93c8 | 20,309 | ex | Elixir | lib/teiserver/coordinator/consul_commands.ex | Jazcash/teiserver | fec14784901cb2965d8c1350fe84107c57451877 | [
"MIT"
] | 1 | 2021-02-23T22:34:12.000Z | 2021-02-23T22:34:12.000Z | lib/teiserver/coordinator/consul_commands.ex | Jazcash/teiserver | fec14784901cb2965d8c1350fe84107c57451877 | [
"MIT"
] | null | null | null | lib/teiserver/coordinator/consul_commands.ex | Jazcash/teiserver | fec14784901cb2965d8c1350fe84107c57451877 | [
"MIT"
] | null | null | null | defmodule Teiserver.Coordinator.ConsulCommands do
require Logger
alias Teiserver.Coordinator.ConsulServer
alias Teiserver.{Coordinator, User, Client}
alias Teiserver.Battle.{Lobby, LobbyChat}
# alias Phoenix.PubSub
# alias Teiserver.Data.Types, as: T
@doc """
Command has structure:
%{
raw: string,
remaining: string,
command: nil | string,
senderid: userid
}
"""
@split_delay 30_000
@spec handle_command(Map.t(), Map.t()) :: Map.t()
@default_ban_reason "Banned"
#################### For everybody
  # "$s" is shorthand for "$status".
  def handle_command(%{command: "s"} = cmd, state), do: handle_command(Map.put(cmd, :command, "status"), state)
  # Sends the requester a private summary: locks, gatekeeper mode, their queue
  # position (if queued) and the whole join queue.
  def handle_command(%{command: "status", senderid: senderid} = _cmd, state) do
    locks = state.locks
    |> Enum.map(fn l -> to_string(l) end)
    |> Enum.join(", ")
    # get_queue_position/2 (defined elsewhere in this module) returns -1 when
    # the sender is not queued; the nil entry is filtered out below.
    pos_str = case get_queue_position(state.join_queue, senderid) do
      -1 ->
        nil
      pos ->
        "You are position #{pos} in the queue"
    end
    queue_string = state.join_queue
    |> Enum.map(&User.get_username/1)
    |> Enum.join(", ")
    status_msg = [
      "Status for battle ##{state.lobby_id}",
      "Locks: #{locks}",
      "Gatekeeper: #{state.gatekeeper}",
      pos_str,
      "Join queue: #{queue_string}",
    ]
    |> Enum.filter(fn s -> s != nil end)
    Coordinator.send_to_user(senderid, status_msg)
    state
  end
  # Points the user at the public command reference.
  def handle_command(%{command: "help", senderid: senderid} = _cmd, state) do
    status_msg = [
      "Command list can currently be found at https://github.com/beyond-all-reason/teiserver/blob/master/lib/teiserver/coordinator/coordinator_lib.ex"
    ]
    Coordinator.send_to_user(senderid, status_msg)
    state
  end
def handle_command(%{command: "splitlobby", senderid: senderid} = cmd, %{split: nil} = state) do
sender_name = User.get_username(senderid)
LobbyChat.sayex(state.coordinator_id, "#{sender_name} is moving to a new lobby, to follow them say $y. If you want to follow someone else then say $follow <name> and you will follow that user. The split will take place in #{round(@split_delay/1_000)} seconds, you can change your mind at any time. Say $n to cancel your decision and stay here.", state.lobby_id)
LobbyChat.sayprivateex(state.coordinator_id, senderid, "Splitlobby sequence started. If you stay in this lobby you will be moved to a random empty lobby. If you choose a lobby yourself then anybody ", state.lobby_id)
split_uuid = UUID.uuid4()
new_split = %{
split_uuid: split_uuid,
first_splitter_id: senderid,
splitters: %{}
}
Logger.warn("Started split lobby #{Kernel.inspect new_split}")
:timer.send_after(@split_delay, {:do_split, split_uuid})
ConsulServer.say_command(cmd, state)
%{state | split: new_split}
end
def handle_command(%{command: "splitlobby", senderid: senderid} = _cmd, state) do
LobbyChat.sayprivateex(state.coordinator_id, senderid, "A split is already underway, you cannot start a new one yet", state.lobbyid)
state
end
  # Split commands for when there is no split happening
  # $y/$n/$follow are silently ignored when no split vote is active.
  def handle_command(%{command: "y"}, %{split: nil} = state), do: state
  def handle_command(%{command: "n"}, %{split: nil} = state), do: state
  def handle_command(%{command: "follow"}, %{split: nil} = state), do: state
  # And for when it is
  # $n — withdraw the sender from the split.
  def handle_command(%{command: "n", senderid: senderid} = cmd, state) do
    Logger.warn("Split.n from #{senderid}")
    new_splitters = Map.delete(state.split.splitters, senderid)
    new_split = %{state.split | splitters: new_splitters}
    ConsulServer.say_command(cmd, state)
    %{state | split: new_split}
  end
  # $y — follow the split initiator (recorded as the literal `true`).
  def handle_command(%{command: "y", senderid: senderid} = cmd, state) do
    Logger.warn("Split.y from #{senderid}")
    new_splitters = Map.put(state.split.splitters, senderid, true)
    new_split = %{state.split | splitters: new_splitters}
    ConsulServer.say_command(cmd, state)
    %{state | split: new_split}
  end
  # $follow <name> — follow a specific player; following the initiator is
  # normalised to `true`, anyone else is recorded by their user id.
  def handle_command(%{command: "follow", remaining: target, senderid: senderid} = cmd, state) do
    case ConsulServer.get_user(target, state) do
      nil ->
        ConsulServer.say_command(%{cmd | error: "no user found"}, state)
      player_id ->
        Logger.warn("Split.follow from #{senderid}")
        new_splitters = if player_id == state.split.first_splitter_id do
          Map.put(state.split.splitters, senderid, true)
        else
          Map.put(state.split.splitters, senderid, player_id)
        end
        new_split = %{state.split | splitters: new_splitters}
        ConsulServer.say_command(cmd, state)
        %{state | split: new_split}
    end
  end
  # $joinq — appends a spectator to the join queue (players are rejected) and
  # privately reports their 1-based position.
  def handle_command(%{command: "joinq", senderid: senderid} = cmd, state) do
    case Client.get_client_by_id(senderid) do
      %{player: true} ->
        LobbyChat.sayprivateex(state.coordinator_id, senderid, "You are already a player, you can't join the queue!", state.lobby_id)
        state
      _ ->
        new_state = case Enum.member?(state.join_queue, senderid) do
          false ->
            new_queue = state.join_queue ++ [senderid]
            pos = get_queue_position(new_queue, senderid) + 1
            LobbyChat.sayprivateex(state.coordinator_id, senderid, "You are now in the join-queue at position #{pos}", state.lobby_id)
            %{state | join_queue: new_queue}
          true ->
            # Already queued: just report the current position, no state change.
            pos = get_queue_position(state.join_queue, senderid) + 1
            LobbyChat.sayprivateex(state.coordinator_id, senderid, "You were already in the join-queue at position #{pos}", state.lobby_id)
            state
        end
        ConsulServer.say_command(cmd, new_state)
    end
  end
  # $leaveq — removes the sender from the join queue (no-op if absent).
  def handle_command(%{command: "leaveq", senderid: senderid} = cmd, state) do
    new_queue = List.delete(state.join_queue, senderid)
    new_state = %{state | join_queue: new_queue}
    LobbyChat.sayprivateex(state.coordinator_id, senderid, "You have been removed from the join queue", state.lobby_id)
    ConsulServer.say_command(cmd, new_state)
  end
#################### Host and Moderator
def handle_command(%{command: "gatekeeper", remaining: mode} = cmd, state) do
state = case mode do
"friends" ->
%{state | gatekeeper: :friends}
"friendsplay" ->
%{state | gatekeeper: :friendsplay}
"clan" ->
%{state | gatekeeper: :clan}
"default" ->
%{state | gatekeeper: :default}
_ ->
state
end
ConsulServer.say_command(cmd, state)
end
def handle_command(%{command: "lock", remaining: remaining} = cmd, state) do
new_locks = case get_lock(remaining) do
nil -> state.locks
lock ->
ConsulServer.say_command(cmd, state)
[lock | state.locks] |> Enum.uniq
end
%{state | locks: new_locks}
end
def handle_command(%{command: "unlock", remaining: remaining} = cmd, state) do
new_locks = case get_lock(remaining) do
nil -> state.locks
lock ->
ConsulServer.say_command(cmd, state)
List.delete(state.locks, lock)
end
%{state | locks: new_locks}
end
def handle_command(%{command: "welcome-message", remaining: remaining} = cmd, state) do
new_state = case String.trim(remaining) do
"" ->
%{state | welcome_message: nil}
msg ->
Lobby.say(cmd.senderid, "New welcome message set to: #{msg}", state.lobby_id)
%{state | welcome_message: msg}
end
ConsulServer.broadcast_update(new_state)
end
  # $specunready — rings every unready player and moves them to spectator.
  def handle_command(%{command: "specunready"} = cmd, state) do
    battle = Lobby.get_lobby!(state.lobby_id)
    battle.players
    |> Enum.each(fn player_id ->
      client = Client.get_client_by_id(player_id)
      if client.ready == false and client.player == true do
        User.ring(player_id, state.coordinator_id)
        Lobby.force_change_client(state.coordinator_id, player_id, %{player: false})
      end
    end)
    ConsulServer.say_command(cmd, state)
  end
  # $makeready (no argument) — rings every unready player and forces them ready.
  def handle_command(%{command: "makeready", remaining: ""} = cmd, state) do
    battle = Lobby.get_lobby!(state.lobby_id)
    battle.players
    |> Enum.each(fn player_id ->
      client = Client.get_client_by_id(player_id)
      if client.ready == false and client.player == true do
        User.ring(player_id, state.coordinator_id)
        Lobby.force_change_client(state.coordinator_id, player_id, %{ready: true})
      end
    end)
    ConsulServer.say_command(cmd, state)
  end
  # $makeready <name> — rings the named user and forces just them ready.
  def handle_command(%{command: "makeready", remaining: target} = cmd, state) do
    case ConsulServer.get_user(target, state) do
      nil ->
        ConsulServer.say_command(%{cmd | error: "no user found"}, state)
      player_id ->
        User.ring(player_id, state.coordinator_id)
        Lobby.force_change_client(state.coordinator_id, player_id, %{ready: true})
        ConsulServer.say_command(cmd, state)
    end
  end
#################### Moderator only
# ----------------- General commands
  # $cancelsplit — no-op when there is no split in progress.
  def handle_command(%{command: "cancelsplit"}, %{split: nil} = state) do
    state
  end
  # $cancelsplit — abort the active split shortly after echoing the command.
  def handle_command(%{command: "cancelsplit"} = cmd, state) do
    :timer.send_after(50, :cancel_split)
    ConsulServer.say_command(cmd, state)
  end
  # $dosplit — no-op when there is no split in progress.
  def handle_command(%{command: "dosplit"}, %{split: nil} = state) do
    state
  end
  # $dosplit — trigger the active split immediately instead of waiting for the
  # original timer (the uuid ensures only the matching split is executed).
  def handle_command(%{command: "dosplit"} = cmd, %{split: split} = state) do
    :timer.send_after(50, {:do_split, split.split_uuid})
    ConsulServer.say_command(cmd, state)
  end
  # $rename <name> — renames the lobby.
  def handle_command(%{command: "rename", remaining: new_name} = cmd, state) do
    Lobby.rename_lobby(state.lobby_id, new_name)
    ConsulServer.say_command(cmd, state)
  end
  # $pull <name> — force-adds the named user to this battle lobby.
  def handle_command(%{command: "pull", remaining: target} = cmd, state) do
    case ConsulServer.get_user(target, state) do
      nil ->
        ConsulServer.say_command(%{cmd | error: "no user found"}, state)
      target_id ->
        Lobby.force_add_user_to_battle(target_id, state.lobby_id)
        ConsulServer.say_command(cmd, state)
    end
  end
  # $settag <key> <value> — sets a script tag on the battle (key lowercased);
  # anything after the first two words is ignored.
  def handle_command(%{command: "settag", remaining: remaining} = cmd, state) do
    case String.split(remaining, " ") do
      [key, value | _] ->
        battle = Lobby.get_lobby!(state.lobby_id)
        new_tags = Map.put(battle.tags, String.downcase(key), value)
        Lobby.set_script_tags(state.lobby_id, new_tags)
        ConsulServer.say_command(cmd, state)
      _ ->
        ConsulServer.say_command(%{cmd | error: "no regex match"}, state)
    end
  end
# ----------------- Moderation commands
def handle_command(cmd = %{command: "modwarn", remaining: remaining}, state) do
[username, hours | reason] = String.split(remaining, " ")
reason = Enum.join(reason, " ")
userid = ConsulServer.get_user(username, state)
until = "#{hours} hours"
case Central.Account.ReportLib.perform_action(%{}, "Warn", until) do
{:ok, expires} ->
{:ok, _report} =
Central.Account.create_report(%{
"location" => "battle-lobby",
"location_id" => nil,
"reason" => reason,
"reporter_id" => cmd.senderid,
"target_id" => userid,
"response_text" => reason,
"response_action" => "Warn",
"expires" => expires,
"responder_id" => cmd.senderid
})
user = User.get_user_by_id(userid)
sender = User.get_user_by_id(cmd.senderid)
LobbyChat.say(state.coordinator_id, "#{user.name} warned for #{hours} hours by #{sender.name}, reason: #{reason}", state.lobby_id)
_ ->
LobbyChat.sayprivateex(state.coordinator_id, cmd.senderid, "Unable to find a user by that name", state.lobby_id)
end
state
end
def handle_command(cmd = %{command: "modmute", remaining: remaining}, state) do
[username, hours | reason] = String.split(remaining, " ")
reason = Enum.join(reason, " ")
userid = ConsulServer.get_user(username, state)
until = "#{hours} hours"
case Central.Account.ReportLib.perform_action(%{}, "Mute", until) do
{:ok, expires} ->
{:ok, _report} =
Central.Account.create_report(%{
"location" => "battle-lobby",
"location_id" => nil,
"reason" => reason,
"reporter_id" => cmd.senderid,
"target_id" => userid,
"response_text" => reason,
"response_action" => "Mute",
"expires" => expires,
"responder_id" => cmd.senderid
})
user = User.get_user_by_id(userid)
sender = User.get_user_by_id(cmd.senderid)
LobbyChat.say(state.coordinator_id, "#{user.name} muted for #{hours} hours by #{sender.name}, reason: #{reason}", state.lobby_id)
_ ->
LobbyChat.sayprivateex(state.coordinator_id, cmd.senderid, "Unable to find a user by that name", state.lobby_id)
end
state
end
def handle_command(cmd = %{command: "modban", remaining: remaining}, state) do
[username, hours | reason] = String.split(remaining, " ")
reason = Enum.join(reason, " ")
userid = ConsulServer.get_user(username, state)
until = "#{hours} hours"
case Central.Account.ReportLib.perform_action(%{}, "Ban", until) do
{:ok, expires} ->
{:ok, _report} =
Central.Account.create_report(%{
"location" => "battle-lobby",
"location_id" => nil,
"reason" => reason,
"reporter_id" => cmd.senderid,
"target_id" => userid,
"response_text" => reason,
"response_action" => "Ban",
"expires" => expires,
"responder_id" => cmd.senderid
})
user = User.get_user_by_id(userid)
sender = User.get_user_by_id(cmd.senderid)
LobbyChat.say(state.coordinator_id, "#{user.name} banned for #{hours} hours by #{sender.name}, reason: #{reason}", state.lobby_id)
_ ->
LobbyChat.sayprivateex(state.coordinator_id, cmd.senderid, "Unable to find a user by that name", state.lobby_id)
end
state
end
def handle_command(%{command: "speclock", remaining: target} = cmd, state) do
case ConsulServer.get_user(target, state) do
nil ->
state
target_id ->
ban = new_ban(%{level: :spectator, by: cmd.senderid}, state)
new_bans = Map.put(state.bans, target_id, ban)
Lobby.force_change_client(state.coordinator_id, target_id, %{player: false})
ConsulServer.say_command(cmd, state)
%{state | bans: new_bans}
end
end
def handle_command(%{command: "forceplay", remaining: target} = cmd, state) do
case ConsulServer.get_user(target, state) do
nil ->
state
target_id ->
Lobby.force_change_client(state.coordinator_id, target_id, %{player: true, ready: true})
ConsulServer.say_command(cmd, state)
end
end
def handle_command(%{command: "timeout", remaining: target} = cmd, state) do
[target | reason_list] = String.split(target, " ")
case ConsulServer.get_user(target, state) do
nil ->
ConsulServer.say_command(%{cmd | error: "no user found"}, state)
target_id ->
reason = if reason_list == [], do: "You have been given a timeout on the naughty step", else: Enum.join(reason_list, " ")
timeout = new_timeout(%{level: :banned, by: cmd.senderid, reason: reason}, state)
new_timeouts = Map.put(state.timeouts, target_id, timeout)
Lobby.kick_user_from_battle(target_id, state.lobby_id)
ConsulServer.say_command(cmd, state)
%{state | timeouts: new_timeouts}
|> ConsulServer.broadcast_update("timeout")
end
end
def handle_command(%{command: "lobbykick", remaining: target} = cmd, state) do
case ConsulServer.get_user(target, state) do
nil ->
ConsulServer.say_command(%{cmd | error: "no user found"}, state)
target_id ->
Lobby.kick_user_from_battle(target_id, state.lobby_id)
ConsulServer.say_command(cmd, state)
end
end
def handle_command(%{command: "lobbyban", remaining: target} = cmd, state) do
[target | reason_list] = String.split(target, " ")
case ConsulServer.get_user(target, state) do
nil ->
ConsulServer.say_command(%{cmd | error: "no user found"}, state)
target_id ->
reason = if reason_list == [], do: @default_ban_reason, else: Enum.join(reason_list, " ")
ban = new_ban(%{level: :banned, by: cmd.senderid, reason: reason}, state)
new_bans = Map.put(state.bans, target_id, ban)
Lobby.kick_user_from_battle(target_id, state.lobby_id)
ConsulServer.say_command(cmd, state)
%{state | bans: new_bans}
|> ConsulServer.broadcast_update("ban")
end
end
def handle_command(%{command: "lobbybanmult", remaining: targets} = cmd, state) do
{targets, reason} = case String.split(targets, "!!") do
[t] -> {t, @default_ban_reason}
[t, r | _] -> {t, String.trim(r)}
end
ConsulServer.say_command(cmd, state)
String.split(targets, " ")
|> Enum.reduce(state, fn (target, acc) ->
case ConsulServer.get_user(target, acc) do
nil ->
acc
target_id ->
ban = new_ban(%{level: :banned, by: cmd.senderid, reason: reason}, acc)
new_bans = Map.put(acc.bans, target_id, ban)
Lobby.kick_user_from_battle(target_id, acc.lobby_id)
%{acc | bans: new_bans}
|> ConsulServer.broadcast_update("ban")
end
end)
end
def handle_command(%{command: "unban", remaining: target} = cmd, state) do
case ConsulServer.get_user(target, state) do
nil ->
ConsulServer.say_command(%{cmd | error: "no user found"}, state)
target_id ->
new_bans = Map.drop(state.bans, [target_id])
ConsulServer.say_command(cmd, state)
%{state | bans: new_bans}
|> ConsulServer.broadcast_update("unban")
end
end
# This is here to make tests easier to run, it's not expected you'll use this and it's not in the docs
def handle_command(%{command: "forcespec", remaining: target} = cmd, state) do
case ConsulServer.get_user(target, state) do
nil ->
ConsulServer.say_command(%{cmd | error: "no user found"}, state)
target_id ->
ban = new_ban(%{level: :spectator, by: cmd.senderid, reason: "forcespec"}, state)
new_bans = Map.put(state.bans, target_id, ban)
Lobby.force_change_client(state.coordinator_id, target_id, %{player: false})
ConsulServer.say_command(cmd, state)
%{state | bans: new_bans}
|> ConsulServer.broadcast_update("ban")
end
end
  # Replaces this lobby's consul state with a freshly-built empty state and
  # broadcasts the "reset" to clients. The incoming command itself is unused.
  def handle_command(%{command: "reset"} = _cmd, state) do
    ConsulServer.empty_state(state.lobby_id)
    |> ConsulServer.broadcast_update("reset")
  end
  #################### Internal commands
  # Would need to be sent by internal since battlestatus isn't part of the command queue
  # Applies `new_status` to `target_id`'s client on behalf of the coordinator;
  # consul state itself is unchanged.
  def handle_command(%{command: "change-battlestatus", remaining: target_id, status: new_status}, state) do
    Lobby.force_change_client(state.coordinator_id, target_id, new_status)
    state
  end
def handle_command(cmd, state) do
if Map.has_key?(cmd, :raw) do
LobbyChat.do_say(cmd.senderid, cmd.raw, state.lobby_id)
else
Logger.error("No handler in consul_server for command #{Kernel.inspect cmd}")
end
state
end
  # Builds a ban record from `data`, filling in defaults for any missing key:
  # issued by the coordinator, @default_ban_reason, full :banned level.
  defp new_ban(data, state) do
    Map.merge(%{
      by: state.coordinator_id,
      reason: @default_ban_reason,
      # :player | :spectator | :banned
      level: :banned
    }, data)
  end
  # Builds a timeout record from `data`, defaulting the issuer to the
  # coordinator, a standard reason message, and the full :banned level.
  defp new_timeout(data, state) do
    Map.merge(%{
      by: state.coordinator_id,
      reason: "You have been given a timeout on the naughty step",
      # :player | :spectator | :banned
      level: :banned
    }, data)
  end
@spec get_lock(String.t()) :: atom | nil
defp get_lock(name) do
case name |> String.downcase |> String.trim do
"team" -> :team
"allyid" -> :allyid
"player" -> :player
"spectator" -> :spectator
"side" -> :side
_ -> nil
end
end
defp get_queue_position(queue, userid) do
case Enum.member?(queue, userid) do
true ->
Enum.with_index(queue)
|> Map.new
|> Map.get(userid)
false ->
-1
end
end
end
| 34.597956 | 365 | 0.637648 |
ffe18a11598429bcb81511f1dd28939a7cb80906 | 1,152 | ex | Elixir | lib/apportion.ex | garyf/apportion_ex | 1f65f2087976e920d303463f72d72b94e0534325 | [
"MIT"
] | 2 | 2016-09-25T22:16:54.000Z | 2020-01-13T19:33:19.000Z | lib/apportion.ex | garyf/apportion_ex | 1f65f2087976e920d303463f72d72b94e0534325 | [
"MIT"
] | null | null | null | lib/apportion.ex | garyf/apportion_ex | 1f65f2087976e920d303463f72d72b94e0534325 | [
"MIT"
defmodule Apportion do
  @moduledoc """
  Top level interface, or API, to proportionally distribute a quantity
  """
  alias Apportion.Algorithm.EqualProportions
  alias Apportion.Setup
  alias Apportion.Util
  @doc """
  Distributes a quantity proportionally among recipients per relative weights using the
  'equal proportions' algorithm
  ## Example
  iex> weights = %{a: 41, b: 32, c: 27}
  ...> Apportion.distribute(weights, 7)
  %{a: 3, b: 2, c: 2}
  """
  def distribute(weights, quantity, options \\ []) do
    portions = Setup.initial_portions(Map.keys(weights), options)
    allocated = Util.map_values_sum(portions)
    Setup.validate(quantity, allocated)
    assign_remaining(weights, portions, allocated, quantity)
  end
  # Recursively hand out one unit at a time until the allocated total equals
  # the requested quantity (expressed as a repeated-binding match below).
  defp assign_remaining(_weights, portions, total, total), do: portions
  defp assign_remaining(weights, portions, allocated, quantity) do
    recipient = EqualProportions.recipient(weights, portions)
    updated = Map.update!(portions, recipient, &(&1 + 1))
    assign_remaining(weights, updated, allocated + 1, quantity)
  end
end
| 32 | 87 | 0.711806 |
ffe20dcd7b57d580c4edf99b9c0d2958ba7d6db9 | 1,833 | exs | Elixir | mix.exs | Iluminai/bow | b3ccefb1aaac9fb55e4f9f81832a9f1b7141c2b4 | [
"MIT"
] | null | null | null | mix.exs | Iluminai/bow | b3ccefb1aaac9fb55e4f9f81832a9f1b7141c2b4 | [
"MIT"
] | 2 | 2021-06-28T23:04:53.000Z | 2021-06-29T18:31:07.000Z | mix.exs | Iluminai/bow | b3ccefb1aaac9fb55e4f9f81832a9f1b7141c2b4 | [
"MIT"
defmodule Bow.Mixfile do
  use Mix.Project
  # Library version; also used below as the ExDoc source ref.
  @version "0.4.0"
  def project do
    [
      app: :bow,
      version: @version,
      elixir: "~> 1.11",
      elixirc_paths: elixirc_paths(Mix.env()),
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      deps: deps(),
      test_coverage: [tool: Coverex.Task],
      package: package(),
      dialyzer: dialyzer(),
      # Docs
      name: "Bow",
      docs: docs()
    ]
  end
  def application do
    [
      extra_applications: [:logger]
    ]
  end
  # Hex package metadata.
  defp package do
    [
      description: "File uploads for Elixir.",
      maintainers: [],
      licenses: ["MIT"],
      links: %{"Github" => "https://github.com/iluminai/bow"}
    ]
  end
  # ExDoc configuration.
  defp docs do
    [
      main: "readme",
      extras: ["README.md"],
      source_url: "https://github.com/iluminai/bow",
      source_ref: @version
    ]
  end
  # Compile test support files only under the :test environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]
  defp deps do
    [
      {:plug, "~> 1.11"},
      {:tesla, "~> 1.3"},
      {:ecto, "~> 3.6", optional: true},
      {:ecto_sql, "~> 3.6", optional: true},
      {:erlexec, "~> 1.19", optional: true},
      {:ex_aws, "~> 2.2", optional: true},
      {:ex_aws_s3, "~> 2.2", optional: true},
      {:hackney, "~> 1.17"},
      {:sweet_xml, "~> 0.7", optional: true},
      # testing & docs
      {:postgrex, ">= 0.0.0", only: :test},
      {:coverex, "~> 1.5", only: :test},
      {:ex_doc, "~> 0.24", only: :dev},
      {:dialyxir, "~> 1.1", only: :dev}
    ]
  end
  def aliases do
    [
      "ecto.setup": ["ecto.create --quiet", "ecto.migrate --quiet"],
      "ecto.reset": ["ecto.drop --quiet", "ecto.setup"]
    ]
  end
  defp dialyzer do
    [
      plt_add_apps: [:ecto, :ex_aws, :ex_aws_s3, :erlexec]
    ]
  end
end
| 21.068966 | 68 | 0.507365 |
ffe24d5d96be554f659b6de9e23115e3466bbb7d | 1,279 | ex | Elixir | lib/arc/actions/delete.ex | hollar/arc | e2871e02c8aab0aaba885c8d141ed3a51a1ec8a8 | [
"Apache-2.0"
] | 1,213 | 2015-06-18T04:01:20.000Z | 2022-01-19T18:47:23.000Z | lib/arc/actions/delete.ex | hollar/arc | e2871e02c8aab0aaba885c8d141ed3a51a1ec8a8 | [
"Apache-2.0"
] | 260 | 2015-06-18T22:34:58.000Z | 2022-01-06T17:43:29.000Z | lib/arc/actions/delete.ex | hollar/arc | e2871e02c8aab0aaba885c8d141ed3a51a1ec8a8 | [
"Apache-2.0"
defmodule Arc.Actions.Delete do
  # Mixin: gives a definition module a `delete/1` that delegates back here
  # with the definition module as first argument; overridable per definition.
  defmacro __using__(_) do
    quote do
      def delete(args), do: Arc.Actions.Delete.delete(__MODULE__, args)
      defoverridable delete: 1
    end
  end
  # Deletes a stored file; both variants normalise the input into the
  # internal `{%{file_name: path}, scope}` shape.
  def delete(definition, {filepath, scope}) when is_binary(filepath) do
    do_delete(definition, {%{file_name: filepath}, scope})
  end
  def delete(definition, filepath) when is_binary(filepath) do
    do_delete(definition, {%{file_name: filepath}, nil})
  end
  #
  # Private
  #
  # Milliseconds to await each async per-version deletion (configurable via
  # the :arc application env).
  defp version_timeout do
    Application.get_env(:arc, :version_timeout) || 15_000
  end
  defp do_delete(definition, {file, scope}) do
    versions = definition.__versions
    if definition.async do
      versions
      |> Enum.map(&async_delete_version(definition, &1, {file, scope}))
      |> Enum.each(&Task.await(&1, version_timeout()))
    else
      Enum.each(versions, &delete_version(definition, &1, {file, scope}))
    end
    :ok
  end
  defp async_delete_version(definition, version, {file, scope}) do
    Task.async(fn -> delete_version(definition, version, {file, scope}) end)
  end
  defp delete_version(definition, version, {file, scope}) do
    definition.__storage.delete(definition, version, {file, scope})
  end
end
| 27.804348 | 88 | 0.680219 |
ffe26907031f62b0b67c5acc10a9bd76e7a66ac0 | 1,163 | ex | Elixir | web/channels/user_socket.ex | strofcon/scipse | f7597b73dac2e7ffe9f5aa0a403600d9d8ea8eeb | [
"Apache-2.0"
] | null | null | null | web/channels/user_socket.ex | strofcon/scipse | f7597b73dac2e7ffe9f5aa0a403600d9d8ea8eeb | [
"Apache-2.0"
] | null | null | null | web/channels/user_socket.ex | strofcon/scipse | f7597b73dac2e7ffe9f5aa0a403600d9d8ea8eeb | [
"Apache-2.0"
defmodule Scipse.UserSocket do
  use Phoenix.Socket
  ## Channels
  # channel "room:*", Scipse.RoomChannel
  ## Transports
  transport :websocket, Phoenix.Transports.WebSocket
  # transport :longpoll, Phoenix.Transports.LongPoll
  # Socket params come from the client and can be verified here (see
  # `Phoenix.Token` for token-based verification); on success you may place
  # default assigns on the socket, e.g.
  #   {:ok, assign(socket, :user_id, verified_user_id)}
  # and `:error` refuses the connection. This app accepts all connections
  # without verification.
  def connect(_params, socket) do
    {:ok, socket}
  end
  # Socket ids are topics identifying all sockets of one user, enabling e.g.
  #   Scipse.Endpoint.broadcast("users_socket:#{user.id}", "disconnect", %{})
  # to terminate every socket/channel for that user. Returning nil keeps the
  # socket anonymous.
  def id(_socket), do: nil
end
| 30.605263 | 83 | 0.700774 |
ffe2841fb3af0c4247126ca449c8237c82b757ce | 1,993 | ex | Elixir | lib/elixir_jobs/offers/offer.ex | savekirk/elixir_jobs | d7ec0f088a1365f3ae5cbbd6c07c2b3fdde9a946 | [
"MIT"
] | null | null | null | lib/elixir_jobs/offers/offer.ex | savekirk/elixir_jobs | d7ec0f088a1365f3ae5cbbd6c07c2b3fdde9a946 | [
"MIT"
] | null | null | null | lib/elixir_jobs/offers/offer.ex | savekirk/elixir_jobs | d7ec0f088a1365f3ae5cbbd6c07c2b3fdde9a946 | [
"MIT"
defmodule ElixirJobs.Offers.Offer do
  use Ecto.Schema
  import Ecto.Changeset
  alias ElixirJobs.{
    Offers.Offer,
    EctoEnums.JobPlace,
    EctoEnums.JobType
  }
  @primary_key {:id, :binary_id, autogenerate: true}
  @foreign_key_type :binary_id
  schema "offers" do
    field :title, :string
    field :company, :string
    field :description, :string
    field :location, :string
    field :url, :string
    field :slug, :string
    field :job_place, JobPlace
    field :job_type, JobType
    field :published_at, :naive_datetime
    timestamps()
  end
  @required_attrs [:title, :company, :description, :location, :url, :job_place, :job_type]
  @optional_attrs [:published_at, :slug]
  @attributes @required_attrs ++ @optional_attrs
  @doc false
  def changeset(%Offer{} = offer, attrs) do
    offer
    |> cast(attrs, @attributes)
    |> validate_required(@required_attrs)
    |> validate_length(:title, min: 5, max: 50)
    |> validate_length(:company, min: 2, max: 30)
    |> validate_length(:description, min: 10, max: 500)
    |> validate_length(:location, min: 3, max: 50)
    |> validate_length(:url, min: 1, max: 255)
    |> validate_format(:url, ~r/^\b((https?:\/\/?)[^\s()<>]+(?:\([\w\d]+\)|([^[:punct:]\s]|\/)))$/)
    |> validate_inclusion(:job_place, JobPlace.__valid_values__())
    |> validate_inclusion(:job_type, JobType.__valid_values__())
    |> unique_constraint(:slug)
    |> generate_slug()
  end
  # Generates a slug only when the caller did not supply one.
  defp generate_slug(changeset) do
    case get_field(changeset, :slug) do
      nil -> put_change(changeset, :slug, do_generate_slug(changeset))
      _ -> changeset
    end
  end
  # "company-title-<uuid prefix>"; the UUID prefix keeps slugs unique for
  # identical company/title pairs. Ecto.UUID.generate/0 already returns a
  # string, so the previous `to_string/1` call was redundant and is removed.
  defp do_generate_slug(changeset) do
    uid =
      Ecto.UUID.generate()
      |> String.split("-")
      |> List.first()
    "#{slugified_field(changeset, :company)}-#{slugified_field(changeset, :title)}-#{uid}"
  end
  # Shared slugify step for the company and title parts of the slug.
  defp slugified_field(changeset, field) do
    changeset
    |> get_field(field)
    |> Slugger.slugify_downcase()
  end
end
| 25.883117 | 99 | 0.638234 |
ffe28708524bd418a49bdf3a9dcb7975c517fd2a | 1,696 | ex | Elixir | clients/docs/lib/google_api/docs/v1/model/substring_match_criteria.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/docs/lib/google_api/docs/v1/model/substring_match_criteria.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/docs/lib/google_api/docs/v1/model/substring_match_criteria.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Docs.V1.Model.SubstringMatchCriteria do
  @moduledoc """
  A criteria that matches a specific string of text in the document.
  ## Attributes
  *   `matchCase` (*type:* `boolean()`, *default:* `nil`) - Indicates whether the search should respect case:
  - `True`: the search is case sensitive.
  - `False`: the search is case insensitive.
  *   `text` (*type:* `String.t`, *default:* `nil`) - The text to search for in the document.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :matchCase => boolean(),
          :text => String.t()
        }
  # field/2 is provided by GoogleApi.Gax.ModelBase; presumably it registers
  # each attribute for the Poison (de)serialization wired up below.
  field(:matchCase)
  field(:text)
end
# Decoding delegates to the generated model's own decode/2 (from ModelBase).
defimpl Poison.Decoder, for: GoogleApi.Docs.V1.Model.SubstringMatchCriteria do
  def decode(value, options) do
    GoogleApi.Docs.V1.Model.SubstringMatchCriteria.decode(value, options)
  end
end
# Encoding is handled generically by the Gax model base.
defimpl Poison.Encoder, for: GoogleApi.Docs.V1.Model.SubstringMatchCriteria do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 32 | 109 | 0.717571 |
ffe299352339cc175337aabf5c55f25da6487170 | 1,368 | ex | Elixir | test/support/data_case.ex | aeons/elixir-elm-todo | 2dd4577e94eb28025ea2d1ea925626e4ee3b181a | [
"MIT"
] | null | null | null | test/support/data_case.ex | aeons/elixir-elm-todo | 2dd4577e94eb28025ea2d1ea925626e4ee3b181a | [
"MIT"
] | null | null | null | test/support/data_case.ex | aeons/elixir-elm-todo | 2dd4577e94eb28025ea2d1ea925626e4ee3b181a | [
"MIT"
defmodule Todo.DataCase do
  @moduledoc """
  Test case template for tests that need access to the application's data
  layer; helpers defined here are available wherever this module is used.
  If a test case interacts with the database it cannot be async: each test
  then runs inside a transaction that is reset at the start of the next test,
  unless the case is marked as async.
  """
  use ExUnit.CaseTemplate
  using do
    quote do
      alias Todo.Repo
      import Ecto
      import Ecto.Changeset
      import Ecto.Query
      import Todo.DataCase
    end
  end
  setup tags do
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Todo.Repo)
    # Non-async tests share one sandbox connection across processes.
    if !tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(Todo.Repo, {:shared, self()})
    end
    :ok
  end
  @doc """
  A helper that transforms changeset errors into a map of messages.
  assert {:error, changeset} = Accounts.create_user(%{password: "short"})
  assert "password is too short" in errors_on(changeset).password
  assert %{password: ["password is too short"]} = errors_on(changeset)
  """
  def errors_on(changeset) do
    Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
      Enum.reduce(opts, message, fn {key, value}, acc ->
        String.replace(acc, "%{#{key}}", to_string(value))
      end)
    end)
  end
end
| 25.333333 | 77 | 0.67617 |
ffe2ad4d24c25cfc3e9c52cb9247cc9c7908ce46 | 1,497 | ex | Elixir | clients/deployment_manager/lib/google_api/deployment_manager/v2/model/deployment_update_label_entry.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/deployment_manager/lib/google_api/deployment_manager/v2/model/deployment_update_label_entry.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/deployment_manager/lib/google_api/deployment_manager/v2/model/deployment_update_label_entry.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DeploymentManager.V2.Model.DeploymentUpdateLabelEntry do
  @moduledoc """
  ## Attributes
  *   `key` (*type:* `String.t`, *default:* `nil`) -
  *   `value` (*type:* `String.t`, *default:* `nil`) -
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :key => String.t(),
          :value => String.t()
        }
  # field/2 is provided by GoogleApi.Gax.ModelBase; presumably it registers
  # each attribute for the Poison (de)serialization wired up below.
  field(:key)
  field(:value)
end
# Decoding delegates to the generated model's own decode/2 (from ModelBase).
defimpl Poison.Decoder, for: GoogleApi.DeploymentManager.V2.Model.DeploymentUpdateLabelEntry do
  def decode(value, options) do
    GoogleApi.DeploymentManager.V2.Model.DeploymentUpdateLabelEntry.decode(value, options)
  end
end
# Encoding is handled generically by the Gax model base.
defimpl Poison.Encoder, for: GoogleApi.DeploymentManager.V2.Model.DeploymentUpdateLabelEntry do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 29.94 | 95 | 0.724115 |
ffe2b94464d3a7892f1acfa26341fc8f285b2da8 | 2,118 | exs | Elixir | config/dev.exs | seent-app/seent | 6071a0f90f1cb5345faa0c9e476d3c64310a7be9 | [
"0BSD"
] | null | null | null | config/dev.exs | seent-app/seent | 6071a0f90f1cb5345faa0c9e476d3c64310a7be9 | [
"0BSD"
] | 1 | 2020-07-04T17:19:38.000Z | 2020-07-04T17:19:38.000Z | config/dev.exs | seent-app/seent | 6071a0f90f1cb5345faa0c9e476d3c64310a7be9 | [
"0BSD"
use Mix.Config

# Database connection for local development.
config :seent, Seent.Repo,
  username: "postgres",
  password: "postgres",
  database: "seent_dev",
  hostname: "localhost",
  show_sensitive_data_on_connection_error: true,
  pool_size: 10

# Development endpoint: caching disabled, debug errors and code reloading on.
# The webpack watcher recompiles .js/.css sources on change.
config :seent, SeentWeb.Endpoint,
  http: [port: 4000],
  debug_errors: true,
  code_reloader: true,
  check_origin: false,
  watchers: [
    node: [
      "node_modules/webpack/bin/webpack.js",
      "--mode",
      "development",
      "--watch-stdin",
      cd: Path.expand("../assets", __DIR__)
    ]
  ]

# ## SSL Support
#
# To use HTTPS in development, generate a self-signed certificate with
# `mix phx.gen.cert` (Erlang/OTP 20+; see `mix help phx.gen.cert`) and
# replace the `http:` config above with:
#
#     https: [
#       port: 4001,
#       cipher_suite: :strong,
#       keyfile: "priv/cert/selfsigned_key.pem",
#       certfile: "priv/cert/selfsigned.pem"
#     ],
#
# Both `http:` and `https:` keys may be configured together to serve on two
# different ports at once.

# Watch static assets and templates for browser live-reloading.
config :seent, SeentWeb.Endpoint,
  live_reload: [
    patterns: [
      ~r"priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$",
      ~r"priv/gettext/.*(po)$",
      ~r"lib/seent_web/(live|views)/.*(ex)$",
      ~r"lib/seent_web/templates/.*(eex)$"
    ]
  ]

# Keep development logs terse: no metadata, no timestamps.
config :logger, :console, format: "[$level] $message\n"

# Deeper stacktraces help during development; avoid in production where
# building large stacktraces is expensive.
config :phoenix, :stacktrace_depth, 20

# Initialize plugs at runtime for faster development compilation.
config :phoenix, :plug_init_mode, :runtime
| 27.506494 | 68 | 0.687913 |
ffe2c7a579e3dfeffcea6f4c859577a3382dc213 | 3,036 | exs | Elixir | test/oli/delivery/page/objectives_rollup_test.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 1 | 2022-03-17T20:35:47.000Z | 2022-03-17T20:35:47.000Z | test/oli/delivery/page/objectives_rollup_test.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
] | 9 | 2021-11-02T16:52:09.000Z | 2022-03-25T15:14:01.000Z | test/oli/delivery/page/objectives_rollup_test.exs | malav2110/oli-torus | 8af64e762a7c8a2058bd27a7ab8e96539ffc055f | [
"MIT"
defmodule Oli.Delivery.Page.ObjectivesRollupTest do
  use Oli.DataCase
  alias Oli.Delivery.Page.ObjectivesRollup
  describe "page context" do
    setup do
      content = %{
        "stem" => "1",
        "authoring" => %{
          "parts" => [
            %{
              "id" => "1",
              "responses" => [],
              "scoringStrategy" => "best",
              "evaluationStrategy" => "regex"
            }
          ]
        }
      }
      map =
        Seeder.base_project_with_resource2()
        |> Seeder.add_objective("objective one", :o1)
        |> Seeder.add_objective("objective two", :o2)
      o = Map.get(map, :o1).revision.resource_id
      o2 = Map.get(map, :o2).revision.resource_id
      map =
        Seeder.add_activity(
          map,
          %{title: "one", objectives: %{"1" => [o]}, content: content},
          :a1
        )
        |> Seeder.add_activity(%{title: "two", content: %{"stem" => "3"}}, :a2)
        |> Seeder.add_user(%{}, :user1)
      # All three pages share the same title and content and differ only in
      # their attached objectives, so build them from a single template
      # (previously three near-identical attrs maps were spelled out).
      page_attrs = fn objectives ->
        %{
          title: "page1",
          content: %{
            "model" => [
              %{"type" => "activity-reference", "activity_id" => Map.get(map, :a1).resource.id},
              %{"type" => "activity-reference", "activity_id" => Map.get(map, :a2).resource.id}
            ]
          },
          objectives: objectives
        }
      end
      map
      |> Seeder.add_page(page_attrs.(%{"attached" => []}), :p1)
      |> Seeder.add_page(page_attrs.(%{"attached" => [o2]}), :p2)
      # p3 exercises a malformed objectives map (no "attached" key).
      |> Seeder.add_page(page_attrs.(%{}), :p3)
    end
    test "rolls up correctly when directly attached to page",
         %{
           project: project,
           p1: p1,
           p2: p2,
           p3: p3,
           a1: a1,
           a2: a2
         } do
      activity_revisions = [a1.revision, a2.revision]
      # Test that when none are attached
      assert [] == ObjectivesRollup.rollup_objectives(
        p1.revision, activity_revisions, Oli.Publishing.AuthoringResolver, project.slug)
      # Tests when one is attached
      assert ["objective two"] == ObjectivesRollup.rollup_objectives(
        p2.revision, activity_revisions, Oli.Publishing.AuthoringResolver, project.slug)
      # Tests when objectives map is malformed (simply an empty map in this case)
      assert [] == ObjectivesRollup.rollup_objectives(
        p3.revision, activity_revisions, Oli.Publishing.AuthoringResolver, project.slug)
    end
  end
end
| 29.475728 | 94 | 0.520751 |
ffe2cb4c86dc6e908fe5105e542d8a24141aa773 | 782 | ex | Elixir | apps/idp/src/idp/permissions/permissions.ex | lbrty/idp-backend | 81d5f10ef6177a1e678b994331c5a09abbdca8d6 | [
"Apache-2.0"
] | null | null | null | apps/idp/src/idp/permissions/permissions.ex | lbrty/idp-backend | 81d5f10ef6177a1e678b994331c5a09abbdca8d6 | [
"Apache-2.0"
] | null | null | null | apps/idp/src/idp/permissions/permissions.ex | lbrty/idp-backend | 81d5f10ef6177a1e678b994331c5a09abbdca8d6 | [
"Apache-2.0"
defmodule Idp.Permissions do
  @moduledoc false
  use Idp.Base.Query
  alias Idp.Permissions.{Permission, PermissionForms}
  # Every permission record.
  def all, do: Repo.all(Permission)
  def find_by_id(permission_id) do
    Repo.one(from p in Permission, where: p.id == ^permission_id)
  end
  def find_by_project(project_id) do
    Repo.one(from p in Permission, where: p.project_id == ^project_id)
  end
  def create(params) do
    Repo.insert(PermissionForms.new(%Permission{}, params))
  end
  def update(%Permission{} = permission, params) do
    Repo.update(PermissionForms.update(permission, params))
  end
  # Deletes by id, returning {:error, :not_found} when no record exists.
  def delete(permission_id) do
    case find_by_id(permission_id) do
      nil -> {:error, :not_found}
      permission -> Repo.delete(permission)
    end
  end
end
| 21.722222 | 70 | 0.675192 |
ffe2e70d52a6824dbd74a02c851830a71dcc2956 | 3,136 | ex | Elixir | clients/health_care/lib/google_api/health_care/v1beta1/model/export_dicom_data_request.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/health_care/lib/google_api/health_care/v1beta1/model/export_dicom_data_request.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/health_care/lib/google_api/health_care/v1beta1/model/export_dicom_data_request.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.HealthCare.V1beta1.Model.ExportDicomDataRequest do
  @moduledoc """
  Exports data from the specified DICOM store. If a given resource, such as a DICOM object with the same SOPInstance UID, already exists in the output, it is overwritten with the version in the source dataset. Exported DICOM data persists when the DICOM store from which it was exported is deleted.
  ## Attributes
  *   `bigqueryDestination` (*type:* `GoogleApi.HealthCare.V1beta1.Model.GoogleCloudHealthcareV1beta1DicomBigQueryDestination.t`, *default:* `nil`) - The BigQuery output destination. You can only export to a BigQuery dataset that's in the same project as the DICOM store you're exporting from. The Cloud Healthcare Service Agent requires two IAM roles on the BigQuery location: `roles/bigquery.dataEditor` and `roles/bigquery.jobUser`.
  *   `filterConfig` (*type:* `GoogleApi.HealthCare.V1beta1.Model.DicomFilterConfig.t`, *default:* `nil`) - Specifies the filter configuration.
  *   `gcsDestination` (*type:* `GoogleApi.HealthCare.V1beta1.Model.GoogleCloudHealthcareV1beta1DicomGcsDestination.t`, *default:* `nil`) - The Cloud Storage output destination. The Cloud Healthcare Service Agent requires the `roles/storage.objectAdmin` Cloud IAM roles on the Cloud Storage location.
  """
  use GoogleApi.Gax.ModelBase
  @type t :: %__MODULE__{
          :bigqueryDestination =>
            GoogleApi.HealthCare.V1beta1.Model.GoogleCloudHealthcareV1beta1DicomBigQueryDestination.t(),
          :filterConfig => GoogleApi.HealthCare.V1beta1.Model.DicomFilterConfig.t(),
          :gcsDestination =>
            GoogleApi.HealthCare.V1beta1.Model.GoogleCloudHealthcareV1beta1DicomGcsDestination.t()
        }
  # field/2 is provided by GoogleApi.Gax.ModelBase; the `as:` option points
  # nested attributes at their own model modules for (de)serialization.
  field(:bigqueryDestination,
    as: GoogleApi.HealthCare.V1beta1.Model.GoogleCloudHealthcareV1beta1DicomBigQueryDestination
  )
  field(:filterConfig, as: GoogleApi.HealthCare.V1beta1.Model.DicomFilterConfig)
  field(:gcsDestination,
    as: GoogleApi.HealthCare.V1beta1.Model.GoogleCloudHealthcareV1beta1DicomGcsDestination
  )
end
# Decoding delegates to the generated model's own decode/2 (from ModelBase).
defimpl Poison.Decoder, for: GoogleApi.HealthCare.V1beta1.Model.ExportDicomDataRequest do
  def decode(value, options) do
    GoogleApi.HealthCare.V1beta1.Model.ExportDicomDataRequest.decode(value, options)
  end
end
# Encoding is handled generically by the Gax model base.
defimpl Poison.Encoder, for: GoogleApi.HealthCare.V1beta1.Model.ExportDicomDataRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 51.409836 | 435 | 0.777423 |
ffe2f173539ac55fefeaa0e33006c28e86d6b186 | 2,626 | exs | Elixir | exercises/concept/rpn-calculator-output/test/rpn_calculator/output_test.exs | jaimeiniesta/elixir-1 | e8ddafeb313822645e0cd76743955a5c728a84c5 | [
"MIT"
] | 343 | 2017-06-22T16:28:28.000Z | 2022-03-25T21:33:32.000Z | exercises/concept/rpn-calculator-output/test/rpn_calculator/output_test.exs | jaimeiniesta/elixir-1 | e8ddafeb313822645e0cd76743955a5c728a84c5 | [
"MIT"
] | 583 | 2017-06-19T10:48:40.000Z | 2022-03-28T21:43:12.000Z | exercises/concept/rpn-calculator-output/test/rpn_calculator/output_test.exs | jaimeiniesta/elixir-1 | e8ddafeb313822645e0cd76743955a5c728a84c5 | [
"MIT"
defmodule RPNCalculator.OutputTest do
  use ExUnit.Case

  setup do
    # Delete the file a test may have written so each test starts clean.
    on_exit(fn -> File.rm("filename") end)
  end

  # Test double for the `resource` argument of RPNCalculator.Output.write/3.
  # Each call messages the test process so assert_received/2 can verify the
  # open/close lifecycle.
  #
  # NOTE(review): the "<nil>" default has no matching case branch, so calling
  # open/0 would raise CaseClauseError — tests always pass an explicit name.
  def open(filename \\ "<nil>") do
    send(self(), {:open, filename})

    case filename do
      # Real, writable file handle.
      "filename" -> File.open(filename, [:write])
      # A dead PID: writing to it raises, simulating an I/O failure.
      "bad_filename" -> {:ok, spawn(fn -> nil end)}
    end
  end

  # Counterpart to open/1: records the close call, then closes the handle.
  def close(file) do
    send(self(), :close)
    File.close(file)
  end

  describe "write/3" do
    @tag task_id: 1
    test "returns ok tuple if function succeeds" do
      resource = __MODULE__
      filename = "filename"
      equation = "1 1 +"

      assert {:ok, equation} == RPNCalculator.Output.write(resource, filename, equation)
    end

    @use_open_error_message """
    Use the open/1 function from the `resource` specified in the arguments to open `filename`.
    E.g.) resource.open(filename)
    """
    @tag task_id: 1
    test "opens resource" do
      resource = __MODULE__
      filename = "filename"
      equation = "1 1 +"

      RPNCalculator.Output.write(resource, filename, equation)

      assert_received {:open, ^filename}, @use_open_error_message
    end

    @use_write_error_message """
    Use IO.write/2 to write to the opened `filename`.
    """
    @tag task_id: 2
    test "writes to resource" do
      resource = __MODULE__
      filename = "filename"
      equation = "1 1 +"

      {:ok, ^equation} = RPNCalculator.Output.write(resource, filename, equation)

      assert File.read!(filename) == "1 1 +", @use_write_error_message
    end

    @tag task_id: 2
    test "rescues and returns error tuple from raised error" do
      resource = __MODULE__
      bad_filename = "bad_filename"
      equation = "1 1 +"

      assert {:error, "Unable to write to resource"} ==
               RPNCalculator.Output.write(resource, bad_filename, equation)
    end

    @use_close_error_message """
    Use the close/1 function from the `resource` specified in the arguments to close the opened file handle.
    E.g.) resource.close(filename)
    """
    @tag task_id: 3
    test "closes resource" do
      resource = __MODULE__
      filename = "filename"
      equation = "1 1 +"

      RPNCalculator.Output.write(resource, filename, equation)

      assert_received :close, @use_close_error_message
    end

    @tag task_id: 3
    test "closes resource even when rescuing from raised error" do
      resource = __MODULE__
      bad_filename = "bad_filename"
      equation = "1 1 +"

      RPNCalculator.Output.write(resource, bad_filename, equation)

      assert_received :close, "write/3 should close the `resource` even if an error is raised"
    end
  end
end
| 27.354167 | 108 | 0.645468 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.